Added creation of database with flag to program, refactored
parent 66de1ffa36
commit 315bdd6841

6 changed files with 113 additions and 62 deletions
src/database/article.rs

@@ -1,10 +1,11 @@
 use crate::database::PsqlData;
 use futures_util::TryStreamExt;
 use serde::{Deserialize, Serialize};
-use sqlx::postgres::PgPool;
+use sqlx::{postgres::PgPool, Pool, Postgres};
+use std::{error::Error, path::Path};
 use std::{
-    error::Error,
-    path::Path,
+    fs::{self, metadata},
+    path::PathBuf,
 };
 use time::{macros::format_description, Date};

@@ -133,3 +134,22 @@ impl PsqlData for Article {
         crate::psql_delete!(id, pool, "articles")
     }
 }
+
+pub async fn load_articles(pool: &Pool<Postgres>) -> Result<(), Box<dyn Error>> {
+    let paths = fs::read_dir("blog/").unwrap();
+    for path_entry in paths {
+        let path: PathBuf = path_entry.unwrap().path();
+        let metadata = metadata(path.clone()).unwrap();
+        if metadata.is_file() {
+            let mut article: Article = Article::from_file(path);
+            match Article::read_by_reference(pool, &article.reference).await {
+                Ok(art) => {
+                    article.id = art.id;
+                    article.update(pool).await?
+                }
+                Err(_) => article.insert(pool).await?,
+            }
+        }
+    }
+    Ok(())
+}
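As a usage sketch (not part of this commit): the new load_articles walks blog/, parses each file, and upserts by reference. A minimal caller might look like the following, assuming a Tokio runtime and the achubb_backend crate name used in src/main.rs; establish_connection comes from src/database/mod.rs and reads DATABASE_URL.

// Illustrative sketch only; the binary normally reaches this through run_load.
use achubb_backend::database::{article::load_articles, establish_connection};

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Pool built from DATABASE_URL, as in src/database/mod.rs.
    let pool = establish_connection().await?;
    // Existing references are updated in place, unknown references are inserted.
    load_articles(&pool).await?;
    Ok(())
}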
src/database/create.rs (new file, 45 lines)

@@ -0,0 +1,45 @@
+use sqlx::{PgPool, Pool, Postgres};
+use std::error::Error;
+
+const LINK_TYPE: &str = "CREATE TYPE link_type as ENUM ('article', 'blog');";
+
+const ARTICLE_TABLE: &str = "CREATE TABLE IF NOT EXISTS articles (
+    reference varchar(20) not null,
+    title varchar(50) not null,
+    previous varchar(20),
+    next varchar(20),
+    description text,
+    content text not null,
+    date date not null,
+    id serial primary key);";
+
+const PAGE_TABLE: &str = "CREATE TABLE IF NOT EXISTS pages (
+    reference varchar(20) not null,
+    title varchar(50) not null,
+    content text not null,
+    date_created date not null,
+    date_last_updated date not null,
+    description text,
+    id serial primary key);";
+
+const LINK_TABLE: &str = "CREATE TABLE IF NOT EXISTS links (
+    url varchar(100) not null,
+    date_added date not null,
+    description text,
+    author varchar(50) not null,
+    type link_type not null,
+    id serial primary key);";
+
+pub async fn create_database(pool: &PgPool) -> Result<(), Box<dyn Error>> {
+    create(pool, LINK_TYPE).await?;
+    create(pool, ARTICLE_TABLE).await?;
+    create(pool, PAGE_TABLE).await?;
+    create(pool, LINK_TABLE).await?;
+
+    Ok(())
+}
+
+async fn create(pool: &Pool<Postgres>, query: &str) -> Result<(), sqlx::Error> {
+    sqlx::query(query).execute(pool).await?;
+    Ok(())
+}
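A hedged sketch of driving the new schema setup directly, assuming sqlx's PgPoolOptions and a Tokio runtime (neither appears in this diff); in the program itself this path is reached through run_create and the setup flag in src/main.rs. The statement order matters: link_type must exist before the links table is created, and unlike the tables the enum is not guarded by IF NOT EXISTS.

// Illustrative sketch only; run once against a fresh database.
use achubb_backend::database::create::create_database;
use sqlx::postgres::PgPoolOptions;

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Any Postgres pool works; here one is built straight from DATABASE_URL.
    let pool = PgPoolOptions::new()
        .max_connections(1)
        .connect(&std::env::var("DATABASE_URL")?)
        .await?;
    // Creates link_type, then the articles, pages, and links tables in that order.
    create_database(&pool).await?;
    Ok(())
}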
src/database/mod.rs

@@ -9,6 +9,7 @@ use std::{

 pub mod article;
 pub mod page;
+pub mod create;

 pub async fn establish_connection() -> Result<PgPool, Box<dyn Error>> {
     let db_url = match env::var("DATABASE_URL") {
src/database/page.rs

@@ -1,8 +1,12 @@
 use crate::database::PsqlData;
 use futures_util::TryStreamExt;
 use serde::{Deserialize, Serialize};
-use sqlx::postgres::PgPool;
+use sqlx::{postgres::PgPool, Pool, Postgres};
 use std::{error::Error, path::Path};
+use std::{
+    fs::{self, metadata},
+    path::PathBuf,
+};
 use time::{macros::format_description, Date};

 #[derive(Debug, Serialize, Deserialize, PartialEq, Clone)]
@@ -124,3 +128,22 @@ impl PsqlData for Page {
         crate::psql_delete!(id, pool, "pages")
     }
 }
+
+pub async fn load_pages(pool: &Pool<Postgres>) -> Result<(), Box<dyn Error>> {
+    let paths = fs::read_dir("pages/").unwrap();
+    for path_entry in paths {
+        let path: PathBuf = path_entry.unwrap().path();
+        let metadata = metadata(path.clone()).unwrap();
+        if metadata.is_file() {
+            let mut page: Page = Page::from_file(path);
+            match Page::read_by_reference(pool, &page.reference).await {
+                Ok(proj) => {
+                    page.id = proj.id;
+                    page.update(pool).await?
+                }
+                Err(_) => page.insert(pool).await?,
+            }
+        }
+    }
+    Ok(())
+}
src/lib.rs (65 lines changed)

@@ -1,22 +1,15 @@
 #![allow(async_fn_in_trait)]
-use crate::database::{article::Article, page::Page, PsqlData};
-use sqlx::{PgPool, Pool, Postgres};
+use crate::database::{article::load_articles, create::create_database, page::load_pages};
+use sqlx::PgPool;
 use std::error::Error;
-use std::{
-    fs::{self, metadata},
-    path::PathBuf,
-};
 use tracing::info;
 use tracing_subscriber::{layer::SubscriberExt, util::SubscriberInitExt};
-mod database;
+pub mod database;
 mod html;
 mod macros;

-pub async fn run_server() -> std::io::Result<()> {
-    let pool: PgPool = match database::establish_connection().await {
-        Ok(p) => p,
-        Err(_) => panic!("error connecting to database"),
-    };
+pub async fn run_server(pool: PgPool) -> std::io::Result<()> {
     tracing_subscriber::registry()
         .with(
             tracing_subscriber::EnvFilter::try_from_default_env()
@@ -39,51 +32,13 @@ pub async fn run_server() -> std::io::Result<()> {
     Ok(())
 }

-pub async fn run_load() -> Result<(), Box<dyn Error>> {
-    let pool: PgPool = match database::establish_connection().await {
-        Ok(p) => p,
-        Err(_) => panic!("error connecting to database"),
-    };
-
-    load_articles(&pool).await?;
-    load_pages(&pool).await?;
+pub async fn run_load(pool: &PgPool) -> Result<(), Box<dyn Error>> {
+    load_articles(pool).await?;
+    load_pages(pool).await?;
     Ok(())
 }

-async fn load_articles(pool: &Pool<Postgres>) -> Result<(), Box<dyn Error>> {
-    let paths = fs::read_dir("blog/").unwrap();
-    for path_entry in paths {
-        let path: PathBuf = path_entry.unwrap().path();
-        let metadata = metadata(path.clone()).unwrap();
-        if metadata.is_file() {
-            let mut article: Article = Article::from_file(path);
-            match Article::read_by_reference(pool, &article.reference).await {
-                Ok(art) => {
-                    article.id = art.id;
-                    article.update(pool).await?
-                }
-                Err(_) => article.insert(pool).await?,
-            }
-        }
-    }
-    Ok(())
-}
-
-async fn load_pages(pool: &Pool<Postgres>) -> Result<(), Box<dyn Error>> {
-    let paths = fs::read_dir("pages/").unwrap();
-    for path_entry in paths {
-        let path: PathBuf = path_entry.unwrap().path();
-        let metadata = metadata(path.clone()).unwrap();
-        if metadata.is_file() {
-            let mut page: Page = Page::from_file(path);
-            match Page::read_by_reference(pool, &page.reference).await {
-                Ok(proj) => {
-                    page.id = proj.id;
-                    page.update(pool).await?
-                }
-                Err(_) => page.insert(pool).await?,
-            }
-        }
-    }
-    Ok(())
-}
+pub async fn run_create(pool: &PgPool) -> Result<(), Box<dyn Error>> {
+    create_database(pool).await?;
+    Ok(())
+}
src/main.rs (13 lines changed)

@@ -1,5 +1,7 @@
+use achubb_backend::{run_create, run_load, run_server, database::establish_connection};
 use clap::Parser;
 use std::error::Error;
+use sqlx::postgres::PgPool;

 #[derive(Parser, Debug)]
 #[command(version, about, long_about = None)]
@@ -18,16 +20,21 @@ struct Args {
 async fn main() -> Result<(), Box<dyn Error>> {
     let args = Args::parse();

+    let pool: PgPool = match establish_connection().await {
+        Ok(p) => p,
+        Err(_) => panic!("error connecting to database"),
+    };
+
     if args.setup {
-        println!("Setup was passed");
+        run_create(&pool).await?
     }

     if args.load {
-        achubb_backend::run_load().await?;
+        run_load(&pool).await?;
     }

     if args.run || !(args.load || args.setup) {
-        achubb_backend::run_server().await?;
+        run_server(pool).await?;
     }
     Ok(())
 }
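The hunks above only show main's body, so the clap-derived Args struct is elided; a hypothetical shape consistent with the args.setup, args.load, and args.run checks might look like the following (the #[arg] attributes and help text are assumptions, only the field names are taken from the diff).

// Hypothetical reconstruction, not the actual struct from the repository.
use clap::Parser;

#[derive(Parser, Debug)]
#[command(version, about, long_about = None)]
struct Args {
    /// Create the database schema (run_create)
    #[arg(long)]
    setup: bool,

    /// Load articles and pages from disk into the database (run_load)
    #[arg(long)]
    load: bool,

    /// Start the web server (run_server); also the default when no flag is given
    #[arg(long)]
    run: bool,
}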