From 315bdd68419d5fc52ca266b99c048bd652d9be8e Mon Sep 17 00:00:00 2001
From: Awstin
Date: Wed, 31 Jul 2024 22:14:32 -0400
Subject: [PATCH] Added creation of database with flag to program, refactored

---
 src/database/article.rs | 26 +++++++++++++++--
 src/database/create.rs  | 45 ++++++++++++++++++++++++++++
 src/database/mod.rs     |  1 +
 src/database/page.rs    | 25 +++++++++++++++-
 src/lib.rs              | 65 +++++++----------------------------------
 src/main.rs             | 13 +++++++--
 6 files changed, 113 insertions(+), 62 deletions(-)
 create mode 100644 src/database/create.rs

diff --git a/src/database/article.rs b/src/database/article.rs
index da2b05f..cc0b158 100644
--- a/src/database/article.rs
+++ b/src/database/article.rs
@@ -1,10 +1,11 @@
 use crate::database::PsqlData;
 use futures_util::TryStreamExt;
 use serde::{Deserialize, Serialize};
-use sqlx::postgres::PgPool;
+use sqlx::{postgres::PgPool, Pool, Postgres};
+use std::{error::Error, path::Path};
 use std::{
-    error::Error,
-    path::Path,
+    fs::{self, metadata},
+    path::PathBuf,
 };
 use time::{macros::format_description, Date};
 
@@ -133,3 +134,22 @@ impl PsqlData for Article {
         crate::psql_delete!(id, pool, "articles")
     }
 }
+
+pub async fn load_articles(pool: &Pool<Postgres>) -> Result<(), Box<dyn Error>> {
+    let paths = fs::read_dir("blog/").unwrap();
+    for path_entry in paths {
+        let path: PathBuf = path_entry.unwrap().path();
+        let metadata = metadata(path.clone()).unwrap();
+        if metadata.is_file() {
+            let mut article: Article = Article::from_file(path);
+            match Article::read_by_reference(pool, &article.reference).await {
+                Ok(art) => {
+                    article.id = art.id;
+                    article.update(pool).await?
+                }
+                Err(_) => article.insert(pool).await?,
+            }
+        }
+    }
+    Ok(())
+}
diff --git a/src/database/create.rs b/src/database/create.rs
new file mode 100644
index 0000000..07979f1
--- /dev/null
+++ b/src/database/create.rs
@@ -0,0 +1,45 @@
+use sqlx::{PgPool, Pool, Postgres};
+use std::error::Error;
+
+const LINK_TYPE: &str = "CREATE TYPE link_type as ENUM ('article', 'blog');";
+
+const ARTICLE_TABLE: &str = "CREATE TABLE IF NOT EXISTS articles (
+    reference varchar(20) not null,
+    title varchar(50) not null,
+    previous varchar(20),
+    next varchar(20),
+    description text,
+    content text not null,
+    date date not null,
+    id serial primary key);";
+
+const PAGE_TABLE: &str = "CREATE TABLE IF NOT EXISTS pages (
+    reference varchar(20) not null,
+    title varchar(50) not null,
+    content text not null,
+    date_created date not null,
+    date_last_updated date not null,
+    description text,
+    id serial primary key);";
+
+const LINK_TABLE: &str = "CREATE TABLE IF NOT EXISTS links (
+    url varchar(100) not null,
+    date_added date not null,
+    description text,
+    author varchar(50) not null,
+    type link_type not null,
+    id serial primary key);";
+
+pub async fn create_database(pool: &PgPool) -> Result<(), Box<dyn Error>> {
+    create(pool, LINK_TYPE).await?;
+    create(pool, ARTICLE_TABLE).await?;
+    create(pool, PAGE_TABLE).await?;
+    create(pool, LINK_TABLE).await?;
+
+    Ok(())
+}
+
+async fn create(pool: &Pool<Postgres>, query: &str) -> Result<(), sqlx::Error> {
+    sqlx::query(query).execute(pool).await?;
+    Ok(())
+}
diff --git a/src/database/mod.rs b/src/database/mod.rs
index 450c483..e3b520e 100644
--- a/src/database/mod.rs
+++ b/src/database/mod.rs
@@ -9,6 +9,7 @@ use std::{
 
 pub mod article;
 pub mod page;
+pub mod create;
 
 pub async fn establish_connection() -> Result<PgPool, Box<dyn Error>> {
     let db_url = match env::var("DATABASE_URL") {
diff --git a/src/database/page.rs b/src/database/page.rs
index fb33f02..ab64347 100644
--- a/src/database/page.rs
+++ b/src/database/page.rs
@@ -1,8 +1,12 @@
 use crate::database::PsqlData;
 use futures_util::TryStreamExt;
 use serde::{Deserialize, Serialize};
-use sqlx::postgres::PgPool;
+use sqlx::{postgres::PgPool, Pool, Postgres};
 use std::{error::Error, path::Path};
+use std::{
+    fs::{self, metadata},
+    path::PathBuf,
+};
 use time::{macros::format_description, Date};
 
 #[derive(Debug, Serialize, Deserialize, PartialEq, Clone)]
@@ -124,3 +128,22 @@ impl PsqlData for Page {
         crate::psql_delete!(id, pool, "pages")
     }
 }
+
+pub async fn load_pages(pool: &Pool<Postgres>) -> Result<(), Box<dyn Error>> {
+    let paths = fs::read_dir("pages/").unwrap();
+    for path_entry in paths {
+        let path: PathBuf = path_entry.unwrap().path();
+        let metadata = metadata(path.clone()).unwrap();
+        if metadata.is_file() {
+            let mut page: Page = Page::from_file(path);
+            match Page::read_by_reference(pool, &page.reference).await {
+                Ok(proj) => {
+                    page.id = proj.id;
+                    page.update(pool).await?
+                }
+                Err(_) => page.insert(pool).await?,
+            }
+        }
+    }
+    Ok(())
+}
diff --git a/src/lib.rs b/src/lib.rs
index fa9dc34..1b88303 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -1,22 +1,15 @@
 #![allow(async_fn_in_trait)]
-use crate::database::{article::Article, page::Page, PsqlData};
-use sqlx::{PgPool, Pool, Postgres};
+use crate::database::{article::load_articles, create::create_database, page::load_pages};
+use sqlx::PgPool;
 use std::error::Error;
-use std::{
-    fs::{self, metadata},
-    path::PathBuf,
-};
 use tracing::info;
 use tracing_subscriber::{layer::SubscriberExt, util::SubscriberInitExt};
-mod database;
+
+pub mod database;
 mod html;
 mod macros;
 
-pub async fn run_server() -> std::io::Result<()> {
-    let pool: PgPool = match database::establish_connection().await {
-        Ok(p) => p,
-        Err(_) => panic!("error connecting to database"),
-    };
+pub async fn run_server(pool: PgPool) -> std::io::Result<()> {
     tracing_subscriber::registry()
         .with(
             tracing_subscriber::EnvFilter::try_from_default_env()
@@ -39,51 +32,13 @@ pub async fn run_server() -> std::io::Result<()> {
     Ok(())
 }
 
-pub async fn run_load() -> Result<(), Box<dyn Error>> {
-    let pool: PgPool = match database::establish_connection().await {
-        Ok(p) => p,
-        Err(_) => panic!("error connecting to database"),
-    };
-
-    load_articles(&pool).await?;
-    load_pages(&pool).await?;
+pub async fn run_load(pool: &PgPool) -> Result<(), Box<dyn Error>> {
+    load_articles(pool).await?;
+    load_pages(pool).await?;
     Ok(())
 }
 
-async fn load_articles(pool: &Pool<Postgres>) -> Result<(), Box<dyn Error>> {
-    let paths = fs::read_dir("blog/").unwrap();
-    for path_entry in paths {
-        let path: PathBuf = path_entry.unwrap().path();
-        let metadata = metadata(path.clone()).unwrap();
-        if metadata.is_file() {
-            let mut article: Article = Article::from_file(path);
-            match Article::read_by_reference(pool, &article.reference).await {
-                Ok(art) => {
-                    article.id = art.id;
-                    article.update(pool).await?
-                }
-                Err(_) => article.insert(pool).await?,
-            }
-        }
-    }
-    Ok(())
-}
-
-async fn load_pages(pool: &Pool<Postgres>) -> Result<(), Box<dyn Error>> {
-    let paths = fs::read_dir("pages/").unwrap();
-    for path_entry in paths {
-        let path: PathBuf = path_entry.unwrap().path();
-        let metadata = metadata(path.clone()).unwrap();
-        if metadata.is_file() {
-            let mut page: Page = Page::from_file(path);
-            match Page::read_by_reference(pool, &page.reference).await {
-                Ok(proj) => {
-                    page.id = proj.id;
-                    page.update(pool).await?
-                }
-                Err(_) => page.insert(pool).await?,
-            }
-        }
-    }
+pub async fn run_create(pool: &PgPool) -> Result<(), Box<dyn Error>> {
+    create_database(pool).await?;
     Ok(())
 }
diff --git a/src/main.rs b/src/main.rs
index bc5759e..4b35986 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -1,5 +1,7 @@
+use achubb_backend::{run_create, run_load, run_server, database::establish_connection};
 use clap::Parser;
 use std::error::Error;
+use sqlx::postgres::PgPool;
 
 #[derive(Parser, Debug)]
 #[command(version, about, long_about = None)]
@@ -18,16 +20,21 @@ struct Args {
 async fn main() -> Result<(), Box<dyn Error>> {
     let args = Args::parse();
 
+    let pool: PgPool = match establish_connection().await {
+        Ok(p) => p,
+        Err(_) => panic!("error connecting to database"),
+    };
+
     if args.setup {
-        println!("Setup was passed");
+        run_create(&pool).await?
    }
 
     if args.load {
-        achubb_backend::run_load().await?;
+        run_load(&pool).await?;
     }
 
     if args.run || !(args.load || args.setup) {
-        achubb_backend::run_server().await?;
+        run_server(pool).await?;
     }
     Ok(())
 }
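
Usage sketch (not applied by the patch): a minimal driver showing how the new public
entry points compose, assuming a Tokio runtime (main.rs' runtime attribute is outside
the hunk context) and a DATABASE_URL environment variable pointing at a reachable
Postgres instance, which is what establish_connection reads. It mirrors the flag
handling in main.rs without clap: create the schema once, load the on-disk content,
then hand the pool to the server.

    use achubb_backend::{database::establish_connection, run_create, run_load, run_server};
    use sqlx::PgPool;
    use std::error::Error;

    #[tokio::main] // assumed runtime; not confirmed by the visible diff
    async fn main() -> Result<(), Box<dyn Error>> {
        // establish_connection reads DATABASE_URL; main.rs panics on Err, here it is propagated.
        let pool: PgPool = establish_connection().await?;

        run_create(&pool).await?; // one-time DDL: link_type enum plus articles/pages/links tables
        run_load(&pool).await?;   // upsert files from blog/ and pages/ into the database
        run_server(pool).await?;  // server takes ownership of the pool

        Ok(())
    }

Note that run_create is not idempotent as written: CREATE TYPE link_type has no
IF NOT EXISTS guard, so running the setup path twice returns an error from Postgres.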