Added blogroll page

Awstin 2024-08-09 21:42:46 -04:00
parent 2921474eb4
commit 603845fc40
8 changed files with 198 additions and 9 deletions


@ -128,6 +128,12 @@ img {
padding: 20px;
}
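/* bullet-free list styling, used for the blogroll links */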
.no-bul {
list-style-type: none;
padding: 0;
margin: 0;
}
@media (max-width: 1024px) {
.container {
flex-direction: column;

links.txt Normal file

@ -0,0 +1,27 @@
url: https://www.henrikkarlsson.xyz
title: Escaping Flatland
author: Henrik Karlsson
type: blog
date_added: 2024-08-08
description: Extremely deep and thought-provoking writing on thinking, life, learning, and many other things.
---
url: https://ploum.net
title: Ploum
author: Lionel Dricot
type: blog
date_added: 2024-08-08
description: Mostly in French; originally found through his article on the forever computer. Computing, culture, and writing.
---
url: https://sive.rs
title: Derek Sivers
author: Derek Sivers
type: blog
date_added: 2024-08-08
description: One of my favourite authors and thinkers, and one of the original inspirations for my self-hosted journey.
---
url: https://herman.bearblog.dev
title: Herman
author: Herman Martinus
type: blog
date_added: 2024-08-08
description: Developer of a wonderful minimalist blogging platform, based in South Africa.

pages/blogroll.html Normal file

@ -0,0 +1,18 @@
id: blogroll
title: Blogroll
date_created: 2024-08-08
date_last_updated: 2024-08-08
description: A list of blogs that I follow
---
<h2>Blogroll</h2>
<p>
I follow quite a few blogs.
I love good writing and personal websites:
places away from the noise of the modern web, without advertisements or trackers.
Following links through these is how I spend most of my time online these days.
</p>
<p>
Here they are in no particular order.
</p>
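<!-- filled in on page load by htmx from /api/blogrolllinks -->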
<ul class="no-bul" hx-trigger="load" hx-get="/api/blogrolllinks">
</ul>


@ -30,8 +30,10 @@ description: Home page and starting point for exploring my digital garden
Interests, colophon, links, blogroll, and AI, I think.
</p>
<ul>
<li><a href="/blogroll">Blogroll</a></li>
<li><a href="/uses">Uses</a></li>
<li><a href="/now">Now</a></li>
<li><a href="/about">About</a></li>
<li><a href="/ai">AI</a></li>
<li><a href="/contact">Contact</a></li>
</ul>


@ -1,9 +1,9 @@
use crate::database::PsqlData;
use futures_util::TryStreamExt;
use serde::{Deserialize, Serialize};
use sqlx::{self, postgres::PgPool};
use std::error::Error;
use time::Date;
use sqlx::{self, postgres::PgPool, Pool, Postgres};
use std::{error::Error, path::Path};
use time::{macros::format_description, Date};
#[derive(Debug, Serialize, Deserialize, PartialEq, PartialOrd, Clone, sqlx::Type)]
#[sqlx(type_name = "link_type", rename_all = "lowercase")]
@ -23,6 +23,62 @@ pub struct Link {
pub date_added: Date,
}
impl Link {
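/// Builds a `Link` from the lines of a single `links.txt` entry. The id is a placeholder;
/// `load_links` fills in the real one when the link is matched by URL or inserted.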
fn from_lines(mut lines: Vec<String>) -> Self {
let mut url: Option<String> = None;
let mut description: Option<String> = None;
let mut title: Option<String> = None;
let mut author: Option<String> = None;
let mut link_type: Option<LinkType> = None;
let mut date_added: Option<Date> = None;
// Each field is keyed by its prefix, so the order the lines are consumed in does not matter.
while let Some(line) = lines.pop() {
    if let Some(value) = line.strip_prefix("url: ") {
        url = Some(value.to_string());
    } else if let Some(value) = line.strip_prefix("title: ") {
        title = Some(value.to_string());
    } else if let Some(value) = line.strip_prefix("author: ") {
        author = Some(value.to_string());
    } else if let Some(value) = line.strip_prefix("type: ") {
        link_type = Some(match value {
            "blog" => LinkType::BLOG,
            _ => LinkType::ARTICLE,
        });
    } else if let Some(value) = line.strip_prefix("date_added: ") {
        let format = format_description!("[year]-[month]-[day]");
        date_added = Some(Date::parse(value, format).expect("not a date"));
    } else if let Some(value) = line.strip_prefix("description: ") {
        description = Some(value.to_string());
    } else if line == "---" {
        break;
    }
}
Link {
id: 0,
url: url.unwrap(),
title,
author: author.unwrap(),
date_added: date_added.unwrap(),
link_type: link_type.unwrap(),
description,
}
}
pub async fn read_by_url(
pool: &PgPool,
url: &str,
) -> Result<Box<Self>, Box<dyn Error>> {
let result = sqlx::query_as!(Link, "SELECT id,url,title,author,date_added,link_type AS \"link_type!: LinkType\",description FROM links WHERE url = $1;", url)
.fetch_one(pool)
.await?;
Ok(Box::new(result))
}
}
impl PsqlData for Link {
async fn read_all(pool: &PgPool) -> Result<Vec<Box<Self>>, Box<dyn Error>> {
let mut results = sqlx::query_as!(Link, "SELECT id,url,title,author,date_added,link_type AS \"link_type!: LinkType\",description FROM links;")
@ -87,3 +143,39 @@ impl PsqlData for Link {
crate::psql_delete!(id, pool, "links")
}
}
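/// Splits the raw lines of `links.txt` into one block of lines per entry, using `---` as
/// the separator. Lines are popped off the back, so blocks come out in reverse order,
/// which is fine because `Link::from_lines` keys every field off its prefix.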
fn split_into_links(mut lines: Vec<String>) -> Vec<Vec<String>> {
let mut link_lines: Vec<Vec<String>> = Vec::new();
let mut link: Vec<String> = Vec::new();
while let Some(line) = lines.pop() {
if line.eq("---") {
link_lines.push(link.clone());
link = Vec::new();
} else {
link.push(line);
}
}
link_lines.push(link.clone());
link_lines
}
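/// Reads `links.txt` and upserts every entry: links already in the database (matched by
/// URL) are updated, everything else is inserted.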
pub async fn load_links(pool: &Pool<Postgres>) -> Result<(), Box<dyn Error>> {
let path: &Path = Path::new("links.txt");
let mut links: Vec<Link> = split_into_links(super::read_lines(path))
.iter()
.map(|x: &Vec<String>| Link::from_lines(x.clone()))
.collect();
for link in links.iter_mut() {
match Link::read_by_url(pool, &link.url).await {
Ok(proj) => {
link.id = proj.id;
link.update(pool).await?
}
Err(_) => link.insert(pool).await?,
}
}
Ok(())
}


@ -1,16 +1,17 @@
use super::blog::get_articles_as_links_list;
use crate::html::AppState;
use crate::{
database::{link::Link, PsqlData},
html::AppState,
};
use axum::{response::IntoResponse, routing::get, Extension, Router};
use sqlx::PgPool;
use std::error::Error;
pub fn get_router() -> Router {
Router::new()
.route("/hello", get(hello_from_the_server))
.route("/articles", get(blogs))
.route("/recentarticles", get(recent_blogs))
}
async fn hello_from_the_server() -> &'static str {
"Hello!"
.route("/blogrolllinks", get(blogroll_links))
}
async fn blogs(state: Extension<AppState>) -> impl IntoResponse {
@ -32,3 +33,39 @@ async fn recent_blogs(state: Extension<AppState>) -> impl IntoResponse {
article_head.join("\n")
}
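// Endpoint behind /api/blogrolllinks; the blogroll page pulls this in with htmx on load
// and drops the rendered list items straight into its <ul>.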
async fn blogroll_links(state: Extension<AppState>) -> impl IntoResponse {
let db_pool = &state.db;
let blogroll_list: Vec<String> = get_blog_links_as_list(db_pool)
.await
.expect("couldn't get blogroll links");
blogroll_list.join("\n<hr>\n")
}
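/// Renders every stored link as an HTML <li>, falling back to the URL when a link has no
/// title and to an empty string when it has no description.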
pub async fn get_blog_links_as_list(pool: &PgPool) -> Result<Vec<String>, Box<dyn Error>> {
let links: Vec<Link> = match Link::read_all(pool).await {
Ok(a) => a.iter().map(|x| *x.clone()).collect(),
Err(_) => Vec::new(),
};
let list: Vec<String> = links
.iter()
.map(|link| {
let title: String = match &link.title {
Some(t) => t.to_string(),
None => link.url.clone(),
};
format!(
"<li><a href=\"{}\">{}</a>:<br>{}</li>",
link.url,
title,
match &link.description {
Some(d) => d,
None => "",
}
)
})
.collect();
Ok(list)
}


@ -27,6 +27,7 @@ pub fn get_router(pool: PgPool) -> Router {
.route("/contact", get(contact))
.route("/uses", get(uses))
.route("/ai", get(ai))
.route("/blogroll", get(blogroll))
.route(
"/robots.txt",
get(|| async { Redirect::permanent("/assets/robots.txt") }),
@ -57,3 +58,7 @@ async fn uses(state: Extension<AppState>) -> Result<impl IntoResponse, StatusCod
async fn ai(state: Extension<AppState>) -> Result<impl IntoResponse, StatusCode> {
get_page(&state.db, "ai").await
}
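// Serves the blogroll page through the same get_page lookup as the other pages.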
async fn blogroll(state: Extension<AppState>) -> Result<impl IntoResponse, StatusCode> {
get_page(&state.db, "blogroll").await
}


@ -1,5 +1,6 @@
#![allow(async_fn_in_trait)]
use crate::database::{article::load_articles, page::load_pages};
use crate::database::link::load_links;
use sqlx::PgPool;
use std::error::Error;
use tracing::info;
@ -35,5 +36,6 @@ pub async fn run_server(pool: PgPool) -> std::io::Result<()> {
pub async fn run_load(pool: &PgPool) -> Result<(), Box<dyn Error>> {
load_articles(pool).await?;
load_pages(pool).await?;
load_links(pool).await?;
Ok(())
}