From 603845fc40e4ec001a85a8d3ff1ec94c76bb30f0 Mon Sep 17 00:00:00 2001 From: Awstin Date: Fri, 9 Aug 2024 21:42:46 -0400 Subject: [PATCH] Added blogroll page --- assets/main.css | 6 +++ links.txt | 27 ++++++++++++ pages/blogroll.html | 18 ++++++++ pages/garden.html | 2 + src/database/link.rs | 98 ++++++++++++++++++++++++++++++++++++++++++-- src/html/api.rs | 49 +++++++++++++++++++--- src/html/root.rs | 5 +++ src/lib.rs | 2 + 8 files changed, 198 insertions(+), 9 deletions(-) create mode 100644 links.txt create mode 100644 pages/blogroll.html diff --git a/assets/main.css b/assets/main.css index 260a329..59343fe 100644 --- a/assets/main.css +++ b/assets/main.css @@ -128,6 +128,12 @@ img { padding: 20px; } +.no-bul { + list-style-type: none; + padding: 0; + margin: 0; +} + @media (max-width: 1024px) { .container { flex-direction: column; diff --git a/links.txt b/links.txt new file mode 100644 index 0000000..8957694 --- /dev/null +++ b/links.txt @@ -0,0 +1,27 @@ +url: https://www.henrikkarlsson.xyz +title: Escaping Flatland +author: Henrik Karlsson +type: blog +date_added: 2024-08-08 +description: Extremely deep and thought provoking writing. On thinking, life, learning and many things. +--- +url: https://ploum.net +title: Ploum +author: Lionel Dricot +type: blog +date_added: 2024-08-08 +description: Mostly French, originally found through his article on the forever computer. Computing, culture, and writing. +--- +url: https://sive.rs +title: Derek Sivers +author: Derek Sivers +type: blog +date_added: 2024-08-08 +description: One of my favourite authors, thinkers, and one of the original inspirations for my self hosted journey. +--- +url: https://herman.bearblog.dev +title: Herman +author: Herman Martinus +type: blog +date_added: 2024-08-08 +description: Developer of a wonderful minimalist blogging platform based in South Africa. 
diff --git a/pages/blogroll.html b/pages/blogroll.html new file mode 100644 index 0000000..7b435d6 --- /dev/null +++ b/pages/blogroll.html @@ -0,0 +1,18 @@ +id: blogroll +title: Blogroll +date_created: 2024-08-08 +date_last_updated: 2024-08-08 +description: A list of blogs that I follow +--- +

Blogroll

+

+ I follow quite a few blogs. + I love good writing and personal websites. + Places away from the noise of the modern website, without advertisement or trackers. + Following links through these is how I spend most of my time online these days. +

+

+ Here they are in no particular order. +

+ diff --git a/pages/garden.html b/pages/garden.html index b38e4f4..f5006f9 100644 --- a/pages/garden.html +++ b/pages/garden.html @@ -30,8 +30,10 @@ description: Home page and starting point for exploring my digital garden Interests, colophon, links, blogroll, and AI I think.

diff --git a/src/database/link.rs b/src/database/link.rs index b257c66..610753f 100644 --- a/src/database/link.rs +++ b/src/database/link.rs @@ -1,9 +1,9 @@ use crate::database::PsqlData; use futures_util::TryStreamExt; use serde::{Deserialize, Serialize}; -use sqlx::{self, postgres::PgPool}; -use std::error::Error; -use time::Date; +use sqlx::{self, postgres::PgPool, Pool, Postgres}; +use std::{error::Error, path::Path}; +use time::{macros::format_description, Date}; #[derive(Debug, Serialize, Deserialize, PartialEq, PartialOrd, Clone, sqlx::Type)] #[sqlx(type_name = "link_type", rename_all = "lowercase")] @@ -23,6 +23,62 @@ pub struct Link { pub date_added: Date, } +impl Link { + fn from_lines(mut lines: Vec) -> Self { + let mut url: Option = None; + let mut description: Option = None; + let mut title: Option = None; + let mut author: Option = None; + let mut link_type: Option = None; + let mut date_added: Option = None; + while lines.len() > 0 { + let line: String = lines.pop().expect("Something went terribly wrong here"); + if line.contains("url: ") { + url = Some(line.clone().replace("url: ", "")); + } else if line.contains("title: ") { + title = Some(line.clone().replace("title: ", "")); + } else if line.contains("author: ") { + author = Some(line.clone().replace("author: ", "")); + } else if line.contains("type: ") { + link_type = Some(match line.clone().replace("type: ", "").as_str() { + "blog" => LinkType::BLOG, + _ => LinkType::ARTICLE, + }); + } else if line.contains("date_added: ") { + let format = format_description!("[year]-[month]-[day]"); + date_added = Some( + Date::parse(&line.clone().replace("date_added: ", ""), format) + .expect("not a date"), + ); + } else if line.contains("description: ") { + description = Some(line.clone().replace("description: ", "")); + } else if line.eq("---") { + break; + } + } + Link { + id: 0, + url: url.unwrap(), + title, + author: author.unwrap(), + date_added: date_added.unwrap(), + link_type: link_type.unwrap(), + 
description, + } + } + + pub async fn read_by_url( + pool: &PgPool, + url: &String, + ) -> Result, Box> { + let result = sqlx::query_as!(Link, "SELECT id,url,title,author,date_added,link_type AS \"link_type!: LinkType\",description FROM links WHERE url = $1;", url) + .fetch_one(pool) + .await?; + + Ok(Box::new(result)) + } +} + impl PsqlData for Link { async fn read_all(pool: &PgPool) -> Result>, Box> { let mut results = sqlx::query_as!(Link, "SELECT id,url,title,author,date_added,link_type AS \"link_type!: LinkType\",description FROM links;") @@ -87,3 +143,39 @@ impl PsqlData for Link { crate::psql_delete!(id, pool, "links") } } + +fn split_into_links(mut lines: Vec) -> Vec> { + let mut link_lines: Vec> = Vec::new(); + let mut link: Vec = Vec::new(); + while lines.len() > 0 { + let line: String = lines + .pop() + .expect("This should not be trying to pop an empty Vec"); + if line.eq("---") { + link_lines.push(link.clone()); + link = Vec::new(); + } else { + link.push(line); + } + } + link_lines.push(link.clone()); + link_lines +} + +pub async fn load_links(pool: &Pool) -> Result<(), Box> { + let path: &Path = Path::new("links.txt"); + let mut links: Vec = split_into_links(super::read_lines(path)) + .iter() + .map(|x: &Vec| Link::from_lines(x.clone())) + .collect(); + for link in links.iter_mut() { + match Link::read_by_url(pool, &link.url).await { + Ok(proj) => { + link.id = proj.id; + link.update(pool).await? 
+ } + Err(_) => link.insert(pool).await?, + } + } + Ok(()) +} diff --git a/src/html/api.rs b/src/html/api.rs index d9c0aca..3dd3e79 100644 --- a/src/html/api.rs +++ b/src/html/api.rs @@ -1,16 +1,17 @@ use super::blog::get_articles_as_links_list; -use crate::html::AppState; +use crate::{ + database::{link::Link, PsqlData}, + html::AppState, +}; use axum::{response::IntoResponse, routing::get, Extension, Router}; +use sqlx::PgPool; +use std::error::Error; pub fn get_router() -> Router { Router::new() - .route("/hello", get(hello_from_the_server)) .route("/articles", get(blogs)) .route("/recentarticles", get(recent_blogs)) -} - -async fn hello_from_the_server() -> &'static str { - "Hello!" + .route("/blogrolllinks", get(blogroll_links)) } async fn blogs(state: Extension) -> impl IntoResponse { @@ -32,3 +33,39 @@ async fn recent_blogs(state: Extension) -> impl IntoResponse { article_head.join("\n") } + +async fn blogroll_links(state: Extension) -> impl IntoResponse { + let db_pool = &state.db; + let blogroll_list: Vec = get_blog_links_as_list(db_pool) + .await + .expect("couldn't get blogroll links"); + + blogroll_list.join("\n
\n") +} + +pub async fn get_blog_links_as_list(pool: &PgPool) -> Result, Box> { + let links: Vec = match Link::read_all(pool).await { + Ok(a) => a.iter().map(|x| *x.clone()).collect(), + Err(_) => Vec::new(), + }; + + let list: Vec = links + .iter() + .map(|link| { + let title: String = match &link.title { + Some(t) => t.to_string(), + None => link.url.clone(), + }; + format!( + "
  • {}:
    {}
  • ", + link.url, + title, + match &link.description { + Some(d) => d, + None => "", + } + ) + }) + .collect(); + Ok(list) +} diff --git a/src/html/root.rs b/src/html/root.rs index c29b912..ae2eb75 100644 --- a/src/html/root.rs +++ b/src/html/root.rs @@ -27,6 +27,7 @@ pub fn get_router(pool: PgPool) -> Router { .route("/contact", get(contact)) .route("/uses", get(uses)) .route("/ai", get(ai)) + .route("/blogroll", get(blogroll)) .route( "/robots.txt", get(|| async { Redirect::permanent("/assets/robots.txt") }), @@ -57,3 +58,7 @@ async fn uses(state: Extension) -> Result) -> Result { get_page(&state.db, "ai").await } + +async fn blogroll(state: Extension) -> Result { + get_page(&state.db, "blogroll").await +} diff --git a/src/lib.rs b/src/lib.rs index a3146e8..3721438 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -1,5 +1,6 @@ #![allow(async_fn_in_trait)] use crate::database::{article::load_articles, page::load_pages}; +use database::link::load_links; use sqlx::PgPool; use std::error::Error; use tracing::info; @@ -35,5 +36,6 @@ pub async fn run_server(pool: PgPool) -> std::io::Result<()> { pub async fn run_load(pool: &PgPool) -> Result<(), Box> { load_articles(pool).await?; load_pages(pool).await?; + load_links(pool).await?; Ok(()) }