Compare commits

..

No commits in common. "cf89d17035af82edda2bd2ffc9b7f9c76a4a84cb" and "6fccda5827d4877e81ff5e6fa1859dec71c0e74a" have entirely different histories.

4 changed files with 58 additions and 83 deletions

View File

@@ -61,9 +61,7 @@ pub async fn setup_demo_data(pool: &sqlx::SqlitePool) {
.await .await
.expect("Failed to create demo user"); .expect("Failed to create demo user");
let feeds: Vec<Feed> = DEMO_FEEDS for demo_feed in DEMO_FEEDS {
.iter()
.map(|demo_feed| {
let mut feed = Feed::new( let mut feed = Feed::new(
demo_feed.name.to_string(), demo_feed.name.to_string(),
demo_feed.url.parse().unwrap(), demo_feed.url.parse().unwrap(),
@@ -72,26 +70,11 @@ pub async fn setup_demo_data(pool: &sqlx::SqlitePool) {
if let Some(category) = demo_feed.category { if let Some(category) = demo_feed.category {
feed.categorization = vec![category.to_string()]; feed.categorization = vec![category.to_string()];
} }
feed
})
.collect();
for feed in feeds {
feed.write_to_database(pool) feed.write_to_database(pool)
.await .await
.expect("Failed to create demo feed"); .expect("Failed to create demo feed");
} }
/*
for feed in feeds {
let url = feed.url;
let entries = match fetch_new_entries(&url).await {
};
update_entry_db(&entries, &feed_id, &mut db).await?;
}
*/
info!("Successfully set up demo data"); info!("Successfully set up demo data");
} }

View File

@@ -9,7 +9,6 @@ mod demo;
mod feed_utils; mod feed_utils;
mod feeds; mod feeds;
mod poll; mod poll;
mod poll_utils;
mod session_store; mod session_store;
mod user; mod user;

View File

@@ -1,13 +1,14 @@
use crate::poll_utils::Entry;
use crate::user::AuthenticatedUser; use crate::user::AuthenticatedUser;
use crate::Db; use crate::{feed_utils::fetch_feed, Db};
use chrono::{Duration, Utc}; use chrono::{DateTime, Duration, Utc};
use feed_rs::model::Text;
use rocket::http::Status; use rocket::http::Status;
use rocket::serde::uuid::Uuid; use rocket::serde::uuid::Uuid;
use rocket::serde::{self, json::Json, Deserialize, Serialize}; use rocket::serde::{self, json::Json, Deserialize, Serialize};
use rocket_db_pools::Connection; use rocket_db_pools::Connection;
use sqlx::{Acquire, SqliteConnection}; use sqlx::{Acquire, SqliteConnection};
use tracing::{error, info}; use tracing::{error, info};
use url::Url;
const POLLING_INTERVAL: Duration = Duration::minutes(20); const POLLING_INTERVAL: Duration = Duration::minutes(20);
const MAX_ENTRIES_PER_FEED: i32 = 30; const MAX_ENTRIES_PER_FEED: i32 = 30;
@@ -20,6 +21,23 @@ pub struct FeedPollResponse {
entries: Vec<Entry>, entries: Vec<Entry>,
} }
/// A single feed entry as exposed to API clients.
///
/// Serialized with Rocket's serde. Field order matters for serialized
/// output, so it is preserved as-is.
#[derive(Debug, Serialize)]
#[serde(crate = "rocket::serde")]
struct Entry {
    /// id is the id from the feed, and is the primary key of entries
    id: String,
    /// local_id is a UUID generated locally. it is used because we don't have control over the
    /// exact format of the id from the feed entry
    local_id: Uuid,
    /// Entry title; `fetch_new_entries` substitutes "<no title>" when the feed omits it.
    title: String,
    /// Publication timestamp reported by the feed, when present.
    published: Option<DateTime<Utc>>,
    /// Last-updated timestamp reported by the feed, when present.
    updated: Option<DateTime<Utc>>,
    /// Entry summary; `fetch_new_entries` substitutes "<no summary>" when the feed omits it.
    summary: String,
    /// Full content body as parsed by feed_rs, when present.
    content: Option<feed_rs::model::Content>,
    /// First link attached to the entry, when any exist.
    link: Option<String>,
    /// Timestamp of when the entry was marked read; `None` for freshly fetched
    /// entries. Presumably updated via `EntryStateUpdate` — confirm against the
    /// state-update handler.
    marked_read: Option<DateTime<Utc>>,
}
#[derive(Debug, Deserialize)] #[derive(Debug, Deserialize)]
#[serde(crate = "rocket::serde")] #[serde(crate = "rocket::serde")]
pub struct EntryStateUpdate { pub struct EntryStateUpdate {
@@ -164,6 +182,33 @@ async fn read_entries(feed_id: &str, db: &mut SqliteConnection) -> Result<Vec<En
Ok(entries) Ok(entries)
} }
/// Perform the request to fetch from the remote feed url.
///
/// Any failure to fetch or parse the remote feed is mapped to
/// `502 Bad Gateway`, since the fault lies with the upstream server.
/// Each feed_rs entry is converted into our local `Entry`, assigning a
/// fresh `local_id` and leaving `marked_read` unset.
async fn fetch_new_entries(url: &Url) -> Result<Vec<Entry>, Status> {
    let feed_data = fetch_feed(url).await.map_err(|_| Status::BadGateway)?;

    // Extract the text content of an optional feed field, substituting a
    // "<no title>"-style placeholder when absent. `map_or_else` only builds
    // the placeholder when it is actually needed (clippy::or_fun_call), and
    // moving `t.content` out avoids cloning the String.
    fn get(item: Option<Text>, name: &'static str) -> String {
        item.map_or_else(|| format!("<no {name}>"), |t| t.content)
    }

    let entries: Vec<Entry> = feed_data
        .entries
        .into_iter()
        .map(|feed_entry| Entry {
            id: feed_entry.id,
            local_id: Uuid::new_v4(),
            title: get(feed_entry.title, "title"),
            published: feed_entry.published,
            updated: feed_entry.updated,
            summary: get(feed_entry.summary, "summary"),
            content: feed_entry.content,
            link: feed_entry.links.first().map(|l| l.href.clone()),
            marked_read: None,
        })
        .collect();
    Ok(entries)
}
#[post("/poll/<feed_id>")] #[post("/poll/<feed_id>")]
pub async fn poll_feed( pub async fn poll_feed(
mut db: Connection<Db>, mut db: Connection<Db>,
@@ -194,7 +239,7 @@ pub async fn poll_feed(
read_entries(&feed_id, &mut db).await? read_entries(&feed_id, &mut db).await?
} else { } else {
info!("Fetching new entries for feed {}", feed_id); info!("Fetching new entries for feed {}", feed_id);
let entries = crate::poll_utils::fetch_new_entries(&url).await?; let entries = fetch_new_entries(&url).await?;
update_entry_db(&entries, &feed_id, &mut db).await?; update_entry_db(&entries, &feed_id, &mut db).await?;
entries entries
}; };

View File

@@ -1,52 +0,0 @@
use crate::feed_utils::fetch_feed;
use chrono::{DateTime, Utc};
use feed_rs::model::Text;
use rocket::http::Status;
use rocket::serde::uuid::Uuid;
use rocket::serde::Serialize;
use url::Url;
/// A single feed entry as exposed to API clients.
///
/// Serialized with Rocket's serde. Field order matters for serialized
/// output, so it is preserved as-is.
#[derive(Debug, Serialize)]
#[serde(crate = "rocket::serde")]
pub struct Entry {
    /// id is the id from the feed, and is the primary key of entries
    pub id: String,
    /// local_id is a UUID generated locally. it is used because we don't have control over the
    /// exact format of the id from the feed entry
    pub local_id: Uuid,
    /// Entry title; `fetch_new_entries` substitutes "<no title>" when the feed omits it.
    pub title: String,
    /// Publication timestamp reported by the feed, when present.
    pub published: Option<DateTime<Utc>>,
    /// Last-updated timestamp reported by the feed, when present.
    pub updated: Option<DateTime<Utc>>,
    /// Entry summary; `fetch_new_entries` substitutes "<no summary>" when the feed omits it.
    pub summary: String,
    /// Full content body as parsed by feed_rs, when present.
    pub content: Option<feed_rs::model::Content>,
    /// First link attached to the entry, when any exist.
    pub link: Option<String>,
    /// Timestamp of when the entry was marked read; `None` for freshly fetched
    /// entries. Presumably set later by the caller/persistence layer — confirm.
    pub marked_read: Option<DateTime<Utc>>,
}
/// Perform the request to fetch from the remote feed url.
///
/// Any failure to fetch or parse the remote feed is mapped to
/// `502 Bad Gateway`, since the fault lies with the upstream server.
/// Each feed_rs entry is converted into our local `Entry`, assigning a
/// fresh `local_id` and leaving `marked_read` unset.
pub async fn fetch_new_entries(url: &Url) -> Result<Vec<Entry>, Status> {
    let feed_data = fetch_feed(url).await.map_err(|_| Status::BadGateway)?;

    // Extract the text content of an optional feed field, substituting a
    // "<no title>"-style placeholder when absent. `map_or_else` only builds
    // the placeholder when it is actually needed (clippy::or_fun_call), and
    // moving `t.content` out avoids cloning the String.
    fn get(item: Option<Text>, name: &'static str) -> String {
        item.map_or_else(|| format!("<no {name}>"), |t| t.content)
    }

    let entries: Vec<Entry> = feed_data
        .entries
        .into_iter()
        .map(|feed_entry| Entry {
            id: feed_entry.id,
            local_id: Uuid::new_v4(),
            title: get(feed_entry.title, "title"),
            published: feed_entry.published,
            updated: feed_entry.updated,
            summary: get(feed_entry.summary, "summary"),
            content: feed_entry.content,
            link: feed_entry.links.first().map(|l| l.href.clone()),
            marked_read: None,
        })
        .collect();
    Ok(entries)
}