not working truncation
This commit is contained in:
parent
60b2744a1e
commit
93d22bfe3f
1 changed file with 43 additions and 12 deletions
55
src/lib.rs
55
src/lib.rs
|
|
@ -1,4 +1,4 @@
|
||||||
use std::{collections::HashMap, time::Duration};
|
use std::{collections::HashMap, fmt::Display, time::Duration};
|
||||||
|
|
||||||
use feed_rs::parser::parse;
|
use feed_rs::parser::parse;
|
||||||
use html2md::{TagHandler, TagHandlerFactory, dummy::DummyHandler};
|
use html2md::{TagHandler, TagHandlerFactory, dummy::DummyHandler};
|
||||||
|
|
@ -44,7 +44,7 @@ pub struct BlogdorTheAggregator {
|
||||||
blogdor_token: String, // sent *from zulip* in POSTs *to us*
|
blogdor_token: String, // sent *from zulip* in POSTs *to us*
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug, Default, Clone, PartialEq, Eq)]
|
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||||
pub struct FeedEntry {
|
pub struct FeedEntry {
|
||||||
post_url: String,
|
post_url: String,
|
||||||
feed_url: String,
|
feed_url: String,
|
||||||
|
|
@ -54,7 +54,7 @@ pub struct FeedEntry {
|
||||||
published: DateTime<Utc>,
|
published: DateTime<Utc>,
|
||||||
received: DateTime<Utc>,
|
received: DateTime<Utc>,
|
||||||
feed_description: Option<String>,
|
feed_description: Option<String>,
|
||||||
body: Option<String>,
|
body: PostBody,
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||||
|
|
@ -79,6 +79,26 @@ pub enum Action {
|
||||||
List,
|
List,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Rendered body of a feed post, tracking whether it had to be cut
/// down to fit Zulip's message-length limit.
///
/// Deriving `Default` (with `Empty` as the default variant) lets
/// containing types such as `FeedEntry` keep their own `Default`
/// derive instead of dropping it.
#[derive(Debug, Default, Clone, PartialEq, Eq)]
enum PostBody {
    /// Complete post content (already converted to Markdown).
    Full(String),
    /// Content truncated to the Zulip message cutoff; `Display`
    /// appends a "[...]" marker to signal the cut.
    Truncated(String),
    /// No content was available for the post.
    #[default]
    Empty,
}
|
||||||
|
|
||||||
|
impl Display for PostBody {
|
||||||
|
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||||
|
let s = match self {
|
||||||
|
PostBody::Empty => "<no preview>".to_string(),
|
||||||
|
PostBody::Full(s) => s.to_owned(),
|
||||||
|
PostBody::Truncated(s) => {
|
||||||
|
format!("{s}[...]")
|
||||||
|
}
|
||||||
|
};
|
||||||
|
write!(f, "{s}")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
#[derive(Debug, Clone)]
|
#[derive(Debug, Clone)]
|
||||||
pub struct UserRequest {
|
pub struct UserRequest {
|
||||||
command: FeedCommand,
|
command: FeedCommand,
|
||||||
|
|
@ -571,6 +591,24 @@ async fn check_feed(
|
||||||
|
|
||||||
feed.entries.sort_by_key(|e| std::cmp::Reverse(e.posted()));
|
feed.entries.sort_by_key(|e| std::cmp::Reverse(e.posted()));
|
||||||
for post in feed.entries.into_iter().take(5) {
|
for post in feed.entries.into_iter().take(5) {
|
||||||
|
let body = post
|
||||||
|
.content
|
||||||
|
.and_then(|c| match c.body {
|
||||||
|
None => PostBody::Empty,
|
||||||
|
Some(c) => {
|
||||||
|
let s = html2md::parse_html_custom(&c, &handlers)
|
||||||
|
.graphemes(false)
|
||||||
|
.take(ZULIP_MESSAGE_CUTOFF)
|
||||||
|
.collect::<String>();
|
||||||
|
if s.len() == c.len() {
|
||||||
|
PostBody::Full(s)
|
||||||
|
} else {
|
||||||
|
PostBody::Truncated(s)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
})
|
||||||
|
.unwrap_or(PostBody::Empty);
|
||||||
|
|
||||||
if post.posted().unwrap_or(LAST_FETCHED) > last_fetched {
|
if post.posted().unwrap_or(LAST_FETCHED) > last_fetched {
|
||||||
let entry = FeedEntry {
|
let entry = FeedEntry {
|
||||||
post_url: post
|
post_url: post
|
||||||
|
|
@ -590,16 +628,9 @@ async fn check_feed(
|
||||||
published: post.posted().unwrap_or(now),
|
published: post.posted().unwrap_or(now),
|
||||||
received: now,
|
received: now,
|
||||||
feed_description: feed.description.to_owned().map(|d| d.content),
|
feed_description: feed.description.to_owned().map(|d| d.content),
|
||||||
body: post.content.and_then(|c| {
|
body,
|
||||||
c.body.map(|f| {
|
|
||||||
let s = html2md::parse_html_custom(&f, &handlers)
|
|
||||||
.graphemes(false)
|
|
||||||
.take(ZULIP_MESSAGE_CUTOFF)
|
|
||||||
.collect::<String>();
|
|
||||||
s.to_string()
|
|
||||||
})
|
|
||||||
}),
|
|
||||||
};
|
};
|
||||||
|
|
||||||
entries.push(entry);
|
entries.push(entry);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
||||||
Loading…
Reference in a new issue