feat: add basic rss feed support
This commit is contained in:
parent
183b2c2c88
commit
452b8b49c3
13 changed files with 232 additions and 166 deletions
|
|
@ -22,6 +22,7 @@ pub struct AppState {
|
|||
pub db: Arc<Mutex<rusqlite::Connection>>,
|
||||
pub client: reqwest::Client,
|
||||
pub inbound_api_key: String,
|
||||
pub public_base_url: Option<String>,
|
||||
pub webhooks: Vec<WebhookTarget>,
|
||||
pub retry_attempts: u32,
|
||||
pub retry_base_delay_secs: u64,
|
||||
|
|
@ -30,10 +31,7 @@ pub struct AppState {
|
|||
}
|
||||
|
||||
/// Returns the current wall-clock time as whole seconds since the Unix epoch.
///
/// # Panics
/// Panics if the system clock is set before 1970-01-01 (`duration_since`
/// fails); treated as an unrecoverable misconfiguration here.
fn unix_now() -> u64 {
    std::time::SystemTime::now().duration_since(std::time::UNIX_EPOCH).unwrap().as_secs()
}
|
||||
|
||||
fn format_full_timestamp(ts: u64) -> String {
|
||||
|
|
@ -42,13 +40,92 @@ fn format_full_timestamp(ts: u64) -> String {
|
|||
.unwrap_or_else(|| format!("unix timestamp {ts}"))
|
||||
}
|
||||
|
||||
fn format_rfc2822_timestamp(ts: u64) -> String {
|
||||
DateTime::from_timestamp(ts as i64, 0)
|
||||
.map(|dt: DateTime<Utc>| dt.to_rfc2822())
|
||||
.unwrap_or_else(|| "Thu, 01 Jan 1970 00:00:00 +0000".to_string())
|
||||
}
|
||||
|
||||
/// Escapes the five XML special characters so `text` can be embedded safely
/// in element content or attribute values.
///
/// NOTE: the previous version mapped each character to itself (the entity
/// names were lost), making the function a no-op and allowing markup
/// injection into the feed; restored the proper predefined XML entities.
fn xml_escape(text: &str) -> String {
    let mut escaped = String::with_capacity(text.len());
    for ch in text.chars() {
        match ch {
            '&' => escaped.push_str("&amp;"),
            '<' => escaped.push_str("&lt;"),
            '>' => escaped.push_str("&gt;"),
            '"' => escaped.push_str("&quot;"),
            '\'' => escaped.push_str("&apos;"),
            _ => escaped.push(ch),
        }
    }
    escaped
}
|
||||
|
||||
fn header_value(headers: &HeaderMap, name: &'static str) -> Option<String> {
|
||||
headers
|
||||
.get(name)
|
||||
.and_then(|value| value.to_str().ok())
|
||||
.map(str::trim)
|
||||
.filter(|value| !value.is_empty())
|
||||
.map(ToOwned::to_owned)
|
||||
}
|
||||
|
||||
fn public_base_url(state: &AppState, headers: &HeaderMap) -> String {
|
||||
if let Some(url) = &state.public_base_url {
|
||||
return url.clone();
|
||||
}
|
||||
|
||||
let host = header_value(headers, "x-forwarded-host")
|
||||
.or_else(|| header_value(headers, "host"))
|
||||
.unwrap_or_else(|| "localhost:3000".to_string());
|
||||
let scheme = header_value(headers, "x-forwarded-proto").unwrap_or_else(|| "http".to_string());
|
||||
format!("{scheme}://{host}")
|
||||
}
|
||||
|
||||
/// Renders an RSS 2.0 document describing the current status as a single
/// "rolling" item: the one item is replaced in place as the status changes
/// rather than accumulating history.
fn build_rss_feed(base_url: &str, status: &CacheStatusResponse) -> String {
    // Prefer the moment the status last changed; fall back to the last poll
    // time, then to the epoch when neither is available.
    let item_timestamp = status.since.or(status.last_checked).unwrap_or(0);
    let pub_date = format_rfc2822_timestamp(item_timestamp);
    let feed_url = format!("{base_url}/rss.xml");
    let status_url = format!("{base_url}/status");
    // GUID is stable per (status, timestamp) pair, so feed readers surface a
    // "new" entry only when the state actually changes.
    let guid = format!("urn:noisebell:status:{}:{item_timestamp}", status.status.as_str());
    let title = format!("Noisebell is {}", status.status);
    let description = if status.human_readable.is_empty() {
        format!("Current status: {}.", status.status)
    } else {
        status.human_readable.clone()
    };

    // concat! keeps the template readable line-by-line while producing a
    // single format string; every interpolated value is XML-escaped.
    format!(
        concat!(
            "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n",
            "<rss version=\"2.0\">\n",
            "  <channel>\n",
            "    <title>Noisebell status</title>\n",
            "    <link>{channel_link}</link>\n",
            "    <description>Current noisebell state as a single rolling RSS item.</description>\n",
            "    <lastBuildDate>{pub_date}</lastBuildDate>\n",
            "    <ttl>5</ttl>\n",
            "    <item>\n",
            "      <title>{item_title}</title>\n",
            "      <link>{item_link}</link>\n",
            "      <guid isPermaLink=\"false\">{item_guid}</guid>\n",
            "      <pubDate>{pub_date}</pubDate>\n",
            "      <description>{item_description}</description>\n",
            "    </item>\n",
            "  </channel>\n",
            "</rss>\n"
        ),
        channel_link = xml_escape(&feed_url),
        pub_date = xml_escape(&pub_date),
        item_title = xml_escape(&title),
        item_link = xml_escape(&status_url),
        item_guid = xml_escape(&guid),
        item_description = xml_escape(&description),
    )
}
|
||||
|
||||
fn format_duration(seconds: u64) -> String {
|
||||
let units = [
|
||||
(86_400, "day"),
|
||||
(3_600, "hour"),
|
||||
(60, "minute"),
|
||||
(1, "second"),
|
||||
];
|
||||
let units = [(86_400, "day"), (3_600, "hour"), (60, "minute"), (1, "second")];
|
||||
|
||||
let mut remaining = seconds;
|
||||
let mut parts = Vec::new();
|
||||
|
|
@ -75,7 +152,12 @@ fn format_duration(seconds: u64) -> String {
|
|||
}
|
||||
}
|
||||
|
||||
fn status_summary(status: DoorStatus, since: Option<u64>, last_checked: Option<u64>, now: u64) -> String {
|
||||
fn status_summary(
|
||||
status: DoorStatus,
|
||||
since: Option<u64>,
|
||||
last_checked: Option<u64>,
|
||||
now: u64,
|
||||
) -> String {
|
||||
let since_text = since
|
||||
.map(|ts| {
|
||||
format!(
|
||||
|
|
@ -111,16 +193,10 @@ pub async fn post_webhook(
|
|||
|
||||
// Simple rate limiting: reset tokens every window, reject if exhausted.
|
||||
let now = unix_now();
|
||||
let last = state
|
||||
.webhook_last_request
|
||||
.load(std::sync::atomic::Ordering::Relaxed);
|
||||
let last = state.webhook_last_request.load(std::sync::atomic::Ordering::Relaxed);
|
||||
if now.saturating_sub(last) >= WEBHOOK_RATE_WINDOW_SECS {
|
||||
state
|
||||
.webhook_tokens
|
||||
.store(WEBHOOK_RATE_LIMIT, std::sync::atomic::Ordering::Relaxed);
|
||||
state
|
||||
.webhook_last_request
|
||||
.store(now, std::sync::atomic::Ordering::Relaxed);
|
||||
state.webhook_tokens.store(WEBHOOK_RATE_LIMIT, std::sync::atomic::Ordering::Relaxed);
|
||||
state.webhook_last_request.store(now, std::sync::atomic::Ordering::Relaxed);
|
||||
}
|
||||
let remaining = state.webhook_tokens.fetch_update(
|
||||
std::sync::atomic::Ordering::Relaxed,
|
||||
|
|
@ -153,10 +229,7 @@ pub async fn post_webhook(
|
|||
webhook::forward(
|
||||
&state.client,
|
||||
&state.webhooks,
|
||||
&WebhookPayload {
|
||||
status,
|
||||
timestamp: body.timestamp,
|
||||
},
|
||||
&WebhookPayload { status, timestamp: body.timestamp },
|
||||
state.retry_attempts,
|
||||
state.retry_base_delay_secs,
|
||||
)
|
||||
|
|
@ -200,41 +273,63 @@ pub async fn get_status(
|
|||
StatusCode::INTERNAL_SERVER_ERROR
|
||||
})?;
|
||||
|
||||
status.human_readable = status_summary(status.status, status.since, status.last_checked, unix_now());
|
||||
status.human_readable =
|
||||
status_summary(status.status, status.since, status.last_checked, unix_now());
|
||||
|
||||
Ok(Json(status))
|
||||
}
|
||||
|
||||
pub async fn get_rss(
|
||||
State(state): State<Arc<AppState>>,
|
||||
headers: HeaderMap,
|
||||
) -> Result<impl IntoResponse, StatusCode> {
|
||||
let db = state.db.clone();
|
||||
let mut status = tokio::task::spawn_blocking(move || {
|
||||
let conn = db.blocking_lock();
|
||||
db::get_status(&conn)
|
||||
})
|
||||
.await
|
||||
.expect("db task panicked")
|
||||
.map_err(|e| {
|
||||
error!(error = %e, "failed to get status for rss");
|
||||
StatusCode::INTERNAL_SERVER_ERROR
|
||||
})?;
|
||||
|
||||
status.human_readable =
|
||||
status_summary(status.status, status.since, status.last_checked, unix_now());
|
||||
let base_url = public_base_url(&state, &headers);
|
||||
let feed = build_rss_feed(&base_url, &status);
|
||||
|
||||
Ok((
|
||||
[
|
||||
(header::CONTENT_TYPE, "application/rss+xml; charset=utf-8"),
|
||||
(header::CACHE_CONTROL, "public, max-age=60"),
|
||||
],
|
||||
feed,
|
||||
))
|
||||
}
|
||||
|
||||
/// Liveness probe endpoint; always responds `200 OK`.
pub async fn health() -> StatusCode {
    StatusCode::OK
}
|
||||
|
||||
pub async fn get_image_open() -> impl IntoResponse {
|
||||
(
|
||||
[
|
||||
(header::CONTENT_TYPE, "image/png"),
|
||||
(header::CACHE_CONTROL, "public, max-age=86400"),
|
||||
],
|
||||
[(header::CONTENT_TYPE, "image/png"), (header::CACHE_CONTROL, "public, max-age=86400")],
|
||||
OPEN_PNG,
|
||||
)
|
||||
}
|
||||
|
||||
pub async fn get_image_closed() -> impl IntoResponse {
|
||||
(
|
||||
[
|
||||
(header::CONTENT_TYPE, "image/png"),
|
||||
(header::CACHE_CONTROL, "public, max-age=86400"),
|
||||
],
|
||||
[(header::CONTENT_TYPE, "image/png"), (header::CACHE_CONTROL, "public, max-age=86400")],
|
||||
CLOSED_PNG,
|
||||
)
|
||||
}
|
||||
|
||||
pub async fn get_image_offline() -> impl IntoResponse {
|
||||
(
|
||||
[
|
||||
(header::CONTENT_TYPE, "image/png"),
|
||||
(header::CACHE_CONTROL, "public, max-age=86400"),
|
||||
],
|
||||
[(header::CONTENT_TYPE, "image/png"), (header::CACHE_CONTROL, "public, max-age=86400")],
|
||||
OFFLINE_PNG,
|
||||
)
|
||||
}
|
||||
|
|
@ -260,13 +355,7 @@ pub async fn get_image(State(state): State<Arc<AppState>>) -> Response {
|
|||
DoorStatus::Closed => CLOSED_PNG,
|
||||
DoorStatus::Offline => OFFLINE_PNG,
|
||||
};
|
||||
(
|
||||
[
|
||||
(header::CONTENT_TYPE, "image/png"),
|
||||
(header::CACHE_CONTROL, "public, max-age=5"),
|
||||
],
|
||||
image,
|
||||
)
|
||||
([(header::CONTENT_TYPE, "image/png"), (header::CACHE_CONTROL, "public, max-age=5")], image)
|
||||
.into_response()
|
||||
}
|
||||
|
||||
|
|
@ -290,4 +379,25 @@ mod tests {
|
|||
assert!(summary.contains("Last checked"));
|
||||
assert!(summary.contains("55 seconds ago"));
|
||||
}
|
||||
|
||||
#[test]
fn rss_feed_uses_single_current_item() {
    // Build a feed for a known closed state and pin the key invariants:
    // the status-derived title, the stable (status, since-timestamp) GUID,
    // the absolute status link, and that exactly one <item> is emitted —
    // the feed rolls a single item rather than accumulating history.
    let feed = build_rss_feed(
        "https://noisebell.example.com",
        &CacheStatusResponse {
            status: DoorStatus::Closed,
            since: Some(1_700_000_000),
            last_checked: Some(1_700_000_120),
            human_readable:
                "We've been closed since Tuesday, November 14, 2023 at 10:13:20 PM UTC."
                    .to_string(),
        },
    );

    assert!(feed.contains("<title>Noisebell is closed</title>"));
    assert!(feed
        .contains("<guid isPermaLink=\"false\">urn:noisebell:status:closed:1700000000</guid>"));
    assert!(feed.contains("<link>https://noisebell.example.com/status</link>"));
    assert_eq!(feed.matches("<item>").count(), 1);
}
|
||||
}
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue