feat: reorganize with remote
This commit is contained in:
parent
a74e5753fa
commit
dc7b8cbadd
28 changed files with 622 additions and 3024 deletions
|
|
@ -1,38 +1,34 @@
|
|||
use std::sync::Arc;
|
||||
|
||||
use anyhow::{Context, Result};
|
||||
use axum::extract::{Query, State};
|
||||
use axum::http::{HeaderMap, StatusCode, header};
|
||||
use axum::extract::State;
|
||||
use axum::http::{StatusCode, header};
|
||||
use axum::response::IntoResponse;
|
||||
use axum::routing::{get, post};
|
||||
use axum::{Json, Router};
|
||||
use rusqlite::Connection;
|
||||
use serde::Deserialize;
|
||||
use tokio::sync::Mutex;
|
||||
use tracing::{error, info};
|
||||
use axum::routing::get;
|
||||
use axum::Router;
|
||||
use noisebell_common::HistoryEntry;
|
||||
use tower_http::trace::TraceLayer;
|
||||
use tracing::{error, info, Level};
|
||||
|
||||
struct AppState {
|
||||
db: Arc<Mutex<Connection>>,
|
||||
webhook_secret: String,
|
||||
client: reqwest::Client,
|
||||
cache_url: String,
|
||||
site_url: String,
|
||||
}
|
||||
|
||||
#[derive(Deserialize)]
|
||||
struct WebhookPayload {
|
||||
status: String,
|
||||
timestamp: u64,
|
||||
}
|
||||
|
||||
#[derive(Deserialize)]
|
||||
struct FeedQuery {
|
||||
limit: Option<u32>,
|
||||
}
|
||||
|
||||
fn unix_to_rfc3339(ts: u64) -> String {
|
||||
let dt = time::OffsetDateTime::from_unix_timestamp(ts as i64).unwrap_or(time::OffsetDateTime::UNIX_EPOCH);
|
||||
dt.format(&time::format_description::well_known::Rfc3339).unwrap_or_else(|_| "1970-01-01T00:00:00Z".to_string())
|
||||
}
|
||||
|
||||
/// Escapes the five XML special characters so `s` can be embedded safely in
/// XML element or attribute content.
///
/// `&` must be replaced first — otherwise the `&` introduced by the other
/// entity replacements would itself be re-escaped.
///
/// NOTE(review): the previous version had been mangled by HTML entity
/// decoding (the replacement strings collapsed to identity replacements and
/// a broken `"""` literal); this restores the intended entities.
fn escape_xml(s: &str) -> String {
    s.replace('&', "&amp;")
        .replace('<', "&lt;")
        .replace('>', "&gt;")
        .replace('"', "&quot;")
        .replace('\'', "&apos;")
}
|
||||
|
||||
fn status_description(status: &str) -> &str {
|
||||
match status {
|
||||
"open" => "The door at Noisebridge is open.",
|
||||
|
|
@ -51,28 +47,6 @@ fn status_title(status: &str) -> &str {
|
|||
}
|
||||
}
|
||||
|
||||
fn validate_bearer(headers: &HeaderMap, expected: &str) -> bool {
|
||||
headers
|
||||
.get("authorization")
|
||||
.and_then(|v| v.to_str().ok())
|
||||
.map(|v| v.strip_prefix("Bearer ").unwrap_or("") == expected)
|
||||
.unwrap_or(false)
|
||||
}
|
||||
|
||||
fn init_db(path: &str) -> Result<Connection> {
|
||||
let conn = Connection::open(path).context("failed to open SQLite database")?;
|
||||
conn.execute_batch(
|
||||
"CREATE TABLE IF NOT EXISTS events (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
status TEXT NOT NULL,
|
||||
timestamp INTEGER NOT NULL,
|
||||
received_at INTEGER NOT NULL
|
||||
);",
|
||||
)
|
||||
.context("failed to initialize database schema")?;
|
||||
Ok(conn)
|
||||
}
|
||||
|
||||
fn unix_now() -> u64 {
|
||||
std::time::SystemTime::now()
|
||||
.duration_since(std::time::UNIX_EPOCH)
|
||||
|
|
@ -80,65 +54,37 @@ fn unix_now() -> u64 {
|
|||
.as_secs()
|
||||
}
|
||||
|
||||
async fn post_webhook(
|
||||
State(state): State<Arc<AppState>>,
|
||||
headers: HeaderMap,
|
||||
Json(body): Json<WebhookPayload>,
|
||||
) -> StatusCode {
|
||||
if !validate_bearer(&headers, &state.webhook_secret) {
|
||||
return StatusCode::UNAUTHORIZED;
|
||||
}
|
||||
|
||||
let now = unix_now();
|
||||
let conn = state.db.lock().await;
|
||||
match conn.execute(
|
||||
"INSERT INTO events (status, timestamp, received_at) VALUES (?1, ?2, ?3)",
|
||||
rusqlite::params![body.status, body.timestamp, now],
|
||||
) {
|
||||
Ok(_) => {
|
||||
info!(status = %body.status, timestamp = body.timestamp, "event recorded");
|
||||
StatusCode::OK
|
||||
}
|
||||
Err(e) => {
|
||||
error!(error = %e, "failed to insert event");
|
||||
StatusCode::INTERNAL_SERVER_ERROR
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async fn get_feed(
|
||||
State(state): State<Arc<AppState>>,
|
||||
Query(query): Query<FeedQuery>,
|
||||
) -> impl IntoResponse {
|
||||
let limit = query.limit.unwrap_or(50);
|
||||
let conn = state.db.lock().await;
|
||||
let url = format!("{}/history", state.cache_url);
|
||||
|
||||
let mut stmt = match conn.prepare(
|
||||
"SELECT status, timestamp FROM events ORDER BY id DESC LIMIT ?1",
|
||||
) {
|
||||
Ok(s) => s,
|
||||
let resp = match state.client.get(&url).send().await {
|
||||
Ok(resp) if resp.status().is_success() => resp,
|
||||
Ok(resp) => {
|
||||
error!(status = %resp.status(), "cache service returned error");
|
||||
return (StatusCode::BAD_GATEWAY, "upstream error").into_response();
|
||||
}
|
||||
Err(e) => {
|
||||
error!(error = %e, "failed to prepare query");
|
||||
return (StatusCode::INTERNAL_SERVER_ERROR, "internal error").into_response();
|
||||
error!(error = %e, "failed to reach cache service");
|
||||
return (StatusCode::BAD_GATEWAY, "upstream unavailable").into_response();
|
||||
}
|
||||
};
|
||||
|
||||
let entries: Vec<(String, u64)> = match stmt
|
||||
.query_map(rusqlite::params![limit], |row| {
|
||||
Ok((row.get::<_, String>(0)?, row.get::<_, u64>(1)?))
|
||||
}) {
|
||||
Ok(rows) => rows.filter_map(|r| r.ok()).collect(),
|
||||
let entries: Vec<HistoryEntry> = match resp.json().await {
|
||||
Ok(entries) => entries,
|
||||
Err(e) => {
|
||||
error!(error = %e, "failed to query events");
|
||||
return (StatusCode::INTERNAL_SERVER_ERROR, "internal error").into_response();
|
||||
error!(error = %e, "failed to parse cache response");
|
||||
return (StatusCode::BAD_GATEWAY, "invalid upstream response").into_response();
|
||||
}
|
||||
};
|
||||
|
||||
let updated = entries
|
||||
.first()
|
||||
.map(|(_, ts)| unix_to_rfc3339(*ts))
|
||||
.map(|e| unix_to_rfc3339(e.timestamp))
|
||||
.unwrap_or_else(|| unix_to_rfc3339(unix_now()));
|
||||
|
||||
let site_url = escape_xml(&state.site_url);
|
||||
let mut xml = format!(
|
||||
r#"<?xml version="1.0" encoding="utf-8"?>
|
||||
<feed xmlns="http://www.w3.org/2005/Atom">
|
||||
|
|
@ -148,24 +94,26 @@ async fn get_feed(
|
|||
<id>urn:noisebell:door-status</id>
|
||||
<updated>{updated}</updated>
|
||||
"#,
|
||||
site_url = state.site_url,
|
||||
updated = updated,
|
||||
);
|
||||
|
||||
for (status, timestamp) in &entries {
|
||||
let ts_rfc = unix_to_rfc3339(*timestamp);
|
||||
let seven_days_ago = unix_now().saturating_sub(7 * 24 * 60 * 60);
|
||||
for entry in &entries {
|
||||
if entry.timestamp < seven_days_ago {
|
||||
continue;
|
||||
}
|
||||
let ts_rfc = unix_to_rfc3339(entry.timestamp);
|
||||
xml.push_str(&format!(
|
||||
r#" <entry>
|
||||
<title>{title}</title>
|
||||
<id>urn:noisebell:event:{timestamp}</id>
|
||||
<id>urn:noisebell:event:{id}</id>
|
||||
<updated>{ts}</updated>
|
||||
<content type="text">{description}</content>
|
||||
</entry>
|
||||
"#,
|
||||
title = status_title(status),
|
||||
timestamp = timestamp,
|
||||
title = escape_xml(status_title(&entry.status)),
|
||||
id = entry.id,
|
||||
ts = ts_rfc,
|
||||
description = status_description(status),
|
||||
description = escape_xml(status_description(&entry.status)),
|
||||
));
|
||||
}
|
||||
|
||||
|
|
@ -190,30 +138,33 @@ async fn main() -> Result<()> {
|
|||
.parse()
|
||||
.context("NOISEBELL_RSS_PORT must be a valid u16")?;
|
||||
|
||||
let webhook_secret = std::env::var("NOISEBELL_RSS_WEBHOOK_SECRET")
|
||||
.context("NOISEBELL_RSS_WEBHOOK_SECRET is required")?;
|
||||
|
||||
let data_dir =
|
||||
std::env::var("NOISEBELL_RSS_DATA_DIR").unwrap_or_else(|_| "/var/lib/noisebell-rss".into());
|
||||
let cache_url = std::env::var("NOISEBELL_RSS_CACHE_URL")
|
||||
.context("NOISEBELL_RSS_CACHE_URL is required")?;
|
||||
|
||||
let site_url = std::env::var("NOISEBELL_RSS_SITE_URL")
|
||||
.unwrap_or_else(|_| format!("https://rss.noisebell.extremist.software"));
|
||||
.unwrap_or_else(|_| "https://rss.noisebell.extremist.software".to_string());
|
||||
|
||||
info!(port, "starting noisebell-rss");
|
||||
info!(port, %cache_url, "starting noisebell-rss");
|
||||
|
||||
let db_path = format!("{data_dir}/rss.db");
|
||||
let conn = init_db(&db_path)?;
|
||||
let db = Arc::new(Mutex::new(conn));
|
||||
let client = reqwest::Client::builder()
|
||||
.timeout(std::time::Duration::from_secs(10))
|
||||
.build()
|
||||
.context("failed to build HTTP client")?;
|
||||
|
||||
let app_state = Arc::new(AppState {
|
||||
db,
|
||||
webhook_secret,
|
||||
client,
|
||||
cache_url,
|
||||
site_url,
|
||||
});
|
||||
|
||||
let app = Router::new()
|
||||
.route("/webhook", post(post_webhook))
|
||||
.route("/health", get(|| async { StatusCode::OK }))
|
||||
.route("/feed", get(get_feed))
|
||||
.layer(
|
||||
TraceLayer::new_for_http()
|
||||
.make_span_with(tower_http::trace::DefaultMakeSpan::new().level(Level::INFO))
|
||||
.on_response(tower_http::trace::DefaultOnResponse::new().level(Level::INFO)),
|
||||
)
|
||||
.with_state(app_state);
|
||||
|
||||
let listener = tokio::net::TcpListener::bind(("0.0.0.0", port))
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue