//! poop-graph/src/main.rs
use std::{sync::Arc, time::Duration};

use actix_web::{
    get, http::header::ContentType, middleware::Logger, web, App, HttpResponse, HttpServer,
    Responder,
};
use lazy_static::lazy_static;
use tokio::sync::RwLock;

mod extract;
mod fetch;
mod serialize;

use extract::DataSet;
use fetch::PdfFetcher;
use serialize::{Csv, DataSerializer, Json};
// How long the background updater sleeps between dataset refreshes (1 hour).
// NOTE(review): `Duration::from_secs` is a const fn, but callers access this
// via deref (`*UPDATE_INTERVAL`), so it stays a lazy_static rather than a
// plain `const` to keep that access pattern compiling.
lazy_static! {
    static ref UPDATE_INTERVAL: Duration = Duration::from_secs(3600);
}
/// Shared application state handed to every request handler via `web::Data`.
struct AppState {
    // Most recently extracted dataset. The `RwLock` lets the background
    // updater swap in a fresh value while handlers take cheap `Arc` clones
    // under a short-lived read lock.
    dataset: RwLock<Arc<DataSet>>,
}
/// Errors that can occur while refreshing the dataset from the upstream PDF.
#[derive(thiserror::Error, Debug)]
enum Error {
    /// Downloading the source PDF failed (see `fetch::Error` for the cause).
    #[error("Failed to fetch PDF")]
    Fetch(#[from] fetch::Error),
    /// Parsing the downloaded PDF into a `DataSet` failed.
    #[error("Failed to extract data from PDF")]
    Extract(#[from] extract::Error),
}
/// Download the source PDF and extract a fresh [`DataSet`] from it.
///
/// # Errors
/// Propagates fetch failures as [`Error::Fetch`] and extraction failures
/// as [`Error::Extract`].
async fn load_data(fetcher: &mut PdfFetcher) -> Result<DataSet, Error> {
    let document = fetcher.fetch().await?;
    let dataset = DataSet::extract(&document)?;
    Ok(dataset)
}
/// Fetch and extract new data, then atomically swap it into the shared state.
///
/// # Errors
/// Returns the underlying [`Error`] if fetching or extraction fails; on
/// failure the previously stored dataset is left untouched.
async fn try_update(state: &AppState, fetcher: &mut PdfFetcher) -> Result<(), Error> {
    let fresh = Arc::new(load_data(fetcher).await?);
    let mut guard = state.dataset.write().await;
    *guard = fresh;
    Ok(())
}
async fn start_updater() -> Result<web::Data<AppState>, Error> {
let mut fetcher = PdfFetcher::new()?;
let state = web::Data::new(AppState {
dataset: RwLock::new(Arc::new(load_data(&mut fetcher).await?)),
});
let state_copy = state.clone();
std::thread::spawn(move || {
actix_web::rt::System::new().block_on(async {
loop {
actix_web::rt::time::sleep(*UPDATE_INTERVAL).await;
2022-04-06 18:46:17 -04:00
if let Err(e) = try_update(&state_copy, &mut fetcher).await {
eprintln!("Error updating data: {:#?}", e);
}
}
});
});
Ok(state)
}
#[actix_web::main]
async fn main() -> std::io::Result<()> {
2022-04-16 00:03:09 -04:00
simple_logger::init_with_level(log::Level::Info).unwrap();
2022-04-06 18:46:17 -04:00
let state = start_updater().await.expect("Failed to initialize state");
HttpServer::new(move || {
App::new()
.app_data(state.clone())
2022-04-16 00:03:09 -04:00
.wrap(Logger::default())
2022-04-06 18:46:17 -04:00
.service(csv)
2022-04-07 01:09:45 -04:00
.service(json)
2022-04-06 18:46:17 -04:00
.service(actix_files::Files::new("/", "./static/").index_file("index.html"))
})
.bind("127.0.0.1:8080")?
.run()
.await
}
#[get("/data.csv")]
async fn csv(data: web::Data<AppState>) -> impl Responder {
let dataset = { data.dataset.read().await.clone() };
2022-04-07 01:09:45 -04:00
let rows = tokio_stream::iter(
DataSerializer::new(dataset, Csv).map(|item| item.map(bytes::Bytes::from)),
);
2022-04-06 18:46:17 -04:00
HttpResponse::Ok()
.content_type("text/csv; charset=utf-8")
.body(actix_web::body::BodyStream::new(rows))
}
2022-04-07 01:09:45 -04:00
#[get("/data.json")]
async fn json(data: web::Data<AppState>) -> impl Responder {
let dataset = { data.dataset.read().await.clone() };
let rows = tokio_stream::iter(
DataSerializer::new(dataset, Json).map(|item| item.map(bytes::Bytes::from)),
);
HttpResponse::Ok()
.insert_header(ContentType::json())
.body(actix_web::body::BodyStream::new(rows))
}