Simple web server
This commit is contained in:
parent
50a25c494a
commit
921b62ed97
5 changed files with 624 additions and 40 deletions
117
src/main.rs
117
src/main.rs
|
|
@ -1,15 +1,118 @@
|
|||
use std::{sync::Arc, time::Duration};
|
||||
|
||||
use actix_web::{get, web, App, HttpResponse, HttpServer, Responder};
|
||||
use lazy_static::lazy_static;
|
||||
use tokio::sync::RwLock;
|
||||
|
||||
mod extract;
|
||||
mod fetch;
|
||||
|
||||
use extract::DataSet;
|
||||
use fetch::PdfFetcher;
|
||||
|
||||
#[tokio::main]
|
||||
async fn main() {
|
||||
let mut fetcher = PdfFetcher::new().expect("Failed to initialize PDF fetcher");
|
||||
let doc = fetcher.fetch().await.expect("Failed to fetch PDF");
|
||||
let dataset = DataSet::extract(&doc).expect("Failed to extract dataset");
|
||||
for row in dataset.csv_rows() {
|
||||
println!("{}", row.unwrap());
|
||||
lazy_static! {
    /// How long the background updater sleeps between refresh attempts (1 hour).
    // NOTE(review): `Duration::from_secs` is a `const fn`, so a plain `static`
    // (or `std::sync::LazyLock`) could replace `lazy_static` here — but callers
    // dereference `*UPDATE_INTERVAL`, so the macro is kept to preserve that
    // interface.
    static ref UPDATE_INTERVAL: Duration = Duration::from_secs(3600);
}
|
||||
|
||||
/// Shared application state handed to every request handler.
struct AppState {
    /// Latest extracted dataset. The `RwLock` lets the background updater
    /// swap in a fresh `Arc` (see `try_update`) while handlers clone the
    /// current `Arc` cheaply under a read lock (see `csv`).
    dataset: RwLock<Arc<DataSet>>,
}
|
||||
|
||||
/// Errors that can occur while loading or refreshing the dataset.
#[derive(thiserror::Error, Debug)]
enum Error {
    /// Downloading the source PDF failed (wraps `fetch::Error`).
    #[error("Failed to fetch PDF")]
    Fetch(#[from] fetch::Error),
    /// Parsing the downloaded PDF into a `DataSet` failed (wraps `extract::Error`).
    #[error("Failed to extract data from PDF")]
    Extract(#[from] extract::Error),
}
|
||||
|
||||
async fn load_data(fetcher: &mut PdfFetcher) -> Result<DataSet, Error> {
|
||||
Ok(DataSet::extract(&fetcher.fetch().await?)?)
|
||||
}
|
||||
|
||||
async fn try_update(state: &AppState, fetcher: &mut PdfFetcher) -> Result<(), Error> {
|
||||
*state.dataset.write().await = Arc::new(load_data(fetcher).await?);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Performs the initial data load, then spawns a background thread that
/// refreshes the shared dataset every `UPDATE_INTERVAL`.
///
/// Returns the shared `AppState` on success. A failure to construct the
/// `PdfFetcher` or to complete the *first* load aborts startup with an
/// [`Error`]; later refresh failures are only logged.
async fn start_updater() -> Result<web::Data<AppState>, Error> {
    let mut fetcher = PdfFetcher::new()?;
    // First load happens inline so the server never starts without data.
    let state = web::Data::new(AppState {
        dataset: RwLock::new(Arc::new(load_data(&mut fetcher).await?)),
    });

    let state_copy = state.clone();
    // Dedicated OS thread running its own actix runtime for the refresh loop;
    // `fetcher` is moved in and reused across iterations.
    std::thread::spawn(move || {
        actix_web::rt::System::new().block_on(async {
            loop {
                // On failure, log and keep serving the previously loaded
                // dataset; the refresh is retried on the next tick.
                if let Err(e) = try_update(&state_copy, &mut fetcher).await {
                    eprintln!("Error updating data: {:#?}", e);
                }
                actix_web::rt::time::sleep(*UPDATE_INTERVAL).await;
            }
        });
    });

    Ok(state)
}
|
||||
|
||||
#[actix_web::main]
|
||||
async fn main() -> std::io::Result<()> {
|
||||
let state = start_updater().await.expect("Failed to initialize state");
|
||||
|
||||
HttpServer::new(move || {
|
||||
App::new()
|
||||
.app_data(state.clone())
|
||||
.service(csv)
|
||||
.service(actix_files::Files::new("/", "./static/").index_file("index.html"))
|
||||
})
|
||||
.bind("127.0.0.1:8080")?
|
||||
.run()
|
||||
.await
|
||||
}
|
||||
|
||||
/// Streaming iterator over a dataset's CSV representation: yields the
/// header line first, then one line per data row.
struct DataIterator {
    /// Snapshot of the dataset being rendered; the `Arc` keeps it alive
    /// for the whole duration of the response stream.
    dataset: Arc<DataSet>,
    /// `None` until the header has been produced, then `Some(i)` where `i`
    /// is the index of the next row to emit.
    index: Option<usize>,
}
|
||||
|
||||
impl DataIterator {
|
||||
fn new(dataset: Arc<DataSet>) -> Self {
|
||||
Self {
|
||||
dataset,
|
||||
index: None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Iterator for DataIterator {
|
||||
type Item = Result<String, std::fmt::Error>;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
match self.index {
|
||||
None => {
|
||||
self.index = Some(0);
|
||||
Some(self.dataset.csv_header().map(|s| s + "\n"))
|
||||
}
|
||||
Some(i) => {
|
||||
if let Some(row) = self.dataset.rows.get(i) {
|
||||
self.index = Some(i + 1);
|
||||
Some(self.dataset.csv_row(row).map(|s| s + "\n"))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[get("/data.csv")]
|
||||
async fn csv(data: web::Data<AppState>) -> impl Responder {
|
||||
let dataset = { data.dataset.read().await.clone() };
|
||||
|
||||
let rows =
|
||||
tokio_stream::iter(DataIterator::new(dataset).map(|item| item.map(bytes::Bytes::from)));
|
||||
HttpResponse::Ok()
|
||||
.content_type("text/csv; charset=utf-8")
|
||||
.body(actix_web::body::BodyStream::new(rows))
|
||||
}
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue