Backend update

This commit is contained in:
2024-09-14 21:57:41 +02:00
parent c58a26a691
commit 3931504d49
4 changed files with 141 additions and 101 deletions

View File

@@ -21,4 +21,6 @@ futures = "0.3.30"
chrono = { version = "0.4.38", features = ["alloc"]}
futures-util = "0.3.30"
h2 = "0.4.6"

[target.'cfg(unix)'.dependencies]
daemonize = "0.5.0"

View File

@@ -0,0 +1,14 @@
-- Players that can be voted for.
CREATE TABLE IF NOT EXISTS players
(
    id   INTEGER PRIMARY KEY NOT NULL,
    name TEXT NOT NULL
);

-- One row per vote cast by a visitor.
CREATE TABLE IF NOT EXISTS votes
(
    id          INTEGER PRIMARY KEY NOT NULL,
    -- Stored as formatted text, not ISO-8601; the backend filters on exact
    -- string equality of this column. NOTE(review): presumably "%d/%m/%Y" —
    -- confirm against the server's date formatting.
    submit_date TEXT NOT NULL,
    -- Presumably references players.id; no FOREIGN KEY constraint is declared,
    -- so integrity is enforced by the application only.
    player_id   INTEGER NOT NULL,
    nickname    TEXT NOT NULL,
    reason      TEXT NOT NULL,
    -- 1 = positive ("plus") vote, 0 = negative vote.
    is_plus     BOOLEAN NOT NULL DEFAULT 0
);

4
settings.json Normal file
View File

@@ -0,0 +1,4 @@
{
"database_url": "sqlite:vote.db",
"bind_address": "127.0.0.1:8080"
}

View File

@@ -1,9 +1,6 @@
extern crate core;
use bytes::Bytes;
use bytes::Buf; use bytes::Buf;
use bytes::Bytes;
use chrono::{DateTime, Utc}; use chrono::{DateTime, Utc};
use dotenvy::dotenv;
use futures; use futures;
use http_body_util::{BodyExt, Full}; use http_body_util::{BodyExt, Full};
use hyper::body::Incoming; use hyper::body::Incoming;
@@ -16,16 +13,16 @@ use serde_json::{from_reader, Value};
use sqlx::sqlite::SqlitePool; use sqlx::sqlite::SqlitePool;
use std::collections::HashMap; use std::collections::HashMap;
use std::env; use std::env;
use std::env::vars;
use std::fs::File; use std::fs::File;
use std::io::Read; use std::io::Read;
use std::net::SocketAddr; use std::net::SocketAddr;
use std::str::FromStr; use std::str::FromStr;
use std::sync::{Arc, Mutex}; use std::sync::{Arc, Mutex};
use std::time::SystemTime; use std::time::SystemTime;
use daemonize::Daemonize;
use tokio::net::TcpListener; use tokio::net::TcpListener;
#[cfg(target_os = "linux")]
use daemonize::Daemonize;
#[derive(Serialize, Deserialize)] #[derive(Serialize, Deserialize)]
struct Player { struct Player {
id: i64, id: i64,
@@ -34,20 +31,22 @@ struct Player {
#[derive(Serialize, Deserialize, Debug)] #[derive(Serialize, Deserialize, Debug)]
struct Vote { struct Vote {
vote_plus_id: i64, player_id: i64,
vote_plus_nickname: String, nickname: String,
vote_plus_reason: String, reason: String,
vote_moins_id: i64, is_plus: bool,
vote_moins_nickname: String, }
vote_moins_reason: String,
#[derive(Serialize, Deserialize)]
struct Settings {
database_url: String,
bind_address: String,
} }
async fn service(req: Request<Incoming>, db: Arc<Mutex<SqlitePool>>) -> Result<Response<Full<Bytes>>, Error> { async fn service(req: Request<Incoming>, db: Arc<Mutex<SqlitePool>>) -> Result<Response<Full<Bytes>>, Error> {
match req.method() { match req.method() {
&Method::GET => { get(req,db).await } &Method::GET => { get(req, db).await }
&Method::POST => { &Method::POST => { post(req, db).await }
post(req, db).await _ => { Ok(Response::builder().status(StatusCode::IM_A_TEAPOT).body(Full::new(Bytes::new())).unwrap()) }
}
_ => { Ok(Response::new(Full::new(Bytes::from("This method is unimplemented")))) }
} }
} }
@@ -63,34 +62,34 @@ async fn get(req: Request<Incoming>, db: Arc<Mutex<SqlitePool>>) -> Result<Respo
} }
async fn get_page(path: &str) -> Result<Response<Full<Bytes>>, Error> { async fn get_page(path: &str) -> Result<Response<Full<Bytes>>, Error> {
let uri_map_path = find_in_vars("ROUTES_FILE"); let mut routes = env::current_dir().expect("Could not get app directory (Required to get routes)");
let file = File::open(uri_map_path).expect("Could not find routes"); routes.push("routes.json");
let map: Value = from_reader(file).expect("Could not read data"); let file = File::open(routes).expect("Could not open routes file.");
let map: Value = from_reader(file).expect("Could not parse routes, please verify syntax.");
match &map[path] { match &map[path] {
Value::String(s) => get_file(s.as_str()).await, Value::String(s) => get_file(&s).await,
_ => not_found().await _ => not_found().await
} }
} }
async fn get_file(path: &str) -> Result<Response<Full<Bytes>>, Error> { async fn get_file(mut path: &str) -> Result<Response<Full<Bytes>>, Error> {
let current_dir = env::current_dir().unwrap().clone(); let mut file_path = env::current_dir().expect("Could not get app directory.");
let mut path = path;
if path.starts_with(r"/") { if path.starts_with(r"/") {
path = path.strip_prefix(r"/").unwrap(); path = path.strip_prefix(r"/").unwrap();
} }
let file_path = current_dir.join(path); file_path.push(path);
match File::open(&file_path) { match File::open(&file_path) {
Ok(mut file) => { Ok(mut file) => {
let mut buf = Vec::new(); let mut buf = Vec::new();
file.read_to_end(&mut buf).unwrap(); file.read_to_end(&mut buf).unwrap();
let header = match file_path.extension().unwrap().to_str().unwrap() { let content_type = match file_path.extension().unwrap().to_str().unwrap() {
"js" => "text/javascript", "js" => "text/javascript",
"html" => "text/html", "html" => "text/html",
"css" => "text/css", "css" => "text/css",
_ => "" _ => ""
}; };
Ok(Response::builder().header("content-type", header).body(Full::new(Bytes::from(buf))).unwrap()) Ok(Response::builder().header("content-type", content_type).body(Full::new(Bytes::from(buf))).unwrap())
} }
Err(_) => not_found().await Err(_) => not_found().await
} }
@@ -100,7 +99,7 @@ async fn get_data(path: &str, req: &Request<Incoming>, db: Arc<Mutex<SqlitePool>
let pool = db.clone().lock().unwrap().clone(); let pool = db.clone().lock().unwrap().clone();
match path { match path {
"/data/players" => { "/data/players" => {
let items = sqlx::query!(r#"SELECT id, name FROM players"#).fetch_all(&pool).await.unwrap(); let items = sqlx::query!(r#"SELECT * FROM players"#).fetch_all(&pool).await.unwrap();
let players: Vec<Player> = items.iter().map(|x| Player { id: x.id, name: x.name.clone() }).collect(); let players: Vec<Player> = items.iter().map(|x| Player { id: x.id, name: x.name.clone() }).collect();
Ok(Response::new(Full::new(Bytes::from(serde_json::to_string(&players).unwrap())))) Ok(Response::new(Full::new(Bytes::from(serde_json::to_string(&players).unwrap()))))
} }
@@ -110,24 +109,36 @@ async fn get_data(path: &str, req: &Request<Incoming>, db: Arc<Mutex<SqlitePool>
} }
"/data/results" => { "/data/results" => {
let votes = get_votes(req, db).await; let votes = get_votes(req, db).await;
let ids: Vec<[i64; 2]> = votes.iter().map(|x| [x.vote_plus_id, x.vote_moins_id]).collect(); let ids: Vec<(i64, bool)> = votes.iter().map(|x| (x.player_id, x.is_plus)).collect();
let mut results: Vec<HashMap<i64, i64>> = Vec::new();
for i in 0..=1 { let mut plus_results: HashMap<i64, i64> = HashMap::new();
let mut counts = HashMap::new(); let mut minus_results: HashMap<i64, i64> = HashMap::new();
ids.iter().for_each(|x| {
if counts.get(&x[i]).is_none() { let _ = ids.iter().filter(|x| x.1).for_each(|x| {
counts.insert(x[i], 0); let id = x.0;
if plus_results.contains_key(&id) {
plus_results.insert(id, 0);
} }
*counts.get_mut(&x[i]).unwrap() += 1; *plus_results.get_mut(&id).unwrap() += 1;
}); });
results.push(counts);
let _ = ids.iter().filter(|x| !x.1).for_each(|x| {
let id = x.0;
if minus_results.contains_key(&id) {
minus_results.insert(id, 0);
} }
*minus_results.get_mut(&id).unwrap() += 1;
});
let mut sorted: Vec<Vec<(i64, i64)>> = results.into_iter().map(|hashmap| hashmap.into_iter().collect::<Vec<(i64, i64)>>()).collect(); let mut plus_results: Vec<(i64, i64)> = plus_results.into_iter().collect();
let mut minus_results: Vec<(i64, i64)> = minus_results.into_iter().collect();
sorted.iter_mut().for_each(|x| x.sort_by(|a, b| b.1.cmp(&a.1))); plus_results.sort_by(|a, b| { b.1.cmp(&a.1) });
minus_results.sort_by(|a, b| { b.1.cmp(&a.1) });
Ok(Response::new(Full::new(Bytes::from(serde_json::to_string(&sorted).unwrap())))) let sorted_results = vec![plus_results, minus_results];
Ok(Response::new(Full::new(Bytes::from(serde_json::to_string(&sorted_results).unwrap()))))
} }
_ => not_found().await _ => not_found().await
} }
@@ -153,15 +164,13 @@ async fn get_votes(req: &Request<Incoming>, db: Arc<Mutex<SqlitePool>>) -> Vec<V
} }
let items = futures::executor::block_on(async move { let items = futures::executor::block_on(async move {
let formatted_date = format!("{}", date.unwrap().format("%d/%m/%Y")); let formatted_date = format!("{}", date.unwrap().format("%d/%m/%Y"));
sqlx::query!(r#"SELECT * FROM votes WHERE timestamp = ?1 ORDER BY id"#, formatted_date).fetch_all(&pool).await.unwrap() sqlx::query!(r#"SELECT * FROM votes WHERE SUBMIT_DATE = ?1 ORDER BY id"#, formatted_date).fetch_all(&pool).await.unwrap()
}); });
items.iter().map(|x| Vote { items.iter().map(|x| Vote {
vote_plus_id: x.plus_id, player_id: x.player_id,
vote_plus_nickname: x.plus_nickname.clone(), nickname: x.nickname.clone(),
vote_plus_reason: x.plus_reason.clone(), reason: x.reason.clone(),
vote_moins_id: x.moins_id, is_plus: x.is_plus,
vote_moins_nickname: x.moins_nickname.clone(),
vote_moins_reason: x.moins_reason.clone(),
}).collect() }).collect()
} }
@@ -170,82 +179,93 @@ async fn post(req: Request<Incoming>, db: Arc<Mutex<SqlitePool>>) -> Result<Resp
if path != "/post" { if path != "/post" {
return Ok(Response::builder().status(StatusCode::BAD_REQUEST).body(Full::new(Bytes::from("Bad Request"))).unwrap()); return Ok(Response::builder().status(StatusCode::BAD_REQUEST).body(Full::new(Bytes::from("Bad Request"))).unwrap());
} }
let body = req.into_body().collect().await.unwrap(); let body = req.into_body().collect().await?;
let data: Result<Vote, serde_json::Error> = serde_json::from_reader(body.aggregate().reader()); let data: Result<Vote, serde_json::Error> = from_reader(body.aggregate().reader());
if data.is_err() { if data.is_err() {
return Ok(Response::builder().status(StatusCode::BAD_REQUEST).body(Full::new(Bytes::from("Bad Request"))).unwrap()) return Ok(Response::builder().status(StatusCode::BAD_REQUEST).body(Full::new(Bytes::from("Bad Request"))).unwrap());
} }
let vote = data.unwrap(); let vote = data.unwrap();
let timestamp: DateTime<Utc> = DateTime::from(SystemTime::now()); let timestamp: DateTime<Utc> = DateTime::from(SystemTime::now());
let formatted = timestamp.format("%d/%m/%Y").to_string(); let formatted = timestamp.format("%d/%m/%Y").to_string();
let pool = db.clone().lock().unwrap().clone(); let pool = db.clone().lock().unwrap().clone();
let mut conn = pool.acquire().await.unwrap(); let mut conn = pool.acquire().await.unwrap();
let result = sqlx::query!(r#"INSERT INTO votes ( plus_id, plus_nickname, plus_reason, moins_id, moins_nickname, moins_reason, timestamp ) let result = sqlx::query!(r#"INSERT INTO votes ( PLAYER_ID, NICKNAME, REASON, IS_PLUS, SUBMIT_DATE )
VALUES ( ?1, ?2, ?3, ?4, ?5, ?6, ?7 )"#, VALUES ( ?1, ?2, ?3, ?4, ?5 )"#,
vote.vote_plus_id, vote.player_id,
vote.vote_plus_nickname, vote.nickname,
vote.vote_plus_reason, vote.reason,
vote.vote_moins_id, vote.is_plus,
vote.vote_moins_nickname,
vote.vote_moins_reason,
formatted).execute(&mut *conn).await; formatted).execute(&mut *conn).await;
if result.is_err() { if result.is_err() {
return Ok(Response::builder().status(StatusCode::INTERNAL_SERVER_ERROR).body(Full::new(Bytes::from("Internet Error"))).unwrap()) return Ok(Response::builder().status(StatusCode::INTERNAL_SERVER_ERROR).body(Full::new(Bytes::from("Internet Error"))).unwrap());
} }
Ok(Response::builder().body(Full::new(Bytes::from("OK"))).unwrap()) Ok(Response::builder().body(Full::new(Bytes::new())).unwrap())
} }
async fn not_found() -> Result<Response<Full<Bytes>>, Error> { async fn not_found() -> Result<Response<Full<Bytes>>, Error> {
let current_dir = env::current_dir().unwrap().clone(); let mut file_path = env::current_dir().expect("Could not get app directory.");
let file_path = current_dir.join("static/html/404.html"); file_path.push("static/html/404.html");
let mut file = File::open(file_path).unwrap(); let mut file = File::open(file_path).unwrap();
let mut buf = Vec::new(); let mut buf = Vec::new();
file.read_to_end(&mut buf).unwrap(); file.read_to_end(&mut buf).unwrap();
Ok(Response::builder().status(StatusCode::NOT_FOUND).body(Full::new(Bytes::from(buf))).unwrap()) Ok(Response::builder().status(StatusCode::NOT_FOUND).body(Full::new(Bytes::from(buf))).unwrap())
} }
fn find_in_vars(pattern: &str) -> String { fn get_settings() -> Settings {
let mut vars = vars(); let mut settings_path = env::current_dir().expect("Could not get app directory. (Required to read settings)");
let result = vars.find(|x| x.0.contains(pattern)).expect(&format!("Could not find '{}' in environment variables", pattern)); settings_path.push("settings.json");
result.1 let settings_file = File::open(settings_path).expect("Could not open settings file, does it exists?");
let settings: Settings = from_reader(settings_file).expect("Could not parse settings, please check syntax.");
settings
} }
#[cfg(target_os = "linux")]
#[tokio::main] #[tokio::main]
async fn main() { async fn main() {
let stdout = File::create("/tmp/daemon.out").unwrap(); let current_directory = env::current_dir().expect("Could not get app directory.");
let stderr = File::create("/tmp/daemon.err").unwrap(); let stdout = File::create("/var/vote/log/daemon.out").unwrap();
let stderr = File::create("/var/vote/log/daemon.err").unwrap();
let daemonize = Daemonize::new() let daemonize = Daemonize::new()
.pid_file("/tmp/test.pid") // Every method except `new` and `start` .pid_file("/var/vote/server.pid")
.chown_pid_file(true) // is optional, see `Daemonize` documentation .chown_pid_file(true)
.working_directory("/home/aindustries/vote-rllhc/") // for default behaviour. .working_directory(current_directory.to_str())
.stdout(stdout) // Redirect stdout to `/tmp/daemon.out`. .stdout(stdout)
.stderr(stderr); // Redirect stderr to `/tmp/daemon.err`. .stderr(stderr);
match daemonize.start() { match daemonize.start() {
Ok(_) => { Ok(_) => run().await,
let _ = dotenv();
let db_adrr = find_in_vars("DATABASE_ADRR");
let db_pool = Arc::new(Mutex::new(SqlitePool::connect(&db_adrr).await.unwrap()));
let bind_adrr: SocketAddr = SocketAddr::from_str(find_in_vars("BIND_ADRR").as_str()).expect("Could not parse bind address");
let listener = TcpListener::bind(bind_adrr).await.expect("Could not bind to address.");
loop {
let (tcp, _) = listener.accept().await.expect("Could not accept stream");
let io = TokioIo::new(tcp);
let db = db_pool.clone();
let service = service_fn(move |req| {
service(req, db.clone())
});
tokio::task::spawn(async move {
if let Err(err) = http1::Builder::new()
.timer(TokioTimer::new())
.serve_connection(io, service).await
{
println!("Failed to serve connection: {:?}", err);
}
});
}
},
Err(e) => eprintln!("Error, {}", e), Err(e) => eprintln!("Error, {}", e),
} }
}
#[cfg(target_os = "windows")]
#[tokio::main]
async fn main() {
run().await;
}
async fn run() {
let settings = get_settings();
let db_pool = Arc::new(Mutex::new(SqlitePool::connect(&settings.database_url).await.expect("Could not connect to database. Make sure the url is correct.")));
let bind_address: SocketAddr = SocketAddr::from_str(&settings.bind_address).expect("Could not parse bind address.");
let listener = TcpListener::bind(bind_address).await.expect("Could not bind to address.");
loop {
let (stream, _) = listener.accept().await.expect("Could not accept incoming stream.");
let io = TokioIo::new(stream);
let db = db_pool.clone();
let service = service_fn(
move |req| {
service(req, db.clone())
}
);
tokio::task::spawn(
async move {
if let Err(err) = http1::Builder::new()
.timer(TokioTimer::new())
.serve_connection(io, service).await {
println!("Failed to serve connection: {:?}", err);
}
}
);
}
} }