Compare commits
10 commits: 8f9b7b413c ... 9a94efd29b

Commits:
9a94efd29b
30b75cc936
94e26f8080
8ee362b991
61ad0a17e8
c1bea8284c
a46a2e8847
efec2e670f
24c67bc529
a680d01d7f

20 changed files with 602 additions and 481 deletions

.env (2 changed lines)

@@ -1 +1 @@
-DATABASE_URL=sqlite://${HOME}/.what2watch.db
+DATABASE_URL=sqlite://${HOME}/.local/share/what2watch/what2watch.db

.gitignore (vendored, 1 changed line)

@@ -1,3 +1,4 @@
 /target
 /libjulid.so
 ww-style
+*.db

Cargo.lock (generated, 711 changed lines)

File diff suppressed because it is too large.

Cargo.toml (16 changed lines)

@@ -14,26 +14,28 @@ askama = { version = "0.12", features = ["with-axum"] }
 askama_axum = "0.4"
 async-trait = "0.1"
 axum = { version = "0.7", features = ["macros"] }
-axum-login = "0.11"
+axum-login = "0.14"
 axum-macros = "0.4"
 chrono = { version = "0.4", default-features = false, features = ["std", "clock"] }
 clap = { version = "4", features = ["derive", "env", "unicode", "suggestions", "usage"] }
-figment = { version = "0.10", default-features = false, features = ["toml", "env"] }
-http = "1.0.0"
+confy = "0.6"
+dirs = "5"
+http = "1"
 julid-rs = "1"
 justerror = "1"
-parse_duration = "2.1.1"
+parse_duration = "2"
 password-auth = "1"
 password-hash = { version = "0.5", features = ["std", "getrandom"] }
 rand = "0.8"
 serde = { version = "1", features = ["derive"] }
-sha256 = { version = "1.4.0", default-features = false }
+sha256 = { version = "1", default-features = false }
 sqlx = { version = "0.7", default-features = false, features = ["runtime-tokio", "sqlite", "tls-none", "migrate"] }
 thiserror = "1"
-tokio = { version = "1", features = ["full", "tracing"], default-features = false }
+tokio = { version = "1", features = ["rt-multi-thread", "signal", "tracing"], default-features = false }
 tower = { version = "0.4", features = ["util", "timeout"], default-features = false }
 tower-http = { version = "0.5", features = ["add-extension", "trace", "tracing", "fs"], default-features = false }
-tower-sessions = { version = "0.8", default-features = false, features = ["sqlite-store"] }
+tower-sessions = { version = "0.11", default-features = false }
+tower-sessions-sqlx-store = { version = "0.11.0", default-features = false, features = ["sqlite"] }
 tracing = "0.1"
 tracing-subscriber = { version = "0.3", features = ["env-filter"] }
 unicode-segmentation = "1"

migrations/20240203053645_fts.down.sql (new file, 2 lines)

@@ -0,0 +1,2 @@
+drop table if exists star_search;
+drop table if exists watch_search;

migrations/20240203053645_fts.up.sql (new file, 16 lines)

@@ -0,0 +1,16 @@
+create virtual table if not exists star_search using fts5 (name, id UNINDEXED, tokenize = 'trigram', content = 'stars', content_rowid=rowid);
+create trigger if not exists stars_update_search after insert on stars begin
+insert into star_search (rowid, name, id) values (new.rowid, new.name, new.id);
+end;
+create trigger if not exists stars_delete_search after delete on stars begin
+insert into star_search (star_search, rowid, name, id) values ('delete', old.rowid, old.name, old.id);
+end;
+
+create virtual table if not exists watch_search using fts5 (title, id UNINDEXED, tokenize = 'trigram', content = 'watches', content_rowid=rowid);
+create trigger if not exists watches_update_search after insert on watches begin
+insert into watch_search (rowid, title, id) values (new.rowid, new.title, new.id);
+end;
+create trigger if not exists watches_delete_search after delete on watches begin
+insert into watch_search (watch_search, rowid, title, id) values ('delete', old.rowid, old.title, old.id);
+end;
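
An aside, not part of the diff: the triggers above keep the external-content FTS5 tables in sync with the stars and watches tables, so a search becomes a subselect against the *_search table. A minimal sketch of such a lookup with sqlx, mirroring the match/rank query that src/search.rs uses later in this compare; the function name here is illustrative only.

use sqlx::SqlitePool;

// Sketch only: fetch titles of watches whose FTS5 entry matches `term`,
// best-ranked first, via the watch_search table created by this migration.
async fn matching_titles(pool: &SqlitePool, term: &str) -> sqlx::Result<Vec<String>> {
    sqlx::query_scalar(
        "select title from watches where id in \
         (select id from watch_search where title match ? order by rank)",
    )
    .bind(term)
    .fetch_all(pool)
    .await
}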

@@ -1,9 +1,12 @@
 use async_trait::async_trait;
-use axum_login::{AuthUser, AuthnBackend, UserId};
+use axum_login::{
+    tower_sessions::{cookie::time::Duration, Expiry, SessionManagerLayer},
+    AuthUser, AuthnBackend, UserId,
+};
 use julid::Julid;
 use password_auth::verify_password;
 use sqlx::SqlitePool;
-use tower_sessions::{cookie::time::Duration, Expiry, SessionManagerLayer, SqliteStore};
+use tower_sessions_sqlx_store::SqliteStore;
 
 use crate::User;
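
The import changes above come from the axum-login 0.14 / tower-sessions 0.11 upgrade, where the SQLite session store moved out into the separate tower-sessions-sqlx-store crate. The body of session_layer is not part of this compare; what follows is a minimal sketch of the usual wiring, assuming the items imported above, and the 7-day expiry is an arbitrary illustration.

use sqlx::SqlitePool;

// Sketch only, not taken from this diff: build the session layer from the
// sqlx-backed SQLite store and hand it to AuthManagerLayerBuilder (see the
// src/lib.rs hunks further down).
async fn session_layer(pool: SqlitePool) -> SessionManagerLayer<SqliteStore> {
    let store = SqliteStore::new(pool);
    store.migrate().await.expect("could not run session-store migrations");
    SessionManagerLayer::new(store).with_expiry(Expiry::OnInactivity(Duration::days(7)))
}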

@@ -3,8 +3,9 @@ use std::time::Duration;
 use clap::Parser;
 use julid::Julid;
 use parse_duration::parse;
 use serde::Serialize;
 use sqlx::SqlitePool;
-use what2watch::{get_db_pool, Invitation, User};
+use what2watch::{conf::Config, get_db_pool, Invitation, User};
 
 #[derive(Debug, Parser)]
 struct Cli {

@@ -18,7 +19,7 @@ struct Cli {
     pub number: u8,
 }
 
-struct Iq {
+struct InvitationQuest {
     owner: Julid,
     expires: Option<Duration>,
     uses: Option<u8>,

@@ -32,7 +33,7 @@ fn main() {
         .expires_in
         .map(|e| parse(&e).expect("Could not parse {e} as a duration"));
     let uses = cli.uses;
-    let quest = Iq {
+    let quest = InvitationQuest {
         owner,
         expires,
         uses,

@@ -40,6 +41,9 @@ fn main() {
 
     let pool = get_db_pool();
 
+    let conf = Config::get();
+    let base_url = &conf.base_url;
+
     let rt = tokio::runtime::Builder::new_multi_thread()
         .enable_all()
         .build()

@@ -47,10 +51,12 @@ fn main() {
 
     let invites = rt.block_on(async {
         ensure_omega(&pool).await;
-        generate_invites(quest, num, &pool).await
+        let invites = generate_invites(quest, num, &pool).await;
+        pool.close().await;
+        invites
     });
     for invite in invites {
-        println!("{invite}");
+        println!("{base_url}/signup/{invite}");
     }
 }
 

@@ -64,7 +70,7 @@ async fn ensure_omega(pool: &SqlitePool) {
     }
 }
 
-async fn generate_invites(quest: Iq, number: u8, pool: &SqlitePool) -> Vec<Julid> {
+async fn generate_invites(quest: InvitationQuest, number: u8, pool: &SqlitePool) -> Vec<Julid> {
     let mut invites = Vec::with_capacity(number as usize);
     for _ in 0..number {
         let mut invite = Invitation::new(quest.owner);

@@ -74,7 +80,8 @@ async fn generate_invites(quest: Iq, number: u8, pool: &SqlitePool) -> Vec<Julid
         if let Some(expires) = quest.expires {
             invite = invite.with_expires_in(expires);
         }
-        let invite = Invitation::commit(&invite, pool)
+        let invite = invite
+            .commit(pool)
             .await
             .expect("Error inserting invite into DB");
         invites.push(invite);

src/conf.rs (new file, 42 lines)

@@ -0,0 +1,42 @@
+use serde::{Deserialize, Serialize};
+
+pub const APP_NAME: &str = "what2watch";
+const CONFIG_NAME: Option<&str> = Some("config");
+
+#[derive(Debug, Clone, PartialEq, Eq, Deserialize, Serialize)]
+pub struct Config {
+    pub base_url: String,
+    pub db_file: String,
+}
+
+impl Default for Config {
+    fn default() -> Self {
+        let mut datadir = dirs::data_dir().unwrap();
+        datadir.push(APP_NAME);
+        datadir.push("what2watch.db");
+        Self {
+            base_url: "http://localhost:3000".into(),
+            db_file: datadir.as_os_str().to_string_lossy().to_string(),
+        }
+    }
+}
+
+impl Config {
+    pub fn get() -> Self {
+        let config: Config = confy::load(APP_NAME, CONFIG_NAME).unwrap_or_else(|e| {
+            tracing::debug!("Could not load {APP_NAME} config, got error {e}");
+            Default::default()
+        });
+        confy::store(APP_NAME, CONFIG_NAME, config.clone()).unwrap_or_else(|e| {
+            tracing::debug!("Could not store {APP_NAME} config, got error {e}");
+        });
+        tracing::info!("Loading config from {}", cpath());
+        config
+    }
+}
+
+fn cpath() -> String {
+    confy::get_configuration_file_path(APP_NAME, CONFIG_NAME)
+        .map(|p| p.as_os_str().to_str().unwrap().to_string())
+        .expect("couldn't get the path to the configuration file")
+}
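
For orientation, not part of the diff: Config::get() loads (and writes back) a file that confy keeps under the platform config directory, while the default db_file points into the platform data directory via dirs. A minimal sketch of how the rest of this compare consumes the new type:

// Sketch only: the invite binary and src/db.rs in this compare use the new
// Config roughly like this.
use what2watch::conf::Config;

fn main() {
    let conf = Config::get(); // loads or creates the confy-managed config file
    println!("base_url: {}", conf.base_url); // "http://localhost:3000" by default
    println!("db_file:  {}", conf.db_file);  // <data_dir>/what2watch/what2watch.db by default
}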

src/db.rs (10 changed lines)

@@ -15,9 +15,11 @@ pub fn get_db_pool() -> SqlitePool {
     std::env::var("DATABASE_FILE").unwrap_or_else(|_| {
         #[cfg(not(test))]
         {
-            let home =
-                std::env::var("HOME").expect("Could not determine $HOME for finding db file");
-            format!("{home}/.what2watch.db")
+            let f = crate::conf::Config::get().db_file;
+            let p = std::path::Path::new(&f);
+            let p = p.parent().unwrap();
+            std::fs::create_dir_all(p).expect("couldn't create data dir");
+            f
         }
         #[cfg(test)]
         {

@@ -36,7 +38,6 @@ pub fn get_db_pool() -> SqlitePool {
 
     let conn_opts = SqliteConnectOptions::new()
         .foreign_keys(true)
-        .auto_vacuum(sqlx::sqlite::SqliteAutoVacuum::Incremental)
        .journal_mode(SqliteJournalMode::Wal)
         .synchronous(sqlx::sqlite::SqliteSynchronous::Normal)
         .filename(&db_filename)

@@ -44,6 +45,7 @@ pub fn get_db_pool() -> SqlitePool {
         .extension("./libjulid")
         .busy_timeout(Duration::from_secs(TIMEOUT))
         .create_if_missing(true)
+        .optimize_on_close(true, None)
         .pragma("mmap_size", "3000000000");
 
     let pool = SqlitePoolOptions::new()

@@ -82,7 +82,7 @@ pub async fn import_imdb_data(w2w_db: &SqlitePool, imdb: &SqlitePool, ids: &mut
         .await
         .unwrap();
 
-    for batch in iwatches.chunks(2_000) {
+    for batch in iwatches.chunks(5_000) {
         let mut tx = w2w_db.acquire().await.unwrap();
         let mut tx = tx.begin().await.unwrap();
         for iwatch in batch {

@@ -114,24 +114,32 @@ async fn add_imdb_stars(
 
     for row in principals {
         let (name_id, cat) = row;
-        let name_query =
-            "select nconst, primaryName, birthYear, deathYear from names where nconst = ?";
-        let istar: Option<ImdbStar> = sqlx::query_as(name_query)
-            .bind(&name_id)
-            .fetch_optional(imdb)
-            .await
-            .unwrap();
-        if let Some(star) = istar {
-            let star = (&star).into();
-            let star_id = insert_star(&star, w2w_db).await;
-            ids.insert(name_id, star_id);
-            let credit = Credit {
-                star: star_id,
-                watch,
-                credit: Some(cat.to_string()),
-            };
-            insert_credit(&credit, w2w_db).await;
-        }
+        let star = if let Some(id) = ids.get(&name_id) {
+            *id
+        } else {
+            let name_query =
+                "select nconst, primaryName, birthYear, deathYear from names where nconst = ?";
+            let istar: Option<ImdbStar> = sqlx::query_as(name_query)
+                .bind(&name_id)
+                .fetch_optional(imdb)
+                .await
+                .unwrap();
+            if let Some(star) = istar {
+                let star = (&star).into();
+                let star_id = insert_star(&star, w2w_db).await;
+                ids.insert(name_id, star_id);
+                star_id
+            } else {
+                continue;
+            }
+        };
+
+        let credit = Credit {
+            star,
+            watch,
+            credit: Some(cat.to_string()),
+        };
+        insert_credit(&credit, w2w_db).await;
     }
 }
 

@@ -47,6 +47,19 @@ pub async fn insert_credit(credit: &Credit, db: &mut SqliteConnection) {
         .bind(credit.credit.as_deref())
         .execute(db)
         .await
+        .map(|_| ())
+        .or_else(|e| match e {
+            sqlx::Error::Database(ref db) => {
+                let exit = db.code().unwrap_or_default().parse().unwrap_or(0u32);
+                // https://www.sqlite.org/rescode.html codes for unique constraint violations:
+                if exit == 2067 || exit == 1555 {
+                    Ok(())
+                } else {
+                    Err(e)
+                }
+            }
+            _ => Err(e),
+        })
         .unwrap();
 }
 

src/lib.rs (19 changed lines)

@@ -1,9 +1,8 @@
 use axum::{
-    error_handling::HandleErrorLayer,
     middleware,
     routing::{get, post, IntoMakeService},
-    BoxError,
 };
+use axum_login::AuthManagerLayerBuilder;
 use sqlx::SqlitePool;
 #[macro_use]
 extern crate justerror;

@@ -11,10 +10,12 @@ extern crate justerror;
 /// Some public interfaces for interacting with the database outside of the web
 /// app
 pub use db::get_db_pool;
+pub mod conf;
 pub mod imdb_utils;
 pub mod import_utils;
 pub mod misc_util;
 
+pub use conf::*;
 pub use signup::Invitation;
 pub use stars::*;
 pub use users::User;

@@ -27,6 +28,7 @@ mod auth;
 mod db;
 mod generic_handlers;
 mod login;
+mod search;
 mod signup;
 mod stars;
 mod templates;

@@ -45,21 +47,18 @@ pub async fn app(db_pool: sqlx::SqlitePool) -> IntoMakeService<axum::Router> {
     use auth::*;
     use generic_handlers::{handle_slash, handle_slash_redir};
     use login::{get_login, get_logout, post_login, post_logout};
+    use search::get_search_watch;
     use signup::handlers::{get_create_user, get_signup_success, post_create_user};
     use tower_http::services::ServeDir;
     use watches::handlers::{
-        get_add_new_watch, get_search_watch, get_watch, get_watch_status, get_watches,
-        post_add_new_watch, post_add_watch_quest,
+        get_add_new_watch, get_watch, get_watch_status, get_watches, post_add_new_watch,
+        post_add_watch_quest,
     };
 
     let auth_layer = {
-        let sessions = session_layer(db_pool.clone()).await;
+        let session_layer = session_layer(db_pool.clone()).await;
         let store = AuthStore::new(db_pool.clone());
-        tower::ServiceBuilder::new()
-            .layer(HandleErrorLayer::new(|_: BoxError| async {
-                http::StatusCode::BAD_REQUEST
-            }))
-            .layer(axum_login::AuthManagerLayerBuilder::new(store, sessions).build())
+        AuthManagerLayerBuilder::new(store, session_layer).build()
     };
 
     let assets_dir = std::env::current_dir().unwrap().join("assets");

src/main.rs (30 changed lines)

@@ -27,8 +27,36 @@ fn main() {
         let addr: SocketAddr = ([0, 0, 0, 0], 3000).into();
         tracing::debug!("binding to {addr:?}");
         let listener = tokio::net::TcpListener::bind(&addr).await.unwrap();
-        axum::serve(listener, app).await.unwrap();
+        axum::serve(listener, app)
+            .with_graceful_shutdown(graceful_shutdown())
+            .await
+            .unwrap()
     });
 
     rt.block_on(pool.close());
 }
+
+async fn graceful_shutdown() {
+    use tokio::signal;
+    let ctrl_c = async {
+        signal::ctrl_c()
+            .await
+            .expect("failed to install Ctrl+C handler");
+    };
+
+    #[cfg(unix)]
+    let terminate = async {
+        signal::unix::signal(signal::unix::SignalKind::terminate())
+            .expect("failed to install signal handler")
+            .recv()
+            .await;
+    };
+
+    #[cfg(not(unix))]
+    let terminate = std::future::pending::<()>();
+
+    tokio::select! {
+        _ = ctrl_c => {tracing::info!("shutting down")},
+        _ = terminate => {},
+    }
+}

src/search.rs (new file, 70 lines)

@@ -0,0 +1,70 @@
+use askama::Template;
+use axum::{
+    extract::{Query, State},
+    response::IntoResponse,
+};
+use serde::{Deserialize, Serialize};
+use sqlx::{query_as, SqlitePool};
+
+use crate::{
+    misc_util::empty_string_as_none, AuthSession, OptionalOptionalUser, Star, User, Watch,
+};
+
+#[derive(Debug, Default, Template, Deserialize, Serialize, PartialEq, Eq, OptionalOptionalUser)]
+#[template(path = "search_watches_page.html")]
+pub struct SearchPage {
+    pub results: Vec<Watch>,
+    pub user: Option<User>,
+}
+
+#[derive(Debug, Clone, Deserialize, PartialEq, Eq)]
+pub enum SearchResult {
+    Star(Star),
+    Watch(Watch),
+}
+
+#[derive(Debug, Default, Clone, Deserialize, PartialEq, Eq)]
+pub struct SearchQuery {
+    #[serde(default, deserialize_with = "empty_string_as_none")]
+    pub search: Option<String>,
+    #[serde(default, deserialize_with = "empty_string_as_none")]
+    pub title: Option<String>,
+    #[serde(default, deserialize_with = "empty_string_as_none")]
+    pub kind: Option<String>,
+    #[serde(default, deserialize_with = "empty_string_as_none")]
+    pub year: Option<i64>,
+}
+
+pub async fn get_search_watch(
+    auth: AuthSession,
+    State(pool): State<SqlitePool>,
+    search: Query<SearchQuery>,
+) -> impl IntoResponse {
+    const DEFAULT_WATCHES_QUERY: &str =
+        "select * from (select * from watches order by random() limit 50) order by release_date asc";
+
+    let user = auth.user;
+    let search_query = search.0;
+
+    let query = if search_query == SearchQuery::default() {
+        query_as(DEFAULT_WATCHES_QUERY)
+    } else if let Some(title) = search_query.title {
+        query_as(
+            "select * from watches where id in (select id from watch_search where title match ? order by rank)",
+        )
+        .bind(title)
+    } else if let Some(ref search) = search_query.search {
+        query_as("select * from watches where id in (select id from watch_search where title match ?) outer join (select * from stars where id in (select id from star_search where name match ?)) s")
+            .bind(search).bind(search)
+    } else {
+        query_as(DEFAULT_WATCHES_QUERY)
+    };
+
+    // until tantivy search
+    let watches: Vec<Watch> = query.fetch_all(&pool).await.unwrap();
+
+    SearchPage {
+        results: watches,
+        user,
+    }
+}

@@ -42,13 +42,14 @@ impl Default for Invitation {
 
 impl Invitation {
     pub async fn commit(&self, db: &SqlitePool) -> Result<Julid, CreateInviteError> {
-        sqlx::query_scalar(
+        let mut tx = db.begin().await.unwrap();
+        let id = sqlx::query_scalar(
             "insert into invites (owner, expires_at, remaining) values (?, ?, ?) returning id",
         )
         .bind(self.owner)
         .bind(self.expires_at)
         .bind(self.remaining)
-        .fetch_optional(db)
+        .fetch_optional(&mut *tx)
         .await
         .map_err(|e| {
             tracing::debug!("Got error creating invite: {e}");

@@ -64,7 +65,9 @@ impl Invitation {
                 CreateInviteErrorKind::Unknown
             }
         })?
-        .ok_or(CreateInviteErrorKind::Unknown.into())
+        .ok_or(CreateInviteErrorKind::Unknown.into());
+        tx.commit().await.unwrap();
+        id
     }
 
     pub fn new(owner: Julid) -> Self {

@@ -1,5 +1,5 @@
 use axum::{
-    extract::{Form, Path, Query, State},
+    extract::{Form, Path, State},
     http::StatusCode,
     response::{IntoResponse, Redirect, Response},
 };

@@ -8,7 +8,7 @@ use julid::Julid;
 use serde::Deserialize;
 use sqlx::{query, query_as, query_scalar, SqlitePool};
 
-use super::templates::{AddNewWatchPage, AddWatchButton, GetWatchPage, SearchWatchesPage};
+use super::templates::{AddNewWatchPage, AddWatchButton, GetWatchPage};
 use crate::{
     misc_util::{empty_string_as_none, year_to_epoch},
     AuthSession, MyWatchesPage, ShowKind, Watch, WatchQuest,

@@ -23,9 +23,6 @@ const GET_QUEST_QUERY: &str = "select * from watch_quests where user = ? and wat
 
 const GET_WATCH_QUERY: &str = "select * from watches where id = $1";
 
-const DEFAULT_WATCHES_QUERY: &str =
-    "select * from (select * from watches order by random() limit 50) order by release_date asc";
-
 const ADD_WATCH_QUERY: &str = "insert into watches (title, kind, release_date, metadata_url, added_by, length) values ($1, $2, $3, $4, $5, $6) returning id";
 const ADD_WATCH_QUEST_QUERY: &str =
     "insert into watch_quests (user, watch, public, watched) values ($1, $2, $3, $4)";

@@ -65,18 +62,6 @@ impl IntoResponse for AddError {
 // Types for receiving arguments from forms
 //-************************************************************************
 
-#[derive(Debug, Default, Clone, Deserialize, PartialEq, Eq)]
-pub struct SearchQuery {
-    #[serde(default, deserialize_with = "empty_string_as_none")]
-    pub search: Option<String>,
-    #[serde(default, deserialize_with = "empty_string_as_none")]
-    pub title: Option<String>,
-    #[serde(default, deserialize_with = "empty_string_as_none")]
-    pub kind: Option<String>,
-    #[serde(default, deserialize_with = "empty_string_as_none")]
-    pub year: Option<i64>,
-}
-
 // kinda the main form?
 #[derive(Debug, Default, Deserialize, PartialEq, Eq)]
 pub struct PostAddNewWatch {

@@ -239,32 +224,6 @@ pub async fn get_watches(auth: AuthSession, State(pool): State<SqlitePool>) -> i
     MyWatchesPage { watches, user }
 }
 
-pub async fn get_search_watch(
-    auth: AuthSession,
-    State(pool): State<SqlitePool>,
-    search: Query<SearchQuery>,
-) -> impl IntoResponse {
-    let user = auth.user;
-    let search_query = search.0;
-
-    let query = if search_query == SearchQuery::default() {
-        query_as(DEFAULT_WATCHES_QUERY)
-    } else if let Some(title) = search_query.title {
-        let q = format!("%{title}%");
-        query_as("select * from watches where title like ?").bind(q)
-    } else if let Some(search) = search_query.search {
-        let q = format!("%{search}");
-        query_as("select * from watches where title like ?").bind(q)
-    } else {
-        query_as(DEFAULT_WATCHES_QUERY)
-    };
-
-    // until tantivy search
-    let watches: Vec<Watch> = query.fetch_all(&pool).await.unwrap();
-
-    SearchWatchesPage { watches, user }
-}
-
 pub async fn get_watch_status(
     auth: AuthSession,
     State(pool): State<SqlitePool>,

@@ -11,13 +11,6 @@ pub struct MyWatchesPage {
     pub user: Option<User>,
 }
 
-#[derive(Debug, Default, Template, Deserialize, Serialize, PartialEq, Eq, OptionalOptionalUser)]
-#[template(path = "search_watches_page.html")]
-pub struct SearchWatchesPage {
-    pub watches: Vec<Watch>,
-    pub user: Option<User>,
-}
-
 #[derive(Debug, Default, Template, Deserialize, Serialize, PartialEq, Eq, OptionalOptionalUser)]
 #[template(path = "get_watch_page.html")]
 pub struct GetWatchPage {

@@ -18,7 +18,7 @@
         </tr>
     </thead>
     <tbody>
-        {% for watch in watches %}
+        {% for res in results %}
         {% include "watch-search-result.html" %}
         {% endfor %}
     </tbody>

@@ -1,18 +1,18 @@
-<tr id="watchlist-item-{{watch.id}}">
-    <td><span class="watchtitle"><a href="/watch/{{watch.id}}">{{watch.title}}</a></span></td>
+<tr id="watchlist-item-{{res.id}}">
+    <td><span class="watchtitle"><a href="/watch/{{res.id}}">{{res.title}}</a></span></td>
     <td><span>
-        {% match watch.metadata_url %}
+        {% match res.metadata_url %}
         {% when Some with (mdurl) %}
         <a href="{{ mdurl }}">{{ mdurl }}</a>
         {% when None %}
         {% endmatch %}
     </span>
     </td>
-    <td>{{watch.kind}}</td>
-    <td> {% call m::get_or_default(watch.year(), "when??") -%}</td>
+    <td>{{res.kind}}</td>
+    <td> {% call m::get_or_default(res.year(), "when??") -%}</td>
     <td>
-        <span id="add-watch-{{watch.id}}">
-            <span hx-get="/watch/status/{{watch.id}}" hx-target="this" hx-trigger="load, reveal"
+        <span id="add-watch-{{res.id}}">
+            <span hx-get="/watch/status/{{res.id}}" hx-target="this" hx-trigger="load, reveal"
                 hx-swap="outerHTML">???</span>
         </span>
     </td>