chore: maintenance updates & leptos 0.8

Asger Juul Brunshøj 2025-05-08 12:16:23 +02:00
parent e3ef695069
commit 9bbe1dd214
9 changed files with 574 additions and 399 deletions

Cargo.lock (generated): 844 changed lines. File diff suppressed because it is too large.


@@ -9,7 +9,7 @@ publish = false
 crate-type = ["cdylib", "rlib"]

 [dependencies]
-axum = { version = "0.7", optional = true }
+axum = { version = "0.8", optional = true }
 camino = { version = "1.1", optional = true }
 chrono = { version = "0.4.39", features = ["now", "serde"] }
 clap = { version = "4.5.7", features = ["derive"] }
@@ -23,18 +23,17 @@ derive_more = { version = "2", features = [
 ] }
 http = "1"
 image = { version = "0.25", optional = true }
-leptos = { version = "0.7.7", features = ["tracing"] }
-leptos_axum = { version = "0.7", optional = true }
-leptos_meta = { version = "0.7" }
-leptos_router = { version = "0.7.0" }
+leptos = { version = "0.8", features = ["tracing"] }
+leptos_axum = { version = "0.8", optional = true }
+leptos_meta = { version = "0.8" }
+leptos_router = { version = "0.8" }
 moonboard-parser = { workspace = true, optional = true }
 rand = { version = "0.9", default-features = false, features = ["thread_rng"] }
 ron = { version = "0.8" }
 serde = { version = "1", features = ["derive"] }
-server_fn = { version = "0.7.4", features = ["cbor"] }
+server_fn = { version = "0.8", features = ["cbor"] }
 smart-default = "0.7.1"
 tokio = { version = "1", features = ["rt-multi-thread"], optional = true }
-tower = { version = "0.4", optional = true }
 tower-http = { version = "0.5", features = ["fs"], optional = true }
 tracing = { version = "0.1" }
 tracing-subscriber = { version = "0.3.18", features = ["env-filter"] }
@@ -46,7 +45,6 @@ xdg = { version = "2.5", optional = true }
 uuid = { version = "1.12", features = ["serde", "v4"] }
 redb = { version = "2.4", optional = true }
 bincode = { version = "1.3", optional = true }
-serde_json = { version = "1" }
 codee = { version = "0.3" }
 error_reporter = { version = "1" }
 getrandom = { version = "0.3.1" }
@@ -54,9 +52,6 @@ getrandom = { version = "0.3.1" }
 [dev-dependencies]
 test-try = "0.1"
-[dev-dependencies.serde_json]
-version = "1"

 [features]
 hydrate = ["leptos/hydrate", "getrandom/wasm_js", "uuid/js"]
 ssr = [
@@ -65,7 +60,6 @@ ssr = [
   "dep:image",
   "dep:bincode",
   "dep:tokio",
-  "dep:tower",
   "dep:tower-http",
   "dep:leptos_axum",
   "dep:confik",


@@ -3,19 +3,22 @@ pub mod ron {
     use codee::Decoder;
     use codee::Encoder;
+    use leptos::prelude::FromServerFnError;
+    use leptos::prelude::ServerFnErrorErr;
     use serde::Deserialize;
     use serde::Serialize;
     use serde::de::DeserializeOwned;
-    use server_fn::ServerFnError;
+    use server_fn::ContentType;
     use server_fn::codec::Encoding;
     use server_fn::codec::FromReq;
     use server_fn::codec::FromRes;
     use server_fn::codec::IntoReq;
     use server_fn::codec::IntoRes;
+    use server_fn::error::IntoAppError;
     use server_fn::request::ClientReq;
     use server_fn::request::Req;
     use server_fn::response::ClientRes;
-    use server_fn::response::Res;
+    use server_fn::response::TryRes;

     pub struct Ron;
@@ -44,10 +47,13 @@ pub mod ron {
     }

     impl Encoding for Ron {
-        const CONTENT_TYPE: &'static str = "application/ron";
         const METHOD: http::Method = http::Method::POST;
     }

+    impl ContentType for Ron {
+        const CONTENT_TYPE: &'static str = "application/ron";
+    }
+
     #[derive(Debug, Clone)]
     pub struct RonEncoded<T>(pub T);
@@ -74,9 +80,10 @@ pub mod ron {
     where
         Request: ClientReq<Err>,
         T: Serialize,
+        Err: FromServerFnError,
     {
-        fn into_req(self, path: &str, accepts: &str) -> Result<Request, ServerFnError<Err>> {
-            let data = Ron::encode(&self.0).map_err(|e| ServerFnError::Serialization(e.to_string()))?;
+        fn into_req(self, path: &str, accepts: &str) -> Result<Request, Err> {
+            let data = Ron::encode(&self.0).map_err(|e| ServerFnErrorErr::Serialization(e.to_string()).into_app_error())?;
             Request::try_new_post(path, Ron::CONTENT_TYPE, accepts, data)
         }
     }
@@ -86,21 +93,25 @@ pub mod ron {
     where
         Request: Req<Err> + Send,
         T: DeserializeOwned,
+        Err: FromServerFnError,
     {
-        async fn from_req(req: Request) -> Result<Self, ServerFnError<Err>> {
+        async fn from_req(req: Request) -> Result<Self, Err> {
             let data = req.try_into_string().await?;
-            Ron::decode(&data).map(RonEncoded).map_err(|e| ServerFnError::Args(e.to_string()))
+            Ron::decode(&data)
+                .map(RonEncoded)
+                .map_err(|e| ServerFnErrorErr::Args(e.to_string()).into_app_error())
         }
     }

     // IntoRes
-    impl<CustErr, T, Response> IntoRes<Ron, Response, CustErr> for RonEncoded<T>
+    impl<Err, T, Response> IntoRes<Ron, Response, Err> for RonEncoded<T>
     where
-        Response: Res<CustErr>,
+        Response: TryRes<Err>,
         T: Serialize + Send,
+        Err: FromServerFnError,
     {
-        async fn into_res(self) -> Result<Response, ServerFnError<CustErr>> {
-            let data = Ron::encode(&self.0).map_err(|e| ServerFnError::Serialization(e.to_string()))?;
+        async fn into_res(self) -> Result<Response, Err> {
+            let data = Ron::encode(&self.0).map_err(|e| ServerFnErrorErr::Serialization(e.to_string()).into_app_error())?;
             Response::try_from_string(Ron::CONTENT_TYPE, data)
         }
     }
@@ -110,12 +121,13 @@ pub mod ron {
     where
         Response: ClientRes<Err> + Send,
         T: DeserializeOwned,
+        Err: FromServerFnError,
     {
-        async fn from_res(res: Response) -> Result<Self, ServerFnError<Err>> {
+        async fn from_res(res: Response) -> Result<Self, Err> {
             let data = res.try_into_string().await?;
             Ron::decode(&data)
                 .map(RonEncoded)
-                .map_err(|e| ServerFnError::Deserialization(e.to_string()))
+                .map_err(|e| ServerFnErrorErr::Deserialization(e.to_string()).into_app_error())
         }
     }
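For reference, the shape this file migrates to: with server_fn 0.8 the codec impls are generic over an error type bounded by FromServerFnError, and failures are reported via ServerFnErrorErr::...(msg).into_app_error() instead of being returned as ServerFnError<Err>. Below is a minimal, self-contained sketch of that pattern; the helper name and the direct call to ron::to_string are illustrative only, not this repository's code.

use leptos::prelude::{FromServerFnError, ServerFnErrorErr};
use serde::Serialize;
use server_fn::error::IntoAppError;

// Hypothetical helper: serialize a value to RON and convert any failure into
// the server function's application-level error type, mirroring the impls above.
fn encode_ron_or_app_error<T, Err>(value: &T) -> Result<String, Err>
where
    T: Serialize,
    Err: FromServerFnError,
{
    ron::to_string(value).map_err(|e| ServerFnErrorErr::Serialization(e.to_string()).into_app_error())
}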


@@ -24,7 +24,7 @@ impl Pattern {
     #[must_use]
     pub fn canonicalize(&self) -> Self {
         let mut pattern = self.clone();
-        let min_col = pattern.pattern.iter().map(|(hold_position, _)| hold_position.col).min().unwrap_or(0);
+        let min_col = pattern.pattern.keys().map(|hold_position| hold_position.col).min().unwrap_or(0);
         pattern.pattern = pattern
             .pattern
             .iter()
@@ -42,7 +42,7 @@
     #[must_use]
     pub fn shift_left(&self, shift: u64) -> Option<Self> {
         // Out of bounds check
-        if let Some(min_col) = self.pattern.iter().map(|(hold_position, _)| hold_position.col).min() {
+        if let Some(min_col) = self.pattern.keys().map(|hold_position| hold_position.col).min() {
             if shift > min_col {
                 return None;
             }
@@ -52,7 +52,7 @@
             .pattern
             .iter()
             .map(|(hold_position, hold_role)| {
-                let mut hold_position = hold_position.clone();
+                let mut hold_position = *hold_position;
                 hold_position.col -= shift;
                 (hold_position, *hold_role)
             })
@@ -64,7 +64,7 @@
     #[must_use]
     pub fn shift_right(&self, wall_dimensions: WallDimensions, shift: u64) -> Option<Self> {
         // Out of bounds check
-        if let Some(max_col) = self.pattern.iter().map(|(hold_position, _)| hold_position.col).max() {
+        if let Some(max_col) = self.pattern.keys().map(|hold_position| hold_position.col).max() {
             if max_col + shift >= wall_dimensions.cols {
                 return None;
             }
@@ -74,7 +74,7 @@
             .pattern
             .iter()
             .map(|(hold_position, hold_role)| {
-                let mut hold_position = hold_position.clone();
+                let mut hold_position = *hold_position;
                 hold_position.col += shift;
                 (hold_position, *hold_role)
             })
@@ -87,8 +87,8 @@
     pub fn mirror(&self) -> Self {
         let mut pattern = self.clone();
-        let min_col = pattern.pattern.iter().map(|(hold_position, _)| hold_position.col).min().unwrap_or(0);
-        let max_col = pattern.pattern.iter().map(|(hold_position, _)| hold_position.col).max().unwrap_or(0);
+        let min_col = pattern.pattern.keys().map(|hold_position| hold_position.col).min().unwrap_or(0);
+        let max_col = pattern.pattern.keys().map(|hold_position| hold_position.col).max().unwrap_or(0);
         pattern.pattern = pattern
             .pattern
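The recurring change in this file replaces .iter().map(|(hold_position, _)| ...) with .keys().map(...) where only the key is needed, and dereferences the Copy key instead of cloning it. A small standalone sketch follows; the HoldPosition fields and the map's value type are assumed here for illustration.

use std::collections::BTreeMap;

// Assumed stand-in for the real key type; it only needs to be Copy and Ord here.
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
struct HoldPosition {
    col: u64,
    row: u64,
}

fn min_col(pattern: &BTreeMap<HoldPosition, u8>) -> u64 {
    // Same result as .iter().map(|(hold_position, _)| hold_position.col),
    // but without destructuring entries whose values are unused.
    pattern.keys().map(|hold_position| hold_position.col).min().unwrap_or(0)
}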


@@ -29,20 +29,18 @@ pub fn Settings() -> impl IntoView {
     }
 }

-#[component]
-#[tracing::instrument(skip_all)]
-fn Import(wall_uid: WallUid) -> impl IntoView {
-    let import_from_mini_moonboard = ServerAction::<ImportFromMiniMoonboard>::new();
-
-    let onclick = move |_mouse_event| {
-        import_from_mini_moonboard.dispatch(ImportFromMiniMoonboard { wall_uid });
-    };
-
-    view! {
-        <p>"Import problems from"</p>
-        <button on:click=onclick>"Mini Moonboard"</button>
-    }
-}
+// #[component]
+// #[tracing::instrument(skip_all)]
+// fn Import(wall_uid: WallUid) -> impl IntoView {
+//     let import_from_mini_moonboard = ServerAction::<ImportFromMiniMoonboard>::new();
+//     let onclick = move |_mouse_event| {
+//         import_from_mini_moonboard.dispatch(ImportFromMiniMoonboard { wall_uid });
+//     };
+//     view! {
+//         <p>"Import problems from"</p>
+//         <button on:click=onclick>"Mini Moonboard"</button>
+//     }
+// }

 #[server]
 #[tracing::instrument]


@@ -307,7 +307,7 @@ fn Transformations() -> impl IntoView {
 fn LikedButton() -> impl IntoView {
     crate::tracing::on_enter!();
-    let ctx = use_context::<Context>().unwrap();
+    let _ctx = use_context::<Context>().unwrap();
     view! { <Button text="Saved" icon=Icon::HeartOutline color=Gradient::PinkRed /> }
 }
@@ -697,7 +697,7 @@ mod signals {
             wall.problems
                 .iter()
                 .filter(|problem| filter_holds.iter().all(|hold_pos| problem.pattern.pattern.contains_key(hold_pos)))
-                .map(|problem| problem.clone())
+                .cloned()
                 .collect::<BTreeSet<models::Problem>>()
         })
     })
@@ -717,7 +717,7 @@
             problem_set
                 .iter()
                 .filter(|problem| filter_holds.iter().all(|hold_pos| problem.pattern.pattern.contains_key(hold_pos)))
-                .map(|problem| problem.clone())
+                .cloned()
                 .collect::<HashSet<models::Problem>>()
         })
         .filter(|set| !set.is_empty())


@@ -70,13 +70,13 @@ pub async fn migrate_to_v4(db: &Database) -> Result<(), Box<dyn std::error::Erro
             .map(|problem_uid| {
                 let old_prob = &problems_dump[&(wall_uid, problem_uid)];
                 let method = old_prob.method;
-                let problem = models::Problem {
+                models::Problem {
                     pattern: models::Pattern {
                         pattern: old_prob.holds.clone(),
                     },
                     method,
-                };
-                problem
+                }
             })
             .collect();
         let wall = models::Wall {

flake.lock (generated): 18 changed lines.

@@ -10,11 +10,11 @@
         ]
       },
       "locked": {
-        "lastModified": 1741078851,
-        "narHash": "sha256-1Qu/Uu+yPUDhHM2XjTbwQqpSrYhhHu7TpHHrT7UO/0o=",
+        "lastModified": 1746696290,
+        "narHash": "sha256-YokYinNgGIu80OErVMuFoIELhetzb45aWKTiKYNXvWA=",
         "owner": "plul",
         "repo": "basecamp",
-        "rev": "3e4579d8b4400506e5f53069448b3471608b5281",
+        "rev": "108ef2874fd8f934602cda5bfdc0e58a541c6b4a",
         "type": "github"
       },
       "original": {
@@ -25,11 +25,11 @@
     },
     "nixpkgs": {
       "locked": {
-        "lastModified": 1742456341,
-        "narHash": "sha256-yvdnTnROddjHxoQqrakUQWDZSzVchczfsuuMOxg476c=",
+        "lastModified": 1746576598,
+        "narHash": "sha256-FshoQvr6Aor5SnORVvh/ZdJ1Sa2U4ZrIMwKBX5k2wu0=",
         "owner": "NixOS",
         "repo": "nixpkgs",
-        "rev": "7344a3b78128f7b1765dba89060b015fb75431a7",
+        "rev": "b3582c75c7f21ce0b429898980eddbbf05c68e55",
         "type": "github"
       },
       "original": {
@@ -53,11 +53,11 @@
         ]
       },
       "locked": {
-        "lastModified": 1742524367,
-        "narHash": "sha256-KzTwk/5ETJavJZYV1DEWdCx05M4duFCxCpRbQSKWpng=",
+        "lastModified": 1746671794,
+        "narHash": "sha256-V+mpk2frYIEm85iYf+KPDmCGG3zBRAEhbv0E3lHdG2U=",
         "owner": "oxalica",
         "repo": "rust-overlay",
-        "rev": "70bf752d176b2ce07417e346d85486acea9040ef",
+        "rev": "ceec434b8741c66bb8df5db70d7e629a9d9c598f",
         "type": "github"
       },
       "original": {


@@ -8,6 +8,9 @@ fmt:
     fd --extension=rs --exec-batch leptosfmt
     bc-fmt

+fix:
+    bc-fix
+
 serve:
     RUST_BACKTRACE=1 cargo leptos watch -- serve