diff --git a/.env.example b/.env.example index 514787da..981abab8 100644 --- a/.env.example +++ b/.env.example @@ -1,8 +1,5 @@ # Database configuration -DATABASE_URL=postgres://admin:password@127.0.0.1:5432/portfolio -POSTGRES_USER=admin -POSTGRES_PASSWORD=password -POSTGRES_DB=portfolio +DATABASE_URL=sqlite:sqlite.db # Server configuration LEPTOS_SITE_ADDR=0.0.0.0:3000 @@ -10,7 +7,10 @@ RUST_LOG=info RUST_BACKTRACE=1 # JWT Secret (CHANGE THIS IN PRODUCTION!) -JWT_SECRET=your-super-secret-key-change-in-production +JWT_SECRET=change-this-to-a-long-random-secret-in-production # Application environment ENVIRONMENT=development + +# Proxy Configuration (REQUIRED in production if using Nginx/Reverse Proxy) +# TRUSTED_PROXY_IPS=172.18.0.2,172.18.0.3 diff --git a/.envrc b/.envrc index 2a701615..d0a23f02 100644 --- a/.envrc +++ b/.envrc @@ -1,8 +1,9 @@ +#!/usr/bin/env bash use flake # Load environment variables export RUST_LOG=info -export DATABASE_URL="postgres://admin:password@127.0.0.1:5432/portfolio" +export DATABASE_URL="sqlite://sqlite.db" export RUST_BACKTRACE=1 # Optional: Print helpful info when entering directory diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 19b6f1f3..5637a65c 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -21,7 +21,8 @@ jobs: components: rustfmt - name: Run cargo fmt run: cargo fmt --all -- --check - + - name: Run cargo fmt for hgen + run: cd hgen && cargo fmt --all -- --check clippy: name: Lint with Clippy runs-on: ubuntu-latest @@ -33,6 +34,8 @@ jobs: components: clippy - name: Run cargo clippy run: cargo clippy --workspace --all-targets --all-features -- -D warnings + - name: Run cargo clippy for hgen + run: cd hgen && cargo clippy --all-targets --all-features -- -D warnings test: name: Run Tests @@ -45,3 +48,7 @@ jobs: uses: taiki-e/install-action@nextest - name: Run tests with nextest run: cargo nextest run --workspace --all-features + - name: Check hgen compilation + run: cd hgen && cargo 
check --all-targets --all-features + - name: Run tests for hgen + run: cd hgen && cargo test diff --git a/.gitignore b/.gitignore index f9aa68cc..6ab4583f 100644 --- a/.gitignore +++ b/.gitignore @@ -69,3 +69,5 @@ imports/ .sass-cache/ .sass-cache +# Hgen +hgen/target/ diff --git a/Cargo.lock b/Cargo.lock index 062bdde9..23773dd4 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -139,10 +139,10 @@ dependencies = [ "axum-core", "bytes", "futures-util", - "http 1.4.0", - "http-body 1.0.1", + "http", + "http-body", "http-body-util", - "hyper 1.8.1", + "hyper", "hyper-util", "itoa", "matchit", @@ -156,7 +156,7 @@ dependencies = [ "serde_json", "serde_path_to_error", "serde_urlencoded", - "sync_wrapper 1.0.2", + "sync_wrapper", "tokio", "tower", "tower-layer", @@ -173,13 +173,13 @@ dependencies = [ "async-trait", "bytes", "futures-util", - "http 1.4.0", - "http-body 1.0.1", + "http", + "http-body", "http-body-util", "mime", "pin-project-lite", "rustversion", - "sync_wrapper 1.0.2", + "sync_wrapper", "tower-layer", "tower-service", "tracing", @@ -211,17 +211,12 @@ dependencies = [ "tokio", "tower", "tower-http", + "tower_governor", "tracing", "tracing-subscriber", "uuid", ] -[[package]] -name = "base64" -version = "0.21.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9d297deb1925b89f2ccc13d7635fa0714f12c87adce1c75356b39ca9b7178567" - [[package]] name = "base64" version = "0.22.1" @@ -234,12 +229,6 @@ version = "1.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7d809780667f4410e7c41b07f52439b94d2bdf8528eeedc287fa38d3b7f95d82" -[[package]] -name = "bitflags" -version = "1.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" - [[package]] name = "bitflags" version = "2.10.0" @@ -420,16 +409,6 @@ dependencies = [ "unicode-segmentation", ] -[[package]] -name = "core-foundation" -version = "0.9.4" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "91e195e091a93c46f7102ec7818a2aa394e1e1771c3ab4825963fa03e45afb8f" -dependencies = [ - "core-foundation-sys", - "libc", -] - [[package]] name = "core-foundation-sys" version = "0.8.7" @@ -485,6 +464,19 @@ dependencies = [ "typenum", ] +[[package]] +name = "dashmap" +version = "5.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "978747c1d849a7d2ee5e8adc0159961c48fb7e5db2f06af6723b80123bb53856" +dependencies = [ + "cfg-if", + "hashbrown 0.14.5", + "lock_api", + "once_cell", + "parking_lot_core", +] + [[package]] name = "dashmap" version = "6.1.0" @@ -530,12 +522,6 @@ dependencies = [ "syn", ] -[[package]] -name = "deunicode" -version = "1.6.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "abd57806937c9cc163efc8ea3910e00a62e2aeb0b8119f1793a978088f8f6b04" - [[package]] name = "digest" version = "0.10.7" @@ -653,12 +639,6 @@ dependencies = [ "pin-project-lite", ] -[[package]] -name = "fastrand" -version = "2.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be" - [[package]] name = "find-msvc-tools" version = "0.1.7" @@ -676,12 +656,6 @@ dependencies = [ "spin", ] -[[package]] -name = "fnv" -version = "1.0.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" - [[package]] name = "foldhash" version = "0.1.5" @@ -689,27 +663,22 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d9c4f5dac5e15c24eb999c26181a6ca40b39fe946cbe4c263c7209467bc83af2" [[package]] -name = "foreign-types" -version = "0.3.2" +name = "form_urlencoded" +version = "1.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f6f339eb8adc052cd2ca78910fda869aefa38d22d5cb648e6485e4d3fc06f3b1" +checksum = 
"cb4cb245038516f5f85277875cdaa4f7d2c9a0fa0468de06ed190163b1581fcf" dependencies = [ - "foreign-types-shared", + "percent-encoding", ] [[package]] -name = "foreign-types-shared" +name = "forwarded-header-value" version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b" - -[[package]] -name = "form_urlencoded" -version = "1.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cb4cb245038516f5f85277875cdaa4f7d2c9a0fa0468de06ed190163b1581fcf" +checksum = "8835f84f38484cc86f110a805655697908257fb9a7af005234060891557198e9" dependencies = [ - "percent-encoding", + "nonempty", + "thiserror 1.0.69", ] [[package]] @@ -819,6 +788,12 @@ version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f90f7dce0722e95104fcb095585910c0977252f286e354b5e3bd38902cd99988" +[[package]] +name = "futures-timer" +version = "3.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f288b0a4f20f9a56b5d1da57e2227c661b7b16168e2f72365f57b63326e29b24" + [[package]] name = "futures-util" version = "0.3.31" @@ -904,7 +879,7 @@ dependencies = [ "futures-core", "futures-sink", "gloo-utils", - "http 1.4.0", + "http", "js-sys", "pin-project", "serde", @@ -929,29 +904,30 @@ dependencies = [ ] [[package]] -name = "guardian" -version = "1.3.0" +name = "governor" +version = "0.6.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "17e2ac29387b1aa07a1e448f7bb4f35b500787971e965b02842b900afa5c8f6f" +checksum = "68a7f542ee6b35af73b06abc0dad1c1bae89964e4e253bc4b587b91c9637867b" +dependencies = [ + "cfg-if", + "dashmap 5.5.3", + "futures", + "futures-timer", + "no-std-compat", + "nonzero_ext", + "parking_lot", + "portable-atomic", + "quanta", + "rand", + "smallvec", + "spinning_top", +] [[package]] -name = "h2" -version = "0.3.27" +name = "guardian" +version = "1.3.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "0beca50380b1fc32983fc1cb4587bfa4bb9e78fc259aad4a0032d2080309222d" -dependencies = [ - "bytes", - "fnv", - "futures-core", - "futures-sink", - "futures-util", - "http 0.2.12", - "indexmap", - "slab", - "tokio", - "tokio-util", - "tracing", -] +checksum = "17e2ac29387b1aa07a1e448f7bb4f35b500787971e965b02842b900afa5c8f6f" [[package]] name = "hashbrown" @@ -1039,17 +1015,6 @@ dependencies = [ "utf8-width", ] -[[package]] -name = "http" -version = "0.2.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "601cbb57e577e2f5ef5be8e7b83f0f63994f25aa94d673e54a92d5c516d101f1" -dependencies = [ - "bytes", - "fnv", - "itoa", -] - [[package]] name = "http" version = "1.4.0" @@ -1060,17 +1025,6 @@ dependencies = [ "itoa", ] -[[package]] -name = "http-body" -version = "0.4.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7ceab25649e9960c0311ea418d17bee82c0dcec1bd053b5f9a66e265a693bed2" -dependencies = [ - "bytes", - "http 0.2.12", - "pin-project-lite", -] - [[package]] name = "http-body" version = "1.0.1" @@ -1078,7 +1032,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1efedce1fb8e6913f23e0c92de8e62cd5b772a67e7b3946df930a62566c93184" dependencies = [ "bytes", - "http 1.4.0", + "http", ] [[package]] @@ -1089,8 +1043,8 @@ checksum = "b021d93e26becf5dc7e1b75b1bed1fd93124b374ceb73f43d4d4eafec896a64a" dependencies = [ "bytes", "futures-core", - "http 1.4.0", - "http-body 1.0.1", + "http", + "http-body", "pin-project-lite", ] @@ -1128,30 +1082,6 @@ dependencies = [ "wasm-bindgen", ] -[[package]] -name = "hyper" -version = "0.14.32" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "41dfc780fdec9373c01bae43289ea34c972e40ee3c9f6b3c8801a35f35586ce7" -dependencies = [ - "bytes", - "futures-channel", - "futures-core", - "futures-util", - "h2", - "http 0.2.12", - "http-body 0.4.6", - "httparse", - 
"httpdate", - "itoa", - "pin-project-lite", - "socket2 0.5.10", - "tokio", - "tower-service", - "tracing", - "want", -] - [[package]] name = "hyper" version = "1.8.1" @@ -1162,8 +1092,8 @@ dependencies = [ "bytes", "futures-channel", "futures-core", - "http 1.4.0", - "http-body 1.0.1", + "http", + "http-body", "httparse", "httpdate", "itoa", @@ -1173,19 +1103,6 @@ dependencies = [ "tokio", ] -[[package]] -name = "hyper-tls" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d6183ddfa99b85da61a140bea0efc93fdf56ceaa041b37d553518030827f9905" -dependencies = [ - "bytes", - "hyper 0.14.32", - "native-tls", - "tokio", - "tokio-native-tls", -] - [[package]] name = "hyper-util" version = "0.1.19" @@ -1194,9 +1111,9 @@ checksum = "727805d60e7938b76b826a6ef209eb70eaa1812794f9424d4a4e2d740662df5f" dependencies = [ "bytes", "futures-core", - "http 1.4.0", - "http-body 1.0.1", - "hyper 1.8.1", + "http", + "http-body", + "hyper", "pin-project-lite", "tokio", "tower-service", @@ -1353,12 +1270,6 @@ dependencies = [ "rustversion", ] -[[package]] -name = "ipnet" -version = "2.11.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "469fb0b9cefa57e3ef31275ee7cacb78f2fdca44e4765491884a2b119d4eb130" - [[package]] name = "itertools" version = "0.14.0" @@ -1390,7 +1301,7 @@ version = "9.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5a87cc7a48537badeae96744432de36f4be2b4a34a05a5ef32e9dd8a1c169dde" dependencies = [ - "base64 0.22.1", + "base64", "js-sys", "pem", "ring", @@ -1415,7 +1326,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "26b8731cb00f3f0894058155410b95c8955b17273181d2bc72600ab84edd24f1" dependencies = [ "any_spawner", - "base64 0.22.1", + "base64", "cfg-if", "either_of", "futures", @@ -1453,7 +1364,7 @@ checksum = "b5e4b8a1c190800d24f0c79e6c320186ad31ca8800e54c057ad65cdf452ff7d0" dependencies = [ "any_spawner", "axum", - "dashmap", + 
"dashmap 6.1.0", "futures", "hydration_context", "leptos", @@ -1612,7 +1523,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "66985242812ec95e224fb48effe651ba02728beca92c461a9464c811a71aab11" dependencies = [ "any_spawner", - "base64 0.22.1", + "base64", "codee", "futures", "hydration_context", @@ -1643,7 +1554,7 @@ version = "0.1.12" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3d0b95e02c851351f877147b7deea7b1afb1df71b63aa5f8270716e0c5720616" dependencies = [ - "bitflags 2.10.0", + "bitflags", "libc", "redox_syscall 0.7.0", ] @@ -1654,6 +1565,7 @@ version = "0.30.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2e99fb7a497b1e3339bc746195567ed8d3e24945ecd636e3619d20b9de9e9149" dependencies = [ + "cc", "pkg-config", "vcpkg", ] @@ -1664,12 +1576,6 @@ version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bfae20f6b19ad527b550c223fddc3077a547fc70cda94b9b566575423fd303ee" -[[package]] -name = "linux-raw-sys" -version = "0.11.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "df1d3c3b53da64cf5760482273a98e575c651a67eec7f77df96b5b642de8f039" - [[package]] name = "litemap" version = "0.8.1" @@ -1745,22 +1651,6 @@ version = "2.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f52b00d39961fc5b2736ea853c9cc86238e165017a493d1d5c8eac6bdc4cc273" -[[package]] -name = "migration_tool" -version = "0.1.0" -dependencies = [ - "chrono", - "dotenvy", - "reqwest", - "serde", - "serde_json", - "shared", - "slug", - "sqlx", - "tokio", - "uuid", -] - [[package]] name = "mime" version = "0.3.17" @@ -1797,7 +1687,7 @@ dependencies = [ "bytes", "encoding_rs", "futures-util", - "http 1.4.0", + "http", "httparse", "memchr", "mime", @@ -1805,29 +1695,30 @@ dependencies = [ "version_check", ] -[[package]] -name = "native-tls" -version = "0.2.14" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "87de3442987e9dbec73158d5c715e7ad9072fda936bb03d19d7fa10e00520f0e" -dependencies = [ - "libc", - "log", - "openssl", - "openssl-probe", - "openssl-sys", - "schannel", - "security-framework", - "security-framework-sys", - "tempfile", -] - [[package]] name = "next_tuple" version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "60993920e071b0c9b66f14e2b32740a4e27ffc82854dcd72035887f336a09a28" +[[package]] +name = "no-std-compat" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b93853da6d84c2e3c7d730d6473e8817692dd89be387eb01b94d7f108ecb5b8c" + +[[package]] +name = "nonempty" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e9e591e719385e6ebaeb5ce5d3887f7d5676fceca6411d1925ccc95745f3d6f7" + +[[package]] +name = "nonzero_ext" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "38bf9645c8b145698bb0b18a4637dcacbc421ea49bef2317e4fd8065a387cf21" + [[package]] name = "nu-ansi-term" version = "0.50.3" @@ -1925,50 +1816,6 @@ version = "1.21.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "42f5e15c9953c5e4ccceeb2e7382a716482c34515315f7b03532b8b4e8393d2d" -[[package]] -name = "openssl" -version = "0.10.75" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "08838db121398ad17ab8531ce9de97b244589089e290a384c900cb9ff7434328" -dependencies = [ - "bitflags 2.10.0", - "cfg-if", - "foreign-types", - "libc", - "once_cell", - "openssl-macros", - "openssl-sys", -] - -[[package]] -name = "openssl-macros" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "openssl-probe" -version = "0.1.6" -source 
= "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d05e27ee213611ffe7d6348b942e8f942b37114c00cc03cec254295a4a17852e" - -[[package]] -name = "openssl-sys" -version = "0.9.111" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "82cab2d520aa75e3c58898289429321eb788c3106963d0dc886ec7a5f4adc321" -dependencies = [ - "cc", - "libc", - "pkg-config", - "vcpkg", -] - [[package]] name = "or_poisoned" version = "0.1.0" @@ -2033,7 +1880,7 @@ version = "3.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1d30c53c26bc5b31a98cd02d20f25a7c8567146caf63ed593a9d87b2775291be" dependencies = [ - "base64 0.22.1", + "base64", "serde_core", ] @@ -2111,6 +1958,12 @@ version = "0.3.32" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7edddbd0b52d732b21ad9a5fab5c704c14cd949e5e9a1ec5929a24fded1b904c" +[[package]] +name = "portable-atomic" +version = "1.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c33a9471896f1c69cecef8d20cbe2f7accd12527ce60845ff44c153bb2a21b49" + [[package]] name = "potential_utf" version = "0.1.4" @@ -2200,6 +2053,21 @@ dependencies = [ "yansi", ] +[[package]] +name = "quanta" +version = "0.12.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f3ab5a9d756f0d97bdc89019bd2e4ea098cf9cde50ee7564dde6b81ccc8f06c7" +dependencies = [ + "crossbeam-utils", + "libc", + "once_cell", + "raw-cpuid", + "wasi", + "web-sys", + "winapi", +] + [[package]] name = "quote" version = "1.0.43" @@ -2267,6 +2135,15 @@ dependencies = [ "getrandom 0.2.16", ] +[[package]] +name = "raw-cpuid" +version = "11.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "498cd0dc59d73224351ee52a95fee0f1a617a2eae0e7d9d720cc622c73a54186" +dependencies = [ + "bitflags", +] + [[package]] name = "reactive_graph" version = "0.1.8" @@ -2322,7 +2199,7 @@ version = "0.5.18" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "ed2bf2547551a7053d6fdfafda3f938979645c44812fbfcda098faae3f1a362d" dependencies = [ - "bitflags 2.10.0", + "bitflags", ] [[package]] @@ -2331,7 +2208,7 @@ version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "49f3fe0889e69e2ae9e41f4d6c4c0181701d00e4697b356fb1f74173a5e0ee27" dependencies = [ - "bitflags 2.10.0", + "bitflags", ] [[package]] @@ -2363,46 +2240,6 @@ version = "0.8.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7a2d987857b319362043e95f5353c0535c1f58eec5336fdfcf626430af7def58" -[[package]] -name = "reqwest" -version = "0.11.27" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dd67538700a17451e7cba03ac727fb961abb7607553461627b97de0b89cf4a62" -dependencies = [ - "base64 0.21.7", - "bytes", - "encoding_rs", - "futures-core", - "futures-util", - "h2", - "http 0.2.12", - "http-body 0.4.6", - "hyper 0.14.32", - "hyper-tls", - "ipnet", - "js-sys", - "log", - "mime", - "native-tls", - "once_cell", - "percent-encoding", - "pin-project-lite", - "rustls-pemfile", - "serde", - "serde_json", - "serde_urlencoded", - "sync_wrapper 0.1.2", - "system-configuration", - "tokio", - "tokio-native-tls", - "tower-service", - "url", - "wasm-bindgen", - "wasm-bindgen-futures", - "web-sys", - "winreg", -] - [[package]] name = "ring" version = "0.17.14" @@ -2458,19 +2295,6 @@ version = "2.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "357703d41365b4b27c590e3ed91eabb1b663f07c4c084095e60cbed4362dff0d" -[[package]] -name = "rustix" -version = "1.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "146c9e247ccc180c1f61615433868c99f3de3ae256a30a43b49f67c2d9171f34" -dependencies = [ - "bitflags 2.10.0", - "errno", - "libc", - "linux-raw-sys", - "windows-sys 0.61.2", -] - [[package]] name = "rustls" version = "0.23.36" @@ -2485,15 +2309,6 @@ dependencies = 
[ "zeroize", ] -[[package]] -name = "rustls-pemfile" -version = "1.0.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1c74cae0a4cf6ccbbf5f359f08efdf8ee7e1dc532573bf0db71968cb56b1448c" -dependencies = [ - "base64 0.21.7", -] - [[package]] name = "rustls-pki-types" version = "1.13.2" @@ -2535,44 +2350,12 @@ dependencies = [ "winapi-util", ] -[[package]] -name = "schannel" -version = "0.1.28" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "891d81b926048e76efe18581bf793546b4c0eaf8448d72be8de2bbee5fd166e1" -dependencies = [ - "windows-sys 0.61.2", -] - [[package]] name = "scopeguard" version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" -[[package]] -name = "security-framework" -version = "2.11.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "897b2245f0b511c87893af39b033e5ca9cce68824c4d7e7630b5a1d339658d02" -dependencies = [ - "bitflags 2.10.0", - "core-foundation", - "core-foundation-sys", - "libc", - "security-framework-sys", -] - -[[package]] -name = "security-framework-sys" -version = "2.15.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cc1f0cbffaac4852523ce30d8bd3c5cdc873501d96ff467ca09b6767bb8cd5c0" -dependencies = [ - "core-foundation-sys", - "libc", -] - [[package]] name = "send_wrapper" version = "0.6.0" @@ -2677,12 +2460,12 @@ dependencies = [ "axum", "bytes", "const_format", - "dashmap", + "dashmap 6.1.0", "futures", "gloo-net", - "http 1.4.0", + "http", "http-body-util", - "hyper 1.8.1", + "hyper", "inventory", "js-sys", "once_cell", @@ -2821,16 +2604,6 @@ dependencies = [ "version_check", ] -[[package]] -name = "slug" -version = "0.1.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "882a80f72ee45de3cc9a5afeb2da0331d58df69e4e7d8eeb5d3c7784ae67e724" -dependencies = [ - "deunicode", - 
"wasm-bindgen", -] - [[package]] name = "smallvec" version = "1.15.1" @@ -2840,16 +2613,6 @@ dependencies = [ "serde", ] -[[package]] -name = "socket2" -version = "0.5.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e22376abed350d73dd1cd119b57ffccad95b4e585a7cda43e286245ce23c0678" -dependencies = [ - "libc", - "windows-sys 0.52.0", -] - [[package]] name = "socket2" version = "0.6.1" @@ -2869,6 +2632,15 @@ dependencies = [ "lock_api", ] +[[package]] +name = "spinning_top" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d96d2d1d716fb500937168cc09353ffdc7a012be8475ac7308e1bdf0e3923300" +dependencies = [ + "lock_api", +] + [[package]] name = "spki" version = "0.7.3" @@ -2898,7 +2670,7 @@ version = "0.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ee6798b1838b6a0f69c007c133b8df5866302197e404e8b6ee8ed3e3a5e68dc6" dependencies = [ - "base64 0.22.1", + "base64", "bytes", "chrono", "crc", @@ -2975,8 +2747,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "aa003f0038df784eb8fecbbac13affe3da23b45194bd57dba231c8f48199c526" dependencies = [ "atoi", - "base64 0.22.1", - "bitflags 2.10.0", + "base64", + "bitflags", "byteorder", "bytes", "chrono", @@ -3019,8 +2791,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "db58fcd5a53cf07c184b154801ff91347e4c30d17a3562a635ff028ad5deda46" dependencies = [ "atoi", - "base64 0.22.1", - "bitflags 2.10.0", + "base64", + "bitflags", "byteorder", "chrono", "crc", @@ -3123,12 +2895,6 @@ dependencies = [ "syn", ] -[[package]] -name = "sync_wrapper" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2047c6ded9c721764247e62cd3b03c09ffc529b2ba5b10ec482ae507a4a70160" - [[package]] name = "sync_wrapper" version = "1.0.2" @@ -3146,27 +2912,6 @@ dependencies = [ "syn", ] -[[package]] -name = "system-configuration" -version = 
"0.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ba3a3adc5c275d719af8cb4272ea1c4a6d668a777f37e115f6d11ddbc1c8e0e7" -dependencies = [ - "bitflags 1.3.2", - "core-foundation", - "system-configuration-sys", -] - -[[package]] -name = "system-configuration-sys" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a75fb188eb626b924683e3b95e3a48e63551fcfb51949de2f06a9d91dbee93c9" -dependencies = [ - "core-foundation-sys", - "libc", -] - [[package]] name = "tachys" version = "0.1.9" @@ -3201,19 +2946,6 @@ dependencies = [ "web-sys", ] -[[package]] -name = "tempfile" -version = "3.24.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "655da9c7eb6305c55742045d5a8d2037996d61d8de95806335c7c86ce0f82e9c" -dependencies = [ - "fastrand", - "getrandom 0.3.4", - "once_cell", - "rustix", - "windows-sys 0.61.2", -] - [[package]] name = "thiserror" version = "1.0.69" @@ -3340,7 +3072,7 @@ dependencies = [ "parking_lot", "pin-project-lite", "signal-hook-registry", - "socket2 0.6.1", + "socket2", "tokio-macros", "windows-sys 0.61.2", ] @@ -3356,16 +3088,6 @@ dependencies = [ "syn", ] -[[package]] -name = "tokio-native-tls" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bbae76ab933c85776efabc971569dd6119c580d8f5d448769dec1764bf796ef2" -dependencies = [ - "native-tls", - "tokio", -] - [[package]] name = "tokio-stream" version = "0.1.18" @@ -3430,7 +3152,7 @@ dependencies = [ "futures-core", "futures-util", "pin-project-lite", - "sync_wrapper 1.0.2", + "sync_wrapper", "tokio", "tower-layer", "tower-service", @@ -3443,12 +3165,12 @@ version = "0.6.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d4e6559d53cc268e5031cd8429d05415bc4cb4aefc4aa5d6cc35fbf5b924a1f8" dependencies = [ - "bitflags 2.10.0", + "bitflags", "bytes", "futures-core", "futures-util", - "http 1.4.0", - "http-body 1.0.1", + "http", + 
"http-body", "http-body-util", "http-range-header", "httpdate", @@ -3475,6 +3197,22 @@ version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8df9b6e13f2d32c91b9bd719c00d1958837bc7dec474d94952798cc8e69eeec3" +[[package]] +name = "tower_governor" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "aea939ea6cfa7c4880f3e7422616624f97a567c16df67b53b11f0d03917a8e46" +dependencies = [ + "axum", + "forwarded-header-value", + "governor", + "http", + "pin-project", + "thiserror 1.0.69", + "tower", + "tracing", +] + [[package]] name = "tracing" version = "0.1.44" @@ -3537,12 +3275,6 @@ dependencies = [ "tracing-log", ] -[[package]] -name = "try-lock" -version = "0.2.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b" - [[package]] name = "typed-builder" version = "0.20.1" @@ -3684,15 +3416,6 @@ dependencies = [ "winapi-util", ] -[[package]] -name = "want" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bfa7760aed19e106de2c7c0b581b509f2f25d3dacaf737cb82ac61bc6d760b0e" -dependencies = [ - "try-lock", -] - [[package]] name = "wasi" version = "0.11.1+wasi-snapshot-preview1" @@ -3823,6 +3546,22 @@ dependencies = [ "wasite", ] +[[package]] +name = "winapi" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" +dependencies = [ + "winapi-i686-pc-windows-gnu", + "winapi-x86_64-pc-windows-gnu", +] + +[[package]] +name = "winapi-i686-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" + [[package]] name = "winapi-util" version = "0.1.11" @@ -3832,6 +3571,12 @@ dependencies = [ "windows-sys 0.61.2", ] +[[package]] +name = 
"winapi-x86_64-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" + [[package]] name = "windows-core" version = "0.62.2" @@ -4122,16 +3867,6 @@ dependencies = [ "memchr", ] -[[package]] -name = "winreg" -version = "0.50.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "524e57b2c537c0f9b1e69f1965311ec12182b4122e45035b1508cd24d2adadb1" -dependencies = [ - "cfg-if", - "windows-sys 0.48.0", -] - [[package]] name = "wit-bindgen" version = "0.46.0" diff --git a/Cargo.toml b/Cargo.toml index 09bc9428..b12e1081 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,6 +1,8 @@ [workspace] resolver = "2" -members = ["frontend", "backend", "shared", "migration"] +members = ["frontend", "backend", "shared"] +# 'hgen' is a standalone utility excluded from the workspace so it doesn't build by default. +exclude = ["hgen"] [workspace.dependencies] leptos = { version = "0.7" } diff --git a/GEMINI.md b/GEMINI.md index fcdeb0a0..4adcde1a 100644 --- a/GEMINI.md +++ b/GEMINI.md @@ -62,7 +62,7 @@ cargo leptos watch 5. **Deployment**: Deployment is managed by `./scripts/deploy.sh`. Changes to infrastructure should be mirrored in both `compose.yaml` and `compose.prod.yaml` where applicable. - - Tick off tasks in the roadmap as they are completed. +- Tick off tasks in the roadmap as they are completed. - Update the roadmap as the project progresses. - Update the plan as the project progresses. - Update the GEMINI.md SPARINGLY as the project progresses. diff --git a/README.md b/README.md index 4506a5eb..c534036a 100644 --- a/README.md +++ b/README.md @@ -27,11 +27,15 @@ For first-time SSL setup on the server: - **Reverse Proxy**: Nginx with Let's Encrypt SSL - **Deployment**: Docker Compose +### Known Limitations +- **Database Concurrency**: The application uses embedded SQLite in WAL mode with a small connection pool (`max_connections(5)`). 
SQLite only allows one concurrent writer. Concurrent write bursts will queue (up to a 5s busy timeout) and could fail under heavy write load. This is acceptable for a personal blog/portfolio, but must be accounted for if write traffic scales. +- **Reverse Proxy Setup**: When deploying behind a reverse proxy (such as Nginx), you **MUST** configure the `TRUSTED_PROXY_IPS` environment variable with the proxy's IP address. If left unset, all client requests will appear to come from the proxy's IP, effectively disabling per-client rate limiting and causing all users to share the same rate limit bucket. + ## Development ### Quick Start with Nix (Recommended) ```bash -direnv allow # Load development environment +direnv allow # Load development environment ./scripts/setup-dev.sh # Setup database cargo leptos watch # Start dev server ``` @@ -47,9 +51,8 @@ cargo leptos watch ## Project Structure - `backend/` - Server-side Rust code -- `frontend/` - Client-side Leptos components +- `frontend/` - Client-side Leptos components - `shared/` - Shared types and utilities -- `migration/` - Database migration tools - `flake.nix` - Nix development environment - `.envrc` - direnv configuration @@ -58,8 +61,7 @@ cargo leptos watch - [x] **HTTPS/SSL** - Let's Encrypt certificates - [x] **Authentication** - Password-protected admin panel - [x] **Theme** - Modern indigo design -- [ ] **Admin features** - Post creation, sync manager -- [ ] **Content sync** - Import from terracestandard.com +- [ ] **Admin features** - Post creation - [ ] **Media library** - Photo/video management -- [ ] **Password hashing** - Bcrypt implementation +- [x] **Password hashing** - Argon2 implementation - [ ] **Password reset** - Email-based recovery diff --git a/backend/Cargo.toml b/backend/Cargo.toml index 1c01d0bd..cfae5cd0 100644 --- a/backend/Cargo.toml +++ b/backend/Cargo.toml @@ -7,13 +7,13 @@ edition = "2021" axum = { workspace = true } tokio = { workspace = true } sqlx = { version = "0.8", features = [ - 
"postgres", + "sqlite", "runtime-tokio-rustls", "uuid", "chrono", "macros", ] } -argon2 = "0.5" +argon2 = { version = "0.5", features = ["std", "password-hash"] } rand = "0.8" uuid = { version = "1.0", features = ["serde", "v4"] } serde = { version = "1.0", features = ["derive"] } @@ -37,7 +37,8 @@ leptos_axum = { workspace = true } frontend = { path = "../frontend", default-features = false, features = [ "ssr", ] } +tower_governor = "0.4.2" [features] -default = [] -ssr = ["leptos/ssr", "leptos_meta/ssr", "leptos_router/ssr", "frontend/ssr"] +default = ["ssr"] +ssr = ["leptos/ssr", "leptos_meta/ssr", "leptos_router/ssr", "frontend/ssr", "shared/ssr"] diff --git a/backend/src/api/admin.rs b/backend/src/api/admin.rs index e8fef503..7b7d9648 100644 --- a/backend/src/api/admin.rs +++ b/backend/src/api/admin.rs @@ -1,32 +1,38 @@ +use argon2::{ + password_hash::{rand_core::OsRng, PasswordHash, PasswordHasher, PasswordVerifier, SaltString}, + Argon2, +}; +use axum::body::to_bytes; +use axum::body::Body; +use axum::http::{header, Request}; +use axum::response::IntoResponse; +use axum::response::Json; use axum::{ extract::State, http::{HeaderMap, StatusCode}, routing::{get, post}, Router, }; -use axum::body::to_bytes; -use axum::http::{header, Request}; -use axum::body::Body; -use axum::response::Json; -use axum::response::Html; -use axum::response::IntoResponse; -use axum::response::Redirect; +use chrono::{Duration, Utc}; +use jsonwebtoken::{encode, EncodingKey, Header}; use serde::{Deserialize, Serialize}; -use sqlx::PgPool; -use jsonwebtoken::{encode, Header, EncodingKey}; -use chrono::{Utc, Duration}; -use argon2::{ - password_hash::{ - rand_core::OsRng, - PasswordHash, PasswordHasher, PasswordVerifier, SaltString - }, - Argon2 -}; +use sqlx::SqlitePool; +use std::sync::OnceLock; + +pub fn init_dummy_hash() { + let _ = get_dummy_hash(); +} -fn get_jwt_secret() -> &'static [u8] { - // In production, use environment variable: 
std::env::var("JWT_SECRET").unwrap_or_default().as_bytes()
-    // For now using a default that should be changed
-    b"change-this-secret-key-in-production-environment"
+fn get_dummy_hash() -> &'static str {
+    static DUMMY_HASH: OnceLock<String> = OnceLock::new();
+    DUMMY_HASH.get_or_init(|| {
+        let password = "dummy-password-that-will-never-match";
+        let salt = SaltString::generate(&mut OsRng);
+        get_argon2()
+            .hash_password(password.as_bytes(), &salt)
+            .expect("Failed to generate dummy hash")
+            .to_string()
+    })
 }
 
 #[derive(Serialize, Deserialize)]
@@ -54,36 +60,97 @@ pub struct ChangePasswordRequest {
 
 #[derive(sqlx::FromRow)]
 struct UserRow {
-    id: uuid::Uuid,
+    id: String,
     password_hash: String,
 }
 
+fn get_argon2() -> &'static Argon2<'static> {
+    static ARGON2: OnceLock<Argon2<'static>> = OnceLock::new();
+    ARGON2.get_or_init(|| {
+        let params = argon2::Params::new(
+            shared::auth::ARGON2_M_COST,
+            shared::auth::ARGON2_T_COST,
+            shared::auth::ARGON2_P_COST,
+            Some(argon2::Params::DEFAULT_OUTPUT_LEN),
+        )
+        .expect("Valid Argon2 parameters");
+        Argon2::new(argon2::Algorithm::Argon2id, argon2::Version::V0x13, params)
+    })
+}
+
 fn hash_password(password: &str) -> Result<String, String> {
     let salt = SaltString::generate(&mut OsRng);
-    let argon2 = Argon2::default();
-    argon2.hash_password(password.as_bytes(), &salt)
+    get_argon2()
+        .hash_password(password.as_bytes(), &salt)
         .map_err(|e| e.to_string())
         .map(|hash| hash.to_string())
 }
 
+#[inline(never)]
 fn verify_password(password: &str, password_hash: &str) -> bool {
     let parsed_hash = match PasswordHash::new(password_hash) {
         Ok(h) => h,
-        Err(_) => return false,
+        Err(_) => {
+            tracing::error!("Failed to parse password hash!");
+            let dummy = get_dummy_hash();
+            let parsed_dummy = PasswordHash::new(dummy).unwrap();
+            let _ = get_argon2().verify_password(password.as_bytes(), &parsed_dummy);
+            return false;
+        }
     };
-    Argon2::default().verify_password(password.as_bytes(), &parsed_hash).is_ok()
+    get_argon2()
+        .verify_password(password.as_bytes(), &parsed_hash)
.is_ok() } pub fn router(state: crate::state::AppState) -> Router { + // KNOWN LIMITATION: tower_governor uses in-memory state. A server restart will reset all rate limit counters. + // Burst windows completely refresh across restarts. Therefore, the effective rate limiting + // window ONLY covers uptime, not absolute calendar time. An attacker who can trigger or observe + // restarts could reset their login throttle window. For a low-traffic personal site, this is an + // acceptable trade-off to avoid the complexity of a distributed rate limiter like Redis. It is REQUIRED + // to pair this with an OS-level fail2ban or log-based alerting to compensate for the login endpoint. + tracing::info!("Initializing rate limiters. Warning: In-memory rate limiter state resets on restart. Frequent restarts may bypass burst limits."); + let shared_auth_governor_config = std::sync::Arc::new( + tower_governor::governor::GovernorConfigBuilder::default() + .key_extractor(crate::api::TrustedProxyIpKeyExtractor) + .per_second(1) + .burst_size(1) + .finish() + .unwrap(), + ); + + let login_governor_layer = tower_governor::GovernorLayer { + config: shared_auth_governor_config.clone(), + }; + + let password_governor_layer = tower_governor::GovernorLayer { + config: shared_auth_governor_config, + }; + + let me_governor_layer = tower_governor::GovernorLayer { + config: std::sync::Arc::new( + tower_governor::governor::GovernorConfigBuilder::default() + .key_extractor(crate::api::TrustedProxyIpKeyExtractor) + .per_second(5) + .burst_size(10) + .finish() + .unwrap(), + ), + }; + Router::new() - .route("/login", post(login)) - .route("/password", post(change_password)) - .route("/me", get(me)) + .route("/login", post(login).route_layer(login_governor_layer)) + .route( + "/password", + post(change_password).route_layer(password_governor_layer), + ) + .route("/me", get(me).route_layer(me_governor_layer)) .with_state(state) } async fn login( - State(pool): State, + State(pool): State, req: 
Request<Body>,
 ) -> Result<impl IntoResponse, (StatusCode, String)> {
     let (parts, body) = req.into_parts();
@@ -92,134 +159,252 @@ async fn login(
         .get(header::CONTENT_TYPE)
         .and_then(|v| v.to_str().ok())
         .unwrap_or("");
-    let accept = parts
-        .headers
-        .get(header::ACCEPT)
-        .and_then(|v| v.to_str().ok())
-        .unwrap_or("");
-    let bytes = to_bytes(body, 64 * 1024)
+    let bytes = to_bytes(body, 16 * 1024)
         .await
         .map_err(|_| (StatusCode::BAD_REQUEST, "Invalid request body".to_string()))?;
 
-    let req: LoginRequest = if content_type.contains("application/json") {
-        serde_json::from_slice(&bytes)
-            .map_err(|_| (StatusCode::BAD_REQUEST, "Invalid JSON".to_string()))?
-    } else if content_type.contains("application/x-www-form-urlencoded")
-        || content_type.contains("multipart/form-data")
-    {
-        serde_urlencoded::from_bytes(&bytes)
-            .map_err(|_| (StatusCode::BAD_REQUEST, "Invalid form data".to_string()))?
-    } else {
-        return Err((StatusCode::UNSUPPORTED_MEDIA_TYPE, "Unsupported content type".to_string()));
-    };
+    if !content_type.contains("application/json") {
+        return Err((
+            StatusCode::UNSUPPORTED_MEDIA_TYPE,
+            "Unsupported content type".to_string(),
+        ));
+    }
 
-    let user: Option<UserRow> = sqlx::query_as("SELECT id, password_hash FROM users WHERE username = $1")
-        .bind(&req.username)
-        .fetch_optional(&pool)
-        .await
-        .map_err(|_| (StatusCode::INTERNAL_SERVER_ERROR, "Database error".to_string()))?;
+    let req: LoginRequest = serde_json::from_slice(&bytes)
+        .map_err(|_| (StatusCode::BAD_REQUEST, "Invalid JSON".to_string()))?;
 
-    let is_invalid = match user {
-        Some(ref u) => !verify_password(&req.password, &u.password_hash),
-        None => true,
-    };
+    // Prevent extremely long passwords from exhausting Argon2 CPU time.
+ if req.password.len() > 128 { + return Err((StatusCode::BAD_REQUEST, "Password too long".to_string())); + } - if is_invalid { - if content_type.contains("application/x-www-form-urlencoded") - || content_type.contains("multipart/form-data") - || accept.contains("text/html") - { - return Ok(Redirect::to("/admin/login?error=invalid").into_response()); + if req.username.len() > 64 { + return Err((StatusCode::BAD_REQUEST, "Username too long".to_string())); + } + + let user: Option = + sqlx::query_as("SELECT id, password_hash FROM users WHERE username = ?") + .bind(&req.username) + .fetch_optional(&pool) + .await + .map_err(|e| { + tracing::error!("Database error during login fetch: {}", e); + ( + StatusCode::INTERNAL_SERVER_ERROR, + "Database error".to_string(), + ) + })?; + + let (hash_to_verify, is_valid_user) = match user { + Some(ref u) => (u.password_hash.as_str(), true), + None => { + // To prevent early-return timing leaks, we always verify a password hash. + // If the user doesn't exist, we use a dummy hash. The dummy hash's source + // password is irrelevant as it's only used to consume time. 
+ (get_dummy_hash(), false) } + }; + + let hash = hash_to_verify.to_string(); + let pw = req.password.clone(); + let password_match = tokio::task::spawn_blocking(move || verify_password(&pw, &hash)) + .await + .unwrap_or(false); + let is_invalid = !is_valid_user || !password_match; + if is_invalid { return Err((StatusCode::UNAUTHORIZED, "Invalid credentials".to_string())); } - let user = user.expect("User should exist when credentials are valid"); - let exp = (Utc::now() + Duration::hours(24)).timestamp() as usize; let claims = Claims { - sub: user.id.to_string(), + sub: user.expect("is_valid_user guarantees Some").id, exp, }; let token = encode( &Header::default(), &claims, - &EncodingKey::from_secret(get_jwt_secret()), + &EncodingKey::from_secret(shared::auth::get_jwt_secret()), ) - .map_err(|_| (StatusCode::INTERNAL_SERVER_ERROR, "Token generation failed".to_string()))?; - - if content_type.contains("application/x-www-form-urlencoded") - || content_type.contains("multipart/form-data") - || accept.contains("text/html") - { - let html = format!( - r#""#, - token - ); - Ok(Html(html).into_response()) - } else { - Ok(Json(LoginResponse { token }).into_response()) - } + .map_err(|e| { + tracing::error!("Token generation failed: {}", e); + ( + StatusCode::INTERNAL_SERVER_ERROR, + "Token generation failed".to_string(), + ) + })?; + + Ok(Json(LoginResponse { token }).into_response()) } -async fn me(headers: HeaderMap) -> Result<&'static str, StatusCode> { - headers +async fn me( + headers: HeaderMap, + connect_info: Option>, +) -> Result, StatusCode> { + // Design Note: The /me endpoint validates the JWT cryptographically but does not query the database. + // This means a deleted user's JWT remains valid until expiration (24h). For a single-admin personal site, + // this is an acceptable performance trade-off. `change_password` does perform a DB lookup. 
+ + let token = headers .get("Authorization") .and_then(|h| h.to_str().ok()) .and_then(|s| s.strip_prefix("Bearer ")) .ok_or(StatusCode::UNAUTHORIZED)?; + let validation = jsonwebtoken::Validation::new(jsonwebtoken::Algorithm::HS256); + let _token_data = jsonwebtoken::decode::( + token, + &jsonwebtoken::DecodingKey::from_secret(shared::auth::get_jwt_secret()), + &validation, + ) + .map_err(|e| { + let proxy_ip = connect_info + .as_ref() + .map(|ci| ci.0.ip().to_string()) + .unwrap_or_else(|| "unknown".to_string()); - Ok("Authenticated") + let client_ip = crate::api::extract_client_ip(&headers, connect_info.map(|ci| ci.0.ip())) + .unwrap_or_else(|| proxy_ip.clone()); + let safe_client_ip = client_ip.replace(['\n', '\r'], " "); + + tracing::warn!( + "Invalid token on /me from client IP {} (via proxy {}): {}", + safe_client_ip, + proxy_ip, + e + ); + StatusCode::UNAUTHORIZED + })?; + + Ok(Json(serde_json::json!({ + "authenticated": true + }))) } async fn change_password( - State(pool): State, - headers: HeaderMap, - Json(req): Json, + State(pool): State, + req: Request, ) -> Result { - let token = headers + let (parts, body) = req.into_parts(); + let content_type = parts + .headers + .get(header::CONTENT_TYPE) + .and_then(|v| v.to_str().ok()) + .unwrap_or(""); + + if !content_type.contains("application/json") { + return Err(( + StatusCode::UNSUPPORTED_MEDIA_TYPE, + "Unsupported content type".to_string(), + )); + } + + let token = parts + .headers .get("Authorization") .and_then(|h| h.to_str().ok()) .and_then(|s| s.strip_prefix("Bearer ")) .ok_or((StatusCode::UNAUTHORIZED, "Missing token".to_string()))?; // Verify token (simple check, ideally decode claims) - let validation = jsonwebtoken::Validation::default(); + let validation = jsonwebtoken::Validation::new(jsonwebtoken::Algorithm::HS256); let token_data = jsonwebtoken::decode::( token, - &jsonwebtoken::DecodingKey::from_secret(get_jwt_secret()), + 
&jsonwebtoken::DecodingKey::from_secret(shared::auth::get_jwt_secret()), &validation, - ).map_err(|_| (StatusCode::UNAUTHORIZED, "Invalid token".to_string()))?; + ) + .map_err(|_| (StatusCode::UNAUTHORIZED, "Invalid token".to_string()))?; + + let bytes = to_bytes(body, 16 * 1024) + .await + .map_err(|_| (StatusCode::BAD_REQUEST, "Invalid request body".to_string()))?; - let user_id = token_data.claims.sub.parse::() - .map_err(|_| (StatusCode::INTERNAL_SERVER_ERROR, "Invalid user ID in token".to_string()))?; + let req: ChangePasswordRequest = serde_json::from_slice(&bytes) + .map_err(|_| (StatusCode::BAD_REQUEST, "Invalid JSON".to_string()))?; + + let current_byte_count = req.current_password.len(); + if current_byte_count > 128 { + return Err(( + StatusCode::BAD_REQUEST, + "Current password length must be no more than 128 bytes (for Argon2 processing)." + .to_string(), + )); + } + + let byte_count = req.new_password.len(); + if !(12..=128).contains(&byte_count) { + return Err(( + StatusCode::BAD_REQUEST, + "New password length must be at least 12 bytes and no more than 128 bytes (policy limit).".to_string(), + )); + } + + let user_id_str = &token_data.claims.sub; + if uuid::Uuid::parse_str(user_id_str).is_err() { + tracing::error!("Valid JWT contained invalid UUID string: {}", user_id_str); + return Err(( + StatusCode::INTERNAL_SERVER_ERROR, + "Invalid token payload".to_string(), + )); + } // Verify current password - let user: Option = sqlx::query_as("SELECT id, password_hash FROM users WHERE id = $1") - .bind(user_id) + let user: Option = sqlx::query_as("SELECT id, password_hash FROM users WHERE id = ?") + .bind(user_id_str) .fetch_optional(&pool) .await - .map_err(|_| (StatusCode::INTERNAL_SERVER_ERROR, "Database error".to_string()))?; + .map_err(|e| { + tracing::error!("Database error fetching user for password change: {}", e); + ( + StatusCode::INTERNAL_SERVER_ERROR, + "Database error".to_string(), + ) + })?; - let user = user.ok_or((StatusCode::NOT_FOUND, 
"User not found".to_string()))?; + let (hash_to_verify, is_valid_user) = match user { + Some(ref u) => (u.password_hash.as_str(), true), + None => (get_dummy_hash(), false), + }; - if !verify_password(&req.current_password, &user.password_hash) { - return Err((StatusCode::FORBIDDEN, "Invalid current password".to_string())); + let hash = hash_to_verify.to_string(); + let pw = req.current_password.clone(); + let password_match = tokio::task::spawn_blocking(move || verify_password(&pw, &hash)) + .await + .unwrap_or(false); + + if !is_valid_user || !password_match { + return Err(( + StatusCode::FORBIDDEN, + "Invalid current password".to_string(), + )); } // Hash new password and update - let new_hash = hash_password(&req.new_password) - .map_err(|_| (StatusCode::INTERNAL_SERVER_ERROR, "Failed to hash password".to_string()))?; + let pw = req.new_password.clone(); + let new_hash = tokio::task::spawn_blocking(move || hash_password(&pw)) + .await + .unwrap_or_else(|_| Err("Task join failed".to_string())) + .map_err(|e| { + tracing::error!("Failed to hash new password: {}", e); + ( + StatusCode::INTERNAL_SERVER_ERROR, + "Failed to hash password".to_string(), + ) + })?; - sqlx::query("UPDATE users SET password_hash = $1 WHERE id = $2") + sqlx::query("UPDATE users SET password_hash = ? 
WHERE id = ?")
         .bind(new_hash)
-        .bind(user_id)
+        .bind(user_id_str)
         .execute(&pool)
         .await
-        .map_err(|_| (StatusCode::INTERNAL_SERVER_ERROR, "Database update failed".to_string()))?;
+        .map_err(|e| {
+            tracing::error!("Database update failed for password change: {}", e);
+            (
+                StatusCode::INTERNAL_SERVER_ERROR,
+                "Database update failed".to_string(),
+            )
+        })?;
 
     Ok(StatusCode::OK)
 }
diff --git a/backend/src/api/mod.rs b/backend/src/api/mod.rs
index 24e0ee32..32210974 100644
--- a/backend/src/api/mod.rs
+++ b/backend/src/api/mod.rs
@@ -1,8 +1,113 @@
+use axum::http::Request;
 use axum::Router;
+use std::sync::OnceLock;
 
-mod admin;
+pub mod admin;
 mod public;
 
+static TRUSTED_PROXY_IPS: OnceLock<Vec<std::net::IpAddr>> = OnceLock::new();
+
+pub fn init_trusted_proxies() {
+    let ips = get_trusted_proxies();
+    tracing::info!("Initialized TRUSTED_PROXY_IPS: {:?}", ips);
+}
+
+pub(crate) fn get_trusted_proxies() -> &'static Vec<std::net::IpAddr> {
+    TRUSTED_PROXY_IPS.get_or_init(|| {
+        std::env::var("TRUSTED_PROXY_IPS")
+            .unwrap_or_default()
+            .split(',')
+            .filter_map(|s| {
+                let trimmed = s.trim();
+                if trimmed.is_empty() {
+                    return None;
+                }
+                match trimmed.parse() {
+                    Ok(ip) => Some(ip),
+                    Err(e) => {
+                        tracing::warn!(
+                            "Invalid IP address in TRUSTED_PROXY_IPS '{}': {}",
+                            trimmed,
+                            e
+                        );
+                        None
+                    }
+                }
+            })
+            .collect()
+    })
+}
+
+pub fn extract_client_ip(
+    headers: &axum::http::HeaderMap,
+    peer_ip: Option<std::net::IpAddr>,
+) -> Option<String> {
+    let trusted_ips = get_trusted_proxies();
+    let is_trusted_proxy = peer_ip.is_some_and(|ip| trusted_ips.contains(&ip));
+
+    if is_trusted_proxy {
+        // Priority 1: X-Real-IP
+        // SECURITY NOTE: We unconditionally trust X-Real-IP here because `is_trusted_proxy`
+        // confirmed this request came from our trusted local reverse proxy. This behavior
+        // assumes that Nginx is explicitly configured with `proxy_set_header X-Real-IP $remote_addr;`
+        // to overwrite any potentially forged X-Real-IP header sent by the client.
+ if let Some(real_ip) = headers.get("X-Real-IP").and_then(|h| h.to_str().ok()) { + if let Ok(parsed_ip) = real_ip.trim().parse::() { + return Some(parsed_ip.to_string()); + } + } + + // Priority 2: X-Forwarded-For + if let Some(forwarded_for) = headers.get("X-Forwarded-For").and_then(|h| h.to_str().ok()) { + // We pick the rightmost IP (next_back) under the exact assumption that the trusted Nginx configuration + // uses `proxy_add_x_forwarded_for`, which appends the connecting peer's IP (the hop right before Nginx) to the right. + // We pick the rightmost IP because that is the most trusted hop added by our reverse proxy, preventing client-side spoofing. + // NOTE: This assumes Nginx is the ONLY intermediate proxy. Any CDN or external load balancer + // will put its own IP rightmost, making all traffic share one rate limit bucket. + if let Some(last_ip) = forwarded_for.split(',').next_back() { + if let Ok(parsed_ip) = last_ip.trim().parse::() { + if Some(parsed_ip) == peer_ip { + tracing::warn!("X-Forwarded-For rightmost IP {} matches the proxy peer IP. This usually indicates a CDN or external load balancer is stripping or improperly appending headers, collapsing all clients into one rate-limit bucket.", parsed_ip); + return None; + } else { + tracing::debug!("Extracted client IP {} from X-Forwarded-For rightmost entry. Multi-hop proxies (e.g. Cloudflare) may cause all clients to share this IP.", parsed_ip); + return Some(parsed_ip.to_string()); + } + } + } + } + tracing::warn!( + "TRUSTED_PROXY_IPS allowed proxy IP {}, but no valid X-Real-IP or X-Forwarded-For header was found. 
Rate limiting will apply to the proxy IP.", + peer_ip.unwrap() + ); + } + + peer_ip.map(|ip| ip.to_string()) +} + +#[derive(Clone)] +pub struct TrustedProxyIpKeyExtractor; + +impl tower_governor::key_extractor::KeyExtractor for TrustedProxyIpKeyExtractor { + type Key = String; + + fn extract(&self, req: &Request) -> Result { + let connect_info = req + .extensions() + .get::>(); + + if connect_info.is_none() { + tracing::error!("CRITICAL: ConnectInfo is missing from request extensions! This should never happen because `into_make_service_with_connect_info` is used in main.rs. Rate limiting will fail closed and return 500 errors."); + } + + let peer_ip = connect_info.map(|ci| ci.0.ip()); + + let key = extract_client_ip(req.headers(), peer_ip) + .ok_or(tower_governor::GovernorError::UnableToExtractKey)?; + Ok(key) + } +} + pub fn router(state: crate::state::AppState) -> Router { Router::new() .merge(public::router(state.clone())) diff --git a/backend/src/api/public.rs b/backend/src/api/public.rs index 16ee39e6..ea22e94c 100644 --- a/backend/src/api/public.rs +++ b/backend/src/api/public.rs @@ -1,11 +1,41 @@ -use axum::{extract::State, routing::get, Json, Router}; +use axum::{extract::Query, extract::State, routing::get, Json, Router}; use shared::{Article, BlogPost}; -use sqlx::PgPool; +use sqlx::SqlitePool; +#[derive(serde::Deserialize)] +pub struct Pagination { + pub limit: Option, + pub offset: Option, + pub before: Option, +} pub fn router(state: crate::state::AppState) -> Router { + let public_governor_config = std::sync::Arc::new( + tower_governor::governor::GovernorConfigBuilder::default() + .key_extractor(crate::api::TrustedProxyIpKeyExtractor) + .per_second(5) + .burst_size(20) + .finish() + .unwrap(), + ); + + let articles_governor_layer = tower_governor::GovernorLayer { + config: public_governor_config.clone(), + }; + + let blog_governor_layer = tower_governor::GovernorLayer { + config: public_governor_config, + }; + Router::new() .route("/health", 
get(health_check)) - .route("/api/articles", get(list_articles)) + .route( + "/api/articles", + get(list_articles).route_layer(articles_governor_layer), + ) + .route( + "/api/blog", + get(list_blog_posts).route_layer(blog_governor_layer), + ) .with_state(state) } @@ -15,57 +45,175 @@ async fn health_check() -> &'static str { use sqlx::Row; -async fn list_articles(State(pool): State) -> Json> { - match sqlx::query("SELECT id, wp_id, slug, title, subtitle, excerpt, content, cover_image_url, author, published_at, origin FROM articles ORDER BY published_at DESC LIMIT 20") - .map(|row: sqlx::postgres::PgRow| { - let origin_str: String = row.get("origin"); - let origin = match origin_str.as_str() { - "imported" => shared::Origin::Imported, - "synced" => shared::Origin::Synced, - _ => shared::Origin::Local, - }; - Article { - id: row.get("id"), - wp_id: row.get("wp_id"), - slug: row.get("slug"), - title: row.get("title"), - subtitle: row.get("subtitle"), - excerpt: row.get("excerpt"), - content: row.get("content"), - cover_image_url: row.get("cover_image_url"), - author: row.get("author"), - published_at: row.get("published_at"), - origin, - } - }) - .fetch_all(&pool) - .await - { - Ok(articles) => Json(articles), +async fn list_articles( + State(pool): State, + Query(query): Query, +) -> Result>, (axum::http::StatusCode, String)> { + let limit = query.limit.unwrap_or(20).min(50); + + if query.before.is_some() && query.offset.is_some() { + return Err(( + axum::http::StatusCode::BAD_REQUEST, + "Cannot use 'before' and 'offset' together".to_string(), + )); + } + + let rows_res = if let Some(before) = query.before { + let dt = chrono::DateTime::parse_from_rfc3339(&before) + .map_err(|_| { + ( + axum::http::StatusCode::BAD_REQUEST, + "Invalid 'before' date format".to_string(), + ) + })? 
+ .to_utc(); + let normalized = dt.format("%Y-%m-%dT%H:%M:%3fZ").to_string(); + sqlx::query("SELECT id, wp_id, slug, title, subtitle, excerpt, content, cover_image_url, author, published_at, origin FROM articles WHERE published_at < ? ORDER BY published_at DESC LIMIT ?") + .bind(normalized) + .bind(limit) + .try_map(map_article_row) + .fetch_all(&pool) + .await + } else { + let offset = query.offset.unwrap_or(0); + if offset > 10_000 { + return Err(( + axum::http::StatusCode::BAD_REQUEST, + "Offset too large".to_string(), + )); + } + sqlx::query("SELECT id, wp_id, slug, title, subtitle, excerpt, content, cover_image_url, author, published_at, origin FROM articles ORDER BY published_at DESC LIMIT ? OFFSET ?") + .bind(limit) + .bind(offset) + .try_map(map_article_row) + .fetch_all(&pool) + .await + }; + + match rows_res { + Ok(articles) => Ok(Json(articles)), Err(e) => { tracing::error!("Failed to fetch articles: {}", e); - Json(Vec::new()) + Err(( + axum::http::StatusCode::INTERNAL_SERVER_ERROR, + "Database error".to_string(), + )) } } } -async fn list_blog_posts(State(pool): State) -> Json> { - match sqlx::query("SELECT id, slug, title, content, published_at, tags FROM blog_posts ORDER BY published_at DESC LIMIT 20") - .map(|row: sqlx::postgres::PgRow| BlogPost { - id: row.get("id"), - slug: row.get("slug"), - title: row.get("title"), - content: row.get("content"), - published_at: row.get("published_at"), - tags: row.get("tags"), - }) - .fetch_all(&pool) - .await - { - Ok(posts) => Json(posts), +async fn list_blog_posts( + State(pool): State, + Query(query): Query, +) -> Result>, (axum::http::StatusCode, String)> { + let limit = query.limit.unwrap_or(20).min(50); + + if query.before.is_some() && query.offset.is_some() { + return Err(( + axum::http::StatusCode::BAD_REQUEST, + "Cannot use 'before' and 'offset' together".to_string(), + )); + } + + let rows_res = if let Some(before) = query.before { + let dt = chrono::DateTime::parse_from_rfc3339(&before) + 
.map_err(|_| { + ( + axum::http::StatusCode::BAD_REQUEST, + "Invalid 'before' date format".to_string(), + ) + })? + .to_utc(); + let normalized = dt.format("%Y-%m-%dT%H:%M:%3fZ").to_string(); + sqlx::query("SELECT id, slug, title, content, published_at, tags FROM blog_posts WHERE published_at < ? ORDER BY published_at DESC LIMIT ?") + .bind(normalized) + .bind(limit) + .try_map(map_blog_post_row) + .fetch_all(&pool) + .await + } else { + let offset = query.offset.unwrap_or(0); + if offset > 10_000 { + return Err(( + axum::http::StatusCode::BAD_REQUEST, + "Offset too large".to_string(), + )); + } + sqlx::query("SELECT id, slug, title, content, published_at, tags FROM blog_posts ORDER BY published_at DESC LIMIT ? OFFSET ?") + .bind(limit) + .bind(offset) + .try_map(map_blog_post_row) + .fetch_all(&pool) + .await + }; + + match rows_res { + Ok(posts) => Ok(Json(posts)), Err(e) => { tracing::error!("Failed to fetch blog posts: {}", e); - Json(Vec::new()) + Err(( + axum::http::StatusCode::INTERNAL_SERVER_ERROR, + "Database error".to_string(), + )) } } } + +fn map_article_row(row: sqlx::sqlite::SqliteRow) -> Result { + let origin_str: String = row.try_get("origin")?; + let origin = match origin_str.as_str() { + "imported" => shared::Origin::Imported, + "synced" => shared::Origin::Synced, + _ => shared::Origin::Local, + }; + let id_str: String = row.try_get("id")?; + let id = id_str + .parse::() + .map_err(|e| sqlx::Error::Decode(Box::new(e)))?; + Ok(Article { + id, + wp_id: row.try_get("wp_id")?, + slug: row.try_get("slug")?, + title: row.try_get("title")?, + subtitle: row.try_get("subtitle")?, + excerpt: row.try_get("excerpt")?, + content: row.try_get("content")?, + cover_image_url: row.try_get("cover_image_url")?, + author: row.try_get("author")?, + published_at: parse_flexible_datetime(row.try_get("published_at")?)?, + origin, + }) +} + +fn map_blog_post_row(row: sqlx::sqlite::SqliteRow) -> Result { + let tags_str: Option = row.try_get("tags")?; + let tags = match 
tags_str { + Some(s) => match serde_json::from_str(&s) { + Ok(t) => Some(t), + Err(e) => return Err(sqlx::Error::Decode(Box::new(e))), + }, + None => None, + }; + let id_str: String = row.try_get("id")?; + let id = id_str + .parse::() + .map_err(|e| sqlx::Error::Decode(Box::new(e)))?; + Ok(BlogPost { + id, + slug: row.try_get("slug")?, + title: row.try_get("title")?, + content: row.try_get("content")?, + published_at: parse_flexible_datetime(row.try_get("published_at")?)?, + tags, + }) +} + +fn parse_flexible_datetime(dt_str: String) -> Result, sqlx::Error> { + chrono::DateTime::parse_from_rfc3339(&dt_str) + .map(|dt| dt.with_timezone(&chrono::Utc)) + .or_else(|_| { + chrono::NaiveDateTime::parse_from_str(&dt_str, "%Y-%m-%d %H:%M:%S") + .map(|ndt| ndt.and_utc()) + }) + .map_err(|e| sqlx::Error::Decode(Box::new(e))) +} diff --git a/backend/src/main.rs b/backend/src/main.rs index 4454928c..ff8754e1 100644 --- a/backend/src/main.rs +++ b/backend/src/main.rs @@ -1,8 +1,8 @@ #![recursion_limit = "256"] -use axum::{extract::State, Router}; use axum::body::Body; use axum::http::Request; use axum::middleware::{self, Next}; +use axum::{extract::State, Router}; use bytes::Bytes; use dotenvy::dotenv; use frontend::{App, Shell}; @@ -11,8 +11,9 @@ use futures_util::StreamExt; use leptos::context::provide_context; use leptos::prelude::*; use leptos_axum::{generate_route_list, LeptosRoutes}; -use sqlx::postgres::PgPoolOptions; +use sqlx::sqlite::{SqliteConnectOptions, SqlitePoolOptions}; use std::net::SocketAddr; +use std::str::FromStr; use tower::ServiceBuilder; use tracing_subscriber::{layer::SubscriberExt, util::SubscriberInitExt}; @@ -34,13 +35,30 @@ async fn main() -> Result<(), Box> { .with(tracing_subscriber::fmt::layer()) .init(); + // Initialize JWT Secret early so it panics at startup if missing + shared::auth::init_jwt_secret(); + crate::api::admin::init_dummy_hash(); + crate::api::init_trusted_proxies(); + // Improved error handling for DATABASE_URL let database_url = 
std::env::var("DATABASE_URL") .map_err(|_| "DATABASE_URL environment variable must be set")?; - let pool = PgPoolOptions::new() + // Parse options and ensure database is created if it doesn't exist + let connect_options = SqliteConnectOptions::from_str(&database_url) + .map_err(|e| format!("Invalid DATABASE_URL: {}", e))? + .create_if_missing(true) + .journal_mode(sqlx::sqlite::SqliteJournalMode::Wal) + .busy_timeout(std::time::Duration::from_secs(5)); + + // With WAL mode, SQLite allows concurrent readers, but all writers are still + // serialized with a single write lock. Setting max_connections(5) helps with concurrent + // reads. We explicitly set min_connections(1) to keep one connection warm + // to avoid cold-start latency. + let pool = SqlitePoolOptions::new() .max_connections(5) - .connect(&database_url) + .min_connections(1) + .connect_with(connect_options) .await .map_err(|e| format!("Failed to create database pool: {}", e))?; @@ -53,6 +71,59 @@ async fn main() -> Result<(), Box> { e })?; + let user_count: (i64,) = sqlx::query_as("SELECT COUNT(*) FROM users") + .fetch_one(&pool) + .await + .unwrap_or((0,)); + if user_count.0 == 0 { + tracing::warn!("====================================================================="); + tracing::warn!("WARNING: The 'users' table is empty. No admin user exists."); + tracing::warn!("Run './scripts/setup-dev.sh' or inject a seed migration to create one."); + tracing::warn!("====================================================================="); + } + + if std::env::var("ENVIRONMENT").as_deref() == Ok("production") { + match std::env::var("TRUSTED_PROXY_IPS").as_deref() { + Err(_) => panic!("TRUSTED_PROXY_IPS must be set in production. Otherwise, all users behind a proxy will share a single rate-limit bucket."), + Ok(ips) if ips.trim().is_empty() => panic!("TRUSTED_PROXY_IPS is set but empty. 
This will cause all proxies to be untrusted, collapsing rate limits."), + Ok(ips) => { + let default_ips = ips.split(',').map(|s| s.trim()).filter(|s| !s.is_empty()); + let mut has_private = false; + for ip_str in default_ips { + if let Ok(ip) = ip_str.parse::() { + if ip.is_loopback() { + has_private = true; + break; + } + match ip { + std::net::IpAddr::V4(v4) => { + let octets = v4.octets(); + if octets[0] == 10 || (octets[0] == 172 && (16..=31).contains(&octets[1])) || (octets[0] == 192 && octets[1] == 168) { + has_private = true; + break; + } + } + std::net::IpAddr::V6(v6) => { + if (v6.segments()[0] & 0xfe00) == 0xfc00 { + has_private = true; + break; + } + } + } + } + } + + if has_private { + tracing::warn!("====================================================================="); + tracing::warn!("WARNING: TRUSTED_PROXY_IPS contains private (e.g., Docker bridge) IPs."); + tracing::warn!("Container IPs can change on restart. Rate limiting may fail open if these are incorrect."); + tracing::warn!("Please verify these IPs post-deploy or use a more robust mechanism like static IPs (--ip) or docker network inspect."); + tracing::warn!("====================================================================="); + } + } + } + } + // Build LeptosOptions from environment/config let site_addr: SocketAddr = std::env::var("LEPTOS_SITE_ADDR") .unwrap_or_else(|_| "0.0.0.0:3000".to_string()) @@ -104,7 +175,11 @@ async fn main() -> Result<(), Box> { tracing::info!("listening on http://{}", &addr); let listener = tokio::net::TcpListener::bind(&addr).await?; - axum::serve(listener, app.into_make_service()).await?; + axum::serve( + listener, + app.into_make_service_with_connect_info::(), + ) + .await?; Ok(()) } diff --git a/backend/src/state.rs b/backend/src/state.rs index cbea6ecc..e1868b96 100644 --- a/backend/src/state.rs +++ b/backend/src/state.rs @@ -1,11 +1,11 @@ use axum::extract::FromRef; use leptos::prelude::LeptosOptions; -use sqlx::PgPool; +use sqlx::SqlitePool; 
 #[derive(Clone)]
 pub struct AppState {
     pub leptos_options: LeptosOptions,
-    pub pool: PgPool,
+    pub pool: SqlitePool,
 }
 
 impl FromRef<AppState> for LeptosOptions {
@@ -14,7 +14,7 @@ impl FromRef<AppState> for LeptosOptions {
     }
 }
 
-impl FromRef<AppState> for PgPool {
+impl FromRef<AppState> for SqlitePool {
     fn from_ref(state: &AppState) -> Self {
         state.pool.clone()
     }
diff --git a/docker-compose.prod.yaml b/docker-compose.prod.yaml
index 0cf9e0cc..93b54dd4 100644
--- a/docker-compose.prod.yaml
+++ b/docker-compose.prod.yaml
@@ -2,40 +2,19 @@ services:
   portfolio:
     build: .
     restart: always
+    user: "1000:1000"
     environment:
-      - DATABASE_URL=postgres://${POSTGRES_USER}:${POSTGRES_PASSWORD}@db:5432/${POSTGRES_DB}
+      # 4 slashes are intentional for an absolute path: sqlite:////app/data/sqlite.db
+      - DATABASE_URL=sqlite:////app/data/sqlite.db
       - LEPTOS_SITE_ADDR=0.0.0.0:3000
       - RUST_LOG=info
-    depends_on:
-      - db
     networks:
       - jake_net
     volumes:
       - ./media_mount:/app/media
+      - ./data:/app/data
 
-  db:
-    image: postgres:15-alpine
-    restart: always
-    environment:
-      POSTGRES_USER: ${POSTGRES_USER}
-      POSTGRES_PASSWORD: ${POSTGRES_PASSWORD}
-      POSTGRES_DB: ${POSTGRES_DB}
-    volumes:
-      - db_data:/var/lib/postgresql/data
-    networks:
-      - jake_net
-
-  migration:
-    build: .
- depends_on: - - db - environment: - DATABASE_URL: postgres://${POSTGRES_USER}:${POSTGRES_PASSWORD}@db:5432/${POSTGRES_DB} - volumes: - - ./migrations:/migrations - command: sqlx migrate run --source /migrations - networks: - - jake_net + # Embedded SQLite replaces separate db and migration services nginx: image: nginx:stable-alpine @@ -61,8 +40,5 @@ services: - ./certbot/www:/var/www/certbot entrypoint: "/bin/sh -c 'trap exit TERM; while :; do certbot renew; sleep 12h & wait $${!}; done;'" -volumes: - db_data: - networks: jake_net: diff --git a/docker-compose.yaml b/docker-compose.yaml deleted file mode 100644 index 7f75172a..00000000 --- a/docker-compose.yaml +++ /dev/null @@ -1,18 +0,0 @@ -version: "3.8" - -services: - db: - image: docker.io/bitnami/postgresql:15 - restart: always - userns_mode: keep-id - environment: - POSTGRESQL_USERNAME: admin - POSTGRESQL_PASSWORD: password - POSTGRESQL_DATABASE: portfolio - ports: - - "5432:5432" - volumes: - - db_data_v2:/bitnami/postgresql - -volumes: - db_data_v2: diff --git a/frontend/Cargo.toml b/frontend/Cargo.toml index 61e6bfc1..56bedd14 100644 --- a/frontend/Cargo.toml +++ b/frontend/Cargo.toml @@ -33,7 +33,8 @@ ssr = [ "leptos_router/ssr", "dep:chrono", "dep:jsonwebtoken", - "dep:walkdir" + "dep:walkdir", + "shared/ssr" ] diff --git a/frontend/src/api/articles.rs b/frontend/src/api/articles.rs index b441f6d1..69cd9779 100644 --- a/frontend/src/api/articles.rs +++ b/frontend/src/api/articles.rs @@ -36,7 +36,7 @@ pub mod ssr_utils { // Simple JWT verification helper // In a real app, this should be shared with backend logic pub fn verify_token(token: &str) -> Result { - use jsonwebtoken::{decode, DecodingKey, Validation, Algorithm}; + use jsonwebtoken::{decode, Algorithm, DecodingKey, Validation}; use serde::Deserialize; #[derive(Deserialize)] @@ -45,15 +45,12 @@ pub mod ssr_utils { _exp: usize, } - // WARN: Synchronize this secret with backend/src/api/admin.rs - // Ideally, use an ENV var. 
- let secret = b"change-this-secret-key-in-production-environment"; - let token_data = decode::( token, - &DecodingKey::from_secret(secret), + &DecodingKey::from_secret(shared::auth::get_jwt_secret()), &Validation::new(Algorithm::HS256), - ).map_err(|_| ServerFnError::new("Invalid token"))?; + ) + .map_err(|_| ServerFnError::new("Invalid token"))?; Ok(token_data.claims.sub) } @@ -70,7 +67,7 @@ pub async fn get_articles() -> Result, ServerFnError> { if let Ok(entries) = fs::read_dir(dir) { for entry in entries.flatten() { let path = entry.path(); - if path.extension().map_or(false, |ext| ext == "json") { + if path.extension().is_some_and(|ext| ext == "json") { if let Ok(content) = fs::read_to_string(&path) { if let Ok(article) = serde_json::from_str::
(&content) { articles.push(article); @@ -116,7 +113,9 @@ pub async fn save_article(token: String, article: Article) -> Result<(), ServerF } // Sanitize slug just in case - let safe_slug = article.slug.chars() + let safe_slug = article + .slug + .chars() .filter(|c| c.is_alphanumeric() || *c == '-') .collect::() .to_lowercase(); @@ -173,10 +172,12 @@ pub async fn list_media(token: String) -> Result, ServerFnError> for line in stdout.lines() { let line = line.trim(); - if line.is_empty() || line.ends_with('/') { continue; } // Skip directories + if line.is_empty() || line.ends_with('/') { + continue; + } // Skip directories if let Some(path) = line.strip_prefix("gs://jakewray-portfolio/") { - let name = path.split('/').last().unwrap_or(path).to_string(); + let name = path.split('/').next_back().unwrap_or(path).to_string(); items.push(MediaItem { url: format!("{}/{}", base_url, path), name, @@ -188,17 +189,33 @@ pub async fn list_media(token: String) -> Result, ServerFnError> } #[server(UploadMedia, "/api")] -pub async fn upload_media(token: String, filename: String, data: Vec) -> Result { +pub async fn upload_media( + token: String, + filename: String, + data: Vec, +) -> Result { use self::ssr_utils::verify_token; - use std::process::{Command, Stdio}; use std::io::Write; + use std::process::{Command, Stdio}; verify_token(&token)?; // We'll upload to a 'uploads' folder for manual picking or sorting later + let filtered_name: String = filename + .chars() + .filter(|c| c.is_alphanumeric() || *c == '.' 
|| *c == '-' || *c == '_') + .collect(); + + if filtered_name.is_empty() { + return Err(ServerFnError::new("Invalid filename")); + } + let timestamp = chrono::Utc::now().timestamp(); - let safe_name = format!("{}_{}", timestamp, filename.replace(" ", "_")); - let destination = format!("gs://jakewray-portfolio/media/journalism/uploads/{}", safe_name); + let safe_name = format!("{}_{}", timestamp, filtered_name); + let destination = format!( + "gs://jakewray-portfolio/media/journalism/uploads/{}", + safe_name + ); let mut child = Command::new("gsutil") .arg("cp") @@ -216,5 +233,8 @@ pub async fn upload_media(token: String, filename: String, data: Vec) -> Res return Err(ServerFnError::new("Failed to upload to GCS")); } - Ok(format!("https://storage.googleapis.com/jakewray-portfolio/media/journalism/uploads/{}", safe_name)) + Ok(format!( + "https://storage.googleapis.com/jakewray-portfolio/media/journalism/uploads/{}", + safe_name + )) } diff --git a/frontend/src/app.rs b/frontend/src/app.rs index 7e8727a3..a2c95873 100644 --- a/frontend/src/app.rs +++ b/frontend/src/app.rs @@ -101,7 +101,6 @@ fn AdminRedirect() -> impl IntoView { leptos::prelude::Effect::new(move || { navigate("/admin/login", Default::default()); }); - view! 
{} } #[component] diff --git a/frontend/src/components/media_picker.rs b/frontend/src/components/media_picker.rs index 1b39d3fa..60d9ddce 100644 --- a/frontend/src/components/media_picker.rs +++ b/frontend/src/components/media_picker.rs @@ -1,84 +1,75 @@ -use leptos::prelude::*; use crate::api::articles::{list_media, upload_media, MediaItem}; -use leptos::task::spawn_local; use leptos::ev; -use web_sys::{HtmlInputElement, FileList}; +use leptos::prelude::*; +use leptos::task::spawn_local; use wasm_bindgen_futures::JsFuture; +use web_sys::{FileList, HtmlInputElement}; #[component] pub fn MediaPicker( token: Signal, on_select: F, - current_image: Option -) -> impl IntoView -where F: Fn(String) + 'static + Send + Sync + Clone + current_image: Option, +) -> impl IntoView +where + F: Fn(String) + 'static + Send + Sync + Clone, { let (items, set_items) = signal(Vec::::new()); let (loading, set_loading) = signal(true); let (uploading, set_uploading) = signal(false); let (error_msg, set_error_msg) = signal(String::new()); - let fetch_media = { - let token = token.clone(); - move || { - set_loading.set(true); - let t = token.get(); - spawn_local(async move { - match list_media(t).await { - Ok(res) => set_items.set(res), - Err(e) => set_error_msg.set(format!("Error: {}", e)), - } - set_loading.set(false); - }); - } + let fetch_media = move || { + set_loading.set(true); + let t = token.get(); + spawn_local(async move { + match list_media(t).await { + Ok(res) => set_items.set(res), + Err(e) => set_error_msg.set(format!("Error: {}", e)), + } + set_loading.set(false); + }); }; // Initial fetch - Effect::new({ - let fetch = fetch_media.clone(); - move || { fetch(); } + Effect::new(move || { + fetch_media(); }); - let on_upload = { - let token = token.clone(); - let fetch = fetch_media.clone(); - move |ev: ev::Event| { - let input: HtmlInputElement = event_target(&ev); - let files: Option = input.files(); - if let Some(files) = files { - if let Some(file) = files.get(0) { - let t 
= token.get(); - let f_clone = fetch.clone(); - let filename = file.name(); - let file_clone = file.clone(); // web_sys::File is Clone (JsValue wrapper) - set_uploading.set(true); - - spawn_local(async move { - // Read file as bytes via web_sys - let array_buffer_promise = file_clone.array_buffer(); - match JsFuture::from(array_buffer_promise).await { - Ok(array_buffer) => { - let uint8_array = js_sys::Uint8Array::new(&array_buffer); - let bytes = uint8_array.to_vec(); - - match upload_media(t, filename, bytes).await { - Ok(_url) => { - f_clone(); // Refresh list - }, - Err(e) => set_error_msg.set(format!("Upload failed: {}", e)), + let on_upload = move |ev: ev::Event| { + let input: HtmlInputElement = event_target(&ev); + let files: Option = input.files(); + if let Some(files) = files { + if let Some(file) = files.get(0) { + let t = token.get(); + let f_clone = fetch_media; + let filename = file.name(); + let file_clone = file.clone(); // web_sys::File is Clone (JsValue wrapper) + set_uploading.set(true); + + spawn_local(async move { + // Read file as bytes via web_sys + let array_buffer_promise = file_clone.array_buffer(); + match JsFuture::from(array_buffer_promise).await { + Ok(array_buffer) => { + let uint8_array = js_sys::Uint8Array::new(&array_buffer); + let bytes = uint8_array.to_vec(); + + match upload_media(t, filename, bytes).await { + Ok(_url) => { + f_clone(); // Refresh list } - }, - Err(e) => set_error_msg.set(format!("File read failed: {:?}", e)), + Err(e) => set_error_msg.set(format!("Upload failed: {}", e)), + } } - set_uploading.set(false); - }); - } + Err(e) => set_error_msg.set(format!("File read failed: {:?}", e)), + } + set_uploading.set(false); + }); } } }; - - view! {
@@ -107,15 +98,15 @@ where F: Fn(String) + 'static + Send + Sync + Clone } else { let on_select = on_select.clone(); let current_img = current_image.clone(); - + items.get().into_iter().map(move |item| { let url = item.url.clone(); let is_selected = current_img.as_ref() == Some(&url); let os = on_select.clone(); let u = url.clone(); - + view! { -
&'static [JournalismArticle] { } pub fn find_article(slug: &str) -> Option { - ARTICLES.iter().find(|article| article.slug == slug).cloned() + ARTICLES + .iter() + .find(|article| article.slug == slug) + .cloned() } diff --git a/frontend/src/lib.rs b/frontend/src/lib.rs index 3d51d373..5727fe05 100644 --- a/frontend/src/lib.rs +++ b/frontend/src/lib.rs @@ -1,6 +1,6 @@ +pub mod api; mod app; pub mod components; -pub mod api; pub mod data; pub mod pages; pub use app::*; diff --git a/frontend/src/pages/about.rs b/frontend/src/pages/about.rs index 8cbc175f..51a0e5b2 100644 --- a/frontend/src/pages/about.rs +++ b/frontend/src/pages/about.rs @@ -5,7 +5,7 @@ pub fn AboutPage() -> impl IntoView { view! {

"About Me"

- +

"I am a journalist, developer, and photographer based in Northern British Columbia. I have a passion for uncovering stories that matter and documenting the world around me through both words and images." diff --git a/frontend/src/pages/admin/login.rs b/frontend/src/pages/admin/login.rs index 849333bd..b13de3fd 100644 --- a/frontend/src/pages/admin/login.rs +++ b/frontend/src/pages/admin/login.rs @@ -41,6 +41,7 @@ pub fn AdminLoginPage() -> impl IntoView { let navigate = use_navigate(); move |ev: leptos::ev::SubmitEvent| { ev.prevent_default(); + #[cfg(debug_assertions)] web_sys::console::log_1(&"[Login] Form submitted".into()); _set_loading.set(true); set_error.set("".to_string()); @@ -49,7 +50,10 @@ pub fn AdminLoginPage() -> impl IntoView { let password_val = _password.get(); let navigate = navigate.clone(); - web_sys::console::log_1(&format!("[Login] Attempting login for user: {}", username_val).into()); + #[cfg(debug_assertions)] + web_sys::console::log_1( + &format!("[Login] Attempting login for user: {}", username_val).into(), + ); spawn_local(async move { let req = LoginRequest { @@ -57,6 +61,7 @@ pub fn AdminLoginPage() -> impl IntoView { password: password_val.clone(), }; + #[cfg(debug_assertions)] web_sys::console::log_1(&"[Login] Sending POST /admin/login".into()); let result = async { @@ -64,31 +69,41 @@ pub fn AdminLoginPage() -> impl IntoView { .header("Content-Type", "application/json") .json(&req) .map_err(|e| { - web_sys::console::log_1(&format!("[Login] Serialize error: {:?}", e).into()); + #[cfg(debug_assertions)] + web_sys::console::log_1( + &format!("[Login] Serialize error: {:?}", e).into(), + ); "Failed to serialize request".to_string() })? 
.send() .await .map_err(|e| { - web_sys::console::log_1(&format!("[Login] Network error: {:?}", e).into()); + #[cfg(debug_assertions)] + web_sys::console::log_1( + &format!("[Login] Network error: {:?}", e).into(), + ); "Failed to connect to server".to_string() })?; - web_sys::console::log_1(&format!("[Login] Response status: {}", resp.status()).into()); + #[cfg(debug_assertions)] + web_sys::console::log_1( + &format!("[Login] Response status: {}", resp.status()).into(), + ); if !resp.ok() { return Err("Invalid username or password".to_string()); } - let data: LoginResponse = resp - .json() - .await - .map_err(|e| { - web_sys::console::log_1(&format!("[Login] Parse error: {:?}", e).into()); - "Failed to parse response".to_string() - })?; + let data: LoginResponse = resp.json().await.map_err(|e| { + #[cfg(debug_assertions)] + web_sys::console::log_1(&format!("[Login] Parse error: {:?}", e).into()); + "Failed to parse response".to_string() + })?; - web_sys::console::log_1(&"[Login] Token received, storing in localStorage".into()); + #[cfg(debug_assertions)] + web_sys::console::log_1( + &"[Login] Token received, storing in localStorage".into(), + ); // Store token in localStorage let window = web_sys::window().unwrap(); @@ -101,10 +116,12 @@ pub fn AdminLoginPage() -> impl IntoView { match result { Ok(()) => { + #[cfg(debug_assertions)] web_sys::console::log_1(&"[Login] Success, navigating to dashboard".into()); navigate("/admin/dashboard", Default::default()) - }, + } Err(msg) => { + #[cfg(debug_assertions)] web_sys::console::log_1(&format!("[Login] Error: {}", msg).into()); set_error.set(msg); } diff --git a/frontend/src/pages/admin/password_change.rs b/frontend/src/pages/admin/password_change.rs index 95964f92..94e1f486 100644 --- a/frontend/src/pages/admin/password_change.rs +++ b/frontend/src/pages/admin/password_change.rs @@ -97,12 +97,12 @@ pub fn AdminPasswordChange() -> impl IntoView { // Let me check `backend/src/api/mod.rs`. 
// If `login.rs` works with `/admin/login`, then the backend MUST be serving it there. // I will check `backend/src/api/mod.rs` to see if it nests `admin::router`. - .header("Authorization", &format!("Bearer {}", token)) - .header("Content-Type", "application/json") - .json(&req) - .unwrap() - .send() - .await; + .header("Authorization", &format!("Bearer {}", token)) + .header("Content-Type", "application/json") + .json(&req) + .unwrap() + .send() + .await; match resp { Ok(r) => { @@ -112,12 +112,15 @@ pub fn AdminPasswordChange() -> impl IntoView { set_new_password.set("".to_string()); set_confirm_password.set("".to_string()); } else { - let text = r.text().await.unwrap_or_else(|_| "Unknown error".to_string()); + let text = r + .text() + .await + .unwrap_or_else(|_| "Unknown error".to_string()); set_error.set(format!("Error: {}", text)); } } Err(e) => { - set_error.set(format!("Network error: {}", e)); + set_error.set(format!("Network error: {}", e)); } } set_loading.set(false); diff --git a/frontend/src/pages/sections.rs b/frontend/src/pages/sections.rs index fdfc7701..c48c9093 100644 --- a/frontend/src/pages/sections.rs +++ b/frontend/src/pages/sections.rs @@ -1,11 +1,11 @@ // use crate::data::journalism; // Deprecated use crate::api::articles::{get_articles, Article}; +use crate::components::media_picker::MediaPicker; use leptos::prelude::*; -use leptos_router::hooks::use_params_map; use leptos::task::spawn_local; -use leptos_router::components::A; -use crate::components::media_picker::MediaPicker; use leptos::wasm_bindgen::JsCast; +use leptos_router::components::A; +use leptos_router::hooks::use_params_map; fn strip_tags(s: &str) -> String { let mut out = String::with_capacity(s.len()); @@ -14,7 +14,11 @@ fn strip_tags(s: &str) -> String { match ch { '<' => in_tag = true, '>' => in_tag = false, - _ => if !in_tag { out.push(ch) }, + _ => { + if !in_tag { + out.push(ch) + } + } } } out.trim().to_string() @@ -23,26 +27,51 @@ fn strip_tags(s: &str) -> String { fn 
starts_with_month(s: &str) -> bool { let sm = s.trim_start(); const MONTHS: [&str; 21] = [ - "Jan.", "January", "Feb.", "February", "Mar.", "March", "Apr.", "April", - "May", "June", "July", "Aug.", "August", "Sept.", "September", "Oct.", - "October", "Nov.", "November", "Dec.", "December", + "Jan.", + "January", + "Feb.", + "February", + "Mar.", + "March", + "Apr.", + "April", + "May", + "June", + "July", + "Aug.", + "August", + "Sept.", + "September", + "Oct.", + "October", + "Nov.", + "November", + "Dec.", + "December", ]; MONTHS.iter().any(|m| { - if sm.starts_with(m) { - let after = &sm[m.len()..]; + if let Some(after) = sm.strip_prefix(m) { // Match if it's the end of string or next char is not a letter - after.chars().next().map_or(true, |c| !c.is_alphabetic()) + after.chars().next().is_none_or(|c| !c.is_alphabetic()) } else { false } }) } -fn extract_between(haystack: &str, start_pat: &str, end_pat: &str, from: usize) -> Option<(String, usize)> { +fn extract_between( + haystack: &str, + start_pat: &str, + end_pat: &str, + from: usize, +) -> Option<(String, usize)> { let start_idx = haystack[from..].find(start_pat)? + from; let after = start_idx + start_pat.len(); let end_idx = haystack[after..].find(end_pat)? 
+ after; - Some((haystack[after..end_idx].to_string(), end_idx + end_pat.len())) + Some(( + haystack[after..end_idx].to_string(), + end_idx + end_pat.len(), + )) } #[allow(dead_code)] @@ -61,9 +90,13 @@ fn extract_printed_date(html: &str) -> Option { if let Some((p_inner, next)) = extract_between(html, "", pos) { let open_end = p_inner.find('>').map(|i| i + 1).unwrap_or(0); let text = strip_tags(&p_inner[open_end..]); - if starts_with_month(&text) { return Some(text); } + if starts_with_month(&text) { + return Some(text); + } pos = next; - } else { break; } + } else { + break; + } } None } @@ -88,7 +121,7 @@ fn extract_body_preview(html: &str) -> Option { #[allow(dead_code)] fn replace_date_paragraph(html: &str, new_date: &str) -> String { // Reuse extract logic to find the range, then replace it - let after_h4 = html.find("").map(|idx| idx + 5).unwrap_or(0); + let after_h4 = html.find("").map(|idx| idx + 5).unwrap_or(0); let mut pos = after_h4; for _ in 0..5 { if let Some((p_inner, next)) = extract_between(html, "", pos) { @@ -97,19 +130,24 @@ fn replace_date_paragraph(html: &str, new_date: &str) -> String { if starts_with_month(&text) { if let Some(start_rel) = html[pos..].find("") { - let end_abs = after_start + end_rel + 4; //

len - let mut out = html.to_string(); - // Construct replacement paragraph - let replacement = format!("

{}

", new_date); - out.replace_range(start_abs..end_abs, &replacement); - return out; + let end_abs = after_start + end_rel + 4; //

len + let mut out = html.to_string(); + // Construct replacement paragraph + let replacement = format!( + "

{}

", + new_date + ); + out.replace_range(start_abs..end_abs, &replacement); + return out; } } } pos = next; - } else { break; } + } else { + break; + } } html.to_string() } @@ -174,26 +212,61 @@ fn linkify_images(html: &str) -> String { // Extract src let src = if let Some(src_start_rel) = img_tag.find("src=\"") { let after_src = src_start_rel + 5; - if let Some(src_end_rel) = img_tag[after_src..].find('"') { - Some(&img_tag[after_src..after_src + src_end_rel]) - } else { None } - } else { None }; + img_tag[after_src..] + .find('"') + .map(|src_end_rel| &img_tag[after_src..after_src + src_end_rel]) + } else { + None + }; if let Some(src_url) = src { - let wrapper_start = format!("", src_url); - let wrapper_end = ""; - - // Replace strict range - let new_content = format!("{}{}{}", wrapper_start, img_tag, wrapper_end); - out.replace_range(abs_open..abs_close, &new_content); - - search_pos = abs_open + new_content.len(); - continue; + let is_safe_scheme = src_url.starts_with("http://") + || src_url.starts_with("https://") + || src_url.starts_with("data:image/png") + || src_url.starts_with("data:image/jpeg") + || src_url.starts_with("data:image/gif") + || src_url.starts_with("data:image/webp") + || src_url.starts_with('/'); + + if is_safe_scheme { + let safe_url = src_url + .replace("&", "&") + .replace("\"", """) + .replace("<", "<") + .replace(">", ">"); + let wrapper_start = format!( + "", + safe_url + ); + let wrapper_end = ""; + let safe_img_tag = + format!("\"Article", safe_url); + + // Replace strict range + let new_content = format!("{}{}{}", wrapper_start, safe_img_tag, wrapper_end); + out.replace_range(abs_open..abs_close, &new_content); + + search_pos = abs_open + new_content.len(); + continue; + } else { + #[cfg(not(target_arch = "wasm32"))] + tracing::debug!( + "Skipped unsafe image scheme in journalism article: {}", + src_url + ); + #[cfg(target_arch = "wasm32")] + web_sys::console::log_1( + &format!( + "Skipped unsafe image scheme in journalism article: 
{}", + src_url + ) + .into(), + ); + } } - search_pos = abs_close; - + search_pos = abs_close; } else { - search_pos = abs_open + 4; + search_pos = abs_open + 4; } } out @@ -223,21 +296,24 @@ fn italicize_origin_line(html: &str) -> String { } search_pos = abs_content_end + 4; - } else { break; } - } else { search_pos = abs_open + 2; } + } else { + break; + } + } else { + search_pos = abs_open + 2; + } } out } fn format_cp_style(date: &str) -> String { - let date = date.replace("January", "Jan.") + date.replace("January", "Jan.") .replace("February", "Feb.") .replace("August", "Aug.") .replace("September", "Sept.") .replace("October", "Oct.") .replace("November", "Nov.") - .replace("December", "Dec."); - date + .replace("December", "Dec.") } #[component] @@ -262,8 +338,7 @@ pub fn JournalismPage() -> impl IntoView { let title = article.title.clone(); let preview_text = extract_body_preview(&article.content_html) .unwrap_or_else(|| article.excerpt.clone()); - let image = article.images.get(0).cloned(); - let thumb_src = image.clone().unwrap_or_else(|| "data:image/svg+xml;utf8,Image coming soon".to_string()); + let image = article.images.first().cloned(); let date = extract_printed_date(&article.content_html) .unwrap_or_else(|| article.display_date.clone()); let date = format_cp_style(&date); @@ -271,8 +346,17 @@ pub fn JournalismPage() -> impl IntoView { view! {
- article thumbnail - {image.is_none().then(|| view! {
"Image coming soon"
})} + {if let Some(ref img) = image { + view! { article thumbnail }.into_any() + } else { + view! { + + + "Image coming soon" + + }.into_any() + }} + // Removed duplicate placeholder div

{date}

@@ -299,12 +383,12 @@ pub fn JournalismArticlePage() -> impl IntoView { #[cfg(target_arch = "wasm32")] web_sys::console::log_1(&"Rendering JournalismArticlePage".into()); - use crate::api::articles::{get_article, save_article, delete_article}; + use crate::api::articles::{delete_article, get_article, save_article}; let params = use_params_map(); let slug = move || params.with(|p| p.get("slug").map(|s| s.to_string()).unwrap_or_default()); - let article_resource = Resource::new(slug, |s| get_article(s)); + let article_resource = Resource::new(slug, get_article); // Auth State let (is_admin, _set_is_admin) = signal(false); @@ -313,16 +397,20 @@ pub fn JournalismArticlePage() -> impl IntoView { Effect::new(move || { #[cfg(target_arch = "wasm32")] { + #[cfg(debug_assertions)] web_sys::console::log_1(&"Checking auth token...".into()); if let Ok(Some(storage)) = web_sys::window().unwrap().local_storage() { if let Ok(Some(t)) = storage.get_item("admin_token") { - web_sys::console::log_1(&format!("Found token: {}", t).into()); - if !t.is_empty() { + #[cfg(debug_assertions)] + web_sys::console::log_1(&format!("Found token: {}", t).into()); + if !t.is_empty() { _set_token.set(t); _set_is_admin.set(true); + #[cfg(debug_assertions)] web_sys::console::log_1(&"Admin mode enabled".into()); - } + } } else { + #[cfg(debug_assertions)] web_sys::console::log_1(&"No token found in localStorage".into()); } } @@ -361,7 +449,11 @@ pub fn JournalismArticlePage() -> impl IntoView { new_article.title = edit_title.get(); new_article.display_date = edit_date.get(); new_article.byline = Some(edit_byline.get()); - new_article.captions = if edit_caption.get().trim().is_empty() { vec![] } else { vec![edit_caption.get()] }; + new_article.captions = if edit_caption.get().trim().is_empty() { + vec![] + } else { + vec![edit_caption.get()] + }; new_article.images = edit_images.get(); new_article.content_html = edit_html.get(); @@ -370,7 +462,7 @@ pub fn JournalismArticlePage() -> impl IntoView { 
set_save_status.set("Saved!".to_string()); set_is_editing.set(false); article_resource.refetch(); - }, + } Err(e) => set_save_status.set(format!("Error: {}", e)), } }); @@ -379,7 +471,11 @@ pub fn JournalismArticlePage() -> impl IntoView { let on_delete = move |slug: String| { #[cfg(target_arch = "wasm32")] { - if !web_sys::window().unwrap().confirm_with_message("Are you sure you want to delete this article?").unwrap() { + if !web_sys::window() + .unwrap() + .confirm_with_message("Are you sure you want to delete this article?") + .unwrap() + { return; } } @@ -390,10 +486,12 @@ pub fn JournalismArticlePage() -> impl IntoView { Ok(_) => { let navigate = leptos_router::hooks::use_navigate(); navigate("/journalism", Default::default()); - }, + } Err(e) => { #[cfg(target_arch = "wasm32")] - let _ = web_sys::window().unwrap().alert_with_message(&format!("Error deleting: {}", e)); + let _ = web_sys::window() + .unwrap() + .alert_with_message(&format!("Error deleting: {}", e)); #[cfg(not(target_arch = "wasm32"))] leptos::logging::error!("Error deleting: {}", e); } diff --git a/hgen/Cargo.toml b/hgen/Cargo.toml new file mode 100644 index 00000000..432ba8ae --- /dev/null +++ b/hgen/Cargo.toml @@ -0,0 +1,8 @@ +[package] +name = "hgen" +version = "0.1.0" +edition = "2021" + +[dependencies] +argon2 = { version = "0.5.0", features = ["std", "password-hash"] } +shared = { path = "../shared", features = ["ssr"] } diff --git a/hgen/src/main.rs b/hgen/src/main.rs new file mode 100644 index 00000000..9b70aee0 --- /dev/null +++ b/hgen/src/main.rs @@ -0,0 +1,31 @@ +// Usage: +// This utility reads the password from standard input (stdin), not from command-line arguments. +// This improves security by preventing the password from appearing in shell history or `ps` output. +// +// Example: +// echo -n "mypassword" | cargo run --bin hgen +// or run `cargo run --bin hgen` and type the password followed by Enter. 
+ +use argon2::{ + password_hash::{rand_core::OsRng, PasswordHasher, SaltString}, + Argon2, +}; +fn main() { + let mut password = String::new(); + std::io::Read::read_to_string(&mut std::io::stdin(), &mut password).expect("Failed to read password"); + let password = password.trim_end_matches(['\r', '\n']); + let salt = SaltString::generate(&mut OsRng); + let params = argon2::Params::new( + shared::auth::ARGON2_M_COST, + shared::auth::ARGON2_T_COST, + shared::auth::ARGON2_P_COST, + Some(argon2::Params::DEFAULT_OUTPUT_LEN), + ).unwrap(); + let argon2 = Argon2::new( + argon2::Algorithm::Argon2id, + argon2::Version::V0x13, + params, + ); + let hash = argon2.hash_password(password.as_bytes(), &salt).unwrap(); + println!("{}", hash); +} diff --git a/migrations/20260110000000_initial_schema.sql b/migrations/20260110000000_initial_schema.sql index f072fdc0..16887831 100644 --- a/migrations/20260110000000_initial_schema.sql +++ b/migrations/20260110000000_initial_schema.sql @@ -1,17 +1,20 @@ --- Enable UUID extension -CREATE EXTENSION IF NOT EXISTS "uuid-ossp"; +-- MACRO: UUID_V4_GENERATOR +-- Expression: (lower(hex(randomblob(4))) || '-' || lower(hex(randomblob(2))) || '-4' || substr(lower(hex(randomblob(2))),2) || '-' || substr('89ab', (random() & 3) + 1, 1) || substr(lower(hex(randomblob(2))),2) || '-' || lower(hex(randomblob(6)))) +-- Note: (random() & 3) + 1 provides perfectly uniform UUID variant bits. 
-- Users (Admin) CREATE TABLE users ( - id UUID PRIMARY KEY DEFAULT uuid_generate_v4(), + -- Uses MACRO: UUID_V4_GENERATOR + id TEXT PRIMARY KEY DEFAULT (lower(hex(randomblob(4))) || '-' || lower(hex(randomblob(2))) || '-4' || substr(lower(hex(randomblob(2))),2) || '-' || substr('89ab', (random() & 3) + 1, 1) || substr(lower(hex(randomblob(2))),2) || '-' || lower(hex(randomblob(6)))), username TEXT NOT NULL UNIQUE, password_hash TEXT NOT NULL, - created_at TIMESTAMPTZ NOT NULL DEFAULT NOW() + created_at DATETIME NOT NULL DEFAULT (strftime('%Y-%m-%dT%H:%M:%fZ', 'now')) ); -- Articles (Journalism - Imported/Synced) CREATE TABLE articles ( - id UUID PRIMARY KEY DEFAULT uuid_generate_v4(), + -- Uses MACRO: UUID_V4_GENERATOR + id TEXT PRIMARY KEY DEFAULT (lower(hex(randomblob(4))) || '-' || lower(hex(randomblob(2))) || '-4' || substr(lower(hex(randomblob(2))),2) || '-' || substr('89ab', (random() & 3) + 1, 1) || substr(lower(hex(randomblob(2))),2) || '-' || lower(hex(randomblob(6)))), wp_id BIGINT UNIQUE, -- External ID from WordPress slug TEXT NOT NULL UNIQUE, title TEXT NOT NULL, @@ -20,74 +23,75 @@ CREATE TABLE articles ( content TEXT NOT NULL, -- HTML content cover_image_url TEXT, author TEXT NOT NULL, - published_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + published_at DATETIME NOT NULL DEFAULT (strftime('%Y-%m-%dT%H:%M:%fZ', 'now')), origin TEXT NOT NULL DEFAULT 'local', -- 'imported', 'synced', 'local' - created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), - updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW() + created_at DATETIME NOT NULL DEFAULT (strftime('%Y-%m-%dT%H:%M:%fZ', 'now')) ); -- Personal Blog Posts CREATE TABLE blog_posts ( - id UUID PRIMARY KEY DEFAULT uuid_generate_v4(), + -- Uses MACRO: UUID_V4_GENERATOR + id TEXT PRIMARY KEY DEFAULT (lower(hex(randomblob(4))) || '-' || lower(hex(randomblob(2))) || '-4' || substr(lower(hex(randomblob(2))),2) || '-' || substr('89ab', (random() & 3) + 1, 1) || substr(lower(hex(randomblob(2))),2) || '-' || 
lower(hex(randomblob(6)))), slug TEXT NOT NULL UNIQUE, title TEXT NOT NULL, content TEXT NOT NULL, -- Markdown/Rich Text - published_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), - tags TEXT[], - created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), - updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW() + published_at DATETIME NOT NULL DEFAULT (strftime('%Y-%m-%dT%H:%M:%fZ', 'now')), + tags TEXT, -- JSON Array + created_at DATETIME NOT NULL DEFAULT (strftime('%Y-%m-%dT%H:%M:%fZ', 'now')) ); -- Creative Writing (Stories, Novels, Poetry) -CREATE TYPE creative_type AS ENUM ('story', 'novel', 'poetry'); CREATE TABLE creative_works ( - id UUID PRIMARY KEY DEFAULT uuid_generate_v4(), + -- Uses MACRO: UUID_V4_GENERATOR + id TEXT PRIMARY KEY DEFAULT (lower(hex(randomblob(4))) || '-' || lower(hex(randomblob(2))) || '-4' || substr(lower(hex(randomblob(2))),2) || '-' || substr('89ab', (random() & 3) + 1, 1) || substr(lower(hex(randomblob(2))),2) || '-' || lower(hex(randomblob(6)))), slug TEXT NOT NULL UNIQUE, title TEXT NOT NULL, - work_type creative_type NOT NULL, + work_type TEXT NOT NULL CHECK(work_type IN ('story', 'novel', 'poetry')), -- 'story', 'novel', 'poetry' synopsis TEXT, content TEXT, -- Full text or chapters (can be JSON if complex) status TEXT NOT NULL DEFAULT 'published', -- 'draft', 'published' - published_at TIMESTAMPTZ DEFAULT NOW(), - created_at TIMESTAMPTZ NOT NULL DEFAULT NOW() + published_at DATETIME DEFAULT (strftime('%Y-%m-%dT%H:%M:%fZ', 'now')), + created_at DATETIME NOT NULL DEFAULT (strftime('%Y-%m-%dT%H:%M:%fZ', 'now')) ); -- Media Items (Photography, Visual Art, J-School Video, Videography) -CREATE TYPE media_category AS ENUM ('photography', 'visual_art', 'video', 'j_school'); -CREATE TYPE media_context AS ENUM ('personal', 'professional'); + CREATE TABLE media_items ( - id UUID PRIMARY KEY DEFAULT uuid_generate_v4(), + -- Uses MACRO: UUID_V4_GENERATOR + id TEXT PRIMARY KEY DEFAULT (lower(hex(randomblob(4))) || '-' || lower(hex(randomblob(2))) || '-4' || 
substr(lower(hex(randomblob(2))),2) || '-' || substr('89ab', (random() & 3) + 1, 1) || substr(lower(hex(randomblob(2))),2) || '-' || lower(hex(randomblob(6)))), title TEXT, description TEXT, url TEXT NOT NULL, -- S3 URL or local path thumbnail_url TEXT, - category media_category NOT NULL, - context media_context NOT NULL DEFAULT 'personal', -- To distinguish Photojournalism (prof) vs Personal - taken_at TIMESTAMPTZ, - created_at TIMESTAMPTZ NOT NULL DEFAULT NOW() + category TEXT NOT NULL CHECK(category IN ('photography', 'visual_art', 'video', 'j_school')), -- 'photography', 'visual_art', 'video', 'j_school' + context TEXT NOT NULL DEFAULT 'personal' CHECK(context IN ('personal', 'professional')), -- To distinguish Photojournalism (prof) vs Personal + taken_at DATETIME, + created_at DATETIME NOT NULL DEFAULT (strftime('%Y-%m-%dT%H:%M:%fZ', 'now')) ); -- Music CREATE TABLE music_tracks ( - id UUID PRIMARY KEY DEFAULT uuid_generate_v4(), + -- Uses MACRO: UUID_V4_GENERATOR + id TEXT PRIMARY KEY DEFAULT (lower(hex(randomblob(4))) || '-' || lower(hex(randomblob(2))) || '-4' || substr(lower(hex(randomblob(2))),2) || '-' || substr('89ab', (random() & 3) + 1, 1) || substr(lower(hex(randomblob(2))),2) || '-' || lower(hex(randomblob(6)))), title TEXT NOT NULL, description TEXT, audio_url TEXT, embed_code TEXT, -- For Soundcloud/Spotify iframe - published_at TIMESTAMPTZ DEFAULT NOW(), - created_at TIMESTAMPTZ NOT NULL DEFAULT NOW() + published_at DATETIME DEFAULT (strftime('%Y-%m-%dT%H:%M:%fZ', 'now')), + created_at DATETIME NOT NULL DEFAULT (strftime('%Y-%m-%dT%H:%M:%fZ', 'now')) ); -- Programming Projects CREATE TABLE projects ( - id UUID PRIMARY KEY DEFAULT uuid_generate_v4(), + -- Uses MACRO: UUID_V4_GENERATOR + id TEXT PRIMARY KEY DEFAULT (lower(hex(randomblob(4))) || '-' || lower(hex(randomblob(2))) || '-4' || substr(lower(hex(randomblob(2))),2) || '-' || substr('89ab', (random() & 3) + 1, 1) || substr(lower(hex(randomblob(2))),2) || '-' || lower(hex(randomblob(6)))), 
name TEXT NOT NULL, description TEXT, github_url TEXT, demo_url TEXT, - technologies TEXT[], + technologies TEXT, -- JSON Array stars INT DEFAULT 0, is_featured BOOLEAN DEFAULT FALSE, - created_at TIMESTAMPTZ NOT NULL DEFAULT NOW() + created_at DATETIME NOT NULL DEFAULT (strftime('%Y-%m-%dT%H:%M:%fZ', 'now')) ); diff --git a/migrations/20260210120000_seed_admin_password.sql b/migrations/20260210120000_seed_admin_password.sql index 5468b3f6..899ea320 100644 --- a/migrations/20260210120000_seed_admin_password.sql +++ b/migrations/20260210120000_seed_admin_password.sql @@ -1,6 +1,3 @@ --- Update or insert admin user with secure default password --- Password: ZO6gOCn0icxcvrke62F96A== -INSERT INTO users (id, username, password_hash) -VALUES (gen_random_uuid(), 'admin', '$argon2id$v=19$m=19456,t=2,p=1$Ewiz6jCZu9NGQaAJtWRLqg$Fn5yB19PZG+eTq/f1oKbw+tsqvhwuAnMI3TpQCIg9vI') -ON CONFLICT (username) -DO UPDATE SET password_hash = EXCLUDED.password_hash; +-- Local development environments inject the password via scripts/setup-dev.sh. +-- Production environments require the admin password to be set manually or via an env var initialization script on first-run. +-- The default static password hash has been removed from this file. 
diff --git a/migrations/20260319000000_add_published_at_indexes.sql b/migrations/20260319000000_add_published_at_indexes.sql new file mode 100644 index 00000000..83c6fc8b --- /dev/null +++ b/migrations/20260319000000_add_published_at_indexes.sql @@ -0,0 +1,3 @@ +-- Add indexes on published_at to optimize listing articles and blog posts +CREATE INDEX IF NOT EXISTS idx_articles_published_at ON articles(published_at DESC); +CREATE INDEX IF NOT EXISTS idx_blog_posts_published_at ON blog_posts(published_at DESC); diff --git a/migrations/20260325000000_add_updated_at.sql b/migrations/20260325000000_add_updated_at.sql new file mode 100644 index 00000000..52196831 --- /dev/null +++ b/migrations/20260325000000_add_updated_at.sql @@ -0,0 +1,8 @@ +-- Add updated_at columns to core entities where they were missing + +ALTER TABLE articles ADD COLUMN updated_at DATETIME NOT NULL DEFAULT (strftime('%Y-%m-%dT%H:%M:%fZ', 'now')); +ALTER TABLE blog_posts ADD COLUMN updated_at DATETIME NOT NULL DEFAULT (strftime('%Y-%m-%dT%H:%M:%fZ', 'now')); +ALTER TABLE creative_works ADD COLUMN updated_at DATETIME NOT NULL DEFAULT (strftime('%Y-%m-%dT%H:%M:%fZ', 'now')); +ALTER TABLE media_items ADD COLUMN updated_at DATETIME NOT NULL DEFAULT (strftime('%Y-%m-%dT%H:%M:%fZ', 'now')); +ALTER TABLE music_tracks ADD COLUMN updated_at DATETIME NOT NULL DEFAULT (strftime('%Y-%m-%dT%H:%M:%fZ', 'now')); +ALTER TABLE projects ADD COLUMN updated_at DATETIME NOT NULL DEFAULT (strftime('%Y-%m-%dT%H:%M:%fZ', 'now')); diff --git a/migrations/20260325000001_normalize_timestamps.sql b/migrations/20260325000001_normalize_timestamps.sql new file mode 100644 index 00000000..d1510597 --- /dev/null +++ b/migrations/20260325000001_normalize_timestamps.sql @@ -0,0 +1,10 @@ +-- Normalize datetime precision to match the default SQLite %f format (milliseconds) +-- This ensures consistent precision for cursor-based pagination. 
+ +UPDATE articles +SET published_at = strftime('%Y-%m-%dT%H:%M:%fZ', published_at) +WHERE published_at IS NOT NULL; + +UPDATE blog_posts +SET published_at = strftime('%Y-%m-%dT%H:%M:%fZ', published_at) +WHERE published_at IS NOT NULL; diff --git a/migrations/20260325000002_add_updated_at_triggers.sql b/migrations/20260325000002_add_updated_at_triggers.sql new file mode 100644 index 00000000..14b4e30c --- /dev/null +++ b/migrations/20260325000002_add_updated_at_triggers.sql @@ -0,0 +1,49 @@ +-- Add AFTER UPDATE triggers for core tables to auto-update the updated_at column + +CREATE TRIGGER update_articles_updated_at +AFTER UPDATE ON articles +FOR EACH ROW +WHEN NEW.updated_at IS OLD.updated_at +BEGIN + UPDATE articles SET updated_at = (strftime('%Y-%m-%dT%H:%M:%fZ', 'now')) WHERE id = NEW.id; +END; + +CREATE TRIGGER update_blog_posts_updated_at +AFTER UPDATE ON blog_posts +FOR EACH ROW +WHEN NEW.updated_at IS OLD.updated_at +BEGIN + UPDATE blog_posts SET updated_at = (strftime('%Y-%m-%dT%H:%M:%fZ', 'now')) WHERE id = NEW.id; +END; + +CREATE TRIGGER update_creative_works_updated_at +AFTER UPDATE ON creative_works +FOR EACH ROW +WHEN NEW.updated_at IS OLD.updated_at +BEGIN + UPDATE creative_works SET updated_at = (strftime('%Y-%m-%dT%H:%M:%fZ', 'now')) WHERE id = NEW.id; +END; + +CREATE TRIGGER update_media_items_updated_at +AFTER UPDATE ON media_items +FOR EACH ROW +WHEN NEW.updated_at IS OLD.updated_at +BEGIN + UPDATE media_items SET updated_at = (strftime('%Y-%m-%dT%H:%M:%fZ', 'now')) WHERE id = NEW.id; +END; + +CREATE TRIGGER update_music_tracks_updated_at +AFTER UPDATE ON music_tracks +FOR EACH ROW +WHEN NEW.updated_at IS OLD.updated_at +BEGIN + UPDATE music_tracks SET updated_at = (strftime('%Y-%m-%dT%H:%M:%fZ', 'now')) WHERE id = NEW.id; +END; + +CREATE TRIGGER update_projects_updated_at +AFTER UPDATE ON projects +FOR EACH ROW +WHEN NEW.updated_at IS OLD.updated_at +BEGIN + UPDATE projects SET updated_at = (strftime('%Y-%m-%dT%H:%M:%fZ', 'now')) WHERE id = 
NEW.id; +END; diff --git a/migrations/20260325000003_add_published_at_check.sql b/migrations/20260325000003_add_published_at_check.sql new file mode 100644 index 00000000..867dbeb0 --- /dev/null +++ b/migrations/20260325000003_add_published_at_check.sql @@ -0,0 +1,29 @@ +-- Add triggers to validate published_at datetime format on insert and update for articles and blog_posts + +CREATE TRIGGER check_articles_published_at_insert +BEFORE INSERT ON articles +WHEN NEW.published_at IS NOT NULL AND NEW.published_at != strftime('%Y-%m-%dT%H:%M:%fZ', NEW.published_at) +BEGIN + SELECT RAISE(ABORT, 'published_at must be in %Y-%m-%dT%H:%M:%fZ format'); +END; + +CREATE TRIGGER check_articles_published_at_update +BEFORE UPDATE ON articles +WHEN NEW.published_at IS NOT NULL AND NEW.published_at IS NOT OLD.published_at AND NEW.published_at != strftime('%Y-%m-%dT%H:%M:%fZ', NEW.published_at) +BEGIN + SELECT RAISE(ABORT, 'published_at must be in %Y-%m-%dT%H:%M:%fZ format'); +END; + +CREATE TRIGGER check_blog_posts_published_at_insert +BEFORE INSERT ON blog_posts +WHEN NEW.published_at IS NOT NULL AND NEW.published_at != strftime('%Y-%m-%dT%H:%M:%fZ', NEW.published_at) +BEGIN + SELECT RAISE(ABORT, 'published_at must be in %Y-%m-%dT%H:%M:%fZ format'); +END; + +CREATE TRIGGER check_blog_posts_published_at_update +BEFORE UPDATE ON blog_posts +WHEN NEW.published_at IS NOT NULL AND NEW.published_at IS NOT OLD.published_at AND NEW.published_at != strftime('%Y-%m-%dT%H:%M:%fZ', NEW.published_at) +BEGIN + SELECT RAISE(ABORT, 'published_at must be in %Y-%m-%dT%H:%M:%fZ format'); +END; diff --git a/scripts/deploy.sh b/scripts/deploy.sh index 29f57af6..aee3dca8 100755 --- a/scripts/deploy.sh +++ b/scripts/deploy.sh @@ -15,7 +15,8 @@ echo "Deploying target: $TARGET" # rsync/scp will overwrite changed files. echo "Preparing remote directory..." 
gcloud compute ssh jake-user@$INSTANCE_NAME --project=$PROJECT_ID --zone=$ZONE --command=" - mkdir -p ~/app && \ + mkdir -p ~/app/data && \ + chmod 700 ~/app/data && \ sudo chown -R jake-user:jake-user ~/app " @@ -47,7 +48,7 @@ rsync -avz --info=progress2 \ --exclude '.DS_Store' \ -e "ssh -i ~/.ssh/google_compute_engine -o StrictHostKeyChecking=no" \ ./ \ - jake-user@$IP:~/app/ + "jake-user@$IP:~/app/" # 2. SSH and Deploy echo "Starting remote configuration and build..." diff --git a/scripts/remote_build.sh b/scripts/remote_build.sh index 09637b33..4dda0826 100644 --- a/scripts/remote_build.sh +++ b/scripts/remote_build.sh @@ -10,16 +10,28 @@ echo "Remote Build Target: $TARGET" export DOCKER_BUILDKIT=1 export COMPOSE_DOCKER_CLI_BUILD=1 -# Generate .env file with defaults for production -cat <<EOF > .env -POSTGRES_USER=admin -POSTGRES_PASSWORD=password -POSTGRES_DB=portfolio +ensure_data_dir() { + echo "Ensuring data directory exists..." + mkdir -p data && chmod 700 data && sudo chown 1000:1000 data +} + +if [ ! -f .env ]; then + echo "Generating new .env file with defaults..." + cat <<EOF > .env DOMAIN_NAME=jakewray.dev LEPTOS_SITE_ADDR=0.0.0.0:3000 RUST_LOG=info -DATABASE_URL=postgres://admin:password@db:5432/portfolio +DATABASE_URL=sqlite:////app/data/sqlite.db +ENVIRONMENT=production +JWT_SECRET=$(openssl rand -base64 48 | tr -d '\n') +# Warning: Ephemeral Docker Bridge IPs change on restart. +# Run `docker network inspect jakewraydev_default` to find the proxy IP, +# and manually add TRUSTED_PROXY_IPS= to this file if using rate limiting. EOF +chmod 600 .env +else + echo "Using existing .env file." +fi if [ "$TARGET" = "all" ] || [ "$TARGET" = "backend" ]; then echo "Building chef base image (with cache)..." @@ -28,36 +40,20 @@ if [ "$TARGET" = "all" ] || [ "$TARGET" = "backend" ]; then --cache-from portfolio-chef:latest \ -t portfolio-chef . - echo "Ensuring DB is up for preparation..." - sudo docker compose -f compose.prod.yaml up -d db - echo "Waiting for DB..."
- sleep 5 - - echo "Running sqlx prepare on server..." - DB_CONTAINER=$(sudo docker compose -f compose.prod.yaml ps -q db | head -n1) - - # We use the chef image which has sqlx-cli installed, and mount source code - sudo docker run --rm \ - --network container:$DB_CONTAINER \ - -v "$(pwd)":/app \ - -w /app \ - -u root \ - -e DATABASE_URL=postgres://admin:password@localhost:5432/portfolio \ - -e SQLX_OFFLINE=false \ - portfolio-chef \ - cargo sqlx prepare --workspace - sudo chown -R jake-user:jake-user . + ensure_data_dir fi if [ "$TARGET" = "all" ]; then echo "Building and starting ALL services with BuildKit caching..." sudo DOCKER_BUILDKIT=1 docker compose -f compose.prod.yaml build \ --build-arg BUILDKIT_INLINE_CACHE=1 + ensure_data_dir sudo docker compose -f compose.prod.yaml up -d --remove-orphans elif [ "$TARGET" = "backend" ]; then echo "Building and restarting BACKEND (portfolio) service with caching..." sudo DOCKER_BUILDKIT=1 docker compose -f compose.prod.yaml build \ --build-arg BUILDKIT_INLINE_CACHE=1 portfolio + ensure_data_dir sudo docker compose -f compose.prod.yaml up -d --no-deps portfolio elif [ "$TARGET" = "frontend" ]; then echo "Frontend is part of the backend binary in this setup (SSR)." @@ -67,3 +63,8 @@ else echo "Unknown target: $TARGET" exit 1 fi + +echo "=====================================================================" +echo "WARNING: Check your .env file for TRUSTED_PROXY_IPS." +echo "Docker bridge IPs may change. Verify them if using rate limiting!" +echo "=====================================================================" diff --git a/scripts/setup-dev.sh b/scripts/setup-dev.sh index ea4af5b8..929a02ef 100755 --- a/scripts/setup-dev.sh +++ b/scripts/setup-dev.sh @@ -1,12 +1,18 @@ -#!/bin/bash +#!/usr/bin/env bash # Local development setup script set -e echo "🚀 Setting up local development environment..." 
+if [ "$ENVIRONMENT" = "production" ] || [[ "$DATABASE_URL" == *"/app/data"* ]]; then + echo "❌ Error: Production environment detected. Setup script aborted." + exit 1 +fi + # Check dependencies command -v cargo &> /dev/null || { echo "❌ cargo not found. Install Rust from https://rustup.rs/"; exit 1; } +command -v sqlite3 &> /dev/null || { echo "❌ sqlite3 not found"; exit 1; } # Check for container runtime CONTAINER_CMD="" @@ -49,24 +55,52 @@ if [ "$CONTAINER_CMD" = "docker" ] && ! docker ps &> /dev/null; then fi fi -# Start database -echo "📦 Starting PostgreSQL database..." -COMPOSE_CMD="docker-compose" -if [ "$CONTAINER_CMD" = "podman" ]; then - COMPOSE_CMD="podman-compose" -fi - -$COMPOSE_CMD up -d db -sleep 3 - +# Try to use existing tools if possible, but no background service is needed for sqlite. echo "" echo "⏳ Running database migrations..." -cargo sqlx database create || true -cargo sqlx migrate run -D "postgres://admin:password@127.0.0.1:5432/portfolio" || true +# create an empty sqlite database file if it doesn't exist +touch sqlite.db +chmod 600 sqlite.db +if [ -z "$DATABASE_URL" ]; then + export DATABASE_URL="sqlite://sqlite.db" +fi + +cargo sqlx migrate run -D "$DATABASE_URL" || true echo "" echo "👤 Creating default admin user..." -PGPASSWORD=password psql -U admin -h 127.0.0.1 -d portfolio -c "INSERT INTO users (username, password_hash) VALUES ('admin', 'demo-admin-2026!') ON CONFLICT (username) DO NOTHING;" || echo "⚠️ Could not create user (may already exist)" +# WARN: The seeded password below is 'demo-admin-2026!'. +# Anyone reading the repository knows these default credentials. Check that this +# dev instance isn't exposed to untrusted networks. +# Generate hash dynamically +if [ ! -x "hgen/target/release/hgen" ]; then + (cd hgen && cargo build --release --quiet) +fi +ADMIN_HASH=$(echo -n "demo-admin-2026!" | ./hgen/target/release/hgen 2>/dev/null | tail -n 1) + +if ! 
[[ "$ADMIN_HASH" =~ ^\$argon2 ]]; then + echo "❌ hgen failed or produced unexpected output" + exit 1 +fi + +# Fallback to python UUID or kernel uuid if uuidgen missing +ADMIN_UUID=$(uuidgen 2>/dev/null || cat /proc/sys/kernel/random/uuid 2>/dev/null || python3 -c 'import uuid; print(uuid.uuid4())' 2>/dev/null || { echo "❌ Could not generate a UUID. Please install uuidgen."; exit 1; }) +SAFE_UUID=$(echo "$ADMIN_UUID" | tr -cd 'a-fA-F0-9-') + +if ! [[ "$SAFE_UUID" =~ ^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$ ]]; then + echo "❌ Invalid Admin UUID format generated: $SAFE_UUID" + exit 1 +fi + +ESCAPED_HASH="${ADMIN_HASH//\'/\'\'}" +sqlite3 sqlite.db <<EOF +INSERT INTO users (id, username, password_hash) +VALUES ('$SAFE_UUID', 'admin', '$ESCAPED_HASH') +ON CONFLICT(username) DO UPDATE SET password_hash = excluded.password_hash; +EOF diff --git a/shared/src/auth.rs b/shared/src/auth.rs new file mode 100644 --- /dev/null +++ b/shared/src/auth.rs @@ -0,0 +1,22 @@ +use std::sync::OnceLock; + +static JWT_SECRET: OnceLock<Vec<u8>> = OnceLock::new(); + +/// Required initialization: Call early if you want to fail fast on startup, +/// but `get_jwt_secret` will also lazily initialize it. +pub fn init_jwt_secret() { + let _ = get_jwt_secret(); +} + +pub fn get_jwt_secret() -> &'static [u8] { + JWT_SECRET.get_or_init(|| { + let secret = std::env::var("JWT_SECRET") + .unwrap_or_else(|_| { + panic!("JWT_SECRET environment variable must be set.
If this is a frontend/WASM build, the 'ssr' feature may have been incorrectly enabled."); + }); + if secret.len() < 32 { + panic!("JWT_SECRET must be at least 32 bytes long for security."); + } + secret.into_bytes() + }) +} diff --git a/shared/src/lib.rs b/shared/src/lib.rs index dd40b064..3c9f521c 100644 --- a/shared/src/lib.rs +++ b/shared/src/lib.rs @@ -1,3 +1,6 @@ +#[cfg(feature = "ssr")] +pub mod auth; + use chrono::{DateTime, Utc}; use serde::{Deserialize, Serialize}; use uuid::Uuid; @@ -35,6 +38,7 @@ pub struct BlogPost { } #[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)] +#[serde(rename_all = "snake_case")] pub enum MediaCategory { Photography, VisualArt, @@ -42,12 +46,59 @@ JSchool, } +impl std::fmt::Display for MediaCategory { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + MediaCategory::Photography => write!(f, "photography"), + MediaCategory::VisualArt => write!(f, "visual_art"), + MediaCategory::Video => write!(f, "video"), + MediaCategory::JSchool => write!(f, "j_school"), + } + } +} + +impl std::str::FromStr for MediaCategory { + type Err = String; + + fn from_str(s: &str) -> Result<Self, Self::Err> { + match s { + "photography" => Ok(MediaCategory::Photography), + "visual_art" => Ok(MediaCategory::VisualArt), + "video" => Ok(MediaCategory::Video), + "j_school" => Ok(MediaCategory::JSchool), + _ => Err(format!("Invalid media category: {}", s)), + } + } +} + #[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)] +#[serde(rename_all = "lowercase")] pub enum MediaContext { Personal, Professional, } +impl std::fmt::Display for MediaContext { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + MediaContext::Personal => write!(f, "personal"), + MediaContext::Professional => write!(f, "professional"), + } + } +} + +impl std::str::FromStr for MediaContext { + type Err = String; + + fn from_str(s: &str) -> Result<Self, Self::Err> { + match s { + "personal" =>
Ok(MediaContext::Personal), + "professional" => Ok(MediaContext::Professional), + _ => Err(format!("Invalid media context: {}", s)), + } + } +} + #[derive(Debug, Clone, Serialize, Deserialize)] pub struct MediaItem { pub id: Uuid, @@ -61,12 +112,36 @@ } #[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)] +#[serde(rename_all = "lowercase")] pub enum CreativeType { Story, Novel, Poetry, } +impl std::fmt::Display for CreativeType { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + CreativeType::Story => write!(f, "story"), + CreativeType::Novel => write!(f, "novel"), + CreativeType::Poetry => write!(f, "poetry"), + } + } +} + +impl std::str::FromStr for CreativeType { + type Err = String; + + fn from_str(s: &str) -> Result<Self, Self::Err> { + match s { + "story" => Ok(CreativeType::Story), + "novel" => Ok(CreativeType::Novel), + "poetry" => Ok(CreativeType::Poetry), + _ => Err(format!("Invalid creative type: {}", s)), + } + } +} + #[derive(Debug, Clone, Serialize, Deserialize)] pub struct CreativeWork { pub id: Uuid,