diff --git a/sources/en.comix/Cargo.lock b/sources/en.comix/Cargo.lock index b794f74b..cd9b744c 100644 --- a/sources/en.comix/Cargo.lock +++ b/sources/en.comix/Cargo.lock @@ -61,12 +61,6 @@ version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c08606f8c3cbf4ce6ec8e28fb0014a2c086708fe954eaa885384a6165172e7e8" -[[package]] -name = "base64" -version = "0.22.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6" - [[package]] name = "byteorder" version = "1.5.0" @@ -94,7 +88,6 @@ version = "0.1.0" dependencies = [ "aidoku", "aidoku-test", - "base64", "serde", "serde_json", ] diff --git a/sources/en.comix/Cargo.toml b/sources/en.comix/Cargo.toml index 4e94199b..ac436b0a 100644 --- a/sources/en.comix/Cargo.toml +++ b/sources/en.comix/Cargo.toml @@ -8,7 +8,6 @@ crate-type = ["cdylib"] [dependencies] aidoku = { git = "https://github.com/Aidoku/aidoku-rs.git", features = ["json"] } -base64 = { version = "0.22", default-features = false, features = ["alloc"] } serde = { version = "1.0", features = ["derive", "alloc"], default-features = false } serde_json = { version = "1.0", default-features = false, features = ["alloc"] } diff --git a/sources/en.comix/res/source.json b/sources/en.comix/res/source.json index 7cbe7212..c248491b 100644 --- a/sources/en.comix/res/source.json +++ b/sources/en.comix/res/source.json @@ -2,7 +2,7 @@ "info": { "id": "en.comix", "name": "Comix", - "version": 9, + "version": 10, "url": "https://comix.to", "contentRating": 1, "languages": ["en"] diff --git a/sources/en.comix/src/hash.rs b/sources/en.comix/src/hash.rs deleted file mode 100644 index fec42d3c..00000000 --- a/sources/en.comix/src/hash.rs +++ /dev/null @@ -1,230 +0,0 @@ -// reference: https://github.com/keiyoushi/extensions-source/blob/e6af5a11a7e8bdcfdfde50825b615e91dd2fc20c/src/en/comix/src/eu/kanade/tachiyomi/extension/en/comix/Hash.kt -use aidoku::{ - 
alloc::{string::String, vec::Vec}, - helpers::uri::encode_uri_component, -}; -use base64::{ - Engine, - engine::general_purpose::{STANDARD, URL_SAFE_NO_PAD}, -}; - -// [RC4 key, mutKey, prefKey] × 5 rounds -const KEYS: [&str; 15] = [ - "JxTcdyiA5GZxnbrmthXBQfU2IMTKcY1+3nNhbq98Sgo=", // 0 RC4 key round 1 - "3PordjODbhqla382Cxapmo/1JiABJQcjiJj1+48gTJ4=", // 1 mutKey round 1 - "OaKvnI5ARA==", // 2 prefKey round 1 - "MHNBHYWA7lvy867fXgvGcJwWDk79KqUJUVFsh3RwnnI=", // 3 RC4 key round 2 - "8i0Cru/VJBSVB2Y1GcMDVpzx2WepOcfnWdd81yxICl4=", // 4 mutKey round 2 - "Fyskubz8VvA=", // 5 prefKey round 2 - "B46L1x+UeWP+19cRpQ+OZvdLAK9EHID8g3mSgn57tew=", // 6 RC4 key round 3 - "DTSTmUt6LpDUw9r1lSQqyb3YlFTzruT8tk8wUGkwehQ=", // 7 mutKey round 3 - "vY/meeI=", // 8 prefKey round 3 - "7xWfIF5THL5LAnRgAARg+4mjWHPU9n3PQwvzbaMNi+Q=", // 9 RC4 key round 4 - "bewtiTuV+HJk56xxkf2iCljLgruCpBmN9BgE8i6gc9M=", // 10 mutKey round 4 - "/Xcb2zAu8AU=", // 11 prefKey round 4 - "WgeCQ3T8R51uTwVSiVa7Zy0dN6JOg6Z5JleMS+HV8Aw=", // 12 RC4 key round 5 - "yXayUVFrrcW56jQCEfZzuCidjpnWKjTDUNT7XeX9i7k=", // 13 mutKey round 5 - "tSLco2w=", // 14 prefKey round 5 -]; - -fn get_key_bytes(index: usize) -> Vec { - let Some(b64) = KEYS.get(index) else { - return Vec::new(); - }; - STANDARD.decode(b64.as_bytes()).unwrap_or_default() -} - -fn rc4(key: &[u8], data: &[u8]) -> Vec { - if key.is_empty() { - return data.to_vec(); - } - - let mut s = [0u8; 256]; - for (i, v) in s.iter_mut().enumerate() { - *v = i as u8; - } - - let mut j: usize = 0; - for i in 0..256usize { - j = (j + s[i] as usize + key[i % key.len()] as usize) % 256; - s.swap(i, j); - } - - let mut i: usize = 0; - j = 0; - let mut out = Vec::with_capacity(data.len()); - - for &byte in data { - i = (i + 1) % 256; - j = (j + s[i] as usize) % 256; - s.swap(i, j); - let k = s[(s[i] as usize + s[j] as usize) % 256]; - out.push(byte ^ k); - } - - out -} - -#[inline] -fn get_mut_key(mk: &[u8], idx: usize) -> u8 { - if !mk.is_empty() && (idx % 32) < mk.len() { - 
mk[idx % 32] - } else { - 0 - } -} - -#[inline] -fn op_shift_right7_left1(e: u8) -> u8 { - e.rotate_left(1) -} -#[inline] -fn op_shift_left1_right7(e: u8) -> u8 { - e.rotate_left(1) -} -#[inline] -fn op_shift_right2_left6(e: u8) -> u8 { - e.rotate_right(2) -} -#[inline] -fn op_shift_left4_right4(e: u8) -> u8 { - e.rotate_right(4) -} -#[inline] -fn op_shift_right4_left4(e: u8) -> u8 { - e.rotate_left(4) -} - -fn mutate( - data: &[u8], - mut_key: &[u8], - pref_key: &[u8], - pref_key_limit: usize, - round: usize, -) -> Vec { - let mut out = Vec::with_capacity(data.len() + pref_key_limit); - for o in 0..data.len() { - if o < pref_key_limit && o < pref_key.len() { - out.push(pref_key[o]); - } - let mut n = data[o] ^ get_mut_key(mut_key, o); - n = match round { - 1 => match o % 10 { - 0 => op_shift_right7_left1(n), - 1 => n ^ 37, - 2 => n ^ 81, - 3 => n ^ 147, - 4 => op_shift_right2_left6(n), - 5 | 8 => op_shift_right4_left4(n), - 6 => n ^ 218, - 7 => n.wrapping_add(159), - 9 => n ^ 180, - _ => n, - }, - 2 => match o % 10 { - 0 | 9 => n ^ 180, - 1 => op_shift_left1_right7(n), - 2 => n ^ 147, - 3 => op_shift_right7_left1(n), - 4 => op_shift_right2_left6(n), - 5 => op_shift_right4_left4(n), - 6 | 8 => n.wrapping_add(159), - 7 => n.wrapping_add(34), - _ => n, - }, - 3 => match o % 10 { - 0 => n ^ 81, - 1 => op_shift_right4_left4(n), - 2 | 9 => op_shift_left4_right4(n), - 3 => n ^ 37, - 4 => n.wrapping_add(159), - 5 => op_shift_left1_right7(n), - 6 => n ^ 180, - 7 => n.wrapping_add(34), - 8 => op_shift_right2_left6(n), - _ => n, - }, - 4 => match o % 10 { - 0 | 7 => n ^ 218, - 1 | 4 => op_shift_left1_right7(n), - 2 => op_shift_right7_left1(n), - 3 => n.wrapping_add(159), - 5 | 8 => n ^ 180, - 6 => n ^ 147, - 9 => n ^ 37, - _ => n, - }, - 5 => match o % 10 { - 0 => op_shift_left4_right4(n), - 1 | 3 => n ^ 147, - 2 => n.wrapping_add(34), - 4 | 9 => n ^ 218, - 5 | 7 => op_shift_left1_right7(n), - 6 => n ^ 180, - 8 => op_shift_right2_left6(n), - _ => n, - }, - _ => n, - }; - 
out.push(n); - } - out -} - -fn round1(data: &[u8]) -> Vec { - let mutated = mutate(data, &get_key_bytes(1), &get_key_bytes(2), 7, 1); - rc4(&get_key_bytes(0), &mutated) -} - -fn round2(data: &[u8]) -> Vec { - let mutated = mutate(data, &get_key_bytes(4), &get_key_bytes(5), 8, 2); - rc4(&get_key_bytes(3), &mutated) -} - -fn round3(data: &[u8]) -> Vec { - let mutated = mutate(data, &get_key_bytes(7), &get_key_bytes(8), 5, 3); - rc4(&get_key_bytes(6), &mutated) -} - -fn round4(data: &[u8]) -> Vec { - let mutated = mutate(data, &get_key_bytes(10), &get_key_bytes(11), 8, 4); - rc4(&get_key_bytes(9), &mutated) -} - -fn round5(data: &[u8]) -> Vec { - let mutated = mutate(data, &get_key_bytes(13), &get_key_bytes(14), 5, 5); - rc4(&get_key_bytes(12), &mutated) -} - -/// * `path`: API path, e.g. "/manga/some-hash/chapters" -pub fn generate_hash(path: &str) -> String { - let encoded = encode_uri_component(path) - .replace("+", "%20") - .replace("*", "%2A"); - - let r1 = round1(encoded.as_bytes()); - let r2 = round2(&r1); - let r3 = round3(&r2); - let r4 = round4(&r3); - let r5 = round5(&r4); - - URL_SAFE_NO_PAD.encode(r5) -} - -#[cfg(test)] -mod tests { - use super::*; - use aidoku_test::aidoku_test; - - #[aidoku_test] - fn test_manga_keys() { - assert_eq!( - generate_hash("/manga/prm8/chapters"), - "bemJ-0y5bduXT9upsFZyqV4s6RO7JKSqdGy_wtVw2MsErBWwyqsxWwbL-D5qRSWCr15sWrYTsLd0-os" - ); - assert_eq!( - generate_hash("/chapters/7660562"), - "bemJ-0y5bduXT9upsFZyqV4s6RO7JKSqdGy_wtVw2MsEpRV3yl8xub6LeSndTesynt7dDDNi" - ); - } -} diff --git a/sources/en.comix/src/lib.rs b/sources/en.comix/src/lib.rs index 46e46c42..272371fe 100644 --- a/sources/en.comix/src/lib.rs +++ b/sources/en.comix/src/lib.rs @@ -2,7 +2,8 @@ use aidoku::{ Chapter, DeepLinkHandler, DeepLinkResult, FilterValue, HashMap, Home, HomeComponent, HomeLayout, HomePartialResult, ImageRequestProvider, Link, LinkValue, Listing, ListingProvider, - Manga, MangaPageResult, MangaWithChapter, NotificationHandler, Page, Result, 
Source, + Manga, MangaPageResult, MangaWithChapter, NotificationHandler, Page, PageContent, Result, + Source, alloc::{String, Vec, string::ToString, vec}, helpers::uri::{QueryParameters, encode_uri_component}, imports::{ @@ -12,10 +13,10 @@ use aidoku::{ prelude::*, }; -mod hash; mod helpers; mod models; mod settings; +mod web; use models::*; @@ -201,9 +202,12 @@ impl Source for Comix { let deduplicate = settings::dedupchapter(); let mut chapter_map: HashMap = HashMap::new(); let mut chapter_list: Vec = Vec::new(); + + let web_view = web::create_web_view()?; + let path = format!("/manga/{}/chapters", manga.key); + let token = web::get_token(&web_view, &path)?; + loop { - let path = format!("/manga/{}/chapters", manga.key); - let token = hash::generate_hash(&path); let url = format!( "{API_URL}{path}\ ?limit={limit}\ @@ -212,7 +216,9 @@ impl Source for Comix { &_={token}" ); - let res = Request::get(url)?.json_owned::()?; + let encoded_res = Request::get(&url)?.string()?; + let result = web::decode_response(&web_view, &url, &encoded_res)?; + let res = serde_json::from_str::(&result)?; let items = res.result.items; @@ -258,16 +264,35 @@ impl Source for Comix { } fn get_page_list(&self, _manga: Manga, chapter: Chapter) -> Result> { + let web_view = web::create_web_view()?; let path = format!("/chapters/{}", chapter.key); - let token = hash::generate_hash(&path); + let token = web::get_token(&web_view, &path)?; let url = format!("{API_URL}{path}?_={token}"); - let json: ChapterResponse = Request::get(url)?.json_owned()?; + let encoded_res = Request::get(&url)?.string()?; + let result = web::decode_response(&web_view, &url, &encoded_res)?; + let json: ChapterResponse = serde_json::from_str(&result)?; let Some(result) = json.result else { bail!("Missing chapter") }; - Ok(result.pages.into_iter().map(Into::into).collect()) + let base_url = result.pages.base_url.trim_end_matches('/'); + Ok(result + .pages + .items + .into_iter() + .map(|page| { + let url = if 
page.url.starts_with("http") { + page.url + } else { + format!("{base_url}/{}", page.url.trim_start_matches('/')) + }; + Page { + content: PageContent::url(url), + ..Default::default() + } + }) + .collect()) } } diff --git a/sources/en.comix/src/models.rs b/sources/en.comix/src/models.rs index 7a5db128..624aaca3 100644 --- a/sources/en.comix/src/models.rs +++ b/sources/en.comix/src/models.rs @@ -1,6 +1,6 @@ use crate::{BASE_URL, helpers, settings}; use aidoku::{ - Chapter, ContentRating, Manga, MangaPageResult, MangaStatus, Page, PageContent, Viewer, + Chapter, ContentRating, Manga, MangaPageResult, MangaStatus, Viewer, alloc::{String, Vec, string::ToString, vec}, prelude::*, }; @@ -268,7 +268,7 @@ impl ComixChapter { #[derive(Deserialize)] pub struct ComixChapterWithPages { - pub pages: Vec, + pub pages: ComixPages, } #[derive(Deserialize)] @@ -290,17 +290,15 @@ pub struct ScanlationGroup { } #[derive(Deserialize)] -pub struct ComixPage { - pub url: String, +#[serde(rename_all = "camelCase")] +pub struct ComixPages { + pub base_url: String, + pub items: Vec, } -impl From for Page { - fn from(value: ComixPage) -> Self { - Page { - content: PageContent::url(value.url), - ..Default::default() - } - } +#[derive(Deserialize)] +pub struct ComixPage { + pub url: String, } // deserialize a bool from a json bool, number, or string diff --git a/sources/en.comix/src/web.rs b/sources/en.comix/src/web.rs new file mode 100644 index 00000000..9d5af9a6 --- /dev/null +++ b/sources/en.comix/src/web.rs @@ -0,0 +1,101 @@ +// reference: https://github.com/nobottomline/extensions-source/blob/c8fe930f315f3baee23587559edfceab5e969202/src/en/comix/src/eu/kanade/tachiyomi/extension/en/comix/Signer.kt +use crate::BASE_URL; +use aidoku::{ + Result, + alloc::string::String, + imports::{js::WebView, net::Request}, + prelude::*, +}; + +pub fn create_web_view() -> Result { + let web_view = WebView::new(); + web_view.load_blocking(Request::get(BASE_URL)?)?; + Ok(web_view) +} + +/// * `path`: API 
path, e.g. "/manga/some-hash/chapters"
pub fn get_token(web_view: &WebView, path: &str) -> Result<String> {
	// Locate the site's obfuscated `vm*` global inside the already-loaded
	// web view and call its token generator (`Qi`) for the given API path.
	// Returns an error if the global or the function is missing, or if the
	// call yields an empty string.
	let token = web_view.eval(&format!(
		"(() => {{
			try {{
				const vmKey = Object.keys(window).find(key => key.startsWith('vm'));
				const vmObj = window[vmKey];
				if (!vmObj || typeof vmObj.Qi !== 'function') {{
					return '';
				}}
				return vmObj.Qi('{path}');
			}} catch(e) {{
				return '';
			}}
		}})()"
	))?;
	if token.is_empty() {
		bail!("Failed to fetch token")
	}
	Ok(token)
}

/// Decode a (possibly encrypted) API response body.
///
/// Registers a fake axios instance with the site's `vm.v(...)` bootstrap to
/// capture its response interceptor, then replays the raw body through that
/// interceptor when it looks encrypted (has an `e` field). Plain responses
/// are passed through unchanged. Returns a JSON string of the form
/// `{"result": ...}` ready for `serde_json` deserialization.
///
/// * `url`: the request URL (forwarded to the interceptor's config)
/// * `encoded_res`: the raw response body text
pub fn decode_response(web_view: &WebView, url: &str, encoded_res: &str) -> Result<String> {
	// The body is embedded into a single-quoted JS string literal below, so
	// backslashes and single quotes must be escaped first or the generated
	// script would be malformed (and the body could inject arbitrary JS).
	let escaped_res = encoded_res.replace('\\', "\\\\").replace('\'', "\\'");
	let result = web_view.eval(&format!(
		"(() => {{
			try {{
				const vmKey = Object.keys(window).find(key => key.startsWith('vm'));
				const vmObj = window[vmKey];
				if (!vmObj || typeof vmObj.Qi !== 'function') {{
					return '';
				}}
				var captured = {{ req: null, res: null }};
				var fakeAxios = {{
					interceptors: {{
						request: {{
							use: function (fn) {{
								captured.req = fn;
							}},
						}},
						response: {{
							use: function (fn) {{
								captured.res = fn;
							}},
						}},
					}},
					defaults: {{
						headers: {{ common: {{}} }},
						transformRequest: [],
						transformResponse: [],
					}},
				}};
				vmObj.v(fakeAxios);

				var raw = JSON.parse('{escaped_res}');
				var bodyOut;
				if (raw && typeof raw === 'object' && 'e' in raw && captured.res) {{
					// Encrypted payload: replay it through the captured
					// response interceptor with the header it checks for.
					var fakeResp = {{
						data: raw,
						status: 200,
						statusText: '',
						headers: {{
							'x-enc': '1',
						}},
						config: {{ url: '{url}', method: 'get', baseURL: '/api/v1' }},
						request: {{}},
					}};
					var decoded = captured.res(fakeResp);
					bodyOut = JSON.stringify({{ result: decoded && decoded.data }});
				}} else if (raw && typeof raw === 'object' && 'result' in raw) {{
					// Already in plain `{{ result: ... }}` form; pass through.
					// (Previously this referenced an undefined `text`
					// variable, which threw and made every plain response
					// surface as a decode error.)
					bodyOut = JSON.stringify(raw);
				}} else {{
					bodyOut = JSON.stringify({{ result: raw }});
				}}
				return bodyOut;
			}} catch(e) {{
				return 'error: ' + e;
			}}
		}})()",
	))?;
	if result.starts_with("error:") {
		bail!("{result}");
	} else if result.is_empty() {
		// Distinct message from get_token's so failures are attributable.
		bail!("Failed to decode response")
	}
	Ok(result)
}