diff --git a/.env.example b/.env.example
index 6a9964e..7abb8a6 100644
--- a/.env.example
+++ b/.env.example
@@ -14,6 +14,17 @@ VITE_WALLETCONNECT_PROJECT_ID=your-walletconnect-project-id
# EDB_API_KEY — Secret key that the Vercel proxy injects into bridge requests
# EDB_CORS_ALLOWED_ORIGINS — Comma-separated extra origins for the edb proxy (e.g. https://yourdomain.com)
+# Starknet Simulator Bridge (client-side override — defaults to /api/starknet-sim)
+# For local dev, Vite proxies /api/starknet-sim to http://127.0.0.1:5790 automatically.
+# Set to "disabled" to turn off the Starknet sim integration entirely.
+# VITE_STARKNET_SIM_BRIDGE_URL=/api/starknet-sim
+
+# Starknet Simulator Bridge (server-side, used by the Vercel proxy)
+# STARKNET_SIM_BRIDGE_URL — Full URL of the bridge (e.g. https://sim-sn.your-domain:5790)
+# STARKNET_SIM_API_KEY — Secret key injected by the Vercel proxy as X-API-Key
+# STARKNET_SIM_CORS_ALLOWED_ORIGINS — Comma-separated extra origins for the starknet-sim proxy
+# STARKNET_SIM_RPC_ALLOWED_HOSTS — Optional comma-separated RPC host allowlist for X-Starknet-Rpc-Url
+
# Etherscan
# ETHERSCAN_API_KEY — Default server-side explorer key used by the Etherscan-family proxy
@@ -25,9 +36,8 @@ VITE_WALLETCONNECT_PROJECT_ID=your-walletconnect-project-id
# GEMINI_MODEL — Primary model (default: gemini-3.1-pro-preview)
# GEMINI_FALLBACK_MODEL — Fallback on 429/503 (default: gemini-2.5-flash)
-# Shared proxy secret (optional — when set, lifi-composer and llm-recommend
-# proxies require this value in the x-proxy-secret header instead of origin checks)
-# PROXY_SECRET — Random secret string
-
-# Origin allowlist for lifi-composer and llm-recommend proxies
+# Public proxy controls for LI.FI, Gemini, and default Etherscan requests.
+# These routes are browser-called, origin-checked, and rate-limited. Never expose
+# PROXY_SECRET through a VITE_ variable; it is only a server-to-server bypass.
+# PROXY_SECRET — Optional random secret for trusted server-to-server calls
# ALLOWED_ORIGINS — Comma-separated origins (e.g. https://yourdomain.com,https://preview.yourdomain.com)
diff --git a/.gitignore b/.gitignore
index 7b8edfe..dbed772 100644
--- a/.gitignore
+++ b/.gitignore
@@ -18,6 +18,9 @@ CURRENT_WORKING.md
node_modules
dist
dist-ssr
+.worktrees/
+edb/target/
+fhe/cache/
*.local
.env
@@ -61,11 +64,23 @@ edb
# Misc
*.pdf
-*test*
-*Test*
-*TEST*
/tmp
video/out
.superpowers
/docs
-@docs
\ No newline at end of file
+@docs
+
+# Tests — run via vitest locally, never committed.
+**/*.test.ts
+**/*.test.tsx
+**/*.test.js
+**/*.test.jsx
+**/*.spec.ts
+**/*.spec.tsx
+**/*.spec.js
+**/*.spec.jsx
+**/__tests__/
+**/__mocks__/
+**/test-fixtures/
+/tests/
+/test-results/
diff --git a/.npmrc b/.npmrc
new file mode 100644
index 0000000..521a9f7
--- /dev/null
+++ b/.npmrc
@@ -0,0 +1 @@
+legacy-peer-deps=true
diff --git a/.nvmrc b/.nvmrc
new file mode 100644
index 0000000..2bd5a0a
--- /dev/null
+++ b/.nvmrc
@@ -0,0 +1 @@
+22
diff --git a/README.md b/README.md
index becc606..7f8e49c 100644
--- a/README.md
+++ b/README.md
@@ -1,273 +1,146 @@
-
-
-
+# HexKit
-
HEXKIT
+HexKit is a browser-based web3 developer toolkit for decoding calldata, building transactions, simulating EVM and Starknet calls, inspecting contract source, debugging traces, and managing DeFi yield.
-
- A local-first EVM developer toolkit for decoding, simulating, debugging smart contract transactions, and managing DeFi yield.
-
+The production app is a Vite/React frontend deployed on Vercel. Server-side API routes proxy external services and optional simulator bridges so browser clients do not need direct access to private keys, API keys, or bridge credentials.
----
+## Core Features
-## Overview
-
-HexKit is a browser-based web3 developer toolkit built for inspecting, simulating, and debugging EVM transactions, with an integrated DeFi yield management layer. All heavy compute -- REVM replay, instrumentation, and trace decoding -- runs locally, keeping your data private and your workflow fast.
-
-The application pairs a React frontend with a local Rust-powered EDB (EVM Debugger) engine that provides full transaction replay, step-through debugging, and storage introspection without relying on third-party simulation services. The integrations layer connects to external DeFi protocols (starting with LI.FI) for cross-chain yield discovery, deposit execution, and portfolio management.
+- EVM transaction utilities: calldata decoding, ABI encoding, wallet-backed reads/writes, and local EDB simulation.
+- Starknet transaction utilities: invoke builder, trace replay, simulation history, state diffs, token flows, events, L2 to L1 messages, and class/source inspection.
+- Contract source tools: Sourcify, Etherscan-compatible explorer, Blockscout, and Starknet class/source lookup.
+- Debugging views: call trees, stack traces, source panels, trace rows, breakpoints, and snapshot-backed evaluation.
+- Signature database: selector/topic lookup, cached signatures, and custom signature management.
+- LI.FI Earn integration: vault discovery, portfolio positions, deposit/withdraw flows, and yield recommendations.
+- Vercel API proxies: EDB bridge, Starknet simulator bridge, LI.FI Composer/Earn, Gemini recommendations, and Etherscan-family explorer requests.
## Tech Stack
| Layer | Technologies |
-|-------|-------------|
-| Frontend | React 19, TypeScript, Vite 5 |
-| Styling | Tailwind CSS v4, shadcn/ui |
-| Web3 | ethers v5, viem, wagmi, RainbowKit |
-| Simulation | REVM (Rust), EDB engine, WebSocket bridge |
-| Integrations | LI.FI SDK (Earn API, Composer), Gemini LLM |
-| API Proxies | Vercel Serverless Functions |
-| Testing | Vitest, Testing Library |
-
-## Features
-
-### Smart Decoder
-
-Paste any calldata, transaction hash, or function signature and HexKit auto-detects the format and decodes it using the OpenChain signature database. Supports custom ABI input for decoding private or unregistered functions. A local signature cache enables offline use.
-
-### Signature Database
-
-Look up function selectors (4-byte) and event topics by name or hex signature. Manage custom signatures and browse the local cache.
-
-### Transaction Builder
-
-Two operating modes:
-
-- **Live Interaction** -- Connect a wallet via RainbowKit, then read and write contract functions on any EVM chain directly from the browser.
-- **Simulation (EDB)** -- Build transactions and simulate them through the local EDB engine. Get full REVM replay with execution traces, state changes, event logs, and gas analysis without spending gas or requiring a wallet.
-
-### Source Tools
-
-- **Contract Explorer** -- Browse verified contract source code fetched from Sourcify, Etherscan, and Blockscout.
-- **Contract Diff** -- Side-by-side bytecode and source comparison between two contracts on the same or different chains.
-- **Storage Layout Viewer** -- AST-based storage slot reconstruction with live value reading from on-chain state.
-
-### Simulation Results
-
-A full simulation analysis page with six tabs:
-
-- **Summary** -- Gas usage, value transferred, and execution status.
-- **Events** -- Decoded event logs with token movement visualization.
-- **State Changes** -- Storage diff with named slots when source is available.
-- **Execution Trace** -- Step-through call tree with source mapping.
-- **Contracts** -- All contracts involved in the transaction with verification status.
-- **Debug** -- Step-through Solidity debugger with breakpoints, snapshots, and an expression evaluator.
-
-### Simulation History
-
-Browse past simulations stored in IndexedDB. Re-open any previous result for review or further debugging.
-
-### Integrations
-
-The `/integrations` route hosts protocol-specific modules that extend HexKit beyond debugging into active DeFi operations.
-
-#### LI.FI Earn
-
-A full yield management layer powered by the LI.FI Earn API:
-
-- **Vault Browser** -- Browse, search, and filter yield vaults across 20+ protocols and all supported EVM chains. Each vault shows live APY, TVL, underlying tokens, and protocol metadata.
-- **My Positions** -- View open earn positions for any connected wallet or arbitrary address. Shows per-position PnL and portfolio summary.
-- **Deposit / Withdraw Flows** -- Deposit into and withdraw from vaults directly through LI.FI's Composer API, which handles cross-chain swaps and bridging automatically.
-- **Vault Simulator** -- Forecast projected returns for any vault over a configurable time horizon before committing capital.
-
-#### Yield Concierge (AI-powered)
-
-An AI assistant that translates natural language yield goals into actionable vault recommendations:
-
-- **Intent Parser** -- Gemini LLM converts free-text prompts ("safest USDC vault above 5% on Arbitrum") into structured filters (token, chain, APY range, objective, protocol allow/deny lists).
-- **My Assets Mode** -- Say "best vaults for my assets" and the concierge fans out per-asset recommendations for every idle token in the connected wallet.
-- **Consolidate Mode** -- Say "best vault for my assets" (singular) and the concierge finds the top vault candidates to funnel all holdings into a single position via cross-chain swaps.
-- **Idle Sweep** -- Detects wallet tokens sitting idle (not earning yield) and suggests the best vault for each, with one-click deposit.
-- **Execution Pipeline** -- A sequential deposit queue that processes multiple deposits one after another, handling quoting, approval, execution, and cross-chain bridge status polling for each leg.
-- **Model Fallback** -- The LLM proxy tries `gemini-3.1-pro-preview` first and auto-falls back to `gemini-2.5-flash` on rate limits, keeping recommendations available without interruption.
-
-### Contract Resolution
-
-HexKit resolves contract ABIs and source code through parallel multi-source lookup:
-
-1. **Sourcify** (full match and partial match)
-2. **Etherscan** (and compatible explorers)
-3. **Blockscout**
-4. **WhatsABI** decompilation fallback for unverified contracts
+| --- | --- |
+| Frontend | React 19, TypeScript, Vite |
+| Styling | Tailwind CSS v4, shadcn/ui primitives |
+| EVM | ethers v5, viem, wagmi, RainbowKit |
+| Starknet | starknet.js, Cartridge controller, Starkzap |
+| Editor/debug UI | Monaco, React Router, Framer Motion |
+| APIs | Vercel Serverless Functions |
+| Testing and quality | Vitest, ESLint, TypeScript project build |
-Additional capabilities:
+## Local Development
-- Diamond proxy (EIP-2535) facet resolution with automatic loupe calls
-- Two-tier caching: L1 (in-memory LRU) and L2 (IndexedDB with 24-hour TTL)
+Use Node.js 22. The Starknet runtime dependencies require Node 22+, and Vercel should be configured to build with the same major version.
-### Debug Sessions
-
-Full Solidity step-through debugger powered by the local EDB engine:
-
-- Source-level breakpoints tied to Solidity source maps
-- Expression evaluator with a three-tier resolution chain: locals, storage-layout decode, and ABI getter fallback
-- Dual-layer snapshot system (opcode snapshots and hook snapshots)
-- Keep-alive sessions via the EDB bridge for long-running investigations
-
-## Architecture
-
-HexKit follows a local-first architecture. The frontend communicates with a local EDB simulator bridge over WebSocket, which in turn drives the Rust-based REVM engine.
-
-```
-Browser (React) <--> Simulator Bridge (:5789) <--> EDB Engine (Rust/REVM)
-```
-
-Visual architecture diagrams are available in the `schematics/` folder:
-
-| Diagram | Description |
-|---------|-------------|
-| `schematics/hexkit-app-architecture.excalidraw` | Frontend application layers, routes, and data flow |
-| `schematics/edb-system-topology.excalidraw` | End-to-end system architecture (Frontend, Bridge, Rust Engine, REVM) |
-| `schematics/edb-engine-internals.excalidraw` | EDB engine 8-step workflow and dual snapshot system |
-
-## Getting Started
-
-### Prerequisites
-
-- Node.js 18+
-- npm
-- Rust toolchain (for building the EDB simulator)
-
-### Quick Start
-
-The start script handles everything:
+Install dependencies:
```bash
-./start-dev.sh
+npm install
```
-It performs the following steps:
-
-1. Checks for the `edb-simulator` binary (auto-builds via `cargo build -p edb-simulator --release` if missing)
-2. Starts the EDB simulator bridge on port 5789
-3. Starts the Vite dev server on port 5173
-
-### Manual Start
+Start only the Vite app:
```bash
-# Terminal 1: EDB bridge
-npm run simulator:server
-
-# Terminal 2: Frontend
npm run dev
```
-### Environment
+Start Vite plus the local EDB and Starknet simulator bridges:
-The app works out of the box with public RPC endpoints. For better reliability, configure an Alchemy API key, Infura project ID, or a custom RPC URL through the RPC Settings modal (gear icon in the top bar).
+```bash
+npm run dev:full
+```
-For the LI.FI Earn integration and AI concierge, set the following in `.env`:
+Build the production bundle:
-| Variable | Purpose |
-|----------|---------|
-| `LIFI_API_KEY` | LI.FI API key for Earn and Composer endpoints |
-| `GEMINI_API_KEY` | Google AI Studio API key for the yield concierge LLM |
-| `GEMINI_MODEL` | Primary Gemini model (default: `gemini-3.1-pro-preview`) |
-| `GEMINI_FALLBACK_MODEL` | Fallback on 429/503 (default: `gemini-2.5-flash`) |
-| `PROXY_SECRET` | Shared secret for API proxy authentication (production) |
-| `ALLOWED_ORIGINS` | Comma-separated allowed CORS origins (production) |
+```bash
+npm run build
+```
-## Project Structure
+Run quality checks:
+```bash
+npm run lint
+npm run test:run
```
-src/
- components/
- simple-grid/ Transaction builder grid layout
- smart-decoder/ Calldata decoder module
- simulation-results/ Results page sub-modules
- explorer/ Source tools (explorer, diff, storage layout)
- execution-trace/ Trace viewer components
- debug/ Debug window and panels
- integrations/
- IntegrationsHub.tsx Integration router
- lifi-earn/
- LifiEarnPage.tsx Main earn page (positions, vaults tabs)
- VaultList.tsx Vault browser with filters
- VaultDrawer.tsx Vault detail sheet
- DepositFlow.tsx Deposit transaction flow
- WithdrawFlow.tsx Withdraw transaction flow
- earnApi.ts LI.FI Earn API client
- concierge/
- ConciergePanel.tsx Yield concierge tabs (idle sweep + intent)
- IdleSweepPanel.tsx Idle asset detection and recommendations
- VaultRecommendations.tsx Recommendation card grid
- ExecutionQueue.tsx Sequential deposit pipeline UI
- executionMachine.ts Reducer-based execution state machine
- intent/
- IntentPanel.tsx AI intent UI (prompt, chips, results)
- schema.ts ParsedIntent zod schema
- hooks/
- useIntentParser.ts LLM prompt → structured intent
- useVaultsByIntent.ts Filter + rank vaults by intent
- useIntentRecommendation.ts LLM-powered vault picks
- shared/ Reusable components
- icons/ Icon library
- ui/ shadcn/ui primitives
- contexts/ React contexts (state management)
- hooks/ Custom hooks
- services/ Service layer (bridge, history, vault)
- utils/
- resolver/ Contract resolution engine
- traceDecoder/ Trace decoding pipeline
- transaction-simulation/ Simulation logic
- solidity-layout/ Storage layout reconstruction
- fetchers/ API fetchers (Sourcify, Etherscan, Blockscout)
- cache/ Caching utilities
- workers/ Web Workers (trace decoder)
- types/ TypeScript types
- styles/ CSS modules
- config/ App configuration
- lib/ Shared libraries
+## Vercel Configuration
+
+Client-side variables must be prefixed with `VITE_`. Server-side secrets are read only by `api/*` routes and should be configured in the Vercel dashboard.
+
+| Variable | Scope | Purpose |
+| --- | --- | --- |
+| `VITE_WALLETCONNECT_PROJECT_ID` | Client | WalletConnect project ID for EVM wallet connections |
+| `VITE_SIMULATOR_BRIDGE_URL` | Client | EDB bridge endpoint, defaults to `/api/edb` |
+| `VITE_STARKNET_SIM_BRIDGE_URL` | Client | Starknet simulator endpoint, defaults to `/api/starknet-sim` |
+| `EDB_BRIDGE_URL` | Server | Upstream EDB bridge URL for the Vercel proxy |
+| `EDB_API_KEY` | Server | API key injected into EDB bridge requests |
+| `EDB_CORS_ALLOWED_ORIGINS` | Server | Extra allowed origins for the EDB proxy |
+| `STARKNET_SIM_BRIDGE_URL` | Server | Upstream Starknet simulator bridge URL |
+| `STARKNET_SIM_API_KEY` | Server | API key injected into Starknet simulator requests |
+| `STARKNET_SIM_CORS_ALLOWED_ORIGINS` | Server | Extra allowed origins for the Starknet simulator proxy |
+| `STARKNET_SIM_RPC_ALLOWED_HOSTS` | Server | Optional RPC override host allowlist for Starknet sim requests |
+| `ETHERSCAN_API_KEY` | Server | Explorer API key for Etherscan-compatible requests |
+| `LIFI_API_KEY` | Server | LI.FI Earn and Composer API key |
+| `GEMINI_API_KEY` | Server | Gemini key for yield recommendation routes |
+| `GEMINI_MODEL` | Server | Primary Gemini model |
+| `GEMINI_FALLBACK_MODEL` | Server | Fallback Gemini model |
+| `PROXY_SECRET` | Server | Optional server-to-server bypass for public proxy rate/origin checks |
+| `ALLOWED_ORIGINS` | Server | Comma-separated production origins for browser-called public proxies |
+
+See `.env.example` for a copyable template.
+
+## Project Layout
+
+```text
api/
- llm-recommend.ts Gemini LLM proxy with model fallback
- lifi-composer.ts LI.FI Composer quote/execute proxy
- edb/ EDB simulation API routes
+ edb-proxy.ts EDB bridge proxy for Vercel
+ starknet-sim-proxy.ts Starknet simulator bridge proxy
+ lifi-composer.ts LI.FI quote/execute proxy
+ lifi-earn.ts LI.FI Earn proxy
+ llm-recommend.ts Gemini recommendation proxy
+ explorer/etherscan.ts Explorer proxy
-edb/
- crates/engine/ Core debug engine
- crates/edb-simulator/ CLI simulator binary
- crates/rpc-proxy/ Intelligent RPC proxy
-
-scripts/ Dev scripts (bridge, perf tests)
-schematics/ Architecture documentation
+src/
+ chains/ Chain capabilities, adapters, Starknet clients
+ components/ App screens and UI components
+ components/starknet/ Starknet builder, history, and result routes
+ components/starknet-simulation-results/ Starknet result presentation adapters
+ contexts/ Wallet, network, simulation, and debug state
+ features/earn/ Cross-chain earn adapter layer
+ hooks/ Shared React hooks
+ lib/ Monaco and asset helpers
+ routes/ Family-aware route helpers
+ services/ History, trace, and bridge services
+ utils/ Decoders, resolver, simulation, layout utilities
+
+public/
+ logos/ Favicons, app icons, and public logo assets
+
+scripts/
+ dev-full.sh Local multi-service dev runner
+ check-family-imports.mjs Family import boundary check
+ check-inline-copy.mjs Inline copy guard
+
+starknet-sim/ Starknet simulator bridge workspace
+edb/ EVM debugger workspace
```
-## Scripts
-
-| Script | Purpose |
-|--------|---------|
-| `npm run dev` | Start Vite dev server |
-| `npm run build` | TypeScript check + production build |
-| `npm run simulator:server` | Start EDB simulator bridge |
-| `npm run test` | Run Vitest |
-| `npm run qa:live:matrix` | Live QA test matrix |
-| `npm run perf:debug-matrix` | Debug performance stress test |
-
-## Documentation
-
-Architecture docs live in `schematics/`:
+## Production Notes
-| Document | Purpose |
-|----------|---------|
-| `SYSTEM_SCHEMATIC.md` | High-level architecture and task-to-area index |
-| `DATAFLOWS.md` | End-to-end data flow diagrams |
-| `GRANULAR-COMPONENTS.md` | Component-level documentation |
-| `LEGEND.md` | Doc routing guide |
+- Vercel builds with `npm run build`.
+- Generated output in `dist/` is ignored and does not need to be committed.
+- Local bridge build products, node_modules folders, temp output, and worktrees are ignored and should not be pushed.
+- Public assets are limited to files referenced by the app shell, manifest, route UI, or social metadata.
+- For Starknet simulator production, keep `VITE_STARKNET_SIM_BRIDGE_URL` unset or set to `/api/starknet-sim`, then point `STARKNET_SIM_BRIDGE_URL` at the HTTPS droplet bridge URL. If the bridge requires auth, set `STARKNET_SIM_API_KEY` in Vercel and configure the same key on the droplet bridge.
+- Do not expose `PROXY_SECRET` with a `VITE_` prefix. Browser-called proxy routes use origin checks and serverless rate limits; `PROXY_SECRET` is only for trusted server-to-server calls.
-## Giveth
-https://giveth.io/project/hexkit
+## Architecture Diagrams
+Tracked diagrams live in `schematics/`:
+| File | Purpose |
+| --- | --- |
+| `schematics/hexkit-app-architecture.excalidraw` | Frontend route, provider, and data-flow map |
+| `schematics/edb-system-topology.excalidraw` | Browser, bridge, and EDB engine topology |
+| `schematics/edb-engine-internals.excalidraw` | EDB engine execution and snapshot internals |
## License
diff --git a/api/_llm/requireAuth.ts b/api/_llm/requireAuth.ts
new file mode 100644
index 0000000..78b70d8
--- /dev/null
+++ b/api/_llm/requireAuth.ts
@@ -0,0 +1,49 @@
+import * as crypto from "crypto";
+
+type Headers = Record<string, string | string[] | undefined>;
+
+function getHeader(headers: Headers, name: string): string | undefined {
+ const v = headers[name.toLowerCase()];
+ return Array.isArray(v) ? v[0] : v;
+}
+
+function timingSafeEqualStr(a: string, b: string): boolean {
+ const ab = Buffer.from(a);
+ const bb = Buffer.from(b);
+ if (ab.length !== bb.length) return false;
+ return crypto.timingSafeEqual(ab, bb);
+}
+
+export function hasValidProxySecret(req: { headers: Headers }): boolean {
+ const secret = process.env.PROXY_SECRET;
+ if (!secret) return false;
+ const sent = getHeader(req.headers, "x-proxy-secret");
+ if (!sent) return false;
+ return timingSafeEqualStr(sent, secret);
+}
+
+export interface ServerKeyAuthResult {
+ ok: boolean;
+ status?: number;
+ body?: { error: string };
+}
+
+export function authorizeServerKeyUse(
+ req: { headers: Headers },
+): ServerKeyAuthResult {
+ if (!process.env.PROXY_SECRET) {
+ return {
+ ok: false,
+ status: 503,
+ body: { error: "server_key_not_authorized" },
+ };
+ }
+ if (!hasValidProxySecret(req)) {
+ return {
+ ok: false,
+ status: 403,
+ body: { error: "forbidden" },
+ };
+ }
+ return { ok: true };
+}
diff --git a/api/_utils/proxyHelper.ts b/api/_utils/proxyHelper.ts
new file mode 100644
index 0000000..f23cb5e
--- /dev/null
+++ b/api/_utils/proxyHelper.ts
@@ -0,0 +1,224 @@
+import type { VercelRequest, VercelResponse } from "@vercel/node";
+
+export interface CorsHeadersOptions {
+ allowedOrigin?: string | null;
+ allowMethods?: string;
+ allowHeaders?: string;
+ maxAge?: string;
+ varyOrigin?: boolean;
+}
+
+export function applyCorsHeaders(
+ res: VercelResponse,
+ options: CorsHeadersOptions,
+): void {
+ if (options.allowedOrigin) {
+ res.setHeader("Access-Control-Allow-Origin", options.allowedOrigin);
+ if (options.varyOrigin) {
+ res.setHeader("Vary", "Origin");
+ }
+ }
+ if (options.allowMethods) {
+ res.setHeader("Access-Control-Allow-Methods", options.allowMethods);
+ }
+ if (options.allowHeaders) {
+ res.setHeader("Access-Control-Allow-Headers", options.allowHeaders);
+ }
+ if (options.maxAge) {
+ res.setHeader("Access-Control-Max-Age", options.maxAge);
+ }
+}
+
+export function handleCorsPreflight(
+ req: VercelRequest,
+ res: VercelResponse,
+ cors?: CorsHeadersOptions | (() => CorsHeadersOptions | undefined),
+): boolean {
+ if (req.method !== "OPTIONS") return false;
+ const options = typeof cors === "function" ? cors() : cors;
+ if (options) applyCorsHeaders(res, options);
+ res.status(204).end();
+ return true;
+}
+
+export function readRawBody(
+ req: VercelRequest,
+ maxBodyBytes: number,
+): Promise<Buffer> {
+ return new Promise((resolve, reject) => {
+ const chunks: Buffer[] = [];
+ let total = 0;
+
+ req.on("data", (chunk: Buffer) => {
+ total += chunk.length;
+ if (total > maxBodyBytes) {
+ req.destroy();
+ reject(new Error("body_too_large"));
+ return;
+ }
+ chunks.push(chunk);
+ });
+ req.on("end", () => resolve(Buffer.concat(chunks)));
+ req.on("error", reject);
+ });
+}
+
+export interface ProxyAbortOptions {
+ timeoutMs?: number;
+ abortOnClose?: boolean;
+}
+
+export function createProxyAbortSignal(
+ req: VercelRequest,
+ options: ProxyAbortOptions,
+): AbortSignal {
+ const controller = new AbortController();
+
+ if (options.abortOnClose) {
+ req.on("close", () => controller.abort());
+ }
+
+ if (typeof options.timeoutMs === "number") {
+ const timer = setTimeout(() => controller.abort(), options.timeoutMs);
+ req.on("close", () => clearTimeout(timer));
+ }
+
+ return controller.signal;
+}
+
+export function fetchUpstream(
+ req: VercelRequest,
+ input: string | URL,
+ init: RequestInit,
+ abort?: ProxyAbortOptions,
+): Promise<Response> {
+ return fetch(input, {
+ ...init,
+ signal: abort ? createProxyAbortSignal(req, abort) : init.signal,
+ });
+}
+
+export async function sendTextUpstreamResponse(
+ res: VercelResponse,
+ upstream: Response,
+  headers?: Record<string, string>,
+): Promise<void> {
+ if (headers) {
+ for (const [name, value] of Object.entries(headers)) {
+ res.setHeader(name, value);
+ }
+ }
+
+ const body = await upstream.text();
+ res.status(upstream.status).send(body);
+}
+
+const HOP_BY_HOP_HEADERS = new Set([
+ "connection",
+ "keep-alive",
+ "proxy-authenticate",
+ "proxy-authorization",
+ "te",
+ "trailer",
+ "transfer-encoding",
+ "upgrade",
+]);
+
+const DEFAULT_BUFFERED_HEADER_NAMES = ["content-type", "vary"];
+
+export interface BufferedUpstreamResponseOptions {
+  headerNames?: Iterable<string>;
+}
+
+export async function sendBufferedUpstreamResponse(
+ res: VercelResponse,
+ upstream: Response,
+ options: BufferedUpstreamResponseOptions = {},
+): Promise<void> {
+ res.status(upstream.status);
+
+ for (const name of options.headerNames ?? DEFAULT_BUFFERED_HEADER_NAMES) {
+ const lowerName = name.toLowerCase();
+ if (HOP_BY_HOP_HEADERS.has(lowerName)) continue;
+
+ const value = upstream.headers.get(name);
+ if (!value) continue;
+
+ if (lowerName === "vary") {
+ const existing = res.getHeader("Vary");
+ res.setHeader("Vary", existing ? `${existing}, ${value}` : value);
+ } else {
+ res.setHeader(lowerName, value);
+ }
+ }
+
+ const buf = Buffer.from(await upstream.arrayBuffer());
+ res.send(buf);
+}
+
+export async function streamSseResponse(
+ res: VercelResponse,
+ upstream: Response,
+): Promise<boolean> {
+ const contentType = upstream.headers.get("content-type") || "";
+ if (!contentType.includes("text/event-stream") || !upstream.body) {
+ return false;
+ }
+
+ res.setHeader("Content-Type", "text/event-stream");
+ res.setHeader("Cache-Control", "no-cache");
+ res.setHeader("Connection", "keep-alive");
+ res.setHeader("X-Accel-Buffering", "no");
+
+ const reader = upstream.body.getReader();
+ const decoder = new TextDecoder();
+ try {
+ for (;;) {
+ const { done, value } = await reader.read();
+ if (done) break;
+ res.write(decoder.decode(value, { stream: true }));
+ }
+ } catch {
+ // Client disconnected or upstream closed.
+ } finally {
+ reader.cancel().catch(() => {});
+ res.end();
+ }
+
+ return true;
+}
+
+export interface JsonProxyError {
+ status: number;
+ body: { error: string };
+}
+
+export interface ProxyErrorOptions {
+ logLabel: string;
+ upstream: JsonProxyError;
+ timeout?: JsonProxyError;
+ bodyTooLarge?: JsonProxyError;
+}
+
+export function sendProxyError(
+ res: VercelResponse,
+ err: unknown,
+ options: ProxyErrorOptions,
+): VercelResponse {
+ if (
+ options.bodyTooLarge &&
+ err instanceof Error &&
+ err.message === "body_too_large"
+ ) {
+ return res
+ .status(options.bodyTooLarge.status)
+ .json(options.bodyTooLarge.body);
+ }
+
+ if (options.timeout && err instanceof Error && err.name === "AbortError") {
+ return res.status(options.timeout.status).json(options.timeout.body);
+ }
+
+ console.error(`[${options.logLabel}] upstream error:`, err);
+ return res.status(options.upstream.status).json(options.upstream.body);
+}
diff --git a/api/_utils/publicProxyGuard.ts b/api/_utils/publicProxyGuard.ts
new file mode 100644
index 0000000..fcb1fa8
--- /dev/null
+++ b/api/_utils/publicProxyGuard.ts
@@ -0,0 +1,65 @@
+import type { VercelRequest, VercelResponse } from "@vercel/node";
+import { hasValidProxySecret } from "../_llm/requireAuth";
+import { enforceRateLimit, type RateLimitOptions } from "./rateLimit";
+
+export interface ResolveAllowedOriginOptions {
+ envVar?: string;
+ allowLocalhost?: boolean;
+}
+
+export interface PublicProxyAccessOptions {
+ allowedOrigin: string | null;
+ rateLimit: RateLimitOptions;
+ allowServerSecretBypass?: boolean;
+}
+
+function headerValue(value: string | string[] | undefined): string | undefined {
+ return Array.isArray(value) ? value[0] : value;
+}
+
+export function resolveAllowedProxyOrigin(
+ req: VercelRequest,
+ options: ResolveAllowedOriginOptions = {},
+): string | null {
+ const origin = headerValue(req.headers.origin);
+ if (!origin) return null;
+
+ const envVar = options.envVar ?? "ALLOWED_ORIGINS";
+ const configured = (process.env[envVar] || "")
+ .split(",")
+ .map((value) => value.trim())
+ .filter(Boolean);
+ if (configured.includes(origin)) return origin;
+
+ if (options.allowLocalhost !== false) {
+ if (
+ origin.startsWith("http://localhost:") ||
+ origin.startsWith("http://127.0.0.1:")
+ ) {
+ return origin;
+ }
+ }
+
+ const host = headerValue(req.headers.host);
+ if (host && origin === `https://${host}`) return origin;
+
+ return null;
+}
+
+export function enforcePublicProxyAccess(
+ req: VercelRequest,
+ res: VercelResponse,
+ options: PublicProxyAccessOptions,
+): boolean {
+ if (options.allowServerSecretBypass !== false && hasValidProxySecret(req)) {
+ return true;
+ }
+
+ const origin = headerValue(req.headers.origin);
+ if (origin && !options.allowedOrigin) {
+ res.status(403).json({ error: "origin_not_allowed" });
+ return false;
+ }
+
+ return enforceRateLimit(req, res, options.rateLimit);
+}
diff --git a/api/_utils/rateLimit.ts b/api/_utils/rateLimit.ts
new file mode 100644
index 0000000..0c52f46
--- /dev/null
+++ b/api/_utils/rateLimit.ts
@@ -0,0 +1,88 @@
+import type { VercelRequest, VercelResponse } from "@vercel/node";
+
+interface RateLimitEntry {
+ count: number;
+ resetAt: number;
+}
+
+export interface RateLimitOptions {
+ bucket: string;
+ limit: number;
+ windowMs: number;
+ key?: string;
+}
+
+const stores = new Map<string, Map<string, RateLimitEntry>>();
+const MAX_KEYS_PER_BUCKET = 5_000;
+
+function firstHeaderValue(value: string | string[] | undefined): string | undefined {
+ return Array.isArray(value) ? value[0] : value;
+}
+
+export function requestClientKey(req: VercelRequest): string {
+ const forwardedFor = firstHeaderValue(req.headers["x-forwarded-for"]);
+ if (forwardedFor) {
+ const first = forwardedFor.split(",")[0]?.trim();
+ if (first) return first;
+ }
+
+ const realIp = firstHeaderValue(req.headers["x-real-ip"]);
+ if (realIp?.trim()) return realIp.trim();
+
+ return req.socket?.remoteAddress ?? "unknown";
+}
+
+export function enforceRateLimit(
+ req: VercelRequest,
+ res: VercelResponse,
+ options: RateLimitOptions,
+): boolean {
+ const now = Date.now();
+ const key = options.key ?? requestClientKey(req);
+ const store = stores.get(options.bucket) ?? new Map();
+ stores.set(options.bucket, store);
+ pruneExpiredEntries(store, now);
+
+ const current = store.get(key);
+ const entry =
+ current && current.resetAt > now
+ ? current
+ : { count: 0, resetAt: now + options.windowMs };
+
+ entry.count += 1;
+ store.set(key, entry);
+
+ const remaining = Math.max(0, options.limit - entry.count);
+ const retryAfter = Math.max(1, Math.ceil((entry.resetAt - now) / 1000));
+
+ res.setHeader("X-RateLimit-Limit", String(options.limit));
+ res.setHeader("X-RateLimit-Remaining", String(remaining));
+ res.setHeader("X-RateLimit-Reset", String(Math.ceil(entry.resetAt / 1000)));
+
+ if (entry.count <= options.limit) return true;
+
+ res.setHeader("Retry-After", String(retryAfter));
+ res.status(429).json({ error: "rate_limited" });
+ return false;
+}
+
+function pruneExpiredEntries(
+  store: Map<string, RateLimitEntry>,
+ now: number,
+): void {
+ if (store.size <= MAX_KEYS_PER_BUCKET) return;
+
+ for (const [key, entry] of store) {
+ if (entry.resetAt <= now) store.delete(key);
+ }
+
+ if (store.size <= MAX_KEYS_PER_BUCKET) return;
+
+ const overflow = store.size - MAX_KEYS_PER_BUCKET;
+ let removed = 0;
+ for (const key of store.keys()) {
+ store.delete(key);
+ removed += 1;
+ if (removed >= overflow) return;
+ }
+}
diff --git a/api/_utils/rpcUrlSafety.ts b/api/_utils/rpcUrlSafety.ts
new file mode 100644
index 0000000..d100763
--- /dev/null
+++ b/api/_utils/rpcUrlSafety.ts
@@ -0,0 +1,156 @@
+import * as net from "node:net";
+
+export interface RpcUrlValidationOptions {
+ allowedHostsEnv?: string;
+}
+
+export interface RpcUrlValidationResult {
+ ok: boolean;
+ reason?: string;
+}
+
+export function validatePublicRpcUrl(
+ raw: string,
+ options: RpcUrlValidationOptions = {},
+): RpcUrlValidationResult {
+ if (raw.length > 2048) return { ok: false, reason: "rpc_url_too_long" };
+
+ let url: URL;
+ try {
+ url = new URL(raw);
+ } catch {
+ return { ok: false, reason: "invalid_rpc_url" };
+ }
+
+ if (url.protocol !== "http:" && url.protocol !== "https:") {
+ return { ok: false, reason: "unsupported_rpc_scheme" };
+ }
+
+ if (url.username || url.password) {
+ return { ok: false, reason: "rpc_url_credentials_not_allowed" };
+ }
+
+ const host = url.hostname.replace(/^\[|\]$/g, "").toLowerCase();
+ if (!host) return { ok: false, reason: "missing_rpc_host" };
+
+ if (isPrivateHost(host)) {
+ return { ok: false, reason: "private_rpc_host_not_allowed" };
+ }
+
+ const allowedHosts = parseAllowedHosts(options.allowedHostsEnv);
+ if (allowedHosts.length > 0 && !hostMatchesAllowlist(host, allowedHosts)) {
+ return { ok: false, reason: "rpc_host_not_allowed" };
+ }
+
+ return { ok: true };
+}
+
+function parseAllowedHosts(envName?: string): string[] {
+ if (!envName) return [];
+ return (process.env[envName] || "")
+ .split(",")
+ .map((value) => value.trim().toLowerCase())
+ .filter(Boolean);
+}
+
+function hostMatchesAllowlist(host: string, allowedHosts: string[]): boolean {
+ return allowedHosts.some((allowed) => {
+ if (host === allowed) return true;
+ if (allowed.startsWith(".")) return host.endsWith(allowed);
+ return false;
+ });
+}
+
+function isPrivateHost(host: string): boolean {
+ if (host === "localhost" || host === "localhost." || host.endsWith(".localhost")) {
+ return true;
+ }
+
+ const ipVersion = net.isIP(host);
+ if (ipVersion === 4) return isPrivateIpv4(host);
+ if (ipVersion === 6) return isPrivateIpv6(host);
+
+ return false;
+}
+
+function isPrivateIpv4(host: string): boolean {
+ const octets = host.split(".").map((part) => Number(part));
+ if (
+ octets.length !== 4 ||
+ octets.some((part) => !Number.isInteger(part) || part < 0 || part > 255)
+ ) {
+ return true;
+ }
+
+ const [a, b, c] = octets;
+ return (
+ a === 0 ||
+ a === 10 ||
+ a === 127 ||
+ (a === 100 && b >= 64 && b <= 127) ||
+ (a === 169 && b === 254) ||
+ (a === 172 && b >= 16 && b <= 31) ||
+ (a === 192 && b === 168) ||
+ (a === 192 && b === 0 && c === 0) ||
+ (a === 192 && b === 0 && c === 2) ||
+ (a === 192 && b === 88 && c === 99) ||
+ (a === 198 && (b === 18 || b === 19)) ||
+ (a === 198 && b === 51 && c === 100) ||
+ (a === 203 && b === 0 && c === 113) ||
+ a >= 224
+ );
+}
+
+function isPrivateIpv6(host: string): boolean {
+ const normalized = host.toLowerCase();
+ const mappedIpv4 = ipv4FromMappedIpv6(normalized);
+ if (mappedIpv4) return isPrivateIpv4(mappedIpv4);
+
+ return (
+ normalized === "::" ||
+ normalized === "::1" ||
+ normalized.startsWith("::") ||
+ normalized.startsWith("fc") ||
+ normalized.startsWith("fd") ||
+ normalized.startsWith("fe8") ||
+ normalized.startsWith("fe9") ||
+ normalized.startsWith("fea") ||
+ normalized.startsWith("feb") ||
+ normalized.startsWith("::ffff:10.") ||
+ normalized.startsWith("::ffff:127.") ||
+ normalized.startsWith("::ffff:192.168.") ||
+ normalized.startsWith("::ffff:169.254.")
+ );
+}
+
+function ipv4FromMappedIpv6(host: string): string | null {
+ const prefix = "::ffff:";
+ if (!host.startsWith(prefix)) return null;
+
+ const suffix = host.slice(prefix.length);
+ if (suffix.includes(".")) return suffix;
+
+ const groups = suffix.split(":");
+ if (groups.length !== 2) return null;
+
+ const [hiRaw, loRaw] = groups;
+ const hi = Number.parseInt(hiRaw, 16);
+ const lo = Number.parseInt(loRaw, 16);
+ if (
+ !Number.isInteger(hi) ||
+ !Number.isInteger(lo) ||
+ hi < 0 ||
+ hi > 0xffff ||
+ lo < 0 ||
+ lo > 0xffff
+ ) {
+ return null;
+ }
+
+ return [
+ (hi >> 8) & 0xff,
+ hi & 0xff,
+ (lo >> 8) & 0xff,
+ lo & 0xff,
+ ].join(".");
+}
diff --git a/api/edb-proxy.ts b/api/edb-proxy.ts
index cbcf8d9..ae1887b 100644
--- a/api/edb-proxy.ts
+++ b/api/edb-proxy.ts
@@ -1,5 +1,14 @@
import type { VercelRequest, VercelResponse } from "@vercel/node";
import { maybeInjectDefaultEtherscanKey } from "./edbShared.js";
+import {
+ applyCorsHeaders,
+ fetchUpstream,
+ handleCorsPreflight,
+ readRawBody,
+ sendBufferedUpstreamResponse,
+ sendProxyError,
+ streamSseResponse,
+} from "./_utils/proxyHelper";
export const config = {
api: { bodyParser: false },
@@ -44,30 +53,14 @@ function applyCors(req: VercelRequest, res: VercelResponse) {
typeof req.headers.host === "string" ? req.headers.host : undefined;
const allowed = resolveAllowedOrigin(origin, host);
if (allowed) {
- res.setHeader("Access-Control-Allow-Origin", allowed);
- res.setHeader("Vary", "Origin");
- res.setHeader("Access-Control-Allow-Methods", "GET, POST, OPTIONS, HEAD");
- res.setHeader("Access-Control-Allow-Headers", "Content-Type, Accept");
- res.setHeader("Access-Control-Max-Age", "600");
- }
-}
-
-function getRawBody(req: VercelRequest): Promise<Buffer> {
- return new Promise((resolve, reject) => {
- const chunks: Buffer[] = [];
- let total = 0;
- req.on("data", (chunk: Buffer) => {
- total += chunk.length;
- if (total > MAX_BODY_BYTES) {
- req.destroy();
- reject(new Error("body_too_large"));
- return;
- }
- chunks.push(chunk);
+ applyCorsHeaders(res, {
+ allowedOrigin: allowed,
+ allowMethods: "GET, POST, OPTIONS, HEAD",
+ allowHeaders: "Content-Type, Accept",
+ maxAge: "600",
+ varyOrigin: true,
});
- req.on("end", () => resolve(Buffer.concat(chunks)));
- req.on("error", reject);
- });
+ }
}
export default async function handler(req: VercelRequest, res: VercelResponse) {
@@ -86,7 +79,7 @@ export default async function handler(req: VercelRequest, res: VercelResponse) {
// OPTIONS preflight — CORS headers already set above
if (req.method === "OPTIONS") {
- res.status(204).end();
+ handleCorsPreflight(req, res);
return;
}
@@ -140,7 +133,7 @@ export default async function handler(req: VercelRequest, res: VercelResponse) {
try {
const rawBody =
req.method !== "GET" && req.method !== "HEAD"
- ? await getRawBody(req)
+ ? await readRawBody(req, MAX_BODY_BYTES)
: undefined;
const body = maybeInjectDefaultEtherscanKey(
rawBody,
@@ -151,73 +144,30 @@ export default async function handler(req: VercelRequest, res: VercelResponse) {
// Detect SSE path — use longer timeout, abort on client disconnect
const isSSE = subPath.match(/debug\/prepare\/[^/]+\/events$/);
- const controller = new AbortController();
-
- if (isSSE) {
- // Abort upstream when client disconnects
- req.on("close", () => controller.abort());
- } else {
- // Regular requests get a hard timeout
- const timer = setTimeout(() => controller.abort(), FETCH_TIMEOUT_MS);
- req.on("close", () => clearTimeout(timer));
- }
- const upstream = await fetch(target, {
- method: req.method || "GET",
- headers: upstreamHeaders,
- body,
- signal: controller.signal,
- redirect: "error", // never follow redirects — prevents key leaking to unexpected hosts
- });
+ const upstream = await fetchUpstream(
+ req,
+ target,
+ {
+ method: req.method || "GET",
+ headers: upstreamHeaders,
+ body,
+ redirect: "error", // never follow redirects — prevents key leaking to unexpected hosts
+ },
+ isSSE ? { abortOnClose: true } : { timeoutMs: FETCH_TIMEOUT_MS },
+ );
- // SSE streaming response
- const contentType = upstream.headers.get("content-type") || "";
- if (contentType.includes("text/event-stream") && upstream.body) {
- res.setHeader("Content-Type", "text/event-stream");
- res.setHeader("Cache-Control", "no-cache");
- res.setHeader("Connection", "keep-alive");
- res.setHeader("X-Accel-Buffering", "no");
-
- const reader = upstream.body.getReader();
- const decoder = new TextDecoder();
- try {
- for (;;) {
- const { done, value } = await reader.read();
- if (done) break;
- res.write(decoder.decode(value, { stream: true }));
- }
- } catch {
- // client disconnected or upstream closed
- } finally {
- reader.cancel().catch(() => {});
- res.end();
- }
+ if (await streamSseResponse(res, upstream)) {
return;
}
// Standard response — pipe status + body
- res.status(upstream.status);
-
- const upstreamContentType = upstream.headers.get("content-type");
- if (upstreamContentType) res.setHeader("content-type", upstreamContentType);
- // Merge upstream Vary with any Vary header set in applyCors (e.g., "Origin")
- // so CORS cache keys remain correct.
- const upstreamVary = upstream.headers.get("vary");
- if (upstreamVary) {
- const existing = res.getHeader("Vary");
- res.setHeader(
- "Vary",
- existing ? `${existing}, ${upstreamVary}` : upstreamVary,
- );
- }
-
- const buf = Buffer.from(await upstream.arrayBuffer());
- res.send(buf);
+ await sendBufferedUpstreamResponse(res, upstream);
} catch (err: unknown) {
- if (err instanceof Error && err.name === "AbortError") {
- return res.status(504).json({ error: "bridge_timeout" });
- }
- console.error("[edb] upstream error:", err);
- res.status(502).json({ error: "bridge_unreachable" });
+ return sendProxyError(res, err, {
+ logLabel: "edb",
+ timeout: { status: 504, body: { error: "bridge_timeout" } },
+ upstream: { status: 502, body: { error: "bridge_unreachable" } },
+ });
}
}
diff --git a/api/explorer/etherscan.ts b/api/explorer/etherscan.ts
index fe439b2..0af5ea2 100644
--- a/api/explorer/etherscan.ts
+++ b/api/explorer/etherscan.ts
@@ -1,5 +1,9 @@
import type { VercelRequest, VercelResponse } from "@vercel/node";
import { handleEtherscanLookup } from "./etherscanShared.js";
+import {
+ enforcePublicProxyAccess,
+ resolveAllowedProxyOrigin,
+} from "../_utils/publicProxyGuard.js";
export const config = {
api: { bodyParser: true },
@@ -7,7 +11,14 @@ export const config = {
};
export default async function handler(req: VercelRequest, res: VercelResponse) {
+ const allowedOrigin = resolveAllowedProxyOrigin(req);
+
if (req.method === "OPTIONS") {
+ if (allowedOrigin) {
+ res.setHeader("Access-Control-Allow-Origin", allowedOrigin);
+ }
+ res.setHeader("Access-Control-Allow-Methods", "POST, OPTIONS");
+ res.setHeader("Access-Control-Allow-Headers", "Content-Type, Accept, x-proxy-secret");
res.status(204).setHeader("cache-control", "no-store").end();
return;
}
@@ -20,8 +31,20 @@ export default async function handler(req: VercelRequest, res: VercelResponse) {
return;
}
+ if (
+ !enforcePublicProxyAccess(req, res, {
+ allowedOrigin,
+ rateLimit: { bucket: "etherscan-proxy", limit: 120, windowMs: 60_000 },
+ })
+ ) {
+ return;
+ }
+
const response = await handleEtherscanLookup(req.body, process.env);
res.status(response.status);
+ if (allowedOrigin) {
+ res.setHeader("Access-Control-Allow-Origin", allowedOrigin);
+ }
response.headers.forEach((value, key) => {
res.setHeader(key, value);
diff --git a/api/lifi-composer.ts b/api/lifi-composer.ts
index 813a83b..b7c0c5f 100644
--- a/api/lifi-composer.ts
+++ b/api/lifi-composer.ts
@@ -1,5 +1,15 @@
import type { VercelRequest, VercelResponse } from "@vercel/node";
-import * as crypto from "crypto";
+import {
+ applyCorsHeaders,
+ fetchUpstream,
+ handleCorsPreflight,
+ sendProxyError,
+ sendTextUpstreamResponse,
+} from "./_utils/proxyHelper";
+import {
+ enforcePublicProxyAccess,
+ resolveAllowedProxyOrigin,
+} from "./_utils/publicProxyGuard";
export const config = {
api: { bodyParser: false },
@@ -9,68 +19,36 @@ export const config = {
const LIFI_BASE = "https://li.quest";
const LIFI_API_KEY = process.env.LIFI_API_KEY || "";
const ALLOWED_METHODS = new Set(["GET", "OPTIONS", "HEAD"]);
-const ALLOWED_ORIGINS = new Set(
- (process.env.ALLOWED_ORIGINS || "").split(",").filter(Boolean)
-);
-
-// Shared secret the frontend embeds at build time. When set, every request
-// must present it in the x-proxy-secret header — Origin alone is spoofable
-// from non-browser clients and cannot protect a paid API key.
-const PROXY_SECRET = process.env.PROXY_SECRET || "";
-
-function getAllowedOrigin(req: VercelRequest): string | null {
- const origin = req.headers.origin;
- if (!origin) return null;
- if (ALLOWED_ORIGINS.has(origin)) return origin;
- if (origin.startsWith("http://localhost:")) return origin;
- const host = req.headers.host;
- if (host && origin === `https://${host}`) return origin;
- return null;
-}
-
-function hasValidSecret(req: VercelRequest): boolean {
- if (!PROXY_SECRET) return false;
- const header = req.headers["x-proxy-secret"];
- if (typeof header !== "string") return false;
- const a = Buffer.from(header);
- const b = Buffer.from(PROXY_SECRET);
- if (a.length !== b.length) return false;
- return crypto.timingSafeEqual(a, b);
-}
export default async function handler(
req: VercelRequest,
res: VercelResponse
) {
- const allowedOrigin = getAllowedOrigin(req);
-
- if (req.method === "OPTIONS") {
- if (allowedOrigin) {
- res.setHeader("Access-Control-Allow-Origin", allowedOrigin);
- }
- res.setHeader("Access-Control-Allow-Methods", "GET, OPTIONS");
- res.setHeader("Access-Control-Allow-Headers", "Content-Type, x-proxy-secret");
- return res.status(204).end();
- }
-
- // When PROXY_SECRET is configured, require it on every non-OPTIONS request.
- if (PROXY_SECRET) {
- if (!hasValidSecret(req)) {
- return res.status(403).json({ error: "Forbidden" });
- }
- } else {
- // No PROXY_SECRET: allow same-origin requests (browser omits Origin header
- // for same-origin fetches) and requests with a matching Origin.
- const origin = req.headers.origin;
- if (origin && !allowedOrigin) {
- return res.status(403).json({ error: "Origin not allowed" });
- }
+ const allowedOrigin = resolveAllowedProxyOrigin(req);
+
+ if (
+ handleCorsPreflight(req, res, {
+ allowedOrigin,
+ allowMethods: "GET, OPTIONS",
+ allowHeaders: "Content-Type, x-proxy-secret",
+ })
+ ) {
+ return;
}
if (!ALLOWED_METHODS.has(req.method || "")) {
return res.status(405).json({ error: "Method not allowed" });
}
+ if (
+ !enforcePublicProxyAccess(req, res, {
+ allowedOrigin,
+ rateLimit: { bucket: "lifi-composer", limit: 90, windowMs: 60_000 },
+ })
+ ) {
+ return;
+ }
+
if (!LIFI_API_KEY) {
return res.status(500).json({ error: "LIFI_API_KEY not configured" });
}
@@ -96,24 +74,28 @@ export default async function handler(
const upstream = `${LIFI_BASE}/${subPath.replace(/^\/+/, "")}${qs ? `?${qs}` : ""}`;
try {
- const upstreamRes = await fetch(upstream, {
- method: "GET",
- headers: {
- "x-lifi-api-key": LIFI_API_KEY,
- Accept: "application/json",
+ const upstreamRes = await fetchUpstream(
+ req,
+ upstream,
+ {
+ method: "GET",
+ headers: {
+ "x-lifi-api-key": LIFI_API_KEY,
+ Accept: "application/json",
+ },
+ signal: AbortSignal.timeout(25000),
},
- signal: AbortSignal.timeout(25000),
- });
+ );
- const body = await upstreamRes.text();
-
- if (allowedOrigin) {
- res.setHeader("Access-Control-Allow-Origin", allowedOrigin);
- }
- res.setHeader("Content-Type", "application/json");
- return res.status(upstreamRes.status).send(body);
- } catch (err: any) {
- console.error("[lifi-composer] upstream error:", err);
- return res.status(502).json({ error: "Upstream request failed" });
+ applyCorsHeaders(res, { allowedOrigin });
+ await sendTextUpstreamResponse(res, upstreamRes, {
+ "Content-Type": "application/json",
+ });
+ return;
+ } catch (err) {
+ return sendProxyError(res, err, {
+ logLabel: "lifi-composer",
+ upstream: { status: 502, body: { error: "Upstream request failed" } },
+ });
}
}
diff --git a/api/lifi-earn.ts b/api/lifi-earn.ts
index 0b8551b..8e2410a 100644
--- a/api/lifi-earn.ts
+++ b/api/lifi-earn.ts
@@ -1,5 +1,15 @@
import type { VercelRequest, VercelResponse } from "@vercel/node";
-import * as crypto from "crypto";
+import {
+ applyCorsHeaders,
+ fetchUpstream,
+ handleCorsPreflight,
+ sendProxyError,
+ sendTextUpstreamResponse,
+} from "./_utils/proxyHelper";
+import {
+ enforcePublicProxyAccess,
+ resolveAllowedProxyOrigin,
+} from "./_utils/publicProxyGuard";
export const config = {
api: { bodyParser: false },
@@ -9,62 +19,36 @@ export const config = {
const LIFI_EARN_BASE = "https://earn.li.fi";
const LIFI_API_KEY = process.env.LIFI_API_KEY || "";
const ALLOWED_METHODS = new Set(["GET", "OPTIONS", "HEAD"]);
-const ALLOWED_ORIGINS = new Set(
- (process.env.ALLOWED_ORIGINS || "").split(",").filter(Boolean)
-);
-
-const PROXY_SECRET = process.env.PROXY_SECRET || "";
-
-function getAllowedOrigin(req: VercelRequest): string | null {
- const origin = req.headers.origin;
- if (!origin) return null;
- if (ALLOWED_ORIGINS.has(origin)) return origin;
- if (origin.startsWith("http://localhost:")) return origin;
- const host = req.headers.host;
- if (host && origin === `https://${host}`) return origin;
- return null;
-}
-
-function hasValidSecret(req: VercelRequest): boolean {
- if (!PROXY_SECRET) return false;
- const header = req.headers["x-proxy-secret"];
- if (typeof header !== "string") return false;
- const a = Buffer.from(header);
- const b = Buffer.from(PROXY_SECRET);
- if (a.length !== b.length) return false;
- return crypto.timingSafeEqual(a, b);
-}
export default async function handler(
req: VercelRequest,
res: VercelResponse
) {
- const allowedOrigin = getAllowedOrigin(req);
-
- if (req.method === "OPTIONS") {
- if (allowedOrigin) {
- res.setHeader("Access-Control-Allow-Origin", allowedOrigin);
- }
- res.setHeader("Access-Control-Allow-Methods", "GET, OPTIONS");
- res.setHeader("Access-Control-Allow-Headers", "Content-Type, x-proxy-secret");
- return res.status(204).end();
- }
-
- if (PROXY_SECRET) {
- if (!hasValidSecret(req)) {
- return res.status(403).json({ error: "Forbidden" });
- }
- } else {
- const origin = req.headers.origin;
- if (origin && !allowedOrigin) {
- return res.status(403).json({ error: "Origin not allowed" });
- }
+ const allowedOrigin = resolveAllowedProxyOrigin(req);
+
+ if (
+ handleCorsPreflight(req, res, {
+ allowedOrigin,
+ allowMethods: "GET, OPTIONS",
+ allowHeaders: "Content-Type, x-proxy-secret",
+ })
+ ) {
+ return;
}
if (!ALLOWED_METHODS.has(req.method || "")) {
return res.status(405).json({ error: "Method not allowed" });
}
+ if (
+ !enforcePublicProxyAccess(req, res, {
+ allowedOrigin,
+ rateLimit: { bucket: "lifi-earn", limit: 120, windowMs: 60_000 },
+ })
+ ) {
+ return;
+ }
+
if (!LIFI_API_KEY) {
return res.status(500).json({ error: "LIFI_API_KEY not configured" });
}
@@ -90,24 +74,28 @@ export default async function handler(
const upstream = `${LIFI_EARN_BASE}/${subPath.replace(/^\/+/, "")}${qs ? `?${qs}` : ""}`;
try {
- const upstreamRes = await fetch(upstream, {
- method: "GET",
- headers: {
- "x-lifi-api-key": LIFI_API_KEY,
- Accept: "application/json",
+ const upstreamRes = await fetchUpstream(
+ req,
+ upstream,
+ {
+ method: "GET",
+ headers: {
+ "x-lifi-api-key": LIFI_API_KEY,
+ Accept: "application/json",
+ },
+ signal: AbortSignal.timeout(25000),
},
- signal: AbortSignal.timeout(25000),
- });
+ );
- const body = await upstreamRes.text();
-
- if (allowedOrigin) {
- res.setHeader("Access-Control-Allow-Origin", allowedOrigin);
- }
- res.setHeader("Content-Type", "application/json");
- return res.status(upstreamRes.status).send(body);
- } catch (err: any) {
- console.error("[lifi-earn] upstream error:", err);
- return res.status(502).json({ error: "Upstream request failed" });
+ applyCorsHeaders(res, { allowedOrigin });
+ await sendTextUpstreamResponse(res, upstreamRes, {
+ "Content-Type": "application/json",
+ });
+ return;
+ } catch (err) {
+ return sendProxyError(res, err, {
+ logLabel: "lifi-earn",
+ upstream: { status: 502, body: { error: "Upstream request failed" } },
+ });
}
}
diff --git a/api/llm-recommend.ts b/api/llm-recommend.ts
index 90f9cf7..d530316 100644
--- a/api/llm-recommend.ts
+++ b/api/llm-recommend.ts
@@ -1,5 +1,8 @@
import type { VercelRequest, VercelResponse } from "@vercel/node";
-import * as crypto from "crypto";
+import {
+ enforcePublicProxyAccess,
+ resolveAllowedProxyOrigin,
+} from "./_utils/publicProxyGuard";
export const config = {
api: { bodyParser: true },
@@ -10,35 +13,11 @@ const GEMINI_MODEL = process.env.GEMINI_MODEL || "gemini-2.5-flash-lite";
const GEMINI_API_KEY = process.env.GEMINI_API_KEY || "";
const ALLOWED_METHODS = new Set(["POST", "OPTIONS"]);
-const ALLOWED_ORIGINS = new Set(
- (process.env.ALLOWED_ORIGINS || "").split(",").filter(Boolean)
-);
-const PROXY_SECRET = process.env.PROXY_SECRET || "";
-
-function getAllowedOrigin(req: VercelRequest): string | null {
- const origin = req.headers.origin;
- if (!origin) return null;
- if (ALLOWED_ORIGINS.has(origin)) return origin;
- if (origin.startsWith("http://localhost:")) return origin;
- const host = req.headers.host;
- if (host && origin === `https://${host}`) return origin;
- return null;
-}
-
-function hasValidSecret(req: VercelRequest): boolean {
- if (!PROXY_SECRET) return false;
- const header = req.headers["x-proxy-secret"];
- if (typeof header !== "string") return false;
- const a = Buffer.from(header);
- const b = Buffer.from(PROXY_SECRET);
- if (a.length !== b.length) return false;
- return crypto.timingSafeEqual(a, b);
-}
const MAX_BODY_BYTES = 64 * 1024;
export default async function handler(req: VercelRequest, res: VercelResponse) {
- const allowedOrigin = getAllowedOrigin(req);
+ const allowedOrigin = resolveAllowedProxyOrigin(req);
if (req.method === "OPTIONS") {
if (allowedOrigin) res.setHeader("Access-Control-Allow-Origin", allowedOrigin);
@@ -47,28 +26,25 @@ export default async function handler(req: VercelRequest, res: VercelResponse) {
return res.status(204).end();
}
- if (PROXY_SECRET) {
- if (!hasValidSecret(req)) {
- return res.status(403).json({ error: "Forbidden" });
- }
- } else {
- // No PROXY_SECRET: allow same-origin (no Origin header) and matching origins.
- const origin = req.headers.origin;
- if (origin && !allowedOrigin) {
- return res.status(403).json({ error: "Origin not allowed" });
- }
- }
-
if (!ALLOWED_METHODS.has(req.method || "")) {
return res.status(405).json({ error: "Method not allowed" });
}
+ if (
+ !enforcePublicProxyAccess(req, res, {
+ allowedOrigin,
+ rateLimit: { bucket: "llm-recommend", limit: 12, windowMs: 5 * 60_000 },
+ })
+ ) {
+ return;
+ }
+
const body = req.body;
if (!body || typeof body !== "object") {
return res.status(400).json({ error: "Missing JSON body" });
}
- if (!Array.isArray((body as any).contents)) {
+  if (!Array.isArray((body as Record<string, unknown>).contents)) {
return res.status(400).json({ error: "Body must include `contents` array" });
}
@@ -104,7 +80,7 @@ export default async function handler(req: VercelRequest, res: VercelResponse) {
res.setHeader("Content-Type", "application/json");
res.setHeader("X-Gemini-Model", GEMINI_MODEL);
return res.status(upstreamRes.status).send(text);
- } catch (err: any) {
+ } catch (err) {
console.error("[llm-recommend] upstream error:", err);
return res.status(502).json({ error: "Upstream request failed" });
}
diff --git a/api/starknet-sim-proxy.ts b/api/starknet-sim-proxy.ts
new file mode 100644
index 0000000..6898f2d
--- /dev/null
+++ b/api/starknet-sim-proxy.ts
@@ -0,0 +1,228 @@
+import type { VercelRequest, VercelResponse } from "@vercel/node";
+import {
+ applyCorsHeaders,
+ fetchUpstream,
+ handleCorsPreflight,
+ readRawBody,
+ sendBufferedUpstreamResponse,
+ sendProxyError,
+ streamSseResponse,
+} from "./_utils/proxyHelper";
+import { enforceRateLimit } from "./_utils/rateLimit";
+import { validatePublicRpcUrl } from "./_utils/rpcUrlSafety";
+
+export const config = {
+ api: { bodyParser: false },
+ maxDuration: 300,
+};
+
+const MAX_BODY_BYTES = 50 * 1024 * 1024; // 50 MB — matches EDB
+const FETCH_TIMEOUT_MS = 120_000;
+const ALLOWED_METHODS = new Set(["GET", "POST", "OPTIONS", "HEAD"]);
+
+const DEFAULT_ALLOWED_ORIGINS = new Set([
+ "http://localhost:5173",
+ "http://127.0.0.1:5173",
+ "http://localhost:4173",
+ "http://127.0.0.1:4173",
+]);
+
+function resolveAllowedOrigin(
+ origin: string | undefined,
+ host?: string,
+): string | null {
+ if (!origin) return null;
+ if (DEFAULT_ALLOWED_ORIGINS.has(origin)) return origin;
+ if (host && origin === `https://${host}`) return origin;
+ const extra = process.env.STARKNET_SIM_CORS_ALLOWED_ORIGINS;
+ if (extra) {
+ const list = extra
+ .split(",")
+ .map((s) => s.trim())
+ .filter(Boolean);
+ if (list.includes(origin)) return origin;
+ }
+ return null;
+}
+
+function corsOptionsFor(req: VercelRequest) {
+ const origin =
+ typeof req.headers.origin === "string" ? req.headers.origin : undefined;
+ const host =
+ typeof req.headers.host === "string" ? req.headers.host : undefined;
+ const allowed = resolveAllowedOrigin(origin, host);
+ if (!allowed) return undefined;
+ return {
+ allowedOrigin: allowed,
+ allowMethods: "GET, POST, OPTIONS, HEAD",
+ allowHeaders: "Content-Type, Accept, X-Starknet-Rpc-Url",
+ maxAge: "600",
+ varyOrigin: true,
+ };
+}
+
+function applyCors(req: VercelRequest, res: VercelResponse) {
+ const options = corsOptionsFor(req);
+ if (options) applyCorsHeaders(res, options);
+}
+
+function isHeavyBridgePath(subPath: string): boolean {
+ return (
+ subPath === "simulate" ||
+ subPath === "simulate/prepare" ||
+ subPath === "estimate-fee" ||
+ /^trace\/[^/]+$/.test(subPath)
+ );
+}
+
+export default async function handler(req: VercelRequest, res: VercelResponse) {
+ if (req.method === "OPTIONS") {
+ handleCorsPreflight(req, res, () => corsOptionsFor(req));
+ return;
+ }
+
+ applyCors(req, res);
+
+ const bridgeUrl = process.env.STARKNET_SIM_BRIDGE_URL;
+ const apiKey = process.env.STARKNET_SIM_API_KEY;
+
+ if (!bridgeUrl || !apiKey) {
+ return res.status(503).json({ error: "bridge_not_configured" });
+ }
+
+ const reqOrigin =
+ typeof req.headers.origin === "string" ? req.headers.origin : undefined;
+ const reqHost =
+ typeof req.headers.host === "string" ? req.headers.host : undefined;
+
+ // Requests that carry a disallowed Origin are always rejected.
+ if (reqOrigin && !resolveAllowedOrigin(reqOrigin, reqHost)) {
+ return res.status(403).json({ error: "origin_not_allowed" });
+ }
+
+ // If the request carries an RPC override header and no Origin, we cannot
+ // verify it came from an allowed browser context. Reject to close the
+ // SSRF path where a non-browser client bypasses the CORS check by simply
+ // omitting the Origin header and supplies an arbitrary RPC URL.
+ const hasRpcOverride = typeof req.headers["x-starknet-rpc-url"] === "string"
+ || Array.isArray(req.headers["x-starknet-rpc-url"]);
+ if (hasRpcOverride && !reqOrigin) {
+ return res.status(403).json({ error: "origin_required_for_rpc_override" });
+ }
+
+ if (!ALLOWED_METHODS.has(req.method || "GET")) {
+ return res.status(405).json({ error: "method_not_allowed" });
+ }
+
+ const pathParam = req.query?.path;
+ const subPath = Array.isArray(pathParam)
+ ? pathParam.join("/")
+ : typeof pathParam === "string"
+ ? pathParam
+ : "";
+
+ const parts = subPath ? subPath.split("/") : [];
+ for (const seg of parts) {
+ if (seg === "." || seg === ".." || /[^a-zA-Z0-9_\-:.]/.test(seg)) {
+ return res.status(400).json({ error: "invalid_path" });
+ }
+ }
+
+ const isHeavyPath = isHeavyBridgePath(subPath);
+ if (isHeavyPath && !reqOrigin) {
+ return res.status(403).json({ error: "origin_required_for_heavy_route" });
+ }
+
+ const targetUrl = new URL(
+ `${bridgeUrl.replace(/\/+$/, "")}/${subPath}`,
+ );
+ for (const [key, raw] of Object.entries(req.query ?? {})) {
+ if (key === "path") continue;
+ if (Array.isArray(raw)) {
+ for (const value of raw) targetUrl.searchParams.append(key, value);
+ } else if (typeof raw === "string") {
+ targetUrl.searchParams.append(key, raw);
+ }
+ }
+ const target = targetUrl.toString();
+
+ if (
+ !enforceRateLimit(req, res, {
+ bucket: isHeavyPath ? "starknet-sim-heavy" : "starknet-sim",
+ limit: isHeavyPath ? 12 : 120,
+ windowMs: 60_000,
+ })
+ ) {
+ return;
+ }
+
+  const upstreamHeaders: Record<string, string> = {};
+ upstreamHeaders["X-API-Key"] = apiKey;
+ const ct = req.headers["content-type"];
+ if (ct) upstreamHeaders["Content-Type"] = Array.isArray(ct) ? ct[0] : ct;
+ const accept = req.headers["accept"];
+ if (accept) upstreamHeaders["Accept"] = Array.isArray(accept) ? accept[0] : accept;
+ const acceptEncoding = req.headers["accept-encoding"];
+ if (acceptEncoding)
+ upstreamHeaders["Accept-Encoding"] = Array.isArray(acceptEncoding)
+ ? acceptEncoding[0]
+ : acceptEncoding;
+
+ // Forward per-request RPC override the frontend resolves from the
+ // user's network config. The bridge's `rpc_override::resolve` reads
+ // it for /simulate, /trace, /estimate-fee and falls back to its
+ // STARKNET_RPC_URL env if the header is missing.
+ const rpcOverride = req.headers["x-starknet-rpc-url"];
+ if (rpcOverride) {
+ const value = Array.isArray(rpcOverride) ? rpcOverride[0] : rpcOverride;
+ if (typeof value === "string") {
+ const validation = validatePublicRpcUrl(value, {
+ allowedHostsEnv: "STARKNET_SIM_RPC_ALLOWED_HOSTS",
+ });
+ if (!validation.ok) {
+ return res.status(400).json({
+ error: "invalid_rpc_override",
+ reason: validation.reason,
+ });
+ }
+ upstreamHeaders["X-Starknet-Rpc-Url"] = value;
+ }
+ }
+
+ try {
+ const rawBody =
+ req.method !== "GET" && req.method !== "HEAD"
+ ? await readRawBody(req, MAX_BODY_BYTES)
+ : undefined;
+
+ // SSE paths — no hard timeout, abort on client disconnect.
+ const isSSE =
+ /^step\/[^/]+\/events$/.test(subPath) ||
+ /^simulate\/prepare\/[^/]+\/events$/.test(subPath);
+
+ const upstream = await fetchUpstream(
+ req,
+ target,
+ {
+ method: req.method || "GET",
+ headers: upstreamHeaders,
+ body: rawBody,
+ redirect: "error",
+ },
+ isSSE ? { abortOnClose: true } : { timeoutMs: FETCH_TIMEOUT_MS },
+ );
+
+ if (await streamSseResponse(res, upstream)) {
+ return;
+ }
+
+ await sendBufferedUpstreamResponse(res, upstream);
+ } catch (err: unknown) {
+ return sendProxyError(res, err, {
+ logLabel: "starknet-sim",
+ bodyTooLarge: { status: 413, body: { error: "body_too_large" } },
+ timeout: { status: 504, body: { error: "bridge_timeout" } },
+ upstream: { status: 502, body: { error: "bridge_unreachable" } },
+ });
+ }
+}
diff --git a/api/vertexAuth.ts b/api/vertexAuth.ts
index 010026b..2df0151 100644
--- a/api/vertexAuth.ts
+++ b/api/vertexAuth.ts
@@ -1,4 +1,5 @@
import crypto from "crypto";
+import fs from "fs";
interface ServiceAccountKey {
project_id: string;
@@ -31,9 +32,6 @@ function loadServiceAccountKey(): ServiceAccountKey | null {
const filePath = process.env.GOOGLE_APPLICATION_CREDENTIALS;
if (filePath) {
try {
- // Dynamic require for local dev — Vercel bundles won't hit this path
- // eslint-disable-next-line @typescript-eslint/no-var-requires
- const fs = require("fs");
const content = fs.readFileSync(filePath, "utf-8");
return JSON.parse(content);
} catch {
diff --git a/components.json b/components.json
index 2b0833f..1aee42c 100644
--- a/components.json
+++ b/components.json
@@ -18,5 +18,7 @@
"lib": "@/lib",
"hooks": "@/hooks"
},
- "registries": {}
+ "registries": {
+ "@reui": "https://reui.io/r/{style}/{name}.json"
+ }
}
diff --git a/edb b/edb
index 1ba2fca..c5b32ca 160000
--- a/edb
+++ b/edb
@@ -1 +1 @@
-Subproject commit 1ba2fcaca73cee96bf10107b7b0f98ab1ceab1a4
+Subproject commit c5b32ca53e7c2146e647830af486b6481aa37548
diff --git a/eslint.config.js b/eslint.config.js
index 2c133c2..2cf9ca9 100644
--- a/eslint.config.js
+++ b/eslint.config.js
@@ -7,15 +7,33 @@ import reactRefresh from 'eslint-plugin-react-refresh';
export default tseslint.config(
{
ignores: [
+ '.claude/**',
+ '.cursor/**',
+ '.superpowers/**',
+ '.worktrees/**',
'dist/**',
+ 'dist-ssr/**',
'node_modules/**',
'current_bundle/**',
+ 'coverage/**',
+ 'docs/**',
+ 'edb/**',
+ 'fhe/**',
+ 'schematics/**',
+ 'starknet-sim/**',
+ 'tasks/**',
'test-results/**',
+ 'tmp/**',
+ 'video/**',
'scripts/**',
'public/**',
'tests/**',
'test-*/**',
'**/test-*.js',
+ '**/*.test.{ts,tsx,js,jsx}',
+ '**/*.spec.{ts,tsx,js,jsx}',
+ '**/__tests__/**',
+ '**/__mocks__/**',
'test-app-*/**',
],
},
diff --git a/index.html b/index.html
index 6746f42..16ff5ea 100644
--- a/index.html
+++ b/index.html
@@ -4,36 +4,36 @@
-
+
- HexKit — Ethereum Developer Toolkit | Decode, Build, Simulate
-
-
+ HexKit | Web3 Developer Toolkit
+
+
-
-
+
+
-
+
-
-
+
+
-
+