diff --git a/README.md b/README.md
index dd73208..4bd76e1 100644
--- a/README.md
+++ b/README.md
@@ -163,6 +163,7 @@ The "default" column shows the default value if the attribute is not set.
| branding | boolean to include logos | true |
| mock | boolean to use the mock wasm client data | false |
| target | string "web", "extension-offscreen" or "extension-popup" | web |
+| headless | boolean to skip UI rendering (headless mode) | false |
In development, these settings can be customized using the `REACT_APP_*` environment variables in the `.env` or in your terminal.
For example, to run the widget in "panel" layout, you can run `REACT_APP_LAYOUT=panel yarn start`. To run the widget with mock data,
@@ -174,6 +175,81 @@ you can set `data-layout="panel"` in `ui/public/index.html`.
If you enable the editor (by setting `REACT_APP_EDITOR=true` or `data-editor="true"`), you can also edit the settings dynamically in the browser using a UI editor the renders above the widget.
*Note* that the `mock` and `target` settings are not dynamic and therefore not editable in the browser. These two settings are static and must be set at the time the wasm interface is initialized.
+#### Headless mode (programmatic API)
+
+Headless mode lets you run the WASM proxy without rendering any UI, giving the host page full control over the user experience. When `data-headless="true"` is set, React rendering is skipped entirely. A global `window.LanternProxy` API is exposed for programmatic control — call `init()` to load the WASM proxy engine, then `start()` to begin sharing.
+
+This is useful when you want to:
+- Build a custom UI around the proxy (e.g. embed proxy stats in your own dashboard)
+- Run the proxy silently in the background
+- Integrate proxy data (connections, throughput) into an existing application
+
+**Minimal example:**
+
+```html
+<unbounded-network data-headless="true"></unbounded-network>
+<script defer src="https://embed.lantern.io/static/js/main.js"></script>
+<script>window.addEventListener('load', async () => {
+  await window.LanternProxy.init(); window.LanternProxy.start(); })</script>
+```
+
+**API reference:**
+
+| Method / Property | Description |
+|---|---|
+| `init(options?)` | Initialize the WASM proxy. Accepts optional `{ mock: boolean }`. Must be called before `start()`. Safe to call concurrently — duplicate calls return the same promise. |
+| `start()` | Begin proxying traffic (fire-and-forget). |
+| `stop()` | Stop proxying traffic (fire-and-forget). |
+| `on(event, callback)` | Subscribe to an event. Returns an unsubscribe function. |
+| `off(event, callback)` | Unsubscribe from an event. |
+| `getState()` | Returns a snapshot of current state: `{ ready, sharing, connections, throughput, lifetimeConnections, chunks }`. |
+| `initialized` | Boolean — whether `init()` has been called successfully. |
+| `config` | Read-only copy of the WASM client config (discovery server, egress, etc). |
+
+**Events:**
+
+| Event | Payload | Description |
+|---|---|---|
+| `ready` | `boolean` | Fires when the proxy engine is ready to start |
+| `sharing` | `boolean` | Fires when the proxy begins/stops sharing traffic |
+| `connections` | `Connection[]` | Active connection list updates |
+| `throughput` | `number` | Average throughput in bytes/sec |
+| `lifetimeConnections` | `number` | Cumulative connections served |
+| `chunks` | `Chunk[]` | Data chunk updates |
+
+**Note:** `window.LanternProxy` is registered on every page load (even without `data-headless`), so you can use the API alongside the standard UI embed too. The `data-headless` attribute only controls whether the React UI renders.
+
Links:
[Github pages sandbox](https://embed.lantern.io)
diff --git a/ui/src/headlessApi.test.ts b/ui/src/headlessApi.test.ts
new file mode 100644
index 0000000..cd37f5c
--- /dev/null
+++ b/ui/src/headlessApi.test.ts
@@ -0,0 +1,140 @@
+import {readyEmitter, sharingEmitter, connectionsEmitter, averageThroughputEmitter, lifetimeConnectionsEmitter, lifetimeChunksEmitter, WasmInterface} from './utils/wasmInterface'
+
+// Mock WasmInterface before importing headlessApi
+jest.mock('./utils/wasmInterface', () => {
+ const {StateEmitter} = jest.requireActual('./hooks/useStateEmitter')
+ const readyEmitter = new StateEmitter(false)
+ const sharingEmitter = new StateEmitter(false)
+ const connectionsEmitter = new StateEmitter([])
+ const averageThroughputEmitter = new StateEmitter(0)
+ const lifetimeConnectionsEmitter = new StateEmitter(0)
+ const lifetimeChunksEmitter = new StateEmitter([])
+
+ const mockInstance = {}
+
+ class MockWasmInterface {
+ initialize = jest.fn().mockResolvedValue(mockInstance)
+ start = jest.fn()
+ stop = jest.fn()
+ ready = false
+ initializing = false
+ connectionMap = {}
+ throughput = {bytesPerSec: 0}
+ connections = []
+ }
+
+ return {
+ WasmInterface: MockWasmInterface,
+ readyEmitter,
+ sharingEmitter,
+ connectionsEmitter,
+ averageThroughputEmitter,
+ lifetimeConnectionsEmitter,
+ lifetimeChunksEmitter,
+ }
+})
+
+// Import after mock is set up
+import {LanternProxy} from './headlessApi'
+
+beforeEach(() => {
+ // Reset emitter state between tests
+ readyEmitter.update(false)
+ sharingEmitter.update(false)
+ connectionsEmitter.update([])
+ averageThroughputEmitter.update(0)
+ lifetimeConnectionsEmitter.update(0)
+ lifetimeChunksEmitter.update([])
+})
+
+describe('LanternProxy.on / off', () => {
+ test('on() delivers emitter updates to subscribers', () => {
+ const cb = jest.fn()
+ LanternProxy.on('ready', cb)
+ readyEmitter.update(true)
+ expect(cb).toHaveBeenCalledWith(true)
+ })
+
+ test('on() returns an unsubscribe function', () => {
+ const cb = jest.fn()
+ const unsub = LanternProxy.on('throughput', cb)
+ averageThroughputEmitter.update(100)
+ expect(cb).toHaveBeenCalledTimes(1)
+
+ unsub()
+ averageThroughputEmitter.update(200)
+ expect(cb).toHaveBeenCalledTimes(1) // no new calls
+ })
+
+ test('off() removes a specific callback', () => {
+ const cb1 = jest.fn()
+ const cb2 = jest.fn()
+ LanternProxy.on('sharing', cb1)
+ LanternProxy.on('sharing', cb2)
+
+ LanternProxy.off('sharing', cb1)
+ sharingEmitter.update(true)
+
+ expect(cb1).not.toHaveBeenCalled()
+ expect(cb2).toHaveBeenCalledWith(true)
+ })
+
+ test('multiple event types work independently', () => {
+ const readyCb = jest.fn()
+ const connCb = jest.fn()
+ LanternProxy.on('ready', readyCb)
+ LanternProxy.on('connections', connCb)
+
+ readyEmitter.update(true)
+ expect(readyCb).toHaveBeenCalledWith(true)
+ expect(connCb).not.toHaveBeenCalled()
+
+ const conns = [{state: 1, workerIdx: 0, addr: '1.2.3.4'}]
+ connectionsEmitter.update(conns)
+ expect(connCb).toHaveBeenCalledWith(conns)
+ })
+})
+
+describe('LanternProxy.getState', () => {
+ test('returns current emitter state', () => {
+ readyEmitter.update(true)
+ sharingEmitter.update(true)
+ averageThroughputEmitter.update(500)
+ lifetimeConnectionsEmitter.update(42)
+
+ const state = LanternProxy.getState()
+ expect(state.ready).toBe(true)
+ expect(state.sharing).toBe(true)
+ expect(state.throughput).toBe(500)
+ expect(state.lifetimeConnections).toBe(42)
+ })
+
+ test('returns shallow copies of arrays', () => {
+ const conns = [{state: 1, workerIdx: 0, addr: '1.2.3.4'}]
+ connectionsEmitter.update(conns)
+
+ const state = LanternProxy.getState()
+ expect(state.connections).toEqual(conns)
+ expect(state.connections).not.toBe(conns) // different reference
+ })
+})
+
+describe('LanternProxy.init', () => {
+ test('concurrent calls return the same promise', () => {
+ const p1 = LanternProxy.init()
+ const p2 = LanternProxy.init()
+ expect(p1).toBe(p2)
+ })
+})
+
+describe('window.LanternProxy', () => {
+ test('is exposed globally', () => {
+ expect((window as any).LanternProxy).toBe(LanternProxy)
+ })
+
+ test('is not writable', () => {
+ expect(() => {
+ (window as any).LanternProxy = 'overwrite'
+ }).toThrow()
+ })
+})
diff --git a/ui/src/headlessApi.ts b/ui/src/headlessApi.ts
new file mode 100644
index 0000000..82c7c64
--- /dev/null
+++ b/ui/src/headlessApi.ts
@@ -0,0 +1,141 @@
+/**
+ * Headless API for controlling the unbounded WASM proxy without rendering any UI.
+ *
+ * Usage (as a module or after the deferred script has loaded):
+ *
+ *   <unbounded-network data-headless="true"></unbounded-network>
+ *   <script>await window.LanternProxy.init(); window.LanternProxy.start()</script>
+ */
+
+import {WasmInterface, connectionsEmitter, averageThroughputEmitter, lifetimeConnectionsEmitter, lifetimeChunksEmitter, readyEmitter, sharingEmitter, type Connection, type Chunk} from './utils/wasmInterface'
+import {Targets, WASM_CLIENT_CONFIG} from './constants'
+
+export type ProxyEvent = 'ready' | 'sharing' | 'connections' | 'throughput' | 'lifetimeConnections' | 'chunks'
+
+export interface ProxyState {
+ ready: boolean
+ sharing: boolean
+ connections: Connection[]
+ throughput: number
+ lifetimeConnections: number
+ chunks: Chunk[]
+}
+
+type EventCallback<T = unknown> = (value: T) => void
+
+const listeners = new Map<string, Set<EventCallback>>()
+
+function emitToListeners(event: string, value: unknown) {
+ const set = listeners.get(event)
+ if (set) set.forEach(cb => cb(value))
+}
+
+// Wire up emitters to forward to external listeners
+function wireEmitters() {
+ readyEmitter.on((v) => emitToListeners('ready', v))
+ sharingEmitter.on((v) => emitToListeners('sharing', v))
+ connectionsEmitter.on((v) => emitToListeners('connections', v))
+ averageThroughputEmitter.on((v) => emitToListeners('throughput', v))
+ lifetimeConnectionsEmitter.on((v) => emitToListeners('lifetimeConnections', v))
+ lifetimeChunksEmitter.on((v) => emitToListeners('chunks', v))
+}
+
+let wasmInterface: WasmInterface | null = null
+let initialized = false
+let initPromise: Promise<void> | null = null
+
+export const LanternProxy = {
+ /**
+ * Initialize the WASM proxy. Must be called before start().
+ * Safe to call concurrently — subsequent calls return the same promise.
+ * @param options.mock - Use mock client for testing (default: false)
+ */
+  init(options?: { mock?: boolean }): Promise<void> {
+ if (initialized) {
+ return Promise.resolve()
+ }
+ if (initPromise) {
+ return initPromise
+ }
+    initPromise = (async () => {
+      const mock = options?.mock ?? false
+      wasmInterface = new WasmInterface()
+      const instance = await wasmInterface.initialize({mock, target: Targets.WEB})
+      if (!instance) throw new Error('WASM proxy failed to initialize')
+      initialized = true
+    })().catch((err) => {
+      initPromise = null // reset so a later init() can retry after a failed/rejected initialize
+      throw err
+    })
+    return initPromise
+ },
+
+ /** Start proxying traffic (fire-and-forget). Must call init() first. */
+ start(): void {
+ if (!initialized || !wasmInterface) throw new Error('LanternProxy not initialized — call and await init() first')
+ wasmInterface.start()
+ },
+
+ /** Stop proxying traffic (fire-and-forget). Must call init() first. */
+ stop(): void {
+ if (!initialized || !wasmInterface) throw new Error('LanternProxy not initialized — call and await init() first')
+ wasmInterface.stop()
+ },
+
+ /** Subscribe to a proxy event. Returns an unsubscribe function. */
+ on(event: ProxyEvent, callback: EventCallback): () => void {
+ if (!listeners.has(event)) listeners.set(event, new Set())
+ const set = listeners.get(event)!
+ set.add(callback as EventCallback)
+ return () => set.delete(callback as EventCallback)
+ },
+
+ /** Unsubscribe from a proxy event. */
+ off(event: ProxyEvent, callback: EventCallback): void {
+ listeners.get(event)?.delete(callback)
+ },
+
+ /** Get a snapshot of the current proxy state. Arrays are shallow-copied. */
+ getState(): ProxyState {
+ return {
+ ready: readyEmitter.state,
+ sharing: sharingEmitter.state,
+ connections: [...connectionsEmitter.state],
+ throughput: averageThroughputEmitter.state,
+ lifetimeConnections: lifetimeConnectionsEmitter.state,
+ chunks: [...lifetimeChunksEmitter.state],
+ }
+ },
+
+ /** Whether init() has been called successfully. */
+ get initialized(): boolean {
+ return initialized
+ },
+
+ /** The WASM client config (discovery server, egress, etc). Read-only. */
+ get config() {
+ return {...WASM_CLIENT_CONFIG}
+ },
+}
+
+// Wire emitters immediately so subscriptions work before init()
+wireEmitters()
+
+// Expose globally — use defineProperty to prevent accidental overwrites
+if (!(window as any).LanternProxy) {
+ Object.defineProperty(window, 'LanternProxy', {
+ value: LanternProxy,
+ writable: false,
+ enumerable: false,
+ configurable: false,
+ })
+}
diff --git a/ui/src/index.tsx b/ui/src/index.tsx
index f86aad8..b7cafa5 100644
--- a/ui/src/index.tsx
+++ b/ui/src/index.tsx
@@ -4,6 +4,9 @@ import App from './App'
import {StateEmitter} from './hooks/useStateEmitter'
import {defaultSettings, Settings, Themes} from './constants'
+// Always register the headless API on window.LanternProxy
+import './headlessApi'
+
export const settingsEmitter = new StateEmitter<{ [key: number]: Settings }>({})
const upperSnakeToCamel = (s: string | undefined) => {
@@ -49,13 +52,20 @@ const hydrateSettings = (i: number, dataset: Settings) => {
 const init = (embeds: NodeListOf<HTMLElement>) => {
embeds.forEach((embed, i) => {
+ const dataset = embed.dataset as unknown as Settings
+ hydrateSettings(i, dataset)
+
+ // Headless mode: skip all UI rendering, just expose window.LanternProxy
+		// Usage: <unbounded-network data-headless="true"></unbounded-network>
+ if ((embed.dataset as any).headless === 'true') {
+ console.log('Unbounded: headless mode — UI rendering skipped. Call window.LanternProxy.init() to load the proxy engine.')
+ return
+ }
+
const root = ReactDOM.createRoot(
embed
)
- const dataset = embed.dataset as unknown as Settings
- hydrateSettings(i, dataset)
-
const observer = new MutationObserver((mutations) => {
mutations.forEach((mutation) => {
if (mutation.attributeName && mutation.attributeName.includes('data-')) {