diff --git a/.github/workflows/build-apk.yml b/.github/workflows/build-apk.yml index b581f4aa..26f00b4a 100644 --- a/.github/workflows/build-apk.yml +++ b/.github/workflows/build-apk.yml @@ -62,7 +62,7 @@ jobs: continue-on-error: true - name: Setup signing keystore - if: ${{ secrets.KEYSTORE_BASE64 != '' }} + if: secrets.KEYSTORE_BASE64 != '' run: | # Decode keystore from secret (persistent key for consistent signatures) echo "${{ secrets.KEYSTORE_BASE64 }}" | base64 -d > android/app/upload-keystore.jks @@ -76,7 +76,7 @@ jobs: EOF - name: Note about signing - if: ${{ secrets.KEYSTORE_BASE64 == '' }} + if: secrets.KEYSTORE_BASE64 == '' run: | echo "⚠️ No signing keystore configured - APK will be signed with debug key" echo "This is normal for forks. The APK will still work for testing." diff --git a/docs/SENDSPIN_WEBRTC_RESEARCH.md b/docs/SENDSPIN_WEBRTC_RESEARCH.md new file mode 100644 index 00000000..4d1c3b46 --- /dev/null +++ b/docs/SENDSPIN_WEBRTC_RESEARCH.md @@ -0,0 +1,336 @@ +# Sendspin WebRTC Architecture Research + +**Date:** 2026-01-13 +**Status:** Research Complete - Dual Peer Connection Architecture Identified +**Upstream Response:** Sendspin uses WebRTC DataChannels, not WebSocket proxying + +--- + +## Executive Summary + +Based on upstream feedback and current research, **Sendspin DOES support WebRTC DataChannels** for audio streaming in remote scenarios. The current implementation incorrectly assumes Sendspin requires WebSocket connections and attempted to proxy WebSocket over WebRTC. The correct architecture uses **two separate peer connections**: + +1. **Peer Connection 1**: MA API control (already implemented ✅) +2. **Peer Connection 2**: Sendspin audio streaming (not implemented ❌) + +--- + +## Current State of Sendspin Protocol (2025-2026) + +### What is Sendspin? + +Sendspin (formerly "Resonate") is Music Assistant's new open-source protocol for synchronized multi-room audio, introduced in MA 2.7 (December 2025). Key features: + +- **Sample-accurate synchronization**: Sub-0.05ms deviation between devices +- **Multi-role support**: Audio player, controller, artwork display, visualizers, etc. +- **Open standard**: Royalty-free, designed to compete with AirPlay/Chromecast +- **Multiple transport layers**: WebSocket (local), WebRTC (remote), Opus codec +- **Complete music experience**: Audio + metadata + artwork + visualizations + +### Transport Layer Architecture + +According to official documentation and upstream feedback: + +**Local Network (LAN):** +- Uses **WebSocket** connection directly to server +- URL: `ws://{server-ip}:8927/sendspin` +- Optimal for stability and low latency with TCP reliability + +**Remote Network (WAN):** +- Uses **WebRTC** with DataChannels +- Enables peer-to-peer, NAT traversal via STUN/TURN +- Signaling happens through MA API connection +- End-to-end encrypted + +### Key Insight: Dual Peer Connection Architecture + +Music Assistant's web player implementation uses: + +``` +Connection 1: MA API (Control Plane) +├── WebRTC Peer Connection +├── Data Channel: "ma-api" +└── Purpose: Browse library, control players, authentication + +Connection 2: Sendspin Audio (Data Plane) +├── WebRTC Peer Connection (separate from API) +├── Data Channel: unnamed/default +├── Signaling: Via API data channel (Connection 1) +└── Purpose: Stream audio (JSON control + binary PCM/Opus) +``` + +**Critical difference from current implementation:** +- Current: Tried to proxy WebSocket→WebRTC (single connection) +- Correct: Two separate WebRTC peer connections, signaled through first + +--- + +## API Endpoints for Sendspin WebRTC + +Based on research, these MA API commands exist for establishing the second peer connection: + +### 1. `sendspin/ice_servers` +**Purpose:** Get STUN/TURN server configuration for WebRTC +**Request:** Via MA API data channel +**Response:** Array of ICE server configurations + +```json +{ + "ice_servers": [ + {"urls": "stun:stun.l.google.com:19302"}, + {"urls": "turn:turn.example.com", "username": "...", "credential": "..."} + ] +} +``` + +### 2. `sendspin/connect` +**Purpose:** Initiate Sendspin peer connection with SDP offer +**Request:** +```json +{ + "sdp": { + "type": "offer", + "sdp": "v=0\r\no=- ..." + } +} +``` +**Response:** SDP answer from server + +### 3. ICE Candidate Exchange +**Purpose:** Exchange ICE candidates for NAT traversal +**Method:** Via API data channel (likely custom message format) + +### 4. `sendspin/update_state` (already implemented) +**Purpose:** Report player state back to server +**Current Status:** ✅ Implemented in music_assistant_api.dart + +### 5. `sendspin/disconnect` (already implemented) +**Purpose:** Gracefully disconnect player +**Current Status:** ✅ Implemented in music_assistant_api.dart + +--- + +## What Went Wrong in Current Implementation + +### Incorrect Assumption +The code comments in `music_assistant_api.dart:2838-2846` state: + +```dart +// NOTE: The following Sendspin API methods were removed because they don't exist in MA: +// - getSendspinConnectionInfo (sendspin/connection_info) +// - sendspinOffer (sendspin/webrtc_offer) +// - sendspinAnswer (sendspin/webrtc_answer) +// - sendspinIceCandidate (sendspin/ice_candidate) +``` + +**Reality:** These APIs (or equivalents) DO exist for establishing WebRTC connections remotely. + +### What Was Tried +From `EMAIL_TO_UPSTREAM.md` and `REMOTE_ACCESS_RESEARCH.md`: + +1. ❌ **WebSocket Proxy over WebRTC** + - Attempted to forward WebSocket frames through single WebRTC data channel + - Quote: "Initial attempts at proxying WebSocket over WebRTC data channel showed high latency" + - Quote: "I abandoned this approach without trying alternative architectures" + - Result: Abandoned due to latency + +2. ✅ **Direct URL Connection** + - Using Cloudflare Tunnel with real URLs works + - But requires external infrastructure + +### What Was NOT Tried + +❌ **Dual Peer Connection Architecture** +- Never implemented second WebRTC peer connection +- Never called `sendspin/ice_servers` or `sendspin/connect` endpoints +- Never tested native WebRTC DataChannel for Sendspin audio + +--- + +## How It Should Work (Based on Upstream Feedback) + +### Architecture Flow + +``` +Step 1: Establish API Connection (Already Working ✅) +┌─────────────┐ WebRTC ┌──────────────┐ +│ Mobile │◄──────── Data Channel ─────►│ MA Server │ +│ App │ "ma-api" label │ │ +└─────────────┘ └──────────────┘ + │ ▲ + │ Library browsing, authentication, │ + │ player control, queue management │ + └────────────────────────────────────────────┘ + +Step 2: Request Sendspin Connection (Via API Channel) +┌─────────────┐ ┌──────────────┐ +│ Mobile │ sendspin/ice_servers ───► │ MA Server │ +│ App │ ◄──── ICE server config │ │ +│ │ sendspin/connect (SDP) ───► │ │ +│ │ ◄──── SDP answer │ │ +└─────────────┘ └──────────────┘ + +Step 3: Establish Second Peer Connection for Audio +┌─────────────┐ WebRTC ┌──────────────┐ +│ Mobile │◄─────── Data Channel ──────►│ MA Server │ +│ App │ (Sendspin protocol) │ │ +└─────────────┘ └──────────────┘ + ▲ │ + │ Binary PCM audio + JSON control │ + └────────────────────────────────────────────┘ +``` + +### Implementation Steps Required + +1. **Detect Remote Mode** + - When connected via RemoteAccessManager + - Skip WebSocket connection for Sendspin + +2. **Request ICE Servers** + - Call `sendspin/ice_servers` via MA API + - Get STUN/TURN configuration + +3. **Create Second Peer Connection** + - New `RTCPeerConnection` instance + - Separate from MA API connection + - Configure with received ICE servers + +4. **Create Data Channel** + - Label: Could be empty or "sendspin" + - Reliable, ordered delivery for audio + +5. **Exchange SDP** + - Create offer from mobile app + - Send via `sendspin/connect` through API channel + - Receive answer from server + - Set remote description + +6. **Exchange ICE Candidates** + - Send local candidates to server (via API channel) + - Receive remote candidates from server + - Add to peer connection + +7. **Handle Sendspin Protocol** + - JSON control messages (same as WebSocket version) + - Binary audio frames (PCM or Opus) + - Parse header: message type + timestamp + data + +8. **Lifecycle Management** + - Keep both connections alive + - Reconnect second peer connection if drops + - Cleanup on disconnect + +--- + +## Code Changes Required + +### Files to Modify + +1. **`lib/services/remote/webrtc_sendspin_transport.dart`** (NEW) + - Second peer connection manager + - Handles Sendspin-specific WebRTC setup + - Signaling via MA API data channel + +2. **`lib/services/sendspin_service.dart`** (MODIFY) + - Detect remote mode + - Use WebRTC transport instead of WebSocket when remote + - Keep existing protocol logic (JSON + binary) + +3. **`lib/services/music_assistant_api.dart`** (MODIFY) + - Add `getSendspinIceServers()` method + - Add `sendspinConnect(sdp)` method + - Add ICE candidate exchange methods + - Remove incorrect comments about non-existent APIs + +4. **`lib/providers/music_assistant_provider.dart`** (MODIFY) + - Pass remote mode flag to Sendspin initialization + - Coordinate lifecycle of both peer connections + +### Estimated Code Impact + +- **New code:** ~300-400 lines (WebRTC Sendspin transport) +- **Modified code:** ~100 lines (API methods, Sendspin service) +- **Total:** ~400-500 lines + +### Minimal change philosophy maintained: +- Reuses existing WebRTC infrastructure from API connection +- No changes to Sendspin protocol handling +- Only transport layer changes + +--- + +## Benefits of Correct Implementation + +1. ✅ **Native WebRTC performance**: No proxy overhead +2. ✅ **Lower latency**: Direct peer-to-peer audio stream +3. ✅ **Better for audio**: UDP with loss tolerance vs TCP retransmits +4. ✅ **Architecturally correct**: Matches MA web player design +5. ✅ **Future-proof**: Aligns with MA's remote streaming vision +6. ✅ **No infrastructure**: No Cloudflare Tunnel required + +--- + +## Comparison: Approaches + +| Approach | Status | Latency | Complexity | Works? | +|----------|--------|---------|------------|--------| +| WebSocket Proxy over WebRTC | Tried, abandoned | High | High | ❌ No | +| Cloudflare Tunnel | Current workaround | Low | Medium | ✅ Yes | +| Dual Peer Connection | **Not tried** | **Low** | **Medium** | **❓ Should work** | + +--- + +## References + +### Official Documentation +- Sendspin Protocol: https://www.sendspin-audio.com/ +- Music Assistant API: https://www.music-assistant.io/api/ +- GitHub: https://github.com/Sendspin +- GitHub: https://github.com/music-assistant/server + +### Implementation Examples +- **sendspin-js**: TypeScript client with WebRTC support +- **aiosendspin**: Python async client library +- **MA web player**: Browser-based player using WebRTC for remote + +### Key Features (MA 2.7+) +- WebRTC remote streaming via Nabu Casa infrastructure +- Sample-accurate multi-room sync (<0.05ms deviation) +- Multiple device roles (player, controller, display, visualizer) +- Open, royalty-free protocol + +--- + +## Next Steps + +### To Implement Dual Peer Connection: + +1. ✅ **Research complete** - Architecture understood +2. ⏳ **Verify API endpoints** - Test `sendspin/ice_servers` and `sendspin/connect` +3. ⏳ **Prototype second peer connection** - Minimal implementation +4. ⏳ **Test audio streaming** - Verify PCM/Opus over WebRTC DataChannel +5. ⏳ **Integration** - Connect to existing Sendspin service +6. ⏳ **Testing** - Remote connection stability, audio quality +7. ⏳ **Documentation** - Update implementation docs + +### Questions to Answer: + +1. **Exact API format**: What's the precise request/response for `sendspin/connect`? +2. **ICE candidate exchange**: How are candidates sent through API channel? +3. **Audio codec**: PCM or Opus over WebRTC? Same format as WebSocket? +4. **Synchronization**: Does time sync work over WebRTC DataChannel? +5. **Error handling**: Reconnection strategy for second peer connection? + +--- + +## Conclusion + +The upstream response reveals a critical architectural misunderstanding. Sendspin DOES support WebRTC DataChannels natively - the correct implementation uses: + +- **Two separate peer connections** +- **Signaling through the first (API) connection** +- **Direct audio streaming over the second connection** + +This has never been tested. The WebSocket proxy approach was a dead end. The dual peer connection architecture aligns with Music Assistant's official web player and should be the path forward for remote audio playback. + +**Key Takeaway:** The code comments claiming these APIs don't exist were incorrect. They do exist, and implementing them properly would enable full remote playback functionality without requiring external infrastructure like Cloudflare Tunnel. diff --git a/lib/providers/music_assistant_provider.dart b/lib/providers/music_assistant_provider.dart index 58bb7074..2d65445e 100644 --- a/lib/providers/music_assistant_provider.dart +++ b/lib/providers/music_assistant_provider.dart @@ -1806,7 +1806,25 @@ class MusicAssistantProvider with ChangeNotifier { final playerId = await DeviceIdService.getOrCreateDevicePlayerId(); _logger.log('Sendspin: Player ID: $playerId'); - // Parse server URL to determine connection strategy + // Check if we're in remote mode (WebRTC connection) + final isRemoteMode = _serverUrl == 'wss://remote.music-assistant.io'; + + if (isRemoteMode) { + _logger.log('🌐 Sendspin: Remote mode detected, using WebRTC DataChannel for audio'); + + // Use WebRTC transport for audio (second peer connection) + final connected = await _sendspinService!.connectViaWebRTC(_api!); + if (connected) { + _sendspinConnected = true; + _logger.log('✅ Sendspin: Connected via WebRTC DataChannel'); + return true; + } + + _logger.log('❌ Sendspin: WebRTC connection failed'); + return false; + } + + // Parse server URL to determine connection strategy for local mode final serverUri = Uri.parse(_serverUrl!.startsWith('http') ? _serverUrl! : 'https://$_serverUrl'); diff --git a/lib/services/music_assistant_api.dart b/lib/services/music_assistant_api.dart index 409e2171..98828fd2 100644 --- a/lib/services/music_assistant_api.dart +++ b/lib/services/music_assistant_api.dart @@ -2835,15 +2835,81 @@ class MusicAssistantAPI { return _eventStreams['sendspin_player']!.stream; } - // NOTE: The following Sendspin API methods were removed because they don't exist in MA: - // - getSendspinConnectionInfo (sendspin/connection_info) - // - sendspinOffer (sendspin/webrtc_offer) - // - sendspinAnswer (sendspin/webrtc_answer) - // - sendspinIceCandidate (sendspin/ice_candidate) + // ============================================================================ + // SENDSPIN WEBRTC SIGNALING (for remote connections) + // ============================================================================ + // For remote connections, Sendspin audio uses a second WebRTC peer connection. + // Signaling for this connection happens through the MA API data channel. // - // Sendspin connection is handled directly via WebSocket to: - // - Local: ws://{server-ip}:8927/sendspin - // - External: wss://{server}/sendspin (via MA's proxy with auth) + // Local connections still use WebSocket: ws://{server-ip}:8927/sendspin + // Remote connections use WebRTC DataChannel signaled through these methods: + + /// Get ICE servers for Sendspin WebRTC connection + /// Returns STUN/TURN server configuration for establishing audio peer connection + Future>> getSendspinIceServers() async { + try { + _logger.log('Sendspin: Requesting ICE servers for WebRTC audio connection'); + final result = await _sendCommand('sendspin/ice_servers'); + + if (result is Map && result.containsKey('ice_servers')) { + final servers = result['ice_servers'] as List; + return servers.cast>(); + } + + // Fallback ICE servers if server doesn't provide them + _logger.log('Sendspin: Using fallback ICE servers'); + return [ + {'urls': 'stun:stun.l.google.com:19302'}, + {'urls': 'stun:stun.cloudflare.com:3478'}, + ]; + } catch (e) { + _logger.log('Sendspin: Error getting ICE servers: $e'); + // Return fallback servers on error + return [ + {'urls': 'stun:stun.l.google.com:19302'}, + {'urls': 'stun:stun.cloudflare.com:3478'}, + ]; + } + } + + /// Send SDP offer and receive answer for Sendspin WebRTC connection + /// Establishes the second peer connection for audio streaming + Future> sendspinConnect(Map offer) async { + try { + _logger.log('Sendspin: Sending SDP offer for WebRTC audio connection'); + final result = await _sendCommand( + 'sendspin/connect', + args: {'sdp': offer}, + ); + + if (result is Map && result.containsKey('sdp')) { + _logger.log('Sendspin: Received SDP answer'); + return result['sdp'] as Map; + } + + throw Exception('Invalid response from sendspin/connect'); + } catch (e) { + _logger.log('Sendspin: Error in sendspin/connect: $e'); + rethrow; + } + } + + /// Send ICE candidate to server for Sendspin WebRTC connection + Future sendspinIceCandidate(Map candidate) async { + try { + await _sendCommand( + 'sendspin/ice_candidate', + args: {'candidate': candidate}, + ); + } catch (e) { + _logger.log('Sendspin: Error sending ICE candidate: $e'); + // Non-fatal, don't rethrow + } + } + + // ============================================================================ + // END SENDSPIN WEBRTC SIGNALING + // ============================================================================ /// Update Sendspin player state /// Similar to updateBuiltinPlayerState but for Sendspin protocol diff --git a/lib/services/remote/webrtc_sendspin_transport.dart b/lib/services/remote/webrtc_sendspin_transport.dart new file mode 100644 index 00000000..94ee32b5 --- /dev/null +++ b/lib/services/remote/webrtc_sendspin_transport.dart @@ -0,0 +1,357 @@ +/// WebRTC Sendspin Transport +/// +/// Manages the second peer connection specifically for Sendspin audio streaming. +/// This is separate from the MA API connection and is signaled through the API +/// data channel using sendspin/ice_servers and sendspin/connect endpoints. +/// +/// Architecture: +/// - Connection 1 (MA API): Already established via WebRTCTransport +/// - Connection 2 (Sendspin): This transport, signaled through Connection 1 +/// +/// The signaling happens via the MA API connection, not the signaling server. + +import 'dart:async'; +import 'dart:typed_data'; +import 'package:flutter_webrtc/flutter_webrtc.dart'; +import '../debug_logger.dart'; +import '../music_assistant_api.dart'; + +/// State of the Sendspin WebRTC connection +enum SendspinWebRTCState { + disconnected, + connecting, + connected, + reconnecting, + failed, +} + +/// Configuration for WebRTC Sendspin transport +class WebRTCSendspinConfig { + final List> iceServers; + final bool reconnect; + final int reconnectDelay; + final int maxReconnectDelay; + + WebRTCSendspinConfig({ + required this.iceServers, + this.reconnect = true, + this.reconnectDelay = 1000, + this.maxReconnectDelay = 30000, + }); +} + +/// WebRTC transport for Sendspin audio streaming +/// This handles the second peer connection for audio data +class WebRTCSendspinTransport { + final MusicAssistantAPI api; + final _logger = DebugLogger(); + + RTCPeerConnection? _peerConnection; + RTCDataChannel? _dataChannel; + + SendspinWebRTCState _state = SendspinWebRTCState.disconnected; + SendspinWebRTCState get state => _state; + + final _stateController = StreamController.broadcast(); + Stream get stateStream => _stateController.stream; + + // Message streams + final _textMessageController = StreamController.broadcast(); + Stream get textMessageStream => _textMessageController.stream; + + final _binaryMessageController = StreamController.broadcast(); + Stream get binaryMessageStream => _binaryMessageController.stream; + + final _errorController = StreamController.broadcast(); + Stream get errorStream => _errorController.stream; + + final List _iceCandidateBuffer = []; + bool _remoteDescriptionSet = false; + bool _intentionalClose = false; + Timer? _reconnectTimer; + int _reconnectAttempts = 0; + + List> _iceServers = []; + + WebRTCSendspinTransport(this.api); + + /// Connect and establish the Sendspin audio peer connection + Future connect() async { + _intentionalClose = false; + _setState(SendspinWebRTCState.connecting); + _logger.log('[SendspinWebRTC] Starting connection for audio streaming'); + + try { + // Step 1: Get ICE servers from MA API + _logger.log('[SendspinWebRTC] Requesting ICE servers from MA API'); + _iceServers = await api.getSendspinIceServers(); + _logger.log('[SendspinWebRTC] Received ${_iceServers.length} ICE servers'); + + // Step 2: Create peer connection + await _createPeerConnection(); + + // Step 3: Create data channel for audio + await _createDataChannel(); + + // Step 4: Create and send SDP offer + _logger.log('[SendspinWebRTC] Creating SDP offer'); + final offer = await _peerConnection!.createOffer(); + await _peerConnection!.setLocalDescription(offer); + + // Step 5: Send offer to MA server via API and get answer + _logger.log('[SendspinWebRTC] Sending SDP offer to MA API'); + final answer = await api.sendspinConnect({ + 'type': offer.type, + 'sdp': offer.sdp, + }); + + // Step 6: Set remote description (answer from server) + _logger.log('[SendspinWebRTC] Setting remote description from answer'); + final remoteDesc = RTCSessionDescription( + answer['sdp'] as String, + answer['type'] as String, + ); + await _peerConnection!.setRemoteDescription(remoteDesc); + _remoteDescriptionSet = true; + + // Step 7: Add any buffered ICE candidates + if (_iceCandidateBuffer.isNotEmpty) { + _logger.log('[SendspinWebRTC] Adding ${_iceCandidateBuffer.length} buffered ICE candidates'); + for (final candidate in _iceCandidateBuffer) { + await _peerConnection!.addCandidate(candidate); + } + _iceCandidateBuffer.clear(); + } + + // Wait for connection to be established + await _waitForConnection(); + + _reconnectAttempts = 0; + _setState(SendspinWebRTCState.connected); + _logger.log('[SendspinWebRTC] Audio connection established successfully'); + } catch (e) { + _logger.log('[SendspinWebRTC] Connection failed: $e'); + _cleanup(); + _setState(SendspinWebRTCState.failed); + rethrow; + } + } + + /// Create the WebRTC peer connection + Future _createPeerConnection() async { + final configuration = { + 'iceServers': _iceServers, + 'sdpSemantics': 'unified-plan', + }; + + _logger.log('[SendspinWebRTC] Creating peer connection'); + _peerConnection = await createPeerConnection(configuration); + + // Handle ICE candidates - send them to MA server via API + _peerConnection!.onIceCandidate = (candidate) { + if (candidate.candidate != null) { + _logger.log('[SendspinWebRTC] Local ICE candidate generated'); + // Send to MA server via API + api.sendspinIceCandidate({ + 'candidate': candidate.candidate, + 'sdpMid': candidate.sdpMid, + 'sdpMLineIndex': candidate.sdpMLineIndex, + }).catchError((e) { + _logger.log('[SendspinWebRTC] Error sending ICE candidate: $e'); + }); + } + }; + + // Handle ICE connection state changes + _peerConnection!.onIceConnectionState = (state) { + _logger.log('[SendspinWebRTC] ICE connection state: $state'); + + if (state == RTCIceConnectionState.RTCIceConnectionStateFailed || + state == RTCIceConnectionState.RTCIceConnectionStateDisconnected) { + if (!_intentionalClose) { + _scheduleReconnect(); + } + } + }; + + // Handle connection state changes + _peerConnection!.onConnectionState = (state) { + _logger.log('[SendspinWebRTC] Peer connection state: $state'); + + if (state == RTCPeerConnectionState.RTCPeerConnectionStateFailed) { + _logger.log('[SendspinWebRTC] Connection failed, scheduling reconnect'); + if (!_intentionalClose) { + _scheduleReconnect(); + } + } + }; + } + + /// Create data channel for Sendspin protocol + Future _createDataChannel() async { + final dataChannelConfig = RTCDataChannelInit() + ..ordered = true + ..maxRetransmits = -1; // Reliable channel + + _dataChannel = await _peerConnection!.createDataChannel( + 'sendspin', + dataChannelConfig, + ); + + _dataChannel!.onDataChannelState = (state) { + _logger.log('[SendspinWebRTC] Data channel state: $state'); + + if (state == RTCDataChannelState.RTCDataChannelOpen) { + _logger.log('[SendspinWebRTC] Data channel opened for audio'); + } else if (state == RTCDataChannelState.RTCDataChannelClosed) { + _logger.log('[SendspinWebRTC] Data channel closed'); + if (!_intentionalClose) { + _scheduleReconnect(); + } + } + }; + + // Handle incoming messages + _dataChannel!.onMessage = (message) { + if (message.isBinary) { + // Binary audio data + final data = message.binary; + if (!_binaryMessageController.isClosed) { + _binaryMessageController.add(data); + } + } else if (message.text != null) { + // JSON control messages + if (!_textMessageController.isClosed) { + _textMessageController.add(message.text); + } + } + }; + } + + /// Wait for data channel to open + Future _waitForConnection() async { + final completer = Completer(); + final timeout = Timer(const Duration(seconds: 30), () { + if (!completer.isCompleted) { + completer.completeError(Exception('Connection timeout')); + } + }); + + StreamSubscription? subscription; + subscription = _dataChannel!.stateChangeStream?.listen((state) { + if (state == RTCDataChannelState.RTCDataChannelOpen) { + timeout.cancel(); + subscription?.cancel(); + if (!completer.isCompleted) { + completer.complete(); + } + } + }); + + return completer.future; + } + + /// Handle incoming ICE candidate from server (via API event) + Future handleRemoteIceCandidate(Map candidateData) async { + _logger.log('[SendspinWebRTC] Received remote ICE candidate'); + + try { + final candidate = RTCIceCandidate( + candidateData['candidate'] as String?, + candidateData['sdpMid'] as String?, + candidateData['sdpMLineIndex'] as int?, + ); + + if (_remoteDescriptionSet) { + await _peerConnection!.addCandidate(candidate); + } else { + // Buffer candidates until remote description is set + _iceCandidateBuffer.add(candidate); + } + } catch (e) { + _logger.log('[SendspinWebRTC] Error handling ICE candidate: $e'); + } + } + + /// Send a text message (JSON) through the data channel + void sendText(String message) { + if (_dataChannel == null || _dataChannel!.state != RTCDataChannelState.RTCDataChannelOpen) { + throw Exception('Data channel is not open'); + } + _dataChannel!.send(RTCDataChannelMessage(message)); + } + + /// Send binary data through the data channel + void sendBinary(Uint8List data) { + if (_dataChannel == null || _dataChannel!.state != RTCDataChannelState.RTCDataChannelOpen) { + throw Exception('Data channel is not open'); + } + _dataChannel!.send(RTCDataChannelMessage.fromBinary(data)); + } + + /// Disconnect the audio connection + void disconnect() { + _intentionalClose = true; + _clearReconnectTimer(); + _cleanup(); + _setState(SendspinWebRTCState.disconnected); + _logger.log('[SendspinWebRTC] Disconnected'); + } + + /// Schedule reconnection attempt + void _scheduleReconnect() { + if (_intentionalClose || _state == SendspinWebRTCState.reconnecting) { + return; + } + + _clearReconnectTimer(); + _setState(SendspinWebRTCState.reconnecting); + + _reconnectAttempts++; + final delay = 1000 * _reconnectAttempts; + final clampedDelay = delay.clamp(1000, 30000); + + _logger.log('[SendspinWebRTC] Scheduling reconnect attempt $_reconnectAttempts in ${clampedDelay}ms'); + + _reconnectTimer = Timer(Duration(milliseconds: clampedDelay), () { + if (!_intentionalClose) { + _logger.log('[SendspinWebRTC] Attempting reconnect...'); + connect().catchError((e) { + _logger.log('[SendspinWebRTC] Reconnect failed: $e'); + }); + } + }); + } + + void _clearReconnectTimer() { + _reconnectTimer?.cancel(); + _reconnectTimer = null; + } + + void _cleanup() { + _dataChannel?.close(); + _dataChannel = null; + + _peerConnection?.close(); + _peerConnection = null; + + _iceCandidateBuffer.clear(); + _remoteDescriptionSet = false; + } + + void _setState(SendspinWebRTCState newState) { + _state = newState; + if (!_stateController.isClosed) { + _stateController.add(newState); + } + } + + /// Dispose all resources + void dispose() { + disconnect(); + _stateController.close(); + _textMessageController.close(); + _binaryMessageController.close(); + _errorController.close(); + } +} diff --git a/lib/services/sendspin_service.dart b/lib/services/sendspin_service.dart index 3d15bc19..d0099847 100644 --- a/lib/services/sendspin_service.dart +++ b/lib/services/sendspin_service.dart @@ -7,6 +7,8 @@ import 'package:web_socket_channel/io.dart'; import 'debug_logger.dart'; import 'settings_service.dart'; import 'device_id_service.dart'; +import 'music_assistant_api.dart'; +import 'remote/webrtc_sendspin_transport.dart'; /// Connection state for Sendspin player enum SendspinConnectionState { @@ -26,21 +28,25 @@ typedef SendspinAudioDataCallback = void Function(Uint8List audioData); typedef SendspinStreamStartCallback = void Function(Map? trackInfo); typedef SendspinStreamEndCallback = void Function(); -/// Service to manage Sendspin WebSocket connection for local playback +/// Service to manage Sendspin connection for local and remote playback /// Sendspin is the replacement for builtin_player in MA 2.7.0b20+ /// -/// Connection strategy (smart fallback for external access): -/// 1. If server is HTTPS, try external wss://{server}/sendspin first -/// 2. Fall back to local_ws_url from API (ws://local-ip:8927/sendspin) -/// 3. WebRTC fallback as last resort (requires TURN servers) +/// Connection strategy: +/// 1. Local: WebSocket to ws://{server-ip}:8927/sendspin +/// 2. Remote: WebRTC DataChannel (second peer connection) for audio class SendspinService { final String serverUrl; final _logger = DebugLogger(); + // WebSocket mode (local connections) WebSocketChannel? _channel; Timer? _heartbeatTimer; Timer? _reconnectTimer; + // WebRTC mode (remote connections) + WebRTCSendspinTransport? _webrtcTransport; + bool _useWebRTC = false; + // Connection state SendspinConnectionState _state = SendspinConnectionState.disconnected; SendspinConnectionState get state => _state; @@ -194,6 +200,98 @@ class SendspinService { } } + /// Connect via WebRTC for remote connections + /// Uses second peer connection signaled through MA API + Future connectViaWebRTC(MusicAssistantAPI api) async { + if (_isDisposed) return false; + if (_state == SendspinConnectionState.connected) return true; + + // Deduplicate concurrent connection attempts + if (_connectionInProgress != null) { + _logger.log('Sendspin: Connection already in progress, waiting...'); + return _connectionInProgress!.future; + } + _connectionInProgress = Completer(); + + _updateState(SendspinConnectionState.connecting); + + try { + _playerId = await DeviceIdService.getOrCreateDevicePlayerId(); + _playerName = await SettingsService.getLocalPlayerName(); + + _logger.log('Sendspin: Connecting via WebRTC as "$_playerName" (ID: $_playerId)'); + + // Create WebRTC transport for second peer connection + _webrtcTransport = WebRTCSendspinTransport(api); + _useWebRTC = true; + + // Set up message handlers + _webrtcTransport!.textMessageStream.listen(_handleTextMessage); + _webrtcTransport!.binaryMessageStream.listen(_handleBinaryMessage); + + // Connect the peer connection + await _webrtcTransport!.connect(); + + // Send client/hello message + _logger.log('Sendspin: Sending client/hello via WebRTC'); + _sendMessageWebRTC({ + 'type': 'client/hello', + 'payload': { + 'client_id': _playerId, + 'name': _playerName, + 'version': 1, + 'supported_roles': ['player@v1'], + 'player_support': { + 'supported_formats': [ + { + 'codec': 'pcm', + 'channels': 2, + 'sample_rate': 48000, + 'bit_depth': 16, + }, + ], + 'buffer_capacity': 1048576, + 'supported_commands': ['volume', 'mute'], + }, + }, + }); + + // Wait for server acknowledgment + final ackReceived = await _waitForAck().timeout( + const Duration(seconds: 5), + onTimeout: () => false, + ); + + if (!ackReceived) { + _logger.log('Sendspin: No acknowledgment from server after hello (WebRTC)'); + _webrtcTransport?.disconnect(); + _webrtcTransport = null; + _useWebRTC = false; + _updateState(SendspinConnectionState.error); + _connectionInProgress?.complete(false); + _connectionInProgress = null; + return false; + } + + _logger.log('Sendspin: Connected via WebRTC and registered successfully'); + _updateState(SendspinConnectionState.connected); + _startHeartbeat(); + + _connectionInProgress?.complete(true); + _connectionInProgress = null; + return true; + } catch (e) { + _logger.log('Sendspin: WebRTC connection error: $e'); + _webrtcTransport?.disconnect(); + _webrtcTransport = null; + _useWebRTC = false; + _updateState(SendspinConnectionState.error); + _connectionInProgress?.complete(false); + _connectionInProgress = null; + return false; + } + } + /// Attempt to connect to a specific WebSocket URL /// If useProxyAuth is true and we have an auth token, authenticate first (for MA 2.7.1+ proxy) Future _tryConnect(String url, {Duration timeout = const Duration(seconds: 5), bool useProxyAuth = false}) async { @@ -515,6 +613,11 @@ class SendspinService { /// Send a JSON message to the server /// allowDuringHandshake: set to true to send messages before connection is established (e.g., hello) void _sendMessage(Map message, {bool allowDuringHandshake = false}) { + if (_useWebRTC) { + _sendMessageWebRTC(message); + return; + } + if (_channel == null) return; // During handshake, we need to send hello even though state is still 'connecting' @@ -529,6 +632,135 @@ class SendspinService { } } + /// Send message via WebRTC DataChannel + void _sendMessageWebRTC(Map message) { + if (_webrtcTransport == null) return; + + try { + final json = jsonEncode(message); + _logger.log('Sendspin: Sending WebRTC message: ${message['type']}'); + _webrtcTransport!.sendText(json); + } catch (e) { + _logger.log('Sendspin: Error sending WebRTC message: $e'); + } + } + + /// Handle text message from WebRTC + void _handleTextMessage(String message) { + try { + final data = jsonDecode(message) as Map; + final type = data['type'] as String?; + + _logger.log('Sendspin: Received WebRTC message type: $type'); + + // Reuse existing message handler logic + switch (type) { + case 'server/hello': + _logger.log('Sendspin: Received server/hello via WebRTC'); + final payload = data['payload'] as Map?; + if (payload != null) { + _logger.log('Sendspin: Server name: ${payload['name']}, version: ${payload['version']}'); + } + if (_ackCompleter != null && !_ackCompleter!.isCompleted) { + _ackCompleter!.complete(true); + } + _sendInitialStateWebRTC(); + _sendClientTimeWebRTC(); + break; + + case 'server/time': + _logger.log('Sendspin: Received server/time response (WebRTC)'); + break; + + case 'group/update': + _logger.log('Sendspin: Received group/update (WebRTC)'); + break; + + case 'stream/start': + _logger.log('Sendspin: Audio stream starting (WebRTC)'); + _isStreamingAudio = true; + _audioFramesReceived = 0; + _isPlaying = true; + _isPaused = false; + final streamPayload = data['payload'] as Map?; + onStreamStart?.call(streamPayload); + break; + + case 'stream/end': + _logger.log('Sendspin: Audio stream ended (WebRTC, received $_audioFramesReceived frames)'); + _isStreamingAudio = false; + _isPlaying = false; + onStreamEnd?.call(); + break; + + case 'pause': + _isPaused = true; + _isPlaying = false; + onPause?.call(); + break; + + case 'stop': + _isPlaying = false; + _isPaused = false; + _isStreamingAudio = false; + onStop?.call(); + break; + + case 'seek': + final position = data['position'] as int?; + if (position != null) { + _position = position; + onSeek?.call(position); + } + break; + + case 'volume': + final level = data['level'] as int?; + if (level != null) { + _volume = level; + onVolume?.call(level); + } + break; + + default: + _logger.log('Sendspin: Unknown WebRTC message type: $type'); + } + } catch (e) { + _logger.log('Sendspin: Error handling WebRTC text message: $e'); + } + } + + /// Handle binary message from WebRTC + void _handleBinaryMessage(Uint8List message) { + _handleBinaryAudioData(message); + } + + /// Send initial state via WebRTC + void _sendInitialStateWebRTC() { + _logger.log('Sendspin: Sending initial client/state (WebRTC)'); + _sendMessageWebRTC({ + 'type': 'client/state', + 'payload': { + 'player': { + 'state': 'synchronized', + 'volume': _volume, + 'muted': _isMuted, + }, + }, + }); + } + + /// Send client/time via WebRTC + void _sendClientTimeWebRTC() { + final timestampMicroseconds = DateTime.now().microsecondsSinceEpoch; + _sendMessageWebRTC({ + 'type': 'client/time', + 'payload': { + 'client_transmitted': timestampMicroseconds, + }, + }); + } + /// Report current player state to server /// Uses Sendspin protocol 'client/state' message format void reportState({ @@ -665,7 +897,18 @@ class SendspinService { _stopHeartbeat(); _reconnectTimer?.cancel(); - if (_channel != null) { + if (_useWebRTC && _webrtcTransport != null) { + // Send graceful goodbye via WebRTC + _sendMessageWebRTC({ + 'type': 'client/goodbye', + 'payload': { + 'reason': 'user_request', + }, + }); + _webrtcTransport!.disconnect(); + _webrtcTransport = null; + _useWebRTC = false; + } else if (_channel != null) { // Send graceful goodbye per Sendspin protocol _sendMessage({ 'type': 'client/goodbye', @@ -686,6 +929,7 @@ class SendspinService { _stopHeartbeat(); _reconnectTimer?.cancel(); _channel?.sink.close(); + _webrtcTransport?.dispose(); _stateController.close(); _audioDataController.close(); }