|
| 1 | +# Audio Player |
| 2 | + |
| 3 | +The `AudioPlayer` is a small helper that plays audio using a selected output device by pulling frames from your implementation of `AudioSource`. It manages a native `AudioDeviceModule` internally and provides idempotent start/stop.
| 4 | + |
| 5 | +API: `dev.onvoid.webrtc.media.audio.AudioPlayer` |
| 6 | + |
| 7 | +## When to use it |
| 8 | +- You want to render raw PCM audio (generated or decoded by your app) to an OS output device. |
| 9 | +- You need a simple, high‑level start/stop wrapper around WebRTC’s audio playout, without managing an `AudioDeviceModule` yourself.
| 10 | + |
| 11 | +See also: [Audio Device Selection](../audio_devices.md), [Custom Audio Source](../custom_audio_source.md), [Headless Audio](../headless_audio_device_module.md). |
| 12 | + |
| 13 | +## Key concepts |
| 14 | +- Device selection: Provide an `AudioDevice` representing the output device (speaker) before starting. |
| 15 | +- Data supply: Implement `AudioSource` to provide 10 ms PCM frames on demand. |
| 16 | +- Lifecycle: `start()` initializes output and begins pulling; `stop()` halts playout and releases resources. |
| 17 | + |
| 18 | +## Basic usage |
| 19 | + |
| 20 | +```java |
| 21 | +import dev.onvoid.webrtc.media.audio.AudioPlayer; |
| 22 | +import dev.onvoid.webrtc.media.audio.AudioSource; |
| 23 | +import dev.onvoid.webrtc.media.audio.AudioDevice; |
| 24 | +import dev.onvoid.webrtc.media.audio.AudioDeviceModule; |
| 25 | + |
| 26 | +import java.nio.ByteBuffer; |
| 27 | + |
| 28 | +public class TonePlayerExample { |
| 29 | + |
| 30 | + public static void main(String[] args) { |
| 31 | + // Choose a playout device (speaker). Enumerate via a temporary ADM. |
| 32 | + AudioDeviceModule adm = new AudioDeviceModule(); |
| 33 | + AudioDevice speaker = adm.getPlayoutDevices().stream() |
| 34 | + .findFirst() |
| 35 | + .orElseThrow(() -> new IllegalStateException("No playout device found")); |
| 36 | + adm.dispose(); |
| 37 | + |
| 38 | + // Provide 10 ms frames of PCM 16‑bit audio. This example generates a sine tone. |
| 39 | + final int sampleRate = 48000; |
| 40 | + final int channels = 2; |
| 41 | + final int bytesPerSample = channels * 2; // 16‑bit |
| 42 | + final double frequency = 440.0; // A4 |
| 43 | + final double twoPiFDivFs = 2 * Math.PI * frequency / sampleRate; |
| 44 | + final int samplesPer10msPerChannel = sampleRate / 100; // 480 samples/channel |
| 45 | + final int totalSamplesPer10ms = samplesPer10msPerChannel * channels; // e.g., 960 samples |
| 46 | + final double[] phase = new double[] {0.0}; |
| 47 | + |
| 48 | + AudioSource source = (audioSamples, nSamples, nBytesPerSample, nChannels, samplesPerSec) -> { |
| 49 | + // Ensure caller requested matches our configuration |
| 50 | + if (nBytesPerSample != bytesPerSample || nChannels != channels || samplesPerSec != sampleRate) { |
| 51 | + // Fill silence if formats mismatch |
| 52 | + java.util.Arrays.fill(audioSamples, (byte) 0); |
| 53 | + return totalSamplesPer10ms; |
| 54 | + } |
| 55 | + |
| 56 | + // Generate interleaved stereo sine wave |
| 57 | + int idx = 0; |
| 58 | + for (int i = 0; i < samplesPer10msPerChannel; i++) { |
| 59 | + short s = (short) (Math.sin(phase[0]) * 32767); |
| 60 | + // left |
| 61 | + audioSamples[idx++] = (byte) (s & 0xFF); |
| 62 | + audioSamples[idx++] = (byte) ((s >> 8) & 0xFF); |
| 63 | + // right |
| 64 | + audioSamples[idx++] = (byte) (s & 0xFF); |
| 65 | + audioSamples[idx++] = (byte) ((s >> 8) & 0xFF); |
| 66 | + |
| 67 | + phase[0] += twoPiFDivFs; |
| 68 | + |
| 69 | + if (phase[0] > Math.PI * 2) { |
| 70 | + phase[0] -= Math.PI * 2; |
| 71 | + } |
| 72 | + } |
| 73 | + return totalSamplesPer10ms; // number of samples written across all channels |
| 74 | + }; |
| 75 | + |
| 76 | + AudioPlayer player = new AudioPlayer(); |
| 77 | + player.setAudioDevice(speaker); |
| 78 | + player.setAudioSource(source); |
| 79 | + |
| 80 | + player.start(); |
| 81 | + // ... playout running ... |
| 82 | + player.stop(); |
| 83 | + } |
| 84 | +} |
| 85 | +``` |
| 86 | + |
| 87 | +## Data format |
| 88 | +- The player requests 10 ms frames as 16‑bit little‑endian PCM via `AudioSource#onPlaybackData`. |
| 89 | +- Return value must be the number of samples written across all channels for that 10 ms frame. |
| 90 | + |
| 91 | +## Tips |
| 92 | +- If your synthesis/decoder operates at a different rate or channel layout, convert using the [Audio Converter](../audio_converter.md) before writing into the output buffer. |
| 93 | + |
| 94 | +## API reference |
| 95 | +- `setAudioDevice(AudioDevice device)` – choose output device |
| 96 | +- `setAudioSource(AudioSource source)` – provide playout frames |
| 97 | +- `start()` / `stop()` – control the playout lifecycle |
0 commit comments