Skip to content
This repository was archived by the owner on May 9, 2023. It is now read-only.
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
77 changes: 47 additions & 30 deletions custom/shared/components/Audio/Audio.js
Original file line number Diff line number Diff line change
Expand Up @@ -7,56 +7,73 @@
* and Chrome's maximum media elements. On Chrome we add all audio tracks
into a single audio node using the CombinedAudioTrack component
*/
import React, { useEffect, useMemo } from 'react';
import React, { useEffect, useMemo, useState } from 'react';
import { useTracks } from '@custom/shared/contexts/TracksProvider';
import Bowser from 'bowser';
import { isSafari } from '@custom/shared/lib/browserConfig';
import { Portal } from 'react-portal';
import { useDeepCompareEffect } from 'use-deep-compare';
import { useCallState } from '../../contexts/CallProvider';
import { isScreenId } from '../../contexts/participantsState';
import { useNetworkState } from '../../hooks/useNetworkState';
import AudioTrack from './AudioTrack';
import CombinedAudioTrack from './CombinedAudioTrack';
import { WebAudioTracks } from './WebAudioTracks';

export const Audio = () => {
const { disableAudio } = useCallState();
const { audioTracks } = useTracks();
const [renderedTracks, setRenderedTracks] = useState({});
const [isClient, setIsClient] = useState(false);
const { topology } = useNetworkState();

const renderedTracks = useMemo(
() =>
Object.entries(audioTracks).reduce(
(tracks, [id, track]) => ({ ...tracks, [id]: track }),
{}
),
[audioTracks]
);
useEffect(() => {
setIsClient(true);
}, []);

useDeepCompareEffect(() => {
const newTracks = Object.entries(audioTracks).reduce(
(tracks, [id, track]) => {
if (!disableAudio || isScreenId(id)) {
tracks[id] = track;
}
return tracks;
},
{}
);
setRenderedTracks(newTracks);
}, [audioTracks, disableAudio]);

// On iOS safari, when headphones are disconnected, all audio elements are paused.
// This means that when a user disconnects their headphones, that user will not
// be able to hear any other users until they mute/unmute their mics.
// To fix that, we call `play` on each audio track on all devicechange events.
useEffect(() => {
const playTracks = () => {
document.querySelectorAll('.audioTracks audio').forEach(async (audio) => {
try {
if (audio.paused && audio.readyState === audio.HAVE_ENOUGH_DATA) {
await audio?.play();
document
.querySelectorAll('.audioTracks audio')
.forEach(async (audio) => {
try {
if (audio.paused && audio.readyState === audio.HAVE_ENOUGH_DATA) {
await audio?.play();
}
} catch (e) {
// Auto play failed
}
} catch (e) {
// Auto play failed
}
});
});
};

navigator.mediaDevices.addEventListener('devicechange', playTracks);
return () => {
navigator.mediaDevices.removeEventListener('devicechange', playTracks);
};
}, []);

const tracksComponent = useMemo(() => {
const { browser } = Bowser.parse(navigator.userAgent);
if (browser.name === 'Chrome' && parseInt(browser.version, 10) >= 92) {
return <CombinedAudioTrack tracks={renderedTracks} />;
if (isSafari() || topology === 'peer') {
return Object.entries(renderedTracks).map(([id, track]) => (
<AudioTrack key={id} track={track.persistentTrack} />
));
}
return Object.entries(renderedTracks).map(([id, track]) => (
<AudioTrack key={id} track={track.persistentTrack} />
));
}, [renderedTracks]);
return <WebAudioTracks />;
}, [renderedTracks, topology]);

// Only render audio tracks in browser
if (!isClient) return null;

return (
<Portal key="AudioTracks">
Expand Down
67 changes: 0 additions & 67 deletions custom/shared/components/Audio/CombinedAudioTrack.js

This file was deleted.

81 changes: 81 additions & 0 deletions custom/shared/components/Audio/WebAudioTracks.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,81 @@
import React, { memo, useEffect, useRef } from 'react';
import { useCallback } from 'react';

import { useCallState } from '../../contexts/CallProvider';
import { Loopback } from '../../lib/loopback';

const WAT = () => {
const { callObject } = useCallState();
const audioEl = useRef(null);
const audioCtx = useRef(null);
const destination = useRef(null);
const inputNodes = useRef({});

useEffect(() => {
if (!audioEl.current) return;
const AC = AudioContext || webkitAudioContext;
audioCtx.current = window.audioContext ?? new AC();
// Workaround as long as MediaDevices.selectAudioOutput is not supported
// https://developer.mozilla.org/en-US/docs/Web/API/MediaDevices/selectAudioOutput
destination.current = audioCtx.current.createMediaStreamDestination();
// Expose map of connected nodes for Debugger
window['dailyAudioNodes'] = inputNodes.current;

let loopback;
async function setupLoopback() {
loopback = new Loopback();
await loopback.start(destination.current.stream);
const loopbackStream = loopback.getLoopback();
audioEl.current.srcObject = loopbackStream;
audioEl.current.play();
}

setupLoopback();

return () => loopback.destroy();
}, []);

const handleTrackStarted = useCallback(async ({ participant, track }) => {
if (track.kind !== 'audio' || participant?.local) return;
if (Object.keys(inputNodes.current).includes(track.id)) return;
const mediaStream = new MediaStream([track]);
const node = new MediaStreamAudioSourceNode(audioCtx.current, {
mediaStream,
});

// Apparently this is required due to a Chromium bug!
// https://bugs.chromium.org/p/chromium/issues/detail?id=933677
// https://stackoverflow.com/questions/55703316/audio-from-rtcpeerconnection-is-not-audible-after-processing-in-audiocontext
const mutedAudio = new Audio();
mutedAudio.muted = true;
mutedAudio.srcObject = mediaStream;
mutedAudio.play();

inputNodes.current[track.id] = node;
node.connect(destination.current);
}, []);

const handleTrackStopped = useCallback(({ participant, track }) => {
if (track.kind !== 'audio' || participant?.local) return;
const node = inputNodes.current?.[track.id];
if (!node) return;
node.disconnect();
delete inputNodes.current[track.id];
}, []);

useEffect(() => {
if (!callObject) return;

callObject.on('track-started', handleTrackStarted);
callObject.on('track-stopped', handleTrackStopped);

return () => {
callObject.off('track-started', handleTrackStarted);
callObject.off('track-stopped', handleTrackStopped);
};
}, [callObject, handleTrackStarted, handleTrackStopped]);

return <audio autoPlay playsInline ref={audioEl} />;
};

export const WebAudioTracks = memo(WAT);
2 changes: 1 addition & 1 deletion custom/shared/components/Audio/index.js
Original file line number Diff line number Diff line change
@@ -1,3 +1,3 @@
export { Audio } from './Audio';
export { AudioTrack } from './AudioTrack';
export { CombinedAudioTrack } from './CombinedAudioTrack';
export { WebAudioTracks } from './WebAudioTracks';
2 changes: 1 addition & 1 deletion custom/shared/components/ParticipantBar/ParticipantBar.js
Original file line number Diff line number Diff line change
Expand Up @@ -321,7 +321,7 @@ export const ParticipantBar = ({
key={key}
aspectRatio={aspectRatio}
participant={item}
network={networkState}
network={networkState.threshold}
/>
);
})}
Expand Down
2 changes: 1 addition & 1 deletion custom/shared/contexts/ParticipantsProvider.js
Original file line number Diff line number Diff line change
Expand Up @@ -252,7 +252,7 @@ export const ParticipantsProvider = ({ children }) => {

if (
// weak or bad network
([VIDEO_QUALITY_LOW, VIDEO_QUALITY_VERY_LOW].includes(networkState) &&
([VIDEO_QUALITY_LOW, VIDEO_QUALITY_VERY_LOW].includes(networkState.threshold) &&
videoQuality === VIDEO_QUALITY_AUTO) ||
// Low quality or Bandwidth saver mode enabled
[VIDEO_QUALITY_BANDWIDTH_SAVER, VIDEO_QUALITY_LOW].includes(
Expand Down
19 changes: 16 additions & 3 deletions custom/shared/hooks/useNetworkState.js
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@ export const useNetworkState = (
quality = VIDEO_QUALITY_HIGH
) => {
const [threshold, setThreshold] = useState(NETWORK_STATE_GOOD);
const [topology, setTopology] = useState('peer');

const setQuality = useCallback(
(q) => {
Expand Down Expand Up @@ -84,16 +85,28 @@ export const useNetworkState = (
[setQuality, threshold, quality]
);

const handleNetworkTopology = useCallback((ev) => {
switch (ev.event) {
case 'connected':
if (ev.type === 'peer-to-peer') setTopology('peer');
if (ev.type === 'sfu') setTopology('sfu');
break;
}
}, []);

useEffect(() => {
if (!callObject) return false;
callObject.on('network-quality-change', handleNetworkQualityChange);
return () =>
callObject.on('network-connection', handleNetworkTopology);
return () => {
callObject.off('network-quality-change', handleNetworkQualityChange);
}, [callObject, handleNetworkQualityChange]);
callObject.off('network-connection', handleNetworkTopology);
}
}, [callObject, handleNetworkQualityChange, handleNetworkTopology]);

useEffect(() => {
setQuality(quality);
}, [quality, setQuality]);

return threshold;
return { threshold, topology };
};
33 changes: 33 additions & 0 deletions custom/shared/lib/browserConfig.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,33 @@
import Bowser from 'bowser';

/**
 * Detect Chrome running on an Android mobile device.
 * @param {string} [browserName] - Optional browser-name override; when
 *   omitted, the name is taken from the parsed user agent.
 * @returns {boolean} true when the UA is mobile Android Chrome.
 */
export const isAndroidChrome = (browserName) => {
  const parsed = Bowser.parse(navigator.userAgent);
  const effectiveName = browserName ?? parsed?.browser?.name;
  if (effectiveName !== 'Chrome') return false;
  if (parsed?.platform?.type !== 'mobile') return false;
  return parsed?.os?.name === 'Android';
};

/**
 * Detect a touch-capable Apple device.
 * Combines the parsed UA vendor with runtime touch-capability checks.
 * @returns {boolean} true when the vendor is Apple and the device
 *   supports touch input.
 */
export const isIOSMobile = () => {
  const { platform } = Bowser.parse(navigator.userAgent);
  const isAppleVendor = platform?.vendor === 'Apple';
  const supportsTouch =
    navigator.maxTouchPoints > 0 && typeof TouchEvent !== 'undefined';
  return isAppleVendor && supportsTouch;
};

/**
 * Detect Safari, optionally constrained to a major-version range.
 * @param {number} [minVersion=1] - Inclusive minimum major version.
 * @param {number} [maxVersion=100] - Inclusive maximum major version.
 * @returns {boolean} true when the browser is Safari (and, if a version
 *   could be parsed, its major version lies within the range).
 */
export const isSafari = (minVersion = 1, maxVersion = 100) => {
  const browser = Bowser.parse(navigator.userAgent);
  // Fall back to an empty array: when the version string is missing the
  // optional chain yields `undefined`, and destructuring `undefined`
  // would throw a TypeError before the name-only fallback below runs.
  const [major] =
    browser?.browser?.version?.split('.').map((n) => parseInt(n, 10)) ?? [];
  // No parseable major version (missing or NaN): name-only check.
  if (!major) return browser?.browser?.name === 'Safari';
  return (
    browser?.browser?.name === 'Safari' &&
    major >= minVersion &&
    major <= maxVersion
  );
};
Loading