diff --git a/README.md b/README.md
index a4942c71..ee8a31c8 100644
--- a/README.md
+++ b/README.md
@@ -89,6 +89,7 @@ In your [AppDelegate.m](https://github.com/livekit/client-sdk-react-native/blob/
```objc
#import "LivekitReactNative.h"
+#import "WebRTCModuleOptions.h"
@implementation AppDelegate
@@ -97,6 +98,12 @@ In your [AppDelegate.m](https://github.com/livekit/client-sdk-react-native/blob/
// Place this above any other RN related initialization
[LivekitReactNative setup];
+ // Uncomment the following lines if you want to use the camera in the background
+ // Requires the VoIP background mode and iOS 18+.
+
+ // WebRTCModuleOptions *options = [WebRTCModuleOptions sharedInstance];
+ // options.enableMultitaskingCameraAccess = YES;
+
//...
}
```
diff --git a/example/ios/LivekitReactNativeExample/AppDelegate.mm b/example/ios/LivekitReactNativeExample/AppDelegate.mm
index d616c354..7026f1a7 100644
--- a/example/ios/LivekitReactNativeExample/AppDelegate.mm
+++ b/example/ios/LivekitReactNativeExample/AppDelegate.mm
@@ -12,7 +12,8 @@ - (BOOL)application:(UIApplication *)application didFinishLaunchingWithOptions:(
[LivekitReactNative setup];
WebRTCModuleOptions *options = [WebRTCModuleOptions sharedInstance];
// Optional for debugging WebRTC issues.
- options.loggingSeverity = RTCLoggingSeverityInfo;
+ // options.loggingSeverity = RTCLoggingSeverityInfo;
+ options.enableMultitaskingCameraAccess = YES;
self.moduleName = @"LivekitReactNativeExample";
// You can add your custom initial props in the dictionary below.
diff --git a/example/src/ParticipantView.tsx b/example/src/ParticipantView.tsx
index 6ea5b84a..dbb1dc13 100644
--- a/example/src/ParticipantView.tsx
+++ b/example/src/ParticipantView.tsx
@@ -1,9 +1,9 @@
import * as React from 'react';
-import { Image, StyleSheet, ViewStyle } from 'react-native';
+import { Image, StyleSheet, type ViewStyle } from 'react-native';
import {
isTrackReference,
- TrackReferenceOrPlaceholder,
+ type TrackReferenceOrPlaceholder,
useEnsureTrackRef,
useIsMuted,
useIsSpeaking,
@@ -14,63 +14,71 @@ import { View } from 'react-native';
import { Text } from 'react-native';
import { useTheme } from '@react-navigation/native';
import { Track } from 'livekit-client';
+import { Component, forwardRef } from 'react';
export type Props = {
trackRef: TrackReferenceOrPlaceholder;
style?: ViewStyle;
zOrder?: number;
mirror?: boolean;
+ useIOSPIP?: boolean;
};
-export const ParticipantView = ({
- style = {},
- trackRef,
- zOrder,
- mirror,
-}: Props) => {
- const trackReference = useEnsureTrackRef(trackRef);
- const { identity, name } = useParticipantInfo({
- participant: trackReference.participant,
- });
- const isSpeaking = useIsSpeaking(trackRef.participant);
- const isVideoMuted = useIsMuted(trackRef);
- const { colors } = useTheme();
- let videoView;
- if (isTrackReference(trackRef) && !isVideoMuted) {
- videoView = (
-
- );
- } else {
- videoView = (
-
-
- (
+ ({ style = {}, trackRef, zOrder, mirror, useIOSPIP = false }: Props, ref) => {
+ const trackReference = useEnsureTrackRef(trackRef);
+ const { identity, name } = useParticipantInfo({
+ participant: trackReference.participant,
+ });
+ const isSpeaking = useIsSpeaking(trackRef.participant);
+ const isVideoMuted = useIsMuted(trackRef);
+ const { colors } = useTheme();
+ let videoView;
+ if (isTrackReference(trackRef) && !isVideoMuted) {
+ videoView = (
+
-
-
- );
- }
+ );
+ } else {
+ videoView = (
+
+
+
+
+
+ );
+ }
- let displayName = name ? name : identity;
- if (trackRef.source === Track.Source.ScreenShare) {
- displayName = displayName + "'s screen";
- }
+ let displayName = name ? name : identity;
+ if (trackRef.source === Track.Source.ScreenShare) {
+ displayName = displayName + "'s screen";
+ }
- return (
-
- {videoView}
-
- {displayName}
+ return (
+
+ {videoView}
+
+ {displayName}
+
+ {isSpeaking && }
- {isSpeaking && }
-
- );
-};
+ );
+ }
+);
const styles = StyleSheet.create({
container: {
diff --git a/src/components/VideoTrack.tsx b/src/components/VideoTrack.tsx
index 6526ee54..a9d0a4f7 100644
--- a/src/components/VideoTrack.tsx
+++ b/src/components/VideoTrack.tsx
@@ -2,6 +2,7 @@ import * as React from 'react';
import {
type LayoutChangeEvent,
+ Platform,
StyleSheet,
View,
type ViewStyle,
@@ -12,8 +13,20 @@ import {
Track,
TrackEvent,
} from 'livekit-client';
-import { RTCView } from '@livekit/react-native-webrtc';
-import { useCallback, useEffect, useMemo, useState } from 'react';
+import {
+ RTCView,
+ RTCPIPView,
+ type RTCIOSPIPOptions,
+} from '@livekit/react-native-webrtc';
+import {
+ Component,
+ forwardRef,
+ useCallback,
+ useEffect,
+ useMemo,
+ useState,
+ type ReactNode,
+} from 'react';
import { RemoteVideoTrack } from 'livekit-client';
import ViewPortDetector from './ViewPortDetector';
import type { TrackReference } from '@livekit/components-react';
@@ -62,6 +75,50 @@ export type VideoTrackProps = {
* video(s) which appear above the remote video(s).
*/
zOrder?: number;
+
+ /**
+ * Picture-in-picture options for this view. Disabled if not supplied.
+ *
+ * iOS only. Requires iOS 15.0 or above, and the PIP background mode capability.
+ *
+ * If `iosPIP.enabled` is true, startIOSPIP and stopIOSPIP can be used to manually
+ * trigger the PIP mode. `iosPIP.startAutomatically` can be used to automatically
+ * enter PIP when backgrounding the app.
+ *
+ * @example
+ * ```tsx
+ * import { startIOSPIP, stopIOSPIP } from '@livekit/react-native-webrtc';
+ *
+ * // Obtain a ref to the view
+ * const videoRef = useRef(null);
+ * const videoView = (
+ *
+ * );
+ *
+ * // Start/stop manually
+ * startIOSPIP(videoRef);
+ * stopIOSPIP(videoRef);
+ * ```
+ *
+ */
+ iosPIP?: RTCIOSPIPOptions & {
+ preferredSize: {
+ width: number;
+ height: number;
+ };
+ fallbackView?: ReactNode;
+ };
};
/**
@@ -72,83 +129,114 @@ export type VideoTrackProps = {
* @returns A React component that renders the given video track.
* @public
*/
-export const VideoTrack = ({
- style = {},
- trackRef,
- objectFit = 'cover',
- zOrder,
- mirror,
-}: VideoTrackProps) => {
- const [elementInfo] = useState(() => {
- let info = new VideoTrackElementInfo();
- info.id = trackRef?.publication?.trackSid;
- return info;
- });
-
- const layoutOnChange = useCallback(
- (event: LayoutChangeEvent) => elementInfo.onLayout(event),
- [elementInfo]
- );
- const visibilityOnChange = useCallback(
- (isVisible: boolean) => elementInfo.onVisibility(isVisible),
- [elementInfo]
- );
-
- const videoTrack = trackRef?.publication.track;
-
- const shouldObserveVisibility = useMemo(() => {
- return (
- videoTrack instanceof RemoteVideoTrack && videoTrack.isAdaptiveStream
+export const VideoTrack = forwardRef(
+ (
+ {
+ style = {},
+ trackRef,
+ objectFit = 'cover',
+ zOrder,
+ mirror,
+ iosPIP,
+ }: VideoTrackProps,
+ ref
+ ) => {
+ const [elementInfo] = useState(() => {
+ let info = new VideoTrackElementInfo();
+ info.id = trackRef?.publication?.trackSid;
+ return info;
+ });
+
+ const layoutOnChange = useCallback(
+ (event: LayoutChangeEvent) => elementInfo.onLayout(event),
+ [elementInfo]
);
- }, [videoTrack]);
-
- const [mediaStream, setMediaStream] = useState(videoTrack?.mediaStream);
- useEffect(() => {
- setMediaStream(videoTrack?.mediaStream);
- if (videoTrack instanceof LocalVideoTrack) {
- const onRestarted = (track: Track | null) => {
- setMediaStream(track?.mediaStream);
- };
- videoTrack.on(TrackEvent.Restarted, onRestarted);
-
- return () => {
- videoTrack.off(TrackEvent.Restarted, onRestarted);
- };
- } else {
- return () => {};
- }
- }, [videoTrack]);
-
- useEffect(() => {
- if (videoTrack instanceof RemoteVideoTrack && videoTrack.isAdaptiveStream) {
- videoTrack?.observeElementInfo(elementInfo);
- return () => {
- videoTrack?.stopObservingElementInfo(elementInfo);
- };
- } else {
- return () => {};
- }
- }, [videoTrack, elementInfo]);
-
- return (
-
-
+ const visibilityOnChange = useCallback(
+ (isVisible: boolean) => elementInfo.onVisibility(isVisible),
+ [elementInfo]
+ );
+
+ const videoTrack = trackRef?.publication.track;
+
+ const shouldObserveVisibility = useMemo(() => {
+ return (
+ videoTrack instanceof RemoteVideoTrack && videoTrack.isAdaptiveStream
+ );
+ }, [videoTrack]);
+
+ const [mediaStream, setMediaStream] = useState(videoTrack?.mediaStream);
+ useEffect(() => {
+ setMediaStream(videoTrack?.mediaStream);
+ if (videoTrack instanceof LocalVideoTrack) {
+ const onRestarted = (track: Track | null) => {
+ setMediaStream(track?.mediaStream);
+ };
+ videoTrack.on(TrackEvent.Restarted, onRestarted);
+
+ return () => {
+ videoTrack.off(TrackEvent.Restarted, onRestarted);
+ };
+ } else {
+ return () => {};
+ }
+ }, [videoTrack]);
+
+ useEffect(() => {
+ if (
+ videoTrack instanceof RemoteVideoTrack &&
+ videoTrack.isAdaptiveStream
+ ) {
+ videoTrack?.observeElementInfo(elementInfo);
+ return () => {
+ videoTrack?.stopObservingElementInfo(elementInfo);
+ };
+ } else {
+ return () => {};
+ }
+ }, [videoTrack, elementInfo]);
+
+ let videoView;
+ if (!iosPIP || Platform.OS !== 'ios') {
+ videoView = (
-
-
- );
-};
+ );
+ } else {
+ videoView = (
+
+ );
+ }
+ return (
+
+
+ {videoView}
+
+
+ );
+ }
+);
const styles = StyleSheet.create({
container: {},