Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
46 changes: 46 additions & 0 deletions packages/react-bindings/src/hooks/callUtilHooks.ts
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
import { useCallback, useEffect, useState } from 'react';
import { useCall } from '../contexts';
import { useIsCallRecordingInProgress } from './callStateHooks';
import { hasAudio, StreamVideoParticipant } from '@stream-io/video-client';

/**
* Custom hook for toggling call recording in a video call.
Expand Down Expand Up @@ -42,3 +43,48 @@ export const useToggleCallRecording = () => {

return { toggleCallRecording, isAwaitingResponse, isCallRecordingInProgress };
};

/**
* Custom hook for checking if an audio track is connecting.
*
* This hook checks if the participant has an audio track and if the audio track is unmuted.
*
* @param participant the participant to check.
* @returns true if the audio track is connecting, false otherwise.
*/
/**
 * Custom hook for checking if an audio track is connecting.
 *
 * A participant counts as "connecting" when they are expected to have audio
 * (per `hasAudio`) but the first track of their audio stream is absent or
 * still muted. The hook subscribes to the track's `mute`/`unmute` events and
 * re-subscribes whenever the stream or its first track is replaced.
 *
 * @param participant the participant to check.
 * @returns true if the audio track is connecting, false otherwise.
 */
export const useIsAudioConnecting = (
  participant: StreamVideoParticipant,
): boolean => {
  const { audioStream } = participant;
  const hasAudioTrack = hasAudio(participant);
  const firstTrack = audioStream?.getAudioTracks()[0];
  // The track id is part of the effect deps so we re-subscribe when the
  // track is swapped out within the same MediaStream instance.
  const trackId = firstTrack?.id;

  const [unmuted, setUnmuted] = useState(
    () => !!firstTrack && !firstTrack.muted,
  );

  useEffect(() => {
    const track = audioStream?.getAudioTracks()[0];
    if (!track) {
      // No track — make sure we don't keep a stale "unmuted" flag around.
      setUnmuted(false);
      return;
    }

    const syncMutedState = () => {
      setUnmuted(!track.muted);
    };

    // Sync immediately, then follow the browser-level mute transitions.
    syncMutedState();
    track.addEventListener('mute', syncMutedState);
    track.addEventListener('unmute', syncMutedState);

    return () => {
      track.removeEventListener('mute', syncMutedState);
      track.removeEventListener('unmute', syncMutedState);
    };
  }, [audioStream, trackId]);

  return hasAudioTrack && !unmuted;
};
Original file line number Diff line number Diff line change
@@ -1,13 +1,23 @@
import React, { useMemo } from 'react';
import { Pressable, StyleSheet, Text, View } from 'react-native';
import {
Pressable,
StyleSheet,
Text,
View,
ActivityIndicator,
} from 'react-native';
import {
BadNetwork,
MicOff,
PinVertical,
ScreenShareIndicator,
VideoSlash,
} from '../../../icons';
import { useCall, useI18n } from '@stream-io/video-react-bindings';
import {
useCall,
useI18n,
useIsAudioConnecting,
} from '@stream-io/video-react-bindings';
import { ComponentTestIds } from '../../../constants/TestIds';
import { type ParticipantViewProps } from './ParticipantView';
import { Z_INDEX } from '../../../constants';
Expand Down Expand Up @@ -55,6 +65,7 @@ export const ParticipantLabel = ({
const isAudioMuted = !hasAudio(participant);
const isVideoMuted = !hasVideo(participant);
const isTrackPaused = trackType && hasPausedTrack(participant, trackType);
const isAudioConnecting = useIsAudioConnecting(participant);

if (trackType === 'screenShareTrack') {
const screenShareText = isLocalParticipant
Expand Down Expand Up @@ -104,6 +115,13 @@ export const ParticipantLabel = ({
]}
>
<View style={styles.wrapper}>
{isAudioConnecting && (
<ActivityIndicator
size="small"
color={colors.iconPrimary}
style={styles.audioConnectingIndicator}
/>
)}
<Text style={[styles.userNameLabel, userNameLabel]} numberOfLines={1}>
{participantLabel}
</Text>
Expand Down Expand Up @@ -174,6 +192,10 @@ const useStyles = () => {
fontWeight: '400',
color: theme.colors.textPrimary,
},
audioConnectingIndicator: {
marginRight: theme.variants.spacingSizes.sm,
justifyContent: 'center',
},
screenShareIconContainer: {
marginRight: theme.variants.spacingSizes.sm,
justifyContent: 'center',
Expand Down
Original file line number Diff line number Diff line change
@@ -1,14 +1,17 @@
import { ComponentType, forwardRef, useEffect, useState } from 'react';
import { ComponentType, forwardRef } from 'react';
import { Placement } from '@floating-ui/react';
import {
hasAudio,
hasPausedTrack,
hasScreenShare,
hasVideo,
SfuModels,
StreamVideoParticipant,
} from '@stream-io/video-client';
import { useCall, useI18n } from '@stream-io/video-react-bindings';
import {
useCall,
useI18n,
useIsAudioConnecting,
} from '@stream-io/video-react-bindings';
import clsx from 'clsx';

import {
Expand Down Expand Up @@ -139,8 +142,7 @@ export const ParticipantDetails = ({
const isTrackPaused =
trackType !== 'none' ? hasPausedTrack(participant, trackType) : false;

const isAudioTrackUnmuted = useIsTrackUnmuted(participant);
const isAudioConnecting = hasAudioTrack && !isAudioTrackUnmuted;
const isAudioConnecting = useIsAudioConnecting(participant);

return (
<>
Expand Down Expand Up @@ -215,33 +217,3 @@ export const SpeechIndicator = () => {
</span>
);
};

/**
 * Tracks whether the first audio track of the participant's audio stream is
 * currently unmuted, re-rendering on the track's `mute`/`unmute` events.
 *
 * @param participant the participant whose audio stream is observed.
 * @returns true while the first audio track exists and is unmuted.
 */
const useIsTrackUnmuted = (participant: StreamVideoParticipant) => {
  const audioStream = participant.audioStream;
  // Track id is part of the effect deps so we re-subscribe when the track
  // is replaced within the same MediaStream instance.
  const trackId = audioStream?.getAudioTracks()[0]?.id;

  const [unmuted, setUnmuted] = useState(() => {
    const track = audioStream?.getAudioTracks()[0];
    return !!track && !track.muted;
  });

  useEffect(() => {
    const track = audioStream?.getAudioTracks()[0];
    if (!track) {
      // Fix: without this reset, a stale `true` would linger after the
      // stream (or its first track) goes away.
      setUnmuted(false);
      return;
    }

    setUnmuted(!track.muted);

    const handler = () => {
      setUnmuted(!track.muted);
    };

    track.addEventListener('mute', handler);
    track.addEventListener('unmute', handler);

    return () => {
      track.removeEventListener('mute', handler);
      track.removeEventListener('unmute', handler);
    };
  }, [audioStream, trackId]);

  return unmuted;
};
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,7 @@ import { BottomControls } from './CallControlls/BottomControls';
import { useOrientation } from '../hooks/useOrientation';
import { Z_INDEX } from '../constants';
import { TopControls } from './CallControlls/TopControls';
import { AudioConnectingParticipantLabel } from './AudioConnectingParticipantLabel';
import { useLayout } from '../contexts/LayoutContext';
import { useAppGlobalStoreValue } from '../contexts/AppContext';
import DeviceInfo from 'react-native-device-info';
Expand Down Expand Up @@ -128,6 +129,7 @@ export const ActiveCall = ({
iOSPiPIncludeLocalParticipantVideo
onHangupCallHandler={onHangupCallHandler}
CallControls={CustomBottomControls}
ParticipantLabel={AudioConnectingParticipantLabel}
landscape={isLandscape}
layout={selectedLayout}
/>
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,46 @@
import React from 'react';
import { ActivityIndicator, StyleSheet, Text, View } from 'react-native';
import {
useIsAudioConnecting,
ParticipantLabel,
type ParticipantLabelProps,
useTheme,
} from '@stream-io/video-react-native-sdk';

/**
 * Wraps the SDK `ParticipantLabel` and overlays a small badge (spinner plus
 * "Connecting to audio…" text) while the participant's audio track is still
 * connecting.
 *
 * @param props the standard `ParticipantLabelProps` forwarded to the
 *   underlying SDK label.
 */
export const AudioConnectingParticipantLabel = (
  props: ParticipantLabelProps,
) => {
  const { participant, trackType } = props;
  const { theme } = useTheme();
  // Screen-share tiles get the default label only — audio connectivity
  // belongs to the participant's main tile.
  const isAudioConnecting =
    useIsAudioConnecting(participant) && trackType !== 'screenShareTrack';

  return (
    <View>
      <ParticipantLabel {...props} />
      {isAudioConnecting && (
        <View
          style={[styles.badge, { backgroundColor: theme.colors.sheetOverlay }]}
        >
          <ActivityIndicator size="small" color={theme.colors.iconPrimary} />
          <Text style={[styles.text, { color: theme.colors.textPrimary }]}>
            Connecting to audio…
          </Text>
        </View>
      )}
    </View>
  );
};

// Styling for the "connecting to audio" badge overlay; colors are supplied
// inline from the theme at the usage site.
const styles = StyleSheet.create({
  badge: {
    alignItems: 'center',
    alignSelf: 'flex-start',
    borderRadius: 6,
    flexDirection: 'row',
    marginTop: 4,
    paddingHorizontal: 8,
    paddingVertical: 4,
  },
  text: {
    fontSize: 12,
    fontWeight: '500',
    marginLeft: 6,
  },
});
Loading