I'm using amazon-chime-react-native
in my app. How can I set up audio devices before the meeting starts?
https://github.com/aws-samples/amazon-chime-react-native-demo
I can only get the device list after calling startMeeting, but then the meeting is already running on the setup screen and I can hear the second participant.
NativeFunction.startMeeting(meeting,attendee);
So this NativeFunction.startMeeting call
should happen on the Meeting screen instead.
I think I need to create the meeting session first (so I can enumerate devices) and only start the audio/video session afterwards. How can I do that?
Bridge.ts
import { NativeModules, NativeEventEmitter } from 'react-native';
/**
* These are the functions that will be called from the native side,
* i.e. Native -> React Native.
* Event names must match the MobileSDKEvent constants below:
*
* NativeEventEmitter.OnMeetingStart(meetingName)
* NativeEventEmitter.OnMeetingEnd()
* NativeEventEmitter.OnAttendeesJoin(attendeeInfo)
* NativeEventEmitter.OnAttendeesLeave(attendeeInfo)
* NativeEventEmitter.OnAddVideoTile(tileState)
* NativeEventEmitter.OnRemoveVideoTile(tileState)
* NativeEventEmitter.OnError(errorMessage)
*/
// Single shared emitter bridging native Amazon Chime SDK callbacks into JS.
// NativeMobileSDKBridge is the native module registered on both platforms.
const eventEmitter = new NativeEventEmitter(
NativeModules.NativeMobileSDKBridge
);
// Event names emitted by the native side; subscribe via getSDKEventEmitter().
// Values must stay in sync with the strings sent by the native bridges.
export const MobileSDKEvent = {
OnMeetingStart: 'OnMeetingStart',
OnMeetingEnd: 'OnMeetingEnd',
OnAddVideoTile: 'OnAddVideoTile',
OnRemoveVideoTile: 'OnRemoveVideoTile',
OnAttendeesJoin: 'OnAttendeesJoin',
OnAttendeesLeave: 'OnAttendeesLeave',
OnAttendeesMute: 'OnAttendeesMute',
OnAttendeesUnmute: 'OnAttendeesUnmute',
OnAudioDeviceChanged: 'OnAudioDeviceChanged',
OnDataMessageReceive: 'OnDataMessageReceive',
OnError: 'OnError',
};
// Error codes delivered through the OnError event payload.
export const MeetingError = {
OnMaximumConcurrentVideoReached: 'OnMaximumConcurrentVideoReached',
};
// Accessor for the shared emitter so consumers don't construct their own
// (multiple NativeEventEmitter instances for one module are wasteful).
export function getSDKEventEmitter() {
return eventEmitter;
}
/**
* These are the functions available for React Native to call on the native side,
* i.e. React Native -> Native.
*
* NativeModules.NativeMobileSDKBridge.startMeeting(meetingId, userName)
* NativeModules.NativeMobileSDKBridge.stopMeeting()
* NativeModules.NativeMobileSDKBridge.setMute(isMute) -> boolean
* NativeModules.NativeMobileSDKBridge.setCameraOn(isOn) -> boolean
* NativeModules.NativeMobileSDKBridge.bindVideoView(reactTagId, tileId)
* NativeModules.NativeMobileSDKBridge.unbindVideoView(reactTagId, tileId)
* NativeModules.NativeMobileSDKBridge.setAudioDevice(device) -> boolean
* NativeModules.NativeMobileSDKBridge.getAudieDevicesList() -> List<MediaDevice>
*   (note: "Audie" is a typo, but it matches the native method name on both platforms)
*/
// RN -> Native entry points. The property names are the JS-facing API; the
// right-hand sides must match the method names exported by the native bridge.
export const NativeFunction = {
startMeeting: NativeModules.NativeMobileSDKBridge.startMeeting,
stopMeeting: NativeModules.NativeMobileSDKBridge.stopMeeting,
setMute: NativeModules.NativeMobileSDKBridge.setMute,
setAudioDevice: NativeModules.NativeMobileSDKBridge.setAudioDevice,
// NOTE: 'getAudieDevicesList' (sic) deliberately matches the typo'd native
// method name on both iOS and Android — renaming only here breaks the binding.
getAudioDevicesList: NativeModules.NativeMobileSDKBridge.getAudieDevicesList,
setCameraOn: NativeModules.NativeMobileSDKBridge.setCameraOn,
bindVideoView: NativeModules.NativeMobileSDKBridge.bindVideoView,
unbindVideoView: NativeModules.NativeMobileSDKBridge.unbindVideoView,
switchCamera: NativeModules.NativeMobileSDKBridge.switchCamera,
sendDataMessage: NativeModules.NativeMobileSDKBridge.sendDataMessage,
};
For Android: NativeMobileSDKBridge.kt
// Builds { devices: [{id, label, type}] } from the current meeting session's
// audio devices for consumption by the React Native side.
// Returns an empty "devices" array when no meeting session exists yet.
fun getAudieDevicesList(): WritableMap {
    val result: WritableMap = WritableNativeMap()
    val devices: WritableArray = WritableNativeArray()
    meetingSession?.audioVideo?.listAudioDevices()?.forEach { audioDevice ->
        // One map per device; "type" is the MediaDeviceType enum name
        // (e.g. AUDIO_HANDSET), unlike iOS which sends the raw int.
        val entry: WritableMap = WritableNativeMap().apply {
            putString("id", audioDevice.id)
            putString("label", audioDevice.label)
            putString("type", audioDevice.type.name)
        }
        devices.pushMap(entry)
    }
    result.putArray("devices", devices)
    return result
}
For iOS: NativeMobileSDKBridge.m
// Synchronously returns { devices: [{label, type}] } to the JS bridge.
// "type" is the raw MediaDeviceType integer rendered as a string (the Android
// bridge sends the enum name instead — JS must accept both forms).
RCT_EXPORT_BLOCKING_SYNCHRONOUS_METHOD(getAudieDevicesList)
{
  NSArray *deviceList = [meetingSession.audioVideo listAudioDevices];
  // FIX: accumulate in an NSMutableArray — the previous
  // arrayByAddingObject: in a loop copied the array every iteration (O(n^2)).
  NSMutableArray *transformedList = [NSMutableArray arrayWithCapacity:deviceList.count];
  for (MediaDevice *device in deviceList) {
    [transformedList addObject:@{
      // FIX: a nil label would crash the dictionary literal; substitute "".
      @"label": device.label ?: @"",
      @"type": [NSString stringWithFormat:@"%i", device.type],
    }];
  }
  return @{@"devices": [transformedList copy]};
}
Here is my setup view for configuring devices before the meeting:
/**
 * Pre-meeting device-setup screen: requests camera/microphone permissions,
 * lists audio devices from the native bridge, and lets the user pick a
 * microphone and speaker before navigating to the call screen.
 */
const DeviceSetup: FC<Props> = ({ route }) => {
  const navigation = useNavigation();

  const [microphones, setMicrophones] = useState([]);
  const [selectedMicrophone, setSelectedMicrophone] = useState();
  const [speakers, setSpeakers] = useState([]);
  const [selectedSpeaker, setSelectedSpeaker] = useState();

  // Reset the stack so the user cannot navigate "back" into setup mid-call.
  const handleJoinToCall = useCallback(
    () =>
      navigation.dispatch(
        CommonActions.reset({
          index: 1,
          routes: [
            {
              name: PATHS.CALL,
              params: { ...route.params },
            },
          ],
        })
      ),
    // FIX: deps were [] — stale route.params could be forwarded to the call.
    [navigation, route.params]
  );

  const refreshDevices = useCallback(async () => {
    const deviceList = await NativeFunction.getAudioDevicesList();
    // FIX: guard against an undefined bridge result; previously
    // `microphonesList[0]` threw when deviceList came back undefined.
    const devices = deviceList?.devices ?? [];
    // Android reports the enum name ('AUDIO_HANDSET'); iOS reports the raw
    // MediaDeviceType integer as a string ('3') — accept both forms.
    const microphonesList = devices.filter(
      (x: AudioDevice) => x.type === 'AUDIO_HANDSET' || x.type === '3'
    );
    const speakersList = devices.filter(
      (x: AudioDevice) => x.type === 'AUDIO_BUILTIN_SPEAKER' || x.type === '2'
    );
    setMicrophones(microphonesList);
    setSelectedMicrophone(microphonesList[0]); // undefined when list is empty
    setSpeakers(speakersList);
    setSelectedSpeaker(speakersList[0]);
  }, []);

  // Route a picked device to the matching selection slot by its type.
  const onDeviceChange = useCallback((device: any) => {
    if (device.type === 'AUDIO_BUILTIN_SPEAKER' || device.type === '2') {
      setSelectedSpeaker(device);
    } else if (device.type === 'AUDIO_HANDSET' || device.type === '3') {
      setSelectedMicrophone(device);
    }
  }, []);

  const requestPermissions = useCallback(async () => {
    // NOTE(review): both platforms' permissions are checked unconditionally;
    // react-native-permissions returns UNAVAILABLE for the foreign platform,
    // so only the current platform's pair can ever be GRANTED. Consider
    // Platform.select to check only the relevant pair — TODO confirm.
    const iosCamera = await check(PERMISSIONS.IOS.CAMERA);
    const iosMicrophone = await check(PERMISSIONS.IOS.MICROPHONE);
    const androidCamera = await check(PERMISSIONS.ANDROID.CAMERA);
    const androidMicrophone = await check(PERMISSIONS.ANDROID.RECORD_AUDIO);

    const bothGranted = (a: string, b: string) =>
      a === RESULTS.GRANTED && b === RESULTS.GRANTED;

    if (
      bothGranted(iosCamera, iosMicrophone) ||
      bothGranted(androidCamera, androidMicrophone)
    ) {
      refreshDevices();
      return;
    }

    // FIX: the old code only prompted when BOTH permissions were DENIED,
    // leaving the user stuck when exactly one was missing. Prompt whenever
    // at least one is DENIED (requestMultiple skips already-granted entries).
    const anyDenied = [
      iosCamera,
      iosMicrophone,
      androidCamera,
      androidMicrophone,
    ].includes(RESULTS.DENIED);
    if (anyDenied) {
      const result = await requestMultiple([
        PERMISSIONS.IOS.CAMERA,
        PERMISSIONS.IOS.MICROPHONE,
        PERMISSIONS.ANDROID.CAMERA,
        PERMISSIONS.ANDROID.RECORD_AUDIO,
      ]);
      if (
        bothGranted(
          result[PERMISSIONS.IOS.CAMERA],
          result[PERMISSIONS.IOS.MICROPHONE]
        ) ||
        bothGranted(
          result[PERMISSIONS.ANDROID.CAMERA],
          result[PERMISSIONS.ANDROID.RECORD_AUDIO]
        )
      ) {
        refreshDevices();
      }
    }
  }, [refreshDevices]);

  useEffect(() => {
    requestPermissions();
  }, [requestPermissions]);

  return (
    <View>
      <View style={{ flex: 1 }}>
        <>
          <Picker
            onChange={onDeviceChange}
            value={selectedMicrophone}
          >
            {/* FIX: key was item.type, which collides — every microphone
                shares the same type. Combine type + label for uniqueness. */}
            {microphones?.map((item: any) => (
              <Picker.Item
                key={`${item.type}-${item.label}`}
                label={item.label}
                value={item}
              />
            ))}
          </Picker>
          <Picker
            onChange={onDeviceChange}
            value={selectedSpeaker}
          >
            {speakers?.map((item: any) => (
              <Picker.Item
                key={`${item.type}-${item.label}`}
                label={item.label}
                value={item}
              />
            ))}
          </Picker>
        </>
      </View>
      <View>
        <Button
          onPress={handleJoinToCall}
          size='wide'
          title='NEXT'
        />
      </View>
    </View>
  );
};
export default DeviceSetup;