Add input device selection during voice broadcast (#9620)
This commit is contained in:
parent 5b5c3ab33c
commit b302275289
12 changed files with 486 additions and 87 deletions
@@ -29,6 +29,10 @@ limitations under the License.
     color: $accent;
 }
 
+.mx_Icon_alert {
+    color: $alert;
+}
+
 .mx_Icon_8 {
     flex: 0 0 8px;
     height: 8px;
res/img/element-icons/Mic.svg (new file, 1 line, 450 B)
@@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" width="16.15" height="19.751" fill="none"><path fill="currentColor" fill-rule="evenodd" d="M4.175 3.9a3.9 3.9 0 0 1 7.8 0v5.832a3.9 3.9 0 1 1-7.8 0zM1.25 8.488c.69 0 1.25.56 1.25 1.25a5.57 5.57 0 0 0 5.567 5.566h.017a5.57 5.57 0 0 0 5.566-5.566 1.25 1.25 0 0 1 2.5 0c0 4.03-2.96 7.37-6.825 7.97v.793a1.25 1.25 0 0 1-2.5 0v-.793C2.96 17.108 0 13.769 0 9.738c0-.69.56-1.25 1.25-1.25Z" clip-rule="evenodd"/></svg>
src/components/views/audio_messages/DevicesContextMenu.tsx (new file, 56 lines)
@@ -0,0 +1,56 @@
/*
Copyright 2022 The Matrix.org Foundation C.I.C.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

import React, { MutableRefObject } from "react";

import { toLeftOrRightOf } from "../../structures/ContextMenu";
import IconizedContextMenu, {
    IconizedContextMenuOptionList,
    IconizedContextMenuRadio,
} from "../context_menus/IconizedContextMenu";

interface Props {
    containerRef: MutableRefObject<HTMLElement | null>;
    currentDevice: MediaDeviceInfo | null;
    devices: MediaDeviceInfo[];
    onDeviceSelect: (device: MediaDeviceInfo) => void;
}

export const DevicesContextMenu: React.FC<Props> = ({
    containerRef,
    currentDevice,
    devices,
    onDeviceSelect,
}) => {
    const deviceOptions = devices.map((d: MediaDeviceInfo) => {
        return <IconizedContextMenuRadio
            key={d.deviceId}
            active={d.deviceId === currentDevice?.deviceId}
            onClick={() => onDeviceSelect(d)}
            label={d.label}
        />;
    });

    return <IconizedContextMenu
        mountAsChild={false}
        onFinished={() => {}}
        {...toLeftOrRightOf(containerRef.current.getBoundingClientRect(), 0)}
    >
        <IconizedContextMenuOptionList>
            { deviceOptions }
        </IconizedContextMenuOptionList>
    </IconizedContextMenu>;
};
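For orientation (not part of the diff): DevicesContextMenu is a controlled component, so the caller owns the device list, the current selection, and the open/closed state, and only mounts the menu while it should be visible. A minimal sketch of such a caller; the host component and its state names are illustrative assumptions, not code from this commit:

import React, { useRef, useState } from "react";

import { DevicesContextMenu } from "./DevicesContextMenu";

// Illustrative host: anchors the menu to its own DOM node and closes it
// once a device has been picked.
const DeviceLabel: React.FC<{ devices: MediaDeviceInfo[] }> = ({ devices }) => {
    const containerRef = useRef<HTMLDivElement | null>(null);
    const [current, setCurrent] = useState<MediaDeviceInfo | null>(null);
    const [open, setOpen] = useState(false);

    return <div ref={containerRef} onClick={() => setOpen(true)}>
        { current?.label ?? "Default Device" }
        { open && <DevicesContextMenu
            containerRef={containerRef}
            currentDevice={current}
            devices={devices}
            onDeviceSelect={(device) => {
                setCurrent(device);
                setOpen(false);
            }}
        /> }
    </div>;
};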
src/hooks/useAudioDeviceSelection.ts (new file, 76 lines)
@@ -0,0 +1,76 @@
/*
Copyright 2022 The Matrix.org Foundation C.I.C.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

import { useRef, useState } from "react";

import { _t } from "../languageHandler";
import MediaDeviceHandler, { MediaDeviceKindEnum } from "../MediaDeviceHandler";
import { requestMediaPermissions } from "../utils/media/requestMediaPermissions";

interface State {
    devices: MediaDeviceInfo[];
    device: MediaDeviceInfo | null;
}

export const useAudioDeviceSelection = (
    onDeviceChanged?: (device: MediaDeviceInfo) => void,
) => {
    const shouldRequestPermissionsRef = useRef<boolean>(true);
    const [state, setState] = useState<State>({
        devices: [],
        device: null,
    });

    if (shouldRequestPermissionsRef.current) {
        shouldRequestPermissionsRef.current = false;
        requestMediaPermissions(false).then((stream: MediaStream | undefined) => {
            MediaDeviceHandler.getDevices().then(({ audioinput }) => {
                MediaDeviceHandler.getDefaultDevice(audioinput);
                const deviceFromSettings = MediaDeviceHandler.getAudioInput();
                const device = audioinput.find((d) => {
                    return d.deviceId === deviceFromSettings;
                }) || audioinput[0];
                setState({
                    ...state,
                    devices: audioinput,
                    device,
                });
                stream?.getTracks().forEach(t => t.stop());
            });
        });
    }

    const setDevice = (device: MediaDeviceInfo) => {
        const shouldNotify = device.deviceId !== state.device?.deviceId;
        MediaDeviceHandler.instance.setDevice(device.deviceId, MediaDeviceKindEnum.AudioInput);

        setState({
            ...state,
            device,
        });

        if (shouldNotify) {
            onDeviceChanged?.(device);
        }
    };

    return {
        currentDevice: state.device,
        currentDeviceLabel: state.device?.label || _t("Default Device"),
        devices: state.devices,
        setDevice,
    };
};
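The hook's contract, as consumed by the PiP components later in this diff: it asks for microphone permission once, loads the audio inputs via MediaDeviceHandler, and exposes currentDevice, currentDeviceLabel, devices and setDevice; the optional callback fires only when the selected device actually changes. A hedged usage sketch (the component name and the cycling behaviour are illustrative, not part of the commit):

import React from "react";

import { useAudioDeviceSelection } from "./useAudioDeviceSelection";

// Illustrative consumer: shows the current label and cycles to the next
// audio input on click.
const InputDevicePicker: React.FC = () => {
    const { currentDevice, currentDeviceLabel, devices, setDevice } = useAudioDeviceSelection(
        (device) => console.log("input device changed to", device.label),
    );

    const cycleDevice = () => {
        if (devices.length === 0) return;
        const index = devices.findIndex((d) => d.deviceId === currentDevice?.deviceId);
        setDevice(devices[(index + 1) % devices.length]);
    };

    return <button onClick={cycleDevice}>{ currentDeviceLabel }</button>;
};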
@@ -654,6 +654,7 @@
     "30s backward": "30s backward",
     "30s forward": "30s forward",
     "Go live": "Go live",
+    "Change input device": "Change input device",
     "Live": "Live",
     "Voice broadcast": "Voice broadcast",
     "Cannot reach homeserver": "Cannot reach homeserver",
@@ -21,99 +21,34 @@ import AccessibleButton from "../../../components/views/elements/AccessibleButton";
 import { VoiceBroadcastPreRecording } from "../../models/VoiceBroadcastPreRecording";
 import { Icon as LiveIcon } from "../../../../res/img/element-icons/live.svg";
 import { _t } from "../../../languageHandler";
-import IconizedContextMenu, {
-    IconizedContextMenuOptionList,
-    IconizedContextMenuRadio,
-} from "../../../components/views/context_menus/IconizedContextMenu";
-import { requestMediaPermissions } from "../../../utils/media/requestMediaPermissions";
-import MediaDeviceHandler from "../../../MediaDeviceHandler";
-import { toLeftOrRightOf } from "../../../components/structures/ContextMenu";
+import { useAudioDeviceSelection } from "../../../hooks/useAudioDeviceSelection";
+import { DevicesContextMenu } from "../../../components/views/audio_messages/DevicesContextMenu";
 
 interface Props {
     voiceBroadcastPreRecording: VoiceBroadcastPreRecording;
 }
 
-interface State {
-    devices: MediaDeviceInfo[];
-    device: MediaDeviceInfo | null;
-    showDeviceSelect: boolean;
-}
-
 export const VoiceBroadcastPreRecordingPip: React.FC<Props> = ({
     voiceBroadcastPreRecording,
 }) => {
-    const shouldRequestPermissionsRef = useRef<boolean>(true);
-    const pipRef = useRef<HTMLDivElement>(null);
-    const [state, setState] = useState<State>({
-        devices: [],
-        device: null,
-        showDeviceSelect: false,
-    });
-
-    if (shouldRequestPermissionsRef.current) {
-        shouldRequestPermissionsRef.current = false;
-        requestMediaPermissions(false).then((stream: MediaStream | undefined) => {
-            MediaDeviceHandler.getDevices().then(({ audioinput }) => {
-                MediaDeviceHandler.getDefaultDevice(audioinput);
-                const deviceFromSettings = MediaDeviceHandler.getAudioInput();
-                const device = audioinput.find((d) => {
-                    return d.deviceId === deviceFromSettings;
-                }) || audioinput[0];
-                setState({
-                    ...state,
-                    devices: audioinput,
-                    device,
-                });
-                stream?.getTracks().forEach(t => t.stop());
-            });
-        });
-    }
-
-    const onDeviceOptionClick = (device: MediaDeviceInfo) => {
-        setState({
-            ...state,
-            device,
-            showDeviceSelect: false,
-        });
-    };
-
-    const onMicrophoneLineClick = () => {
-        setState({
-            ...state,
-            showDeviceSelect: true,
-        });
-    };
-
-    const deviceOptions = state.devices.map((d: MediaDeviceInfo) => {
-        return <IconizedContextMenuRadio
-            key={d.deviceId}
-            active={d.deviceId === state.device?.deviceId}
-            onClick={() => onDeviceOptionClick(d)}
-            label={d.label}
-        />;
-    });
-
-    const devicesMenu = state.showDeviceSelect && pipRef.current
-        ? <IconizedContextMenu
-            mountAsChild={false}
-            onFinished={() => {}}
-            {...toLeftOrRightOf(pipRef.current.getBoundingClientRect(), 0)}
-        >
-            <IconizedContextMenuOptionList>
-                { deviceOptions }
-            </IconizedContextMenuOptionList>
-        </IconizedContextMenu>
-        : null;
+    const pipRef = useRef<HTMLDivElement | null>(null);
+    const { currentDevice, currentDeviceLabel, devices, setDevice } = useAudioDeviceSelection();
+    const [showDeviceSelect, setShowDeviceSelect] = useState<boolean>(false);
+
+    const onDeviceSelect = (device: MediaDeviceInfo | null) => {
+        setShowDeviceSelect(false);
+        setDevice(device);
+    };
 
     return <div
         className="mx_VoiceBroadcastBody mx_VoiceBroadcastBody--pip"
         ref={pipRef}
     >
         <VoiceBroadcastHeader
             onCloseClick={voiceBroadcastPreRecording.cancel}
-            onMicrophoneLineClick={onMicrophoneLineClick}
+            onMicrophoneLineClick={() => setShowDeviceSelect(true)}
             room={voiceBroadcastPreRecording.room}
-            microphoneLabel={state.device?.label || _t('Default Device')}
+            microphoneLabel={currentDeviceLabel}
             showClose={true}
         />
         <AccessibleButton

@@ -124,6 +59,13 @@ export const VoiceBroadcastPreRecordingPip: React.FC<Props> = ({
         <LiveIcon className="mx_Icon mx_Icon_16" />
         { _t("Go live") }
     </AccessibleButton>
-    { devicesMenu }
+    {
+        showDeviceSelect && <DevicesContextMenu
+            containerRef={pipRef}
+            currentDevice={currentDevice}
+            devices={devices}
+            onDeviceSelect={onDeviceSelect}
+        />
+    }
 </div>;
 };
@@ -14,7 +14,7 @@ See the License for the specific language governing permissions and
 limitations under the License.
 */
 
-import React from "react";
+import React, { useRef, useState } from "react";
 
 import {
     VoiceBroadcastControl,

@@ -26,13 +26,18 @@ import { VoiceBroadcastHeader } from "../atoms/VoiceBroadcastHeader";
 import { Icon as StopIcon } from "../../../../res/img/element-icons/Stop.svg";
 import { Icon as PauseIcon } from "../../../../res/img/element-icons/pause.svg";
 import { Icon as RecordIcon } from "../../../../res/img/element-icons/Record.svg";
+import { Icon as MicrophoneIcon } from "../../../../res/img/element-icons/Mic.svg";
 import { _t } from "../../../languageHandler";
+import AccessibleButton from "../../../components/views/elements/AccessibleButton";
+import { useAudioDeviceSelection } from "../../../hooks/useAudioDeviceSelection";
+import { DevicesContextMenu } from "../../../components/views/audio_messages/DevicesContextMenu";
 
 interface VoiceBroadcastRecordingPipProps {
     recording: VoiceBroadcastRecording;
 }
 
 export const VoiceBroadcastRecordingPip: React.FC<VoiceBroadcastRecordingPipProps> = ({ recording }) => {
+    const pipRef = useRef<HTMLDivElement | null>(null);
     const {
         live,
         timeLeft,

@@ -41,6 +46,29 @@ export const VoiceBroadcastRecordingPip: React.FC<VoiceBroadcastRecordingPipProps> = ({ recording }) => {
         stopRecording,
         toggleRecording,
     } = useVoiceBroadcastRecording(recording);
+    const { currentDevice, devices, setDevice } = useAudioDeviceSelection();
+
+    const onDeviceSelect = async (device: MediaDeviceInfo) => {
+        setShowDeviceSelect(false);
+
+        if (currentDevice.deviceId === device.deviceId) {
+            // device unchanged
+            return;
+        }
+
+        setDevice(device);
+
+        if ([VoiceBroadcastInfoState.Paused, VoiceBroadcastInfoState.Stopped].includes(recordingState)) {
+            // Nothing to do in these cases. Resume will use the selected device.
+            return;
+        }
+
+        // pause and resume to switch the input device
+        await recording.pause();
+        await recording.resume();
+    };
+
+    const [showDeviceSelect, setShowDeviceSelect] = useState<boolean>(false);
 
     const toggleControl = recordingState === VoiceBroadcastInfoState.Paused
         ? <VoiceBroadcastControl

@@ -53,6 +81,7 @@ export const VoiceBroadcastRecordingPip: React.FC<VoiceBroadcastRecordingPipProps> = ({ recording }) => {
 
     return <div
         className="mx_VoiceBroadcastBody mx_VoiceBroadcastBody--pip"
+        ref={pipRef}
     >
         <VoiceBroadcastHeader
             live={live ? "live" : "grey"}

@@ -62,11 +91,25 @@ export const VoiceBroadcastRecordingPip: React.FC<VoiceBroadcastRecordingPipProps> = ({ recording }) => {
         <hr className="mx_VoiceBroadcastBody_divider" />
         <div className="mx_VoiceBroadcastBody_controls">
             { toggleControl }
+            <AccessibleButton
+                aria-label={_t("Change input device")}
+                onClick={() => setShowDeviceSelect(true)}
+            >
+                <MicrophoneIcon className="mx_Icon mx_Icon_16 mx_Icon_alert" />
+            </AccessibleButton>
             <VoiceBroadcastControl
                 icon={StopIcon}
                 label="Stop Recording"
                 onClick={stopRecording}
             />
         </div>
+        {
+            showDeviceSelect && <DevicesContextMenu
+                containerRef={pipRef}
+                currentDevice={currentDevice}
+                devices={devices}
+                onDeviceSelect={onDeviceSelect}
+            />
+        }
     </div>;
 };
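The notable behaviour above is how a device change is applied while a broadcast is live: onDeviceSelect stores the new device first and, unless the broadcast is already paused or stopped, pauses and immediately resumes the recording, presumably so the recorder re-opens its input stream on the newly selected device. A condensed, illustrative restatement of that flow (the helper name is hypothetical; recording, recordingState and setDevice are assumed to be in scope as in the component above):

// Sketch of the switch-while-live flow from onDeviceSelect above.
async function switchInputDevice(device: MediaDeviceInfo): Promise<void> {
    setDevice(device); // persist the choice (MediaDeviceHandler under the hood)

    const idle = [VoiceBroadcastInfoState.Paused, VoiceBroadcastInfoState.Stopped];
    if (idle.includes(recordingState)) {
        // Nothing to restart; the next resume will pick up the new device.
        return;
    }

    // Pause and resume so the new input device takes effect mid-broadcast.
    await recording.pause();
    await recording.resume();
}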
@@ -47,7 +47,13 @@ const showStopBroadcastingDialog = async (): Promise<boolean> => {
 
 export const useVoiceBroadcastRecording = (recording: VoiceBroadcastRecording) => {
     const client = MatrixClientPeg.get();
-    const room = client.getRoom(recording.infoEvent.getRoomId());
+    const roomId = recording.infoEvent.getRoomId();
+    const room = client.getRoom(roomId);
+
+    if (!room) {
+        throw new Error("Unable to find voice broadcast room with Id: " + roomId);
+    }
 
     const stopRecording = async () => {
         const confirmed = await showStopBroadcastingDialog();
@@ -0,0 +1,138 @@
/*
Copyright 2022 The Matrix.org Foundation C.I.C.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

import React from "react";
import { mocked } from "jest-mock";
import { MatrixClient, Room, RoomMember } from "matrix-js-sdk/src/matrix";
import { act, render, RenderResult, screen } from "@testing-library/react";
import userEvent from "@testing-library/user-event";

import {
    VoiceBroadcastPreRecording,
    VoiceBroadcastPreRecordingPip,
    VoiceBroadcastRecordingsStore,
} from "../../../../src/voice-broadcast";
import { flushPromises, stubClient } from "../../../test-utils";
import { requestMediaPermissions } from "../../../../src/utils/media/requestMediaPermissions";
import MediaDeviceHandler, { MediaDeviceKindEnum } from "../../../../src/MediaDeviceHandler";

jest.mock("../../../../src/utils/media/requestMediaPermissions");

// mock RoomAvatar, because it is doing too much fancy stuff
jest.mock("../../../../src/components/views/avatars/RoomAvatar", () => ({
    __esModule: true,
    default: jest.fn().mockImplementation(({ room }) => {
        return <div data-testid="room-avatar">room avatar: { room.name }</div>;
    }),
}));

describe("VoiceBroadcastPreRecordingPip", () => {
    let renderResult: RenderResult;
    let preRecording: VoiceBroadcastPreRecording;
    let recordingsStore: VoiceBroadcastRecordingsStore;
    let client: MatrixClient;
    let room: Room;
    let sender: RoomMember;

    beforeEach(() => {
        client = stubClient();
        room = new Room("!room@example.com", client, client.getUserId() || "");
        sender = new RoomMember(room.roomId, client.getUserId() || "");
        recordingsStore = new VoiceBroadcastRecordingsStore();
        mocked(requestMediaPermissions).mockReturnValue(new Promise<MediaStream>((r) => {
            r({
                getTracks: () => [],
            } as unknown as MediaStream);
        }));
        jest.spyOn(MediaDeviceHandler, "getDevices").mockResolvedValue({
            [MediaDeviceKindEnum.AudioInput]: [
                {
                    deviceId: "d1",
                    label: "Device 1",
                } as MediaDeviceInfo,
                {
                    deviceId: "d2",
                    label: "Device 2",
                } as MediaDeviceInfo,
            ],
            [MediaDeviceKindEnum.AudioOutput]: [],
            [MediaDeviceKindEnum.VideoInput]: [],
        });
        jest.spyOn(MediaDeviceHandler.instance, "setDevice").mockImplementation();
        preRecording = new VoiceBroadcastPreRecording(
            room,
            sender,
            client,
            recordingsStore,
        );
    });

    afterAll(() => {
        jest.resetAllMocks();
    });

    describe("when rendered", () => {
        beforeEach(async () => {
            renderResult = render(<VoiceBroadcastPreRecordingPip
                voiceBroadcastPreRecording={preRecording}
            />);

            await act(async () => {
                flushPromises();
            });
        });

        it("should match the snapshot", () => {
            expect(renderResult.container).toMatchSnapshot();
        });

        describe("and clicking the device label", () => {
            beforeEach(async () => {
                await act(async () => {
                    await userEvent.click(screen.getByText("Default Device"));
                });
            });

            it("should display the device selection", () => {
                expect(screen.queryAllByText("Default Device").length).toBe(2);
                expect(screen.queryByText("Device 1")).toBeInTheDocument();
                expect(screen.queryByText("Device 2")).toBeInTheDocument();
            });

            describe("and selecting a device", () => {
                beforeEach(async () => {
                    await act(async () => {
                        await userEvent.click(screen.getByText("Device 1"));
                    });
                });

                it("should set it as current device", () => {
                    expect(MediaDeviceHandler.instance.setDevice).toHaveBeenCalledWith(
                        "d1",
                        MediaDeviceKindEnum.AudioInput,
                    );
                });

                it("should not show the device selection", () => {
                    expect(screen.queryByText("Default Device")).not.toBeInTheDocument();
                    // expected to be one in the document, displayed in the pip directly
                    expect(screen.queryByText("Device 1")).toBeInTheDocument();
                    expect(screen.queryByText("Device 2")).not.toBeInTheDocument();
                });
            });
        });
    });
});
@@ -16,18 +16,23 @@ limitations under the License.
 //
 
 import React from "react";
-import { render, RenderResult, screen } from "@testing-library/react";
+import { act, render, RenderResult, screen } from "@testing-library/react";
 import userEvent from "@testing-library/user-event";
 import { MatrixClient, MatrixEvent } from "matrix-js-sdk/src/matrix";
 import { sleep } from "matrix-js-sdk/src/utils";
+import { mocked } from "jest-mock";
 
 import {
     VoiceBroadcastInfoState,
     VoiceBroadcastRecording,
     VoiceBroadcastRecordingPip,
 } from "../../../../src/voice-broadcast";
-import { stubClient } from "../../../test-utils";
+import { filterConsole, flushPromises, stubClient } from "../../../test-utils";
 import { mkVoiceBroadcastInfoStateEvent } from "../../utils/test-utils";
+import { requestMediaPermissions } from "../../../../src/utils/media/requestMediaPermissions";
+import MediaDeviceHandler, { MediaDeviceKindEnum } from "../../../../src/MediaDeviceHandler";
+
+jest.mock("../../../../src/utils/media/requestMediaPermissions");
 
 // mock RoomAvatar, because it is doing too much fancy stuff
 jest.mock("../../../../src/components/views/avatars/RoomAvatar", () => ({

@@ -54,31 +59,80 @@ describe("VoiceBroadcastRecordingPip", () => {
     let infoEvent: MatrixEvent;
     let recording: VoiceBroadcastRecording;
     let renderResult: RenderResult;
+    let restoreConsole: () => void;
 
-    const renderPip = (state: VoiceBroadcastInfoState) => {
+    const renderPip = async (state: VoiceBroadcastInfoState) => {
         infoEvent = mkVoiceBroadcastInfoStateEvent(
             roomId,
             state,
-            client.getUserId(),
-            client.getDeviceId(),
+            client.getUserId() || "",
+            client.getDeviceId() || "",
         );
         recording = new VoiceBroadcastRecording(infoEvent, client, state);
+        jest.spyOn(recording, "pause");
+        jest.spyOn(recording, "resume");
         renderResult = render(<VoiceBroadcastRecordingPip recording={recording} />);
+        await act(async () => {
+            flushPromises();
+        });
     };
 
     beforeAll(() => {
         client = stubClient();
+        mocked(requestMediaPermissions).mockReturnValue(new Promise<MediaStream>((r) => {
+            r({
+                getTracks: () => [],
+            } as unknown as MediaStream);
+        }));
+        jest.spyOn(MediaDeviceHandler, "getDevices").mockResolvedValue({
+            [MediaDeviceKindEnum.AudioInput]: [
+                {
+                    deviceId: "d1",
+                    label: "Device 1",
+                } as MediaDeviceInfo,
+                {
+                    deviceId: "d2",
+                    label: "Device 2",
+                } as MediaDeviceInfo,
+            ],
+            [MediaDeviceKindEnum.AudioOutput]: [],
+            [MediaDeviceKindEnum.VideoInput]: [],
+        });
+        jest.spyOn(MediaDeviceHandler.instance, "setDevice").mockImplementation();
+        restoreConsole = filterConsole("Starting load of AsyncWrapper for modal");
+    });
+
+    afterAll(() => {
+        restoreConsole();
     });
 
     describe("when rendering a started recording", () => {
-        beforeEach(() => {
-            renderPip(VoiceBroadcastInfoState.Started);
+        beforeEach(async () => {
+            await renderPip(VoiceBroadcastInfoState.Started);
         });
 
         it("should render as expected", () => {
            expect(renderResult.container).toMatchSnapshot();
         });
 
+        describe("and selecting another input device", () => {
+            beforeEach(async () => {
+                await act(async () => {
+                    await userEvent.click(screen.getByLabelText("Change input device"));
+                    await userEvent.click(screen.getByText("Device 1"));
+                });
+            });
+
+            it("should select the device and pause and resume the broadcast", () => {
+                expect(MediaDeviceHandler.instance.setDevice).toHaveBeenCalledWith(
+                    "d1",
+                    MediaDeviceKindEnum.AudioInput,
+                );
+                expect(recording.pause).toHaveBeenCalled();
+                expect(recording.resume).toHaveBeenCalled();
+            });
+        });
+
         describe("and clicking the pause button", () => {
             beforeEach(async () => {
                 await userEvent.click(screen.getByLabelText("pause voice broadcast"));

@@ -113,8 +167,8 @@ describe("VoiceBroadcastRecordingPip", () => {
         });
 
     describe("when rendering a paused recording", () => {
-        beforeEach(() => {
-            renderPip(VoiceBroadcastInfoState.Paused);
+        beforeEach(async () => {
+            await renderPip(VoiceBroadcastInfoState.Paused);
         });
 
         it("should render as expected", () => {
@@ -0,0 +1,58 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP

exports[`VoiceBroadcastPreRecordingPip when rendered should match the snapshot 1`] = `
<div>
  <div
    class="mx_VoiceBroadcastBody mx_VoiceBroadcastBody--pip"
  >
    <div
      class="mx_VoiceBroadcastHeader"
    >
      <div
        data-testid="room-avatar"
      >
        room avatar: 
        !room@example.com
      </div>
      <div
        class="mx_VoiceBroadcastHeader_content"
      >
        <div
          class="mx_VoiceBroadcastHeader_room"
        >
          !room@example.com
        </div>
        <div
          class="mx_VoiceBroadcastHeader_line mx_VoiceBroadcastHeader_mic--clickable"
        >
          <div
            class="mx_Icon mx_Icon_16"
          />
          <span>
            Default Device
          </span>
        </div>
      </div>
      <div
        class="mx_AccessibleButton"
        role="button"
        tabindex="0"
      >
        <div
          class="mx_Icon mx_Icon_16"
        />
      </div>
    </div>
    <div
      class="mx_AccessibleButton mx_VoiceBroadcastBody_blockButton mx_AccessibleButton_hasKind mx_AccessibleButton_kind_danger"
      role="button"
      tabindex="0"
    >
      <div
        class="mx_Icon mx_Icon_16"
      />
      Go live
    </div>
  </div>
</div>
`;
@@ -60,6 +60,16 @@ exports[`VoiceBroadcastRecordingPip when rendering a paused recording should render as expected 1`] = `
         class="mx_Icon mx_Icon_16"
       />
     </div>
+    <div
+      aria-label="Change input device"
+      class="mx_AccessibleButton"
+      role="button"
+      tabindex="0"
+    >
+      <div
+        class="mx_Icon mx_Icon_16 mx_Icon_alert"
+      />
+    </div>
     <div
       aria-label="Stop Recording"
       class="mx_AccessibleButton mx_VoiceBroadcastControl"

@@ -135,6 +145,16 @@ exports[`VoiceBroadcastRecordingPip when rendering a started recording should render as expected 1`] = `
         class="mx_Icon mx_Icon_16"
       />
     </div>
+    <div
+      aria-label="Change input device"
+      class="mx_AccessibleButton"
+      role="button"
+      tabindex="0"
+    >
+      <div
+        class="mx_Icon mx_Icon_16 mx_Icon_alert"
+      />
+    </div>
     <div
       aria-label="Stop Recording"
       class="mx_AccessibleButton mx_VoiceBroadcastControl"