Show time left for voice broadcast recordings (#9564)

Michael Weimann 2022-11-10 11:53:49 +01:00 committed by GitHub
parent 962e8e0b23
commit f6347d24ef
22 changed files with 469 additions and 145 deletions

View file

@ -0,0 +1,3 @@
<svg width="12" height="14" viewBox="0 0 12 14" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M8 0H4V1.33333H8V0ZM5.33333 8.66667H6.66667V4.66667H5.33333V8.66667ZM10.6867 4.26L11.6333 3.31333C11.3467 2.97333 11.0333 2.65333 10.6933 2.37333L9.74667 3.32C8.71333 2.49333 7.41333 2 6 2C2.68667 2 0 4.68667 0 8C0 11.3133 2.68 14 6 14C9.32 14 12 11.3133 12 8C12 6.58667 11.5067 5.28667 10.6867 4.26ZM6 12.6667C3.42 12.6667 1.33333 10.58 1.33333 8C1.33333 5.42 3.42 3.33333 6 3.33333C8.58 3.33333 10.6667 5.42 10.6667 8C10.6667 10.58 8.58 12.6667 6 12.6667Z" fill="currentColor"/>
</svg>


View file

@ -149,6 +149,31 @@ export function formatSeconds(inSeconds: number): string {
return output;
}
export function formatTimeLeft(inSeconds: number): string {
const hours = Math.floor(inSeconds / (60 * 60)).toFixed(0);
const minutes = Math.floor((inSeconds % (60 * 60)) / 60).toFixed(0);
const seconds = Math.floor(((inSeconds % (60 * 60)) % 60)).toFixed(0);
if (hours !== "0") {
return _t("%(hours)sh %(minutes)sm %(seconds)ss left", {
hours,
minutes,
seconds,
});
}
if (minutes !== "0") {
return _t("%(minutes)sm %(seconds)ss left", {
minutes,
seconds,
});
}
return _t("%(seconds)ss left", {
seconds,
});
}
const MILLIS_IN_DAY = 86400000;
function withinPast24Hours(prevDate: Date, nextDate: Date): boolean {
return Math.abs(prevDate.getTime() - nextDate.getTime()) <= MILLIS_IN_DAY;
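As a quick reference, a few illustrative calls to the new formatTimeLeft helper; the expected outputs match the unit tests added further down in this commit:

formatTimeLeft(23);                         // "23s left"
formatTimeLeft(60 + 23);                    // "1m 23s left"
formatTimeLeft(5 * 60 * 60 + 7 * 60 + 23);  // "5h 7m 23s left"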

View file

@ -182,6 +182,8 @@ export interface IConfigOptions {
voice_broadcast?: {
// length per voice chunk in seconds
chunk_length?: number;
// max voice broadcast length in seconds
max_length?: number;
};
user_notice?: {

View file

@ -47,7 +47,8 @@ export const DEFAULTS: IConfigOptions = {
url: "https://element.io/get-started",
},
voice_broadcast: {
chunk_length: 120, // two minutes
chunk_length: 2 * 60, // two minutes
max_length: 4 * 60 * 60, // four hours
},
};
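Deployments can override both values via the SDK config. A minimal sketch, assuming the config is applied with SdkConfig.put (the same call used in the SdkConfig test below); the values are illustrative:

import SdkConfig from "./SdkConfig"; // import path relative to src/

SdkConfig.put({
    voice_broadcast: {
        chunk_length: 2 * 60, // two-minute chunks
        max_length: 60 * 60,  // cap broadcasts at one hour
    },
});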

View file

@ -18,20 +18,26 @@ import React, { HTMLProps } from "react";
import { formatSeconds } from "../../../DateUtils";
interface IProps extends Pick<HTMLProps<HTMLSpanElement>, "aria-live" | "role"> {
interface Props extends Pick<HTMLProps<HTMLSpanElement>, "aria-live" | "role"> {
seconds: number;
formatFn?: (seconds: number) => string;
}
/**
* Simply converts seconds into minutes and seconds. Note that hours will not be
* displayed, making it possible to see "82:29".
 * Simply converts seconds using formatFn, defaulting to formatSeconds().
* Note that in this case hours will not be displayed, making it possible to see "82:29".
*/
export default class Clock extends React.Component<IProps> {
public constructor(props) {
export default class Clock extends React.Component<Props> {
public static defaultProps = {
formatFn: formatSeconds,
};
public constructor(props: Props) {
super(props);
}
public shouldComponentUpdate(nextProps: Readonly<IProps>): boolean {
public shouldComponentUpdate(nextProps: Readonly<Props>): boolean {
const currentFloor = Math.floor(this.props.seconds);
const nextFloor = Math.floor(nextProps.seconds);
return currentFloor !== nextFloor;
@ -39,7 +45,7 @@ export default class Clock extends React.Component<IProps> {
public render() {
return <span aria-live={this.props["aria-live"]} role={this.props.role} className='mx_Clock'>
{ formatSeconds(this.props.seconds) }
{ this.props.formatFn(this.props.seconds) }
</span>;
}
}
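Downstream, the header component further down in this commit passes formatTimeLeft as the formatter; a minimal usage sketch:

<Clock formatFn={formatTimeLeft} seconds={timeLeft} />

When formatFn is omitted, defaultProps fall back to formatSeconds, so existing Clock usages keep their previous behaviour.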

View file

@ -48,6 +48,9 @@
"%(weekDayName)s, %(monthName)s %(day)s %(time)s": "%(weekDayName)s, %(monthName)s %(day)s %(time)s",
"%(weekDayName)s, %(monthName)s %(day)s %(fullYear)s": "%(weekDayName)s, %(monthName)s %(day)s %(fullYear)s",
"%(weekDayName)s, %(monthName)s %(day)s %(fullYear)s %(time)s": "%(weekDayName)s, %(monthName)s %(day)s %(fullYear)s %(time)s",
"%(hours)sh %(minutes)sm %(seconds)ss left": "%(hours)sh %(minutes)sm %(seconds)ss left",
"%(minutes)sm %(seconds)ss left": "%(minutes)sm %(seconds)ss left",
"%(seconds)ss left": "%(seconds)ss left",
"%(date)s at %(time)s": "%(date)s at %(time)s",
"%(value)sd": "%(value)sd",
"%(value)sh": "%(value)sh",
@ -1886,7 +1889,6 @@
"The conversation continues here.": "The conversation continues here.",
"This room has been replaced and is no longer active.": "This room has been replaced and is no longer active.",
"You do not have permission to post to this room": "You do not have permission to post to this room",
"%(seconds)ss left": "%(seconds)ss left",
"Send voice message": "Send voice message",
"Hide stickers": "Hide stickers",
"Sticker": "Sticker",

View file

@ -18,17 +18,19 @@ import { Optional } from "matrix-events-sdk";
import { TypedEventEmitter } from "matrix-js-sdk/src/models/typed-event-emitter";
import { getChunkLength } from "..";
import { VoiceRecording } from "../../audio/VoiceRecording";
import { IRecordingUpdate, VoiceRecording } from "../../audio/VoiceRecording";
import { concat } from "../../utils/arrays";
import { IDestroyable } from "../../utils/IDestroyable";
import { Singleflight } from "../../utils/Singleflight";
export enum VoiceBroadcastRecorderEvent {
ChunkRecorded = "chunk_recorded",
CurrentChunkLengthUpdated = "current_chunk_length_updated",
}
interface EventMap {
[VoiceBroadcastRecorderEvent.ChunkRecorded]: (chunk: ChunkRecordedPayload) => void;
[VoiceBroadcastRecorderEvent.CurrentChunkLengthUpdated]: (length: number) => void;
}
export interface ChunkRecordedPayload {
@ -46,8 +48,11 @@ export class VoiceBroadcastRecorder
implements IDestroyable {
private headers = new Uint8Array(0);
private chunkBuffer = new Uint8Array(0);
// end position of the previous chunk in seconds
private previousChunkEndTimePosition = 0;
private pagesFromRecorderCount = 0;
// current chunk length in seconds
private currentChunkLength = 0;
public constructor(
private voiceRecording: VoiceRecording,
@ -58,7 +63,11 @@ export class VoiceBroadcastRecorder
}
public async start(): Promise<void> {
return this.voiceRecording.start();
await this.voiceRecording.start();
this.voiceRecording.liveData.onUpdate((data: IRecordingUpdate) => {
this.setCurrentChunkLength(data.timeSeconds - this.previousChunkEndTimePosition);
});
return;
}
/**
@ -68,15 +77,25 @@ export class VoiceBroadcastRecorder
await this.voiceRecording.stop();
// forget about that call, so that we can stop it again later
Singleflight.forgetAllFor(this.voiceRecording);
return this.extractChunk();
const chunk = this.extractChunk();
this.currentChunkLength = 0;
this.previousChunkEndTimePosition = 0;
return chunk;
}
public get contentType(): string {
return this.voiceRecording.contentType;
}
private get chunkLength(): number {
return this.voiceRecording.recorderSeconds - this.previousChunkEndTimePosition;
private setCurrentChunkLength(currentChunkLength: number): void {
if (this.currentChunkLength === currentChunkLength) return;
this.currentChunkLength = currentChunkLength;
this.emit(VoiceBroadcastRecorderEvent.CurrentChunkLengthUpdated, currentChunkLength);
}
public getCurrentChunkLength(): number {
return this.currentChunkLength;
}
private onDataAvailable = (data: ArrayBuffer): void => {
@ -89,6 +108,7 @@ export class VoiceBroadcastRecorder
return;
}
this.setCurrentChunkLength(this.voiceRecording.recorderSeconds - this.previousChunkEndTimePosition);
this.handleData(dataArray);
};
@ -98,7 +118,7 @@ export class VoiceBroadcastRecorder
}
private emitChunkIfTargetLengthReached(): void {
if (this.chunkLength >= this.targetChunkLength) {
if (this.getCurrentChunkLength() >= this.targetChunkLength) {
this.emitAndResetChunk();
}
}
@ -114,9 +134,10 @@ export class VoiceBroadcastRecorder
const currentRecorderTime = this.voiceRecording.recorderSeconds;
const payload: ChunkRecordedPayload = {
buffer: concat(this.headers, this.chunkBuffer),
length: this.chunkLength,
length: this.getCurrentChunkLength(),
};
this.chunkBuffer = new Uint8Array(0);
this.setCurrentChunkLength(0);
this.previousChunkEndTimePosition = currentRecorderTime;
return payload;
}
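Consumers can now track the length of the chunk currently being recorded via the new event; a hedged sketch (the recorder variable and the logging are illustrative):

recorder.on(VoiceBroadcastRecorderEvent.CurrentChunkLengthUpdated, (lengthSeconds: number) => {
    // VoiceBroadcastRecording (below) uses this callback to recompute the remaining broadcast time
    console.log(`current chunk length: ${lengthSeconds}s`);
});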

View file

@ -17,10 +17,13 @@ import { Room, RoomMember } from "matrix-js-sdk/src/matrix";
import { LiveBadge } from "../..";
import { Icon as LiveIcon } from "../../../../res/img/element-icons/live.svg";
import { Icon as MicrophoneIcon } from "../../../../res/img/voip/call-view/mic-on.svg";
import { Icon as TimerIcon } from "../../../../res/img/element-icons/Timer.svg";
import { _t } from "../../../languageHandler";
import RoomAvatar from "../../../components/views/avatars/RoomAvatar";
import AccessibleButton from "../../../components/views/elements/AccessibleButton";
import { Icon as XIcon } from "../../../../res/img/element-icons/cancel-rounded.svg";
import Clock from "../../../components/views/audio_messages/Clock";
import { formatTimeLeft } from "../../../DateUtils";
interface VoiceBroadcastHeaderProps {
live?: boolean;
@ -28,6 +31,7 @@ interface VoiceBroadcastHeaderProps {
room: Room;
sender: RoomMember;
showBroadcast?: boolean;
timeLeft?: number;
showClose?: boolean;
}
@ -38,6 +42,7 @@ export const VoiceBroadcastHeader: React.FC<VoiceBroadcastHeaderProps> = ({
sender,
showBroadcast = false,
showClose = false,
timeLeft,
}) => {
const broadcast = showBroadcast
? <div className="mx_VoiceBroadcastHeader_line">
@ -54,6 +59,13 @@ export const VoiceBroadcastHeader: React.FC<VoiceBroadcastHeaderProps> = ({
</AccessibleButton>
: null;
const timeLeftLine = timeLeft
? <div className="mx_VoiceBroadcastHeader_line">
<TimerIcon className="mx_Icon mx_Icon_16" />
<Clock formatFn={formatTimeLeft} seconds={timeLeft} />
</div>
: null;
return <div className="mx_VoiceBroadcastHeader">
<RoomAvatar room={room} width={32} height={32} />
<div className="mx_VoiceBroadcastHeader_content">
@ -64,6 +76,7 @@ export const VoiceBroadcastHeader: React.FC<VoiceBroadcastHeaderProps> = ({
<MicrophoneIcon className="mx_Icon mx_Icon_16" />
<span>{ sender.name }</span>
</div>
{ timeLeftLine }
{ broadcast }
</div>
{ liveBadge }

View file

@ -35,6 +35,7 @@ interface VoiceBroadcastRecordingPipProps {
export const VoiceBroadcastRecordingPip: React.FC<VoiceBroadcastRecordingPipProps> = ({ recording }) => {
const {
live,
timeLeft,
recordingState,
room,
sender,
@ -58,6 +59,7 @@ export const VoiceBroadcastRecordingPip: React.FC<VoiceBroadcastRecordingPipProp
live={live}
sender={sender}
room={room}
timeLeft={timeLeft}
/>
<hr className="mx_VoiceBroadcastBody_divider" />
<div className="mx_VoiceBroadcastBody_controls">

View file

@ -65,6 +65,13 @@ export const useVoiceBroadcastRecording = (recording: VoiceBroadcastRecording) =
},
);
const [timeLeft, setTimeLeft] = useState(recording.getTimeLeft());
useTypedEventEmitter(
recording,
VoiceBroadcastRecordingEvent.TimeLeftChanged,
setTimeLeft,
);
const live = [
VoiceBroadcastInfoState.Started,
VoiceBroadcastInfoState.Paused,
@ -73,6 +80,7 @@ export const useVoiceBroadcastRecording = (recording: VoiceBroadcastRecording) =
return {
live,
timeLeft,
recordingState,
room,
sender: recording.infoEvent.sender,

View file

@ -41,6 +41,7 @@ export * from "./stores/VoiceBroadcastPreRecordingStore";
export * from "./stores/VoiceBroadcastRecordingsStore";
export * from "./utils/checkVoiceBroadcastPreConditions";
export * from "./utils/getChunkLength";
export * from "./utils/getMaxBroadcastLength";
export * from "./utils/hasRoomLiveVoiceBroadcast";
export * from "./utils/findRoomLiveVoiceBroadcastFromUserAndDevice";
export * from "./utils/shouldDisplayAsVoiceBroadcastRecordingTile";

View file

@ -15,12 +15,20 @@ limitations under the License.
*/
import { logger } from "matrix-js-sdk/src/logger";
import { MatrixClient, MatrixEvent, MatrixEventEvent, RelationType } from "matrix-js-sdk/src/matrix";
import {
EventType,
MatrixClient,
MatrixEvent,
MatrixEventEvent,
MsgType,
RelationType,
} from "matrix-js-sdk/src/matrix";
import { TypedEventEmitter } from "matrix-js-sdk/src/models/typed-event-emitter";
import {
ChunkRecordedPayload,
createVoiceBroadcastRecorder,
getMaxBroadcastLength,
VoiceBroadcastInfoEventContent,
VoiceBroadcastInfoEventType,
VoiceBroadcastInfoState,
@ -33,13 +41,17 @@ import { createVoiceMessageContent } from "../../utils/createVoiceMessageContent
import { IDestroyable } from "../../utils/IDestroyable";
import dis from "../../dispatcher/dispatcher";
import { ActionPayload } from "../../dispatcher/payloads";
import { VoiceBroadcastChunkEvents } from "../utils/VoiceBroadcastChunkEvents";
import { RelationsHelper, RelationsHelperEvent } from "../../events/RelationsHelper";
export enum VoiceBroadcastRecordingEvent {
StateChanged = "liveness_changed",
TimeLeftChanged = "time_left_changed",
}
interface EventMap {
[VoiceBroadcastRecordingEvent.StateChanged]: (state: VoiceBroadcastInfoState) => void;
[VoiceBroadcastRecordingEvent.TimeLeftChanged]: (timeLeft: number) => void;
}
export class VoiceBroadcastRecording
@ -49,6 +61,10 @@ export class VoiceBroadcastRecording
private recorder: VoiceBroadcastRecorder;
private sequence = 1;
private dispatcherRef: string;
private chunkEvents = new VoiceBroadcastChunkEvents();
private chunkRelationHelper: RelationsHelper;
private maxLength: number;
private timeLeft: number;
public constructor(
public readonly infoEvent: MatrixEvent,
@ -56,6 +72,8 @@ export class VoiceBroadcastRecording
initialState?: VoiceBroadcastInfoState,
) {
super();
this.maxLength = getMaxBroadcastLength();
this.timeLeft = this.maxLength;
if (initialState) {
this.state = initialState;
@ -64,11 +82,41 @@ export class VoiceBroadcastRecording
}
// TODO Michael W: listen for state updates
//
this.infoEvent.on(MatrixEventEvent.BeforeRedaction, this.onBeforeRedaction);
this.dispatcherRef = dis.register(this.onAction);
this.chunkRelationHelper = this.initialiseChunkEventRelation();
}
private initialiseChunkEventRelation(): RelationsHelper {
const relationsHelper = new RelationsHelper(
this.infoEvent,
RelationType.Reference,
EventType.RoomMessage,
this.client,
);
relationsHelper.on(RelationsHelperEvent.Add, this.onChunkEvent);
relationsHelper.emitFetchCurrent().catch((err) => {
logger.warn("error fetching server side relation for voice broadcast chunks", err);
// fall back to local events
relationsHelper.emitCurrent();
});
return relationsHelper;
}
private onChunkEvent = (event: MatrixEvent): void => {
if (
(!event.getId() && !event.getTxnId())
|| event.getContent()?.msgtype !== MsgType.Audio // don't add non-audio events
) {
return;
}
this.chunkEvents.addEvent(event);
};
private setInitialStateFromInfoEvent(): void {
const room = this.client.getRoom(this.infoEvent.getRoomId());
const relations = room?.getUnfilteredTimelineSet()?.relations?.getChildEventsForEvent(
@ -82,6 +130,23 @@ export class VoiceBroadcastRecording
}) ? VoiceBroadcastInfoState.Started : VoiceBroadcastInfoState.Stopped;
}
public getTimeLeft(): number {
return this.timeLeft;
}
private async setTimeLeft(timeLeft: number): Promise<void> {
if (timeLeft <= 0) {
// time is up - stop the recording
return await this.stop();
}
// never increase the time left; do nothing if the value is unchanged
if (timeLeft >= this.timeLeft) return;
this.timeLeft = timeLeft;
this.emit(VoiceBroadcastRecordingEvent.TimeLeftChanged, timeLeft);
}
public async start(): Promise<void> {
return this.getRecorder().start();
}
@ -127,20 +192,23 @@ export class VoiceBroadcastRecording
if (!this.recorder) {
this.recorder = createVoiceBroadcastRecorder();
this.recorder.on(VoiceBroadcastRecorderEvent.ChunkRecorded, this.onChunkRecorded);
this.recorder.on(VoiceBroadcastRecorderEvent.CurrentChunkLengthUpdated, this.onCurrentChunkLengthUpdated);
}
return this.recorder;
}
public destroy(): void {
public async destroy(): Promise<void> {
if (this.recorder) {
this.recorder.off(VoiceBroadcastRecorderEvent.ChunkRecorded, this.onChunkRecorded);
this.recorder.stop();
this.recorder.destroy();
}
this.infoEvent.off(MatrixEventEvent.BeforeRedaction, this.onBeforeRedaction);
this.removeAllListeners();
dis.unregister(this.dispatcherRef);
this.chunkEvents = new VoiceBroadcastChunkEvents();
this.chunkRelationHelper.destroy();
}
private onBeforeRedaction = () => {
@ -163,6 +231,10 @@ export class VoiceBroadcastRecording
this.emit(VoiceBroadcastRecordingEvent.StateChanged, this.state);
}
private onCurrentChunkLengthUpdated = (currentChunkLength: number) => {
this.setTimeLeft(this.maxLength - this.chunkEvents.getLengthSeconds() - currentChunkLength);
};
private onChunkRecorded = async (chunk: ChunkRecordedPayload): Promise<void> => {
const { url, file } = await this.uploadFile(chunk);
await this.sendVoiceMessage(chunk, url, file);
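A worked example of the remaining-time arithmetic in onCurrentChunkLengthUpdated, with illustrative numbers:

const maxLength = 4 * 60 * 60;            // default max length: 14400 s
const recordedChunks = 2 * 120;           // two completed 120 s chunks (chunkEvents.getLengthSeconds())
const currentChunkLength = 45;            // seconds into the chunk currently being recorded
const timeLeft = maxLength - recordedChunks - currentChunkLength; // 14115 s
formatTimeLeft(timeLeft);                 // "3h 55m 15s left"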

View file

@ -62,6 +62,10 @@ export class VoiceBroadcastChunkEvents {
}, 0);
}
public getLengthSeconds(): number {
return this.getLength() / 1000;
}
/**
* Returns the accumulated length to (excl.) a chunk event.
*/

View file

@ -0,0 +1,29 @@
/*
Copyright 2022 The Matrix.org Foundation C.I.C.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
import SdkConfig, { DEFAULTS } from "../../SdkConfig";
/**
* Returns the max length for voice broadcasts:
* - Tries to get the value from the voice_broadcast.max_length config
 * - If that fails, from DEFAULTS
 * - If that fails, falls back to four hours
*/
export const getMaxBroadcastLength = (): number => {
return SdkConfig.get("voice_broadcast")?.max_length
|| DEFAULTS.voice_broadcast?.max_length
|| 4 * 60 * 60;
};

View file

@ -27,14 +27,16 @@ describe("SdkConfig", () => {
beforeEach(() => {
SdkConfig.put({
voice_broadcast: {
chunk_length: 1337,
chunk_length: 42,
max_length: 1337,
},
});
});
it("should return the custom config", () => {
const customConfig = JSON.parse(JSON.stringify(DEFAULTS));
customConfig.voice_broadcast.chunk_length = 1337;
customConfig.voice_broadcast.chunk_length = 42;
customConfig.voice_broadcast.max_length = 1337;
expect(SdkConfig.get()).toEqual(customConfig);
});
});

View file

@ -171,7 +171,9 @@ export function createTestClient(): MatrixClient {
setPusher: jest.fn().mockResolvedValue(undefined),
setPushRuleEnabled: jest.fn().mockResolvedValue(undefined),
setPushRuleActions: jest.fn().mockResolvedValue(undefined),
relations: jest.fn().mockRejectedValue(undefined),
relations: jest.fn().mockResolvedValue({
events: [],
}),
isCryptoEnabled: jest.fn().mockReturnValue(false),
hasLazyLoadMembersEnabled: jest.fn().mockReturnValue(false),
isInitialSyncComplete: jest.fn().mockReturnValue(true),

View file

@ -14,7 +14,13 @@ See the License for the specific language governing permissions and
limitations under the License.
*/
import { formatSeconds, formatRelativeTime, formatDuration, formatFullDateNoDayISO } from "../../src/DateUtils";
import {
formatSeconds,
formatRelativeTime,
formatDuration,
formatFullDateNoDayISO,
formatTimeLeft,
} from "../../src/DateUtils";
import { REPEATABLE_DATE } from "../test-utils";
describe("formatSeconds", () => {
@ -99,3 +105,17 @@ describe("formatFullDateNoDayISO", () => {
expect(formatFullDateNoDayISO(REPEATABLE_DATE)).toEqual("2022-11-17T16:58:32.517Z");
});
});
describe("formatTimeLeft", () => {
it.each([
[null, "0s left"],
[0, "0s left"],
[23, "23s left"],
[60 + 23, "1m 23s left"],
[60 * 60, "1h 0m 0s left"],
[60 * 60 + 23, "1h 0m 23s left"],
[5 * 60 * 60 + 7 * 60 + 23, "5h 7m 23s left"],
])("should format %s to %s", (seconds: number, expected: string) => {
expect(formatTimeLeft(seconds)).toBe(expected);
});
});

View file

@ -26,7 +26,19 @@ import {
VoiceBroadcastRecorderEvent,
} from "../../../src/voice-broadcast";
jest.mock("../../../src/audio/VoiceRecording");
// mock VoiceRecording because it contains all the audio APIs
jest.mock("../../../src/audio/VoiceRecording", () => ({
VoiceRecording: jest.fn().mockReturnValue({
disableMaxLength: jest.fn(),
emit: jest.fn(),
liveData: {
onUpdate: jest.fn(),
},
start: jest.fn(),
stop: jest.fn(),
destroy: jest.fn(),
}),
}));
describe("VoiceBroadcastRecorder", () => {
describe("createVoiceBroadcastRecorder", () => {
@ -46,7 +58,6 @@ describe("VoiceBroadcastRecorder", () => {
it("should return a VoiceBroadcastRecorder instance with targetChunkLength from config", () => {
const voiceBroadcastRecorder = createVoiceBroadcastRecorder();
expect(mocked(VoiceRecording).mock.instances[0].disableMaxLength).toHaveBeenCalled();
expect(voiceBroadcastRecorder).toBeInstanceOf(VoiceBroadcastRecorder);
expect(voiceBroadcastRecorder.targetChunkLength).toBe(1337);
});

View file

@ -37,7 +37,16 @@ jest.mock("../../../../src/components/views/avatars/RoomAvatar", () => ({
}),
}));
jest.mock("../../../../src/audio/VoiceRecording");
// mock VoiceRecording because it contains all the audio APIs
jest.mock("../../../../src/audio/VoiceRecording", () => ({
VoiceRecording: jest.fn().mockReturnValue({
disableMaxLength: jest.fn(),
liveData: {
onUpdate: jest.fn(),
},
start: jest.fn(),
}),
}));
describe("VoiceBroadcastRecordingPip", () => {
const roomId = "!room:example.com";

View file

@ -32,6 +32,18 @@ exports[`VoiceBroadcastRecordingPip when rendering a paused recording should ren
@userId:matrix.org
</span>
</div>
<div
class="mx_VoiceBroadcastHeader_line"
>
<div
class="mx_Icon mx_Icon_16"
/>
<span
class="mx_Clock"
>
4h 0m 0s left
</span>
</div>
</div>
<div
class="mx_LiveBadge"
@ -105,6 +117,18 @@ exports[`VoiceBroadcastRecordingPip when rendering a started recording should re
@userId:matrix.org
</span>
</div>
<div
class="mx_VoiceBroadcastHeader_line"
>
<div
class="mx_Icon mx_Icon_16"
/>
<span
class="mx_Clock"
>
4h 0m 0s left
</span>
</div>
</div>
<div
class="mx_LiveBadge"

View file

@ -31,8 +31,9 @@ import { uploadFile } from "../../../src/ContentMessages";
import { IEncryptedFile } from "../../../src/customisations/models/IMediaEventContent";
import { createVoiceMessageContent } from "../../../src/utils/createVoiceMessageContent";
import {
ChunkRecordedPayload,
createVoiceBroadcastRecorder,
getChunkLength,
getMaxBroadcastLength,
VoiceBroadcastInfoEventContent,
VoiceBroadcastInfoEventType,
VoiceBroadcastInfoState,
@ -43,12 +44,29 @@ import {
} from "../../../src/voice-broadcast";
import { mkEvent, mkStubRoom, stubClient } from "../../test-utils";
import dis from "../../../src/dispatcher/dispatcher";
import { VoiceRecording } from "../../../src/audio/VoiceRecording";
jest.mock("../../../src/voice-broadcast/audio/VoiceBroadcastRecorder", () => ({
...jest.requireActual("../../../src/voice-broadcast/audio/VoiceBroadcastRecorder") as object,
createVoiceBroadcastRecorder: jest.fn(),
}));
// mock VoiceRecording because it contains all the audio APIs
jest.mock("../../../src/audio/VoiceRecording", () => ({
VoiceRecording: jest.fn().mockReturnValue({
disableMaxLength: jest.fn(),
liveData: {
onUpdate: jest.fn(),
},
off: jest.fn(),
on: jest.fn(),
start: jest.fn(),
stop: jest.fn(),
destroy: jest.fn(),
contentType: "audio/ogg",
}),
}));
jest.mock("../../../src/ContentMessages", () => ({
uploadFile: jest.fn(),
}));
@ -61,13 +79,13 @@ describe("VoiceBroadcastRecording", () => {
const roomId = "!room:example.com";
const uploadedUrl = "mxc://example.com/vb";
const uploadedFile = { file: true } as unknown as IEncryptedFile;
const maxLength = getMaxBroadcastLength();
let room: Room;
let client: MatrixClient;
let infoEvent: MatrixEvent;
let voiceBroadcastRecording: VoiceBroadcastRecording;
let onStateChanged: (state: VoiceBroadcastInfoState) => void;
let voiceBroadcastRecorder: VoiceBroadcastRecorder;
let onChunkRecorded: (chunk: ChunkRecordedPayload) => Promise<void>;
const mkVoiceBroadcastInfoEvent = (content: VoiceBroadcastInfoEventContent) => {
return mkEvent({
@ -83,6 +101,7 @@ describe("VoiceBroadcastRecording", () => {
voiceBroadcastRecording = new VoiceBroadcastRecording(infoEvent, client);
voiceBroadcastRecording.on(VoiceBroadcastRecordingEvent.StateChanged, onStateChanged);
jest.spyOn(voiceBroadcastRecording, "destroy");
jest.spyOn(voiceBroadcastRecording, "emit");
jest.spyOn(voiceBroadcastRecording, "removeAllListeners");
};
@ -111,6 +130,58 @@ describe("VoiceBroadcastRecording", () => {
});
};
const itShouldSendAVoiceMessage = (data: number[], size: number, duration: number, sequence: number) => {
// durations in event content are given in milliseconds
duration *= 1000;
it("should send a voice message", () => {
expect(uploadFile).toHaveBeenCalledWith(
client,
roomId,
new Blob([new Uint8Array(data)], { type: voiceBroadcastRecorder.contentType }),
);
expect(mocked(client.sendMessage)).toHaveBeenCalledWith(
roomId,
{
body: "Voice message",
file: {
file: true,
},
info: {
duration,
mimetype: "audio/ogg",
size,
},
["m.relates_to"]: {
event_id: infoEvent.getId(),
rel_type: "m.reference",
},
msgtype: "m.audio",
["org.matrix.msc1767.audio"]: {
duration,
waveform: undefined,
},
["org.matrix.msc1767.file"]: {
file: {
file: true,
},
mimetype: "audio/ogg",
name: "Voice message.ogg",
size,
url: "mxc://example.com/vb",
},
["org.matrix.msc1767.text"]: "Voice message",
["org.matrix.msc3245.voice"]: {},
url: "mxc://example.com/vb",
["io.element.voice_broadcast_chunk"]: {
sequence,
},
},
);
});
};
beforeEach(() => {
client = stubClient();
room = mkStubRoom(roomId, "Test Room", client);
@ -120,23 +191,11 @@ describe("VoiceBroadcastRecording", () => {
}
});
onStateChanged = jest.fn();
voiceBroadcastRecorder = {
contentType: "audio/ogg",
on: jest.fn(),
off: jest.fn(),
start: jest.fn(),
stop: jest.fn(),
} as unknown as VoiceBroadcastRecorder;
voiceBroadcastRecorder = new VoiceBroadcastRecorder(new VoiceRecording(), getChunkLength());
jest.spyOn(voiceBroadcastRecorder, "start");
jest.spyOn(voiceBroadcastRecorder, "stop");
jest.spyOn(voiceBroadcastRecorder, "destroy");
mocked(createVoiceBroadcastRecorder).mockReturnValue(voiceBroadcastRecorder);
onChunkRecorded = jest.fn();
mocked(voiceBroadcastRecorder.on).mockImplementation((event: any, listener: any): VoiceBroadcastRecorder => {
if (event === VoiceBroadcastRecorderEvent.ChunkRecorded) {
onChunkRecorded = listener;
}
return voiceBroadcastRecorder;
});
mocked(uploadFile).mockResolvedValue({
url: uploadedUrl,
@ -240,68 +299,64 @@ describe("VoiceBroadcastRecording", () => {
itShouldBeInState(VoiceBroadcastInfoState.Stopped);
});
describe("and a chunk time update occurs", () => {
beforeEach(() => {
voiceBroadcastRecorder.emit(VoiceBroadcastRecorderEvent.CurrentChunkLengthUpdated, 10);
});
it("should update time left", () => {
expect(voiceBroadcastRecording.getTimeLeft()).toBe(maxLength - 10);
expect(voiceBroadcastRecording.emit).toHaveBeenCalledWith(
VoiceBroadcastRecordingEvent.TimeLeftChanged,
maxLength - 10,
);
});
describe("and a chunk time update occurs, that would increase time left", () => {
beforeEach(() => {
mocked(voiceBroadcastRecording.emit).mockClear();
voiceBroadcastRecorder.emit(VoiceBroadcastRecorderEvent.CurrentChunkLengthUpdated, 5);
});
it("should not change time left", () => {
expect(voiceBroadcastRecording.getTimeLeft()).toBe(maxLength - 10);
expect(voiceBroadcastRecording.emit).not.toHaveBeenCalled();
});
});
});
describe("and a chunk has been recorded", () => {
beforeEach(async () => {
await onChunkRecorded({
voiceBroadcastRecorder.emit(
VoiceBroadcastRecorderEvent.ChunkRecorded,
{
buffer: new Uint8Array([1, 2, 3]),
length: 23,
});
});
it("should send a voice message", () => {
expect(uploadFile).toHaveBeenCalledWith(
client,
roomId,
new Blob([new Uint8Array([1, 2, 3])], { type: voiceBroadcastRecorder.contentType }),
);
expect(mocked(client.sendMessage)).toHaveBeenCalledWith(
roomId,
{
body: "Voice message",
file: {
file: true,
},
info: {
duration: 23000,
mimetype: "audio/ogg",
size: 3,
},
["m.relates_to"]: {
event_id: infoEvent.getId(),
rel_type: "m.reference",
},
msgtype: "m.audio",
["org.matrix.msc1767.audio"]: {
duration: 23000,
waveform: undefined,
},
["org.matrix.msc1767.file"]: {
file: {
file: true,
},
mimetype: "audio/ogg",
name: "Voice message.ogg",
size: 3,
url: "mxc://example.com/vb",
},
["org.matrix.msc1767.text"]: "Voice message",
["org.matrix.msc3245.voice"]: {},
url: "mxc://example.com/vb",
["io.element.voice_broadcast_chunk"]: {
sequence: 1,
},
},
);
});
itShouldSendAVoiceMessage([1, 2, 3], 3, 23, 1);
describe("and another chunk has been recorded, that exceeds the max time", () => {
beforeEach(() => {
mocked(voiceBroadcastRecorder.stop).mockResolvedValue({
buffer: new Uint8Array([23, 24, 25]),
length: getMaxBroadcastLength(),
});
voiceBroadcastRecorder.emit(
VoiceBroadcastRecorderEvent.CurrentChunkLengthUpdated,
getMaxBroadcastLength(),
);
});
itShouldBeInState(VoiceBroadcastInfoState.Stopped);
itShouldSendAVoiceMessage([23, 24, 25], 3, getMaxBroadcastLength(), 2);
});
});
describe("and calling stop", () => {
beforeEach(async () => {
await onChunkRecorded({
buffer: new Uint8Array([1, 2, 3]),
length: 23,
});
mocked(voiceBroadcastRecorder.stop).mockResolvedValue({
buffer: new Uint8Array([4, 5, 6]),
length: 42,
@ -309,52 +364,7 @@ describe("VoiceBroadcastRecording", () => {
await voiceBroadcastRecording.stop();
});
it("should send the last chunk", () => {
expect(uploadFile).toHaveBeenCalledWith(
client,
roomId,
new Blob([new Uint8Array([4, 5, 6])], { type: voiceBroadcastRecorder.contentType }),
);
expect(mocked(client.sendMessage)).toHaveBeenCalledWith(
roomId,
{
body: "Voice message",
file: {
file: true,
},
info: {
duration: 42000,
mimetype: "audio/ogg",
size: 3,
},
["m.relates_to"]: {
event_id: infoEvent.getId(),
rel_type: "m.reference",
},
msgtype: "m.audio",
["org.matrix.msc1767.audio"]: {
duration: 42000,
waveform: undefined,
},
["org.matrix.msc1767.file"]: {
file: {
file: true,
},
mimetype: "audio/ogg",
name: "Voice message.ogg",
size: 3,
url: "mxc://example.com/vb",
},
["org.matrix.msc1767.text"]: "Voice message",
["org.matrix.msc3245.voice"]: {},
url: "mxc://example.com/vb",
["io.element.voice_broadcast_chunk"]: {
sequence: 2,
},
},
);
});
itShouldSendAVoiceMessage([4, 5, 6], 3, 42, 1);
});
describe.each([
@ -384,10 +394,7 @@ describe("VoiceBroadcastRecording", () => {
it("should stop the recorder and remove all listeners", () => {
expect(mocked(voiceBroadcastRecorder.stop)).toHaveBeenCalled();
expect(mocked(voiceBroadcastRecorder.off)).toHaveBeenCalledWith(
VoiceBroadcastRecorderEvent.ChunkRecorded,
onChunkRecorded,
);
expect(mocked(voiceBroadcastRecorder.destroy)).toHaveBeenCalled();
expect(mocked(voiceBroadcastRecording.removeAllListeners)).toHaveBeenCalled();
});
});

View file

@ -0,0 +1,60 @@
/*
Copyright 2022 The Matrix.org Foundation C.I.C.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
import { mocked } from "jest-mock";
import SdkConfig, { DEFAULTS } from "../../../src/SdkConfig";
import { getMaxBroadcastLength } from "../../../src/voice-broadcast";
jest.mock("../../../src/SdkConfig");
describe("getMaxBroadcastLength", () => {
afterEach(() => {
jest.resetAllMocks();
});
describe("when there is a value provided by Sdk config", () => {
beforeEach(() => {
mocked(SdkConfig.get).mockReturnValue({ max_length: 42 });
});
it("should return this value", () => {
expect(getMaxBroadcastLength()).toBe(42);
});
});
describe("when Sdk config does not provide a value", () => {
beforeEach(() => {
DEFAULTS.voice_broadcast = {
max_length: 23,
};
});
it("should return this value", () => {
expect(getMaxBroadcastLength()).toBe(23);
});
});
describe("if there are no defaults", () => {
beforeEach(() => {
DEFAULTS.voice_broadcast = undefined;
});
it("should return the fallback value", () => {
expect(getMaxBroadcastLength()).toBe(4 * 60 * 60);
});
});
});