fix: unable to send audio messages on Telegram (#4493)
- Changed the library used to record audio (videojs-record).
- Changed the audio recording format to .ogg, which keeps compatibility when sending to channels: Telegram, WhatsApp, Web Widget and API.
- Changed the recording wave visualization to bars, the same format used by WhatsApp and Telegram.

Fixes: #4115
parent c1cc94e37c
commit b3c8c83830
7 changed files with 184 additions and 175 deletions
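Note: the change is spread across several hunks below, so here is a condensed, standalone sketch of the recording setup this commit moves to: videojs-record driving the opus-recorder audio engine against a plain <audio> element, which is what produces the Ogg/Opus output Telegram accepts. Option values, plugin paths and event names are taken from the diff itself; the element id and the commented-out upload call are placeholders only.

import videojs from 'video.js';
import Recorder from 'opus-recorder';
import encoderWorker from 'opus-recorder/dist/encoderWorker.min';
import 'videojs-record/dist/videojs.record.js';
import 'videojs-record/dist/plugins/videojs.record.opus-recorder.js';

// videojs-record's opus-recorder plugin looks the encoder up on window.
window.Recorder = Recorder;

// The webpack change below makes the encoderWorker import resolve to a file URL,
// so the Opus encoder can run in a Web Worker.
const player = videojs(
  '#audio-wave', // placeholder id; the component binds to an <audio> tag with this id
  {
    controls: true,
    bigPlayButton: false,
    fluid: false,
    plugins: {
      record: {
        audio: true,
        video: false,
        maxLength: 300, // seconds
        audioEngine: 'opus-recorder', // records straight to Ogg/Opus
        audioWorkerURL: encoderWorker,
        audioChannels: 1,
        audioSampleRate: 48000,
        audioBitRate: 128,
      },
    },
  },
  () => {
    // ask for microphone access once the player is ready
    player.record().getDevice();
  }
);

player.on('deviceReady', () => player.record().start());
player.on('finishRecord', () => {
  // player.recordedData is an Ogg/Opus blob; wrap it as a File before uploading
  const file = new File([player.recordedData], player.recordedData.name, {
    type: player.recordedData.type,
  });
  // onFileUpload(file); // hand off to the attachment upload flow
});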
@@ -27,6 +27,16 @@
padding: 0 $space-small;
}

.video-js {
background: transparent;
// Override min-height : 50px in foundation
//
max-height: $space-mega * 2.4;
min-height: 4.8rem;
padding: var(--space-normal) 0 0;
resize: none;
}

>textarea {
@include ghost-input();
@include margin(0);
@@ -1,16 +1,29 @@
<template>
<div class="audio-wave-wrapper">
<div id="audio-wave"></div>
<audio id="audio-wave" class="video-js vjs-fill vjs-default-skin"></audio>
</div>
</template>

<script>
import WaveSurfer from 'wavesurfer.js';
import MicrophonePlugin from 'wavesurfer.js/dist/plugin/wavesurfer.microphone.js';
import RecordRTC from 'recordrtc';
import 'video.js/dist/video-js.css';
import 'videojs-record/dist/css/videojs.record.css';

import videojs from 'video.js';

import inboxMixin from '../../../../shared/mixins/inboxMixin';
import alertMixin from '../../../../shared/mixins/alertMixin';

import Recorder from 'opus-recorder';
import encoderWorker from 'opus-recorder/dist/encoderWorker.min';

import WaveSurfer from 'wavesurfer.js';
import MicrophonePlugin from 'wavesurfer.js/dist/plugin/wavesurfer.microphone.js';
import 'videojs-wavesurfer/dist/videojs.wavesurfer.js';

import 'videojs-record/dist/videojs.record.js';
import 'videojs-record/dist/plugins/videojs.record.opus-recorder.js';
import { format, addSeconds } from 'date-fns';

WaveSurfer.microphone = MicrophonePlugin;

export default {
@@ -18,104 +31,116 @@ export default {
mixins: [inboxMixin, alertMixin],
data() {
return {
wavesurfer: false,
recorder: false,
recordingInterval: false,
recordingDateStarted: new Date().getTime(),
timeDuration: '00:00',
player: false,
recordingDateStarted: new Date(0),
initialTimeDuration: '00:00',
options: {
container: '#audio-wave',
backend: 'WebAudio',
interact: true,
cursorWidth: 1,
plugins: [
WaveSurfer.microphone.create({
bufferSize: 4096,
numberOfInputChannels: 1,
numberOfOutputChannels: 1,
constraints: {
video: false,
audio: true,
},
}),
],
},
optionsRecorder: {
type: 'audio',
mimeType: 'audio/wav',
disableLogs: true,
recorderType: RecordRTC.StereoAudioRecorder,
sampleRate: 44100,
numberOfAudioChannels: 2,
checkForInactiveTracks: true,
bufferSize: 4096,
recorderOptions: {
debug: true,
controls: true,
bigPlayButton: false,
fluid: false,
controlBar: {
deviceButton: false,
fullscreenToggle: false,
cameraButton: false,
volumePanel: false,
},
plugins: {
wavesurfer: {
backend: 'WebAudio',
waveColor: '#1f93ff',
progressColor: 'rgb(25, 118, 204)',
cursorColor: 'rgba(43, 51, 63, 0.7)',
backgroundColor: 'none',
barWidth: 1,
cursorWidth: 1,
hideScrollbar: true,
plugins: [
WaveSurfer.microphone.create({
bufferSize: 4096,
numberOfInputChannels: 1,
numberOfOutputChannels: 1,
constraints: {
video: false,
audio: true,
},
}),
],
},
record: {
audio: true,
video: false,
displayMilliseconds: false,
maxLength: 300,
audioEngine: 'opus-recorder',
audioWorkerURL: encoderWorker,
audioChannels: 1,
audioSampleRate: 48000,
audioBitRate: 128,
},
},
},
};
},
computed: {
isRecording() {
if (this.recorder) {
return this.recorder.getState() === 'recording';
}
return false;
return this.player && this.player.record().isRecording();
},
},
mounted() {
this.wavesurfer = WaveSurfer.create(this.options);
this.wavesurfer.on('play', this.playingRecorder);
this.wavesurfer.on('pause', this.pausedRecorder);
this.wavesurfer.microphone.on('deviceReady', this.startRecording);
this.wavesurfer.microphone.on('deviceError', this.deviceError);
this.wavesurfer.microphone.start();
this.fireStateRecorderTimerChanged(this.initialTimeDuration);
window.Recorder = Recorder;
this.fireProgressRecord(this.initialTimeDuration);
this.player = videojs('#audio-wave', this.recorderOptions, () => {
this.$nextTick(() => {
this.player.record().getDevice();
});
});
this.player.on('deviceReady', this.deviceReady);
this.player.on('deviceError', this.deviceError);
this.player.on('startRecord', this.startRecord);
this.player.on('stopRecord', this.stopRecord);
this.player.on('progressRecord', this.progressRecord);
this.player.on('finishRecord', this.finishRecord);
this.player.on('playbackFinish', this.playbackFinish);
},
beforeDestroy() {
if (this.recorder) {
this.recorder.destroy();
if (this.player) {
this.player.dispose();
}
if (this.wavesurfer) {
this.wavesurfer.destroy();
if (window.Recorder) {
window.Recorder = undefined;
}
},
methods: {
startRecording(stream) {
this.recorder = RecordRTC(stream, this.optionsRecorder);
this.recorder.onStateChanged = this.onStateRecorderChanged;
this.recorder.startRecording();
deviceReady() {
this.player.record().start();
},
startRecord() {
this.fireStateRecorderChanged('recording');
},
stopRecord() {
this.fireStateRecorderChanged('stopped');
},
finishRecord() {
const file = new File(
[this.player.recordedData],
this.player.recordedData.name,
{ type: this.player.recordedData.type }
);
this.fireRecorderBlob(file);
},
progressRecord() {
this.fireProgressRecord(this.formatTimeProgress());
},
stopAudioRecording() {
if (this.isRecording) {
this.recorder.stopRecording(() => {
this.wavesurfer.microphone.stopDevice();
this.wavesurfer.loadBlob(this.recorder.getBlob());
this.wavesurfer.stop();
this.fireRecorderBlob(this.getAudioFile());
});
}
this.player.record().stop();
},
getAudioFile() {
if (this.hasAudio()) {
return new File([this.recorder.getBlob()], this.getAudioFileName(), {
type: 'audio/wav',
});
}
return false;
},
hasAudio() {
return !(this.isRecording || this.wavesurfer.isPlaying());
},
playingRecorder() {
this.fireStateRecorderChanged('playing');
},
pausedRecorder() {
this.fireStateRecorderChanged('paused');
},
deviceError(err) {
deviceError() {
const deviceError = this.player.deviceErrorCode;
const deviceErrorName = deviceError?.name.toLowerCase();
if (
err?.name &&
(err.name.toLowerCase().includes('notallowederror') ||
err.name.toLowerCase().includes('permissiondeniederror'))
deviceErrorName?.includes('notallowederror') ||
deviceErrorName?.includes('permissiondeniederror')
) {
this.showAlert(
this.$t('CONVERSATION.REPLYBOX.TIP_AUDIORECORDER_PERMISSION')
@@ -127,56 +152,37 @@ export default {
);
}
},
onStateRecorderChanged(state) {
// recording stopped inactive destroyed
switch (state) {
case 'recording':
this.timerDurationChanged();
break;
case 'stopped':
this.timerDurationChanged();
break;
default:
break;
}
this.fireStateRecorderChanged(state);
},
timerDurationChanged() {
if (this.isRecording) {
this.recordingInterval = setInterval(() => {
this.calculateTimeDuration(
(new Date().getTime() - this.recordingDateStarted) / 1000
);
this.fireStateRecorderTimerChanged(this.timeDuration);
}, 1000);
} else {
clearInterval(this.recordingInterval);
}
},
calculateTimeDuration(secs) {
let hr = Math.floor(secs / 3600);
let min = Math.floor((secs - hr * 3600) / 60);
let sec = Math.floor(secs - hr * 3600 - min * 60);
if (min < 10) {
min = '0' + min;
}
if (sec < 10) {
sec = '0' + sec;
}
if (hr <= 0) {
this.timeDuration = min + ':' + sec;
} else {
if (hr < 10) {
hr = '0' + hr;
}
this.timeDuration = hr + ':' + min + ':' + sec;
}
formatTimeProgress() {
return format(
addSeconds(
new Date(this.recordingDateStarted.getTimezoneOffset() * 1000 * 60),
this.player.record().getDuration()
),
'mm:ss'
);
},
playPause() {
this.wavesurfer.playPause();
if (this.player.wavesurfer().surfer.isPlaying()) {
this.fireStateRecorderChanged('paused');
} else {
this.fireStateRecorderChanged('playing');
}
this.player.wavesurfer().surfer.playPause();
},
play() {
this.fireStateRecorderChanged('playing');
this.player.wavesurfer().play();
},
pause() {
this.fireStateRecorderChanged('paused');
this.player.wavesurfer().pause();
},
playbackFinish() {
this.fireStateRecorderChanged('paused');
this.player.wavesurfer().pause();
},
fireRecorderBlob(blob) {
this.$emit('recorder-blob', {
this.$emit('finish-record', {
name: blob.name,
type: blob.type,
size: blob.size,
@@ -186,29 +192,8 @@ export default {
fireStateRecorderChanged(state) {
this.$emit('state-recorder-changed', state);
},
fireStateRecorderTimerChanged(duration) {
this.$emit('state-recorder-timer-changed', duration);
},
getAudioFileName() {
const d = new Date();
return `audio-${d.getFullYear()}-${d.getMonth()}-${d.getDate()}-${this.getRandomString()}.wav`;
},
getRandomString() {
if (
window.crypto &&
window.crypto.getRandomValues &&
navigator.userAgent.indexOf('Safari') === -1
) {
let a = window.crypto.getRandomValues(new Uint32Array(3));
let token = '';
for (let i = 0, l = a.length; i < l; i += 1) {
token += a[i].toString(36);
}
return token.toLowerCase();
}
return (Math.random() * new Date().getTime())
.toString(36)
.replace(/\./g, '');
fireProgressRecord(duration) {
this.$emit('state-recorder-progress-changed', duration);
},
},
};
@@ -217,7 +202,9 @@ export default {
<style lang="scss">
.audio-wave-wrapper {
min-height: 8rem;
max-height: 12rem;
overflow: hidden;
height: 8rem;
}
.video-js .vjs-control-bar {
background-color: transparent;
}
</style>
@@ -36,9 +36,9 @@
<woot-audio-recorder
v-if="showAudioRecorderEditor"
ref="audioRecorderInput"
@state-recorder-timer-changed="onStateRecorderTimerChanged"
@state-recorder-progress-changed="onStateProgressRecorderChanged"
@state-recorder-changed="onStateRecorderChanged"
@recorder-blob="onRecorderBlob"
@finish-record="onFinishRecorder"
/>
<resizable-text-area
v-else-if="!showRichContentEditor"
@@ -103,7 +103,7 @@
:show-emoji-picker="showEmojiPicker"
:on-send="sendMessage"
:is-send-disabled="isReplyButtonDisabled"
:recording-audio-duration-text="recordingAudioDuration"
:recording-audio-duration-text="recordingAudioDurationText"
:recording-audio-state="recordingAudioState"
:is-recording-audio="isRecordingAudio"
:set-format-mode="setFormatMode"
@@ -193,7 +193,7 @@ export default {
attachedFiles: [],
isRecordingAudio: false,
recordingAudioState: '',
recordingAudioDuration: '',
recordingAudioDurationText: '',
isUploading: false,
replyType: REPLY_EDITOR_MODES.REPLY,
mentionSearchKey: '',
@@ -585,11 +585,13 @@ export default {
}
},
toggleAudioRecorderPlayPause() {
if (this.isRecordingAudio && !this.isRecorderAudioStopped) {
this.isRecorderAudioStopped = true;
this.$refs.audioRecorderInput.stopAudioRecording();
} else if (this.isRecordingAudio && this.isRecorderAudioStopped) {
this.$refs.audioRecorderInput.playPause();
if (this.isRecordingAudio) {
if (!this.isRecorderAudioStopped) {
this.isRecorderAudioStopped = true;
this.$refs.audioRecorderInput.stopAudioRecording();
} else if (this.isRecorderAudioStopped) {
this.$refs.audioRecorderInput.playPause();
}
}
},
hideEmojiPicker() {
@@ -612,19 +614,17 @@ export default {
onFocus() {
this.isFocused = true;
},
onStateRecorderTimerChanged(time) {
this.recordingAudioDuration = time;
onStateProgressRecorderChanged(duration) {
this.recordingAudioDurationText = duration;
},
onStateRecorderChanged(state) {
this.recordingAudioState = state;
if (state.includes('notallowederror')) {
if (state && 'notallowederror'.includes(state)) {
this.toggleAudioRecorder();
}
},
onRecorderBlob(file) {
if (file) {
this.onFileUpload(file);
}
onFinishRecorder(file) {
return file && this.onFileUpload(file);
},
toggleTyping(status) {
const conversationId = this.currentChat.id;
@@ -90,9 +90,9 @@
"more-vertical-outline": "M12 7.75a1.75 1.75 0 1 1 0-3.5 1.75 1.75 0 0 1 0 3.5ZM12 13.75a1.75 1.75 0 1 1 0-3.5 1.75 1.75 0 0 1 0 3.5ZM10.25 18a1.75 1.75 0 1 0 3.5 0 1.75 1.75 0 0 0-3.5 0Z",
"microphone-outline": "M12,2A3,3 0 0,1 15,5V11A3,3 0 0,1 12,14A3,3 0 0,1 9,11V5A3,3 0 0,1 12,2M19,11C19,14.53 16.39,17.44 13,17.93V21H11V17.93C7.61,17.44 5,14.53 5,11H7A5,5 0 0,0 12,16A5,5 0 0,0 17,11H19Z",
"microphone-off-outline": "M19,11C19,12.19 18.66,13.3 18.1,14.28L16.87,13.05C17.14,12.43 17.3,11.74 17.3,11H19M15,11.16L9,5.18V5A3,3 0 0,1 12,2A3,3 0 0,1 15,5V11L15,11.16M4.27,3L21,19.73L19.73,21L15.54,16.81C14.77,17.27 13.91,17.58 13,17.72V21H11V17.72C7.72,17.23 5,14.41 5,11H6.7C6.7,14 9.24,16.1 12,16.1C12.81,16.1 13.6,15.91 14.31,15.58L12.65,13.92L12,14A3,3 0 0,1 9,11V10.28L3,4.27L4.27,3Z",
"microphone-stop-outline": "M12,2A10,10 0 0,0 2,12A10,10 0 0,0 12,22A10,10 0 0,0 22,12A10,10 0 0,0 12,2M12,4C16.41,4 20,7.59 20,12C20,16.41 16.41,20 12,20C7.59,20 4,16.41 4,12C4,7.59 7.59,4 12,4M9,9V15H15V9",
"microphone-pause-outline": "M13,16V8H15V16H13M9,16V8H11V16H9M12,2A10,10 0 0,1 22,12A10,10 0 0,1 12,22A10,10 0 0,1 2,12A10,10 0 0,1 12,2M12,4A8,8 0 0,0 4,12A8,8 0 0,0 12,20A8,8 0 0,0 20,12A8,8 0 0,0 12,4Z",
"microphone-play-outline": "M12,20C7.59,20 4,16.41 4,12C4,7.59 7.59,4 12,4C16.41,4 20,7.59 20,12C20,16.41 16.41,20 12,20M12,2A10,10 0 0,0 2,12A10,10 0 0,0 12,22A10,10 0 0,0 22,12A10,10 0 0,0 12,2M10,16.5L16,12L10,7.5V16.5Z",
"microphone-stop-outline": "M18,18H6V6H18V18Z",
"microphone-pause-outline": "M14,19H18V5H14M6,19H10V5H6V19Z",
"microphone-play-outline": "M8,5.14V19.14L19,12.14L8,5.14Z",
"number-symbol-outline": "M10.987 2.89a.75.75 0 1 0-1.474-.28L8.494 7.999 3.75 8a.75.75 0 1 0 0 1.5l4.46-.002-.946 5-4.514.002a.75.75 0 0 0 0 1.5l4.23-.002-.967 5.116a.75.75 0 1 0 1.474.278l1.02-5.395 5.474-.002-.968 5.119a.75.75 0 1 0 1.474.278l1.021-5.398 4.742-.002a.75.75 0 1 0 0-1.5l-4.458.002.946-5 4.512-.002a.75.75 0 1 0 0-1.5l-4.229.002.966-5.104a.75.75 0 0 0-1.474-.28l-1.018 5.385-5.474.002.966-5.107Zm-1.25 6.608 5.474-.003-.946 5-5.474.002.946-5Z",
"open-outline": "M6.25 4.5A1.75 1.75 0 0 0 4.5 6.25v11.5c0 .966.783 1.75 1.75 1.75h11.5a1.75 1.75 0 0 0 1.75-1.75v-4a.75.75 0 0 1 1.5 0v4A3.25 3.25 0 0 1 17.75 21H6.25A3.25 3.25 0 0 1 3 17.75V6.25A3.25 3.25 0 0 1 6.25 3h4a.75.75 0 0 1 0 1.5h-4ZM13 3.75a.75.75 0 0 1 .75-.75h6.5a.75.75 0 0 1 .75.75v6.5a.75.75 0 0 1-1.5 0V5.56l-5.22 5.22a.75.75 0 0 1-1.06-1.06l5.22-5.22h-4.69a.75.75 0 0 1-.75-.75Z",
"people-outline": "M4 13.999 13 14a2 2 0 0 1 1.995 1.85L15 16v1.5C14.999 21 11.284 22 8.5 22c-2.722 0-6.335-.956-6.495-4.27L2 17.5v-1.501c0-1.054.816-1.918 1.85-1.995L4 14ZM15.22 14H20c1.054 0 1.918.816 1.994 1.85L22 16v1c-.001 3.062-2.858 4-5 4a7.16 7.16 0 0 1-2.14-.322c.336-.386.607-.827.802-1.327A6.19 6.19 0 0 0 17 19.5l.267-.006c.985-.043 3.086-.363 3.226-2.289L20.5 17v-1a.501.501 0 0 0-.41-.492L20 15.5h-4.051a2.957 2.957 0 0 0-.595-1.34L15.22 14H20h-4.78ZM4 15.499l-.1.01a.51.51 0 0 0-.254.136.506.506 0 0 0-.136.253l-.01.101V17.5c0 1.009.45 1.722 1.417 2.242.826.445 2.003.714 3.266.753l.317.005.317-.005c1.263-.039 2.439-.308 3.266-.753.906-.488 1.359-1.145 1.412-2.057l.005-.186V16a.501.501 0 0 0-.41-.492L13 15.5l-9-.001ZM8.5 3a4.5 4.5 0 1 1 0 9 4.5 4.5 0 0 1 0-9Zm9 2a3.5 3.5 0 1 1 0 7 3.5 3.5 0 0 1 0-7Zm-9-.5c-1.654 0-3 1.346-3 3s1.346 3 3 3 3-1.346 3-3-1.346-3-3-3Zm9 2c-1.103 0-2 .897-2 2s.897 2 2 2 2-.897 2-2-.897-2-2-2Z",
@@ -90,6 +90,8 @@ class Channel::Telegram < ApplicationRecord
telegram_attachment = {}

case attachment[:file_type]
when 'audio'
telegram_attachment[:type] = 'audio'
when 'image'
telegram_attachment[:type] = 'photo'
when 'file'
@@ -5,6 +5,15 @@ const vue = require('./loaders/vue');

environment.plugins.prepend('VueLoaderPlugin', new VueLoaderPlugin());
environment.loaders.prepend('vue', vue);

environment.loaders.append('opus', {
test: /encoderWorker\.min\.js$/,
loader: 'file-loader',
options: {
name: '[name].[ext]',
},
});

environment.loaders.append('audio', {
test: /\.(mp3)(\?.*)?$/,
loader: 'url-loader',
@@ -46,17 +46,18 @@
"marked": "4.0.10",
"md5": "^2.3.0",
"ninja-keys": "^1.1.9",
"opus-recorder": "^8.0.5",
"posthog-js": "^1.13.7",
"prosemirror-markdown": "1.5.1",
"prosemirror-state": "1.3.4",
"prosemirror-view": "1.18.4",
"query-string": "5",
"recordrtc": "^5.6.2",
"semver": "7.3.5",
"spinkit": "~1.2.5",
"tailwindcss": "^1.9.6",
"url-loader": "^2.0.0",
"v-tooltip": "~2.1.3",
"videojs-record": "^4.5.0",
"vue": "2.6.12",
"vue-axios": "~1.2.2",
"vue-chartjs": "3.5.1",
@@ -75,7 +76,7 @@
"vuelidate": "0.7.6",
"vuex": "~2.1.1",
"vuex-router-sync": "~4.1.2",
"wavesurfer.js": "^5.2.0"
"wavesurfer.js": "^6.0.4"
},
"devDependencies": {
"@babel/core": "7.13.16",