Merge branch 'develop' of github.com:matrix-org/matrix-react-sdk into t3chguy/fix/17022

Michael Telatynski 2021-04-23 10:05:45 +01:00
commit 86a30a2ad5
17 changed files with 294 additions and 76 deletions

View file

@@ -35,7 +35,7 @@ $activeBorderColor: $secondary-fg-color;
.mx_SpacePanel_spaceTreeWrapper {
flex: 1;
overflow-y: scroll;
padding: 8px 8px 16px 0;
}
.mx_SpacePanel_toggleCollapse {
@@ -59,11 +59,10 @@ $activeBorderColor: $secondary-fg-color;
margin: 0;
list-style: none;
padding: 0;
padding-left: 16px;
}
.mx_AutoHideScrollbar {
padding: 8px 0 16px;
> .mx_SpaceItem {
padding-left: 16px;
}
}
.mx_SpaceButton_toggleCollapse {

View file

@@ -224,35 +224,6 @@ $SpaceRoomViewInnerWidth: 428px;
.mx_FacePile_faces {
cursor: pointer;
> span:hover {
.mx_BaseAvatar {
filter: brightness(0.8);
}
}
> span:first-child {
position: relative;
.mx_BaseAvatar {
filter: brightness(0.8);
}
&::before {
content: "";
z-index: 1;
position: absolute;
top: 0;
left: 0;
height: 30px;
width: 30px;
background: #ffffff; // white icon fill
mask-position: center;
mask-size: 24px;
mask-repeat: no-repeat;
mask-image: url('$(res)/img/element-icons/room/ellipsis.svg');
}
}
}
}

View file

@@ -20,7 +20,7 @@ limitations under the License.
flex-direction: row-reverse;
vertical-align: middle;
> span + span {
> .mx_FacePile_face + .mx_FacePile_face {
margin-right: -8px;
}
@@ -31,9 +31,32 @@ limitations under the License.
.mx_BaseAvatar_initial {
margin: 1px; // to offset the border on the image
}
.mx_FacePile_more {
position: relative;
border-radius: 100%;
width: 30px;
height: 30px;
background-color: $groupFilterPanel-bg-color;
&::before {
content: "";
z-index: 1;
position: absolute;
top: 0;
left: 0;
height: inherit;
width: inherit;
background: $tertiary-fg-color;
mask-position: center;
mask-size: 20px;
mask-repeat: no-repeat;
mask-image: url('$(res)/img/element-icons/room/ellipsis.svg');
}
}
}
> span {
.mx_FacePile_summary {
margin-left: 12px;
font-size: $font-14px;
line-height: $font-24px;

View file

@@ -85,7 +85,7 @@ $dialog-close-fg-color: #9fa9ba;
$dialog-background-bg-color: $header-panel-bg-color;
$lightbox-background-bg-color: #000;
$lightbox-background-bg-opacity: 85%;
$lightbox-background-bg-opacity: 0.85;
$settings-grey-fg-color: #a2a2a2;
$settings-profile-placeholder-bg-color: #21262c;

View file

@@ -83,7 +83,7 @@ $dialog-close-fg-color: #9fa9ba;
$dialog-background-bg-color: $header-panel-bg-color;
$lightbox-background-bg-color: #000;
$lightbox-background-bg-opacity: 85%;
$lightbox-background-bg-opacity: 0.85;
$settings-grey-fg-color: #a2a2a2;
$settings-profile-placeholder-bg-color: #e7e7e7;

View file

@@ -127,7 +127,7 @@ $dialog-close-fg-color: #c1c1c1;
$dialog-background-bg-color: #e9e9e9;
$lightbox-background-bg-color: #000;
$lightbox-background-bg-opacity: 95%;
$lightbox-background-bg-opacity: 0.95;
$imagebody-giflabel: rgba(0, 0, 0, 0.7);
$imagebody-giflabel-border: rgba(0, 0, 0, 0.2);

View file

@@ -118,7 +118,7 @@ $dialog-close-fg-color: #c1c1c1;
$dialog-background-bg-color: #e9e9e9;
$lightbox-background-bg-color: #000;
$lightbox-background-bg-opacity: 95%;
$lightbox-background-bg-opacity: 0.95;
$imagebody-giflabel: rgba(0, 0, 0, 0.7);
$imagebody-giflabel-border: rgba(0, 0, 0, 0.2);

View file

@@ -129,4 +129,30 @@ declare global {
// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Error/columnNumber
columnNumber?: number;
}
// https://github.com/microsoft/TypeScript/issues/28308#issuecomment-650802278
interface AudioWorkletProcessor {
readonly port: MessagePort;
process(
inputs: Float32Array[][],
outputs: Float32Array[][],
parameters: Record<string, Float32Array>
): boolean;
}
// https://github.com/microsoft/TypeScript/issues/28308#issuecomment-650802278
const AudioWorkletProcessor: {
prototype: AudioWorkletProcessor;
new (options?: AudioWorkletNodeOptions): AudioWorkletProcessor;
};
// https://github.com/microsoft/TypeScript/issues/28308#issuecomment-650802278
function registerProcessor(
name: string,
processorCtor: (new (
options?: AudioWorkletNodeOptions
) => AudioWorkletProcessor) & {
parameterDescriptors?: AudioParamDescriptor[];
}
);
}

View file

@@ -14,7 +14,7 @@ See the License for the specific language governing permissions and
limitations under the License.
*/
import React, { HTMLAttributes } from "react";
import React, { HTMLAttributes, ReactNode, useContext } from "react";
import { Room } from "matrix-js-sdk/src/models/room";
import { RoomMember } from "matrix-js-sdk/src/models/room-member";
import { sortBy } from "lodash";
@@ -24,6 +24,7 @@ import { _t } from "../../../languageHandler";
import DMRoomMap from "../../../utils/DMRoomMap";
import TextWithTooltip from "../elements/TextWithTooltip";
import { useRoomMembers } from "../../../hooks/useRoomMembers";
import MatrixClientContext from "../../../contexts/MatrixClientContext";
const DEFAULT_NUM_FACES = 5;
@@ -36,6 +36,7 @@ interface IProps extends HTMLAttributes<HTMLSpanElement> {
const isKnownMember = (member: RoomMember) => !!DMRoomMap.shared().getDMRoomsForUserId(member.userId)?.length;
const FacePile = ({ room, onlyKnownUsers = true, numShown = DEFAULT_NUM_FACES, ...props }: IProps) => {
const cli = useContext(MatrixClientContext);
let members = useRoomMembers(room);
// sort users with an explicit avatar first
@@ -46,21 +48,42 @@ const FacePile = ({ room, onlyKnownUsers = true, numShown = DEFAULT_NUM_FACES, .
// sort known users first
iteratees.unshift(member => isKnownMember(member));
}
if (members.length < 1) return null;
const shownMembers = sortBy(members, iteratees).slice(0, numShown);
// exclude ourselves from the shown members list
const shownMembers = sortBy(members.filter(m => m.userId !== cli.getUserId()), iteratees).slice(0, numShown);
if (shownMembers.length < 1) return null;
// We reverse the order of the shown faces in CSS to simplify their visual overlap,
// reverse members in tooltip order to make the order between the two match up.
const commaSeparatedMembers = shownMembers.map(m => m.rawDisplayName).reverse().join(", ");
let tooltip: ReactNode;
if (props.onClick) {
tooltip = <div>
<div className="mx_Tooltip_title">
{ _t("View all %(count)s members", { count: members.length }) }
</div>
<div className="mx_Tooltip_sub">
{ _t("Including %(commaSeparatedMembers)s", { commaSeparatedMembers }) }
</div>
</div>;
} else {
tooltip = _t("%(count)s members including %(commaSeparatedMembers)s", {
count: members.length,
commaSeparatedMembers,
});
}
return <div {...props} className="mx_FacePile">
<div className="mx_FacePile_faces">
{ shownMembers.map(member => {
return <TextWithTooltip key={member.userId} tooltip={member.name}>
<MemberAvatar member={member} width={28} height={28} />
</TextWithTooltip>;
}) }
</div>
{ onlyKnownUsers && <span>
<TextWithTooltip class="mx_FacePile_faces" tooltip={tooltip} tooltipProps={{ yOffset: 32 }}>
{ members.length > numShown ? <span className="mx_FacePile_face mx_FacePile_more" /> : null }
{ shownMembers.map(m =>
<MemberAvatar key={m.userId} member={m} width={28} height={28} className="mx_FacePile_face" /> )}
</TextWithTooltip>
{ onlyKnownUsers && <span className="mx_FacePile_summary">
{ _t("%(count)s people you know have already joined", { count: members.length }) }
</span> }
</div>
</div>;
};
export default FacePile;
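For reference (not part of the diff): a minimal consumer sketch, assuming the IProps shape above. Passing an onClick handler is what switches the tooltip from the flat "%(count)s members including …" string to the two-line "View all %(count)s members" variant, since the pile becomes clickable.

import React from "react";
import { Room } from "matrix-js-sdk/src/models/room";
import FacePile from "./FacePile";

// Hypothetical wrapper for illustration only; `onShowMembers` is not part of this commit.
const SpaceMembersPreview = ({ room, onShowMembers }: { room: Room, onShowMembers: () => void }) => (
    <FacePile room={room} numShown={5} onlyKnownUsers={false} onClick={onShowMembers} />
);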

View file

@@ -25,6 +25,7 @@ export default class TextWithTooltip extends React.Component {
class: PropTypes.string,
tooltipClass: PropTypes.string,
tooltip: PropTypes.node.isRequired,
tooltipProps: PropTypes.object,
};
constructor() {
@@ -46,15 +47,17 @@
render() {
const Tooltip = sdk.getComponent("elements.Tooltip");
const {class: className, children, tooltip, tooltipClass, ...props} = this.props;
const {class: className, children, tooltip, tooltipClass, tooltipProps, ...props} = this.props;
return (
<span {...props} onMouseOver={this.onMouseOver} onMouseLeave={this.onMouseLeave} className={className}>
{children}
{this.state.hover && <Tooltip
{...tooltipProps}
label={tooltip}
tooltipClassName={tooltipClass}
className={"mx_TextWithTooltip_tooltip"} /> }
className={"mx_TextWithTooltip_tooltip"}
/> }
</span>
);
}
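For reference (not part of the diff): the new tooltipProps prop is spread onto the Tooltip element before its fixed props, so callers can pass positioning hints such as the yOffset used by FacePile above. A hedged usage sketch:

import React from "react";
import TextWithTooltip from "./TextWithTooltip";

// Hypothetical consumer, for illustration only.
const LabelWithHint = () => (
    <TextWithTooltip tooltip="Joined 5 minutes ago" tooltipProps={{ yOffset: 32 }}>
        truncated label text
    </TextWithTooltip>
);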

View file

@@ -53,9 +53,38 @@ export default class VoiceRecordComposerTile extends React.PureComponent<IProps,
await this.state.recorder.stop();
const mxc = await this.state.recorder.upload();
MatrixClientPeg.get().sendMessage(this.props.room.roomId, {
body: "Voice message",
msgtype: "org.matrix.msc2516.voice",
url: mxc,
"body": "Voice message",
"msgtype": "org.matrix.msc2516.voice",
//"msgtype": MsgType.Audio,
"url": mxc,
"info": {
duration: Math.round(this.state.recorder.durationSeconds * 1000),
mimetype: this.state.recorder.contentType,
size: this.state.recorder.contentLength,
},
// MSC1767 experiment
"org.matrix.msc1767.text": "Voice message",
"org.matrix.msc1767.file": {
url: mxc,
name: "Voice message.ogg",
mimetype: this.state.recorder.contentType,
size: this.state.recorder.contentLength,
},
"org.matrix.msc1767.audio": {
duration: Math.round(this.state.recorder.durationSeconds * 1000),
// TODO: @@ TravisR: Waveform? (MSC1767 decision)
},
"org.matrix.experimental.msc2516.voice": { // MSC2516+MSC1767 experiment
duration: Math.round(this.state.recorder.durationSeconds * 1000),
// Events can't have floats, so we try to maintain resolution by using 1024
// as a maximum value. The waveform contains values between zero and 1, so this
// should come out largely sane.
//
// We're expecting about one data point per second of audio.
waveform: this.state.recorder.finalWaveform.map(v => Math.round(v * 1024)),
},
});
await VoiceRecordingStore.instance.disposeRecording();
this.setState({recorder: null});
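A worked example of the waveform quantisation described in the comment above: amplitudes in the 0..1 range are scaled to integers up to 1024 because Matrix events cannot carry floats.

// Illustration only (values invented): roughly one amplitude sample per second of audio.
const finalWaveform = [0, 0.25, 0.5, 1];
const wireWaveform = finalWaveform.map(v => Math.round(v * 1024));
// => [0, 256, 512, 1024]: integers, so they serialise cleanly into the event body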

View file

@@ -1916,7 +1916,13 @@
"Please <newIssueLink>create a new issue</newIssueLink> on GitHub so that we can investigate this bug.": "Please <newIssueLink>create a new issue</newIssueLink> on GitHub so that we can investigate this bug.",
"collapse": "collapse",
"expand": "expand",
"View all %(count)s members|other": "View all %(count)s members",
"View all %(count)s members|one": "View 1 member",
"Including %(commaSeparatedMembers)s": "Including %(commaSeparatedMembers)s",
"%(count)s members including %(commaSeparatedMembers)s|other": "%(count)s members including %(commaSeparatedMembers)s",
"%(count)s members including %(commaSeparatedMembers)s|one": "%(commaSeparatedMembers)s",
"%(count)s people you know have already joined|other": "%(count)s people you know have already joined",
"%(count)s people you know have already joined|one": "%(count)s person you know has already joined",
"Rotate Right": "Rotate Right",
"Rotate Left": "Rotate Left",
"Zoom out": "Zoom out",

View file

@@ -405,8 +405,7 @@ export class SpaceStoreClass extends AsyncStoreWithClient<IState> {
this.spaceFilteredRooms.forEach((roomIds, s) => {
// Update NotificationStates
const rooms = this.matrixClient.getRooms().filter(room => roomIds.has(room.roomId));
this.getNotificationState(s)?.setRooms(rooms);
this.getNotificationState(s)?.setRooms(visibleRooms.filter(room => roomIds.has(room.roomId)));
});
}, 100, {trailing: true, leading: true});

View file

@@ -54,7 +54,7 @@ export function arraySeed<T>(val: T, length: number): T[] {
* @param a The array to clone. Must be defined.
* @returns A copy of the array.
*/
export function arrayFastClone(a: any[]): any[] {
export function arrayFastClone<T>(a: T[]): T[] {
return a.slice(0, a.length);
}
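For reference (not part of the diff): the new type parameter lets callers keep their element type instead of falling back to any[]. A small sketch:

import { arrayFastClone } from "./arrays"; // illustrative import path

const source: number[] = [1, 2, 3];
const copy = arrayFastClone(source); // now inferred as number[]; previously any[]
copy.push(4);                        // the shallow copy leaves `source` untouched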

View file

@@ -0,0 +1,67 @@
/*
Copyright 2021 The Matrix.org Foundation C.I.C.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
import {IAmplitudePayload, ITimingPayload, PayloadEvent, WORKLET_NAME} from "./consts";
import {percentageOf} from "../utils/numbers";
// from AudioWorkletGlobalScope: https://developer.mozilla.org/en-US/docs/Web/API/AudioWorkletGlobalScope
declare const currentTime: number;
// declare const currentFrame: number;
// declare const sampleRate: number;
class MxVoiceWorklet extends AudioWorkletProcessor {
private nextAmplitudeSecond = 0;
process(inputs, outputs, parameters) {
// We only fire amplitude updates once a second to avoid flooding the recording instance
// with useless data. Much of the data would end up discarded, so we ratelimit ourselves
// here.
const currentSecond = Math.round(currentTime);
if (currentSecond === this.nextAmplitudeSecond) {
// We're expecting exactly one mono input source, so just grab the very first frame of
// samples for the analysis.
const monoChan = inputs[0][0];
// The amplitude of the frame's samples is effectively the loudness of the frame. This
// translates into a bar which can be rendered as part of the whole recording clip's
// waveform.
//
// We translate the amplitude down to 0-1 for sanity's sake.
const minVal = Math.min(...monoChan);
const maxVal = Math.max(...monoChan);
const amplitude = percentageOf(maxVal, -1, 1) - percentageOf(minVal, -1, 1);
this.port.postMessage(<IAmplitudePayload>{
ev: PayloadEvent.AmplitudeMark,
amplitude: amplitude,
forSecond: currentSecond,
});
this.nextAmplitudeSecond++;
}
// We mostly use this worklet to fire regular clock updates through to components
this.port.postMessage(<ITimingPayload>{ev: PayloadEvent.Timekeep, timeSeconds: currentTime});
// We're supposed to return false when we're "done" with the audio clip, but seeing as
// we are acting as a passive processor we are never truly "done". The browser will clean
// us up when it is done with us.
return true;
}
}
registerProcessor(WORKLET_NAME, MxVoiceWorklet);
export default null; // to appease module loaders (we never use the export)
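A hedged sketch of the amplitude calculation above. Assuming percentageOf(val, min, max) maps val linearly from [min, max] onto [0, 1] (its implementation is not shown in this diff), the expression reduces to (maxVal - minVal) / 2:

// Assumed shape of percentageOf, for illustration only:
const percentageOf = (val: number, min: number, max: number) => (val - min) / (max - min);

const frame = [-0.5, 0.1, 0.5]; // one frame of mono samples in [-1, 1]
const amplitude = percentageOf(Math.max(...frame), -1, 1) - percentageOf(Math.min(...frame), -1, 1);
// = 0.75 - 0.25 = 0.5, a 0..1 "loudness" value for this second's bar in the waveform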

View file

@@ -23,6 +23,8 @@ import {clamp} from "../utils/numbers";
import EventEmitter from "events";
import {IDestroyable} from "../utils/IDestroyable";
import {Singleflight} from "../utils/Singleflight";
import {PayloadEvent, WORKLET_NAME} from "./consts";
import {arrayFastClone} from "../utils/arrays";
const CHANNELS = 1; // stereo isn't important
const SAMPLE_RATE = 48000; // 48khz is what WebRTC uses. 12khz is where we lose quality.
@@ -49,16 +51,34 @@ export class VoiceRecording extends EventEmitter implements IDestroyable {
private recorderSource: MediaStreamAudioSourceNode;
private recorderStream: MediaStream;
private recorderFFT: AnalyserNode;
private recorderProcessor: ScriptProcessorNode;
private recorderWorklet: AudioWorkletNode;
private buffer = new Uint8Array(0);
private mxc: string;
private recording = false;
private observable: SimpleObservable<IRecordingUpdate>;
private amplitudes: number[] = []; // at each second mark, generated
public constructor(private client: MatrixClient) {
super();
}
public get finalWaveform(): number[] {
return arrayFastClone(this.amplitudes);
}
public get contentType(): string {
return "audio/ogg";
}
public get contentLength(): number {
return this.buffer.length;
}
public get durationSeconds(): number {
if (!this.recorder) throw new Error("Duration not available without a recording");
return this.recorderContext.currentTime;
}
private async makeRecorder() {
this.recorderStream = await navigator.mediaDevices.getUserMedia({
audio: {
@@ -80,18 +100,34 @@ export class VoiceRecording extends EventEmitter implements IDestroyable {
// it makes the time domain less than helpful.
this.recorderFFT.fftSize = 64;
// We use an audio processor to get accurate timing information.
// The size of the audio buffer largely decides how quickly we push timing/waveform data
// out of this class. Smaller buffers mean we update more frequently as we can't hold as
// many bytes. Larger buffers mean slower updates. For scale, 1024 gives us about 30Hz of
// updates and 2048 gives us about 20Hz. We use 1024 to get as close to perceived realtime
// as possible. Must be a power of 2.
this.recorderProcessor = this.recorderContext.createScriptProcessor(1024, CHANNELS, CHANNELS);
// Set up our worklet. We use this for timing information and waveform analysis: the
// web audio API prefers this be done async to avoid holding the main thread with math.
const mxRecorderWorkletPath = document.body.dataset.vectorRecorderWorkletScript;
if (!mxRecorderWorkletPath) {
throw new Error("Unable to create recorder: no worklet script registered");
}
await this.recorderContext.audioWorklet.addModule(mxRecorderWorkletPath);
this.recorderWorklet = new AudioWorkletNode(this.recorderContext, WORKLET_NAME);
// Connect our inputs and outputs
this.recorderSource.connect(this.recorderFFT);
this.recorderSource.connect(this.recorderProcessor);
this.recorderProcessor.connect(this.recorderContext.destination);
this.recorderSource.connect(this.recorderWorklet);
this.recorderWorklet.connect(this.recorderContext.destination);
// Dev note: we can't use `addEventListener` for some reason. It just doesn't work.
this.recorderWorklet.port.onmessage = (ev) => {
switch (ev.data['ev']) {
case PayloadEvent.Timekeep:
this.processAudioUpdate(ev.data['timeSeconds']);
break;
case PayloadEvent.AmplitudeMark:
// Sanity check to make sure we're adding about one sample per second
if (ev.data['forSecond'] === this.amplitudes.length) {
this.amplitudes.push(ev.data['amplitude']);
}
break;
}
};
this.recorder = new Recorder({
encoderPath, // magic from webpack
@@ -138,7 +174,7 @@ export class VoiceRecording extends EventEmitter implements IDestroyable {
return this.mxc;
}
private processAudioUpdate = (ev: AudioProcessingEvent) => {
private processAudioUpdate = (timeSeconds: number) => {
if (!this.recording) return;
// The time domain is the input to the FFT, which means we use an array of the same
@@ -162,12 +198,12 @@ export class VoiceRecording extends EventEmitter implements IDestroyable {
this.observable.update({
waveform: translatedData,
timeSeconds: ev.playbackTime,
timeSeconds: timeSeconds,
});
// Now that we've updated the data/waveform, let's do a time check. We don't want to
// go horribly over the limit. We also emit a warning state if needed.
const secondsLeft = TARGET_MAX_LENGTH - ev.playbackTime;
const secondsLeft = TARGET_MAX_LENGTH - timeSeconds;
if (secondsLeft <= 0) {
// noinspection JSIgnoredPromiseFromCall - we aren't concerned with it overlapping
this.stop();
@@ -191,7 +227,6 @@ export class VoiceRecording extends EventEmitter implements IDestroyable {
}
this.observable = new SimpleObservable<IRecordingUpdate>();
await this.makeRecorder();
this.recorderProcessor.addEventListener("audioprocess", this.processAudioUpdate);
await this.recorder.start();
this.recording = true;
this.emit(RecordingState.Started);
@@ -205,6 +240,7 @@ export class VoiceRecording extends EventEmitter implements IDestroyable {
// Disconnect the source early to start shutting down resources
this.recorderSource.disconnect();
this.recorderWorklet.disconnect();
await this.recorder.stop();
// close the context after the recorder so the recorder doesn't try to
@@ -216,7 +252,6 @@ export class VoiceRecording extends EventEmitter implements IDestroyable {
// Finally do our post-processing and clean up
this.recording = false;
this.recorderProcessor.removeEventListener("audioprocess", this.processAudioUpdate);
await this.recorder.close();
this.emit(RecordingState.Ended);
@@ -240,7 +275,7 @@ export class VoiceRecording extends EventEmitter implements IDestroyable {
this.emit(RecordingState.Uploading);
this.mxc = await this.client.uploadContent(new Blob([this.buffer], {
type: "audio/ogg",
type: this.contentType,
}), {
onlyContentUri: false, // to stop the warnings in the console
}).then(r => r['content_uri']);
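A possible explanation for the "can't use addEventListener" dev note above (an assumption, not verified in this commit): a MessagePort only starts dispatching queued messages once start() is called, and assigning onmessage does that implicitly while addEventListener does not. The equivalent wiring would be:

// Illustration only; the diff keeps the `port.onmessage` assignment instead.
const port = this.recorderWorklet.port;
port.addEventListener("message", (ev: MessageEvent) => {
    // same switch on ev.data['ev'] as in makeRecorder() above
});
port.start(); // without an explicit start(), queued messages are never delivered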

37 src/voice/consts.ts Normal file
View file

@@ -0,0 +1,37 @@
/*
Copyright 2021 The Matrix.org Foundation C.I.C.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
export const WORKLET_NAME = "mx-voice-worklet";
export enum PayloadEvent {
Timekeep = "timekeep",
AmplitudeMark = "amplitude_mark",
}
export interface IPayload {
ev: PayloadEvent;
}
export interface ITimingPayload extends IPayload {
ev: PayloadEvent.Timekeep;
timeSeconds: number;
}
export interface IAmplitudePayload extends IPayload {
ev: PayloadEvent.AmplitudeMark;
forSecond: number;
amplitude: number;
}
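For reference (not part of the diff): because every payload carries its PayloadEvent in ev, these interfaces behave as a discriminated union, so a consumer such as the port.onmessage handler in VoiceRecording.ts can narrow them without string indexing. A hedged sketch:

import { IAmplitudePayload, ITimingPayload, PayloadEvent } from "./consts";

// `WorkletPayload` is a hypothetical alias for illustration; it is not defined in consts.ts.
type WorkletPayload = ITimingPayload | IAmplitudePayload;

function handleWorkletMessage(payload: WorkletPayload) {
    switch (payload.ev) {
        case PayloadEvent.Timekeep:
            console.log("clock", payload.timeSeconds); // narrowed to ITimingPayload
            break;
        case PayloadEvent.AmplitudeMark:
            console.log("bar", payload.forSecond, payload.amplitude); // narrowed to IAmplitudePayload
            break;
    }
}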