Add calling test that exercises virtual audio

Author: Miriam Zimmerman (committed by GitHub)
Date:   2025-09-24 13:41:37 -04:00
Parent: 0bdc96f0a9
Commit: 0eabffe3cf

7 changed files with 203 additions and 31 deletions


@@ -369,7 +369,7 @@ jobs:
       matrix:
         workerIndex: [0, 1, 2, 3]
-    runs-on: ubuntu-22.04-8-cores
+    runs-on: ubuntu-latest-8-cores
     if: ${{ github.repository == 'signalapp/Signal-Desktop-Private' }}
     timeout-minutes: 30
@@ -397,7 +397,7 @@ jobs:
           key: electron-gyp-${{ runner.os }}-${{ hashFiles('pnpm-lock.yaml') }}
       - name: Install xvfb and libpulse0
-        run: sudo apt-get install xvfb libpulse0 || (sudo apt-get update && sudo apt-get install xvfb libpulse0)
+        run: sudo apt-get install -y xvfb libpulse0 || (sudo apt-get update && sudo apt-get install -y xvfb libpulse0)
      # - name: Setup sccache
      #   uses: mozilla-actions/sccache-action@054db53350805f83040bf3e6e9b8cf5a139aa7c9 # v0.0.7
@@ -408,7 +408,10 @@
      #     key: sccache-${{ runner.os }}-${{ hashFiles('pnpm-lock.yaml', 'patches/**') }}
       - name: Install Desktop node_modules
-        run: pnpm install
+        run: |
+          pnpm install
+          sudo chown root node_modules/.pnpm/electron@*/node_modules/electron/dist/chrome-sandbox
+          sudo chmod 4755 node_modules/.pnpm/electron@*/node_modules/electron/dist/chrome-sandbox
         env:
           # CC: sccache gcc
           # CXX: sccache g++
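
Why the chown/chmod step: on runners where unprivileged user namespaces are restricted (as on newer Ubuntu images, which likely motivated the runner change above), Chromium falls back to its SUID sandbox, and that helper binary must be owned by root with mode 4755 or Electron aborts at startup. A sanity-check sketch, not part of this commit (the resolved path is hypothetical; the workflow step uses a glob for the electron version segment):

// check-sandbox.ts: a sketch verifying the ownership and mode that the
// workflow step above sets up after `pnpm install`.
import { statSync } from 'node:fs';

// Hypothetical resolved path; adjust for the actual electron version.
const SANDBOX =
  'node_modules/.pnpm/electron@37.0.0/node_modules/electron/dist/chrome-sandbox';

const st = statSync(SANDBOX);
const mode = st.mode & 0o7777;
if (st.uid !== 0 || mode !== 0o4755) {
  throw new Error(
    `chrome-sandbox must be root-owned with mode 4755; ` +
      `got uid=${st.uid}, mode=${mode.toString(8)}`
  );
}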
@@ -440,8 +443,11 @@
         if: ${{ matrix.workerIndex == 0 }}
         run: |
           set -o pipefail
+          sudo apt-get install -y pipewire pipewire-pulse wireplumber psmisc pulseaudio-utils
+          systemctl --user start pipewire.service
+          systemctl --user start pipewire-pulse.service
           xvfb-run --auto-servernum pnpm run test-mock-docker
-        timeout-minutes: 15
+        timeout-minutes: 10
         env:
           NODE_ENV: production
           DEBUG: mock:test:*
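
These new steps install PipeWire and start its PulseAudio-compatible daemon (pipewire-pulse) in the runner's user session; the virtual sources and sinks the test creates attach to that server. A fail-fast probe one could run before the tests, as a sketch (not in the workflow):

// probe-audio-server.ts: a sketch, not part of this commit.
import { execFile } from 'node:child_process';
import { promisify } from 'node:util';

const run = promisify(execFile);

async function assertPulseServerUp(): Promise<void> {
  // `pactl info` (from pulseaudio-utils, installed above) exits
  // non-zero when no PulseAudio-compatible server is reachable.
  const { stdout } = await run('pactl', ['info'], { timeout: 10_000 });
  if (!/PipeWire/.test(stdout)) {
    throw new Error(`expected a PipeWire-backed server, got:\n${stdout}`);
  }
}

assertPulseServerUp().catch(err => {
  console.error(err);
  process.exit(1);
});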

BIN  fixtures/the_raven.wav  (new file; binary content not shown)


@@ -0,0 +1,3 @@
+Source for "the_raven.wav" is the first minute of
+https://commons.wikimedia.org/wiki/File:Ravenandotherpoems_01_poe.mp3,
+a public domain file, converted to WAV by Signal.
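
The exact conversion command is not recorded in the commit. A plausible way to reproduce the fixture with ffmpeg, wrapped in the same Node idiom the tests use (every parameter here is an assumption):

// make-fixture.ts: a hypothetical reconstruction, not part of this commit.
import { execFileSync } from 'node:child_process';

execFileSync('ffmpeg', [
  '-i', 'Ravenandotherpoems_01_poe.mp3',
  '-t', '60',       // keep only the first minute
  '-ac', '1',       // downmix to mono
  '-ar', '48000',   // 48 kHz, a common rate for WebRTC audio
  'the_raven.wav',
]);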


@@ -52,6 +52,7 @@ export type CIType = {
   print: (...args: ReadonlyArray<unknown>) => void;
   resetReleaseNotesFetcher(): void;
   forceUnprocessed: boolean;
+  setMediaPermissions(): Promise<void>;
 };
export type GetCIOptionsType = Readonly<{
@@ -242,6 +243,10 @@
     ]);
   }

+  async function setMediaPermissions() {
+    await window.IPC.setMediaPermissions(true);
+  }
+
   return {
deviceName,
getConversationId,
@@ -263,5 +268,6 @@
     print,
     resetReleaseNotesFetcher,
     forceUnprocessed,
+    setMediaPermissions,
   };
 }


@@ -568,6 +568,23 @@
     await fs.writeFile(path.join(outDir, `screenshot-${id}.png`), screenshot);
   }

+  public async screenshotWindow(
+    window: Page,
+    testName?: string
+  ): Promise<void> {
+    const outDir = await this.#getArtifactsDir(testName);
+    if (outDir == null) {
+      return;
+    }
+
+    const screenshot = await window.screenshot();
+    const id = this.#screenshotId;
+    this.#screenshotId += 1;
+    await fs.writeFile(path.join(outDir, `screenshot-${id}.png`), screenshot);
+  }
+
   public async saveLogs(
     app: App | undefined = this.#lastApp,
     testName?: string
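
The new helper takes any Playwright Page rather than an App, which makes failure-time capture of secondary windows possible. A usage sketch (the hook body is an assumption; the API calls are the ones added in this diff, and `app2`/`bootstrap2` come from the calling test below):

// Sketch: capture the callee's window after every test, pass or fail,
// using the new Bootstrap.screenshotWindow() added above.
afterEach(async function screenshots(this: Mocha.Context) {
  const window2 = await app2.getWindow();
  await bootstrap2.screenshotWindow(window2, this.currentTest?.fullTitle());
});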


@@ -1,13 +1,32 @@
 // Copyright 2025 Signal Messenger, LLC
 // SPDX-License-Identifier: AGPL-3.0-only

+import { join } from 'node:path';
 import createDebug from 'debug';
+import { execFile } from 'node:child_process';
 import { StorageState } from '@signalapp/mock-server';
 import { expect } from 'playwright/test';
+import type { Page } from 'playwright';
+import { promisify } from 'node:util';

 import * as durations from '../../util/durations/index.js';
 import type { App } from '../playwright.js';
 import { Bootstrap } from '../bootstrap.js';
 import { runTurnInContainer, tearDownTurnContainer } from './helpers.js';

+const FIXTURES = join(__dirname, '..', '..', '..', 'fixtures');
+const VIRTUAL_AUDIO = join(
+  __dirname,
+  '..',
+  '..',
+  '..',
+  'node_modules',
+  '.bin',
+  'virtual_audio'
+);
+
 const debug = createDebug('mock:test:calling:messages');
+const execFilePromise = promisify(execFile);

 describe('callMessages', function callMessages(this: Mocha.Suite) {
   this.timeout(durations.MINUTE);
@@ -16,9 +35,50 @@ describe('callMessages', function callMessages(this: Mocha.Suite) {
   let app1: App;
   let app2: App;

-  beforeEach(async () => {
-    runTurnInContainer();
+  async function setUpAudio(source: string, sink: string) {
+    debug(`setup source: ${source}, sink: ${sink}`);
+    const args = ['--setup', '--input-source', source, '--output-sink', sink];
+    try {
+      const { stdout, stderr } = await execFilePromise(VIRTUAL_AUDIO, args, {
+        timeout: 20000,
+        encoding: 'utf8',
+      });
+      debug(stdout);
+      debug(stderr);
+    } catch (err) {
+      debug(err);
+      throw err;
+    }
+  }
+
+  async function tearDownAudio(source: string, sink: string) {
+    debug(`tear down source ${source}, sink: ${sink}`);
+    await execFilePromise(VIRTUAL_AUDIO, [
+      '--teardown',
+      '--input-source',
+      source,
+      '--output-sink',
+      sink,
+    ]);
+  }
+
+  before(async () => {
+    runTurnInContainer();
+
+    // Set up two virtual sources and sinks.
+    await setUpAudio('input_source_a', 'output_sink_a');
+    await setUpAudio('input_source_b', 'output_sink_b');
+  });
+
+  after(async () => {
+    tearDownTurnContainer();
+
+    // Despite the API suggesting otherwise, we actually only need to call
+    // `teardown` once: it will tear down **all** sources and sinks it has set
+    // up, not just the ones passed here.
+    await tearDownAudio('input_source_a', 'output_sink_a');
+  });
+
+  beforeEach(async () => {
     bootstrap1 = new Bootstrap();
     await bootstrap1.init();
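
virtual_audio ships as an opaque binary under node_modules/.bin, and its source is not part of this diff. For intuition only, its --setup/--teardown flags presumably create and later unload virtual PulseAudio devices, roughly like this sketch (module names and wiring are assumptions, NOT the real implementation):

// virtual-audio-sketch.ts: illustrative only.
import { execFileSync } from 'node:child_process';

function pactl(...args: Array<string>): string {
  return execFileSync('pactl', args, { encoding: 'utf8' }).trim();
}

// Create a virtual speaker plus a virtual microphone fed by that
// speaker's monitor, the shape of device pair the tests select below.
export function setUp(source: string, sink: string): Array<string> {
  // module-null-sink: a sink that discards audio but exposes a monitor.
  const sinkModule = pactl(
    'load-module',
    'module-null-sink',
    `sink_name=${sink}`
  );
  // module-remap-source: re-exposes the sink's monitor as a capture
  // device, so an app can "record" whatever is played into the sink.
  const sourceModule = pactl(
    'load-module',
    'module-remap-source',
    `master=${sink}.monitor`,
    `source_name=${source}`
  );
  return [sinkModule, sourceModule];
}

// Unloading every module created by setUp() removes all the virtual
// devices at once, which would explain why one teardown call suffices.
export function tearDown(moduleIds: Array<string>): void {
  for (const id of moduleIds) {
    pactl('unload-module', id);
  }
}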
@@ -57,11 +117,12 @@ describe('callMessages', function callMessages(this: Mocha.Suite) {
     await bootstrap2.phone.setStorageState(state2);
     app2 = await bootstrap2.link();

+    await app1.enableMedia();
+    await app2.enableMedia();
   });

   afterEach(async function after(this: Mocha.Context) {
-    tearDownTurnContainer();
     if (!bootstrap1) {
       return;
     }
@@ -75,21 +136,39 @@ describe('callMessages', function callMessages(this: Mocha.Suite) {
     await bootstrap1.teardown();
   });

+  // Start an audio call with the given ACI.
+  // Assumes that a conversation with them is visible in the left pane.
+  async function startAudioCallWith(win: Page, aci: string) {
+    const leftPane = win.locator('#LeftPane');
+    await leftPane.locator(`[data-testid="${aci}"]`).click();
+
+    // Try to start a call
+    await win.locator('.module-ConversationHeader__button--audio').click();
+    await win
+      .locator('.CallingLobbyJoinButton')
+      .and(win.locator('button:visible'))
+      .click();
+  }
+
+  // Wait until the calling modal is not populated.
+  async function awaitNoCall(win: Page) {
+    await expect(win.locator('.module-calling__modal-container')).toBeEmpty();
+  }
+
+  async function setInputAndOutput(win: Page, input: string, output: string) {
+    debug(`setInputAndOutput input: ${input} output: ${output}`);
+    await win
+      .locator('.CallSettingsButton__Button')
+      .and(win.getByLabel('Settings'))
+      .click();
+    await win.locator('#audio-input').selectOption(input);
+    await win.locator('#audio-output').selectOption(output);
+    await win.locator('.module-calling-device-selection__close-button').click();
+  }
+
   it('can call and decline a call', async () => {
     const window1 = await app1.getWindow();

-    const leftPane1 = window1.locator('#LeftPane');
-    await leftPane1
-      .locator(`[data-testid="${bootstrap2.phone.device.aci}"]`)
-      .click();
-
-    // Try to start a call
-    await window1.locator('.module-ConversationHeader__button--audio').click();
-
-    const window1Permissions = await app1.waitForWindow();
-    await window1Permissions.getByText('Allow Access').click();
-
-    await window1
-      .locator('.CallingLobbyJoinButton')
-      .and(window1.locator('button:visible'))
-      .click();
+    await startAudioCallWith(window1, bootstrap2.phone.device.aci);

     const window2 = await app2.getWindow();
@@ -99,12 +178,72 @@ describe('callMessages', function callMessages(this: Mocha.Suite) {
       .locator('.IncomingCallBar__button--decline')
       .click({ timeout: 3000 });

-    await expect(
-      window1.locator('.module-calling__modal-container')
-    ).toBeEmpty();
+    await awaitNoCall(window1);
+    await awaitNoCall(window2);
   });
+  it('can call and accept a call', async () => {
+    const theRaven = join(FIXTURES, 'the_raven.wav');
+    const window1 = await app1.getWindow();
+
+    await startAudioCallWith(window1, bootstrap2.phone.device.aci);
+
+    const window2 = await app2.getWindow();
+
+    // Only wait for 3 seconds to make sure that this succeeded properly rather
+    // than timing out after ~10 seconds and using a direct connection
+    await window2
+      .locator('.IncomingCallBar__button--accept-audio')
+      .click({ timeout: 3000 });
+
+    try {
+      await setInputAndOutput(window1, 'input_source_a', 'output_sink_a');
+      await setInputAndOutput(window2, 'input_source_b', 'output_sink_b');
+
+      execFile(
+        VIRTUAL_AUDIO,
+        [
+          '--play',
+          '--input-source',
+          'input_source_a',
+          '--output-sink',
+          'output_sink_a',
+          '--input-file',
+          theRaven,
+        ],
+        (error, stdout, stderr) => {
+          if (error) {
+            throw error;
+          }
+          debug(stdout);
+          debug(stderr);
+        }
+      );
+
+      // Wait for audio levels indicator to be visible.
+      await expect(
+        window2.locator('.CallingAudioIndicator--with-content')
+      ).toBeVisible({ timeout: 15000 });
+    } finally {
+      await bootstrap2.screenshotWindow(window2, 'callee');
+
+      // hang up after we detect audio (or fail to)
+      await window2.locator('.CallControls__JoinLeaveButton--hangup').click();
+      await execFilePromise(VIRTUAL_AUDIO, [
+        '--stop',
+        '--input-source',
+        'input_source_a',
+        '--output-sink',
+        'output_sink_a',
+      ]);
+
+      await awaitNoCall(window1);
+      await awaitNoCall(window2);
+
+      await window2.locator('.NavTabs__Item--Settings').click();
+      await window2.locator('.Preferences__button--calls').click();
+      await bootstrap2.screenshotWindow(window2, 'callee');
+    }
+  });
 });


@@ -146,10 +146,6 @@ export class App extends EventEmitter {
     return this.#waitForEvent('storageServiceComplete');
   }

-  public async waitForWindow(): Promise<Page> {
-    return this.#app.waitForEvent('window');
-  }
-
   public async waitForManifestVersion(version: number): Promise<void> {
// eslint-disable-next-line no-constant-condition
while (true) {
@@ -231,6 +227,11 @@
     await window.evaluate('window.SignalCI.uploadBackup()');
   }

+  public async enableMedia(): Promise<void> {
+    const window = await this.getWindow();
+    await window.evaluate('window.SignalCI.setMediaPermissions()');
+  }
+
   public async migrateAllMessages(): Promise<void> {
     const window = await this.getWindow();
     await window.evaluate('window.SignalCI.migrateAllMessages()');