Completed first implementation of SRS integration

Davide Passoni 2024-09-09 12:30:54 +02:00
parent d774977387
commit abd561a60d
16 changed files with 264 additions and 227 deletions

View File

@@ -1,14 +0,0 @@
class AudioDopplerProcessor extends AudioWorkletProcessor {
process(inputs, outputs, parameters) {
const output = outputs[0];
output.forEach((channel) => {
for (let i = 0; i < channel.length; i++) {
channel[i] = Math.random() * 2 - 1;
}
});
return true;
}
}
registerProcessor("audio-doppler-processor", AudioDopplerProcessor);
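For reference, this deleted worklet ran on the audio rendering thread: process() fills every output channel with white noise in [-1, 1), and returning true keeps the processor alive. A processor of this kind is loaded with the standard Web Audio pattern sketched below (a generic sketch, not code from this commit; the node name must match the registerProcessor() call):

// Generic sketch: load the worklet module, then instantiate it by its registered name.
const ctx = new AudioContext();
ctx.audioWorklet.addModule("audiodopplerprocessor.js").then(() => {
  const noiseNode = new AudioWorkletNode(ctx, "audio-doppler-processor");
  noiseNode.connect(ctx.destination); // route the generated noise somewhere audible
});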

View File

@@ -1,4 +1,6 @@
// TODO Convert to TypeScript
// Audio library I shamelessly copied from the web
// SAFARI Polyfills
if (!window.AudioBuffer.prototype.copyToChannel) {
window.AudioBuffer.prototype.copyToChannel = function copyToChannel(buffer, channel) {
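The polyfill body is truncated in this diff. For reference, a typical copyToChannel shim looks like the sketch below (an assumption, not this commit's code; it ignores the optional startInChannel offset of the real spec):

// Sketch of a typical AudioBuffer.copyToChannel polyfill (assumed, simplified).
if (!window.AudioBuffer.prototype.copyToChannel) {
  window.AudioBuffer.prototype.copyToChannel = function (this: AudioBuffer, source: Float32Array, channelNumber: number) {
    this.getChannelData(channelNumber).set(source); // copy the samples into the channel's backing array
  };
}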

View File

@@ -24,6 +24,9 @@ export class AudioManager {
/* List of all possible audio sources (microphone, file stream etc...) */
#sources: AudioSource[] = [];
/* The audio backend must be started manually by a user gesture so that the browser allows
audio playback (autoplay policy). Otherwise, no playback will be performed. */
#running: boolean = false;
#address: string = "localhost";
#port: number = 4000;
#socket: WebSocket | null = null;
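Browsers suspend audio output until a user gesture creates or resumes the AudioContext, which is why start() must be user-triggered. A minimal sketch of the expected wiring (the button element is hypothetical, not part of this diff):

// Hypothetical call site: start the audio backend only from a click handler.
const startButton = document.getElementById("audio-start-button"); // assumed element ID
startButton?.addEventListener("click", () => getApp().getAudioManager().start());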
@@ -44,6 +47,7 @@ export class AudioManager {
}
start() {
this.#running = true;
this.#audioContext = new AudioContext({ sampleRate: 16000 });
this.#playbackPipeline = new PlaybackPipeline();
@@ -77,7 +81,7 @@ export class AudioManager {
/* Extract the frequency value and play it on the speakers if we are listening to it */
audioPacket.getFrequencies().forEach((frequencyInfo) => {
if (sink.getFrequency() === frequencyInfo.frequency && sink.getModulation() === frequencyInfo.modulation) {
this.#playbackPipeline.play(audioPacket.getAudioData().buffer);
this.#playbackPipeline.playBuffer(audioPacket.getAudioData().buffer);
}
});
} else {
@@ -99,18 +103,25 @@ export class AudioManager {
/* Add two default radios */
this.addRadio();
this.addRadio();
});
document.dispatchEvent(new CustomEvent("audioManagerStateChanged"));
}
stop() {
this.#running = false;
this.#sources.forEach((source) => {
source.disconnect();
});
this.#sinks.forEach((sink) => {
sink.disconnect();
});
this.#sources = [];
this.#sinks = [];
document.dispatchEvent(new CustomEvent("audioSourcesUpdated"));
document.dispatchEvent(new CustomEvent("audioSinksUpdated"));
document.dispatchEvent(new CustomEvent("audioManagerStateChanged"));
}
setAddress(address) {
@@ -122,22 +133,47 @@ export class AudioManager {
}
addFileSource(file) {
console.log(`Adding file source from ${file.name}`);
if (!this.#running) {
console.log("Audio manager not started, aborting...");
return;
}
const newSource = new FileSource(file);
this.#sources.push(newSource);
newSource.connect(this.#sinks[0]);
document.dispatchEvent(new CustomEvent("audioSourcesUpdated"));
}
getSources() {
return this.#sources;
}
removeSource(source: AudioSource) {
console.log(`Removing source ${source.getName()}`);
if (!this.#running) {
console.log("Audio manager not started, aborting...");
return;
}
source.disconnect();
this.#sources = this.#sources.filter((v) => v != source);
document.dispatchEvent(new CustomEvent("audioSourcesUpdated"));
}
addUnitSink(unit: Unit) {
console.log(`Adding unit sink for unit with ID ${unit.ID}`);
if (!this.#running) {
console.log("Audio manager not started, aborting...");
return;
}
this.#sinks.push(new UnitSink(unit));
document.dispatchEvent(new CustomEvent("audioSinksUpdated"));
}
getSinks() {
return this.#sinks;
}
addRadio() {
console.log("Adding new radio");
if (!this.#running) {
console.log("Audio manager not started, aborting...");
return;
}
const newRadio = new RadioSink();
this.#sinks.push(newRadio);
newRadio.setName(`Radio ${this.#sinks.length}`);
@@ -145,7 +181,16 @@ export class AudioManager {
document.dispatchEvent(new CustomEvent("audioSinksUpdated"));
}
getSinks() {
return this.#sinks;
}
removeSink(sink) {
console.log(`Removing sink ${sink.getName()}`);
if (!this.#running) {
console.log("Audio manager not started, aborting...");
return;
}
sink.disconnect();
this.#sinks = this.#sinks.filter((v) => v != sink);
let idx = 1;
@@ -155,16 +200,6 @@ export class AudioManager {
document.dispatchEvent(new CustomEvent("audioSinksUpdated"));
}
removeSource(source) {
source.disconnect();
this.#sources = this.#sources.filter((v) => v != source);
document.dispatchEvent(new CustomEvent("audioSourcesUpdated"));
}
getSources() {
return this.#sources;
}
getGuid() {
return this.#guid;
}
@@ -181,6 +216,10 @@ export class AudioManager {
return this.#SRSClientUnitIDs;
}
isRunning() {
return this.#running;
}
#syncRadioSettings() {
let message = {
type: "Settings update",

View File

@@ -1,3 +1,4 @@
// TODO This code is duplicated in the backend; it would be nice to share it
import { byteArrayToDouble, byteArrayToInteger, doubleToByteArray, integerToByteArray } from "../other/utils";
import { Buffer } from "buffer";
@@ -20,11 +21,6 @@ export class AudioPacket {
#unitID: number = 0;
#hops: number = 0;
/* Out of standard data (this is not compliant with SRS standard, used for external audio effects) */
#latitude: number | null = null;
#longitude: number | null = null;
#altitude: number | null = null;
/* Usually internally set only */
#packetID: number | null = null;
@@ -138,18 +134,6 @@ export class AudioPacket {
[...Buffer.from(this.#clientGUID, "utf-8")]
);
if (
this.#latitude !== undefined &&
this.#longitude !== undefined &&
this.#altitude !== undefined
) {
encodedData.concat(
[...doubleToByteArray(this.#latitude)],
[...doubleToByteArray(this.#longitude)],
[...doubleToByteArray(this.#altitude)]
);
}
// Set the lengths of the parts
let encPacketLen = integerToByteArray(encodedData.length, 2);
encodedData[0] = encPacketLen[0];
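The two length bytes are written into the packet header one byte at a time. Below is a sketch of a helper consistent with the usage above (an assumption, not the repo's actual utils code; byte order is taken as little-endian, matching the usual SRS/C# BitConverter convention):

// Hypothetical implementation consistent with integerToByteArray(value, 2) above.
function integerToByteArray(value: number, length: number): Uint8Array {
  const bytes = new Uint8Array(length);
  for (let i = 0; i < length; i++) {
    bytes[i] = (value >> (8 * i)) & 0xff; // least significant byte first (assumed)
  }
  return bytes;
}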
@@ -223,28 +207,4 @@ export class AudioPacket {
getHops() {
return this.#hops;
}
setLatitude(latitude: number) {
this.#latitude = latitude;
}
getLatitude() {
return this.#latitude;
}
setLongitude(longitude: number) {
this.#longitude = longitude;
}
getLongitude() {
return this.#longitude;
}
setAltitude(altitude: number) {
this.#altitude = altitude;
}
getAltitude() {
return this.#altitude;
}
}

View File

@@ -1,6 +1,7 @@
import { getApp } from "../olympusapp";
export abstract class AudioSink {
/* Base audio sink class */
export class AudioSink {
#name: string;
#gainNode: GainNode;

View File

@@ -2,6 +2,7 @@ import { getApp } from "../olympusapp";
import { AudioSink } from "./audiosink";
import { WebAudioPeakMeter } from "web-audio-peak-meter";
/* Base abstract audio source class */
export abstract class AudioSource {
#connectedTo: AudioSink[] = [];
#name = "";
@@ -11,7 +12,9 @@ export abstract class AudioSource {
constructor() {
this.#gainNode = getApp().getAudioManager().getAudioContext().createGain();
this.#meter = new WebAudioPeakMeter(this.#gainNode, document.createElement('div'));
/* This library requires a div element to initialize the object. Create a fake element; we will read the data and render it ourselves. */
this.#meter = new WebAudioPeakMeter(this.#gainNode, document.createElement("div"));
}
connect(sink: AudioSink) {
@@ -61,5 +64,6 @@ export abstract class AudioSource {
return this.#gainNode;
}
/* Play method must be implemented by child classes */
abstract play(): void;
}

View File

@@ -3,6 +3,8 @@ import { Unit } from "../unit/unit";
import { Filter, Noise } from "./audiolibrary";
import { AudioPacket } from "./audiopacket";
const MAX_DISTANCE = 1852; // Ignore clients that are further away than 1NM, to save performance.
export class AudioUnitPipeline {
#inputNode: GainNode;
#sourceUnit: Unit;
@@ -11,16 +13,15 @@ export class AudioUnitPipeline {
#destinationNode: MediaStreamAudioDestinationNode;
#audioTrackProcessor: any;
#encoder: AudioEncoder;
#distance: number = 0;
#convolver: ConvolverNode;
#delay: DelayNode;
#multitap: DelayNode[];
#multitapGain: GainNode;
#wet: GainNode;
#convolverNode: ConvolverNode;
#preDelayNode: DelayNode;
#multitapNodes: DelayNode[];
#multitapGainNode: GainNode;
#wetGainNode: GainNode;
#tailOsc: Noise;
#dataBuffer: number[] = [];
#distance: number = 0;
constructor(sourceUnit: Unit, unitID: number, inputNode: GainNode) {
this.#sourceUnit = sourceUnit;
@@ -65,33 +66,42 @@ export class AudioUnitPipeline {
/* Create the pipeline */
this.#inputNode = inputNode;
this.#inputNode.connect(this.#gainNode);
this.#setupEffects();
/* Create the interval task to update the data */
setInterval(() => {
/* Get the destination unit and compute the distance to it */
let destinationUnit = getApp().getUnitsManager().getUnitByID(this.#unitID);
if (destinationUnit) {
let distance = destinationUnit?.getPosition().distanceTo(this.#sourceUnit.getPosition());
/* The units' positions are updated at a low frequency. Filter the distance to avoid sudden volume jumps */
this.#distance = 0.9 * this.#distance + 0.1 * distance;
let newGain = 1.0 - Math.pow(this.#distance / 1000, 0.5); // Arbitrary
/* Don't bother updating parameters if the client is too far away */
if (this.#distance < MAX_DISTANCE) {
/* Compute a new gain decreasing with distance. */
let newGain = 1.0 - Math.pow(this.#distance / 1000, 0.5); // Arbitrary
this.#gainNode.gain.setValueAtTime(newGain, getApp().getAudioManager().getAudioContext().currentTime);
this.#multitapGain.gain.setValueAtTime(newGain / 10, getApp().getAudioManager().getAudioContext().currentTime);
/* Set the values of the main gain node and the multitap gain node, used for reverb effect */
this.#gainNode.gain.setValueAtTime(newGain, getApp().getAudioManager().getAudioContext().currentTime);
this.#multitapGainNode.gain.setValueAtTime(newGain / 10, getApp().getAudioManager().getAudioContext().currentTime);
let reverbTime = this.#distance / 1000 / 2; //Arbitrary
let preDelay = this.#distance / 1000; // Arbitrary
this.#delay.delayTime.setValueAtTime(preDelay, getApp().getAudioManager().getAudioContext().currentTime);
this.#multitap.forEach((t, i) => {
t.delayTime.setValueAtTime(0.001 + i * (preDelay / 2), getApp().getAudioManager().getAudioContext().currentTime);
});
this.#tailOsc.release = reverbTime / 3;
/* Increase reverb and predelay with distance */
let reverbTime = this.#distance / 1000 / 4; //Arbitrary
let preDelay = this.#distance / 1000 / 2; // Arbitrary
this.#preDelayNode.delayTime.setValueAtTime(preDelay, getApp().getAudioManager().getAudioContext().currentTime);
this.#multitapNodes.forEach((t, i) => {
t.delayTime.setValueAtTime(0.001 + i * (preDelay / 2), getApp().getAudioManager().getAudioContext().currentTime);
});
this.#tailOsc.release = reverbTime / 3;
}
}
}, 100);
}
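The distance filter above is a one-pole low-pass (an exponential moving average): y[n] = 0.9·y[n-1] + 0.1·x[n]. With the 100 ms update interval, about 65% of a step change in distance is absorbed after ten updates (0.9^10 ≈ 0.35 of the step remains), i.e. roughly one second, which smooths out the low-frequency position updates. As a standalone sketch:

// Exponential moving average matching the 0.9 / 0.1 weights used above.
function smooth(previous: number, sample: number, alpha = 0.9): number {
  return alpha * previous + (1 - alpha) * sample; // higher alpha = slower, smoother response
}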
handleEncodedData(encodedAudioChunk, unitID) {
/* Encode the data in SRS format and send it to the backend */
let arrayBuffer = new ArrayBuffer(encodedAudioChunk.byteLength);
encodedAudioChunk.copyTo(arrayBuffer);
@@ -115,67 +125,67 @@ export class AudioUnitPipeline {
handleRawData(audioData) {
/* Ignore players that are too far away */
if (this.#distance < 1000) {
if (this.#distance < MAX_DISTANCE) {
this.#encoder.encode(audioData);
audioData.close();
}
}
#setupEffects() {
let reverbTime = 0.1; //Arbitrary
this.#convolver = getApp().getAudioManager().getAudioContext().createConvolver();
this.#delay = getApp().getAudioManager().getAudioContext().createDelay(1);
this.#multitap = [];
/* Create the nodes necessary for the pipeline */
this.#convolverNode = getApp().getAudioManager().getAudioContext().createConvolver();
this.#preDelayNode = getApp().getAudioManager().getAudioContext().createDelay(1);
this.#multitapGainNode = getApp().getAudioManager().getAudioContext().createGain();
this.#wetGainNode = getApp().getAudioManager().getAudioContext().createGain();
this.#multitapNodes = [];
for (let i = 2; i > 0; i--) {
this.#multitap.push(getApp().getAudioManager().getAudioContext().createDelay(1));
this.#multitapNodes.push(getApp().getAudioManager().getAudioContext().createDelay(1));
}
this.#multitap.map((t, i) => {
if (this.#multitap[i + 1]) {
t.connect(this.#multitap[i + 1]);
/* Connect the nodes as follows:
                                     /------> pre delay -> convolver ------\
   input -> main gain -> wet gain -<                                        >-> destination
                                     \-> multitap[0] -> ... -> multitap[n]-/
   The multitap nodes simulate distinct echoes coming from the original sound. Multitap[0] is the original sound.
   The pre delay and convolver nodes simulate reverb.
*/
this.#inputNode.connect(this.#gainNode);
this.#gainNode.connect(this.#wetGainNode);
this.#wetGainNode.connect(this.#preDelayNode);
this.#wetGainNode.connect(this.#multitapNodes[0]);
this.#multitapNodes.map((t, i) => {
if (this.#multitapNodes[i + 1]) {
t.connect(this.#multitapNodes[i + 1]);
}
});
this.#multitapNodes[this.#multitapNodes.length - 1].connect(this.#multitapGainNode);
this.#multitapGainNode.connect(this.#destinationNode);
this.#preDelayNode.connect(this.#convolverNode);
this.#convolverNode.connect(this.#destinationNode);
this.#multitapGain = getApp().getAudioManager().getAudioContext().createGain();
this.#multitap[this.#multitap.length - 1].connect(this.#multitapGain);
this.#multitapGain.connect(this.#destinationNode);
this.#wet = getApp().getAudioManager().getAudioContext().createGain();
this.#gainNode.connect(this.#wet);
this.#wet.connect(this.#delay);
this.#wet.connect(this.#multitap[0]);
this.#delay.connect(this.#convolver);
getApp().getAudioManager().getAudioContext().audioWorklet.addModule("audiodopplerprocessor.js").then(() => {
const randomNoiseNode = new AudioWorkletNode(
getApp().getAudioManager().getAudioContext(),
"audio-doppler-processor",
);
this.#convolver.connect(randomNoiseNode);
randomNoiseNode.connect(this.#destinationNode);
});
this.#renderTail(reverbTime);
/* Render the random noise needed for the convolver node to simulate reverb */
this.#renderTail(0.1); //Arbitrary
}
#renderTail(reverbTime) {
let attack = 0;
let decay = 0.0;
/* Generate an offline audio context to render the reverb noise */
const tailContext = new OfflineAudioContext(
2,
getApp().getAudioManager().getAudioContext().sampleRate * reverbTime,
getApp().getAudioManager().getAudioContext().sampleRate
);
/* A noise oscillator and two filters are added to smooth the reverb */
this.#tailOsc = new Noise(tailContext, 1);
const tailLPFilter = new Filter(tailContext, "lowpass", 5000, 1);
const tailHPFilter = new Filter(tailContext, "highpass", 500, 1);
/* Initialize and connect the oscillator with the filters */
this.#tailOsc.init();
this.#tailOsc.connect(tailHPFilter.input);
tailHPFilter.connect(tailLPFilter.input);
@@ -184,12 +194,13 @@ export class AudioUnitPipeline {
this.#tailOsc.decay = decay;
setTimeout(() => {
/* Set the buffer of the convolver node */
tailContext.startRendering().then((buffer) => {
this.#convolver.buffer = buffer;
this.#convolverNode.buffer = buffer;
});
this.#tailOsc.on({ frequency: 500, velocity: 127 });
//tailOsc.off();
//tailOsc.off(); // TODO In the original example I copied, this was turned off. No idea why but it seems to work correctly if left on. To investigate.
}, 20);
}
}
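#renderTail uses the standard Web Audio trick for synthetic reverb: render a short burst of filtered, decaying noise in an OfflineAudioContext and use the result as the ConvolverNode's impulse response. A self-contained sketch of the technique (generic; the bare oscillator stands in for the commit's Noise/Filter chain):

// Render an impulse response offline and hand it to a convolver.
function renderImpulseResponse(sampleRate: number, reverbTime: number): Promise<AudioBuffer> {
  const offline = new OfflineAudioContext(2, Math.ceil(sampleRate * reverbTime), sampleRate);
  const osc = offline.createOscillator(); // placeholder source; the real code uses filtered noise
  osc.connect(offline.destination);
  osc.start();
  return offline.startRendering(); // resolves with an AudioBuffer usable as convolver.buffer
}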

View File

@@ -1,9 +1,8 @@
import { AudioSource } from "./audiosource";
import { getApp } from "../olympusapp";
import {WebAudioPeakMeter} from 'web-audio-peak-meter';
export class FileSource extends AudioSource {
#file: File | null = null;
#file: File;
#source: AudioBufferSourceNode;
#duration: number = 0;
#currentPosition: number = 0;
@@ -19,11 +18,8 @@ export class FileSource extends AudioSource {
this.#file = file;
this.setName(this.#file?.name ?? "N/A");
if (!this.#file) {
return;
}
/* Create the file reader and read the file from disk */
var reader = new FileReader();
reader.onload = (e) => {
var contents = e.target?.result;
@@ -31,6 +27,7 @@
getApp()
.getAudioManager()
.getAudioContext()
/* Decode the audio file. This method takes care of codecs */
.decodeAudioData(contents as ArrayBuffer, (audioBuffer) => {
this.#audioBuffer = audioBuffer;
this.#duration = audioBuffer.duration;
@@ -41,11 +38,13 @@
}
play() {
/* A new buffer source must be created every time the file is played */
this.#source = getApp().getAudioManager().getAudioContext().createBufferSource();
this.#source.buffer = this.#audioBuffer;
this.#source.connect(this.getOutputNode());
this.#source.loop = this.#looping;
/* Start playing the file at the selected position */
this.#source.start(0, this.#currentPosition);
this.#playing = true;
const now = Date.now() / 1000;
@@ -54,20 +53,22 @@
document.dispatchEvent(new CustomEvent("audioSourcesUpdated"));
this.#updateInterval = setInterval(() => {
/* Update the current position value every second */
const now = Date.now() / 1000;
this.#currentPosition += now - this.#lastUpdateTime;
this.#lastUpdateTime = now;
if (this.#currentPosition > this.#duration) {
this.#currentPosition = 0;
if (!this.#looping) this.stop();
if (!this.#looping) this.pause();
}
document.dispatchEvent(new CustomEvent("audioSourcesUpdated"));
}, 1000);
}
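A fresh AudioBufferSourceNode is created on every play() because buffer sources are one-shot by spec: start() may be called at most once per node. The core pattern in isolation:

// One-shot playback of a decoded buffer, starting at a saved offset (generic sketch).
function playFrom(ctx: AudioContext, buffer: AudioBuffer, offsetSeconds: number): AudioBufferSourceNode {
  const source = ctx.createBufferSource();
  source.buffer = buffer;
  source.connect(ctx.destination);
  source.start(0, offsetSeconds); // begin now, offsetSeconds into the buffer
  return source; // keep the handle so it can later be stopped and disconnected
}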
stop() {
pause() {
/* Disconnect the source and update the position to the current time (precisely) */
this.#source.stop();
this.#source.disconnect();
this.#playing = false;
@@ -92,12 +93,17 @@
}
setCurrentPosition(percentPosition) {
/* To change the current play position we must:
1) pause the current playback;
2) update the current position value;
3) restart playing after a short delay. The delay avoids restarting many times in a row while the user drags the position slider.
*/
if (this.#playing) {
clearTimeout(this.#restartTimeout);
this.#restartTimeout = setTimeout(() => this.play(), 1000);
}
this.stop();
this.pause();
this.#currentPosition = (percentPosition / 100) * this.#duration;
}
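The restart timeout is a debounce: every slider movement cancels the pending play() and re-arms it, so playback resumes only once the user stops dragging. The pattern in isolation:

// Generic debounce matching the clearTimeout/setTimeout pair above.
let restartTimeout: ReturnType<typeof setTimeout> | undefined;
function debouncedRestart(play: () => void, delayMs = 1000) {
  clearTimeout(restartTimeout); // each new drag event pushes the restart back
  restartTimeout = setTimeout(play, delayMs);
}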

View File

@@ -2,7 +2,7 @@ import { getApp } from "../olympusapp";
import { AudioSource } from "./audiosource";
export class MicrophoneSource extends AudioSource {
#node: MediaStreamAudioSourceNode;
#sourceNode: MediaStreamAudioSourceNode;
constructor() {
super();
@@ -10,12 +10,12 @@ export class MicrophoneSource extends AudioSource {
this.setName("Microphone");
}
/* Asynchronously initialize the microphone and connect it to the output node */
async initialize() {
const microphone = await navigator.mediaDevices.getUserMedia({ audio: true });
if (getApp().getAudioManager().getAudioContext()) {
this.#node = getApp().getAudioManager().getAudioContext().createMediaStreamSource(microphone);
this.#node.connect(this.getOutputNode());
this.#sourceNode = getApp().getAudioManager().getAudioContext().createMediaStreamSource(microphone);
this.#sourceNode.connect(this.getOutputNode());
}
}
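A hypothetical call site (an assumption, not shown in this diff): initialize() is async because getUserMedia() prompts for microphone permission, so the source must be awaited before it is connected to a sink.

// Assumed usage; connect() comes from the AudioSource base class above.
const microphone = new MicrophoneSource();
await microphone.initialize(); // resolves once the user grants microphone access
microphone.connect(getApp().getAudioManager().getSinks()[0]); // e.g. route the mic into a radio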

View File

@@ -34,7 +34,7 @@ export class PlaybackPipeline {
this.#gainNode.connect(getApp().getAudioManager().getAudioContext().destination);
}
play(arrayBuffer) {
playBuffer(arrayBuffer) {
const init = {
type: "key",
data: arrayBuffer,
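The init object above presumably becomes a WebCodecs EncodedAudioChunk that an AudioDecoder turns back into playable AudioData. A minimal sketch (the timestamp handling and codec configuration are assumptions; SRS audio is typically Opus, at the 16 kHz rate set in AudioManager):

// Assumed decode path: wrap the raw bytes and feed them to a pre-configured AudioDecoder.
function decodeChunk(decoder: AudioDecoder, arrayBuffer: ArrayBuffer) {
  const chunk = new EncodedAudioChunk({ type: "key", timestamp: 0, data: arrayBuffer });
  decoder.decode(chunk); // the decoder's output callback receives AudioData for playback
}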

View File

@@ -2,6 +2,7 @@ import { AudioSink } from "./audiosink";
import { AudioPacket } from "./audiopacket";
import { getApp } from "../olympusapp";
/* Radio sink: implements a simple SRS client in Olympus. Does not support encryption at the moment */
export class RadioSink extends AudioSink {
#encoder: AudioEncoder;
#destinationNode: MediaStreamAudioDestinationNode;

View File

@@ -3,6 +3,8 @@ import { getApp } from "../olympusapp";
import { Unit } from "../unit/unit";
import { AudioUnitPipeline } from "./audiounitpipeline";
/* Unit sink to implement a "loudspeaker" external sound. Useful for stuff like 5MC calls, air sirens,
scramble calls and so on. Ideally, one may want to move this code to the backend */
export class UnitSink extends AudioSink {
#unit: Unit;
#unitPipelines: {[key: string]: AudioUnitPipeline} = {};
@@ -13,6 +15,7 @@ export class UnitSink extends AudioSink {
this.#unit = sourceUnit;
this.setName(`${sourceUnit.getUnitName()} - ${sourceUnit.getName()}`);
/* TODO as of now, any client connecting after the sink was created will not receive the sound. Add ability to add new pipelines */
getApp()
.getAudioManager()
.getSRSClientsUnitIDs()

View File

@@ -28,6 +28,7 @@ interface CustomEventMap {
hideUnitContextMenu: CustomEvent<any>;
audioSourcesUpdated: CustomEvent<any>;
audioSinksUpdated: CustomEvent<any>;
audioManagerStateChanged: CustomEvent<any>;
}
declare global {

View File

@@ -4,9 +4,11 @@ import { getApp } from "../../olympusapp";
import { FaQuestionCircle } from "react-icons/fa";
import { AudioSourcePanel } from "./components/sourcepanel";
import { AudioSource } from "../../audio/audiosource";
import { FaVolumeHigh } from "react-icons/fa6";
export function AudioMenu(props: { open: boolean; onClose: () => void; children?: JSX.Element | JSX.Element[] }) {
const [sources, setSources] = useState([] as AudioSource[]);
const [audioManagerEnabled, setAudioManagerEnabled] = useState(false);
useEffect(() => {
/* Force a rerender */
@@ -18,21 +20,50 @@ export function AudioMenu(props: { open: boolean; onClose: () => void; children?
.map((source) => source)
);
});
}, []);
document.addEventListener("audioManagerStateChanged", () => {
setAudioManagerEnabled(getApp().getAudioManager().isRunning());
});
}, []);
return (
<Menu title="Audio sources" open={props.open} showBackButton={false} onClose={props.onClose}>
<div className="p-4 text-sm text-gray-400">The audio source panel allows you to add and manage audio sources.</div>
<div className="mx-6 flex rounded-lg bg-olympus-400 p-4 text-sm">
<div>
<FaQuestionCircle className="my-4 ml-2 mr-6 text-gray-400" />
</div>
<div className="flex flex-col gap-1">
<div className="text-gray-100">Use the controls to apply effects and start/stop the playback of an audio source.</div>
<div className="text-gray-400">Sources can be connected to your radios, or attached to a unit to be played on loudspeakers.</div>
</div>
{audioManagerEnabled && (
<>
<div>
<FaQuestionCircle className="my-4 ml-2 mr-6 text-gray-400" />
</div>
<div className="flex flex-col gap-1">
<div className="text-gray-100">Use the controls to apply effects and start/stop the playback of an audio source.</div>
<div className="text-gray-400">Sources can be connected to your radios, or attached to a unit to be played on loudspeakers.</div>
</div>
</>
)}
{!audioManagerEnabled && (
<>
<div>
<FaQuestionCircle className="my-4 ml-2 mr-6 text-gray-400" />
</div>
<div className="flex flex-col gap-1">
<div className="text-gray-100">
To enable the audio menu, first start the audio backend with the{" "}
<span
className={`
mx-1 mt-[-7px] inline-block translate-y-2 rounded-full
border-[1px] border-white p-1
`}
>
<FaVolumeHigh />
</span>{" "}
button on the navigation header.
</div>
</div>
</>
)}
</div>
<div
className={`
flex flex-col gap-2 p-5 font-normal text-gray-800
@@ -40,35 +71,36 @@ export function AudioMenu(props: { open: boolean; onClose: () => void; children?
`}
>
<>
{sources
.map((source) => {
return <AudioSourcePanel source={source} />;
})}
{sources.map((source) => {
return <AudioSourcePanel source={source} />;
})}
</>
<button
type="button"
className={`
mb-2 me-2 rounded-lg bg-blue-700 px-5 py-2.5 text-sm font-medium
text-white
dark:bg-blue-600 dark:hover:bg-blue-700 dark:focus:ring-blue-800
focus:outline-none focus:ring-4 focus:ring-blue-300
hover:bg-blue-800
`}
onClick={() => {
var input = document.createElement("input");
input.type = "file";
input.click();
input.onchange = (e: Event) => {
let target = e.target as HTMLInputElement;
if (target && target.files) {
var file = target.files[0];
getApp().getAudioManager().addFileSource(file);
}
};
}}
>
Add audio source
</button>
{audioManagerEnabled && (
<button
type="button"
className={`
mb-2 me-2 rounded-lg bg-blue-700 px-5 py-2.5 text-sm font-medium
text-white
dark:bg-blue-600 dark:hover:bg-blue-700 dark:focus:ring-blue-800
focus:outline-none focus:ring-4 focus:ring-blue-300
hover:bg-blue-800
`}
onClick={() => {
var input = document.createElement("input");
input.type = "file";
input.click();
input.onchange = (e: Event) => {
let target = e.target as HTMLInputElement;
if (target && target.files) {
var file = target.files[0];
getApp().getAudioManager().addFileSource(file);
}
};
}}
>
Add audio source
</button>
)}
</div>
</Menu>
);

View File

@@ -1,13 +1,14 @@
import React, { useEffect, useState } from "react";
import { Menu } from "./components/menu";
import { getApp } from "../../olympusapp";
import { OlToggle } from "../components/oltoggle";
import { RadioPanel } from "./components/radiopanel";
import { FaQuestionCircle } from "react-icons/fa";
import { RadioSink } from "../../audio/radiosink";
import { FaVolumeHigh } from "react-icons/fa6";
export function RadioMenu(props: { open: boolean; onClose: () => void; children?: JSX.Element | JSX.Element[] }) {
const [radios, setRadios] = useState([] as RadioSink[]);
const [audioManagerEnabled, setAudioManagerEnabled] = useState(false);
useEffect(() => {
/* Force a rerender */
@@ -20,20 +21,50 @@ export function RadioMenu(props: { open: boolean; onClose: () => void; children?
.map((radio) => radio)
);
});
document.addEventListener("audioManagerStateChanged", () => {
setAudioManagerEnabled(getApp().getAudioManager().isRunning());
});
}, []);
return (
<Menu title="Radio" open={props.open} showBackButton={false} onClose={props.onClose}>
<div className="p-4 text-sm text-gray-400">The radio menu allows you to talk on radio to the players online using SRS.</div>
<div className="mx-6 flex rounded-lg bg-olympus-400 p-4 text-sm">
<div>
<FaQuestionCircle className="my-4 ml-2 mr-6 text-gray-400" />
</div>
<div className="flex flex-col gap-1">
<div className="text-gray-100">Use the radio controls to tune to a frequency, then click on the PTT button to talk. </div>
<div className="text-gray-400">You can add up to 10 radios. Use the audio effects menu to play audio tracks or to add background noises.</div>
</div>
{audioManagerEnabled && (
<>
<div>
<FaQuestionCircle className="my-4 ml-2 mr-6 text-gray-400" />
</div>
<div className="flex flex-col gap-1">
<div className="text-gray-100">Use the radio controls to tune to a frequency, then click on the PTT button to talk. </div>
<div className="text-gray-400">You can add up to 10 radios. Use the audio effects menu to play audio tracks or to add background noises.</div>
</div>
</>
)}
{!audioManagerEnabled && (
<>
<div>
<FaQuestionCircle className="my-4 ml-2 mr-6 text-gray-400" />
</div>
<div className="flex flex-col gap-1">
<div className="text-gray-100">
To enable the radio menu, first start the audio backend with the{" "}
<span
className={`
mx-1 mt-[-7px] inline-block translate-y-2 rounded-full
border-[1px] border-white p-1
`}
>
<FaVolumeHigh />
</span>{" "}
button on the navigation header.
</div>
</div>
</>
)}
</div>
<div
className={`
flex flex-col gap-2 p-5 font-normal text-gray-800
@@ -43,7 +74,7 @@ export function RadioMenu(props: { open: boolean; onClose: () => void; children?
{radios.map((radio) => {
return <RadioPanel radio={radio}></RadioPanel>;
})}
{radios.length < 10 && (
{audioManagerEnabled && radios.length < 10 && (
<button
type="button"
className={`

View File

@@ -1,3 +1,4 @@
// TODO This code is duplicated in the frontend; it would be nice to share it
import { byteArrayToDouble, byteArrayToInteger, doubleToByteArray, integerToByteArray } from "../utils";
import { Buffer } from "buffer";
@@ -20,11 +21,6 @@ export class AudioPacket {
#unitID: number = 0;
#hops: number = 0;
/* Out of standard data (this is not compliant with SRS standard, used for external audio effects) */
#latitude: number | null = null;
#longitude: number | null = null;
#altitude: number | null = null;
/* Usually internally set only */
#packetID: number | null = null;
@@ -138,18 +134,6 @@ export class AudioPacket {
[...Buffer.from(this.#clientGUID, "utf-8")]
);
if (
this.#latitude !== undefined &&
this.#longitude !== undefined &&
this.#altitude !== undefined
) {
encodedData.concat(
[...doubleToByteArray(this.#latitude)],
[...doubleToByteArray(this.#longitude)],
[...doubleToByteArray(this.#altitude)]
);
}
// Set the lengths of the parts
let encPacketLen = integerToByteArray(encodedData.length, 2);
encodedData[0] = encPacketLen[0];
@@ -223,28 +207,4 @@ export class AudioPacket {
getHops() {
return this.#hops;
}
setLatitude(latitude: number) {
this.#latitude = latitude;
}
getLatitude() {
return this.#latitude;
}
setLongitude(longitude: number) {
this.#longitude = longitude;
}
getLongitude() {
return this.#longitude;
}
setAltitude(altitude: number) {
this.#altitude = altitude;
}
getAltitude() {
return this.#altitude;
}
}