diff --git a/frontend/react/src/audio/audiofilesource.ts b/frontend/react/src/audio/audiofilesource.ts new file mode 100644 index 00000000..d3d47de7 --- /dev/null +++ b/frontend/react/src/audio/audiofilesource.ts @@ -0,0 +1,51 @@ +import { AudioSource } from "./audiosource"; +import { bufferToF32Planar } from "../other/utils"; +import { getApp } from "../olympusapp"; + +export class AudioFileSource extends AudioSource { + #gainNode: GainNode; + #file: File | null = null; + #source: AudioBufferSourceNode; + + constructor(file) { + super(); + this.#file = file; + + this.#gainNode = getApp().getAudioManager().getAudioContext().createGain(); + } + + getNode() { + return this.#gainNode; + } + + play() { + if (!this.#file) { + return; + } + var reader = new FileReader(); + reader.onload = (e) => { + var contents = e.target?.result; + if (contents) { + getApp().getAudioManager().getAudioContext().decodeAudioData(contents as ArrayBuffer, (arrayBuffer) => { + this.#source = getApp().getAudioManager().getAudioContext().createBufferSource(); + this.#source.buffer = arrayBuffer; + this.#source.connect(this.#gainNode); + this.#source.start(); + }); + } + }; + reader.readAsArrayBuffer(this.#file); + } + + stop() { + this.#source.stop(); + } + + setGain(gain) { + this.#gainNode.gain.setValueAtTime(gain, getApp().getAudioManager().getAudioContext().currentTime); + } + + getName() { + return this.#file?.name ?? "N/A"; + } +} diff --git a/frontend/react/src/audio/audiomanager.ts b/frontend/react/src/audio/audiomanager.ts index 5510e229..18f151c8 100644 --- a/frontend/react/src/audio/audiomanager.ts +++ b/frontend/react/src/audio/audiomanager.ts @@ -1,77 +1,102 @@ -import { AudioRadioSetting } from "../interfaces"; +import { AudioMessageType } from "../constants/constants"; +import { MicrophoneSource } from "./microphonesource"; +import { SRSRadio } from "./srsradio"; import { getApp } from "../olympusapp"; +import { fromBytes, makeID } from "../other/utils"; +import { AudioFileSource } from "./audiofilesource"; +import { AudioSource } from "./audiosource"; import { Buffer } from "buffer"; -import { MicrophoneHandler } from "./microphonehandler"; import { PlaybackPipeline } from "./playbackpipeline"; -enum MessageType { - audio, - settings, -} - -function fromBytes(array) { - let res = 0; - for (let i = 0; i < array.length; i++) { - res = res << 8; - res += array[array.length - i - 1]; - } - return res; -} - -var context = new AudioContext(); - export class AudioManager { - #radioSettings: AudioRadioSetting[] = [ - { - frequency: 251000000, - modulation: 0, - ptt: false, - tuned: false, - volume: 0.5, - }, - ]; + #audioContext: AudioContext; - #microphoneHandlers: (MicrophoneHandler | null)[] = []; + /* The playback pipeline enables audio playback on the speakers/headphones */ + #playbackPipeline: PlaybackPipeline; + + /* The SRS radio audio sinks used to transmit the audio stream to the SRS backend */ + #radios: SRSRadio[] = []; + + /* List of all possible audio sources (microphone, file stream etc...) 
*/ + #sources: AudioSource[] = []; #address: string = "localhost"; #port: number = 4000; #socket: WebSocket | null = null; + #guid: string = makeID(22); constructor() { document.addEventListener("configLoaded", () => { let config = getApp().getConfig(); if (config["WSPort"]) { this.setPort(config["WSPort"]); - this.start(); } }); - this.#microphoneHandlers = this.#radioSettings.map(() => null); + setInterval(() => { + this.#syncRadioSettings(); + }, 1000); } start() { - const pipeline = new PlaybackPipeline(); + this.#audioContext = new AudioContext({ sampleRate: 16000 }); + this.#playbackPipeline = new PlaybackPipeline(); + + /* Connect the audio websocket */ let res = this.#address.match(/(?:http|https):\/\/(.+):/); let wsAddress = res ? res[1] : this.#address; - this.#socket = new WebSocket(`ws://${wsAddress}:${this.#port}`); + /* Log the opening of the connection */ this.#socket.addEventListener("open", (event) => { console.log("Connection to audio websocket successfull"); }); + /* Log any websocket errors */ this.#socket.addEventListener("error", (event) => { console.log(event); }); - this.#socket.addEventListener("message", async (event) => { - let bytes = event.data; - let packet = new Uint8Array(await bytes.arrayBuffer()) - let audioLength = fromBytes(packet.slice(2, 4)); - let audioData = packet.slice(6, 6 + audioLength); - let frequency = new DataView(packet.slice(6 + audioLength, 6 + audioLength + 8).reverse().buffer).getFloat64(0); - pipeline.play(audioData.buffer); + /* Handle the reception of a new message */ + this.#socket.addEventListener("message", (event) => { + this.#radios.forEach(async (radio) => { + /* Extract the audio data as array */ + let packetUint8Array = new Uint8Array(await event.data.arrayBuffer()); + + /* Extract the encoded audio data */ + let audioLength = fromBytes(packetUint8Array.slice(2, 4)); + let audioUint8Array = packetUint8Array.slice(6, 6 + audioLength); + + /* Extract the frequency value and play it on the speakers if we are listening to it*/ + let frequency = new DataView(packetUint8Array.slice(6 + audioLength, 6 + audioLength + 8).reverse().buffer).getFloat64(0); + if (radio.getSetting().frequency === frequency) { + this.#playbackPipeline.play(audioUint8Array.buffer); + } + }); }); + + /* Add two default radios */ + this.#radios = [new SRSRadio(), new SRSRadio()]; + document.dispatchEvent(new CustomEvent("radiosUpdated")); + + /* Add the microphone source and connect it directly to the radio */ + const microphoneSource = new MicrophoneSource(); + microphoneSource.initialize().then(() => { + this.#radios.forEach((radio) => { + microphoneSource.getNode().connect(radio.getNode()); + }); + this.#sources.push(microphoneSource); + document.dispatchEvent(new CustomEvent("audioSourcesUpdated")); + }); + } + + stop() { + this.#sources.forEach((source) => { + source.getNode().disconnect(); + }); + this.#sources = []; + + this.#radios = []; } setAddress(address) { @@ -82,24 +107,56 @@ export class AudioManager { this.#port = port; } - getRadioSettings() { - return JSON.parse(JSON.stringify(this.#radioSettings)); + addFileSource(file) { + const newSource = new AudioFileSource(file); + this.#sources.push(newSource); + newSource.getNode().connect(this.#radios[0].getNode()); + document.dispatchEvent(new CustomEvent("audioSourcesUpdated")); } - setRadioSettings(radioSettings: AudioRadioSetting[]) { - this.#radioSettings = radioSettings; + getRadios() { + return this.#radios; + } + addRadio() { + const newRadio = new SRSRadio(); + 
this.#sources[0].getNode().connect(newRadio.getNode()); + this.#radios.push(newRadio); + document.dispatchEvent(new CustomEvent("radiosUpdated")); + } + + removeRadio(idx) { + this.#radios[idx].getNode().disconnect(); + this.#radios.splice(idx, 1); + document.dispatchEvent(new CustomEvent("radiosUpdated")); + } + + getSources() { + return this.#sources; + } + + getGuid() { + return this.#guid; + } + + send(array) { + this.#socket?.send(array); + } + + getAudioContext() { + return this.#audioContext; + } + + #syncRadioSettings() { let message = { type: "Settings update", - settings: this.#radioSettings, + guid: this.#guid, + coalition: 2, + settings: this.#radios.map((radio) => { + return radio.getSetting(); + }), }; - this.#radioSettings.forEach((setting, idx) => { - if (setting.ptt && !this.#microphoneHandlers[idx]) { - this.#microphoneHandlers[idx] = new MicrophoneHandler(this.#socket, setting); - } - }); - - if (this.#socket?.readyState == 1) this.#socket?.send(new Uint8Array([MessageType.settings, ...Buffer.from(JSON.stringify(message), "utf-8")])); + if (this.#socket?.readyState == 1) this.#socket?.send(new Uint8Array([AudioMessageType.settings, ...Buffer.from(JSON.stringify(message), "utf-8")])); } } diff --git a/frontend/react/src/audio/audiopacket.ts b/frontend/react/src/audio/audiopacket.ts index 82afb48e..1be4d7cd 100644 --- a/frontend/react/src/audio/audiopacket.ts +++ b/frontend/react/src/audio/audiopacket.ts @@ -23,7 +23,7 @@ var packetID = 0; export class AudioPacket { #packet: Uint8Array; - constructor(data, settings) { + constructor(data, settings, guid) { let header: number[] = [0, 0, 0, 0, 0, 0]; let encFrequency: number[] = [...doubleToByteArray(settings.frequency)]; @@ -44,8 +44,8 @@ export class AudioPacket { encUnitID, encPacketID, encHops, - [...Buffer.from("ImF72dh9EYcIDyYRGaF9S9", "utf-8")], - [...Buffer.from("ImF72dh9EYcIDyYRGaF9S9", "utf-8")] + [...Buffer.from(guid, "utf-8")], + [...Buffer.from(guid, "utf-8")] ); let encPacketLen = getBytes(packet.length, 2); diff --git a/frontend/react/src/audio/audiosink.ts b/frontend/react/src/audio/audiosink.ts new file mode 100644 index 00000000..0ac06f8d --- /dev/null +++ b/frontend/react/src/audio/audiosink.ts @@ -0,0 +1,3 @@ +export abstract class AudioSink { + abstract getNode(): AudioNode; +} \ No newline at end of file diff --git a/frontend/react/src/audio/audiosource.ts b/frontend/react/src/audio/audiosource.ts new file mode 100644 index 00000000..2688f674 --- /dev/null +++ b/frontend/react/src/audio/audiosource.ts @@ -0,0 +1,22 @@ +import { AudioSourceSetting } from "../interfaces"; +import { AudioSink } from "./audiosink"; + +export abstract class AudioSource { + #setting: AudioSourceSetting = { + connectedTo: "", + filename: "", + playing: true, + }; + + getSetting() { + return this.#setting; + } + + setSetting(setting: AudioSourceSetting) { + this.#setting = setting; + } + + abstract play(): void; + abstract getNode(): AudioNode; + abstract getName(): string; +} diff --git a/frontend/react/src/audio/capturepipeline.ts b/frontend/react/src/audio/capturepipeline.ts deleted file mode 100644 index 482f4894..00000000 --- a/frontend/react/src/audio/capturepipeline.ts +++ /dev/null @@ -1,88 +0,0 @@ -export class CapturePipeline { - sampleRate: any; - codec: any; - sourceId: any; - onrawdata: any; - onencoded: any; - deviceId: any; - audioContext: any; - mic: any; - source: any; - destination: any; - encoder: any; - audioTrackProcessor: any; - duration: any; - - constructor(codec = "opus", sampleRate = 16000, duration = 
40000) { - this.sampleRate = sampleRate; - this.codec = codec; - this.duration = duration; - this.onrawdata = null; - this.onencoded = null; - } - async connect() { - const mic = navigator.mediaDevices.getUserMedia({ audio: true }); - - this.audioContext = new AudioContext({ - sampleRate: this.sampleRate, - latencyHint: "interactive", - }); - this.mic = await mic; - this.source = this.audioContext.createMediaStreamSource(this.mic); - this.destination = this.audioContext.createMediaStreamDestination(); - this.destination.channelCount = 1; - this.source.connect(this.destination); - - this.encoder = new AudioEncoder({ - output: this.handleEncodedData.bind(this), - error: this.handleEncodingError.bind(this), - }); - - this.encoder.configure({ - codec: this.codec, - numberOfChannels: 1, - sampleRate: this.sampleRate, - opus: { - frameDuration: this.duration, - }, - bitrateMode: "constant" - }); - - //@ts-ignore - this.audioTrackProcessor = new MediaStreamTrackProcessor({ - track: this.destination.stream.getAudioTracks()[0], - }); - this.audioTrackProcessor.readable.pipeTo( - new WritableStream({ - write: this.handleRawData.bind(this), - }) - ); - } - disconnect() { - this.source.disconnect(); - delete this.audioTrackProcessor; - delete this.encoder; - delete this.destination; - delete this.mic; - delete this.source; - } - - handleEncodedData(chunk, metadata) { - if (this.onencoded) { - this.onencoded(chunk, metadata); - } - const data = new ArrayBuffer(chunk.byteLength); - chunk.copyTo(data); - } - handleEncodingError(e) { - console.log(e); - } - - handleRawData(audioData) { - if (this.onrawdata) { - this.onrawdata(audioData); - } - this.encoder.encode(audioData); - audioData.close(); - } -} diff --git a/frontend/react/src/audio/microphonesource.ts b/frontend/react/src/audio/microphonesource.ts new file mode 100644 index 00000000..92cec7b6 --- /dev/null +++ b/frontend/react/src/audio/microphonesource.ts @@ -0,0 +1,29 @@ +import { getApp } from "../olympusapp"; +import { AudioSource } from "./audiosource"; + +export class MicrophoneSource extends AudioSource { + #node: AudioNode; + + constructor() { + super(); + } + + async initialize() { + const microphone = await navigator.mediaDevices.getUserMedia({ audio: true }); + if (getApp().getAudioManager().getAudioContext()) { + this.#node = getApp().getAudioManager().getAudioContext().createMediaStreamSource(microphone); + } + } + + getNode() { + return this.#node; + } + + play() { + // TODO, now is always on + } + + getName() { + return "Microphone" + } +} diff --git a/frontend/react/src/audio/playbackpipeline.ts b/frontend/react/src/audio/playbackpipeline.ts index b8540f0c..8f9a7c0f 100644 --- a/frontend/react/src/audio/playbackpipeline.ts +++ b/frontend/react/src/audio/playbackpipeline.ts @@ -1,82 +1,56 @@ +import { getApp } from "../olympusapp"; + export class PlaybackPipeline { - sampleRate: any; - codec: any; - sourceId: any; - onrawdata: any; - ondecoded: any; - deviceId: any; - audioContext: any; - mic: any; - source: any; - destination: any; - decoder: any; - audioTrackProcessor: any; - duration: any; - trackGenerator: any; - writer: any; + #decoder = new AudioDecoder({ + output: (chunk) => this.#handleDecodedData(chunk), + error: (e) => console.log(e), + }); + #trackGenerator: any; // TODO can we have typings? 
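/* Illustrative sketch (not part of this patch): the `any` fields and the @ts-ignore comments around MediaStreamTrackGenerator / MediaStreamTrackProcessor could be replaced by a small ambient declaration, for example in dom.d.ts. The shape below follows the W3C mediacapture-transform draft as implemented in Chromium; the generic parameter and its AudioData default are assumptions that rely on the WebCodecs typings in the TypeScript DOM lib. */
declare class MediaStreamTrackGenerator<T = AudioData> extends MediaStreamTrack {
  constructor(init: { kind: "audio" | "video" });
  /* AudioData frames written here are played out through the generated MediaStreamTrack */
  readonly writable: WritableStream<T>;
}
declare class MediaStreamTrackProcessor<T = AudioData> {
  constructor(init: { track: MediaStreamTrack; maxBufferSize?: number });
  /* AudioData frames captured from the given track are read from here */
  readonly readable: ReadableStream<T>;
}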
+ #writer: any; + #gainNode: GainNode; - constructor(codec = "opus", sampleRate = 16000, duration = 40000) { - this.sampleRate = sampleRate; - this.codec = codec; - this.duration = duration; - this.ondecoded = null; - this.audioContext = new AudioContext(); - - this.decoder = new AudioDecoder({ - output: (chunk) => this.handleDecodedData(chunk), - error: this.handleDecodingError.bind(this), - }); - - this.decoder.configure({ - codec: this.codec, + constructor() { + this.#decoder.configure({ + codec: 'opus', numberOfChannels: 1, - sampleRate: this.sampleRate, + sampleRate: 16000, + //@ts-ignore // TODO why is this giving an error? opus: { - frameDuration: this.duration, + frameDuration: 40000, }, bitrateMode: "constant", }); //@ts-ignore - this.trackGenerator = new MediaStreamTrackGenerator({ kind: "audio" }); - this.writer = this.trackGenerator.writable.getWriter(); + this.#trackGenerator = new MediaStreamTrackGenerator({ kind: "audio" }); + this.#writer = this.#trackGenerator.writable.getWriter(); - const stream = new MediaStream([this.trackGenerator]); + const stream = new MediaStream([this.#trackGenerator]); + const mediaStreamSource = getApp().getAudioManager().getAudioContext().createMediaStreamSource(stream); - const mediaStreamSource = this.audioContext.createMediaStreamSource(stream); - mediaStreamSource.connect(this.audioContext.destination) + /* Connect to the device audio output */ + this.#gainNode = getApp().getAudioManager().getAudioContext().createGain(); + mediaStreamSource.connect(this.#gainNode); + this.#gainNode.connect(getApp().getAudioManager().getAudioContext().destination); } - play(buffer) { + play(arrayBuffer) { const init = { type: "key", - data: buffer, - timestamp: 23000000, + data: arrayBuffer, + timestamp: 0, duration: 2000000, - transfer: [buffer], + transfer: [arrayBuffer], }; - //@ts-ignore - let chunk = new EncodedAudioChunk(init); + //@ts-ignore //TODO Typings? + let encodedAudioChunk = new EncodedAudioChunk(init); - this.decoder.decode(chunk); + this.#decoder.decode(encodedAudioChunk); } - disconnect() { - this.source.disconnect(); - delete this.audioTrackProcessor; - delete this.decoder; - delete this.destination; - delete this.mic; - delete this.source; - } - - handleDecodedData(chunk) { - this.writer.ready.then(() => { - this.writer.write(chunk); + #handleDecodedData(audioData) { + this.#writer.ready.then(() => { + this.#writer.write(audioData); }) } - handleDecodingError(e) { - console.log(e); - } - } diff --git a/frontend/react/src/audio/srsradio.ts b/frontend/react/src/audio/srsradio.ts new file mode 100644 index 00000000..75196af9 --- /dev/null +++ b/frontend/react/src/audio/srsradio.ts @@ -0,0 +1,83 @@ +import { AudioSink } from "./audiosink"; +import { AudioPacket } from "./audiopacket"; +import { getApp } from "../olympusapp"; + +export class SRSRadio extends AudioSink { + #encoder: AudioEncoder; + #node: MediaStreamAudioDestinationNode; + #audioTrackProcessor: any; // TODO can we have typings? 
+ #gainNode: GainNode; + + #setting = { + frequency: 251000000, + modulation: 0, + ptt: false, + tuned: false, + volume: 0.5, + }; + + constructor() { + super(); + + /* A gain node is used because it allows to connect multiple inputs */ + this.#gainNode = getApp().getAudioManager().getAudioContext().createGain(); + this.#node = getApp().getAudioManager().getAudioContext().createMediaStreamDestination(); + this.#node.channelCount = 1; + + this.#encoder = new AudioEncoder({ + output: (data) => this.#handleEncodedData(data), + error: (e) => {console.log(e);}, + }); + + this.#encoder.configure({ + codec: 'opus', + numberOfChannels: 1, + sampleRate: 16000, + //@ts-ignore // TODO why is it giving error? + opus: { + frameDuration: 40000, + }, + bitrateMode: "constant" + }); + + //@ts-ignore + this.#audioTrackProcessor = new MediaStreamTrackProcessor({ + track: this.#node.stream.getAudioTracks()[0], + }); + this.#audioTrackProcessor.readable.pipeTo( + new WritableStream({ + write: (arrayBuffer) => this.#handleRawData(arrayBuffer), + }) + ); + + this.#gainNode.connect(this.#node); + } + + getSetting() { + return this.#setting; + } + + setSetting(setting) { + this.#setting = setting; + document.dispatchEvent(new CustomEvent("radiosUpdated")); + } + + getNode() { + return this.#gainNode; + } + + #handleEncodedData(audioBuffer) { + let arrayBuffer = new ArrayBuffer(audioBuffer.byteLength); + audioBuffer.copyTo(arrayBuffer); + + if (this.#setting.ptt) { + let packet = new AudioPacket(new Uint8Array(arrayBuffer), this.#setting, getApp().getAudioManager().getGuid()); + getApp().getAudioManager().send(packet.getArray()); + } + } + + #handleRawData(audioData) { + this.#encoder.encode(audioData); + audioData.close(); + } +} diff --git a/frontend/react/src/constants/constants.ts b/frontend/react/src/constants/constants.ts index 111d961e..9948ea2d 100644 --- a/frontend/react/src/constants/constants.ts +++ b/frontend/react/src/constants/constants.ts @@ -356,3 +356,8 @@ export const GROUPING_ZOOM_TRANSITION = 13; export const MAX_SHOTS_SCATTER = 3; export const MAX_SHOTS_INTENSITY = 3; export const SHOTS_SCATTER_DEGREES = 10; + +export enum AudioMessageType { + audio, + settings, +} diff --git a/frontend/react/src/dom.d.ts b/frontend/react/src/dom.d.ts index 68f9e88e..19928962 100644 --- a/frontend/react/src/dom.d.ts +++ b/frontend/react/src/dom.d.ts @@ -26,6 +26,8 @@ interface CustomEventMap { hideMapContextMenu: CustomEvent; showUnitContextMenu: CustomEvent; hideUnitContextMenu: CustomEvent; + audioSourcesUpdated: CustomEvent; + radiosUpdated: CustomEvent; } declare global { diff --git a/frontend/react/src/index.css b/frontend/react/src/index.css index 4f66ad7a..0367ad24 100644 --- a/frontend/react/src/index.css +++ b/frontend/react/src/index.css @@ -40,4 +40,8 @@ z-index: 2006; } +.vertical-slider { + writing-mode: vertical-lr !important; + direction: rtl !important; +} diff --git a/frontend/react/src/interfaces.ts b/frontend/react/src/interfaces.ts index cabd9fc5..01c1661a 100644 --- a/frontend/react/src/interfaces.ts +++ b/frontend/react/src/interfaces.ts @@ -292,10 +292,17 @@ export interface ServerStatus { paused: boolean; } -export interface AudioRadioSetting { +export interface SRSRadioSetting { frequency: number; modulation: number; volume: number; ptt: boolean; tuned: boolean; -} \ No newline at end of file +} + +export interface AudioSourceSetting { + filename: string; + playing: boolean; + connectedTo: string; +} + diff --git a/frontend/react/src/other/utils.ts b/frontend/react/src/other/utils.ts 
index 818d5c38..7bfed841 100644 --- a/frontend/react/src/other/utils.ts +++ b/frontend/react/src/other/utils.ts @@ -532,3 +532,38 @@ export function getUnitsByLabel(filterString: string) { return [filteredAircraft, filteredHelicopters, filteredAirDefense, filteredGroundUnits, filteredNavyUnits]; } + +export function fromBytes(array) { + let res = 0; + for (let i = 0; i < array.length; i++) { + res = res << 8; + res += array[array.length - i - 1]; + } + return res; +} + +export function makeID(length) { + let result = ""; + const characters = + "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789"; + const charactersLength = characters.length; + let counter = 0; + while (counter < length) { + result += characters.charAt(Math.floor(Math.random() * charactersLength)); + counter += 1; + } + return result; +} + +export function bufferToF32Planar(input: AudioBuffer): Float32Array { + const result = new Float32Array(input.length * 1); + + let offset = 0; + for (let i = 0; i < 1; i++) { + const data = input.getChannelData(i); + result.set(data, offset); + offset = data.length; + } + + return result; +} \ No newline at end of file diff --git a/frontend/react/src/audio/microphonehandler.ts b/frontend/react/src/radio/microphonehandler.ts similarity index 91% rename from frontend/react/src/audio/microphonehandler.ts rename to frontend/react/src/radio/microphonehandler.ts index e421bca9..90f6a68d 100644 --- a/frontend/react/src/audio/microphonehandler.ts +++ b/frontend/react/src/radio/microphonehandler.ts @@ -1,10 +1,10 @@ -import { AudioRadioSetting } from "../interfaces"; +import { SRSRadioSetting } from "../interfaces"; import { AudioPacket } from "./audiopacket"; import { CapturePipeline } from "./capturepipeline"; export class MicrophoneHandler { #socket: WebSocket; - #setting: AudioRadioSetting; + #setting: SRSRadioSetting; constructor(socket, setting) { this.#socket = socket; diff --git a/frontend/react/src/ui/components/olrangeslider.tsx b/frontend/react/src/ui/components/olrangeslider.tsx index 9dc53c2c..8412bc11 100644 --- a/frontend/react/src/ui/components/olrangeslider.tsx +++ b/frontend/react/src/ui/components/olrangeslider.tsx @@ -6,6 +6,7 @@ export function OlRangeSlider(props: { max?: number; step?: number; className?: string; + vertical?: boolean; onChange: (e: ChangeEvent) => void; }) { var elementRef = useRef(null); @@ -33,6 +34,7 @@ export function OlRangeSlider(props: { h-2 w-full cursor-pointer appearance-none rounded-lg bg-gray-200 dark:bg-gray-700 `} + /> ); } diff --git a/frontend/react/src/ui/panels/components/audiosourcepanel.tsx b/frontend/react/src/ui/panels/components/audiosourcepanel.tsx new file mode 100644 index 00000000..1ba0f0df --- /dev/null +++ b/frontend/react/src/ui/panels/components/audiosourcepanel.tsx @@ -0,0 +1,49 @@ +import React, { useEffect, useState } from "react"; +import { OlStateButton } from "../../components/olstatebutton"; +import { faPlay, faRepeat } from "@fortawesome/free-solid-svg-icons"; +import { getApp } from "../../../olympusapp"; +import { AudioSource } from "../../../audio/audiosource"; +import { FaVolumeHigh } from "react-icons/fa6"; +import { OlRangeSlider } from "../../components/olrangeslider"; + +export function AudioSourcePanel(props: { index: number; source: AudioSource }) { + return ( +
+ Source: {props.source.getName()} +
+ { + let sources = getApp().getAudioManager().getSources(); + sources[props.index].play(); + }} + tooltip="Play file" + > + { + //let setting = props.setting; + //setting.volume = parseFloat(ev.currentTarget.value) / 100; + //props.updateSetting(setting); + }} + className="my-auto" + /> + { + + }} + tooltip="Loop" + > +
+ +
+ ); +} diff --git a/frontend/react/src/ui/panels/components/radiopanel.tsx b/frontend/react/src/ui/panels/components/radiopanel.tsx new file mode 100644 index 00000000..f37278c8 --- /dev/null +++ b/frontend/react/src/ui/panels/components/radiopanel.tsx @@ -0,0 +1,70 @@ +import React, { useEffect, useState } from "react"; +import { OlFrequencyInput } from "../../components/olfrequencyinput"; +import { FaTrash } from "react-icons/fa6"; +import { OlLabelToggle } from "../../components/ollabeltoggle"; +import { OlStateButton } from "../../components/olstatebutton"; +import { faEarListen, faMicrophoneLines } from "@fortawesome/free-solid-svg-icons"; +import { SRSRadio } from "../../../audio/srsradio"; +import { SRSRadioSetting } from "../../../interfaces"; +import { getApp } from "../../../olympusapp"; + +export function RadioPanel(props: { index: number; setting: SRSRadioSetting, onSettingUpdate: (SRSRadioSetting) => void }) { + return ( +
+
+ Radio {props.index + 1} +
{getApp().getAudioManager().removeRadio(props.index);}}> + +
+
+ { + let setting = props.setting; + setting.frequency = value; + props.onSettingUpdate(setting); + }} + /> +
+ { + let setting = props.setting; + setting.modulation = setting.modulation === 1 ? 0 : 1; + props.onSettingUpdate(setting); + }} + > + + { + let setting = props.setting; + setting.ptt = !setting.ptt; + props.onSettingUpdate(setting); + }} + tooltip="Talk on frequency" + > + + { + let setting = props.setting; + setting.tuned = !setting.tuned; + props.onSettingUpdate(setting); + }} + tooltip="Tune to radio" + > +
+
+ ); +} diff --git a/frontend/react/src/ui/panels/radiomenu.tsx b/frontend/react/src/ui/panels/radiomenu.tsx index d0b759f8..ac63b7a3 100644 --- a/frontend/react/src/ui/panels/radiomenu.tsx +++ b/frontend/react/src/ui/panels/radiomenu.tsx @@ -1,73 +1,107 @@ import React, { useEffect, useState } from "react"; import { Menu } from "./components/menu"; -import { OlCheckbox } from "../components/olcheckbox"; -import { OlRangeSlider } from "../components/olrangeslider"; -import { OlNumberInput } from "../components/olnumberinput"; -import { MapOptions } from "../../types/types"; import { getApp } from "../../olympusapp"; -import { OlFrequencyInput } from "../components/olfrequencyinput"; -import { OlStateButton } from "../components/olstatebutton"; -import { faEarListen, faMicrophoneLines } from "@fortawesome/free-solid-svg-icons"; -import { OlLabelToggle } from "../components/ollabeltoggle"; -import { FaVolumeHigh } from "react-icons/fa6"; +import { OlToggle } from "../components/oltoggle"; +import { RadioPanel } from "./components/radiopanel"; +import { FaQuestionCircle } from "react-icons/fa"; +import { SRSRadioSetting } from "../../interfaces"; export function RadioMenu(props: { open: boolean; onClose: () => void; children?: JSX.Element | JSX.Element[] }) { - const [frequency1, setFrequency1] = useState(251000000); - const [ptt1, setPTT1] = useState(false); - const [frequency2, setFrequency2] = useState(251000000); - const [frequency3, setFrequency3] = useState(243000000); - const [frequency4, setFrequency4] = useState(11200000); + const [radioEnabled, setRadioEnabled] = useState(false); + const [radioSettings, setRadioSettings] = useState([] as SRSRadioSetting[]); useEffect(() => { - if (getApp()) { - let settings = getApp().getAudioManager().getRadioSettings(); - settings[0].frequency = frequency1; - settings[0].ptt = ptt1; - getApp().getAudioManager().setRadioSettings(settings); - } - }); + /* Force a rerender */ + document.addEventListener("radiosUpdated", () => { + setRadioSettings( + getApp() + ?.getAudioManager() + .getRadios() + .map((radio) => radio.getSetting()) + ); + }); + }, []); return ( +
The radio menu allows you to talk over the radio to online players using SRS.
+
+
+ +
+
+
Use the radio controls to tune to a frequency, then click on the PTT button to talk.
+
You can add up to 10 radios. Use the audio effects menu to play audio tracks or add background noise.
+
+
-
- Radio 1 - { - setFrequency1(value); +
+ Enable radio: + { + radioEnabled ? getApp().getAudioManager().stop() : getApp().getAudioManager().start(); + setRadioEnabled(!radioEnabled); }} /> -
- - {}} className="my-auto" /> - 50 -
-
- {}}> - { - setPTT1(!ptt1); - }} - tooltip="Talk on frequency" - > - {}} tooltip="Tune to radio"> -
+ {radioEnabled && radioSettings.map((setting, idx) => { + return ( + { + getApp().getAudioManager().getRadios()[idx].setSetting(setting); + }} + > + ); + })} + {radioEnabled && radioSettings.length < 10 && ( + + )}
); } + +/* +{refreshSources >= 0 && + getApp() + ?.getAudioManager() + .getSources() + .map((source, idx) => { + return ; + })} + */ diff --git a/frontend/server/Richard_Wagner_-_The_Valkyrie_-_Ride_of_the_Valkyries.ogg b/frontend/server/Richard_Wagner_-_The_Valkyrie_-_Ride_of_the_Valkyries.ogg new file mode 100644 index 00000000..fb13b008 Binary files /dev/null and b/frontend/server/Richard_Wagner_-_The_Valkyrie_-_Ride_of_the_Valkyries.ogg differ diff --git a/frontend/server/src/audio/audiobackend.ts b/frontend/server/src/audio/audiobackend.ts index fc20acc3..71331aa4 100644 --- a/frontend/server/src/audio/audiobackend.ts +++ b/frontend/server/src/audio/audiobackend.ts @@ -17,11 +17,5 @@ export class AudioBackend { wss.on("connection", (ws) => { this.handlers.push(new SRSHandler(ws, this.SRSPort)); }); - - wss.on("disconnection", (ws) => { - this.handlers = this.handlers.filter((handler) => { - handler.ws != ws; - }); - }); } } diff --git a/frontend/server/src/audio/srshandler.ts b/frontend/server/src/audio/srshandler.ts index 831b10d2..d402942e 100644 --- a/frontend/server/src/audio/srshandler.ts +++ b/frontend/server/src/audio/srshandler.ts @@ -13,28 +13,15 @@ enum MessageType { settings, } -function makeID(length) { - let result = ""; - const characters = - "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789"; - const charactersLength = characters.length; - let counter = 0; - while (counter < length) { - result += characters.charAt(Math.floor(Math.random() * charactersLength)); - counter += 1; - } - return result; -} - export class SRSHandler { ws: any; tcp = new net.Socket(); udp = require("dgram").createSocket("udp4"); data = JSON.parse(JSON.stringify(defaultSRSData)); syncInterval: any; + packetQueue = []; constructor(ws, SRSPort) { - this.data.ClientGuid = "ImF72dh9EYcIDyYRGaF9S9"; this.data.Name = `Olympus${globalIndex}`; globalIndex += 1; @@ -45,6 +32,7 @@ export class SRSHandler { switch (data[0]) { case MessageType.audio: let audioBuffer = data.slice(1); + this.packetQueue.push(audioBuffer); this.udp.send(audioBuffer, SRSPort, "localhost", (error) => { if (error) console.log(`Error sending data to SRS server: ${error}`); @@ -52,6 +40,8 @@ export class SRSHandler { break; case MessageType.settings: let message = JSON.parse(data.slice(1)); + this.data.ClientGuid = message.guid; + this.data.Coalition = message.coalition; message.settings.forEach((setting, idx) => { this.data.RadioInfo.radios[idx].freq = setting.frequency; this.data.RadioInfo.radios[idx].modulation = setting.modulation; @@ -80,9 +70,10 @@ export class SRSHandler { Version: SRS_VERSION, }; - this.udp.send(this.data.ClientGuid, SRSPort, "localhost", (error) => { - if (error) console.log(`Error pinging SRS server on UDP: ${error}`); - }); + this.data.ClientGuid !== "" && + this.udp.send(this.data.ClientGuid, SRSPort, "localhost", (error) => { + if (error) console.log(`Error pinging SRS server on UDP: ${error}`); + }); if (this.tcp.readyState == "open") this.tcp.write(`${JSON.stringify(SYNC)}\n`); @@ -92,12 +83,11 @@ export class SRSHandler { /* UDP */ this.udp.on("listening", () => { - console.log(`Listening to SRS Server on UDP port ${SRSPort}`) + console.log(`Listening to SRS Server on UDP port ${SRSPort}`); }); this.udp.on("message", (message, remote) => { - if (this.ws && message.length > 22) - this.ws.send(message); + if (this.ws && message.length > 22) this.ws.send(message); }); } }
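/* Illustrative sketch (not part of this patch): it restates the byte layout that AudioManager's websocket "message" handler relies on when it slices the SRS packets relayed by the server-side SRSHandler: bytes 2..3 hold the little-endian length of the Opus payload, the payload starts at offset 6, and the transmission frequency follows it as a little-endian float64. The function and field names below are assumptions chosen for readability, not identifiers used by SRS or by this patch. */
interface ParsedSRSPacket {
  opusPayload: Uint8Array;
  frequency: number;
}

function parseSRSPacket(packet: Uint8Array): ParsedSRSPacket {
  /* Bytes 2..3: length of the encoded audio payload, little-endian (same result as fromBytes()) */
  const audioLength = packet[2] | (packet[3] << 8);

  /* Offset 6: start of the Opus payload */
  const opusPayload = packet.slice(6, 6 + audioLength);

  /* Next 8 bytes: frequency in Hz, stored little-endian; the handler reverses the slice and reads it big-endian, which is equivalent */
  const view = new DataView(packet.buffer, packet.byteOffset + 6 + audioLength, 8);
  const frequency = view.getFloat64(0, true);

  return { opusPayload, frequency };
}

/* In the client-to-server direction, SRSHandler expects each websocket message to start with a one-byte AudioMessageType prefix and switches on it: audio payloads are forwarded to the SRS UDP socket, settings payloads are parsed as UTF-8 JSON. */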