More work on sources

This commit is contained in:
Davide Passoni 2024-09-03 20:19:11 +02:00
parent b352bc824c
commit a64ccab15f
22 changed files with 604 additions and 281 deletions

View File

@ -0,0 +1,51 @@
import { AudioSource } from "./audiosource";
import { bufferToF32Planar } from "../other/utils";
import { getApp } from "../olympusapp";
/* Audio source that plays back a user-provided audio file through a gain node. */
export class AudioFileSource extends AudioSource {
  #gainNode: GainNode;
  #file: File | null = null;
  /* Only set after play() has decoded the file; AudioBufferSourceNodes are single-use. */
  #source: AudioBufferSourceNode | null = null;

  constructor(file: File) {
    super();
    this.#file = file;
    /* The gain node is this source's output and provides volume control. */
    this.#gainNode = getApp().getAudioManager().getAudioContext().createGain();
  }

  /** Returns the output node to connect to downstream sinks. */
  getNode(): AudioNode {
    return this.#gainNode;
  }

  /** Reads and decodes the file, then starts playback. Restarts from the
   *  beginning (stopping the previous buffer source) if already playing. */
  play() {
    if (!this.#file) {
      return;
    }
    const reader = new FileReader();
    reader.onload = (e) => {
      const contents = e.target?.result;
      if (contents) {
        getApp()
          .getAudioManager()
          .getAudioContext()
          .decodeAudioData(contents as ArrayBuffer, (audioBuffer) => {
            /* Stop any previous playback so we don't leak a still-playing node. */
            this.stop();
            this.#source = getApp().getAudioManager().getAudioContext().createBufferSource();
            this.#source.buffer = audioBuffer;
            this.#source.connect(this.#gainNode);
            this.#source.start();
          });
      }
    };
    reader.readAsArrayBuffer(this.#file);
  }

  /** Stops playback. Safe to call when nothing is playing (was a crash before). */
  stop() {
    this.#source?.stop();
    this.#source = null;
  }

  /** Sets the playback volume as a linear gain value. */
  setGain(gain: number) {
    this.#gainNode.gain.setValueAtTime(gain, getApp().getAudioManager().getAudioContext().currentTime);
  }

  /** Human-readable name shown in the UI (the file name). */
  getName() {
    return this.#file?.name ?? "N/A";
  }
}

View File

@ -1,77 +1,102 @@
import { AudioRadioSetting } from "../interfaces";
import { AudioMessageType } from "../constants/constants";
import { MicrophoneSource } from "./microphonesource";
import { SRSRadio } from "./srsradio";
import { getApp } from "../olympusapp";
import { fromBytes, makeID } from "../other/utils";
import { AudioFileSource } from "./audiofilesource";
import { AudioSource } from "./audiosource";
import { Buffer } from "buffer";
import { MicrophoneHandler } from "./microphonehandler";
import { PlaybackPipeline } from "./playbackpipeline";
enum MessageType {
audio,
settings,
}
function fromBytes(array) {
let res = 0;
for (let i = 0; i < array.length; i++) {
res = res << 8;
res += array[array.length - i - 1];
}
return res;
}
var context = new AudioContext();
export class AudioManager {
#radioSettings: AudioRadioSetting[] = [
{
frequency: 251000000,
modulation: 0,
ptt: false,
tuned: false,
volume: 0.5,
},
];
#audioContext: AudioContext;
#microphoneHandlers: (MicrophoneHandler | null)[] = [];
/* The playback pipeline enables audio playback on the speakers/headphones */
#playbackPipeline: PlaybackPipeline;
/* The SRS radio audio sinks used to transmit the audio stream to the SRS backend */
#radios: SRSRadio[] = [];
/* List of all possible audio sources (microphone, file stream etc...) */
#sources: AudioSource[] = [];
#address: string = "localhost";
#port: number = 4000;
#socket: WebSocket | null = null;
#guid: string = makeID(22);
constructor() {
document.addEventListener("configLoaded", () => {
let config = getApp().getConfig();
if (config["WSPort"]) {
this.setPort(config["WSPort"]);
this.start();
}
});
this.#microphoneHandlers = this.#radioSettings.map(() => null);
setInterval(() => {
this.#syncRadioSettings();
}, 1000);
}
start() {
const pipeline = new PlaybackPipeline();
this.#audioContext = new AudioContext({ sampleRate: 16000 });
this.#playbackPipeline = new PlaybackPipeline();
/* Connect the audio websocket */
let res = this.#address.match(/(?:http|https):\/\/(.+):/);
let wsAddress = res ? res[1] : this.#address;
this.#socket = new WebSocket(`ws://${wsAddress}:${this.#port}`);
/* Log the opening of the connection */
this.#socket.addEventListener("open", (event) => {
console.log("Connection to audio websocket successfull");
});
/* Log any websocket errors */
this.#socket.addEventListener("error", (event) => {
console.log(event);
});
this.#socket.addEventListener("message", async (event) => {
let bytes = event.data;
let packet = new Uint8Array(await bytes.arrayBuffer())
let audioLength = fromBytes(packet.slice(2, 4));
let audioData = packet.slice(6, 6 + audioLength);
let frequency = new DataView(packet.slice(6 + audioLength, 6 + audioLength + 8).reverse().buffer).getFloat64(0);
pipeline.play(audioData.buffer);
/* Handle the reception of a new message */
this.#socket.addEventListener("message", (event) => {
this.#radios.forEach(async (radio) => {
/* Extract the audio data as array */
let packetUint8Array = new Uint8Array(await event.data.arrayBuffer());
/* Extract the encoded audio data */
let audioLength = fromBytes(packetUint8Array.slice(2, 4));
let audioUint8Array = packetUint8Array.slice(6, 6 + audioLength);
/* Extract the frequency value and play it on the speakers if we are listening to it*/
let frequency = new DataView(packetUint8Array.slice(6 + audioLength, 6 + audioLength + 8).reverse().buffer).getFloat64(0);
if (radio.getSetting().frequency === frequency) {
this.#playbackPipeline.play(audioUint8Array.buffer);
}
});
});
/* Add two default radios */
this.#radios = [new SRSRadio(), new SRSRadio()];
document.dispatchEvent(new CustomEvent("radiosUpdated"));
/* Add the microphone source and connect it directly to the radio */
const microphoneSource = new MicrophoneSource();
microphoneSource.initialize().then(() => {
this.#radios.forEach((radio) => {
microphoneSource.getNode().connect(radio.getNode());
});
this.#sources.push(microphoneSource);
document.dispatchEvent(new CustomEvent("audioSourcesUpdated"));
});
}
stop() {
this.#sources.forEach((source) => {
source.getNode().disconnect();
});
this.#sources = [];
this.#radios = [];
}
setAddress(address) {
@ -82,24 +107,56 @@ export class AudioManager {
this.#port = port;
}
getRadioSettings() {
return JSON.parse(JSON.stringify(this.#radioSettings));
addFileSource(file) {
const newSource = new AudioFileSource(file);
this.#sources.push(newSource);
newSource.getNode().connect(this.#radios[0].getNode());
document.dispatchEvent(new CustomEvent("audioSourcesUpdated"));
}
setRadioSettings(radioSettings: AudioRadioSetting[]) {
this.#radioSettings = radioSettings;
getRadios() {
return this.#radios;
}
addRadio() {
const newRadio = new SRSRadio();
this.#sources[0].getNode().connect(newRadio.getNode());
this.#radios.push(newRadio);
document.dispatchEvent(new CustomEvent("radiosUpdated"));
}
removeRadio(idx) {
this.#radios[idx].getNode().disconnect();
this.#radios.splice(idx, 1);
document.dispatchEvent(new CustomEvent("radiosUpdated"));
}
getSources() {
return this.#sources;
}
getGuid() {
return this.#guid;
}
send(array) {
this.#socket?.send(array);
}
getAudioContext() {
return this.#audioContext;
}
#syncRadioSettings() {
let message = {
type: "Settings update",
settings: this.#radioSettings,
guid: this.#guid,
coalition: 2,
settings: this.#radios.map((radio) => {
return radio.getSetting();
}),
};
this.#radioSettings.forEach((setting, idx) => {
if (setting.ptt && !this.#microphoneHandlers[idx]) {
this.#microphoneHandlers[idx] = new MicrophoneHandler(this.#socket, setting);
}
});
if (this.#socket?.readyState == 1) this.#socket?.send(new Uint8Array([MessageType.settings, ...Buffer.from(JSON.stringify(message), "utf-8")]));
if (this.#socket?.readyState == 1) this.#socket?.send(new Uint8Array([AudioMessageType.settings, ...Buffer.from(JSON.stringify(message), "utf-8")]));
}
}

View File

@ -23,7 +23,7 @@ var packetID = 0;
export class AudioPacket {
#packet: Uint8Array;
constructor(data, settings) {
constructor(data, settings, guid) {
let header: number[] = [0, 0, 0, 0, 0, 0];
let encFrequency: number[] = [...doubleToByteArray(settings.frequency)];
@ -44,8 +44,8 @@ export class AudioPacket {
encUnitID,
encPacketID,
encHops,
[...Buffer.from("ImF72dh9EYcIDyYRGaF9S9", "utf-8")],
[...Buffer.from("ImF72dh9EYcIDyYRGaF9S9", "utf-8")]
[...Buffer.from(guid, "utf-8")],
[...Buffer.from(guid, "utf-8")]
);
let encPacketLen = getBytes(packet.length, 2);

View File

@ -0,0 +1,3 @@
/* Base class for all audio sinks, i.e. consumers of audio streams (e.g. SRS radios).
   Audio sources connect their output to the node a sink exposes. */
export abstract class AudioSink {
/* Returns the Web Audio input node that sources connect to. */
abstract getNode(): AudioNode;
}

View File

@ -0,0 +1,22 @@
import { AudioSourceSetting } from "../interfaces";
import { AudioSink } from "./audiosink";
/* Base class for all audio sources (microphone, audio files, streams...).
   A source exposes a Web Audio node whose output is connected to AudioSink nodes. */
export abstract class AudioSource {
/* Per-source state; connectedTo presumably identifies the sink this source feeds — TODO confirm against callers. */
#setting: AudioSourceSetting = {
connectedTo: "",
filename: "",
playing: true,
};
getSetting() {
return this.#setting;
}
setSetting(setting: AudioSourceSetting) {
this.#setting = setting;
}
/* Starts (or resumes) producing audio. */
abstract play(): void;
/* Output node to connect to sinks. */
abstract getNode(): AudioNode;
/* Human-readable name shown in the UI. */
abstract getName(): string;
}

View File

@ -1,88 +0,0 @@
/* Pipeline that captures microphone audio and encodes it with Opus via WebCodecs.
   Raw frames and encoded chunks are delivered through the onrawdata/onencoded callbacks. */
export class CapturePipeline {
sampleRate: any;
codec: any;
sourceId: any;
onrawdata: any;
onencoded: any;
deviceId: any;
audioContext: any;
mic: any;
source: any;
destination: any;
encoder: any;
audioTrackProcessor: any;
duration: any;
/* codec: WebCodecs codec string; sampleRate in Hz; duration: Opus frame duration (microseconds, per AudioEncoder opus config). */
constructor(codec = "opus", sampleRate = 16000, duration = 40000) {
this.sampleRate = sampleRate;
this.codec = codec;
this.duration = duration;
this.onrawdata = null;
this.onencoded = null;
}
/* Requests microphone access and wires mic -> destination -> track processor -> encoder. */
async connect() {
const mic = navigator.mediaDevices.getUserMedia({ audio: true });
this.audioContext = new AudioContext({
sampleRate: this.sampleRate,
latencyHint: "interactive",
});
this.mic = await mic;
this.source = this.audioContext.createMediaStreamSource(this.mic);
this.destination = this.audioContext.createMediaStreamDestination();
this.destination.channelCount = 1;
this.source.connect(this.destination);
this.encoder = new AudioEncoder({
output: this.handleEncodedData.bind(this),
error: this.handleEncodingError.bind(this),
});
this.encoder.configure({
codec: this.codec,
numberOfChannels: 1,
sampleRate: this.sampleRate,
opus: {
frameDuration: this.duration,
},
bitrateMode: "constant"
});
/* Pull raw audio frames out of the destination's MediaStream track and feed them to handleRawData. */
//@ts-ignore
this.audioTrackProcessor = new MediaStreamTrackProcessor({
track: this.destination.stream.getAudioTracks()[0],
});
this.audioTrackProcessor.readable.pipeTo(
new WritableStream({
write: this.handleRawData.bind(this),
})
);
}
/* Tears down the pipeline. NOTE(review): the encoder is deleted without flush()/close() — confirm intended. */
disconnect() {
this.source.disconnect();
delete this.audioTrackProcessor;
delete this.encoder;
delete this.destination;
delete this.mic;
delete this.source;
}
/* Forwards each encoded chunk to onencoded. The local copy below is created but never used. */
handleEncodedData(chunk, metadata) {
if (this.onencoded) {
this.onencoded(chunk, metadata);
}
const data = new ArrayBuffer(chunk.byteLength);
chunk.copyTo(data);
}
handleEncodingError(e) {
console.log(e);
}
/* Forwards each raw AudioData frame to onrawdata, encodes it, then releases it. */
handleRawData(audioData) {
if (this.onrawdata) {
this.onrawdata(audioData);
}
this.encoder.encode(audioData);
audioData.close();
}
}

View File

@ -0,0 +1,29 @@
import { getApp } from "../olympusapp";
import { AudioSource } from "./audiosource";
/* Audio source that captures audio from the user's microphone.
   (The previous explicit constructor only called super() and was removed.) */
export class MicrophoneSource extends AudioSource {
  /* Undefined until initialize() has resolved. */
  #node: MediaStreamAudioSourceNode;

  /** Requests microphone access and creates the source node.
   *  Must be awaited before getNode() is used. */
  async initialize() {
    const microphone = await navigator.mediaDevices.getUserMedia({ audio: true });
    if (getApp().getAudioManager().getAudioContext()) {
      this.#node = getApp().getAudioManager().getAudioContext().createMediaStreamSource(microphone);
    }
  }

  /** Microphone output node. NOTE(review): returns undefined before initialize() resolves — callers must wait. */
  getNode() {
    return this.#node;
  }

  play() {
    // TODO: the microphone is currently always on; implement push-to-talk gating here.
  }

  getName() {
    return "Microphone";
  }
}

View File

@ -1,82 +1,56 @@
import { getApp } from "../olympusapp";
export class PlaybackPipeline {
sampleRate: any;
codec: any;
sourceId: any;
onrawdata: any;
ondecoded: any;
deviceId: any;
audioContext: any;
mic: any;
source: any;
destination: any;
decoder: any;
audioTrackProcessor: any;
duration: any;
trackGenerator: any;
writer: any;
#decoder = new AudioDecoder({
output: (chunk) => this.#handleDecodedData(chunk),
error: (e) => console.log(e),
});
#trackGenerator: any; // TODO can we have typings?
#writer: any;
#gainNode: GainNode;
constructor(codec = "opus", sampleRate = 16000, duration = 40000) {
this.sampleRate = sampleRate;
this.codec = codec;
this.duration = duration;
this.ondecoded = null;
this.audioContext = new AudioContext();
this.decoder = new AudioDecoder({
output: (chunk) => this.handleDecodedData(chunk),
error: this.handleDecodingError.bind(this),
});
this.decoder.configure({
codec: this.codec,
constructor() {
this.#decoder.configure({
codec: 'opus',
numberOfChannels: 1,
sampleRate: this.sampleRate,
sampleRate: 16000,
//@ts-ignore // TODO why is this giving an error?
opus: {
frameDuration: this.duration,
frameDuration: 40000,
},
bitrateMode: "constant",
});
//@ts-ignore
this.trackGenerator = new MediaStreamTrackGenerator({ kind: "audio" });
this.writer = this.trackGenerator.writable.getWriter();
this.#trackGenerator = new MediaStreamTrackGenerator({ kind: "audio" });
this.#writer = this.#trackGenerator.writable.getWriter();
const stream = new MediaStream([this.trackGenerator]);
const stream = new MediaStream([this.#trackGenerator]);
const mediaStreamSource = getApp().getAudioManager().getAudioContext().createMediaStreamSource(stream);
const mediaStreamSource = this.audioContext.createMediaStreamSource(stream);
mediaStreamSource.connect(this.audioContext.destination)
/* Connect to the device audio output */
this.#gainNode = getApp().getAudioManager().getAudioContext().createGain();
mediaStreamSource.connect(this.#gainNode);
this.#gainNode.connect(getApp().getAudioManager().getAudioContext().destination);
}
play(buffer) {
play(arrayBuffer) {
const init = {
type: "key",
data: buffer,
timestamp: 23000000,
data: arrayBuffer,
timestamp: 0,
duration: 2000000,
transfer: [buffer],
transfer: [arrayBuffer],
};
//@ts-ignore
let chunk = new EncodedAudioChunk(init);
//@ts-ignore //TODO Typings?
let encodedAudioChunk = new EncodedAudioChunk(init);
this.decoder.decode(chunk);
this.#decoder.decode(encodedAudioChunk);
}
disconnect() {
this.source.disconnect();
delete this.audioTrackProcessor;
delete this.decoder;
delete this.destination;
delete this.mic;
delete this.source;
}
handleDecodedData(chunk) {
this.writer.ready.then(() => {
this.writer.write(chunk);
#handleDecodedData(audioData) {
this.#writer.ready.then(() => {
this.#writer.write(audioData);
})
}
handleDecodingError(e) {
console.log(e);
}
}

View File

@ -0,0 +1,83 @@
import { AudioSink } from "./audiosink";
import { AudioPacket } from "./audiopacket";
import { getApp } from "../olympusapp";
/* Audio sink representing one SRS radio: audio connected to its gain node is
   Opus-encoded via WebCodecs and, while PTT is active, wrapped in SRS packets
   and sent to the backend through the AudioManager websocket. */
export class SRSRadio extends AudioSink {
#encoder: AudioEncoder;
#node: MediaStreamAudioDestinationNode;
#audioTrackProcessor: any; // TODO can we have typings?
#gainNode: GainNode;
/* Radio state; frequency presumably in Hz (251000000 = 251 MHz) — TODO confirm against SRS protocol. */
#setting = {
frequency: 251000000,
modulation: 0,
ptt: false,
tuned: false,
volume: 0.5,
};
constructor() {
super();
/* A gain node is used because it allows to connect multiple inputs */
this.#gainNode = getApp().getAudioManager().getAudioContext().createGain();
/* The destination node turns the Web Audio graph output into a MediaStream that can be encoded. */
this.#node = getApp().getAudioManager().getAudioContext().createMediaStreamDestination();
this.#node.channelCount = 1;
/* Mono 16 kHz Opus encoder; encoded chunks are forwarded to #handleEncodedData. */
this.#encoder = new AudioEncoder({
output: (data) => this.#handleEncodedData(data),
error: (e) => {console.log(e);},
});
this.#encoder.configure({
codec: 'opus',
numberOfChannels: 1,
sampleRate: 16000,
//@ts-ignore // TODO why is it giving error?
opus: {
frameDuration: 40000,
},
bitrateMode: "constant"
});
/* Pipe raw audio frames from the destination stream track into the encoder. */
//@ts-ignore
this.#audioTrackProcessor = new MediaStreamTrackProcessor({
track: this.#node.stream.getAudioTracks()[0],
});
this.#audioTrackProcessor.readable.pipeTo(
new WritableStream({
write: (arrayBuffer) => this.#handleRawData(arrayBuffer),
})
);
this.#gainNode.connect(this.#node);
}
/* Returns the current setting object (not a copy — callers can mutate it). */
getSetting() {
return this.#setting;
}
/* Replaces the setting and notifies the UI to re-render. */
setSetting(setting) {
this.#setting = setting;
document.dispatchEvent(new CustomEvent("radiosUpdated"));
}
/* Input node that audio sources connect to. */
getNode() {
return this.#gainNode;
}
/* Wraps each encoded Opus chunk in an SRS AudioPacket and sends it, but only while PTT is pressed. */
#handleEncodedData(audioBuffer) {
let arrayBuffer = new ArrayBuffer(audioBuffer.byteLength);
audioBuffer.copyTo(arrayBuffer);
if (this.#setting.ptt) {
let packet = new AudioPacket(new Uint8Array(arrayBuffer), this.#setting, getApp().getAudioManager().getGuid());
getApp().getAudioManager().send(packet.getArray());
}
}
/* Feeds raw AudioData frames to the encoder and releases them. */
#handleRawData(audioData) {
this.#encoder.encode(audioData);
audioData.close();
}
}

View File

@ -356,3 +356,8 @@ export const GROUPING_ZOOM_TRANSITION = 13;
export const MAX_SHOTS_SCATTER = 3;
export const MAX_SHOTS_INTENSITY = 3;
export const SHOTS_SCATTER_DEGREES = 10;
/* Discriminator for the first byte of messages on the audio websocket:
   0 = encoded audio payload, 1 = JSON settings update.
   Values must stay in sync with the backend's MessageType enum. */
export enum AudioMessageType {
audio,
settings,
}

View File

@ -26,6 +26,8 @@ interface CustomEventMap {
hideMapContextMenu: CustomEvent<any>;
showUnitContextMenu: CustomEvent<any>;
hideUnitContextMenu: CustomEvent<any>;
audioSourcesUpdated: CustomEvent<any>;
radiosUpdated: CustomEvent<any>;
}
declare global {

View File

@ -40,4 +40,8 @@
z-index: 2006;
}
.vertical-slider {
writing-mode: vertical-lr !important;
direction: rtl !important;
}

View File

@ -292,10 +292,17 @@ export interface ServerStatus {
paused: boolean;
}
export interface AudioRadioSetting {
export interface SRSRadioSetting {
frequency: number;
modulation: number;
volume: number;
ptt: boolean;
tuned: boolean;
}
}
/* UI-facing state of an audio source (file, microphone, ...).
   connectedTo presumably names the sink the source is routed to — TODO confirm. */
export interface AudioSourceSetting {
filename: string;
playing: boolean;
connectedTo: string;
}

View File

@ -532,3 +532,38 @@ export function getUnitsByLabel(filterString: string) {
return [filteredAircraft, filteredHelicopters, filteredAirDefense, filteredGroundUnits, filteredNavyUnits];
}
/** Decodes a little-endian unsigned integer from a byte array (array[0] is the
 *  least significant byte).
 *
 *  Uses arithmetic instead of 32-bit bitwise shifts so that inputs longer than
 *  4 bytes (or values >= 2^31) decode correctly up to Number.MAX_SAFE_INTEGER
 *  instead of overflowing into negative numbers.
 */
export function fromBytes(array: ArrayLike<number>): number {
  let res = 0;
  /* The most significant byte is last, so accumulate from the end. */
  for (let i = array.length - 1; i >= 0; i--) {
    res = res * 256 + array[i];
  }
  return res;
}
/** Generates a random identifier of the given length drawn from [A-Za-z0-9]. */
export function makeID(length) {
  const alphabet =
    "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789";
  /* Build one random character per slot and join them into the final string. */
  return Array.from({ length }, () =>
    alphabet.charAt(Math.floor(Math.random() * alphabet.length))
  ).join("");
}
/** Copies an AudioBuffer's samples into a single Float32Array in planar layout
 *  (all of channel 0, then all of channel 1, ...).
 *
 *  Generalized from the previous hardcoded single-channel loop (`i < 1`,
 *  `input.length * 1`); output for mono buffers is unchanged. Also fixes the
 *  offset bookkeeping, which was assigned instead of accumulated.
 */
export function bufferToF32Planar(input: AudioBuffer): Float32Array {
  const channels = input.numberOfChannels;
  const result = new Float32Array(input.length * channels);
  let offset = 0;
  for (let i = 0; i < channels; i++) {
    const data = input.getChannelData(i);
    result.set(data, offset);
    offset += data.length; // advance past the channel just written
  }
  return result;
}

View File

@ -1,10 +1,10 @@
import { AudioRadioSetting } from "../interfaces";
import { SRSRadioSetting } from "../interfaces";
import { AudioPacket } from "./audiopacket";
import { CapturePipeline } from "./capturepipeline";
export class MicrophoneHandler {
#socket: WebSocket;
#setting: AudioRadioSetting;
#setting: SRSRadioSetting;
constructor(socket, setting) {
this.#socket = socket;

View File

@ -6,6 +6,7 @@ export function OlRangeSlider(props: {
max?: number;
step?: number;
className?: string;
vertical?: boolean;
onChange: (e: ChangeEvent<HTMLInputElement>) => void;
}) {
var elementRef = useRef(null);
@ -33,6 +34,7 @@ export function OlRangeSlider(props: {
h-2 w-full cursor-pointer appearance-none rounded-lg bg-gray-200
dark:bg-gray-700
`}
/>
);
}

View File

@ -0,0 +1,49 @@
import React, { useEffect, useState } from "react";
import { OlStateButton } from "../../components/olstatebutton";
import { faPlay, faRepeat } from "@fortawesome/free-solid-svg-icons";
import { getApp } from "../../../olympusapp";
import { AudioSource } from "../../../audio/audiosource";
import { FaVolumeHigh } from "react-icons/fa6";
import { OlRangeSlider } from "../../components/olrangeslider";
/* Panel showing a single audio source (e.g. a loaded audio file) with play,
   volume, and loop controls. The volume and loop controls are currently stubs. */
export function AudioSourcePanel(props: { index: number; source: AudioSource }) {
return (
<div
className={`
flex flex-col content-center justify-between gap-2 rounded-md
bg-olympus-200/30 py-3 pl-4 pr-5
`}
>
Source: {props.source.getName()}
<div className="flex gap-4 py-2">
{/* Starts playback of the source identified by its index in the audio manager. */}
<OlStateButton
checked={false}
icon={faPlay}
onClick={() => {
let sources = getApp().getAudioManager().getSources();
sources[props.index].play();
}}
tooltip="Play file"
></OlStateButton>
{/* Volume slider — the handler is intentionally disabled for now. */}
<OlRangeSlider
value={50}
onChange={(ev) => {
//let setting = props.setting;
//setting.volume = parseFloat(ev.currentTarget.value) / 100;
//props.updateSetting(setting);
}}
className="my-auto"
/>
{/* Loop toggle — not implemented yet. */}
<OlStateButton
checked={false}
icon={faRepeat}
onClick={() => {
}}
tooltip="Loop"
></OlStateButton>
</div>
</div>
);
}

View File

@ -0,0 +1,70 @@
import React, { useEffect, useState } from "react";
import { OlFrequencyInput } from "../../components/olfrequencyinput";
import { FaTrash } from "react-icons/fa6";
import { OlLabelToggle } from "../../components/ollabeltoggle";
import { OlStateButton } from "../../components/olstatebutton";
import { faEarListen, faMicrophoneLines } from "@fortawesome/free-solid-svg-icons";
import { SRSRadio } from "../../../audio/srsradio";
import { SRSRadioSetting } from "../../../interfaces";
import { getApp } from "../../../olympusapp";
/* Panel with the controls for a single SRS radio: frequency input, AM/FM
   modulation toggle, push-to-talk, tune, and a delete button. All changes are
   reported upwards through props.onSettingUpdate. */
export function RadioPanel(props: { index: number; setting: SRSRadioSetting, onSettingUpdate: (SRSRadioSetting) => void }) {
return (
<div
className={`
flex flex-col content-center justify-between gap-2 rounded-md
bg-olympus-200/30 py-3 pl-4 pr-5
`}
>
<div className="flex content-center justify-between">
<span className="my-auto">Radio {props.index + 1}</span>
{/* Removes this radio from the audio manager. */}
<div className="rounded-md bg-red-800 p-2" onClick={() => {getApp().getAudioManager().removeRadio(props.index);}}>
<FaTrash className={`text-gray-50`}></FaTrash>
</div>
</div>
{/* NOTE(review): the handlers below mutate props.setting in place before reporting it — confirm intended. */}
<OlFrequencyInput
value={props.setting.frequency}
onChange={(value) => {
let setting = props.setting;
setting.frequency = value;
props.onSettingUpdate(setting);
}}
/>
<div className="flex flex-row gap-2">
{/* Toggles between AM (modulation 0) and FM (modulation 1). */}
<OlLabelToggle
leftLabel="AM"
rightLabel="FM"
toggled={props.setting.modulation !== 0}
onClick={() => {
let setting = props.setting;
setting.modulation = setting.modulation === 1 ? 0 : 1;
props.onSettingUpdate(setting);
}}
></OlLabelToggle>
{/* Push-to-talk toggle. */}
<OlStateButton
className="ml-auto"
checked={props.setting.ptt}
icon={faMicrophoneLines}
onClick={() => {
let setting = props.setting;
setting.ptt = !setting.ptt;
props.onSettingUpdate(setting);
}}
tooltip="Talk on frequency"
></OlStateButton>
{/* Tune/listen toggle. */}
<OlStateButton
checked={props.setting.tuned}
icon={faEarListen}
onClick={() => {
let setting = props.setting;
setting.tuned = !setting.tuned;
props.onSettingUpdate(setting);
}}
tooltip="Tune to radio"
></OlStateButton>
</div>
</div>
);
}

View File

@ -1,73 +1,107 @@
import React, { useEffect, useState } from "react";
import { Menu } from "./components/menu";
import { OlCheckbox } from "../components/olcheckbox";
import { OlRangeSlider } from "../components/olrangeslider";
import { OlNumberInput } from "../components/olnumberinput";
import { MapOptions } from "../../types/types";
import { getApp } from "../../olympusapp";
import { OlFrequencyInput } from "../components/olfrequencyinput";
import { OlStateButton } from "../components/olstatebutton";
import { faEarListen, faMicrophoneLines } from "@fortawesome/free-solid-svg-icons";
import { OlLabelToggle } from "../components/ollabeltoggle";
import { FaVolumeHigh } from "react-icons/fa6";
import { OlToggle } from "../components/oltoggle";
import { RadioPanel } from "./components/radiopanel";
import { FaQuestionCircle } from "react-icons/fa";
import { SRSRadioSetting } from "../../interfaces";
export function RadioMenu(props: { open: boolean; onClose: () => void; children?: JSX.Element | JSX.Element[] }) {
const [frequency1, setFrequency1] = useState(251000000);
const [ptt1, setPTT1] = useState(false);
const [frequency2, setFrequency2] = useState(251000000);
const [frequency3, setFrequency3] = useState(243000000);
const [frequency4, setFrequency4] = useState(11200000);
const [radioEnabled, setRadioEnabled] = useState(false);
const [radioSettings, setRadioSettings] = useState([] as SRSRadioSetting[]);
useEffect(() => {
if (getApp()) {
let settings = getApp().getAudioManager().getRadioSettings();
settings[0].frequency = frequency1;
settings[0].ptt = ptt1;
getApp().getAudioManager().setRadioSettings(settings);
}
});
/* Force a rerender */
document.addEventListener("radiosUpdated", () => {
setRadioSettings(
getApp()
?.getAudioManager()
.getRadios()
.map((radio) => radio.getSetting())
);
});
}, []);
return (
<Menu title="Radio" open={props.open} showBackButton={false} onClose={props.onClose}>
<div className="p-4 text-sm text-gray-400">The radio menu allows you to talk on radio to the players online using SRS.</div>
<div className="mx-6 flex rounded-lg bg-olympus-400 p-4 text-sm">
<div>
<FaQuestionCircle className="my-4 ml-2 mr-6 text-gray-400" />
</div>
<div className="flex flex-col gap-1">
<div className="text-gray-100">Use the radio controls to tune to a frequency, then click on the PTT button to talk. </div>
<div className="text-gray-400">You can add up to 10 radios. Use the audio effects menu to play audio tracks or to add background noises.</div>
</div>
</div>
<div
className={`
flex flex-col gap-2 p-5 font-normal text-gray-800
dark:text-white
`}
>
<div
className={`
flex flex-col content-center justify-between gap-2 rounded-md
bg-olympus-200/30 py-3 pl-4 pr-5
`}
>
Radio 1
<OlFrequencyInput
value={frequency1}
onChange={(value) => {
setFrequency1(value);
<div className="flex justify-between">
<span>Enable radio:</span>
<OlToggle
toggled={radioEnabled}
onClick={() => {
radioEnabled ? getApp().getAudioManager().stop() : getApp().getAudioManager().start();
setRadioEnabled(!radioEnabled);
}}
/>
<div className="flex gap-4 py-2">
<FaVolumeHigh className="h-8 w-8 p-1" />
<OlRangeSlider value={50} onChange={() => {}} className="my-auto" />
<span className="my-auto">50</span>
</div>
<div className="flex flex-row gap-2">
<OlLabelToggle leftLabel="AM" rightLabel="FM" toggled={false} onClick={() => {}}></OlLabelToggle>
<OlStateButton
className="ml-auto"
checked={ptt1}
icon={faMicrophoneLines}
onClick={() => {
setPTT1(!ptt1);
}}
tooltip="Talk on frequency"
></OlStateButton>
<OlStateButton checked={false} icon={faEarListen} onClick={() => {}} tooltip="Tune to radio"></OlStateButton>
</div>
</div>
{radioEnabled && radioSettings.map((setting, idx) => {
return (
<RadioPanel
index={idx}
setting={setting}
onSettingUpdate={(setting) => {
getApp().getAudioManager().getRadios()[idx].setSetting(setting);
}}
></RadioPanel>
);
})}
{radioEnabled && radioSettings.length < 10 && (
<button
type="button"
className={`
mb-2 me-2 rounded-lg bg-blue-700 px-5 py-2.5 text-sm font-medium
text-white
dark:bg-blue-600 dark:hover:bg-blue-700 dark:focus:ring-blue-800
focus:outline-none focus:ring-4 focus:ring-blue-300
hover:bg-blue-800
`}
onClick={() => getApp().getAudioManager().addRadio()}
>
Add radio
</button>
)}
</div>
</Menu>
);
}
/*
{refreshSources >= 0 &&
getApp()
?.getAudioManager()
.getSources()
.map((source, idx) => {
return <AudioSourcePanel index={idx} source={source} />;
})}
<button
onClick={() => {
var input = document.createElement("input");
input.type = "file";
input.click();
input.onchange = (e: Event) => {
let target = e.target as HTMLInputElement;
if (target && target.files) {
var file = target.files[0];
getApp().getAudioManager().addFileSource(file);
}
};
}}
>
Add audio source
</button> */

View File

@ -17,11 +17,5 @@ export class AudioBackend {
wss.on("connection", (ws) => {
this.handlers.push(new SRSHandler(ws, this.SRSPort));
});
wss.on("disconnection", (ws) => {
this.handlers = this.handlers.filter((handler) => {
handler.ws != ws;
});
});
}
}

View File

@ -13,28 +13,15 @@ enum MessageType {
settings,
}
function makeID(length) {
let result = "";
const characters =
"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789";
const charactersLength = characters.length;
let counter = 0;
while (counter < length) {
result += characters.charAt(Math.floor(Math.random() * charactersLength));
counter += 1;
}
return result;
}
export class SRSHandler {
ws: any;
tcp = new net.Socket();
udp = require("dgram").createSocket("udp4");
data = JSON.parse(JSON.stringify(defaultSRSData));
syncInterval: any;
packetQueue = [];
constructor(ws, SRSPort) {
this.data.ClientGuid = "ImF72dh9EYcIDyYRGaF9S9";
this.data.Name = `Olympus${globalIndex}`;
globalIndex += 1;
@ -45,6 +32,7 @@ export class SRSHandler {
switch (data[0]) {
case MessageType.audio:
let audioBuffer = data.slice(1);
this.packetQueue.push(audioBuffer);
this.udp.send(audioBuffer, SRSPort, "localhost", (error) => {
if (error)
console.log(`Error sending data to SRS server: ${error}`);
@ -52,6 +40,8 @@ export class SRSHandler {
break;
case MessageType.settings:
let message = JSON.parse(data.slice(1));
this.data.ClientGuid = message.guid;
this.data.Coalition = message.coalition;
message.settings.forEach((setting, idx) => {
this.data.RadioInfo.radios[idx].freq = setting.frequency;
this.data.RadioInfo.radios[idx].modulation = setting.modulation;
@ -80,9 +70,10 @@ export class SRSHandler {
Version: SRS_VERSION,
};
this.udp.send(this.data.ClientGuid, SRSPort, "localhost", (error) => {
if (error) console.log(`Error pinging SRS server on UDP: ${error}`);
});
this.data.ClientGuid !== "" &&
this.udp.send(this.data.ClientGuid, SRSPort, "localhost", (error) => {
if (error) console.log(`Error pinging SRS server on UDP: ${error}`);
});
if (this.tcp.readyState == "open")
this.tcp.write(`${JSON.stringify(SYNC)}\n`);
@ -92,12 +83,11 @@ export class SRSHandler {
/* UDP */
this.udp.on("listening", () => {
console.log(`Listening to SRS Server on UDP port ${SRSPort}`)
console.log(`Listening to SRS Server on UDP port ${SRSPort}`);
});
this.udp.on("message", (message, remote) => {
if (this.ws && message.length > 22)
this.ws.send(message);
if (this.ws && message.length > 22) this.ws.send(message);
});
}
}