More work on SRS backend for radio playback and recording

This commit is contained in:
Pax1601 2024-09-02 07:51:51 +02:00
parent ba2c48dead
commit b352bc824c
12 changed files with 247 additions and 56 deletions

View File

@ -16,6 +16,7 @@
"@fortawesome/react-fontawesome": "^0.2.0",
"@tanem/svg-injector": "^10.1.68",
"@turf/turf": "^6.5.0",
"@types/dom-webcodecs": "^0.1.11",
"@types/leaflet": "^1.9.8",
"@types/react-leaflet": "^3.0.0",
"@types/turf": "^3.5.32",
@ -24,6 +25,7 @@
"leaflet": "^1.9.4",
"leaflet-control-mini-map": "^0.4.0",
"leaflet-path-drag": "^1.9.5",
"opus-decoder": "^0.7.6",
"react": "^18.2.0",
"react-dom": "^18.2.0",
"react-icons": "^5.0.1",

View File

@ -2,16 +2,28 @@ import { AudioRadioSetting } from "../interfaces";
import { getApp } from "../olympusapp";
import { Buffer } from "buffer";
import { MicrophoneHandler } from "./microphonehandler";
import { PlaybackPipeline } from "./playbackpipeline";
enum MessageType {
audio,
settings,
}
/**
 * Reconstructs an unsigned integer from a little-endian byte array
 * (least significant byte first, as produced by the SRS packet layout).
 * Uses 32-bit shifts, so only arrays of up to 4 bytes are meaningful.
 * @param {ArrayLike<number>} array - bytes, LSB first
 * @returns {number} the assembled integer (0 for an empty array)
 */
function fromBytes(array) {
  // Fold from the most-significant (last) byte down to the first.
  return Array.from(array).reduceRight((acc, byte) => (acc << 8) + byte, 0);
}
var context = new AudioContext();
export class AudioManager {
#radioSettings: AudioRadioSetting[] = [
{
frequency: 124000000,
frequency: 251000000,
modulation: 0,
ptt: false,
tuned: false,
@ -19,7 +31,7 @@ export class AudioManager {
},
];
#microphoneHandlers: (MicrophoneHandler | null)[] =[];
#microphoneHandlers: (MicrophoneHandler | null)[] = [];
#address: string = "localhost";
#port: number = 4000;
@ -38,6 +50,7 @@ export class AudioManager {
}
start() {
const pipeline = new PlaybackPipeline();
let res = this.#address.match(/(?:http|https):\/\/(.+):/);
let wsAddress = res ? res[1] : this.#address;
@ -51,8 +64,13 @@ export class AudioManager {
console.log(event);
});
this.#socket.addEventListener("message", (event) => {
console.log("Message from server ", event.data);
this.#socket.addEventListener("message", async (event) => {
let bytes = event.data;
let packet = new Uint8Array(await bytes.arrayBuffer())
let audioLength = fromBytes(packet.slice(2, 4));
let audioData = packet.slice(6, 6 + audioLength);
let frequency = new DataView(packet.slice(6 + audioLength, 6 + audioLength + 8).reverse().buffer).getFloat64(0);
pipeline.play(audioData.buffer);
});
}
@ -80,9 +98,8 @@ export class AudioManager {
if (setting.ptt && !this.#microphoneHandlers[idx]) {
this.#microphoneHandlers[idx] = new MicrophoneHandler(this.#socket, setting);
}
})
});
if (this.#socket?.readyState == 1)
this.#socket?.send(new Uint8Array([MessageType.settings, ...Buffer.from(JSON.stringify(message), "utf-8")]));
if (this.#socket?.readyState == 1) this.#socket?.send(new Uint8Array([MessageType.settings, ...Buffer.from(JSON.stringify(message), "utf-8")]));
}
}

View File

@ -60,7 +60,6 @@ export class AudioPacket {
packet[4] = frequencyAudioLen[0];
packet[5] = frequencyAudioLen[1];
this.#packet = new Uint8Array([0].concat(packet));
}

View File

@ -0,0 +1,88 @@
/**
 * Captures microphone audio, encodes it with WebCodecs (Opus by default),
 * and surfaces both the raw frames (`onrawdata`) and the encoded chunks
 * (`onencoded`) to the consumer.
 */
export class CapturePipeline {
  sampleRate: any;
  codec: any;
  sourceId: any;
  onrawdata: any;
  onencoded: any;
  deviceId: any;
  audioContext: any;
  mic: any;
  source: any;
  destination: any;
  encoder: any;
  audioTrackProcessor: any;
  duration: any;

  /**
   * @param codec - WebCodecs codec string (default "opus")
   * @param sampleRate - capture sample rate in Hz (default 16000)
   * @param duration - Opus frame duration in microseconds (default 40000)
   */
  constructor(codec = "opus", sampleRate = 16000, duration = 40000) {
    this.sampleRate = sampleRate;
    this.codec = codec;
    this.duration = duration;
    this.onrawdata = null;
    this.onencoded = null;
  }

  /**
   * Acquires the microphone, routes it through an AudioContext into a mono
   * destination, and starts piping raw frames into the AudioEncoder via a
   * MediaStreamTrackProcessor.
   */
  async connect() {
    const mic = navigator.mediaDevices.getUserMedia({ audio: true });
    this.audioContext = new AudioContext({
      sampleRate: this.sampleRate,
      latencyHint: "interactive",
    });
    this.mic = await mic;
    this.source = this.audioContext.createMediaStreamSource(this.mic);
    this.destination = this.audioContext.createMediaStreamDestination();
    this.destination.channelCount = 1; // SRS audio is mono
    this.source.connect(this.destination);
    this.encoder = new AudioEncoder({
      output: this.handleEncodedData.bind(this),
      error: this.handleEncodingError.bind(this),
    });
    this.encoder.configure({
      codec: this.codec,
      numberOfChannels: 1,
      sampleRate: this.sampleRate,
      opus: {
        frameDuration: this.duration,
      },
      bitrateMode: "constant",
    });
    //@ts-ignore MediaStreamTrackProcessor not yet in the TS DOM lib
    this.audioTrackProcessor = new MediaStreamTrackProcessor({
      track: this.destination.stream.getAudioTracks()[0],
    });
    this.audioTrackProcessor.readable.pipeTo(
      new WritableStream({
        write: this.handleRawData.bind(this),
      })
    );
  }

  /**
   * Tears down the capture graph. Stops the microphone tracks and closes the
   * encoder and AudioContext so the mic hardware (and browser "recording"
   * indicator) is actually released — previously the handles were only
   * dereferenced, leaving the mic open.
   */
  disconnect() {
    this.source?.disconnect();
    this.mic?.getTracks().forEach((track) => track.stop()); // release the hardware mic
    this.encoder?.close();
    this.audioContext?.close();
    delete this.audioTrackProcessor;
    delete this.encoder;
    delete this.destination;
    delete this.mic;
    delete this.source;
    delete this.audioContext;
  }

  /**
   * AudioEncoder output callback: forwards each encoded chunk (and its
   * metadata) to the consumer. The previous version also copied the chunk
   * into a local ArrayBuffer that was never read — dead work, removed.
   */
  handleEncodedData(chunk, metadata) {
    if (this.onencoded) {
      this.onencoded(chunk, metadata);
    }
  }

  /** Encoder error callback — log only; capture simply stops producing. */
  handleEncodingError(e) {
    console.log(e);
  }

  /**
   * Raw AudioData callback from the track processor: hand the frame to the
   * consumer, feed the encoder, then close it to release its memory.
   */
  handleRawData(audioData) {
    if (this.onrawdata) {
      this.onrawdata(audioData);
    }
    this.encoder.encode(audioData);
    audioData.close();
  }
}

View File

@ -1,5 +1,6 @@
import { AudioRadioSetting } from "../interfaces";
import { AudioPacket } from "./audiopacket";
import { CapturePipeline } from "./capturepipeline";
export class MicrophoneHandler {
#socket: WebSocket;
@ -11,42 +12,23 @@ export class MicrophoneHandler {
console.log("Starting microphone handler");
//@ts-ignore
let getUserMedia = navigator.getUserMedia || navigator.webkitGetUserMedia || navigator.mozGetUserMedia || navigator.msGetUserMedia;
const pipeline = new CapturePipeline();
if (getUserMedia) {
//@ts-ignore
navigator.getUserMedia(
{ audio: {
sampleRate: 16000,
channelCount: 1,
volume: 1.0
} },
(stream) => {
this.start_microphone(stream);
},
(e) => {
alert("Error capturing audio.");
}
);
} else {
alert("getUserMedia not supported in this browser.");
}
}
navigator.mediaDevices.enumerateDevices()
.then(function(devices) {
devices.forEach(function(device) {
console.log(device.kind + ": " + device.label +
" id = " + device.deviceId);
});
})
start_microphone(stream) {
const recorder = new MediaRecorder(stream);
// fires every one second and passes an BlobEvent
recorder.ondataavailable = async (event) => {
// get the Blob from the event
const blob = event.data;
let rawData = await blob.arrayBuffer();
let packet = new AudioPacket(new Uint8Array(rawData), this.#setting);
this.#socket.send(packet.getArray());
};
recorder.start(200);
pipeline.connect().then(() => {
pipeline.onencoded = (data) => {
let buffer = new ArrayBuffer(data.byteLength);
data.copyTo(buffer);
let packet = new AudioPacket(new Uint8Array(buffer), this.#setting);
this.#socket.send(packet.getArray());
}
})
}
}

View File

@ -0,0 +1,82 @@
/**
 * Decodes incoming Opus packets with WebCodecs and plays them back through a
 * MediaStreamTrackGenerator routed into an AudioContext.
 */
export class PlaybackPipeline {
  sampleRate: any;
  codec: any;
  sourceId: any;
  onrawdata: any;
  ondecoded: any;
  deviceId: any;
  audioContext: any;
  mic: any;
  source: any;
  destination: any;
  decoder: any;
  audioTrackProcessor: any;
  duration: any;
  trackGenerator: any;
  writer: any;

  /**
   * @param codec - WebCodecs codec string (default "opus")
   * @param sampleRate - playback sample rate in Hz (default 16000)
   * @param duration - Opus frame duration in microseconds (default 40000)
   */
  constructor(codec = "opus", sampleRate = 16000, duration = 40000) {
    this.sampleRate = sampleRate;
    this.codec = codec;
    this.duration = duration;
    this.ondecoded = null;
    this.audioContext = new AudioContext();
    this.decoder = new AudioDecoder({
      output: (chunk) => this.handleDecodedData(chunk),
      error: this.handleDecodingError.bind(this),
    });
    this.decoder.configure({
      codec: this.codec,
      numberOfChannels: 1,
      sampleRate: this.sampleRate,
      opus: {
        frameDuration: this.duration,
      },
      bitrateMode: "constant",
    });
    /* Decoded AudioData frames are written into this generator track, whose
       MediaStream is then played out through the AudioContext. */
    //@ts-ignore MediaStreamTrackGenerator not yet in the TS DOM lib
    this.trackGenerator = new MediaStreamTrackGenerator({ kind: "audio" });
    this.writer = this.trackGenerator.writable.getWriter();
    const stream = new MediaStream([this.trackGenerator]);
    /* Keep the node on `this` so disconnect() can detach it — the previous
       version stored it only in a local, so this.source was undefined and
       disconnect() threw a TypeError. */
    this.source = this.audioContext.createMediaStreamSource(stream);
    this.source.connect(this.audioContext.destination);
  }

  /**
   * Wraps one encoded Opus packet in an EncodedAudioChunk and queues it for
   * decoding. Transfers ownership of `buffer` to the chunk.
   * NOTE(review): timestamp/duration are fixed constants — presumably the
   * audio decoder ignores them; confirm if playback ever glitches.
   * @param buffer - ArrayBuffer containing one encoded Opus frame
   */
  play(buffer) {
    const init = {
      type: "key",
      data: buffer,
      timestamp: 23000000,
      duration: 2000000,
      transfer: [buffer],
    };
    //@ts-ignore EncodedAudioChunk init typing lags the spec
    let chunk = new EncodedAudioChunk(init);
    this.decoder.decode(chunk);
  }

  /**
   * Tears down the playback graph. Each handle is guarded because some may
   * never have been created; the decoder and AudioContext are closed so
   * their resources are actually released.
   */
  disconnect() {
    this.source?.disconnect();
    this.decoder?.close();
    this.audioContext?.close();
    delete this.audioTrackProcessor;
    delete this.decoder;
    delete this.destination;
    delete this.mic;
    delete this.source;
  }

  /**
   * Decoder output callback: waits for the track writer to be ready
   * (backpressure) before writing the decoded frame into the generator.
   */
  handleDecodedData(chunk) {
    this.writer.ready.then(() => {
      this.writer.write(chunk);
    });
  }

  /** Decoder error callback — log only; playback simply stops. */
  handleDecodingError(e) {
    console.log(e);
  }
}

View File

@ -12,7 +12,7 @@ import { OlLabelToggle } from "../components/ollabeltoggle";
import { FaVolumeHigh } from "react-icons/fa6";
export function RadioMenu(props: { open: boolean; onClose: () => void; children?: JSX.Element | JSX.Element[] }) {
const [frequency1, setFrequency1] = useState(124000000);
const [frequency1, setFrequency1] = useState(251000000);
const [ptt1, setPTT1] = useState(false);
const [frequency2, setFrequency2] = useState(251000000);
const [frequency3, setFrequency3] = useState(243000000);

View File

@ -1113,7 +1113,7 @@ export function UnitControlMenu(props: { open: boolean; onClose: () => void }) {
<div className="text-sm text-gray-200">Radio frequency</div>
<div className="flex content-center gap-2">
<OlFrequencyInput value={activeAdvancedSettings? activeAdvancedSettings.radio.frequency: 124000000} onChange={(value) => {
<OlFrequencyInput value={activeAdvancedSettings? activeAdvancedSettings.radio.frequency: 251000000} onChange={(value) => {
if (activeAdvancedSettings) {
activeAdvancedSettings.radio.frequency = value;
setActiveAdvancedSettings(JSON.parse(JSON.stringify(activeAdvancedSettings)));

View File

@ -22,8 +22,8 @@
"express-basic-auth": "^1.2.1",
"http-proxy-middleware": "^2.0.6",
"morgan": "~1.9.1",
"node-opus": "^0.3.3",
"open": "^10.0.0",
"opus-decoder": "^0.7.6",
"regedit": "^5.1.2",
"save": "^2.9.0",
"sha256": "^0.2.0",

View File

@ -2,7 +2,7 @@ export var defaultSRSData = {
ClientGuid: "",
Name: "",
Seat: 0,
Coalition: 0,
Coalition: 2,
AllowRecord: false,
RadioInfo: {
radios: [

View File

@ -1,4 +1,7 @@
import { defaultSRSData } from "./defaultdata";
const { OpusEncoder } = require("@discordjs/opus");
const encoder = new OpusEncoder(16000, 1);
var net = require("net");
const SRS_VERSION = "2.1.0.10";
@ -41,12 +44,10 @@ export class SRSHandler {
this.ws.on("message", (data) => {
switch (data[0]) {
case MessageType.audio:
this.udp.send(data.slice(1), 5002, "localhost", function (error) {
if (error) {
console.log("Error!!!");
} else {
console.log("Data sent");
}
let audioBuffer = data.slice(1);
this.udp.send(audioBuffer, SRSPort, "localhost", (error) => {
if (error)
console.log(`Error sending data to SRS server: ${error}`);
});
break;
case MessageType.settings:
@ -54,7 +55,7 @@ export class SRSHandler {
message.settings.forEach((setting, idx) => {
this.data.RadioInfo.radios[idx].freq = setting.frequency;
this.data.RadioInfo.radios[idx].modulation = setting.modulation;
})
});
break;
default:
break;
@ -65,8 +66,12 @@ export class SRSHandler {
});
/* TCP */
this.tcp.on("error", (ex) => {
console.log("Could not connect to SRS Server");
});
this.tcp.connect(SRSPort, "localhost", () => {
console.log("Connected");
console.log(`Connected to SRS Server on TCP Port ${SRSPort}`);
this.syncInterval = setInterval(() => {
let SYNC = {
@ -75,10 +80,24 @@ export class SRSHandler {
Version: SRS_VERSION,
};
this.udp.send(this.data.ClientGuid, SRSPort, "localhost", (error) => {
if (error) console.log(`Error pinging SRS server on UDP: ${error}`);
});
if (this.tcp.readyState == "open")
this.tcp.write(`${JSON.stringify(SYNC)}\n`);
else clearInterval(this.syncInterval);
}, 1000);
});
/* UDP */
this.udp.on("listening", () => {
console.log(`Listening to SRS Server on UDP port ${SRSPort}`)
});
this.udp.on("message", (message, remote) => {
if (this.ws && message.length > 22)
this.ws.send(message);
});
}
}

View File

@ -2,7 +2,9 @@
"compilerOptions": {
"outDir": "./build",
"allowJs": true,
"target": "es5"
"target": "es5",
"module": "Node16",
"moduleResolution": "Node16"
},
"include": [
"./src/**/*"