Merge pull request #5046 from 21pages/fps3

multi-user video QoS and fix quality change
RustDesk 2023-07-23 20:38:53 +08:00 committed by GitHub
commit dc0f21298e
39 changed files with 1345 additions and 916 deletions

Cargo.lock (generated)

@ -2987,8 +2987,8 @@ checksum = "9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4"
[[package]]
name = "hwcodec"
version = "0.1.0"
source = "git+https://github.com/21pages/hwcodec?branch=stable#3ea79865a10387b7e1b7630c2ae068bd2081f680"
version = "0.1.1"
source = "git+https://github.com/21pages/hwcodec?branch=stable#d5daa75d8cb273781dc21676cb00edda5a4cf8b9"
dependencies = [
"bindgen 0.59.2",
"cc",


@ -662,6 +662,7 @@ message Misc {
Resolution change_resolution = 24;
PluginRequest plugin_request = 25;
PluginFailure plugin_failure = 26;
uint32 full_speed_fps = 27;
}
}
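For context, the new full_speed_fps field lets the viewing side report its measured full-speed decode fps to the encoding side. A minimal sketch of how it is populated, mirroring the client change later in this commit (assumes the rust-protobuf generated bindings for this message):

    // Illustrative fragment, not part of the diff; mirrors fps_control() below.
    let mut misc = Misc::new();
    misc.set_full_speed_fps(decode_fps as u32); // decoder's measured full-speed fps
    let mut msg = Message::new();
    msg.set_misc(misc);
    // then sent over the existing control channel, e.g. sender.send(Data::Message(msg)).ok();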


@ -6,7 +6,7 @@ use std::time::{Duration, SystemTime, UNIX_EPOCH};
use serde_derive::{Deserialize, Serialize};
use tokio::{fs::File, io::*};
use crate::{bail, get_version_number, message_proto::*, ResultType, Stream};
use crate::{anyhow::anyhow, bail, get_version_number, message_proto::*, ResultType, Stream};
// https://doc.rust-lang.org/std/os/windows/fs/trait.MetadataExt.html
use crate::{
compress::{compress, decompress},
@ -403,10 +403,18 @@ impl TransferJob {
}
if block.compressed {
let tmp = decompress(&block.data);
self.file.as_mut().unwrap().write_all(&tmp).await?;
self.file
.as_mut()
.ok_or(anyhow!("file is None"))?
.write_all(&tmp)
.await?;
self.finished_size += tmp.len() as u64;
} else {
self.file.as_mut().unwrap().write_all(&block.data).await?;
self.file
.as_mut()
.ok_or(anyhow!("file is None"))?
.write_all(&block.data)
.await?;
self.finished_size += block.data.len() as u64;
}
self.transferred += block.data.len() as u64;
@ -456,7 +464,13 @@ impl TransferJob {
let mut compressed = false;
let mut offset: usize = 0;
loop {
match self.file.as_mut().unwrap().read(&mut buf[offset..]).await {
match self
.file
.as_mut()
.ok_or(anyhow!("file is None"))?
.read(&mut buf[offset..])
.await
{
Err(err) => {
self.file_num += 1;
self.file = None;
@ -501,7 +515,12 @@ impl TransferJob {
async fn send_current_digest(&mut self, stream: &mut Stream) -> ResultType<()> {
let mut msg = Message::new();
let mut resp = FileResponse::new();
let meta = self.file.as_ref().unwrap().metadata().await?;
let meta = self
.file
.as_ref()
.ok_or(anyhow!("file is None"))?
.metadata()
.await?;
let last_modified = meta
.modified()?
.duration_since(SystemTime::UNIX_EPOCH)?
@ -750,13 +769,13 @@ pub async fn handle_read_jobs(
Ok(None) => {
if job.job_completed() {
finished.push(job.id());
let err = job.job_error();
if err.is_some() {
stream
.send(&new_error(job.id(), err.unwrap(), job.file_num()))
.await?;
} else {
stream.send(&new_done(job.id(), job.file_num())).await?;
match job.job_error() {
Some(err) => {
stream
.send(&new_error(job.id(), err, job.file_num()))
.await?
}
None => stream.send(&new_done(job.id(), job.file_num())).await?,
}
} else {
// waiting confirmation.


@ -46,7 +46,6 @@ pub mod keyboard;
pub use dlopen;
#[cfg(not(any(target_os = "android", target_os = "ios")))]
pub use machine_uid;
#[cfg(not(any(target_os = "android", target_os = "ios")))]
pub use sysinfo;
pub use toml;
pub use uuid;
@ -128,7 +127,7 @@ impl AddrMangle {
SocketAddr::V4(addr_v4) => {
let tm = (SystemTime::now()
.duration_since(UNIX_EPOCH)
.unwrap()
.unwrap_or(std::time::Duration::ZERO)
.as_micros() as u32) as u128;
let ip = u32::from_le_bytes(addr_v4.ip().octets()) as u128;
let port = addr.port() as u128;
@ -161,9 +160,9 @@ impl AddrMangle {
if bytes.len() != 18 {
return Config::get_any_listen_addr(false);
}
let tmp: [u8; 2] = bytes[16..].try_into().unwrap();
let tmp: [u8; 2] = bytes[16..].try_into().unwrap_or_default();
let port = u16::from_le_bytes(tmp);
let tmp: [u8; 16] = bytes[..16].try_into().unwrap();
let tmp: [u8; 16] = bytes[..16].try_into().unwrap_or_default();
let ip = std::net::Ipv6Addr::from(tmp);
return SocketAddr::new(IpAddr::V6(ip), port);
}
@ -291,16 +290,24 @@ pub fn get_time() -> i64 {
#[inline]
pub fn is_ipv4_str(id: &str) -> bool {
regex::Regex::new(r"^(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)(:\d+)?$")
.unwrap()
.is_match(id)
if let Ok(reg) = regex::Regex::new(
r"^(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)(:\d+)?$",
) {
reg.is_match(id)
} else {
false
}
}
#[inline]
pub fn is_ipv6_str(id: &str) -> bool {
regex::Regex::new(r"^((([a-fA-F0-9]{1,4}:{1,2})+[a-fA-F0-9]{1,4})|(\[([a-fA-F0-9]{1,4}:{1,2})+[a-fA-F0-9]{1,4}\]:\d+))$")
.unwrap()
.is_match(id)
if let Ok(reg) = regex::Regex::new(
r"^((([a-fA-F0-9]{1,4}:{1,2})+[a-fA-F0-9]{1,4})|(\[([a-fA-F0-9]{1,4}:{1,2})+[a-fA-F0-9]{1,4}\]:\d+))$",
) {
reg.is_match(id)
} else {
false
}
}
#[inline]
@ -313,11 +320,13 @@ pub fn is_domain_port_str(id: &str) -> bool {
// modified regex for RFC1123 hostname. check https://stackoverflow.com/a/106223 for original version for hostname.
// according to [TLD List](https://data.iana.org/TLD/tlds-alpha-by-domain.txt) version 2023011700,
// there is no digits in TLD, and length is 2~63.
regex::Regex::new(
if let Ok(reg) = regex::Regex::new(
r"(?i)^([a-z0-9]([a-z0-9-]{0,61}[a-z0-9])?\.)+[a-z][a-z-]{0,61}[a-z]:\d{1,5}$",
)
.unwrap()
.is_match(id)
) {
reg.is_match(id)
} else {
false
}
}
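An illustrative example (not part of this commit) of what the modified RFC1123 pattern above accepts: a dotted hostname whose final label is letters/hyphens only, followed by a mandatory port. It assumes only the regex crate, which the code above already uses.

    use regex::Regex;

    fn main() {
        // Same pattern as is_domain_port_str above, shown with accepted / rejected inputs.
        let Ok(re) = Regex::new(
            r"(?i)^([a-z0-9]([a-z0-9-]{0,61}[a-z0-9])?\.)+[a-z][a-z-]{0,61}[a-z]:\d{1,5}$",
        ) else {
            return;
        };
        assert!(re.is_match("rustdesk.com:21116"));
        assert!(!re.is_match("rustdesk.com")); // the port is mandatory
        assert!(!re.is_match("example.c0m:80")); // digits are not allowed in the TLD
    }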
pub fn init_log(_is_async: bool, _name: &str) -> Option<flexi_logger::LoggerHandle> {


@ -4,6 +4,9 @@ pub mod linux;
#[cfg(target_os = "macos")]
pub mod macos;
#[cfg(target_os = "windows")]
pub mod windows;
#[cfg(not(debug_assertions))]
use crate::{config::Config, log};
#[cfg(not(debug_assertions))]


@ -0,0 +1,149 @@
use std::{
collections::VecDeque,
os::windows::raw::HANDLE,
sync::{Arc, Mutex},
time::Instant,
};
use winapi::{
shared::minwindef::{DWORD, FALSE},
um::{
handleapi::CloseHandle,
pdh::{
PdhAddCounterA, PdhCloseQuery, PdhCollectQueryData, PdhCollectQueryDataEx,
PdhGetFormattedCounterValue, PdhOpenQueryA, PDH_FMT_COUNTERVALUE, PDH_FMT_DOUBLE,
PDH_HCOUNTER, PDH_HQUERY,
},
synchapi::{CreateEventA, WaitForSingleObject},
winbase::{INFINITE, WAIT_OBJECT_0},
},
};
lazy_static::lazy_static! {
static ref CPU_USAGE_ONE_MINUTE: Arc<Mutex<Option<(f64, Instant)>>> = Arc::new(Mutex::new(None));
}
// https://github.com/mgostIH/process_list/blob/master/src/windows/mod.rs
#[repr(transparent)]
pub struct RAIIHandle(pub HANDLE);
impl Drop for RAIIHandle {
fn drop(&mut self) {
// This never gives problems except when running under a debugger.
unsafe { CloseHandle(self.0) };
}
}
#[repr(transparent)]
pub(self) struct RAIIPDHQuery(pub PDH_HQUERY);
impl Drop for RAIIPDHQuery {
fn drop(&mut self) {
unsafe { PdhCloseQuery(self.0) };
}
}
pub unsafe fn start_cpu_performance_monitor() {
// Code from:
// https://learn.microsoft.com/en-us/windows/win32/perfctrs/collecting-performance-data
// https://learn.microsoft.com/en-us/windows/win32/api/pdh/nf-pdh-pdhcollectquerydataex
// Why the value is lower than Task Manager:
// https://aaron-margosis.medium.com/task-managers-cpu-numbers-are-all-but-meaningless-2d165b421e43
// Therefore we should compare with Process Explorer rather than Task Manager
std::thread::spawn(|| {
// load avg or cpu usage, test with prime95.
// Prefer cpu usage because we can get an accurate value from Process Explorer.
// const COUNTER_PATH: &'static str = "\\System\\Processor Queue Length\0";
const COUNTER_PATH: &'static str = "\\Processor(_total)\\% Processor Time\0";
const SAMPLE_INTERVAL: DWORD = 2; // 2 seconds
let mut ret;
let mut query: PDH_HQUERY = std::mem::zeroed();
ret = PdhOpenQueryA(std::ptr::null() as _, 0, &mut query);
if ret != 0 {
log::error!("PdhOpenQueryA failed: 0x{:X}", ret);
return;
}
let _query = RAIIPDHQuery(query);
let mut counter: PDH_HCOUNTER = std::mem::zeroed();
ret = PdhAddCounterA(query, COUNTER_PATH.as_ptr() as _, 0, &mut counter);
if ret != 0 {
log::error!("PdhAddCounterA failed: 0x{:X}", ret);
return;
}
ret = PdhCollectQueryData(query);
if ret != 0 {
log::error!("PdhCollectQueryData failed: 0x{:X}", ret);
return;
}
let mut _counter_type: DWORD = 0;
let mut counter_value: PDH_FMT_COUNTERVALUE = std::mem::zeroed();
let event = CreateEventA(std::ptr::null_mut(), FALSE, FALSE, std::ptr::null() as _);
if event.is_null() {
log::error!("CreateEventA failed: 0x{:X}", ret);
return;
}
let _event: RAIIHandle = RAIIHandle(event);
ret = PdhCollectQueryDataEx(query, SAMPLE_INTERVAL, event);
if ret != 0 {
log::error!("PdhCollectQueryDataEx failed: 0x{:X}", ret);
return;
}
let mut queue: VecDeque<f64> = VecDeque::new();
let mut recent_valid: VecDeque<bool> = VecDeque::new();
loop {
// latest one minute
if queue.len() == 31 {
queue.pop_front();
}
if recent_valid.len() == 31 {
recent_valid.pop_front();
}
// only report a value when most of the samples from the last minute are valid
if queue.len() > 0 && recent_valid.iter().filter(|v| **v).count() > queue.len() / 2 {
let sum: f64 = queue.iter().map(|f| f.to_owned()).sum();
let avg = sum / (queue.len() as f64);
*CPU_USAGE_ONE_MINUTE.lock().unwrap() = Some((avg, Instant::now()));
} else {
*CPU_USAGE_ONE_MINUTE.lock().unwrap() = None;
}
if WAIT_OBJECT_0 != WaitForSingleObject(event, INFINITE) {
recent_valid.push_back(false);
continue;
}
if PdhGetFormattedCounterValue(
counter,
PDH_FMT_DOUBLE,
&mut _counter_type,
&mut counter_value,
) != 0
|| counter_value.CStatus != 0
{
recent_valid.push_back(false);
continue;
}
queue.push_back(counter_value.u.doubleValue().clone());
recent_valid.push_back(true);
}
});
}
pub fn cpu_uage_one_minute() -> Option<f64> {
let v = CPU_USAGE_ONE_MINUTE.lock().unwrap().clone();
if let Some((v, instant)) = v {
if instant.elapsed().as_secs() < 30 {
return Some(v);
}
}
None
}
pub fn sync_cpu_usage(cpu_usage: Option<f64>) {
let v = match cpu_usage {
Some(cpu_usage) => Some((cpu_usage, Instant::now())),
None => None,
};
*CPU_USAGE_ONE_MINUTE.lock().unwrap() = v;
log::info!("cpu usage synced: {:?}", cpu_usage);
}
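A standalone sketch (not part of this commit) of the averaging rule used above: samples are collected every 2 seconds, the last 31 are kept (about one minute), and an average is only reported when more than half of the recent collections succeeded.

    use std::collections::VecDeque;

    // Illustrative re-statement of the window logic in start_cpu_performance_monitor.
    fn one_minute_average(samples: &VecDeque<f64>, recent_valid: &VecDeque<bool>) -> Option<f64> {
        let valid = recent_valid.iter().filter(|v| **v).count();
        if !samples.is_empty() && valid > samples.len() / 2 {
            Some(samples.iter().sum::<f64>() / samples.len() as f64)
        } else {
            None
        }
    }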


@ -1,8 +1,8 @@
use docopt::Docopt;
use hbb_common::env_logger::{init_from_env, Env, DEFAULT_FILTER_ENV};
use scrap::{
aom::{AomDecoder, AomDecoderConfig, AomEncoder, AomEncoderConfig},
codec::{EncoderApi, EncoderCfg},
aom::{AomDecoder, AomEncoder, AomEncoderConfig},
codec::{codec_thread_num, EncoderApi, EncoderCfg, Quality as Q},
Capturer, Display, TraitCapturer, VpxDecoder, VpxDecoderConfig, VpxEncoder, VpxEncoderConfig,
VpxVideoCodecId::{self, *},
STRIDE_ALIGN,
@ -15,13 +15,14 @@ const USAGE: &'static str = "
Codec benchmark.
Usage:
benchmark [--count=COUNT] [--bitrate=KBS] [--hw-pixfmt=PIXFMT]
benchmark [--count=COUNT] [--quality=QUALITY] [--hw-pixfmt=PIXFMT]
benchmark (-h | --help)
Options:
-h --help Show this screen.
--count=COUNT Capture frame count [default: 100].
--bitrate=KBS Video bitrate in kilobits per second [default: 5000].
--quality=QUALITY Video quality [default: Balanced].
Valid values: Best, Balanced, Low.
--hw-pixfmt=PIXFMT Hardware codec pixfmt. [default: i420]
Valid values: i420, nv12.
";
@ -29,7 +30,7 @@ Options:
#[derive(Debug, serde::Deserialize)]
struct Args {
flag_count: usize,
flag_bitrate: usize,
flag_quality: Quality,
flag_hw_pixfmt: Pixfmt,
}
@ -39,20 +40,32 @@ enum Pixfmt {
NV12,
}
#[derive(Debug, serde::Deserialize)]
enum Quality {
Best,
Balanced,
Low,
}
fn main() {
init_from_env(Env::default().filter_or(DEFAULT_FILTER_ENV, "info"));
let args: Args = Docopt::new(USAGE)
.and_then(|d| d.deserialize())
.unwrap_or_else(|e| e.exit());
let bitrate_k = args.flag_bitrate;
let quality = args.flag_quality;
let yuv_count = args.flag_count;
let (yuvs, width, height) = capture_yuv(yuv_count);
println!(
"benchmark {}x{} bitrate:{}k hw_pixfmt:{:?}",
width, height, bitrate_k, args.flag_hw_pixfmt
"benchmark {}x{} quality:{:?}k hw_pixfmt:{:?}",
width, height, quality, args.flag_hw_pixfmt
);
[VP8, VP9].map(|c| test_vpx(c, &yuvs, width, height, bitrate_k, yuv_count));
test_av1(&yuvs, width, height, bitrate_k, yuv_count);
let quality = match quality {
Quality::Best => Q::Best,
Quality::Balanced => Q::Balanced,
Quality::Low => Q::Low,
};
[VP8, VP9].map(|c| test_vpx(c, &yuvs, width, height, quality, yuv_count));
test_av1(&yuvs, width, height, quality, yuv_count);
#[cfg(feature = "hwcodec")]
{
use hwcodec::AVPixelFormat;
@ -61,7 +74,7 @@ fn main() {
Pixfmt::NV12 => AVPixelFormat::AV_PIX_FMT_NV12,
};
let yuvs = hw::vpx_yuv_to_hw_yuv(yuvs, width, height, hw_pixfmt);
hw::test(&yuvs, width, height, bitrate_k, yuv_count, hw_pixfmt);
hw::test(&yuvs, width, height, quality, yuv_count, hw_pixfmt);
}
}
@ -95,13 +108,14 @@ fn test_vpx(
yuvs: &Vec<Vec<u8>>,
width: usize,
height: usize,
bitrate_k: usize,
quality: Q,
yuv_count: usize,
) {
let config = EncoderCfg::VPX(VpxEncoderConfig {
width: width as _,
height: height as _,
bitrate: bitrate_k as _,
timebase: [1, 1000],
quality,
codec: codec_id,
});
let mut encoder = VpxEncoder::new(config).unwrap();
@ -129,11 +143,7 @@ fn test_vpx(
size / yuv_count
);
let mut decoder = VpxDecoder::new(VpxDecoderConfig {
codec: codec_id,
num_threads: (num_cpus::get() / 2) as _,
})
.unwrap();
let mut decoder = VpxDecoder::new(VpxDecoderConfig { codec: codec_id }).unwrap();
let start = Instant::now();
for vpx in vpxs {
let _ = decoder.decode(&vpx);
@ -146,11 +156,11 @@ fn test_vpx(
);
}
fn test_av1(yuvs: &Vec<Vec<u8>>, width: usize, height: usize, bitrate_k: usize, yuv_count: usize) {
fn test_av1(yuvs: &Vec<Vec<u8>>, width: usize, height: usize, quality: Q, yuv_count: usize) {
let config = EncoderCfg::AOM(AomEncoderConfig {
width: width as _,
height: height as _,
bitrate: bitrate_k as _,
quality,
});
let mut encoder = AomEncoder::new(config).unwrap();
let start = Instant::now();
@ -171,10 +181,7 @@ fn test_av1(yuvs: &Vec<Vec<u8>>, width: usize, height: usize, bitrate_k: usize,
start.elapsed() / yuv_count as _,
size / yuv_count
);
let mut decoder = AomDecoder::new(AomDecoderConfig {
num_threads: (num_cpus::get() / 2) as _,
})
.unwrap();
let mut decoder = AomDecoder::new().unwrap();
let start = Instant::now();
for av1 in av1s {
let _ = decoder.decode(&av1);
@ -206,21 +213,23 @@ mod hw {
yuvs: &Vec<Vec<u8>>,
width: usize,
height: usize,
bitrate_k: usize,
quality: Q,
yuv_count: usize,
pixfmt: AVPixelFormat,
) {
let bitrate = scrap::hwcodec::HwEncoder::convert_quality(quality);
let ctx = EncodeContext {
name: String::from(""),
width: width as _,
height: height as _,
pixfmt,
align: 0,
bitrate: (bitrate_k * 1000) as _,
bitrate: bitrate as i32 * 1000,
timebase: [1, 30],
gop: 60,
quality: Quality_Default,
rc: RC_DEFAULT,
thread_count: codec_thread_num() as _,
};
let encoders = Encoder::available_encoders(ctx.clone());
@ -273,6 +282,7 @@ mod hw {
let ctx = DecodeContext {
name: info.name,
device_type: info.hwdevice,
thread_count: codec_thread_num() as _,
};
let mut decoder = Decoder::new(ctx.clone()).unwrap();


@ -13,7 +13,7 @@ use std::time::{Duration, Instant};
use std::{io, thread};
use docopt::Docopt;
use scrap::codec::{EncoderApi, EncoderCfg};
use scrap::codec::{EncoderApi, EncoderCfg, Quality as Q};
use webm::mux;
use webm::mux::Track;
@ -24,17 +24,18 @@ const USAGE: &'static str = "
Simple WebM screen capture.
Usage:
record-screen <path> [--time=<s>] [--fps=<fps>] [--bv=<kbps>] [--ba=<kbps>] [--codec CODEC]
record-screen <path> [--time=<s>] [--fps=<fps>] [--quality=<quality>] [--ba=<kbps>] [--codec CODEC]
record-screen (-h | --help)
Options:
-h --help Show this screen.
--time=<s> Recording duration in seconds.
--fps=<fps> Frames per second [default: 30].
--bv=<kbps> Video bitrate in kilobits per second [default: 5000].
--ba=<kbps> Audio bitrate in kilobits per second [default: 96].
--codec CODEC Configure the codec used. [default: vp9]
Valid values: vp8, vp9.
-h --help Show this screen.
--time=<s> Recording duration in seconds.
--fps=<fps> Frames per second [default: 30].
--quality=<quality> Video quality [default: Balanced].
Valid values: Best, Balanced, Low.
--ba=<kbps> Audio bitrate in kilobits per second [default: 96].
--codec CODEC Configure the codec used. [default: vp9]
Valid values: vp8, vp9.
";
#[derive(Debug, serde::Deserialize)]
@ -43,7 +44,14 @@ struct Args {
flag_codec: Codec,
flag_time: Option<u64>,
flag_fps: u64,
flag_bv: u32,
flag_quality: Quality,
}
#[derive(Debug, serde::Deserialize)]
enum Quality {
Best,
Balanced,
Low,
}
#[derive(Debug, serde::Deserialize)]
@ -97,11 +105,16 @@ fn main() -> io::Result<()> {
let mut vt = webm.add_video_track(width, height, None, mux_codec);
// Setup the encoder.
let quality = match args.flag_quality {
Quality::Best => Q::Best,
Quality::Balanced => Q::Balanced,
Quality::Low => Q::Low,
};
let mut vpx = vpx_encode::VpxEncoder::new(EncoderCfg::VPX(vpx_encode::VpxEncoderConfig {
width,
height,
bitrate: args.flag_bv,
timebase: [1, 1000],
quality,
codec: vpx_codec,
}))
.unwrap();


@ -99,9 +99,11 @@ pub extern "system" fn Java_com_carriez_flutter_1hbb_MainService_onVideoFrameUpd
buffer: JObject,
) {
let jb = JByteBuffer::from(buffer);
let data = env.get_direct_buffer_address(&jb).unwrap();
let len = env.get_direct_buffer_capacity(&jb).unwrap();
VIDEO_RAW.lock().unwrap().update(data, len);
if let Ok(data) = env.get_direct_buffer_address(&jb) {
if let Ok(len) = env.get_direct_buffer_capacity(&jb) {
VIDEO_RAW.lock().unwrap().update(data, len);
}
}
}
#[no_mangle]
@ -111,9 +113,11 @@ pub extern "system" fn Java_com_carriez_flutter_1hbb_MainService_onAudioFrameUpd
buffer: JObject,
) {
let jb = JByteBuffer::from(buffer);
let data = env.get_direct_buffer_address(&jb).unwrap();
let len = env.get_direct_buffer_capacity(&jb).unwrap();
AUDIO_RAW.lock().unwrap().update(data, len);
if let Ok(data) = env.get_direct_buffer_address(&jb) {
if let Ok(len) = env.get_direct_buffer_capacity(&jb) {
AUDIO_RAW.lock().unwrap().update(data, len);
}
}
}
#[no_mangle]
@ -142,12 +146,12 @@ pub extern "system" fn Java_com_carriez_flutter_1hbb_MainService_init(
ctx: JObject,
) {
log::debug!("MainService init from java");
let jvm = env.get_java_vm().unwrap();
*JVM.write().unwrap() = Some(jvm);
let context = env.new_global_ref(ctx).unwrap();
*MAIN_SERVICE_CTX.write().unwrap() = Some(context);
if let Ok(jvm) = env.get_java_vm() {
*JVM.write().unwrap() = Some(jvm);
if let Ok(context) = env.new_global_ref(ctx) {
*MAIN_SERVICE_CTX.write().unwrap() = Some(context);
}
}
}
pub fn call_main_service_mouse_input(mask: i32, x: i32, y: i32) -> JniResult<()> {


@ -6,6 +6,7 @@
include!(concat!(env!("OUT_DIR"), "/aom_ffi.rs"));
use crate::codec::{base_bitrate, codec_thread_num, Quality};
use crate::{codec::EncoderApi, EncodeFrame, STRIDE_ALIGN};
use crate::{common::GoogleImage, generate_call_macro, generate_call_ptr_macro, Error, Result};
use hbb_common::{
@ -43,7 +44,7 @@ impl Default for aom_image_t {
pub struct AomEncoderConfig {
pub width: u32,
pub height: u32,
pub bitrate: u32,
pub quality: Quality,
}
pub struct AomEncoder {
@ -56,7 +57,6 @@ pub struct AomEncoder {
mod webrtc {
use super::*;
const kQpMin: u32 = 10;
const kUsageProfile: u32 = AOM_USAGE_REALTIME;
const kMinQindex: u32 = 145; // Min qindex threshold for QP scaling.
const kMaxQindex: u32 = 205; // Max qindex threshold for QP scaling.
@ -65,26 +65,8 @@ mod webrtc {
const kRtpTicksPerSecond: i32 = 90000;
const kMinimumFrameRate: f64 = 1.0;
const kQpMax: u32 = 25; // to-do: webrtc use dynamic value, no more than 63
fn number_of_threads(width: u32, height: u32, number_of_cores: usize) -> u32 {
// Keep the number of encoder threads equal to the possible number of
// column/row tiles, which is (1, 2, 4, 8). See comments below for
// AV1E_SET_TILE_COLUMNS/ROWS.
if width * height >= 640 * 360 && number_of_cores > 4 {
return 4;
} else if width * height >= 320 * 180 && number_of_cores > 2 {
return 2;
} else {
// Use 2 threads for low res on ARM.
#[cfg(any(target_arch = "arm", target_arch = "aarch64", target_os = "android"))]
if width * height >= 320 * 180 && number_of_cores > 2 {
return 2;
}
// 1 thread less than VGA.
return 1;
}
}
pub const DEFAULT_Q_MAX: u32 = 56; // no more than 63
pub const DEFAULT_Q_MIN: u32 = 12; // no more than 63, litter than q_max
// Only positive speeds, range for real-time coding currently is: 6 - 8.
// Lower means slower/better quality, higher means fastest/lower quality.
@ -119,14 +101,26 @@ mod webrtc {
// Overwrite default config with input encoder settings & RTC-relevant values.
c.g_w = cfg.width;
c.g_h = cfg.height;
c.g_threads = number_of_threads(cfg.width, cfg.height, num_cpus::get());
c.g_threads = codec_thread_num() as _;
c.g_timebase.num = 1;
c.g_timebase.den = kRtpTicksPerSecond;
c.rc_target_bitrate = cfg.bitrate; // kilobits/sec.
c.g_input_bit_depth = kBitDepth;
c.kf_mode = aom_kf_mode::AOM_KF_DISABLED;
c.rc_min_quantizer = kQpMin;
c.rc_max_quantizer = kQpMax;
let (q_min, q_max, b) = AomEncoder::convert_quality(cfg.quality);
if q_min > 0 && q_min < q_max && q_max < 64 {
c.rc_min_quantizer = q_min;
c.rc_max_quantizer = q_max;
} else {
c.rc_min_quantizer = DEFAULT_Q_MIN;
c.rc_max_quantizer = DEFAULT_Q_MAX;
}
let base_bitrate = base_bitrate(cfg.width as _, cfg.height as _);
let bitrate = base_bitrate * b / 100;
if bitrate > 0 {
c.rc_target_bitrate = bitrate;
} else {
c.rc_target_bitrate = base_bitrate;
}
c.rc_undershoot_pct = 50;
c.rc_overshoot_pct = 50;
c.rc_buf_initial_sz = 600;
@ -259,11 +253,24 @@ impl EncoderApi for AomEncoder {
true
}
fn set_bitrate(&mut self, bitrate: u32) -> ResultType<()> {
let mut new_enc_cfg = unsafe { *self.ctx.config.enc.to_owned() };
new_enc_cfg.rc_target_bitrate = bitrate;
call_aom!(aom_codec_enc_config_set(&mut self.ctx, &new_enc_cfg));
return Ok(());
fn set_quality(&mut self, quality: Quality) -> ResultType<()> {
let mut c = unsafe { *self.ctx.config.enc.to_owned() };
let (q_min, q_max, b) = Self::convert_quality(quality);
if q_min > 0 && q_min < q_max && q_max < 64 {
c.rc_min_quantizer = q_min;
c.rc_max_quantizer = q_max;
}
let bitrate = base_bitrate(self.width as _, self.height as _) * b / 100;
if bitrate > 0 {
c.rc_target_bitrate = bitrate;
}
call_aom!(aom_codec_enc_config_set(&mut self.ctx, &c));
Ok(())
}
fn bitrate(&self) -> u32 {
let c = unsafe { *self.ctx.config.enc.to_owned() };
c.rc_target_bitrate
}
}
@ -319,6 +326,35 @@ impl AomEncoder {
..Default::default()
}
}
pub fn convert_quality(quality: Quality) -> (u32, u32, u32) {
// we can use lower bitrate for av1
match quality {
Quality::Best => (12, 25, 100),
Quality::Balanced => (12, 35, 100 * 2 / 3),
Quality::Low => (18, 45, 50),
Quality::Custom(b) => {
let (q_min, q_max) = Self::calc_q_values(b);
(q_min, q_max, b)
}
}
}
#[inline]
fn calc_q_values(b: u32) -> (u32, u32) {
let b = std::cmp::min(b, 200);
let q_min1: i32 = 24;
let q_min2 = 12;
let q_max1 = 45;
let q_max2 = 25;
let t = b as f32 / 200.0;
let q_min: u32 = ((1.0 - t) * q_min1 as f32 + t * q_min2 as f32).round() as u32;
let q_max = ((1.0 - t) * q_max1 as f32 + t * q_max2 as f32).round() as u32;
(q_min, q_max)
}
}
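A worked example (not part of this commit) of the Custom(b) path above: the quantizer range is linearly interpolated as b goes from 0 to 200 (larger values are clamped), so b = 100 lands halfway between the endpoints.

    // Illustrative standalone copy of AomEncoder::calc_q_values.
    fn calc_q_values_av1(b: u32) -> (u32, u32) {
        let t = b.min(200) as f32 / 200.0;
        let q_min = ((1.0 - t) * 24.0 + t * 12.0).round() as u32;
        let q_max = ((1.0 - t) * 45.0 + t * 25.0).round() as u32;
        (q_min, q_max)
    }
    // calc_q_values_av1(100) == (18, 35); calc_q_values_av1(200) == (12, 25), the same range as Quality::Best.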
impl Drop for AomEncoder {
@ -360,24 +396,16 @@ impl<'a> Iterator for EncodeFrames<'a> {
}
}
pub struct AomDecoderConfig {
pub num_threads: u32,
}
pub struct AomDecoder {
ctx: aom_codec_ctx_t,
}
impl AomDecoder {
pub fn new(cfg: AomDecoderConfig) -> Result<Self> {
pub fn new() -> Result<Self> {
let i = call_aom_ptr!(aom_codec_av1_dx());
let mut ctx = Default::default();
let cfg = aom_codec_dec_cfg_t {
threads: if cfg.num_threads == 0 {
num_cpus::get() as _
} else {
cfg.num_threads
},
threads: codec_thread_num() as _,
w: 0,
h: 0,
allow_lowbitdepth: 1,


@ -11,22 +11,23 @@ use crate::mediacodec::{
MediaCodecDecoder, MediaCodecDecoders, H264_DECODER_SUPPORT, H265_DECODER_SUPPORT,
};
use crate::{
aom::{self, AomDecoder, AomDecoderConfig, AomEncoder, AomEncoderConfig},
aom::{self, AomDecoder, AomEncoder, AomEncoderConfig},
common::GoogleImage,
vpxcodec::{self, VpxDecoder, VpxDecoderConfig, VpxEncoder, VpxEncoderConfig, VpxVideoCodecId},
CodecName, ImageRgb,
};
#[cfg(not(any(target_os = "android", target_os = "ios")))]
use hbb_common::sysinfo::{System, SystemExt};
use hbb_common::{
anyhow::anyhow,
bail,
config::PeerConfig,
log,
message_proto::{
supported_decoding::PreferCodec, video_frame, EncodedVideoFrames, Message,
SupportedDecoding, SupportedEncoding,
},
sysinfo::{System, SystemExt},
tokio::time::Instant,
ResultType,
};
#[cfg(any(feature = "hwcodec", feature = "mediacodec"))]
@ -35,6 +36,7 @@ use hbb_common::{config::Config2, lazy_static};
lazy_static::lazy_static! {
static ref PEER_DECODINGS: Arc<Mutex<HashMap<i32, SupportedDecoding>>> = Default::default();
static ref CODEC_NAME: Arc<Mutex<CodecName>> = Arc::new(Mutex::new(CodecName::VP9));
static ref THREAD_LOG_TIME: Arc<Mutex<Option<Instant>>> = Arc::new(Mutex::new(None));
}
#[derive(Debug, Clone)]
@ -42,7 +44,7 @@ pub struct HwEncoderConfig {
pub name: String,
pub width: usize,
pub height: usize,
pub bitrate: i32,
pub quality: Quality,
}
#[derive(Debug, Clone)]
@ -61,7 +63,9 @@ pub trait EncoderApi {
fn use_yuv(&self) -> bool;
fn set_bitrate(&mut self, bitrate: u32) -> ResultType<()>;
fn set_quality(&mut self, quality: Quality) -> ResultType<()>;
fn bitrate(&self) -> u32;
}
pub struct Encoder {
@ -83,9 +87,9 @@ impl DerefMut for Encoder {
}
pub struct Decoder {
vp8: VpxDecoder,
vp9: VpxDecoder,
av1: AomDecoder,
vp8: Option<VpxDecoder>,
vp9: Option<VpxDecoder>,
av1: Option<AomDecoder>,
#[cfg(feature = "hwcodec")]
hw: HwDecoders,
#[cfg(feature = "hwcodec")]
@ -190,7 +194,6 @@ impl Encoder {
#[allow(unused_mut)]
let mut auto_codec = CodecName::VP9;
#[cfg(not(any(target_os = "android", target_os = "ios")))]
if vp8_useable && System::new_all().total_memory() <= 4 * 1024 * 1024 * 1024 {
// 4 Gb
auto_codec = CodecName::VP8
@ -274,18 +277,13 @@ impl Decoder {
pub fn new() -> Decoder {
let vp8 = VpxDecoder::new(VpxDecoderConfig {
codec: VpxVideoCodecId::VP8,
num_threads: (num_cpus::get() / 2) as _,
})
.unwrap();
.ok();
let vp9 = VpxDecoder::new(VpxDecoderConfig {
codec: VpxVideoCodecId::VP9,
num_threads: (num_cpus::get() / 2) as _,
})
.unwrap();
let av1 = AomDecoder::new(AomDecoderConfig {
num_threads: (num_cpus::get() / 2) as _,
})
.unwrap();
.ok();
let av1 = AomDecoder::new().ok();
Decoder {
vp8,
vp9,
@ -315,13 +313,25 @@ impl Decoder {
) -> ResultType<bool> {
match frame {
video_frame::Union::Vp8s(vp8s) => {
Decoder::handle_vpxs_video_frame(&mut self.vp8, vp8s, rgb)
if let Some(vp8) = &mut self.vp8 {
Decoder::handle_vpxs_video_frame(vp8, vp8s, rgb)
} else {
bail!("vp8 decoder not available");
}
}
video_frame::Union::Vp9s(vp9s) => {
Decoder::handle_vpxs_video_frame(&mut self.vp9, vp9s, rgb)
if let Some(vp9) = &mut self.vp9 {
Decoder::handle_vpxs_video_frame(vp9, vp9s, rgb)
} else {
bail!("vp9 decoder not available");
}
}
video_frame::Union::Av1s(av1s) => {
Decoder::handle_av1s_video_frame(&mut self.av1, av1s, rgb)
if let Some(av1) = &mut self.av1 {
Decoder::handle_av1s_video_frame(av1, av1s, rgb)
} else {
bail!("av1 decoder not available");
}
}
#[cfg(feature = "hwcodec")]
video_frame::Union::H264s(h264s) => {
@ -471,3 +481,72 @@ fn enable_hwcodec_option() -> bool {
}
return true; // default is true
}
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum Quality {
Best,
Balanced,
Low,
Custom(u32),
}
impl Default for Quality {
fn default() -> Self {
Self::Balanced
}
}
pub fn base_bitrate(width: u32, height: u32) -> u32 {
#[allow(unused_mut)]
let mut base_bitrate = ((width * height) / 1000) as u32; // same as 1.1.9
if base_bitrate == 0 {
base_bitrate = 1920 * 1080 / 1000;
}
#[cfg(target_os = "android")]
{
// fix when android screen shrinks
let fix = crate::Display::fix_quality() as u32;
log::debug!("Android screen, fix quality:{}", fix);
base_bitrate = base_bitrate * fix;
}
base_bitrate
}
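A worked example (not part of this commit): base_bitrate() is width*height/1000 kbps, and the percentage returned by each codec's convert_quality() scales it. For a 1920x1080 capture with the AV1 Balanced preset (100*2/3 = 66%):

    // Illustrative arithmetic only.
    fn example_balanced_av1_bitrate_kbps() -> u32 {
        let base = (1920u32 * 1080) / 1000; // 2073 kbps
        base * 66 / 100 // = 1368 kbps requested from the encoder
    }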
pub fn codec_thread_num() -> usize {
let max: usize = num_cpus::get();
let mut res = 0;
let info;
#[cfg(windows)]
{
let percent = hbb_common::platform::windows::cpu_uage_one_minute();
info = format!("cpu usage:{:?}", percent);
if let Some(pecent) = percent {
if pecent < 100.0 {
res = ((100.0 - pecent) * (max as f64) / 200.0).round() as usize;
}
}
}
#[cfg(not(windows))]
{
let s = System::new_all();
// https://man7.org/linux/man-pages/man3/getloadavg.3.html
let avg = s.load_average();
info = format!("cpu loadavg:{}", avg.one);
res = (((max as f64) - avg.one) * 0.5).round() as usize;
}
res = if res > 0 && res <= max / 2 {
res
} else {
std::cmp::max(1, max / 2)
};
// avoid frequent log
let log = match THREAD_LOG_TIME.lock().unwrap().clone() {
Some(instant) => instant.elapsed().as_secs() > 1,
None => true,
};
if log {
log::info!("cpu num: {max}, {info}, codec thread: {res}");
*THREAD_LOG_TIME.lock().unwrap() = Some(Instant::now());
}
res
}
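A standalone sketch (not part of this commit) of the clamp applied above: the headroom-based estimate is only trusted when it falls in 1..=max/2; otherwise half of the cores (at least one) is used.

    // Illustrative re-statement of the final clamp in codec_thread_num.
    fn clamp_codec_threads(raw: usize, max_cpus: usize) -> usize {
        if raw > 0 && raw <= max_cpus / 2 {
            raw
        } else {
            std::cmp::max(1, max_cpus / 2)
        }
    }
    // clamp_codec_threads(3, 8) == 3, clamp_codec_threads(7, 8) == 4, clamp_codec_threads(0, 8) == 4.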


@ -1,5 +1,5 @@
use crate::{
codec::{EncoderApi, EncoderCfg},
codec::{base_bitrate, codec_thread_num, EncoderApi, EncoderCfg},
hw, ImageFormat, ImageRgb, HW_STRIDE_ALIGN,
};
use hbb_common::{
@ -34,6 +34,9 @@ pub struct HwEncoder {
yuv: Vec<u8>,
pub format: DataFormat,
pub pixfmt: AVPixelFormat,
width: u32,
height: u32,
bitrate: u32, //kbs
}
impl EncoderApi for HwEncoder {
@ -43,17 +46,24 @@ impl EncoderApi for HwEncoder {
{
match cfg {
EncoderCfg::HW(config) => {
let b = Self::convert_quality(config.quality);
let base_bitrate = base_bitrate(config.width as _, config.height as _);
let mut bitrate = base_bitrate * b / 100;
if base_bitrate <= 0 {
bitrate = base_bitrate;
}
let ctx = EncodeContext {
name: config.name.clone(),
width: config.width as _,
height: config.height as _,
pixfmt: DEFAULT_PIXFMT,
align: HW_STRIDE_ALIGN as _,
bitrate: config.bitrate * 1000,
bitrate: bitrate as i32 * 1000,
timebase: DEFAULT_TIME_BASE,
gop: DEFAULT_GOP,
quality: DEFAULT_HW_QUALITY,
rc: DEFAULT_RC,
thread_count: codec_thread_num() as _, // ffmpeg's thread_count is used for cpu
};
let format = match Encoder::format_from_name(config.name.clone()) {
Ok(format) => format,
@ -70,6 +80,9 @@ impl EncoderApi for HwEncoder {
yuv: vec![],
format,
pixfmt: ctx.pixfmt,
width: ctx.width as _,
height: ctx.height as _,
bitrate,
}),
Err(_) => Err(anyhow!(format!("Failed to create encoder"))),
}
@ -114,10 +127,19 @@ impl EncoderApi for HwEncoder {
false
}
fn set_bitrate(&mut self, bitrate: u32) -> ResultType<()> {
self.encoder.set_bitrate((bitrate * 1000) as _).ok();
fn set_quality(&mut self, quality: crate::codec::Quality) -> ResultType<()> {
let b = Self::convert_quality(quality);
let bitrate = base_bitrate(self.width as _, self.height as _) * b / 100;
if bitrate > 0 {
self.encoder.set_bitrate((bitrate * 1000) as _).ok();
self.bitrate = bitrate;
}
Ok(())
}
fn bitrate(&self) -> u32 {
self.bitrate
}
}
impl HwEncoder {
@ -159,6 +181,16 @@ impl HwEncoder {
Err(_) => Ok(Vec::<EncodeFrame>::new()),
}
}
pub fn convert_quality(quality: crate::codec::Quality) -> u32 {
use crate::codec::Quality;
match quality {
Quality::Best => 150,
Quality::Balanced => 100,
Quality::Low => 50,
Quality::Custom(b) => b,
}
}
}
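A worked example (not part of this commit): for hardware codecs the preset maps directly to a percentage of base_bitrate (Best 150%, Balanced 100%, Low 50%), and the result is converted to bits per second for the EncodeContext.

    // Illustrative arithmetic for a 1920x1080 capture with Quality::Best.
    fn example_hw_best_bitrate_bps() -> i32 {
        let base_kbps = (1920u32 * 1080) / 1000; // 2073 kbps
        let kbps = base_kbps * 150 / 100; // Best => 3109 kbps
        kbps as i32 * 1000 // 3_109_000 bps handed to the hwcodec EncodeContext
    }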
pub struct HwDecoder {
@ -208,6 +240,7 @@ impl HwDecoder {
let ctx = DecodeContext {
name: info.name.clone(),
device_type: info.hwdevice.clone(),
thread_count: codec_thread_num() as _,
};
match Decoder::new(ctx) {
Ok(decoder) => Ok(HwDecoder { decoder, info }),
@ -304,6 +337,7 @@ pub fn check_config() {
gop: DEFAULT_GOP,
quality: DEFAULT_HW_QUALITY,
rc: DEFAULT_RC,
thread_count: 4,
};
let encoders = CodecInfo::score(Encoder::available_encoders(ctx));
let decoders = CodecInfo::score(Decoder::available_decoders());


@ -7,19 +7,21 @@ use hbb_common::log;
use hbb_common::message_proto::{EncodedVideoFrame, EncodedVideoFrames, Message, VideoFrame};
use hbb_common::ResultType;
use crate::codec::EncoderApi;
use crate::codec::{base_bitrate, codec_thread_num, EncoderApi, Quality};
use crate::{GoogleImage, STRIDE_ALIGN};
use super::vpx::{vpx_codec_err_t::*, *};
use super::vpx::{vp8e_enc_control_id::*, vpx_codec_err_t::*, *};
use crate::{generate_call_macro, generate_call_ptr_macro, Error, Result};
use hbb_common::bytes::Bytes;
use std::os::raw::c_uint;
use std::os::raw::{c_int, c_uint};
use std::{ptr, slice};
generate_call_macro!(call_vpx, false);
generate_call_macro!(call_vpx_allow_err, true);
generate_call_ptr_macro!(call_vpx_ptr);
const DEFAULT_QP_MAX: u32 = 56; // no more than 63
const DEFAULT_QP_MIN: u32 = 12; // no more than 63
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
pub enum VpxVideoCodecId {
VP8,
@ -54,11 +56,52 @@ impl EncoderApi for VpxEncoder {
VpxVideoCodecId::VP8 => call_vpx_ptr!(vpx_codec_vp8_cx()),
VpxVideoCodecId::VP9 => call_vpx_ptr!(vpx_codec_vp9_cx()),
};
let mut c = unsafe { std::mem::MaybeUninit::zeroed().assume_init() };
call_vpx!(vpx_codec_enc_config_default(i, &mut c, 0));
let c = match config.codec {
VpxVideoCodecId::VP8 => webrtc::vp8::enc_cfg(i, &config)?,
VpxVideoCodecId::VP9 => webrtc::vp9::enc_cfg(i, &config)?,
};
// https://www.webmproject.org/docs/encoder-parameters/
// default: c.rc_min_quantizer = 0, c.rc_max_quantizer = 63
// try rc_resize_allowed later
c.g_w = config.width;
c.g_h = config.height;
c.g_timebase.num = config.timebase[0];
c.g_timebase.den = config.timebase[1];
c.rc_undershoot_pct = 95;
// When the data buffer falls below this percentage of fullness, a dropped frame is indicated. Set the threshold to zero (0) to disable this feature.
// In dynamic scenes, low bitrate gets low fps while high bitrate gets high fps.
c.rc_dropframe_thresh = 25;
c.g_threads = codec_thread_num() as _;
c.g_error_resilient = VPX_ERROR_RESILIENT_DEFAULT;
// https://developers.google.com/media/vp9/bitrate-modes/
// Constant Bitrate mode (CBR) is recommended for live streaming with VP9.
c.rc_end_usage = vpx_rc_mode::VPX_CBR;
// c.kf_min_dist = 0;
// c.kf_max_dist = 999999;
c.kf_mode = vpx_kf_mode::VPX_KF_DISABLED; // reduce bandwidth a lot
let (q_min, q_max, b) = Self::convert_quality(config.quality);
if q_min > 0 && q_min < q_max && q_max < 64 {
c.rc_min_quantizer = q_min;
c.rc_max_quantizer = q_max;
} else {
c.rc_min_quantizer = DEFAULT_QP_MIN;
c.rc_max_quantizer = DEFAULT_QP_MAX;
}
let base_bitrate = base_bitrate(config.width as _, config.height as _);
let bitrate = base_bitrate * b / 100;
if bitrate > 0 {
c.rc_target_bitrate = bitrate;
} else {
c.rc_target_bitrate = base_bitrate;
}
/*
The VPX encoder supports two-pass encoding for rate control purposes.
In two-pass encoding, the entire encoding process is performed twice.
The first pass generates new control parameters for the second pass.
This approach enables the best PSNR at the same bit rate.
*/
let mut ctx = Default::default();
call_vpx!(vpx_codec_enc_init_ver(
@ -68,9 +111,50 @@ impl EncoderApi for VpxEncoder {
0,
VPX_ENCODER_ABI_VERSION as _
));
match config.codec {
VpxVideoCodecId::VP8 => webrtc::vp8::set_control(&mut ctx, &c)?,
VpxVideoCodecId::VP9 => webrtc::vp9::set_control(&mut ctx, &c)?,
if config.codec == VpxVideoCodecId::VP9 {
// set encoder internal speed settings
// in ffmpeg, it is --speed option
/*
set to 0 or a positive value 1-16, the codec will try to adapt its
complexity depending on the time it spends encoding. Increasing this
number will make the speed go up and the quality go down.
Negative values mean strict enforcement of this
while positive values are adaptive
*/
/* https://developers.google.com/media/vp9/live-encoding
Speed 5 to 8 should be used for live / real-time encoding.
Lower numbers (5 or 6) are higher quality but require more CPU power.
Higher numbers (7 or 8) will be lower quality but more manageable for lower latency
use cases and also for lower CPU power devices such as mobile.
*/
call_vpx!(vpx_codec_control_(&mut ctx, VP8E_SET_CPUUSED as _, 7,));
// set row level multi-threading
/*
as some people in comments and below have already commented,
more recent versions of libvpx support -row-mt 1 to enable tile row
multi-threading. This can increase the number of tiles by up to 4x in VP9
(since the max number of tile rows is 4, regardless of video height).
To enable this, use -tile-rows N where N is the number of tile rows in
log2 units (so -tile-rows 1 means 2 tile rows and -tile-rows 2 means 4 tile
rows). The total number of active threads will then be equal to
$tile_rows * $tile_columns
*/
call_vpx!(vpx_codec_control_(
&mut ctx,
VP9E_SET_ROW_MT as _,
1 as c_int
));
call_vpx!(vpx_codec_control_(
&mut ctx,
VP9E_SET_TILE_COLUMNS as _,
4 as c_int
));
} else if config.codec == VpxVideoCodecId::VP8 {
// https://github.com/webmproject/libvpx/blob/972149cafeb71d6f08df89e91a0130d6a38c4b15/vpx/vp8cx.h#L172
// https://groups.google.com/a/webmproject.org/g/webm-discuss/c/DJhSrmfQ61M
call_vpx!(vpx_codec_control_(&mut ctx, VP8E_SET_CPUUSED as _, 12,));
}
Ok(Self {
@ -108,11 +192,24 @@ impl EncoderApi for VpxEncoder {
true
}
fn set_bitrate(&mut self, bitrate: u32) -> ResultType<()> {
let mut new_enc_cfg = unsafe { *self.ctx.config.enc.to_owned() };
new_enc_cfg.rc_target_bitrate = bitrate;
call_vpx!(vpx_codec_enc_config_set(&mut self.ctx, &new_enc_cfg));
return Ok(());
fn set_quality(&mut self, quality: Quality) -> ResultType<()> {
let mut c = unsafe { *self.ctx.config.enc.to_owned() };
let (q_min, q_max, b) = Self::convert_quality(quality);
if q_min > 0 && q_min < q_max && q_max < 64 {
c.rc_min_quantizer = q_min;
c.rc_max_quantizer = q_max;
}
let bitrate = base_bitrate(self.width as _, self.height as _) * b / 100;
if bitrate > 0 {
c.rc_target_bitrate = bitrate;
}
call_vpx!(vpx_codec_enc_config_set(&mut self.ctx, &c));
Ok(())
}
fn bitrate(&self) -> u32 {
let c = unsafe { *self.ctx.config.enc.to_owned() };
c.rc_target_bitrate
}
}
@ -189,6 +286,34 @@ impl VpxEncoder {
..Default::default()
}
}
fn convert_quality(quality: Quality) -> (u32, u32, u32) {
match quality {
Quality::Best => (6, 45, 150),
Quality::Balanced => (12, 56, 100 * 2 / 3),
Quality::Low => (18, 56, 50),
Quality::Custom(b) => {
let (q_min, q_max) = Self::calc_q_values(b);
(q_min, q_max, b)
}
}
}
#[inline]
fn calc_q_values(b: u32) -> (u32, u32) {
let b = std::cmp::min(b, 200);
let q_min1: i32 = 36;
let q_min2 = 12;
let q_max1 = 56;
let q_max2 = 37;
let t = b as f32 / 200.0;
let q_min: u32 = ((1.0 - t) * q_min1 as f32 + t * q_min2 as f32).round() as u32;
let q_max = ((1.0 - t) * q_max1 as f32 + t * q_max2 as f32).round() as u32;
(q_min, q_max)
}
}
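A worked example (not part of this commit) of the VP8/VP9 interpolation above, which spans a wider quantizer range than the AV1 version: b = 200 collapses to (12, 37), while b = 100 gives (24, 47).

    // Illustrative standalone copy of VpxEncoder::calc_q_values.
    fn calc_q_values_vpx(b: u32) -> (u32, u32) {
        let t = b.min(200) as f32 / 200.0;
        let q_min = ((1.0 - t) * 36.0 + t * 12.0).round() as u32;
        let q_max = ((1.0 - t) * 56.0 + t * 37.0).round() as u32;
        (q_min, q_max)
    }
    // calc_q_values_vpx(100) == (24, 47); calc_q_values_vpx(200) == (12, 37).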
impl Drop for VpxEncoder {
@ -218,8 +343,10 @@ pub struct VpxEncoderConfig {
pub width: c_uint,
/// The height (in pixels).
pub height: c_uint,
/// The target bitrate (in kilobits per second).
pub bitrate: c_uint,
/// The timebase numerator and denominator (in seconds).
pub timebase: [c_int; 2],
/// The image quality
pub quality: Quality,
/// The codec
pub codec: VpxVideoCodecId,
}
@ -227,7 +354,6 @@ pub struct VpxEncoderConfig {
#[derive(Clone, Copy, Debug)]
pub struct VpxDecoderConfig {
pub codec: VpxVideoCodecId,
pub num_threads: u32,
}
pub struct EncodeFrames<'a> {
@ -274,11 +400,7 @@ impl VpxDecoder {
};
let mut ctx = Default::default();
let cfg = vpx_codec_dec_cfg_t {
threads: if config.num_threads == 0 {
num_cpus::get() as _
} else {
config.num_threads
},
threads: codec_thread_num() as _,
w: 0,
h: 0,
};
@ -417,371 +539,3 @@ impl Drop for Image {
}
unsafe impl Send for vpx_codec_ctx_t {}
mod webrtc {
use super::*;
const K_QP_MAX: u32 = 25; // worth adjusting
const MODE: VideoCodecMode = VideoCodecMode::KScreensharing;
const K_RTP_TICKS_PER_SECOND: i32 = 90000;
const NUMBER_OF_TEMPORAL_LAYERS: u32 = 1;
const DENOISING_ON: bool = true;
const FRAME_DROP_ENABLED: bool = false;
#[allow(dead_code)]
#[derive(Debug, PartialEq, Eq)]
enum VideoCodecMode {
KRealtimeVideo,
KScreensharing,
}
#[allow(dead_code)]
#[derive(Debug, PartialEq, Eq)]
enum VideoCodecComplexity {
KComplexityLow = -1,
KComplexityNormal = 0,
KComplexityHigh = 1,
KComplexityHigher = 2,
KComplexityMax = 3,
}
// https://webrtc.googlesource.com/src/+/refs/heads/main/modules/video_coding/codecs/vp9/libvpx_vp9_encoder.cc
pub mod vp9 {
use super::*;
const SVC: bool = false;
// https://webrtc.googlesource.com/src/+/refs/heads/main/api/video_codecs/video_encoder.cc#35
const KEY_FRAME_INTERVAL: u32 = 3000;
const ADAPTIVE_QP_MODE: bool = true;
pub fn enc_cfg(
i: *const vpx_codec_iface_t,
cfg: &VpxEncoderConfig,
) -> ResultType<vpx_codec_enc_cfg_t> {
let mut c: vpx_codec_enc_cfg_t =
unsafe { std::mem::MaybeUninit::zeroed().assume_init() };
call_vpx!(vpx_codec_enc_config_default(i, &mut c, 0));
// kProfile0
c.g_bit_depth = vpx_bit_depth::VPX_BITS_8;
c.g_profile = 0;
c.g_input_bit_depth = 8;
c.g_w = cfg.width;
c.g_h = cfg.height;
c.rc_target_bitrate = cfg.bitrate; // in kbit/s
c.g_error_resilient = if SVC { VPX_ERROR_RESILIENT_DEFAULT } else { 0 };
c.g_timebase.num = 1;
c.g_timebase.den = K_RTP_TICKS_PER_SECOND;
c.g_lag_in_frames = 0;
c.rc_dropframe_thresh = if FRAME_DROP_ENABLED { 30 } else { 0 };
c.rc_end_usage = vpx_rc_mode::VPX_CBR;
c.g_pass = vpx_enc_pass::VPX_RC_ONE_PASS;
c.rc_min_quantizer = if MODE == VideoCodecMode::KScreensharing {
8
} else {
2
};
c.rc_max_quantizer = K_QP_MAX;
c.rc_undershoot_pct = 50;
c.rc_overshoot_pct = 50;
c.rc_buf_initial_sz = 500;
c.rc_buf_optimal_sz = 600;
c.rc_buf_sz = 1000;
// Key-frame interval is enforced manually by this wrapper.
c.kf_mode = vpx_kf_mode::VPX_KF_DISABLED;
// TODO(webm:1592): work-around for libvpx issue, as it can still
// put some key-frames at will even in VPX_KF_DISABLED kf_mode.
c.kf_max_dist = KEY_FRAME_INTERVAL;
c.kf_min_dist = c.kf_max_dist;
c.rc_resize_allowed = 0;
// Determine number of threads based on the image size and #cores.
c.g_threads = number_of_threads(c.g_w, c.g_h, num_cpus::get());
c.temporal_layering_mode =
vp9e_temporal_layering_mode::VP9E_TEMPORAL_LAYERING_MODE_NOLAYERING as _;
c.ts_number_layers = 1;
c.ts_rate_decimator[0] = 1;
c.ts_periodicity = 1;
c.ts_layer_id[0] = 0;
Ok(c)
}
pub fn set_control(ctx: *mut vpx_codec_ctx_t, cfg: &vpx_codec_enc_cfg_t) -> ResultType<()> {
use vp8e_enc_control_id::*;
macro_rules! call_ctl {
($ctx:expr, $vpxe:expr, $arg:expr) => {{
call_vpx_allow_err!(vpx_codec_control_($ctx, $vpxe as i32, $arg));
}};
}
call_ctl!(
ctx,
VP8E_SET_MAX_INTRA_BITRATE_PCT,
max_intra_target(cfg.rc_buf_optimal_sz)
);
call_ctl!(ctx, VP9E_SET_AQ_MODE, if ADAPTIVE_QP_MODE { 3 } else { 0 });
call_ctl!(ctx, VP9E_SET_FRAME_PARALLEL_DECODING, 0);
#[cfg(not(any(target_arch = "arm", target_arch = "aarch64", target_os = "android")))]
call_ctl!(ctx, VP9E_SET_SVC_GF_TEMPORAL_REF, 0);
call_ctl!(
ctx,
VP8E_SET_CPUUSED,
get_default_performance_flags(cfg.g_w, cfg.g_h).0
);
call_ctl!(ctx, VP9E_SET_TILE_COLUMNS, cfg.g_threads >> 1);
// Turn on row-based multithreading.
call_ctl!(ctx, VP9E_SET_ROW_MT, 1);
let denoising = DENOISING_ON
&& allow_denoising()
&& get_default_performance_flags(cfg.g_w, cfg.g_h).1;
call_ctl!(
ctx,
VP9E_SET_NOISE_SENSITIVITY,
if denoising { 1 } else { 0 }
);
if MODE == VideoCodecMode::KScreensharing {
call_ctl!(ctx, VP9E_SET_TUNE_CONTENT, 1);
}
// Enable encoder skip of static/low content blocks.
call_ctl!(ctx, VP8E_SET_STATIC_THRESHOLD, 1);
Ok(())
}
// return (base_layer_speed, allow_denoising)
fn get_default_performance_flags(width: u32, height: u32) -> (u32, bool) {
if cfg!(any(
target_arch = "arm",
target_arch = "aarch64",
target_os = "android"
)) {
(8, true)
} else if width * height < 352 * 288 {
(5, true)
} else if width * height < 1920 * 1080 {
(7, true)
} else {
(9, false)
}
}
fn allow_denoising() -> bool {
// Do not enable the denoiser on ARM since optimization is pending.
// Denoiser is on by default on other platforms.
if cfg!(any(
target_arch = "arm",
target_arch = "aarch64",
target_os = "android"
)) {
false
} else {
true
}
}
fn number_of_threads(width: u32, height: u32, number_of_cores: usize) -> u32 {
// Keep the number of encoder threads equal to the possible number of column
// tiles, which is (1, 2, 4, 8). See comments below for VP9E_SET_TILE_COLUMNS.
if width * height >= 1280 * 720 && number_of_cores > 4 {
return 4;
} else if width * height >= 640 * 360 && number_of_cores > 2 {
return 2;
} else {
// Use 2 threads for low res on ARM.
#[cfg(any(target_arch = "arm", target_arch = "aarch64", target_os = "android"))]
if width * height >= 320 * 180 && number_of_cores > 2 {
return 2;
}
// 1 thread less than VGA.
return 1;
}
}
}
// https://webrtc.googlesource.com/src/+/refs/heads/main/modules/video_coding/codecs/vp8/libvpx_vp8_encoder.cc
pub mod vp8 {
use super::*;
// https://webrtc.googlesource.com/src/+/refs/heads/main/api/video_codecs/video_encoder.cc#23
const DISABLE_KEY_FRAME_INTERVAL: bool = true;
const KEY_FRAME_INTERVAL: u32 = 3000;
const COMPLEXITY: VideoCodecComplexity = VideoCodecComplexity::KComplexityNormal;
const K_TOKEN_PARTITIONS: vp8e_token_partitions =
vp8e_token_partitions::VP8_ONE_TOKENPARTITION;
pub fn enc_cfg(
i: *const vpx_codec_iface_t,
cfg: &VpxEncoderConfig,
) -> ResultType<vpx_codec_enc_cfg_t> {
let mut c: vpx_codec_enc_cfg_t =
unsafe { std::mem::MaybeUninit::zeroed().assume_init() };
call_vpx!(vpx_codec_enc_config_default(i, &mut c, 0));
c.g_w = cfg.width;
c.g_h = cfg.height;
c.g_timebase.num = 1;
c.g_timebase.den = K_RTP_TICKS_PER_SECOND;
c.g_lag_in_frames = 0;
c.g_error_resilient = if NUMBER_OF_TEMPORAL_LAYERS > 1 {
VPX_ERROR_RESILIENT_DEFAULT
} else {
0
};
c.rc_end_usage = vpx_rc_mode::VPX_CBR;
c.g_pass = vpx_enc_pass::VPX_RC_ONE_PASS;
c.rc_resize_allowed = 0;
c.rc_min_quantizer = if MODE == VideoCodecMode::KScreensharing {
12
} else {
2
};
c.rc_max_quantizer = K_QP_MAX;
c.rc_undershoot_pct = 100;
c.rc_overshoot_pct = 15;
c.rc_buf_initial_sz = 500;
c.rc_buf_optimal_sz = 600;
c.rc_buf_sz = 1000;
if !DISABLE_KEY_FRAME_INTERVAL && KEY_FRAME_INTERVAL > 0 {
c.kf_mode = vpx_kf_mode::VPX_KF_AUTO;
c.kf_max_dist = KEY_FRAME_INTERVAL;
} else {
c.kf_mode = vpx_kf_mode::VPX_KF_DISABLED;
}
c.g_threads = number_of_threads(c.g_w, c.g_h, num_cpus::get());
c.rc_target_bitrate = cfg.bitrate;
c.rc_dropframe_thresh = if FRAME_DROP_ENABLED { 30 } else { 0 };
Ok(c)
}
pub fn set_control(ctx: *mut vpx_codec_ctx_t, cfg: &vpx_codec_enc_cfg_t) -> ResultType<()> {
use vp8e_enc_control_id::*;
macro_rules! call_ctl {
($ctx:expr, $vpxe:expr, $arg:expr) => {{
call_vpx_allow_err!(vpx_codec_control_($ctx, $vpxe as i32, $arg));
}};
}
call_ctl!(
ctx,
VP8E_SET_STATIC_THRESHOLD,
if MODE == VideoCodecMode::KScreensharing {
100
} else {
1
}
);
call_ctl!(
ctx,
VP8E_SET_CPUUSED,
get_cpu_speed(cfg.g_w, cfg.g_h, num_cpus::get())
);
call_ctl!(ctx, VP8E_SET_TOKEN_PARTITIONS, K_TOKEN_PARTITIONS);
call_ctl!(
ctx,
VP8E_SET_MAX_INTRA_BITRATE_PCT,
max_intra_target(cfg.rc_buf_optimal_sz)
);
call_ctl!(
ctx,
VP8E_SET_SCREEN_CONTENT_MODE,
if MODE == VideoCodecMode::KScreensharing {
2 // On with more aggressive rate control.
} else {
0
}
);
Ok(())
}
fn get_cpu_speed_default() -> i32 {
match COMPLEXITY {
VideoCodecComplexity::KComplexityHigh => -5,
VideoCodecComplexity::KComplexityHigher => -4,
VideoCodecComplexity::KComplexityMax => -3,
_ => -6,
}
}
fn get_cpu_speed(width: u32, height: u32, number_of_cores: usize) -> i32 {
if cfg!(any(
target_arch = "arm",
target_arch = "aarch64",
target_os = "android"
)) {
if number_of_cores <= 3 {
-12
} else if width * height <= 352 * 288 {
-8
} else if width * height <= 640 * 480 {
-10
} else {
-12
}
} else {
let cpu_speed_default = get_cpu_speed_default();
if width * height < 352 * 288 {
if cpu_speed_default < -4 {
-4
} else {
cpu_speed_default
}
} else {
cpu_speed_default
}
}
}
fn number_of_threads(width: u32, height: u32, cpus: usize) -> u32 {
if cfg!(target_os = "android") {
if width * height >= 320 * 180 {
if cpus >= 4 {
// 3 threads for CPUs with 4 and more cores since most of times only 4
// cores will be active.
3
} else if cpus == 3 || cpus == 2 {
2
} else {
1
}
} else {
1
}
} else {
if width * height >= 1920 * 1080 && cpus > 8 {
8 // 8 threads for 1080p on high perf machines.
} else if width * height > 1280 * 960 && cpus >= 6 {
// 3 threads for 1080p.
return 3;
} else if width * height > 640 * 480 && cpus >= 3 {
// Default 2 threads for qHD/HD, but allow 3 if core count is high enough,
// as this will allow more margin for high-core/low clock machines or if
// not built with highest optimization.
if cpus >= 6 {
3
} else {
2
}
} else {
// 1 thread for VGA or less.
1
}
}
}
}
fn max_intra_target(optimal_buffer_size: u32) -> u32 {
const MAX_FRAMERATE: u32 = 60; // TODO
let scale_par: f32 = 0.5;
let target_pct: u32 =
((optimal_buffer_size as f32) * scale_par * MAX_FRAMERATE as f32 / 10.0) as u32;
let min_intra_size: u32 = 300;
if target_pct < min_intra_size {
min_intra_size
} else {
target_pct
}
}
}


@ -129,7 +129,7 @@ impl MagInterface {
unsafe {
// load lib
let lib_file_name = "Magnification.dll";
let lib_file_name_c = CString::new(lib_file_name).unwrap();
let lib_file_name_c = CString::new(lib_file_name)?;
s.lib_handle = LoadLibraryExA(
lib_file_name_c.as_ptr() as _,
NULL,
@ -189,7 +189,7 @@ impl MagInterface {
}
unsafe fn load_func(lib_module: HMODULE, func_name: &str) -> Result<FARPROC> {
let func_name_c = CString::new(func_name).unwrap();
let func_name_c = CString::new(func_name)?;
let func = GetProcAddress(lib_module, func_name_c.as_ptr() as _);
if func.is_null() {
return Err(Error::new(
@ -442,7 +442,7 @@ impl CapturerMag {
}
pub(crate) fn exclude(&mut self, cls: &str, name: &str) -> Result<bool> {
let name_c = CString::new(name).unwrap();
let name_c = CString::new(name)?;
unsafe {
let mut hwnd = if cls.len() == 0 {
FindWindowExA(NULL as _, NULL as _, NULL as _, name_c.as_ptr())


@ -594,13 +594,14 @@ fn request_screen_cast(
}
let fd_res = fd_res.lock().unwrap();
let streams_res = streams_res.lock().unwrap();
if fd_res.is_some() && !streams_res.is_empty() {
Ok((conn, fd_res.clone().unwrap(), streams_res.clone()))
} else {
Err(Box::new(DBusError(
"Failed to obtain screen capture.".into(),
)))
if let Some(fd_res) = fd_res.clone() {
if !streams_res.is_empty() {
return Ok((conn, fd_res, streams_res.clone()));
}
}
Err(Box::new(DBusError(
"Failed to obtain screen capture.".into(),
)))
}
pub fn get_capturables(capture_cursor: bool) -> Result<Vec<PipeWireCapturable>, Box<dyn Error>> {


@ -30,7 +30,12 @@ fn prompt_input() -> u8 {
unsafe fn plug_in(index: idd::UINT, edid: idd::UINT) {
println!("Plug in monitor begin");
if idd::FALSE == idd::MonitorPlugIn(index, edid, 25) {
println!("{}", CStr::from_ptr(idd::GetLastMsg()).to_str().unwrap());
println!(
"{}",
CStr::from_ptr(idd::GetLastMsg())
.to_str()
.unwrap_or_default()
);
} else {
println!("Plug in monitor done");
@ -46,7 +51,12 @@ unsafe fn plug_in(index: idd::UINT, edid: idd::UINT) {
sync: 60 as idd::DWORD,
});
if idd::FALSE == idd::MonitorModesUpdate(index, modes.len() as u32, modes.as_mut_ptr()) {
println!("{}", CStr::from_ptr(idd::GetLastMsg()).to_str().unwrap());
println!(
"{}",
CStr::from_ptr(idd::GetLastMsg())
.to_str()
.unwrap_or_default()
);
}
}
}
@ -55,7 +65,12 @@ unsafe fn plug_in(index: idd::UINT, edid: idd::UINT) {
unsafe fn plug_out(index: idd::UINT) {
println!("Plug out monitor begin");
if idd::FALSE == idd::MonitorPlugOut(index) {
println!("{}", CStr::from_ptr(idd::GetLastMsg()).to_str().unwrap());
println!(
"{}",
CStr::from_ptr(idd::GetLastMsg())
.to_str()
.unwrap_or_default()
);
} else {
println!("Plug out monitor done");
}
@ -64,7 +79,13 @@ unsafe fn plug_out(index: idd::UINT) {
fn main() {
#[cfg(windows)]
{
let abs_path = Path::new(DRIVER_INSTALL_PATH).canonicalize().unwrap();
let abs_path = match Path::new(DRIVER_INSTALL_PATH).canonicalize() {
Ok(p) => p,
Err(e) => {
println!("Failed to get absolute path of driver install: {:?}", e);
return;
}
};
unsafe {
let invalid_device = 0 as idd::HSWDEVICE;
@ -86,7 +107,12 @@ fn main() {
if idd::InstallUpdate(full_inf_path.as_ptr() as _, &mut reboot_required)
== idd::FALSE
{
println!("{}", CStr::from_ptr(idd::GetLastMsg()).to_str().unwrap());
println!(
"{}",
CStr::from_ptr(idd::GetLastMsg())
.to_str()
.unwrap_or_default()
);
} else {
println!(
"Install or update driver done, reboot is {} required",
@ -104,7 +130,12 @@ fn main() {
if idd::Uninstall(full_inf_path.as_ptr() as _, &mut reboot_required)
== idd::FALSE
{
println!("{}", CStr::from_ptr(idd::GetLastMsg()).to_str().unwrap());
println!(
"{}",
CStr::from_ptr(idd::GetLastMsg())
.to_str()
.unwrap_or_default()
);
} else {
println!(
"Uninstall driver done, reboot is {} required",
@ -123,7 +154,12 @@ fn main() {
continue;
}
if idd::FALSE == idd::DeviceCreate(&mut h_sw_device) {
println!("{}", CStr::from_ptr(idd::GetLastMsg()).to_str().unwrap());
println!(
"{}",
CStr::from_ptr(idd::GetLastMsg())
.to_str()
.unwrap_or_default()
);
idd::DeviceClose(h_sw_device);
h_sw_device = invalid_device;
} else {


@ -56,9 +56,15 @@ impl Interface for Session {
}
"re-input-password" => {
log::error!("{}: {}", title, text);
let password = rpassword::prompt_password("Enter password: ").unwrap();
let login_data = Data::Login((password, true));
self.sender.send(login_data).ok();
match rpassword::prompt_password("Enter password: ") {
Ok(password) => {
let login_data = Data::Login((password, true));
self.sender.send(login_data).ok();
}
Err(e) => {
log::error!("reinput password failed, {:?}", e);
}
}
}
msg if msg.contains("error") => {
log::error!("{}: {}: {}", msgtype, title, text);
@ -85,8 +91,23 @@ impl Interface for Session {
handle_hash(self.lc.clone(), &pass, hash, self, peer).await;
}
async fn handle_login_from_ui(&mut self, os_username: String, os_password: String, password: String, remember: bool, peer: &mut Stream) {
handle_login_from_ui(self.lc.clone(), os_username, os_password, password, remember, peer).await;
async fn handle_login_from_ui(
&mut self,
os_username: String,
os_password: String,
password: String,
remember: bool,
peer: &mut Stream,
) {
handle_login_from_ui(
self.lc.clone(),
os_username,
os_password,
password,
remember,
peer,
)
.await;
}
async fn handle_test_delay(&mut self, t: TestDelay, peer: &mut Stream) {
@ -117,13 +138,14 @@ pub async fn connect_test(id: &str, key: String, token: String) {
break;
}
Ok(Some(Ok(bytes))) => {
let msg_in = Message::parse_from_bytes(&bytes).unwrap();
match msg_in.union {
Some(message::Union::Hash(hash)) => {
log::info!("Got hash");
break;
if let Ok(msg_in) = Message::parse_from_bytes(&bytes) {
match msg_in.union {
Some(message::Union::Hash(hash)) => {
log::info!("Got hash");
break;
}
_ => {}
}
_ => {}
}
}
_ => {}


@ -24,6 +24,8 @@ use sha2::{Digest, Sha256};
use uuid::Uuid;
pub use file_trait::FileManager;
#[cfg(windows)]
use hbb_common::tokio;
#[cfg(not(feature = "flutter"))]
#[cfg(not(any(target_os = "android", target_os = "ios")))]
use hbb_common::tokio::sync::mpsc::UnboundedSender;
@ -315,19 +317,20 @@ impl Client {
if !ph.other_failure.is_empty() {
bail!(ph.other_failure);
}
match ph.failure.enum_value_or_default() {
punch_hole_response::Failure::ID_NOT_EXIST => {
match ph.failure.enum_value() {
Ok(punch_hole_response::Failure::ID_NOT_EXIST) => {
bail!("ID does not exist");
}
punch_hole_response::Failure::OFFLINE => {
Ok(punch_hole_response::Failure::OFFLINE) => {
bail!("Remote desktop is offline");
}
punch_hole_response::Failure::LICENSE_MISMATCH => {
Ok(punch_hole_response::Failure::LICENSE_MISMATCH) => {
bail!("Key mismatch");
}
punch_hole_response::Failure::LICENSE_OVERUSE => {
Ok(punch_hole_response::Failure::LICENSE_OVERUSE) => {
bail!("Key overuse");
}
_ => bail!("other punch hole failure"),
}
} else {
peer_nat_type = ph.nat_type();
@ -468,11 +471,8 @@ impl Client {
)
.await;
interface.update_direct(Some(false));
if conn.is_err() {
bail!(
"Failed to connect via relay server: {}",
conn.err().unwrap()
);
if let Err(e) = conn {
bail!("Failed to connect via relay server: {}", e);
}
direct = false;
} else {
@ -505,11 +505,13 @@ impl Client {
});
let mut sign_pk = None;
let mut option_pk = None;
if !signed_id_pk.is_empty() && rs_pk.is_some() {
if let Ok((id, pk)) = decode_id_pk(&signed_id_pk, &rs_pk.unwrap()) {
if id == peer_id {
sign_pk = Some(sign::PublicKey(pk));
option_pk = Some(pk.to_vec());
if !signed_id_pk.is_empty() {
if let Some(rs_pk) = rs_pk {
if let Ok((id, pk)) = decode_id_pk(&signed_id_pk, &rs_pk) {
if id == peer_id {
sign_pk = Some(sign::PublicKey(pk));
option_pk = Some(pk.to_vec());
}
}
}
if sign_pk.is_none() {
@ -1788,6 +1790,8 @@ where
let mut skip_beginning = 0;
std::thread::spawn(move || {
#[cfg(windows)]
sync_cpu_usage();
let mut video_handler = VideoHandler::new();
loop {
if let Ok(data) = video_receiver.recv() {
@ -1820,7 +1824,7 @@ where
);
}
// Clear to get real-time fps
if count > 300 {
if count > 150 {
count = 0;
duration = Duration::ZERO;
}
@ -1871,6 +1875,39 @@ pub fn start_audio_thread() -> MediaSender {
audio_sender
}
#[cfg(windows)]
fn sync_cpu_usage() {
use std::sync::Once;
static ONCE: Once = Once::new();
ONCE.call_once(|| {
let t = std::thread::spawn(do_sync_cpu_usage);
t.join().ok();
});
}
#[cfg(windows)]
#[tokio::main(flavor = "current_thread")]
async fn do_sync_cpu_usage() {
use crate::ipc::{connect, Data};
let start = std::time::Instant::now();
match connect(50, "").await {
Ok(mut conn) => {
if conn.send(&&Data::SyncWinCpuUsage(None)).await.is_ok() {
if let Ok(Some(data)) = conn.next_timeout(50).await {
match data {
Data::SyncWinCpuUsage(cpu_usage) => {
hbb_common::platform::windows::sync_cpu_usage(cpu_usage);
}
_ => {}
}
}
}
}
_ => {}
}
log::info!("{:?} used to sync cpu usage", start.elapsed());
}
/// Handle latency test.
///
/// # Arguments

View File

@ -234,7 +234,7 @@ impl<T: InvokeUiSession> Remote<T> {
}
}
_ = status_timer.tick() => {
self.fps_control();
self.fps_control(direct);
let elapsed = fps_instant.elapsed().as_millis();
if elapsed < 1000 {
continue;
@ -864,23 +864,56 @@ impl<T: InvokeUiSession> Remote<T> {
}
}
#[inline]
fn fps_control(&mut self) {
fn fps_control(&mut self, direct: bool) {
let len = self.video_queue.len();
let ctl = &mut self.fps_control;
// Current full speed decoding fps
let decode_fps = self.decode_fps.load(std::sync::atomic::Ordering::Relaxed);
// 500ms
let debounce = if decode_fps > 10 { decode_fps / 2 } else { 5 };
if len < debounce || decode_fps == 0 {
if decode_fps == 0 {
return;
}
// First setting , or the length of the queue still increases after setting, or exceed the size of the last setting again
if ctl.set_times < 10 // enough
&& (ctl.set_times == 0
|| (len > ctl.last_queue_size && ctl.last_set_instant.elapsed().as_secs() > 30))
let limited_fps = if direct {
decode_fps * 9 / 10 // 30 got 27
} else {
decode_fps * 4 / 5 // 30 got 24
};
// send full speed fps
let version = self.handler.lc.read().unwrap().version;
let max_encode_speed = 144 * 10 / 9;
if version >= hbb_common::get_version_number("1.2.1")
&& (ctl.last_full_speed_fps.is_none() // First time
|| ((ctl.last_full_speed_fps.unwrap_or_default() - decode_fps as i32).abs() >= 5 // diff 5
&& !(decode_fps > max_encode_speed // already exceed max encoding speed
&& ctl.last_full_speed_fps.unwrap_or_default() > max_encode_speed as i32)))
{
// 80% fps to ensure decoding is faster than encoding
let mut custom_fps = decode_fps as i32 * 4 / 5;
let mut misc = Misc::new();
misc.set_full_speed_fps(decode_fps as _);
let mut msg = Message::new();
msg.set_misc(misc);
self.sender.send(Data::Message(msg)).ok();
ctl.last_full_speed_fps = Some(decode_fps as _);
}
// decrease judgement
let debounce = if decode_fps > 10 { decode_fps / 2 } else { 5 }; // 500ms
let should_decrease = len >= debounce && len > ctl.last_queue_size + 5; // exceed debounce or still caching
// increase judgement
if len <= 1 {
ctl.idle_counter += 1;
} else {
ctl.idle_counter = 0;
}
let mut should_increase = false;
if let Some(last_custom_fps) = ctl.last_custom_fps {
// ever set
if last_custom_fps + 5 < limited_fps as i32 && ctl.idle_counter > 3 {
// limited_fps is at least 5 higher than the last set value, and the queue has been idle for more than 3 seconds
should_increase = true;
}
}
if should_decrease || should_increase {
// limited_fps to ensure decoding is faster than encoding
let mut custom_fps = limited_fps as i32;
if custom_fps < 1 {
custom_fps = 1;
}
@ -894,8 +927,7 @@ impl<T: InvokeUiSession> Remote<T> {
msg.set_misc(misc);
self.sender.send(Data::Message(msg)).ok();
ctl.last_queue_size = len;
ctl.set_times += 1;
ctl.last_set_instant = Instant::now();
ctl.last_custom_fps = Some(custom_fps);
}
// send refresh
if ctl.refresh_times < 10 // enough
@ -1406,7 +1438,7 @@ impl<T: InvokeUiSession> Remote<T> {
}
Some(message::Union::PeerInfo(pi)) => {
self.handler.set_displays(&pi.displays);
},
}
_ => {}
}
}
@ -1604,20 +1636,22 @@ impl RemoveJob {
struct FpsControl {
last_queue_size: usize,
set_times: usize,
refresh_times: usize,
last_set_instant: Instant,
last_refresh_instant: Instant,
last_full_speed_fps: Option<i32>,
last_custom_fps: Option<i32>,
idle_counter: usize,
}
impl Default for FpsControl {
fn default() -> Self {
Self {
last_queue_size: Default::default(),
set_times: Default::default(),
refresh_times: Default::default(),
last_set_instant: Instant::now(),
last_refresh_instant: Instant::now(),
last_full_speed_fps: None,
last_custom_fps: None,
idle_counter: 0,
}
}
}
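For orientation, a minimal sketch (outside the diff, helper name hypothetical) of the cap fps_control derives from the measured full-speed decode fps; the 9/10 and 4/5 factors are the ones used in the hunk above.

// --- illustrative example (not part of this commit) ---
fn limited_fps(decode_fps: u32, direct: bool) -> u32 {
    let fps = if direct {
        decode_fps * 9 / 10 // direct connection: 30 -> 27
    } else {
        decode_fps * 4 / 5 // relayed connection: 30 -> 24
    };
    fps.max(1) // the custom fps sent to the peer never drops below 1
}

fn main() {
    assert_eq!(limited_fps(30, true), 27);
    assert_eq!(limited_fps(30, false), 24);
}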

View File

@ -725,7 +725,7 @@ pub fn run_me<T: AsRef<std::ffi::OsStr>>(args: Vec<T>) -> std::io::Result<std::p
}
#[cfg(feature = "appimage")]
{
let appdir = std::env::var("APPDIR").unwrap();
let appdir = std::env::var("APPDIR").map_err(|_| std::io::ErrorKind::Other)?;
let appimage_cmd = std::path::Path::new(&appdir).join("AppRun");
log::info!("path: {:?}", appimage_cmd);
return std::process::Command::new(appimage_cmd).args(&args).spawn();

View File

@ -104,7 +104,10 @@ fn rust_args_to_c_args(args: Vec<String>, outlen: *mut c_int) -> *mut *mut c_cha
// Let's fill a vector with null-terminated strings
for s in args {
v.push(CString::new(s).unwrap());
match CString::new(s) {
Ok(s) => v.push(s),
Err(_) => return std::ptr::null_mut() as _,
}
}
// Turning each null-terminated string into a pointer.

View File

@ -229,6 +229,8 @@ pub enum Data {
#[cfg(all(feature = "flutter", feature = "plugin_framework"))]
#[cfg(not(any(target_os = "android", target_os = "ios")))]
Plugin(Plugin),
#[cfg(windows)]
SyncWinCpuUsage(Option<f64>),
}
#[tokio::main(flavor = "current_thread")]
@ -452,6 +454,16 @@ async fn handle(data: Data, stream: &mut Connection) {
.await
);
}
#[cfg(windows)]
Data::SyncWinCpuUsage(None) => {
allow_err!(
stream
.send(&Data::SyncWinCpuUsage(
hbb_common::platform::windows::cpu_uage_one_minute()
))
.await
);
}
Data::TestRendezvousServer => {
crate::test_rendezvous_server();
}
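One note on the wire convention here, with a small hedged sketch (stand-in function, not the real ipc::Data): the same variant carries both directions, a None payload marking the request and Some(usage) the service's reply.

// --- illustrative example (not part of this commit) ---
fn answer(request: Option<f64>, read_usage: impl Fn() -> Option<f64>) -> Option<f64> {
    match request {
        None => read_usage(), // request: look the value up and reply with it
        reply => reply,       // already a reply: pass it through unchanged
    }
}

fn main() {
    assert_eq!(answer(None, || Some(12.5)), Some(12.5));
    assert_eq!(answer(Some(3.0), || None), Some(3.0));
}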

View File

@ -12,15 +12,14 @@ fn main() {
let args: Vec<_> = std::env::args().skip(1).collect();
let api = args.get(2).cloned().unwrap_or_default();
if args.len() >= 2 {
println!(
"rustdesk-licensed-{}.exe",
gen_name(&License {
key: args[0].clone(),
host: args[1].clone(),
api,
})
.unwrap()
);
match gen_name(&License {
key: args[0].clone(),
host: args[1].clone(),
api,
}) {
Ok(name) => println!("rustdesk-licensed-{}.exe", name),
Err(e) => println!("{:?}", e),
}
}
if args.len() == 1 {
println!("{:?}", get_license_from_string(&args[0]));

View File

@ -64,11 +64,10 @@ impl AppHandler for Rc<Host> {
// https://github.com/xi-editor/druid/blob/master/druid-shell/src/platform/mac/application.rs
pub unsafe fn set_delegate(handler: Option<Box<dyn AppHandler>>) {
let decl = ClassDecl::new("AppDelegate", class!(NSObject));
if decl.is_none() {
let Some(mut decl) = ClassDecl::new("AppDelegate", class!(NSObject)) else {
log::error!("Failed to new AppDelegate");
return;
}
let mut decl = decl.unwrap();
};
decl.add_ivar::<*mut c_void>(APP_HANDLER_IVAR);
decl.add_method(
@ -116,7 +115,10 @@ pub unsafe fn set_delegate(handler: Option<Box<dyn AppHandler>>) {
let handler_ptr = Box::into_raw(Box::new(state));
(*delegate).set_ivar(APP_HANDLER_IVAR, handler_ptr as *mut c_void);
// Set the url scheme handler
let cls = Class::get("NSAppleEventManager").unwrap();
let Some(cls) = Class::get("NSAppleEventManager") else {
log::error!("Failed to get NSAppleEventManager");
return;
};
let manager: *mut Object = msg_send![cls, sharedAppleEventManager];
let _: () = msg_send![manager,
setEventHandler: delegate
@ -199,10 +201,10 @@ fn service_should_handle_reopen(
_sel: Sel,
_sender: id,
_has_visible_windows: BOOL,
) -> BOOL {
) -> BOOL {
log::debug!("Invoking the main rustdesk process");
std::thread::spawn(move || crate::handle_url_scheme("".to_string()));
// Prevent default logic.
std::thread::spawn(move || crate::handle_url_scheme("".to_string()));
// Prevent default logic.
NO
}

View File

@ -728,7 +728,9 @@ pub fn get_double_click_time() -> u32 {
// g_object_get (settings, "gtk-double-click-time", &double_click_time, NULL);
unsafe {
let mut double_click_time = 0u32;
let property = std::ffi::CString::new("gtk-double-click-time").unwrap();
let Ok(property) = std::ffi::CString::new("gtk-double-click-time") else {
return 0;
};
let settings = gtk_settings_get_default();
g_object_get(
settings,
@ -801,7 +803,10 @@ pub fn resolutions(name: &str) -> Vec<Resolution> {
if let Some(resolutions) = caps.name("resolutions") {
let resolution_pat =
r"\s*(?P<width>\d+)x(?P<height>\d+)\s+(?P<rates>(\d+\.\d+\D*)+)\s*\n";
let resolution_re = Regex::new(&format!(r"{}", resolution_pat)).unwrap();
let Ok(resolution_re) = Regex::new(&format!(r"{}", resolution_pat)) else {
log::error!("Regex new failed");
return vec![];
};
for resolution_caps in resolution_re.captures_iter(resolutions.as_str()) {
if let Some((width, height)) =
get_width_height_from_captures(&resolution_caps)

View File

@ -74,7 +74,7 @@ pub fn is_can_input_monitoring(prompt: bool) -> bool {
// remove just one app from all the permissions: tccutil reset All com.carriez.rustdesk
pub fn is_can_screen_recording(prompt: bool) -> bool {
// we got some report that we show no permission even after set it, so we try to use new api for screen recording check
// the new api is only available on macOS >= 10.15, but on stackoverflow, some people said it works on >= 10.16 (crash on 10.15),
// the new api is only available on macOS >= 10.15, but on stackoverflow, some people said it works on >= 10.16 (crash on 10.15),
// but also some said it has bug on 10.16, so we just use it on 11.0.
unsafe {
if CanUseNewApiForScreenCaptureCheck() == YES {
@ -146,14 +146,26 @@ pub fn is_installed_daemon(prompt: bool) -> bool {
return true;
}
let install_script = PRIVILEGES_SCRIPTS_DIR.get_file("install.scpt").unwrap();
let install_script_body = install_script.contents_utf8().unwrap();
let Some(install_script) = PRIVILEGES_SCRIPTS_DIR.get_file("install.scpt") else {
return false;
};
let Some(install_script_body) = install_script.contents_utf8() else {
return false;
};
let daemon_plist = PRIVILEGES_SCRIPTS_DIR.get_file(&daemon).unwrap();
let daemon_plist_body = daemon_plist.contents_utf8().unwrap();
let Some(daemon_plist) = PRIVILEGES_SCRIPTS_DIR.get_file(&daemon) else {
return false;
};
let Some(daemon_plist_body) = daemon_plist.contents_utf8() else {
return false;
};
let agent_plist = PRIVILEGES_SCRIPTS_DIR.get_file(&agent).unwrap();
let agent_plist_body = agent_plist.contents_utf8().unwrap();
let Some(agent_plist) = PRIVILEGES_SCRIPTS_DIR.get_file(&agent) else {
return false;
};
let Some(agent_plist_body) = agent_plist.contents_utf8() else {
return false;
};
std::thread::spawn(move || {
match std::process::Command::new("osascript")
@ -198,8 +210,12 @@ pub fn uninstall_service(show_new_window: bool) -> bool {
return false;
}
let script_file = PRIVILEGES_SCRIPTS_DIR.get_file("uninstall.scpt").unwrap();
let script_body = script_file.contents_utf8().unwrap();
let Some(script_file) = PRIVILEGES_SCRIPTS_DIR.get_file("uninstall.scpt") else {
return false;
};
let Some(script_body) = script_file.contents_utf8() else {
return false;
};
std::thread::spawn(move || {
match std::process::Command::new("osascript")

View File

@ -6,7 +6,9 @@ use crate::{
privacy_win_mag::{self, WIN_MAG_INJECTED_PROCESS_EXE},
};
use hbb_common::{
allow_err, bail,
allow_err,
anyhow::anyhow,
bail,
config::{self, Config},
log,
message_proto::Resolution,
@ -848,10 +850,9 @@ pub fn check_update_broker_process() -> ResultType<()> {
let origin_process_exe = privacy_win_mag::ORIGIN_PROCESS_EXE;
let exe_file = std::env::current_exe()?;
if exe_file.parent().is_none() {
let Some(cur_dir) = exe_file.parent() else {
bail!("Cannot get parent of current exe file");
}
let cur_dir = exe_file.parent().unwrap();
};
let cur_exe = cur_dir.join(process_exe);
if !std::path::Path::new(&cur_exe).exists() {
@ -902,29 +903,29 @@ fn get_install_info_with_subkey(subkey: String) -> (String, String, String, Stri
(subkey, path, start_menu, exe)
}
pub fn copy_raw_cmd(src_raw: &str, _raw: &str, _path: &str) -> String {
pub fn copy_raw_cmd(src_raw: &str, _raw: &str, _path: &str) -> ResultType<String> {
let main_raw = format!(
"XCOPY \"{}\" \"{}\" /Y /E /H /C /I /K /R /Z",
PathBuf::from(src_raw)
.parent()
.unwrap()
.ok_or(anyhow!("Can't get parent directory of {src_raw}"))?
.to_string_lossy()
.to_string(),
_path
);
return main_raw;
return Ok(main_raw);
}
pub fn copy_exe_cmd(src_exe: &str, exe: &str, path: &str) -> String {
let main_exe = copy_raw_cmd(src_exe, exe, path);
format!(
pub fn copy_exe_cmd(src_exe: &str, exe: &str, path: &str) -> ResultType<String> {
let main_exe = copy_raw_cmd(src_exe, exe, path)?;
Ok(format!(
"
{main_exe}
copy /Y \"{ORIGIN_PROCESS_EXE}\" \"{path}\\{broker_exe}\"
",
ORIGIN_PROCESS_EXE = privacy_win_mag::ORIGIN_PROCESS_EXE,
broker_exe = privacy_win_mag::INJECTED_PROCESS_EXE,
)
))
}
fn get_after_install(exe: &str) -> String {
@ -1118,7 +1119,7 @@ copy /Y \"{tmp_path}\\Uninstall {app_name}.lnk\" \"{path}\\\"
} else {
&dels
},
copy_exe = copy_exe_cmd(&src_exe, &exe, &path),
copy_exe = copy_exe_cmd(&src_exe, &exe, &path)?,
import_config = get_import_config(&exe),
);
run_cmds(cmds, debug, "install")?;
@ -1200,7 +1201,7 @@ fn write_cmds(cmds: String, ext: &str, tip: &str) -> ResultType<std::path::PathB
tmp.push(format!("{}_{}.{}", crate::get_app_name(), tip, ext));
let mut file = std::fs::File::create(&tmp)?;
if ext == "bat" {
let tmp2 = get_undone_file(&tmp);
let tmp2 = get_undone_file(&tmp)?;
std::fs::File::create(&tmp2).ok();
cmds = format!(
"
@ -1231,18 +1232,20 @@ fn to_le(v: &mut [u16]) -> &[u8] {
unsafe { v.align_to().1 }
}
fn get_undone_file(tmp: &PathBuf) -> PathBuf {
fn get_undone_file(tmp: &PathBuf) -> ResultType<PathBuf> {
let mut tmp1 = tmp.clone();
tmp1.set_file_name(format!(
"{}.undone",
tmp.file_name().unwrap().to_string_lossy()
tmp.file_name()
.ok_or(anyhow!("Failed to get filename of {:?}", tmp))?
.to_string_lossy()
));
tmp1
Ok(tmp1)
}
fn run_cmds(cmds: String, show: bool, tip: &str) -> ResultType<()> {
let tmp = write_cmds(cmds, "bat", tip)?;
let tmp2 = get_undone_file(&tmp);
let tmp2 = get_undone_file(&tmp)?;
let tmp_fn = tmp.to_str().unwrap_or("");
let res = runas::Command::new("cmd")
.args(&["/C", &tmp_fn])
@ -1547,18 +1550,8 @@ pub fn elevate_or_run_as_system(is_setup: bool, is_elevate: bool, is_run_as_syst
}
}
// https://github.com/mgostIH/process_list/blob/master/src/windows/mod.rs
#[repr(transparent)]
pub(self) struct RAIIHandle(pub HANDLE);
impl Drop for RAIIHandle {
fn drop(&mut self) {
// This never gives problem except when running under a debugger.
unsafe { CloseHandle(self.0) };
}
}
pub fn is_elevated(process_id: Option<DWORD>) -> ResultType<bool> {
use hbb_common::platform::windows::RAIIHandle;
unsafe {
let handle: HANDLE = match process_id {
Some(process_id) => OpenProcess(PROCESS_QUERY_LIMITED_INFORMATION, FALSE, process_id),

View File

@ -153,10 +153,10 @@ pub fn start() -> ResultType<()> {
}
let exe_file = std::env::current_exe()?;
if exe_file.parent().is_none() {
let Some(cur_dir) = exe_file
.parent() else {
bail!("Cannot get parent of current exe file");
}
let cur_dir = exe_file.parent().unwrap();
};
let dll_file = cur_dir.join("WindowInjection.dll");
if !dll_file.exists() {
@ -344,6 +344,7 @@ async fn set_privacy_mode_state(
}
pub(super) mod privacy_hook {
use super::*;
use std::sync::mpsc::{channel, Sender};
@ -426,7 +427,7 @@ pub(super) mod privacy_hook {
}
Err(e) => {
// Fatal error
tx.send(format!("Unexpected err when hook {}", e)).unwrap();
allow_err!(tx.send(format!("Unexpected err when hook {}", e)));
return;
}
}

View File

@ -186,16 +186,18 @@ impl RendezvousMediator {
}
Some(rendezvous_message::Union::RegisterPkResponse(rpr)) => {
update_latency();
match rpr.result.enum_value_or_default() {
register_pk_response::Result::OK => {
match rpr.result.enum_value() {
Ok(register_pk_response::Result::OK) => {
Config::set_key_confirmed(true);
Config::set_host_key_confirmed(&rz.host_prefix, true);
*SOLVING_PK_MISMATCH.lock().unwrap() = "".to_owned();
}
register_pk_response::Result::UUID_MISMATCH => {
Ok(register_pk_response::Result::UUID_MISMATCH) => {
allow_err!(rz.handle_uuid_mismatch(&mut socket).await);
}
_ => {}
_ => {
log::error!("unknown RegisterPkResponse");
}
}
}
Some(rendezvous_message::Union::PunchHole(ph)) => {
@ -376,7 +378,7 @@ impl RendezvousMediator {
async fn handle_punch_hole(&self, ph: PunchHole, server: ServerPtr) -> ResultType<()> {
let relay_server = self.get_relay_server(ph.relay_server);
if ph.nat_type.enum_value_or_default() == NatType::SYMMETRIC
if ph.nat_type.enum_value() == Ok(NatType::SYMMETRIC)
|| Config::get_nat_type() == NatType::SYMMETRIC as i32
{
let uuid = Uuid::new_v4().to_string();
@ -515,7 +517,7 @@ async fn direct_server(server: ServerPtr) {
listener = Some(l);
log::info!(
"Direct server listening on: {:?}",
listener.as_ref().unwrap().local_addr()
listener.as_ref().map(|l| l.local_addr())
);
}
Err(err) => {

View File

@ -362,14 +362,7 @@ pub async fn start_server(is_server: bool) {
log::info!("DISPLAY={:?}", std::env::var("DISPLAY"));
log::info!("XAUTHORITY={:?}", std::env::var("XAUTHORITY"));
}
#[cfg(feature = "hwcodec")]
{
use std::sync::Once;
static ONCE: Once = Once::new();
ONCE.call_once(|| {
scrap::hwcodec::check_config_process();
})
}
call_once_each_process();
if is_server {
crate::common::set_server_running(true);
@ -530,3 +523,16 @@ async fn sync_and_watch_config_dir() {
}
log::warn!("skipped config sync");
}
fn call_once_each_process() {
use std::sync::Once;
static ONCE: Once = Once::new();
ONCE.call_once(|| {
#[cfg(feature = "hwcodec")]
scrap::hwcodec::check_config_process();
#[cfg(windows)]
unsafe {
hbb_common::platform::windows::start_cpu_performance_monitor();
}
})
}
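The Once guard above keeps per-process initialisation idempotent even if start_server is entered more than once; a stripped-down sketch of the same pattern:

// --- illustrative example (not part of this commit) ---
use std::sync::Once;

fn init_once() {
    static ONCE: Once = Once::new();
    ONCE.call_once(|| {
        // one-time, per-process setup goes here (hwcodec config check and the
        // Windows CPU performance monitor in the real function)
        println!("initialised");
    });
}

fn main() {
    init_once();
    init_once(); // second call is a no-op
}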

View File

@ -13,10 +13,10 @@
// https://github.com/krruzic/pulsectl
use super::*;
use magnum_opus::{Application::*, Channels::*, Encoder};
use std::sync::atomic::{AtomicBool, Ordering};
#[cfg(not(any(target_os = "linux", target_os = "android")))]
use hbb_common::anyhow::anyhow;
use magnum_opus::{Application::*, Channels::*, Encoder};
use std::sync::atomic::{AtomicBool, Ordering};
pub const NAME: &'static str = "audio";
pub const AUDIO_DATA_SIZE_U8: usize = 960 * 4; // 10ms in 48000 stereo
@ -206,13 +206,10 @@ mod cpal_impl {
}
}
}
if device.is_none() {
device = Some(
HOST.default_input_device()
.with_context(|| "Failed to get default input device for loopback")?,
);
}
let device = device.unwrap();
let device = device.unwrap_or(
HOST.default_input_device()
.with_context(|| "Failed to get default input device for loopback")?,
);
log::info!("Input device: {}", device.name().unwrap_or("".to_owned()));
let format = device
.default_input_config()

View File

@ -155,6 +155,7 @@ pub struct Connection {
restart: bool,
recording: bool,
last_test_delay: i64,
network_delay: Option<u32>,
lock_after_session_end: bool,
show_remote_cursor: bool,
// by peer
@ -195,6 +196,7 @@ pub struct Connection {
#[cfg(not(any(feature = "flatpak", feature = "appimage")))]
tx_desktop_ready: mpsc::Sender<()>,
closed: bool,
delay_response_instant: Instant,
}
impl ConnInner {
@ -292,6 +294,7 @@ impl Connection {
restart: Connection::permission("enable-remote-restart"),
recording: Connection::permission("enable-record-session"),
last_test_delay: 0,
network_delay: None,
lock_after_session_end: false,
show_remote_cursor: false,
ip: "".to_owned(),
@ -324,6 +327,7 @@ impl Connection {
#[cfg(not(any(feature = "flatpak", feature = "appimage")))]
tx_desktop_ready: _tx_desktop_ready,
closed: false,
delay_response_instant: Instant::now(),
};
if !conn.on_open(addr).await {
conn.closed = true;
@ -591,18 +595,19 @@ impl Connection {
break;
}
let time = get_time();
let mut qos = video_service::VIDEO_QOS.lock().unwrap();
if time > 0 && conn.last_test_delay == 0 {
conn.last_test_delay = time;
let mut msg_out = Message::new();
let qos = video_service::VIDEO_QOS.lock().unwrap();
msg_out.set_test_delay(TestDelay{
time,
last_delay:qos.current_delay,
target_bitrate:qos.target_bitrate,
last_delay:conn.network_delay.unwrap_or_default(),
target_bitrate: qos.bitrate(),
..Default::default()
});
conn.inner.send(msg_out.into());
}
qos.user_delay_response_elapsed(conn.inner.id(), conn.delay_response_instant.elapsed().as_millis());
}
}
}
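For orientation, a hedged sketch of the per-connection bookkeeping these hunks add (simplified fields, not the real Connection struct): the last measured network delay and the current QoS bitrate go out in each TestDelay, and the time since the peer last answered one is reported to the QoS.

// --- illustrative example (not part of this commit) ---
use std::time::Instant;

struct ConnDelay {
    network_delay: Option<u32>,      // last round trip reported by this peer
    delay_response_instant: Instant, // when that report arrived
}

impl ConnDelay {
    fn on_delay_response(&mut self, new_delay: u32) {
        self.network_delay = Some(new_delay);
        self.delay_response_instant = Instant::now();
    }

    // Reported to the QoS every tick; a peer that stops answering for a few
    // seconds is treated as "response delayed" and no longer drives fps up.
    fn elapsed_ms(&self) -> u128 {
        self.delay_response_instant.elapsed().as_millis()
    }
}

fn main() {
    let mut c = ConnDelay {
        network_delay: None,
        delay_response_instant: Instant::now(),
    };
    c.on_delay_response(120);
    assert_eq!(c.network_delay, Some(120));
    assert!(c.elapsed_ms() < 1000);
}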
@ -622,7 +627,6 @@ impl Connection {
);
video_service::notify_video_frame_fetched(id, None);
scrap::codec::Encoder::update(id, scrap::codec::EncodingUpdate::Remove);
video_service::VIDEO_QOS.lock().unwrap().reset();
if conn.authorized {
password::update_temporary_password();
}
@ -661,11 +665,11 @@ impl Connection {
}
MessageInput::Key((mut msg, press)) => {
// todo: press and down have similar meanings.
if press && msg.mode.unwrap() == KeyboardMode::Legacy {
if press && msg.mode.enum_value() == Ok(KeyboardMode::Legacy) {
msg.down = true;
}
handle_key(&msg);
if press && msg.mode.unwrap() == KeyboardMode::Legacy {
if press && msg.mode.enum_value() == Ok(KeyboardMode::Legacy) {
msg.down = false;
handle_key(&msg);
}
@ -1187,7 +1191,9 @@ impl Connection {
#[inline]
#[cfg(not(any(target_os = "android", target_os = "ios")))]
fn input_pointer(&self, msg: PointerDeviceEvent, conn_id: i32) {
self.tx_input.send(MessageInput::Pointer((msg, conn_id))).ok();
self.tx_input
.send(MessageInput::Pointer((msg, conn_id)))
.ok();
}
#[inline]
@ -1550,7 +1556,9 @@ impl Connection {
video_service::VIDEO_QOS
.lock()
.unwrap()
.update_network_delay(new_delay);
.user_network_delay(self.inner.id(), new_delay);
self.network_delay = Some(new_delay);
self.delay_response_instant = Instant::now();
}
} else if let Some(message::Union::SwitchSidesResponse(_s)) = msg.union {
#[cfg(feature = "flutter")]
@ -1588,7 +1596,8 @@ impl Connection {
self.input_mouse(me, self.inner.id());
}
}
Some(message::Union::PointerDeviceEvent(pde)) => {
Some(message::Union::PointerDeviceEvent(pde)) =>
{
#[cfg(not(any(target_os = "android", target_os = "ios")))]
if self.peer_keyboard_enabled() {
MOUSE_MOVE_TIME.store(get_time(), Ordering::SeqCst);
@ -1612,11 +1621,11 @@ impl Connection {
me.press
};
let key = match me.mode.enum_value_or_default() {
KeyboardMode::Map => {
let key = match me.mode.enum_value() {
Ok(KeyboardMode::Map) => {
Some(crate::keyboard::keycode_to_rdev_key(me.chr()))
}
KeyboardMode::Translate => {
Ok(KeyboardMode::Translate) => {
if let Some(key_event::Union::Chr(code)) = me.union {
Some(crate::keyboard::keycode_to_rdev_key(code & 0x0000FFFF))
} else {
@ -1899,11 +1908,9 @@ impl Connection {
// Drop the audio sender previously.
drop(std::mem::replace(&mut self.audio_sender, None));
self.audio_sender = Some(start_audio_thread());
allow_err!(self
.audio_sender
self.audio_sender
.as_ref()
.unwrap()
.send(MediaData::AudioFormat(format)));
.map(|a| allow_err!(a.send(MediaData::AudioFormat(format))));
}
}
#[cfg(feature = "flutter")]
@ -1929,6 +1936,10 @@ impl Connection {
crate::plugin::handle_client_event(&p.id, &self.lr.my_id, &p.content);
self.send(msg).await;
}
Some(misc::Union::FullSpeedFps(fps)) => video_service::VIDEO_QOS
.lock()
.unwrap()
.user_full_speed_fps(self.inner.id(), fps),
_ => {}
},
Some(message::Union::AudioFrame(frame)) => {
@ -2043,14 +2054,14 @@ impl Connection {
video_service::VIDEO_QOS
.lock()
.unwrap()
.update_image_quality(image_quality);
.user_image_quality(self.inner.id(), image_quality);
}
}
if o.custom_fps > 0 {
video_service::VIDEO_QOS
.lock()
.unwrap()
.update_user_fps(o.custom_fps as _);
.user_custom_fps(self.inner.id(), o.custom_fps as _);
}
if let Some(q) = o.supported_decoding.clone().take() {
scrap::codec::Encoder::update(self.inner.id(), scrap::codec::EncodingUpdate::New(q));
@ -2328,6 +2339,8 @@ async fn start_ipc(
mut _rx_desktop_ready: mpsc::Receiver<()>,
tx_stream_ready: mpsc::Sender<()>,
) -> ResultType<()> {
use hbb_common::anyhow::anyhow;
loop {
if !crate::platform::is_prelogin() {
break;
@ -2421,7 +2434,7 @@ async fn start_ipc(
}
let _res = tx_stream_ready.send(()).await;
let mut stream = stream.unwrap();
let mut stream = stream.ok_or(anyhow!("none stream"))?;
loop {
tokio::select! {
res = stream.next() => {
@ -2581,6 +2594,10 @@ mod raii {
if active_conns_lock.is_empty() {
crate::privacy_win_mag::stop();
}
video_service::VIDEO_QOS
.lock()
.unwrap()
.on_connection_close(self.0);
}
}
}

View File

@ -1497,11 +1497,11 @@ pub fn handle_key_(evt: &KeyEvent) {
_ => {}
};
match evt.mode.unwrap() {
KeyboardMode::Map => {
match evt.mode.enum_value() {
Ok(KeyboardMode::Map) => {
map_keyboard_mode(evt);
}
KeyboardMode::Translate => {
Ok(KeyboardMode::Translate) => {
translate_keyboard_mode(evt);
}
_ => {

View File

@ -231,7 +231,13 @@ pub mod server {
}
pub fn run_portable_service() {
let shmem = Arc::new(SharedMemory::open_existing(SHMEM_NAME).unwrap());
let shmem = match SharedMemory::open_existing(SHMEM_NAME) {
Ok(shmem) => Arc::new(shmem),
Err(e) => {
log::error!("Failed to open existing shared memory: {:?}", e);
return;
}
};
let shmem1 = shmem.clone();
let shmem2 = shmem.clone();
let mut threads = vec![];
@ -249,7 +255,7 @@ pub mod server {
}));
let record_pos_handle = crate::input_service::try_start_record_cursor_pos();
for th in threads.drain(..) {
th.join().unwrap();
th.join().ok();
log::info!("thread joined");
}
@ -319,7 +325,11 @@ pub mod server {
}
if c.is_none() {
*crate::video_service::CURRENT_DISPLAY.lock().unwrap() = current_display;
let (_, _current, display) = get_current_display().unwrap();
let Ok((_, _current, display)) = get_current_display() else {
log::error!("Failed to get current display");
*EXIT.lock().unwrap() = true;
return;
};
display_width = display.width();
display_height = display.height();
match Capturer::new(display, use_yuv) {
@ -380,8 +390,8 @@ pub mod server {
continue;
}
}
match c.as_mut().unwrap().frame(spf) {
Ok(f) => {
match c.as_mut().map(|f| f.frame(spf)) {
Some(Ok(f)) => {
utils::set_frame_info(
&shmem,
FrameInfo {
@ -396,7 +406,7 @@ pub mod server {
first_frame_captured = true;
dxgi_failed_times = 0;
}
Err(e) => {
Some(Err(e)) => {
if e.kind() != std::io::ErrorKind::WouldBlock {
// DXGI_ERROR_INVALID_CALL after each success on Microsoft GPU driver
// log::error!("capture frame failed:{:?}", e);
@ -406,7 +416,8 @@ pub mod server {
std::thread::sleep(spf);
continue;
}
if !c.as_ref().unwrap().is_gdi() {
if c.as_ref().map(|c| c.is_gdi()) == Some(false) {
// not gdi
dxgi_failed_times += 1;
}
if dxgi_failed_times > MAX_DXGI_FAIL_TIME {
@ -418,6 +429,9 @@ pub mod server {
shmem.write(ADDR_CAPTURE_WOULDBLOCK, &utils::i32_to_vec(TRUE));
}
}
_ => {
println!("unreachable!");
}
}
}
}

View File

@ -1,8 +1,9 @@
use super::*;
use scrap::codec::Quality;
use std::time::Duration;
pub const FPS: u8 = 30;
pub const MIN_FPS: u8 = 1;
pub const MAX_FPS: u8 = 120;
pub const FPS: u32 = 30;
pub const MIN_FPS: u32 = 1;
pub const MAX_FPS: u32 = 120;
trait Percent {
fn as_percent(&self) -> u32;
}
@ -18,22 +19,32 @@ impl Percent for ImageQuality {
}
}
pub struct VideoQoS {
width: u32,
height: u32,
user_image_quality: u32,
current_image_quality: u32,
enable_abr: bool,
pub current_delay: u32,
pub fps: u8, // abr
pub user_fps: u8,
pub target_bitrate: u32, // abr
updated: bool,
#[derive(Default, Debug, Copy, Clone)]
struct Delay {
state: DelayState,
debounce_count: u32,
staging_state: DelayState,
delay: u32,
counter: u32,
slower_than_old_state: Option<bool>,
}
#[derive(PartialEq, Debug)]
#[derive(Default, Debug, Copy, Clone)]
struct UserData {
full_speed_fps: Option<u32>,
custom_fps: Option<u32>,
quality: Option<(i64, Quality)>, // (time, quality)
delay: Option<Delay>,
response_delayed: bool,
}
pub struct VideoQoS {
fps: u32,
quality: Quality,
users: HashMap<i32, UserData>,
bitrate_store: u32,
}
#[derive(PartialEq, Debug, Clone, Copy)]
enum DelayState {
Normal = 0,
LowDelay = 200,
@ -41,6 +52,12 @@ enum DelayState {
Broken = 1000,
}
impl Default for DelayState {
fn default() -> Self {
DelayState::Normal
}
}
impl DelayState {
fn from_delay(delay: u32) -> Self {
if delay > DelayState::Broken as u32 {
@ -59,187 +76,290 @@ impl Default for VideoQoS {
fn default() -> Self {
VideoQoS {
fps: FPS,
user_fps: FPS,
user_image_quality: ImageQuality::Balanced.as_percent(),
current_image_quality: ImageQuality::Balanced.as_percent(),
enable_abr: false,
width: 0,
height: 0,
current_delay: 0,
target_bitrate: 0,
updated: false,
state: DelayState::Normal,
debounce_count: 0,
quality: Default::default(),
users: Default::default(),
bitrate_store: 0,
}
}
}
#[derive(Debug, PartialEq, Eq)]
pub enum RefreshType {
SetImageQuality,
}
impl VideoQoS {
pub fn set_size(&mut self, width: u32, height: u32) {
if width == 0 || height == 0 {
return;
}
self.width = width;
self.height = height;
}
pub fn spf(&mut self) -> Duration {
if self.fps < MIN_FPS || self.fps > MAX_FPS {
self.fps = self.base_fps();
}
pub fn spf(&self) -> Duration {
Duration::from_secs_f32(1. / (self.fps as f32))
}
fn base_fps(&self) -> u8 {
if self.user_fps >= MIN_FPS && self.user_fps <= MAX_FPS {
return self.user_fps;
}
return FPS;
pub fn fps(&self) -> u32 {
self.fps
}
// update_network_delay periodically
// decrease the bitrate when the delay gets bigger
pub fn update_network_delay(&mut self, delay: u32) {
if self.current_delay.eq(&0) {
self.current_delay = delay;
pub fn store_bitrate(&mut self, bitrate: u32) {
self.bitrate_store = bitrate;
}
pub fn bitrate(&self) -> u32 {
self.bitrate_store
}
pub fn quality(&self) -> Quality {
self.quality
}
pub fn abr_enabled() -> bool {
"N" != Config::get_option("enable-abr")
}
pub fn refresh(&mut self, typ: Option<RefreshType>) {
// fps
let user_fps = |u: &UserData| {
// full_speed_fps
let mut fps = u.full_speed_fps.unwrap_or_default() * 9 / 10;
// custom_fps
if let Some(custom_fps) = u.custom_fps {
if fps == 0 || custom_fps < fps {
fps = custom_fps;
}
}
// delay
if let Some(delay) = u.delay {
fps = match delay.state {
DelayState::Normal => fps,
DelayState::LowDelay => fps,
DelayState::HighDelay => fps / 2,
DelayState::Broken => fps / 4,
}
}
// delay response
if u.response_delayed {
if fps > MIN_FPS + 2 {
fps = MIN_FPS + 2;
}
}
return fps;
};
let mut fps = self
.users
.iter()
.map(|(_, u)| user_fps(u))
.filter(|u| *u >= MIN_FPS)
.min()
.unwrap_or(FPS);
if fps > MAX_FPS {
fps = MAX_FPS;
}
self.fps = fps;
// quality
// latest image quality
let latest_quality = self
.users
.iter()
.map(|(_, u)| u.quality)
.filter(|q| *q != None)
.max_by(|a, b| a.unwrap_or_default().0.cmp(&b.unwrap_or_default().0))
.unwrap_or_default()
.unwrap_or_default()
.1;
let mut quality = latest_quality;
// network delay
if Self::abr_enabled() && typ != Some(RefreshType::SetImageQuality) {
// max delay
let delay = self
.users
.iter()
.map(|u| u.1.delay)
.filter(|d| d.is_some())
.max_by(|a, b| {
(a.unwrap_or_default().state as u32).cmp(&(b.unwrap_or_default().state as u32))
});
let delay = delay.unwrap_or_default().unwrap_or_default().state;
if delay != DelayState::Normal {
match self.quality {
Quality::Best => {
quality = Quality::Balanced;
}
Quality::Balanced => {
quality = Quality::Low;
}
Quality::Low => {
quality = Quality::Low;
}
Quality::Custom(b) => match delay {
DelayState::LowDelay => {
quality =
Quality::Custom(if b >= 150 { 100 } else { std::cmp::min(50, b) });
}
DelayState::HighDelay => {
quality =
Quality::Custom(if b >= 100 { 50 } else { std::cmp::min(25, b) });
}
DelayState::Broken => {
quality =
Quality::Custom(if b >= 50 { 25 } else { std::cmp::min(10, b) });
}
DelayState::Normal => {}
},
}
} else {
match self.quality {
Quality::Low => {
if latest_quality == Quality::Best {
quality = Quality::Balanced;
}
}
Quality::Custom(current_b) => {
if let Quality::Custom(latest_b) = latest_quality {
if current_b < latest_b / 2 {
quality = Quality::Custom(latest_b / 2);
}
}
}
_ => {}
}
}
}
self.quality = quality;
}
pub fn user_custom_fps(&mut self, id: i32, fps: u32) {
if fps < MIN_FPS {
return;
}
self.current_delay = delay / 2 + self.current_delay / 2;
log::trace!(
"VideoQoS update_network_delay:{}, {}, state:{:?}",
self.current_delay,
delay,
self.state,
);
// ABR
if !self.enable_abr {
return;
}
let current_state = DelayState::from_delay(self.current_delay);
if current_state != self.state && self.debounce_count > 5 {
log::debug!(
"VideoQoS state changed:{:?} -> {:?}",
self.state,
current_state
if let Some(user) = self.users.get_mut(&id) {
user.custom_fps = Some(fps);
} else {
self.users.insert(
id,
UserData {
custom_fps: Some(fps),
..Default::default()
},
);
self.state = current_state;
self.debounce_count = 0;
self.refresh_quality();
}
self.refresh(None);
}
pub fn user_full_speed_fps(&mut self, id: i32, full_speed_fps: u32) {
if let Some(user) = self.users.get_mut(&id) {
user.full_speed_fps = Some(full_speed_fps);
} else {
self.debounce_count += 1;
self.users.insert(
id,
UserData {
full_speed_fps: Some(full_speed_fps),
..Default::default()
},
);
}
self.refresh(None);
}
fn refresh_quality(&mut self) {
match self.state {
DelayState::Normal => {
self.fps = self.base_fps();
self.current_image_quality = self.user_image_quality;
pub fn user_image_quality(&mut self, id: i32, image_quality: i32) {
// https://github.com/rustdesk/rustdesk/blob/d716e2b40c38737f1aa3f16de0dec67394a6ac68/src/server/video_service.rs#L493
let convert_quality = |q: i32| {
if q == ImageQuality::Balanced.value() {
Quality::Balanced
} else if q == ImageQuality::Low.value() {
Quality::Low
} else if q == ImageQuality::Best.value() {
Quality::Best
} else {
let mut b = (q >> 8 & 0xFF) * 2;
b = std::cmp::max(b, 10);
b = std::cmp::min(b, 200);
Quality::Custom(b as u32)
}
DelayState::LowDelay => {
self.fps = self.base_fps();
self.current_image_quality = std::cmp::min(self.user_image_quality, 50);
}
DelayState::HighDelay => {
self.fps = self.base_fps() / 2;
self.current_image_quality = std::cmp::min(self.user_image_quality, 25);
}
DelayState::Broken => {
self.fps = self.base_fps() / 4;
self.current_image_quality = 10;
}
}
let _ = self.generate_bitrate().ok();
self.updated = true;
}
};
// handle image_quality change from peer
pub fn update_image_quality(&mut self, image_quality: i32) {
if image_quality == ImageQuality::Low.value()
|| image_quality == ImageQuality::Balanced.value()
|| image_quality == ImageQuality::Best.value()
{
// not custom
self.user_fps = FPS;
self.fps = FPS;
}
let image_quality = Self::convert_quality(image_quality) as _;
if self.current_image_quality != image_quality {
self.current_image_quality = image_quality;
let _ = self.generate_bitrate().ok();
self.updated = true;
}
self.user_image_quality = self.current_image_quality;
}
pub fn update_user_fps(&mut self, fps: u8) {
if fps >= MIN_FPS && fps <= MAX_FPS {
if self.user_fps != fps {
self.user_fps = fps;
self.fps = fps;
self.updated = true;
}
}
}
pub fn generate_bitrate(&mut self) -> ResultType<u32> {
// https://www.nvidia.com/en-us/geforce/guides/broadcasting-guide/
if self.width == 0 || self.height == 0 {
bail!("Fail to generate_bitrate, width or height is not set");
}
if self.current_image_quality == 0 {
self.current_image_quality = ImageQuality::Balanced.as_percent();
}
let base_bitrate = ((self.width * self.height) / 800) as u32;
#[cfg(target_os = "android")]
{
// fix when android screen shrinks
let fix = scrap::Display::fix_quality() as u32;
log::debug!("Android screen, fix quality:{}", fix);
let base_bitrate = base_bitrate * fix;
self.target_bitrate = base_bitrate * self.current_image_quality / 100;
Ok(self.target_bitrate)
}
#[cfg(not(target_os = "android"))]
{
self.target_bitrate = base_bitrate * self.current_image_quality / 100;
Ok(self.target_bitrate)
}
}
pub fn check_if_updated(&mut self) -> bool {
if self.updated {
self.updated = false;
return true;
}
return false;
}
pub fn reset(&mut self) {
self.fps = FPS;
self.user_fps = FPS;
self.updated = true;
}
pub fn check_abr_config(&mut self) -> bool {
self.enable_abr = "N" != Config::get_option("enable-abr");
self.enable_abr
}
pub fn convert_quality(q: i32) -> i32 {
if q == ImageQuality::Balanced.value() {
100 * 2 / 3
} else if q == ImageQuality::Low.value() {
100 / 2
} else if q == ImageQuality::Best.value() {
100
let quality = Some((hbb_common::get_time(), convert_quality(image_quality)));
if let Some(user) = self.users.get_mut(&id) {
user.quality = quality;
} else {
(q >> 8 & 0xFF) * 2
self.users.insert(
id,
UserData {
quality,
..Default::default()
},
);
}
self.refresh(Some(RefreshType::SetImageQuality));
}
pub fn user_network_delay(&mut self, id: i32, delay: u32) {
let state = DelayState::from_delay(delay);
let debounce = 3;
if let Some(user) = self.users.get_mut(&id) {
if let Some(d) = &mut user.delay {
d.delay = (delay + d.delay) / 2;
let new_state = DelayState::from_delay(d.delay);
let slower_than_old_state = new_state as i32 - d.staging_state as i32;
let slower_than_old_state = if slower_than_old_state > 0 {
Some(true)
} else if slower_than_old_state < 0 {
Some(false)
} else {
None
};
if d.slower_than_old_state == slower_than_old_state {
let old_counter = d.counter;
d.counter += delay / 1000 + 1;
if old_counter < debounce && d.counter >= debounce {
d.counter = 0;
d.state = d.staging_state;
d.staging_state = new_state;
}
if d.counter % debounce == 0 {
self.refresh(None);
}
} else {
d.counter = 0;
d.staging_state = new_state;
d.slower_than_old_state = slower_than_old_state;
}
} else {
user.delay = Some(Delay {
state: DelayState::Normal,
staging_state: state,
delay,
counter: 0,
slower_than_old_state: None,
});
}
} else {
self.users.insert(
id,
UserData {
delay: Some(Delay {
state: DelayState::Normal,
staging_state: state,
delay,
counter: 0,
slower_than_old_state: None,
}),
..Default::default()
},
);
}
}
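A worked sketch of the smoothing above (simplified, and assuming the state thresholds follow the DelayState discriminants): each report is averaged with the previous value, so a single spike cannot flip the state, and the counter, which grows faster for larger delays, must reach the debounce of 3 before the staged state is committed.

// --- illustrative example (not part of this commit) ---
fn smooth(previous_ms: u32, reported_ms: u32) -> u32 {
    (previous_ms + reported_ms) / 2
}

fn main() {
    // One 1200 ms spike on a 100 ms baseline only moves the smoothed delay to
    // 650 ms, so the state machine stages HighDelay instead of jumping straight
    // to Broken, and still waits for the debounce counter before acting on it.
    assert_eq!(smooth(100, 1200), 650);
}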
pub fn user_delay_response_elapsed(&mut self, id: i32, elapsed: u128) {
if let Some(user) = self.users.get_mut(&id) {
let old = user.response_delayed;
user.response_delayed = elapsed > 3000;
if old != user.response_delayed {
self.refresh(None);
}
}
}
pub fn on_connection_close(&mut self, id: i32) {
self.users.remove(&id);
self.refresh(None);
}
}
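And a minimal sketch of how refresh() folds the per-user values into one capture rate (constants copied from this file, helper name hypothetical): every user contributes a cap, and the encoder follows the slowest consumer.

// --- illustrative example (not part of this commit) ---
const MIN_FPS: u32 = 1;
const MAX_FPS: u32 = 120;
const FPS: u32 = 30;

fn combined_fps(per_user_fps: &[u32]) -> u32 {
    per_user_fps
        .iter()
        .copied()
        .filter(|f| *f >= MIN_FPS)
        .min()
        .unwrap_or(FPS)
        .min(MAX_FPS)
}

fn main() {
    // One viewer decodes at a full speed of 40 (36 after the 9/10 headroom),
    // another asked for a custom 20 fps: the shared encoder runs at 20.
    assert_eq!(combined_fps(&[36, 20]), 20);
    // No users yet: fall back to the default.
    assert_eq!(combined_fps(&[]), FPS);
}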

View File

@ -144,7 +144,7 @@ pub fn capture_cursor_embedded() -> bool {
#[inline]
pub fn notify_video_frame_fetched(conn_id: i32, frame_tm: Option<Instant>) {
FRAME_FETCHED_NOTIFIER.0.send((conn_id, frame_tm)).unwrap()
FRAME_FETCHED_NOTIFIER.0.send((conn_id, frame_tm)).ok();
}
#[inline]
@ -514,12 +514,12 @@ fn run(sp: GenericService) -> ResultType<()> {
let mut c = get_capturer(true, last_portable_service_running)?;
let mut video_qos = VIDEO_QOS.lock().unwrap();
video_qos.set_size(c.width as _, c.height as _);
let mut spf = video_qos.spf();
let bitrate = video_qos.generate_bitrate()?;
let abr = video_qos.check_abr_config();
video_qos.refresh(None);
let mut spf;
let mut quality = video_qos.quality();
let abr = VideoQoS::abr_enabled();
drop(video_qos);
log::info!("init bitrate={}, abr enabled:{}", bitrate, abr);
log::info!("init quality={:?}, abr enabled:{}", quality, abr);
let encoder_cfg = match Encoder::negotiated_codec() {
scrap::CodecName::H264(name) | scrap::CodecName::H265(name) => {
@ -527,14 +527,15 @@ fn run(sp: GenericService) -> ResultType<()> {
name,
width: c.width,
height: c.height,
bitrate: bitrate as _,
quality,
})
}
name @ (scrap::CodecName::VP8 | scrap::CodecName::VP9) => {
EncoderCfg::VPX(VpxEncoderConfig {
width: c.width as _,
height: c.height as _,
bitrate,
timebase: [1, 1000], // Output timestamp precision
quality,
codec: if name == scrap::CodecName::VP8 {
VpxVideoCodecId::VP8
} else {
@ -545,7 +546,7 @@ fn run(sp: GenericService) -> ResultType<()> {
scrap::CodecName::AV1 => EncoderCfg::AOM(AomEncoderConfig {
width: c.width as _,
height: c.height as _,
bitrate: bitrate as _,
quality,
}),
};
@ -555,6 +556,7 @@ fn run(sp: GenericService) -> ResultType<()> {
Err(err) => bail!("Failed to create encoder: {}", err),
}
c.set_use_yuv(encoder.use_yuv());
VIDEO_QOS.lock().unwrap().store_bitrate(encoder.bitrate());
if *SWITCH.lock().unwrap() {
log::debug!("Broadcasting display switch");
@ -608,14 +610,12 @@ fn run(sp: GenericService) -> ResultType<()> {
check_uac_switch(c.privacy_mode_id, c._capturer_privacy_mode_id)?;
let mut video_qos = VIDEO_QOS.lock().unwrap();
if video_qos.check_if_updated() && video_qos.target_bitrate > 0 {
log::debug!(
"qos is updated, target_bitrate:{}, fps:{}",
video_qos.target_bitrate,
video_qos.fps
);
allow_err!(encoder.set_bitrate(video_qos.target_bitrate));
spf = video_qos.spf();
spf = video_qos.spf();
if quality != video_qos.quality() {
log::debug!("quality: {:?} -> {:?}", quality, video_qos.quality());
quality = video_qos.quality();
allow_err!(encoder.set_quality(quality));
video_qos.store_bitrate(encoder.bitrate());
}
drop(video_qos);
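The spf() consumed above is simply the reciprocal of the negotiated fps; a small sketch of the pacing it implies:

// --- illustrative example (not part of this commit) ---
use std::time::Duration;

fn spf(fps: u32) -> Duration {
    Duration::from_secs_f32(1.0 / fps.max(1) as f32)
}

fn main() {
    // 30 fps is roughly a 33 ms frame interval; a QoS drop to 10 fps triples it.
    println!("30 fps -> {:?} per frame", spf(30));
    assert!(spf(30) < spf(10));
}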

View File

@ -124,8 +124,16 @@ pub fn start(args: &mut [String]) {
crate::platform::windows::enable_lowlevel_keyboard(hw as _);
}
let mut iter = args.iter();
let cmd = iter.next().unwrap().clone();
let id = iter.next().unwrap().clone();
let Some(cmd) = iter.next() else {
log::error!("Failed to get cmd arg");
return;
};
let cmd = cmd.to_owned();
let Some(id) = iter.next() else {
log::error!("Failed to get id arg");
return;
};
let id = id.to_owned();
let pass = iter.next().unwrap_or(&"".to_owned()).clone();
let args: Vec<String> = iter.map(|x| x.clone()).collect();
frame.set_title(&id);
@ -259,7 +267,8 @@ impl UI {
}
fn get_options(&self) -> Value {
let hashmap: HashMap<String, String> = serde_json::from_str(&get_options()).unwrap();
let hashmap: HashMap<String, String> =
serde_json::from_str(&get_options()).unwrap_or_default();
let mut m = Value::map();
for (k, v) in hashmap {
m.set_item(k, v);

View File

@ -377,7 +377,7 @@ impl sciter::EventHandler for SciterSession {
let source = Element::from(source);
use sciter::dom::ELEMENT_AREAS;
let flags = ELEMENT_AREAS::CONTENT_BOX as u32 | ELEMENT_AREAS::SELF_RELATIVE as u32;
let rc = source.get_location(flags).unwrap();
let rc = source.get_location(flags).unwrap_or_default();
log::debug!(
"[video] start video thread on <{}> which is about {:?} pixels",
source,

View File

@ -233,7 +233,7 @@ pub fn get_options() -> String {
for (k, v) in options.iter() {
m.insert(k.into(), v.to_owned().into());
}
serde_json::to_string(&m).unwrap()
serde_json::to_string(&m).unwrap_or_default()
}
#[inline]
@ -1112,23 +1112,23 @@ async fn check_id(
{
match msg_in.union {
Some(rendezvous_message::Union::RegisterPkResponse(rpr)) => {
match rpr.result.enum_value_or_default() {
register_pk_response::Result::OK => {
match rpr.result.enum_value() {
Ok(register_pk_response::Result::OK) => {
ok = true;
}
register_pk_response::Result::ID_EXISTS => {
Ok(register_pk_response::Result::ID_EXISTS) => {
return "Not available";
}
register_pk_response::Result::TOO_FREQUENT => {
Ok(register_pk_response::Result::TOO_FREQUENT) => {
return "Too frequent";
}
register_pk_response::Result::NOT_SUPPORT => {
Ok(register_pk_response::Result::NOT_SUPPORT) => {
return "server_not_support";
}
register_pk_response::Result::SERVER_ERROR => {
Ok(register_pk_response::Result::SERVER_ERROR) => {
return "Server error";
}
register_pk_response::Result::INVALID_ID_FORMAT => {
Ok(register_pk_response::Result::INVALID_ID_FORMAT) => {
return INVALID_FORMAT;
}
_ => {}