Merge branch 'master' of github.com:asur4s/rustdesk

Asura committed 2022-07-11 08:17:17 -07:00
52 changed files with 2351 additions and 865 deletions

src/server/connection.rs

@@ -3,6 +3,7 @@ use super::{input_service::*, *};
use crate::clipboard_file::*;
#[cfg(not(any(target_os = "android", target_os = "ios")))]
use crate::common::update_clipboard;
use crate::video_service;
#[cfg(any(target_os = "android", target_os = "ios"))]
use crate::{common::MOBILE_INFO2, mobile::connection_manager::start_channel};
use crate::{ipc, VERSION};
@@ -69,7 +70,6 @@ pub struct Connection {
audio: bool,
file: bool,
last_test_delay: i64,
-image_quality: i32,
lock_after_session_end: bool,
show_remote_cursor: bool, // by peer
ip: String,
@@ -105,7 +105,7 @@ impl Subscriber for ConnInner {
}
}
-const TEST_DELAY_TIMEOUT: Duration = Duration::from_secs(3);
+const TEST_DELAY_TIMEOUT: Duration = Duration::from_secs(1);
const SEC30: Duration = Duration::from_secs(30);
const H1: Duration = Duration::from_secs(3600);
const MILLI1: Duration = Duration::from_millis(1);
@@ -154,7 +154,6 @@ impl Connection {
audio: Config::get_option("enable-audio").is_empty(),
file: Config::get_option("enable-file-transfer").is_empty(),
last_test_delay: 0,
-image_quality: ImageQuality::Balanced.value(),
lock_after_session_end: false,
show_remote_cursor: false,
ip: "".to_owned(),
@@ -376,8 +375,11 @@ impl Connection {
if time > 0 && conn.last_test_delay == 0 {
conn.last_test_delay = time;
let mut msg_out = Message::new();
let qos = video_service::VIDEO_QOS.lock().unwrap();
msg_out.set_test_delay(TestDelay{
time,
last_delay:qos.current_delay,
target_bitrate:qos.target_bitrate,
..Default::default()
});
conn.inner.send(msg_out.into());
@@ -394,8 +396,8 @@ impl Connection {
let _ = privacy_mode::turn_off_privacy(0);
}
video_service::notify_video_frame_feched(id, None);
-video_service::update_test_latency(id, 0);
-video_service::update_image_quality(id, None);
+scrap::codec::Encoder::update_video_encoder(id, scrap::codec::EncoderUpdate::Remove);
+video_service::VIDEO_QOS.lock().unwrap().reset();
if let Err(err) = conn.try_port_forward_loop(&mut rx_from_cm).await {
conn.on_close(&err.to_string(), false);
}
@@ -665,7 +667,7 @@ impl Connection {
res.set_peer_info(pi);
} else {
try_activate_screen();
-match super::video_service::get_displays() {
+match video_service::get_displays() {
Err(err) => {
res.set_error(format!("X11 error: {}", err));
}
@@ -781,6 +783,22 @@ impl Connection {
if let Some(message::Union::login_request(lr)) = msg.union {
if let Some(o) = lr.option.as_ref() {
self.update_option(o).await;
if let Some(q) = o.video_codec_state.clone().take() {
scrap::codec::Encoder::update_video_encoder(
self.inner.id(),
scrap::codec::EncoderUpdate::State(q),
);
} else {
scrap::codec::Encoder::update_video_encoder(
self.inner.id(),
scrap::codec::EncoderUpdate::DisableHwIfNotExist,
);
}
} else {
scrap::codec::Encoder::update_video_encoder(
self.inner.id(),
scrap::codec::EncoderUpdate::DisableHwIfNotExist,
);
}
self.video_ack_required = lr.video_ack_required;
if self.authorized {
@@ -887,10 +905,11 @@ impl Connection {
self.inner.send(msg_out.into());
} else {
self.last_test_delay = 0;
-let latency = crate::get_time() - t.time;
-if latency > 0 {
-super::video_service::update_test_latency(self.inner.id(), latency);
-}
+let new_delay = (crate::get_time() - t.time) as u32;
+video_service::VIDEO_QOS
+.lock()
+.unwrap()
+.update_network_delay(new_delay);
}
} else if self.authorized {
match msg.union {
@@ -1066,7 +1085,7 @@ impl Connection {
},
Some(message::Union::misc(misc)) => match misc.union {
Some(misc::Union::switch_display(s)) => {
-super::video_service::switch_display(s.display);
+video_service::switch_display(s.display);
}
Some(misc::Union::chat_message(c)) => {
self.send_to_cm(ipc::Data::ChatMessage { text: c.text });
@@ -1076,7 +1095,7 @@ impl Connection {
}
Some(misc::Union::refresh_video(r)) => {
if r {
-super::video_service::refresh();
+video_service::refresh();
}
}
Some(misc::Union::video_received(_)) => {
@@ -1096,13 +1115,20 @@ impl Connection {
async fn update_option(&mut self, o: &OptionMessage) {
log::info!("Option update: {:?}", o);
if let Ok(q) = o.image_quality.enum_value() {
-self.image_quality = q.value();
-super::video_service::update_image_quality(self.inner.id(), Some(q.value()));
-}
-let q = o.custom_image_quality;
-if q > 0 {
-self.image_quality = q;
-super::video_service::update_image_quality(self.inner.id(), Some(q));
+let image_quality;
+if let ImageQuality::NotSet = q {
+if o.custom_image_quality > 0 {
+image_quality = o.custom_image_quality;
+} else {
+image_quality = ImageQuality::Balanced.value();
+}
+} else {
+image_quality = q.value();
+}
+video_service::VIDEO_QOS
+.lock()
+.unwrap()
+.update_image_quality(image_quality);
}
if let Ok(q) = o.lock_after_session_end.enum_value() {
if q != BoolOption::NotSet {

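The connection-side changes above wire the periodic TestDelay exchange into the new QoS module: the outgoing message is stamped with the send time plus the current delay and target bitrate, the peer echoes it back, and the elapsed time becomes the network-delay sample handed to VideoQoS. A minimal sketch of that round trip, with hypothetical names standing in for the connection plumbing (the real code keeps the send time in conn.last_test_delay and uses crate::get_time() for timestamps):

// Illustrative only: DelayProbe is not a real rustdesk type.
struct DelayProbe {
    sent_at_ms: i64,
}

impl DelayProbe {
    // Server side: stamp the TestDelay when it is sent.
    fn send(now_ms: i64) -> Self {
        DelayProbe { sent_at_ms: now_ms }
    }
    // Server side: the peer echoed the TestDelay unchanged, so the elapsed time
    // is the value passed to VideoQoS::update_network_delay().
    fn on_echo(&self, now_ms: i64) -> u32 {
        (now_ms - self.sent_at_ms).max(0) as u32
    }
}

fn main() {
    let probe = DelayProbe::send(1_000);
    assert_eq!(probe.on_echo(1_180), 180); // 180 ms round trip
}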
src/server/video_qos.rs (new file, 219 lines)

@@ -0,0 +1,219 @@
use super::*;
use std::time::Duration;
const FPS: u8 = 30;
trait Percent {
fn as_percent(&self) -> u32;
}
impl Percent for ImageQuality {
fn as_percent(&self) -> u32 {
match self {
ImageQuality::NotSet => 0,
ImageQuality::Low => 50,
ImageQuality::Balanced => 66,
ImageQuality::Best => 100,
}
}
}
pub struct VideoQoS {
width: u32,
height: u32,
user_image_quality: u32,
current_image_quality: u32,
enable_abr: bool,
pub current_delay: u32,
pub fps: u8, // abr
pub target_bitrate: u32, // abr
updated: bool,
state: DelayState,
debounce_count: u32,
}
#[derive(PartialEq, Debug)]
enum DelayState {
Normal = 0,
LowDelay = 200,
HighDelay = 500,
Broken = 1000,
}
impl DelayState {
fn from_delay(delay: u32) -> Self {
if delay > DelayState::Broken as u32 {
DelayState::Broken
} else if delay > DelayState::HighDelay as u32 {
DelayState::HighDelay
} else if delay > DelayState::LowDelay as u32 {
DelayState::LowDelay
} else {
DelayState::Normal
}
}
}
impl Default for VideoQoS {
fn default() -> Self {
VideoQoS {
fps: FPS,
user_image_quality: ImageQuality::Balanced.as_percent(),
current_image_quality: ImageQuality::Balanced.as_percent(),
enable_abr: false,
width: 0,
height: 0,
current_delay: 0,
target_bitrate: 0,
updated: false,
state: DelayState::Normal,
debounce_count: 0,
}
}
}
impl VideoQoS {
pub fn set_size(&mut self, width: u32, height: u32) {
if width == 0 || height == 0 {
return;
}
self.width = width;
self.height = height;
}
pub fn spf(&mut self) -> Duration {
if self.fps <= 0 {
self.fps = FPS;
}
Duration::from_secs_f32(1. / (self.fps as f32))
}
// update_network_delay periodically
// decrease the bitrate when the delay gets bigger
pub fn update_network_delay(&mut self, delay: u32) {
if self.current_delay.eq(&0) {
self.current_delay = delay;
return;
}
self.current_delay = delay / 2 + self.current_delay / 2;
log::trace!(
"VideoQoS update_network_delay:{}, {}, state:{:?}",
self.current_delay,
delay,
self.state,
);
// ABR
if !self.enable_abr {
return;
}
let current_state = DelayState::from_delay(self.current_delay);
if current_state != self.state && self.debounce_count > 5 {
log::debug!(
"VideoQoS state changed:{:?} -> {:?}",
self.state,
current_state
);
self.state = current_state;
self.debounce_count = 0;
self.refresh_quality();
} else {
self.debounce_count += 1;
}
}
fn refresh_quality(&mut self) {
match self.state {
DelayState::Normal => {
self.fps = FPS;
self.current_image_quality = self.user_image_quality;
}
DelayState::LowDelay => {
self.fps = FPS;
self.current_image_quality = std::cmp::min(self.user_image_quality, 50);
}
DelayState::HighDelay => {
self.fps = FPS / 2;
self.current_image_quality = std::cmp::min(self.user_image_quality, 25);
}
DelayState::Broken => {
self.fps = FPS / 4;
self.current_image_quality = 10;
}
}
let _ = self.generate_bitrate().ok();
self.updated = true;
}
// handle image_quality change from peer
pub fn update_image_quality(&mut self, image_quality: i32) {
let image_quality = Self::convert_quality(image_quality) as _;
log::debug!("VideoQoS update_image_quality: {}", image_quality);
if self.current_image_quality != image_quality {
self.current_image_quality = image_quality;
let _ = self.generate_bitrate().ok();
self.updated = true;
}
self.user_image_quality = self.current_image_quality;
}
pub fn generate_bitrate(&mut self) -> ResultType<u32> {
// https://www.nvidia.com/en-us/geforce/guides/broadcasting-guide/
if self.width == 0 || self.height == 0 {
bail!("Fail to generate_bitrate, width or height is not set");
}
if self.current_image_quality == 0 {
self.current_image_quality = ImageQuality::Balanced.as_percent();
}
let base_bitrate = ((self.width * self.height) / 800) as u32;
#[cfg(target_os = "android")]
{
// fix when andorid screen shrinks
let fix = Display::fix_quality() as u32;
log::debug!("Android screen, fix quality:{}", fix);
let base_bitrate = base_bitrate * fix;
self.target_bitrate = base_bitrate * self.current_image_quality / 100;
Ok(self.target_bitrate)
}
#[cfg(not(target_os = "android"))]
{
self.target_bitrate = base_bitrate * self.current_image_quality / 100;
Ok(self.target_bitrate)
}
}
pub fn check_if_updated(&mut self) -> bool {
if self.updated {
self.updated = false;
return true;
}
return false;
}
pub fn reset(&mut self) {
*self = Default::default();
}
pub fn check_abr_config(&mut self) -> bool {
self.enable_abr = if let Some(v) = Config2::get().options.get("enable-abr") {
v != "N"
} else {
true // default is true
};
self.enable_abr
}
pub fn convert_quality(q: i32) -> i32 {
if q == ImageQuality::Balanced.value() {
100 * 2 / 3
} else if q == ImageQuality::Low.value() {
100 / 2
} else if q == ImageQuality::Best.value() {
100
} else {
(q >> 8 & 0xFF) * 2
}
}
}
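To get a feel for the numbers generate_bitrate produces, here is the math worked for a 1920x1080 capture on the non-Android path; the unit is whatever the encoder config treats the target bitrate as (presumably kbps, per the usual VPX convention), and the percentages come from as_percent() and refresh_quality() above:

// Worked example of the bitrate formula above (illustrative values only).
fn main() {
    let (width, height) = (1920u32, 1080u32);
    let base_bitrate = (width * height) / 800; // 2_073_600 / 800 = 2592
    let balanced = base_bitrate * 66 / 100;    // Normal state, Balanced quality -> 1710
    let low_delay = base_bitrate * 50 / 100;   // LowDelay cap                   -> 1296
    let broken = base_bitrate * 10 / 100;      // Broken link                    -> 259
    println!("base={base_bitrate} balanced={balanced} low_delay={low_delay} broken={broken}");
}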

src/server/video_service.rs

@@ -18,12 +18,16 @@
// to-do:
// https://slhck.info/video/2017/03/01/rate-control.html
-use super::*;
+use super::{video_qos::VideoQoS, *};
use hbb_common::tokio::sync::{
mpsc::{unbounded_channel, UnboundedReceiver, UnboundedSender},
Mutex as TokioMutex,
};
-use scrap::{Capturer, Config, Display, EncodeFrame, Encoder, Frame, VideoCodecId, STRIDE_ALIGN};
+use scrap::{
+codec::{Encoder, EncoderCfg, HwEncoderConfig},
+vpxcodec::{VpxEncoderConfig, VpxVideoCodecId},
+Capturer, Display, Frame,
+};
use std::{
collections::HashSet,
io::{ErrorKind::WouldBlock, Result},
@@ -38,14 +42,13 @@ lazy_static::lazy_static! {
static ref CURRENT_DISPLAY: Arc<Mutex<usize>> = Arc::new(Mutex::new(usize::MAX));
static ref LAST_ACTIVE: Arc<Mutex<Instant>> = Arc::new(Mutex::new(Instant::now()));
static ref SWITCH: Arc<Mutex<bool>> = Default::default();
-static ref TEST_LATENCIES: Arc<Mutex<HashMap<i32, i64>>> = Default::default();
-static ref IMAGE_QUALITIES: Arc<Mutex<HashMap<i32, i32>>> = Default::default();
static ref FRAME_FETCHED_NOTIFIER: (UnboundedSender<(i32, Option<Instant>)>, Arc<TokioMutex<UnboundedReceiver<(i32, Option<Instant>)>>>) = {
let (tx, rx) = unbounded_channel();
(tx, Arc::new(TokioMutex::new(rx)))
};
static ref PRIVACY_MODE_CONN_ID: Mutex<i32> = Mutex::new(0);
static ref IS_CAPTURER_MAGNIFIER_SUPPORTED: bool = is_capturer_mag_supported();
+pub static ref VIDEO_QOS: Arc<Mutex<VideoQoS>> = Default::default();
}
fn is_capturer_mag_supported() -> bool {
@@ -125,7 +128,7 @@ impl VideoFrameController {
}
trait TraitCapturer {
-fn frame<'a>(&'a mut self, timeout_ms: u32) -> Result<Frame<'a>>;
+fn frame<'a>(&'a mut self, timeout: Duration) -> Result<Frame<'a>>;
#[cfg(windows)]
fn is_gdi(&self) -> bool;
@@ -134,8 +137,8 @@ trait TraitCapturer {
}
impl TraitCapturer for Capturer {
-fn frame<'a>(&'a mut self, timeout_ms: u32) -> Result<Frame<'a>> {
-self.frame(timeout_ms)
+fn frame<'a>(&'a mut self, timeout: Duration) -> Result<Frame<'a>> {
+self.frame(timeout)
}
#[cfg(windows)]
@@ -151,7 +154,7 @@ impl TraitCapturer for Capturer {
#[cfg(windows)]
impl TraitCapturer for scrap::CapturerMag {
-fn frame<'a>(&'a mut self, _timeout_ms: u32) -> Result<Frame<'a>> {
+fn frame<'a>(&'a mut self, _timeout_ms: Duration) -> Result<Frame<'a>> {
self.frame(_timeout_ms)
}
@@ -201,9 +204,11 @@ fn check_display_changed(
}
// Capturer object is expensive, avoiding to create it frequently.
-fn create_capturer(privacy_mode_id: i32, display: Display) -> ResultType<Box<dyn TraitCapturer>> {
-let use_yuv = true;
+fn create_capturer(
+privacy_mode_id: i32,
+display: Display,
+use_yuv: bool,
+) -> ResultType<Box<dyn TraitCapturer>> {
#[cfg(not(windows))]
let c: Option<Box<dyn TraitCapturer>> = None;
#[cfg(windows)]
@@ -292,7 +297,7 @@ pub fn test_create_capturer(privacy_mode_id: i32, timeout_millis: u64) -> bool {
let test_begin = Instant::now();
while test_begin.elapsed().as_millis() < timeout_millis as _ {
if let Ok((_, _, display)) = get_current_display() {
-if let Ok(_) = create_capturer(privacy_mode_id, display) {
+if let Ok(_) = create_capturer(privacy_mode_id, display, true) {
return true;
}
}
@@ -320,9 +325,6 @@ fn run(sp: GenericService) -> ResultType<()> {
#[cfg(windows)]
ensure_close_virtual_device()?;
-let fps = 30;
-let wait = 1000 / fps;
-let spf = time::Duration::from_secs_f32(1. / (fps as f32));
let (ndisplay, current, display) = get_current_display()?;
let (origin, width, height) = (display.origin(), display.width(), display.height());
log::debug!(
@@ -336,6 +338,38 @@ fn run(sp: GenericService) -> ResultType<()> {
num_cpus::get(),
);
let mut video_qos = VIDEO_QOS.lock().unwrap();
video_qos.set_size(width as _, height as _);
let mut spf = video_qos.spf();
let bitrate = video_qos.generate_bitrate()?;
let abr = video_qos.check_abr_config();
drop(video_qos);
log::info!("init bitrate={}, abr enabled:{}", bitrate, abr);
let encoder_cfg = match Encoder::current_hw_encoder_name() {
Some(codec_name) => EncoderCfg::HW(HwEncoderConfig {
codec_name,
width,
height,
bitrate: bitrate as _,
}),
None => EncoderCfg::VPX(VpxEncoderConfig {
width: width as _,
height: height as _,
timebase: [1, 1000], // Output timestamp precision
bitrate,
codec: VpxVideoCodecId::VP9,
num_threads: (num_cpus::get() / 2) as _,
}),
};
let mut encoder;
match Encoder::new(encoder_cfg) {
Ok(x) => encoder = x,
Err(err) => bail!("Failed to create encoder: {}", err),
}
let privacy_mode_id = *PRIVACY_MODE_CONN_ID.lock().unwrap();
#[cfg(not(windows))]
let captuerer_privacy_mode_id = privacy_mode_id;
@@ -355,26 +389,7 @@ fn run(sp: GenericService) -> ResultType<()> {
} else {
log::info!("In privacy mode, the peer side cannot watch the screen");
}
-let mut c = create_capturer(captuerer_privacy_mode_id, display)?;
-let q = get_image_quality();
-let (bitrate, rc_min_quantizer, rc_max_quantizer, speed) = get_quality(width, height, q);
-log::info!("bitrate={}, rc_min_quantizer={}", bitrate, rc_min_quantizer);
-let cfg = Config {
-width: width as _,
-height: height as _,
-timebase: [1, 1000], // Output timestamp precision
-bitrate,
-codec: VideoCodecId::VP9,
-rc_min_quantizer,
-rc_max_quantizer,
-speed,
-};
-let mut vpx;
-match Encoder::new(&cfg, (num_cpus::get() / 2) as _) {
-Ok(x) => vpx = x,
-Err(err) => bail!("Failed to create encoder: {}", err),
-}
+let mut c = create_capturer(captuerer_privacy_mode_id, display, encoder.use_yuv())?;
if *SWITCH.lock().unwrap() {
log::debug!("Broadcasting display switch");
@@ -401,10 +416,24 @@ fn run(sp: GenericService) -> ResultType<()> {
let mut try_gdi = 1;
#[cfg(windows)]
log::info!("gdi: {}", c.is_gdi());
while sp.ok() {
#[cfg(windows)]
check_uac_switch(privacy_mode_id, captuerer_privacy_mode_id)?;
{
let mut video_qos = VIDEO_QOS.lock().unwrap();
if video_qos.check_if_updated() {
log::debug!(
"qos is updated, target_bitrate:{}, fps:{}",
video_qos.target_bitrate,
video_qos.fps
);
encoder.set_bitrate(video_qos.target_bitrate).unwrap();
spf = video_qos.spf();
}
}
if *SWITCH.lock().unwrap() {
bail!("SWITCH");
}
@@ -413,9 +442,6 @@ fn run(sp: GenericService) -> ResultType<()> {
bail!("SWITCH");
}
check_privacy_mode_changed(&sp, privacy_mode_id)?;
-if get_image_quality() != q {
-bail!("SWITCH");
-}
#[cfg(windows)]
{
if crate::platform::windows::desktop_changed() {
@@ -437,7 +463,7 @@ fn run(sp: GenericService) -> ResultType<()> {
frame_controller.reset();
#[cfg(any(target_os = "android", target_os = "ios"))]
-let res = match (*c).frame(wait as _) {
+let res = match c.frame(spf) {
Ok(frame) => {
let time = now - start;
let ms = (time.as_secs() * 1000 + time.subsec_millis() as u64) as i64;
@@ -448,7 +474,7 @@ fn run(sp: GenericService) -> ResultType<()> {
}
scrap::Frame::RAW(data) => {
if (data.len() != 0) {
-let send_conn_ids = handle_one_frame(&sp, data, ms, &mut vpx)?;
+let send_conn_ids = handle_one_frame(&sp, data, ms, &mut encoder)?;
frame_controller.set_send(now, send_conn_ids);
}
}
@@ -460,11 +486,11 @@ fn run(sp: GenericService) -> ResultType<()> {
};
#[cfg(not(any(target_os = "android", target_os = "ios")))]
-let res = match (*c).frame(wait as _) {
+let res = match c.frame(spf) {
Ok(frame) => {
let time = now - start;
let ms = (time.as_secs() * 1000 + time.subsec_millis() as u64) as i64;
-let send_conn_ids = handle_one_frame(&sp, &frame, ms, &mut vpx)?;
+let send_conn_ids = handle_one_frame(&sp, &frame, ms, &mut encoder)?;
frame_controller.set_send(now, send_conn_ids);
#[cfg(windows)]
{
@@ -531,7 +557,6 @@ fn run(sp: GenericService) -> ResultType<()> {
Ok(())
}
#[inline]
fn check_privacy_mode_changed(sp: &GenericService, privacy_mode_id: i32) -> ResultType<()> {
let privacy_mode_id_2 = *PRIVACY_MODE_CONN_ID.lock().unwrap();
if privacy_mode_id != privacy_mode_id_2 {
@@ -547,10 +572,11 @@ fn check_privacy_mode_changed(sp: &GenericService, privacy_mode_id: i32) -> Resu
}
#[inline]
-fn create_msg(vp9s: Vec<VP9>) -> Message {
+#[cfg(any(target_os = "android", target_os = "ios"))]
+fn create_msg(vp9s: Vec<EncodedVideoFrame>) -> Message {
let mut msg_out = Message::new();
let mut vf = VideoFrame::new();
-vf.set_vp9s(VP9s {
+vf.set_vp9s(EncodedVideoFrames {
frames: vp9s.into(),
..Default::default()
});
@@ -559,22 +585,12 @@ fn create_msg(vp9s: Vec<VP9>) -> Message {
msg_out
}
-#[inline]
-fn create_frame(frame: &EncodeFrame) -> VP9 {
-VP9 {
-data: frame.data.to_vec(),
-key: frame.key,
-pts: frame.pts,
-..Default::default()
-}
-}
#[inline]
fn handle_one_frame(
sp: &GenericService,
frame: &[u8],
ms: i64,
-vpx: &mut Encoder,
+encoder: &mut Encoder,
) -> ResultType<HashSet<i32>> {
sp.snapshot(|sps| {
// so that new sub and old sub share the same encoder after switch
@@ -585,20 +601,8 @@ fn handle_one_frame(
})?;
let mut send_conn_ids: HashSet<i32> = Default::default();
-let mut frames = Vec::new();
-for ref frame in vpx
-.encode(ms, frame, STRIDE_ALIGN)
-.with_context(|| "Failed to encode")?
-{
-frames.push(create_frame(frame));
-}
-for ref frame in vpx.flush().with_context(|| "Failed to flush")? {
-frames.push(create_frame(frame));
-}
-// to-do: flush periodically, e.g. 1 second
-if frames.len() > 0 {
-send_conn_ids = sp.send_video_frame(create_msg(frames));
+if let Ok(msg) = encoder.encode_to_message(frame, ms) {
+send_conn_ids = sp.send_video_frame(msg);
}
Ok(send_conn_ids)
}
@@ -618,7 +622,7 @@ pub fn handle_one_frame_encoded(
Ok(())
})?;
let mut send_conn_ids: HashSet<i32> = Default::default();
-let vp9_frame = VP9 {
+let vp9_frame = EncodedVideoFrame {
data: frame.to_vec(),
key: true,
pts: ms,
@@ -748,82 +752,3 @@ fn get_current_display() -> ResultType<(usize, usize, Display)> {
}
return Ok((n, current, displays.remove(current)));
}
#[inline]
fn update_latency(id: i32, latency: i64, latencies: &mut HashMap<i32, i64>) {
if latency <= 0 {
latencies.remove(&id);
} else {
latencies.insert(id, latency);
}
}
pub fn update_test_latency(id: i32, latency: i64) {
update_latency(id, latency, &mut *TEST_LATENCIES.lock().unwrap());
}
fn convert_quality(q: i32) -> i32 {
let q = {
if q == ImageQuality::Balanced.value() {
(100 * 2 / 3, 12)
} else if q == ImageQuality::Low.value() {
(100 / 2, 18)
} else if q == ImageQuality::Best.value() {
(100, 12)
} else {
let bitrate = q >> 8 & 0xFF;
let quantizer = q & 0xFF;
(bitrate * 2, (100 - quantizer) * 36 / 100)
}
};
if q.0 <= 0 {
0
} else {
q.0 << 8 | q.1
}
}
pub fn update_image_quality(id: i32, q: Option<i32>) {
match q {
Some(q) => {
let q = convert_quality(q);
if q > 0 {
IMAGE_QUALITIES.lock().unwrap().insert(id, q);
} else {
IMAGE_QUALITIES.lock().unwrap().remove(&id);
}
}
None => {
IMAGE_QUALITIES.lock().unwrap().remove(&id);
}
}
}
fn get_image_quality() -> i32 {
IMAGE_QUALITIES
.lock()
.unwrap()
.values()
.min()
.unwrap_or(&convert_quality(ImageQuality::Balanced.value()))
.clone()
}
#[inline]
fn get_quality(w: usize, h: usize, q: i32) -> (u32, u32, u32, i32) {
// https://www.nvidia.com/en-us/geforce/guides/broadcasting-guide/
let bitrate = q >> 8 & 0xFF;
let quantizer = q & 0xFF;
let b = ((w * h) / 1000) as u32;
#[cfg(target_os = "android")]
{
// fix when andorid screen shrinks
let fix = Display::fix_quality() as u32;
log::debug!("Android screen, fix quality:{}", fix);
let b = b * fix;
return (bitrate as u32 * b / 100, quantizer as _, 56, 7);
}
(bitrate as u32 * b / 100, quantizer as _, 56, 7)
}