Merge branch 'master' of github.com:asur4s/rustdesk

Asura
2022-07-11 08:17:17 -07:00
52 changed files with 2351 additions and 865 deletions

View File

@@ -1,13 +1,13 @@
syntax = "proto3";
package hbb;
message VP9 {
message EncodedVideoFrame {
bytes data = 1;
bool key = 2;
int64 pts = 3;
}
message VP9s { repeated VP9 frames = 1; }
message EncodedVideoFrames { repeated EncodedVideoFrame frames = 1; }
message RGB { bool compress = 1; }
@@ -19,9 +19,11 @@ message YUV {
message VideoFrame {
oneof union {
VP9s vp9s = 6;
EncodedVideoFrames vp9s = 6;
RGB rgb = 7;
YUV yuv = 8;
EncodedVideoFrames h264s = 10;
EncodedVideoFrames h265s = 11;
}
int64 timestamp = 9;
}
@@ -430,6 +432,12 @@ enum ImageQuality {
Best = 4;
}
message VideoCodecState {
int32 ScoreVpx = 1;
int32 ScoreH264 = 2;
int32 ScoreH265 = 3;
}
message OptionMessage {
enum BoolOption {
NotSet = 0;
@@ -445,11 +453,14 @@ message OptionMessage {
BoolOption disable_audio = 7;
BoolOption disable_clipboard = 8;
BoolOption enable_file_transfer = 9;
VideoCodecState video_codec_state = 10;
}
message TestDelay {
int64 time = 1;
bool from_client = 2;
uint32 last_delay = 3;
uint32 target_bitrate = 4;
}
message PublicKey {
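A minimal sketch of filling the renamed messages from Rust, mirroring the rust-protobuf API that codec.rs uses later in this commit (the function name is illustrative):

use hbb_common::message_proto::{EncodedVideoFrame, EncodedVideoFrames, Message, VideoFrame};

// Wrap one encoded VP9 packet in the codec-agnostic frame container.
fn wrap_vp9_packet(data: Vec<u8>, key: bool, pts: i64) -> Message {
    let mut vf = VideoFrame::new();
    vf.set_vp9s(EncodedVideoFrames {
        frames: vec![EncodedVideoFrame {
            data,
            key,
            pts,
            ..Default::default()
        }]
        .into(),
        ..Default::default()
    });
    let mut msg = Message::new();
    msg.set_video_frame(vf);
    msg
}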

View File

@@ -39,9 +39,16 @@ lazy_static::lazy_static! {
pub static ref PROD_RENDEZVOUS_SERVER: Arc<RwLock<String>> = Default::default();
pub static ref APP_NAME: Arc<RwLock<String>> = Arc::new(RwLock::new("RustDesk".to_owned()));
}
#[cfg(any(target_os = "android", target_os = "ios"))]
#[cfg(target_os = "android")]
lazy_static::lazy_static! {
pub static ref APP_DIR: Arc<RwLock<String>> = Arc::new(RwLock::new("/data/user/0/com.carriez.flutter_hbb/app_flutter".to_owned()));
}
#[cfg(target_os = "ios")]
lazy_static::lazy_static! {
pub static ref APP_DIR: Arc<RwLock<String>> = Default::default();
}
#[cfg(any(target_os = "android", target_os = "ios"))]
lazy_static::lazy_static! {
pub static ref APP_HOME_DIR: Arc<RwLock<String>> = Default::default();
}
const CHARS: &'static [char] = &[
@@ -139,6 +146,8 @@ pub struct PeerConfig {
pub disable_clipboard: bool,
#[serde(default)]
pub enable_file_transfer: bool,
#[serde(default)]
pub show_quality_monitor: bool,
// all other scalar values must come before this
#[serde(default)]
@@ -881,6 +890,22 @@ impl LanPeers {
}
}
#[derive(Debug, Default, Serialize, Deserialize, Clone)]
pub struct HwCodecConfig {
#[serde(default)]
pub options: HashMap<String, String>,
}
impl HwCodecConfig {
pub fn load() -> HwCodecConfig {
Config::load_::<HwCodecConfig>("_hwcodec")
}
pub fn store(&self) {
Config::store_(self, "_hwcodec");
}
}
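A minimal sketch of the intended round trip, assuming Config::load_/store_ persist a separate "_hwcodec" file alongside the main config (the helper name is illustrative; "bestHwEncoders" is the key scrap reads back later):

// Cache a probed encoder list for reuse across runs.
fn remember_best_encoders(serialized: String) {
    let mut cfg = HwCodecConfig::load();
    cfg.options.insert("bestHwEncoders".to_owned(), serialized);
    cfg.store();
}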
#[cfg(test)]
mod tests {
use super::*;

View File

@@ -18,6 +18,7 @@ cfg-if = "1.0"
libc = "0.2"
num_cpus = "1.13"
lazy_static = "1.4"
hbb_common = { path = "../hbb_common" }
[dependencies.winapi]
version = "0.3"
@@ -48,3 +49,6 @@ tracing = { version = "0.1", optional = true }
gstreamer = { version = "0.16", optional = true }
gstreamer-app = { version = "0.16", features = ["v1_10"], optional = true }
gstreamer-video = { version = "0.16", optional = true }
[target.'cfg(target_os = "windows")'.dependencies]
hwcodec = { git = "https://github.com/21pages/hwcodec", optional = true }

View File

@@ -21,6 +21,8 @@ fn get_display(i: usize) -> Display {
#[cfg(windows)]
fn record(i: usize) {
use std::time::Duration;
for d in Display::all().unwrap() {
println!("{:?} {} {}", d.origin(), d.width(), d.height());
}
@@ -40,7 +42,7 @@ fn record(i: usize) {
println!("Filter window for cls {} name {}", wnd_cls, wnd_name);
}
let frame = capture_mag.frame(0).unwrap();
let frame = capture_mag.frame(Duration::from_millis(0)).unwrap();
println!("Capture data len: {}, Saving...", frame.len());
let mut bitflipped = Vec::with_capacity(w * h * 4);
@@ -76,7 +78,7 @@ fn record(i: usize) {
println!("Filter window for cls {} title {}", wnd_cls, wnd_title);
}
let buffer = capture_mag.frame(0).unwrap();
let buffer = capture_mag.frame(Duration::from_millis(0)).unwrap();
println!("Capture data len: {}, Saving...", buffer.len());
let mut frame = Default::default();

View File

@@ -1,3 +1,5 @@
use std::time::Duration;
extern crate scrap;
fn main() {
@@ -29,7 +31,7 @@ fn main() {
let mut out = child.stdin.unwrap();
loop {
match capturer.frame(0) {
match capturer.frame(Duration::from_millis(0)) {
Ok(frame) => {
// Write the frame, removing end-of-row padding.
let stride = frame.len() / h;

View File

@@ -13,10 +13,11 @@ use std::time::{Duration, Instant};
use std::{io, thread};
use docopt::Docopt;
use scrap::codec::{EncoderApi, EncoderCfg};
use webm::mux;
use webm::mux::Track;
use scrap::codec as vpx_encode;
use scrap::vpxcodec as vpx_encode;
use scrap::{Capturer, Display, STRIDE_ALIGN};
const USAGE: &'static str = "
@@ -89,27 +90,22 @@ fn main() -> io::Result<()> {
mux::Segment::new(mux::Writer::new(out)).expect("Could not initialize the multiplexer.");
let (vpx_codec, mux_codec) = match args.flag_codec {
Codec::Vp8 => (vpx_encode::VideoCodecId::VP8, mux::VideoCodecId::VP8),
Codec::Vp9 => (vpx_encode::VideoCodecId::VP9, mux::VideoCodecId::VP9),
Codec::Vp8 => (vpx_encode::VpxVideoCodecId::VP8, mux::VideoCodecId::VP8),
Codec::Vp9 => (vpx_encode::VpxVideoCodecId::VP9, mux::VideoCodecId::VP9),
};
let mut vt = webm.add_video_track(width, height, None, mux_codec);
// Setup the encoder.
let mut vpx = vpx_encode::Encoder::new(
&vpx_encode::Config {
width,
height,
timebase: [1, 1000],
bitrate: args.flag_bv,
codec: vpx_codec,
rc_min_quantizer: 0,
rc_max_quantizer: 0,
speed: 6,
},
0,
)
let mut vpx = vpx_encode::VpxEncoder::new(EncoderCfg::VPX(vpx_encode::VpxEncoderConfig {
width,
height,
timebase: [1, 1000],
bitrate: args.flag_bv,
codec: vpx_codec,
num_threads: 0,
}))
.unwrap();
// Start recording.
@@ -138,7 +134,7 @@ fn main() -> io::Result<()> {
break;
}
if let Ok(frame) = c.frame(0) {
if let Ok(frame) = c.frame(Duration::from_millis(0)) {
let ms = time.as_secs() * 1000 + time.subsec_millis() as u64;
for frame in vpx.encode(ms as i64, &frame, STRIDE_ALIGN).unwrap() {

View File

@@ -34,7 +34,7 @@ fn record(i: usize) {
loop {
// Wait until there's a frame.
let buffer = match capturer.frame(0) {
let buffer = match capturer.frame(Duration::from_millis(0)) {
Ok(buffer) => buffer,
Err(error) => {
if error.kind() == WouldBlock {
@@ -83,7 +83,7 @@ fn record(i: usize) {
loop {
// Wait until there's a frame.
let buffer = match capturer.frame(0) {
let buffer = match capturer.frame(Duration::from_millis(0)) {
Ok(buffer) => buffer,
Err(error) => {
if error.kind() == WouldBlock {

View File

@@ -3,8 +3,8 @@ use crate::rgba_to_i420;
use lazy_static::lazy_static;
use serde_json::Value;
use std::collections::HashMap;
use std::io;
use std::sync::Mutex;
use std::{io, time::Duration};
lazy_static! {
static ref SCREEN_SIZE: Mutex<(u16, u16, u16)> = Mutex::new((0, 0, 0)); // (width, height, scale)
@@ -33,7 +33,7 @@ impl Capturer {
self.display.height() as usize
}
pub fn frame<'a>(&'a mut self, _timeout_ms: u32) -> io::Result<Frame<'a>> {
pub fn frame<'a>(&'a mut self, _timeout: Duration) -> io::Result<Frame<'a>> {
if let Some(buf) = get_video_raw() {
crate::would_block_if_equal(&mut self.saved_raw_data, buf)?;
rgba_to_i420(self.width(), self.height(), buf, &mut self.bgra);

View File

@@ -1,536 +1,327 @@
// https://github.com/astraw/vpx-encode
// https://github.com/astraw/env-libvpx-sys
// https://github.com/rust-av/vpx-rs/blob/master/src/decoder.rs
use super::vpx::{vp8e_enc_control_id::*, vpx_codec_err_t::*, *};
use std::os::raw::{c_int, c_uint};
use std::{ptr, slice};
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
pub enum VideoCodecId {
VP8,
VP9,
}
impl Default for VideoCodecId {
fn default() -> VideoCodecId {
VideoCodecId::VP9
}
}
pub struct Encoder {
ctx: vpx_codec_ctx_t,
width: usize,
height: usize,
}
pub struct Decoder {
ctx: vpx_codec_ctx_t,
}
#[derive(Debug)]
pub enum Error {
FailedCall(String),
BadPtr(String),
}
impl std::fmt::Display for Error {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::result::Result<(), std::fmt::Error> {
write!(f, "{:?}", self)
}
}
impl std::error::Error for Error {}
pub type Result<T> = std::result::Result<T, Error>;
macro_rules! call_vpx {
($x:expr) => {{
let result = unsafe { $x }; // original expression
let result_int = unsafe { std::mem::transmute::<_, i32>(result) };
if result_int != 0 {
return Err(Error::FailedCall(format!(
"errcode={} {}:{}:{}:{}",
result_int,
module_path!(),
file!(),
line!(),
column!()
))
.into());
}
result
}};
}
macro_rules! call_vpx_ptr {
($x:expr) => {{
let result = unsafe { $x }; // original expression
let result_int = unsafe { std::mem::transmute::<_, isize>(result) };
if result_int == 0 {
return Err(Error::BadPtr(format!(
"errcode={} {}:{}:{}:{}",
result_int,
module_path!(),
file!(),
line!(),
column!()
))
.into());
}
result
}};
}
impl Encoder {
pub fn new(config: &Config, num_threads: u32) -> Result<Self> {
let i;
if cfg!(feature = "VP8") {
i = match config.codec {
VideoCodecId::VP8 => call_vpx_ptr!(vpx_codec_vp8_cx()),
VideoCodecId::VP9 => call_vpx_ptr!(vpx_codec_vp9_cx()),
};
} else {
i = call_vpx_ptr!(vpx_codec_vp9_cx());
}
let mut c = unsafe { std::mem::MaybeUninit::zeroed().assume_init() };
call_vpx!(vpx_codec_enc_config_default(i, &mut c, 0));
// https://www.webmproject.org/docs/encoder-parameters/
// default: c.rc_min_quantizer = 0, c.rc_max_quantizer = 63
// try rc_resize_allowed later
c.g_w = config.width;
c.g_h = config.height;
c.g_timebase.num = config.timebase[0];
c.g_timebase.den = config.timebase[1];
c.rc_target_bitrate = config.bitrate;
c.rc_undershoot_pct = 95;
c.rc_dropframe_thresh = 25;
if config.rc_min_quantizer > 0 {
c.rc_min_quantizer = config.rc_min_quantizer;
}
if config.rc_max_quantizer > 0 {
c.rc_max_quantizer = config.rc_max_quantizer;
}
let mut speed = config.speed;
if speed <= 0 {
speed = 6;
}
c.g_threads = if num_threads == 0 {
num_cpus::get() as _
} else {
num_threads
};
c.g_error_resilient = VPX_ERROR_RESILIENT_DEFAULT;
// https://developers.google.com/media/vp9/bitrate-modes/
// Constant Bitrate mode (CBR) is recommended for live streaming with VP9.
c.rc_end_usage = vpx_rc_mode::VPX_CBR;
// c.kf_min_dist = 0;
// c.kf_max_dist = 999999;
c.kf_mode = vpx_kf_mode::VPX_KF_DISABLED; // reduce bandwidth a lot
/*
The VPX encoder supports two-pass encoding, which exists for rate control:
the whole encode runs twice, and the first pass produces control parameters
that steer the second pass, giving the best PSNR at the same bitrate.
*/
let mut ctx = Default::default();
call_vpx!(vpx_codec_enc_init_ver(
&mut ctx,
i,
&c,
0,
VPX_ENCODER_ABI_VERSION as _
));
if config.codec == VideoCodecId::VP9 {
// set encoder internal speed settings
// in ffmpeg, it is --speed option
/*
set to 0 or a positive value 1-16, the codec will try to adapt its
complexity depending on the time it spends encoding. Increasing this
number will make the speed go up and the quality go down.
Negative values mean strict enforcement of this
while positive values are adaptive
*/
/* https://developers.google.com/media/vp9/live-encoding
Speed 5 to 8 should be used for live / real-time encoding.
Lower numbers (5 or 6) are higher quality but require more CPU power.
Higher numbers (7 or 8) will be lower quality but more manageable for lower latency
use cases and also for lower CPU power devices such as mobile.
*/
call_vpx!(vpx_codec_control_(&mut ctx, VP8E_SET_CPUUSED as _, speed,));
// set row level multi-threading
/*
as some people in comments and below have already commented,
more recent versions of libvpx support -row-mt 1 to enable tile row
multi-threading. This can increase the number of tiles by up to 4x in VP9
(since the max number of tile rows is 4, regardless of video height).
To enable this, use -tile-rows N where N is the number of tile rows in
log2 units (so -tile-rows 1 means 2 tile rows and -tile-rows 2 means 4 tile
rows). The total number of active threads will then be equal to
$tile_rows * $tile_columns
*/
call_vpx!(vpx_codec_control_(
&mut ctx,
VP9E_SET_ROW_MT as _,
1 as c_int
));
call_vpx!(vpx_codec_control_(
&mut ctx,
VP9E_SET_TILE_COLUMNS as _,
4 as c_int
));
}
Ok(Self {
ctx,
width: config.width as _,
height: config.height as _,
})
}
pub fn encode(&mut self, pts: i64, data: &[u8], stride_align: usize) -> Result<EncodeFrames> {
assert!(2 * data.len() >= 3 * self.width * self.height);
let mut image = Default::default();
call_vpx_ptr!(vpx_img_wrap(
&mut image,
vpx_img_fmt::VPX_IMG_FMT_I420,
self.width as _,
self.height as _,
stride_align as _,
data.as_ptr() as _,
));
call_vpx!(vpx_codec_encode(
&mut self.ctx,
&image,
pts as _,
1, // Duration
0, // Flags
VPX_DL_REALTIME as _,
));
Ok(EncodeFrames {
ctx: &mut self.ctx,
iter: ptr::null(),
})
}
/// Notify the encoder to return any pending packets
pub fn flush(&mut self) -> Result<EncodeFrames> {
call_vpx!(vpx_codec_encode(
&mut self.ctx,
ptr::null(),
-1, // PTS
1, // Duration
0, // Flags
VPX_DL_REALTIME as _,
));
Ok(EncodeFrames {
ctx: &mut self.ctx,
iter: ptr::null(),
})
}
}
impl Drop for Encoder {
fn drop(&mut self) {
unsafe {
let result = vpx_codec_destroy(&mut self.ctx);
if result != VPX_CODEC_OK {
panic!("failed to destroy vpx codec");
}
}
}
}
#[derive(Clone, Copy, Debug)]
pub struct EncodeFrame<'a> {
/// Compressed data.
pub data: &'a [u8],
/// Whether the frame is a keyframe.
pub key: bool,
/// Presentation timestamp (in timebase units).
pub pts: i64,
}
#[derive(Clone, Copy, Debug)]
pub struct Config {
/// The width (in pixels).
pub width: c_uint,
/// The height (in pixels).
pub height: c_uint,
/// The timebase numerator and denominator (in seconds).
pub timebase: [c_int; 2],
/// The target bitrate (in kilobits per second).
pub bitrate: c_uint,
/// The codec
pub codec: VideoCodecId,
pub rc_min_quantizer: u32,
pub rc_max_quantizer: u32,
pub speed: i32,
}
pub struct EncodeFrames<'a> {
ctx: &'a mut vpx_codec_ctx_t,
iter: vpx_codec_iter_t,
}
impl<'a> Iterator for EncodeFrames<'a> {
type Item = EncodeFrame<'a>;
fn next(&mut self) -> Option<Self::Item> {
loop {
unsafe {
let pkt = vpx_codec_get_cx_data(self.ctx, &mut self.iter);
if pkt.is_null() {
return None;
} else if (*pkt).kind == vpx_codec_cx_pkt_kind::VPX_CODEC_CX_FRAME_PKT {
let f = &(*pkt).data.frame;
return Some(Self::Item {
data: slice::from_raw_parts(f.buf as _, f.sz as _),
key: (f.flags & VPX_FRAME_IS_KEY) != 0,
pts: f.pts,
});
} else {
// Ignore the packet.
}
}
}
}
}
impl Decoder {
/// Create a new decoder
///
/// # Errors
///
/// The function may fail if the underlying libvpx does not provide
/// the VP9 decoder.
pub fn new(codec: VideoCodecId, num_threads: u32) -> Result<Self> {
// This is sound because `vpx_codec_ctx` is a repr(C) struct without any field that can
// cause UB if uninitialized.
let i;
if cfg!(feature = "VP8") {
i = match codec {
VideoCodecId::VP8 => call_vpx_ptr!(vpx_codec_vp8_dx()),
VideoCodecId::VP9 => call_vpx_ptr!(vpx_codec_vp9_dx()),
};
} else {
i = call_vpx_ptr!(vpx_codec_vp9_dx());
}
let mut ctx = Default::default();
let cfg = vpx_codec_dec_cfg_t {
threads: if num_threads == 0 {
num_cpus::get() as _
} else {
num_threads
},
w: 0,
h: 0,
};
/*
unsafe {
println!("{}", vpx_codec_get_caps(i));
}
*/
call_vpx!(vpx_codec_dec_init_ver(
&mut ctx,
i,
&cfg,
0,
VPX_DECODER_ABI_VERSION as _,
));
Ok(Self { ctx })
}
pub fn decode2rgb(&mut self, data: &[u8], rgba: bool) -> Result<Vec<u8>> {
let mut img = Image::new();
for frame in self.decode(data)? {
drop(img);
img = frame;
}
for frame in self.flush()? {
drop(img);
img = frame;
}
if img.is_null() {
Ok(Vec::new())
} else {
let mut out = Default::default();
img.rgb(1, rgba, &mut out);
Ok(out)
}
}
/// Feed some compressed data to the decoder
///
/// The `data` slice is sent to the decoder
///
/// It matches a call to `vpx_codec_decode`.
pub fn decode(&mut self, data: &[u8]) -> Result<DecodeFrames> {
call_vpx!(vpx_codec_decode(
&mut self.ctx,
data.as_ptr(),
data.len() as _,
ptr::null_mut(),
0,
));
Ok(DecodeFrames {
ctx: &mut self.ctx,
iter: ptr::null(),
})
}
/// Notify the decoder to return any pending frame
pub fn flush(&mut self) -> Result<DecodeFrames> {
call_vpx!(vpx_codec_decode(
&mut self.ctx,
ptr::null(),
0,
ptr::null_mut(),
0
));
Ok(DecodeFrames {
ctx: &mut self.ctx,
iter: ptr::null(),
})
}
}
impl Drop for Decoder {
fn drop(&mut self) {
unsafe {
let result = vpx_codec_destroy(&mut self.ctx);
if result != VPX_CODEC_OK {
panic!("failed to destroy vpx codec");
}
}
}
}
pub struct DecodeFrames<'a> {
ctx: &'a mut vpx_codec_ctx_t,
iter: vpx_codec_iter_t,
}
impl<'a> Iterator for DecodeFrames<'a> {
type Item = Image;
fn next(&mut self) -> Option<Self::Item> {
let img = unsafe { vpx_codec_get_frame(self.ctx, &mut self.iter) };
if img.is_null() {
return None;
} else {
return Some(Image(img));
}
}
}
// https://chromium.googlesource.com/webm/libvpx/+/bali/vpx/src/vpx_image.c
pub struct Image(*mut vpx_image_t);
impl Image {
#[inline]
pub fn new() -> Self {
Self(std::ptr::null_mut())
}
#[inline]
pub fn is_null(&self) -> bool {
self.0.is_null()
}
#[inline]
pub fn width(&self) -> usize {
self.inner().d_w as _
}
#[inline]
pub fn height(&self) -> usize {
self.inner().d_h as _
}
#[inline]
pub fn format(&self) -> vpx_img_fmt_t {
// VPX_IMG_FMT_I420
self.inner().fmt
}
#[inline]
pub fn inner(&self) -> &vpx_image_t {
unsafe { &*self.0 }
}
#[inline]
pub fn stride(&self, iplane: usize) -> i32 {
self.inner().stride[iplane]
}
pub fn rgb(&self, stride_align: usize, rgba: bool, dst: &mut Vec<u8>) {
let h = self.height();
let mut w = self.width();
let bps = if rgba { 4 } else { 3 };
w = (w + stride_align - 1) & !(stride_align - 1);
dst.resize(h * w * bps, 0);
let img = self.inner();
unsafe {
if rgba {
super::I420ToARGB(
img.planes[0],
img.stride[0],
img.planes[1],
img.stride[1],
img.planes[2],
img.stride[2],
dst.as_mut_ptr(),
(w * bps) as _,
self.width() as _,
self.height() as _,
);
} else {
super::I420ToRAW(
img.planes[0],
img.stride[0],
img.planes[1],
img.stride[1],
img.planes[2],
img.stride[2],
dst.as_mut_ptr(),
(w * bps) as _,
self.width() as _,
self.height() as _,
);
}
}
}
#[inline]
pub fn data(&self) -> (&[u8], &[u8], &[u8]) {
unsafe {
let img = self.inner();
let h = (img.d_h as usize + 1) & !1;
let n = img.stride[0] as usize * h;
let y = slice::from_raw_parts(img.planes[0], n);
let n = img.stride[1] as usize * (h >> 1);
let u = slice::from_raw_parts(img.planes[1], n);
let v = slice::from_raw_parts(img.planes[2], n);
(y, u, v)
}
}
}
impl Drop for Image {
fn drop(&mut self) {
if !self.0.is_null() {
unsafe { vpx_img_free(self.0) };
}
}
}
unsafe impl Send for vpx_codec_ctx_t {}
use std::ops::{Deref, DerefMut};
#[cfg(feature = "hwcodec")]
use std::{
collections::HashMap,
sync::{Arc, Mutex},
};
#[cfg(feature = "hwcodec")]
use crate::hwcodec::*;
use crate::vpxcodec::*;
use hbb_common::{
anyhow::anyhow,
log,
message_proto::{video_frame, EncodedVideoFrames, Message, VideoCodecState},
ResultType,
};
#[cfg(feature = "hwcodec")]
use hbb_common::{config::Config2, lazy_static};
#[cfg(feature = "hwcodec")]
lazy_static::lazy_static! {
static ref PEER_DECODER_STATES: Arc<Mutex<HashMap<i32, VideoCodecState>>> = Default::default();
static ref MY_DECODER_STATE: Arc<Mutex<VideoCodecState>> = Default::default();
}
const SCORE_VPX: i32 = 90;
#[derive(Debug, Clone)]
pub struct HwEncoderConfig {
pub codec_name: String,
pub width: usize,
pub height: usize,
pub bitrate: i32,
}
#[derive(Debug, Clone)]
pub enum EncoderCfg {
VPX(VpxEncoderConfig),
HW(HwEncoderConfig),
}
pub trait EncoderApi {
fn new(cfg: EncoderCfg) -> ResultType<Self>
where
Self: Sized;
fn encode_to_message(&mut self, frame: &[u8], ms: i64) -> ResultType<Message>;
fn use_yuv(&self) -> bool;
fn set_bitrate(&mut self, bitrate: u32) -> ResultType<()>;
}
pub struct DecoderCfg {
pub vpx: VpxDecoderConfig,
}
pub struct Encoder {
pub codec: Box<dyn EncoderApi>,
}
impl Deref for Encoder {
type Target = Box<dyn EncoderApi>;
fn deref(&self) -> &Self::Target {
&self.codec
}
}
impl DerefMut for Encoder {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.codec
}
}
pub struct Decoder {
vpx: VpxDecoder,
#[cfg(feature = "hwcodec")]
hw: HwDecoders,
#[cfg(feature = "hwcodec")]
i420: Vec<u8>,
}
#[derive(Debug, Clone)]
pub enum EncoderUpdate {
State(VideoCodecState),
Remove,
DisableHwIfNotExist,
}
impl Encoder {
pub fn new(config: EncoderCfg) -> ResultType<Encoder> {
log::info!("new encoder:{:?}", config);
match config {
EncoderCfg::VPX(_) => Ok(Encoder {
codec: Box::new(VpxEncoder::new(config)?),
}),
#[cfg(feature = "hwcodec")]
EncoderCfg::HW(_) => match HwEncoder::new(config) {
Ok(hw) => Ok(Encoder {
codec: Box::new(hw),
}),
Err(e) => {
HwEncoder::best(true, true);
Err(e)
}
},
#[cfg(not(feature = "hwcodec"))]
_ => Err(anyhow!("unsupported encoder type")),
}
}
// TODO
pub fn update_video_encoder(id: i32, update: EncoderUpdate) {
log::info!("encoder update: {:?}", update);
#[cfg(feature = "hwcodec")]
{
let mut states = PEER_DECODER_STATES.lock().unwrap();
match update {
EncoderUpdate::State(state) => {
states.insert(id, state);
}
EncoderUpdate::Remove => {
states.remove(&id);
}
EncoderUpdate::DisableHwIfNotExist => {
if !states.contains_key(&id) {
states.insert(id, VideoCodecState::default());
}
}
}
let current_encoder_name = HwEncoder::current_name();
if states.len() > 0 {
let (best, _) = HwEncoder::best(false, true);
let enabled_h264 = best.h264.is_some()
&& states.len() > 0
&& states.iter().all(|(_, s)| s.ScoreH264 > 0);
let enabled_h265 = best.h265.is_some()
&& states.len() > 0
&& states.iter().all(|(_, s)| s.ScoreH265 > 0);
// score encoder
let mut score_vpx = SCORE_VPX;
let mut score_h264 = best.h264.as_ref().map_or(0, |c| c.score);
let mut score_h265 = best.h265.as_ref().map_or(0, |c| c.score);
// score decoder
score_vpx += states.iter().map(|s| s.1.ScoreVpx).sum::<i32>();
if enabled_h264 {
score_h264 += states.iter().map(|s| s.1.ScoreH264).sum::<i32>();
}
if enabled_h265 {
score_h265 += states.iter().map(|s| s.1.ScoreH265).sum::<i32>();
}
if enabled_h265 && score_h265 >= score_vpx && score_h265 >= score_h264 {
*current_encoder_name.lock().unwrap() = Some(best.h265.unwrap().name);
} else if enabled_h264 && score_h264 >= score_vpx && score_h264 >= score_h265 {
*current_encoder_name.lock().unwrap() = Some(best.h264.unwrap().name);
} else {
*current_encoder_name.lock().unwrap() = None;
}
log::info!(
"connection count:{}, h264:{}, h265:{}, score: vpx({}), h264({}), h265({}), set current encoder name {:?}",
states.len(),
enabled_h264,
enabled_h265,
score_vpx,
score_h264,
score_h265,
current_encoder_name.lock().unwrap()
)
} else {
*current_encoder_name.lock().unwrap() = None;
}
}
#[cfg(not(feature = "hwcodec"))]
{
let _ = id;
let _ = update;
}
}
#[inline]
pub fn current_hw_encoder_name() -> Option<String> {
#[cfg(feature = "hwcodec")]
if check_hwcodec_config() {
return HwEncoder::current_name().lock().unwrap().clone();
} else {
return None;
}
#[cfg(not(feature = "hwcodec"))]
return None;
}
}
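A minimal sketch of driving the new trait from a capture loop, assuming the frame is already I420 (use_yuv() tells the caller which input format the backend wants); the resolution and bitrate are illustrative:

use hbb_common::{message_proto::Message, ResultType};
use scrap::codec::{Encoder, EncoderCfg};
use scrap::vpxcodec::{VpxEncoderConfig, VpxVideoCodecId};

fn encode_one(yuv: &[u8], ms: i64) -> ResultType<Message> {
    let mut encoder = Encoder::new(EncoderCfg::VPX(VpxEncoderConfig {
        width: 1920,
        height: 1080,
        timebase: [1, 1000],
        bitrate: 2000, // kbit/s
        codec: VpxVideoCodecId::VP9,
        num_threads: 0, // 0 falls back to num_cpus
    }))?;
    // Encoder derefs to Box<dyn EncoderApi>, so the backend stays transparent here.
    encoder.encode_to_message(yuv, ms)
}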
#[cfg(feature = "hwcodec")]
impl Drop for Decoder {
fn drop(&mut self) {
*MY_DECODER_STATE.lock().unwrap() = VideoCodecState {
ScoreVpx: SCORE_VPX,
..Default::default()
};
}
}
impl Decoder {
pub fn video_codec_state() -> VideoCodecState {
// video_codec_state is maintained by the creation and destruction of Decoder.
// It is guaranteed to be read only after a Decoder has been created.
#[cfg(feature = "hwcodec")]
if check_hwcodec_config() {
return MY_DECODER_STATE.lock().unwrap().clone();
} else {
return VideoCodecState {
ScoreVpx: SCORE_VPX,
..Default::default()
};
}
#[cfg(not(feature = "hwcodec"))]
VideoCodecState {
ScoreVpx: SCORE_VPX,
..Default::default()
}
}
pub fn new(config: DecoderCfg) -> Decoder {
let vpx = VpxDecoder::new(config.vpx).unwrap();
let decoder = Decoder {
vpx,
#[cfg(feature = "hwcodec")]
hw: HwDecoder::new_decoders(),
#[cfg(feature = "hwcodec")]
i420: vec![],
};
#[cfg(feature = "hwcodec")]
{
let mut state = MY_DECODER_STATE.lock().unwrap();
state.ScoreVpx = SCORE_VPX;
state.ScoreH264 = decoder.hw.h264.as_ref().map_or(0, |d| d.info.score);
state.ScoreH265 = decoder.hw.h265.as_ref().map_or(0, |d| d.info.score);
}
decoder
}
pub fn handle_video_frame(
&mut self,
frame: &video_frame::Union,
rgb: &mut Vec<u8>,
) -> ResultType<bool> {
match frame {
video_frame::Union::vp9s(vp9s) => {
Decoder::handle_vp9s_video_frame(&mut self.vpx, vp9s, rgb)
}
#[cfg(feature = "hwcodec")]
video_frame::Union::h264s(h264s) => {
if let Some(decoder) = &mut self.hw.h264 {
Decoder::handle_hw_video_frame(decoder, h264s, rgb, &mut self.i420)
} else {
Err(anyhow!("don't support h264!"))
}
}
#[cfg(feature = "hwcodec")]
video_frame::Union::h265s(h265s) => {
if let Some(decoder) = &mut self.hw.h265 {
Decoder::handle_hw_video_frame(decoder, h265s, rgb, &mut self.i420)
} else {
Err(anyhow!("don't support h265!"))
}
}
_ => Err(anyhow!("unsupported video frame type!")),
}
}
fn handle_vp9s_video_frame(
decoder: &mut VpxDecoder,
vp9s: &EncodedVideoFrames,
rgb: &mut Vec<u8>,
) -> ResultType<bool> {
let mut last_frame = Image::new();
for vp9 in vp9s.frames.iter() {
for frame in decoder.decode(&vp9.data)? {
drop(last_frame);
last_frame = frame;
}
}
for frame in decoder.flush()? {
drop(last_frame);
last_frame = frame;
}
if last_frame.is_null() {
Ok(false)
} else {
last_frame.rgb(1, true, rgb);
Ok(true)
}
}
#[cfg(feature = "hwcodec")]
fn handle_hw_video_frame(
decoder: &mut HwDecoder,
frames: &EncodedVideoFrames,
rgb: &mut Vec<u8>,
i420: &mut Vec<u8>,
) -> ResultType<bool> {
let mut ret = false;
for h264 in frames.frames.iter() {
for image in decoder.decode(&h264.data)? {
// TODO: just process the last frame
if image.bgra(rgb, i420).is_ok() {
ret = true;
}
}
}
return Ok(ret);
}
}
#[cfg(feature = "hwcodec")]
fn check_hwcodec_config() -> bool {
if let Some(v) = Config2::get().options.get("enable-hwcodec") {
return v != "N";
}
return true; // default is true
}
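And the receiving side, sketched against the dispatch above (the renderer hand-off is left abstract):

use hbb_common::{log, message_proto::video_frame};
use scrap::codec::Decoder;

fn on_video_frame(decoder: &mut Decoder, frame: &video_frame::Union, rgb: &mut Vec<u8>) {
    match decoder.handle_video_frame(frame, rgb) {
        // rgb now holds the last decoded frame as RGBA.
        Ok(true) => { /* hand rgb to the renderer */ }
        // Data was accepted but no complete frame is ready yet.
        Ok(false) => {}
        Err(e) => log::error!("decode failed: {}", e),
    }
}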

View File

@@ -49,6 +49,17 @@ extern "C" {
height: c_int,
) -> c_int;
pub fn ARGBToNV12(
src_bgra: *const u8,
src_stride_bgra: c_int,
dst_y: *mut u8,
dst_stride_y: c_int,
dst_uv: *mut u8,
dst_stride_uv: c_int,
width: c_int,
height: c_int,
) -> c_int;
pub fn NV12ToI420(
src_y: *const u8,
src_stride_y: c_int,
@@ -91,6 +102,17 @@ extern "C" {
width: c_int,
height: c_int,
) -> c_int;
pub fn NV12ToARGB(
src_y: *const u8,
src_stride_y: c_int,
src_uv: *const u8,
src_stride_uv: c_int,
dst_rgba: *mut u8,
dst_stride_rgba: c_int,
width: c_int,
height: c_int,
) -> c_int;
}
// https://github.com/webmproject/libvpx/blob/master/vpx/src/vpx_image.c
@@ -220,3 +242,192 @@ pub unsafe fn nv12_to_i420(
height as _,
);
}
#[cfg(feature = "hwcodec")]
pub mod hw {
use hbb_common::{anyhow::anyhow, ResultType};
use hwcodec::{ffmpeg::ffmpeg_linesize_offset_length, AVPixelFormat};
pub fn hw_bgra_to_i420(
width: usize,
height: usize,
stride: &[i32],
offset: &[i32],
length: i32,
src: &[u8],
dst: &mut Vec<u8>,
) {
let stride_y = stride[0] as usize;
let stride_u = stride[1] as usize;
let stride_v = stride[2] as usize;
let offset_u = offset[0] as usize;
let offset_v = offset[1] as usize;
dst.resize(length as _, 0);
let dst_y = dst.as_mut_ptr();
let dst_u = dst[offset_u..].as_mut_ptr();
let dst_v = dst[offset_v..].as_mut_ptr();
unsafe {
super::ARGBToI420(
src.as_ptr(),
(src.len() / height) as _,
dst_y,
stride_y as _,
dst_u,
stride_u as _,
dst_v,
stride_v as _,
width as _,
height as _,
);
}
}
pub fn hw_bgra_to_nv12(
width: usize,
height: usize,
stride: &[i32],
offset: &[i32],
length: i32,
src: &[u8],
dst: &mut Vec<u8>,
) {
let stride_y = stride[0] as usize;
let stride_uv = stride[1] as usize;
let offset_uv = offset[0] as usize;
dst.resize(length as _, 0);
let dst_y = dst.as_mut_ptr();
let dst_uv = dst[offset_uv..].as_mut_ptr();
unsafe {
super::ARGBToNV12(
src.as_ptr(),
(src.len() / height) as _,
dst_y,
stride_y as _,
dst_uv,
stride_uv as _,
width as _,
height as _,
);
}
}
#[cfg(target_os = "windows")]
pub fn hw_nv12_to_bgra(
width: usize,
height: usize,
src_y: &[u8],
src_uv: &[u8],
src_stride_y: usize,
src_stride_uv: usize,
dst: &mut Vec<u8>,
i420: &mut Vec<u8>,
align: usize,
) -> ResultType<()> {
let nv12_stride_y = src_stride_y;
let nv12_stride_uv = src_stride_uv;
if let Ok((linesize_i420, offset_i420, i420_len)) =
ffmpeg_linesize_offset_length(AVPixelFormat::AV_PIX_FMT_YUV420P, width, height, align)
{
dst.resize(width * height * 4, 0);
let i420_stride_y = linesize_i420[0];
let i420_stride_u = linesize_i420[1];
let i420_stride_v = linesize_i420[2];
i420.resize(i420_len as _, 0);
unsafe {
let i420_offset_y = i420.as_ptr().add(0) as _;
let i420_offset_u = i420.as_ptr().add(offset_i420[0] as _) as _;
let i420_offset_v = i420.as_ptr().add(offset_i420[1] as _) as _;
super::NV12ToI420(
src_y.as_ptr(),
nv12_stride_y as _,
src_uv.as_ptr(),
nv12_stride_uv as _,
i420_offset_y,
i420_stride_y,
i420_offset_u,
i420_stride_u,
i420_offset_v,
i420_stride_v,
width as _,
height as _,
);
super::I420ToARGB(
i420_offset_y,
i420_stride_y,
i420_offset_u,
i420_stride_u,
i420_offset_v,
i420_stride_v,
dst.as_mut_ptr(),
(width * 4) as _,
width as _,
height as _,
);
return Ok(());
};
}
return Err(anyhow!("get linesize offset failed"));
}
#[cfg(not(target_os = "windows"))]
pub fn hw_nv12_to_bgra(
width: usize,
height: usize,
src_y: &[u8],
src_uv: &[u8],
src_stride_y: usize,
src_stride_uv: usize,
dst: &mut Vec<u8>,
) -> ResultType<()> {
dst.resize(width * height * 4, 0);
unsafe {
match super::NV12ToARGB(
src_y.as_ptr(),
src_stride_y as _,
src_uv.as_ptr(),
src_stride_uv as _,
dst.as_mut_ptr(),
(width * 4) as _,
width as _,
height as _,
) {
0 => Ok(()),
_ => Err(anyhow!("NV12ToARGB failed")),
}
}
}
pub fn hw_i420_to_bgra(
width: usize,
height: usize,
src_y: &[u8],
src_u: &[u8],
src_v: &[u8],
src_stride_y: usize,
src_stride_u: usize,
src_stride_v: usize,
dst: &mut Vec<u8>,
) {
let src_y = src_y.as_ptr();
let src_u = src_u.as_ptr();
let src_v = src_v.as_ptr();
dst.resize(width * height * 4, 0);
unsafe {
super::I420ToARGB(
src_y,
src_stride_y as _,
src_u,
src_stride_u as _,
src_v,
src_stride_v as _,
dst.as_mut_ptr(),
(width * 4) as _,
width as _,
height as _,
);
};
}
}
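A minimal sketch of the I420 helper, assuming tightly packed planes (stride equal to width for Y, width/2 for U and V) and that the hw module is re-exported at the crate root via the convert glob:

use scrap::hw;

fn i420_to_bgra(width: usize, height: usize, y: &[u8], u: &[u8], v: &[u8]) -> Vec<u8> {
    let mut bgra = Vec::new();
    // Strides here assume no row padding; real decoder output uses frame.linesize.
    hw::hw_i420_to_bgra(width, height, y, u, v, width, width / 2, width / 2, &mut bgra);
    bgra
}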

View File

@@ -1,5 +1,6 @@
use crate::dxgi;
use std::io::ErrorKind::{NotFound, TimedOut, WouldBlock};
use std::time::Duration;
use std::{io, ops};
pub struct Capturer {
@@ -40,8 +41,8 @@ impl Capturer {
self.height
}
pub fn frame<'a>(&'a mut self, timeout_ms: u32) -> io::Result<Frame<'a>> {
match self.inner.frame(timeout_ms) {
pub fn frame<'a>(&'a mut self, timeout_ms: Duration) -> io::Result<Frame<'a>> {
match self.inner.frame(timeout_ms.as_millis() as _) {
Ok(frame) => Ok(Frame(frame)),
Err(ref error) if error.kind() == TimedOut => Err(WouldBlock.into()),
Err(error) => Err(error),
@@ -135,7 +136,7 @@ impl CapturerMag {
pub fn get_rect(&self) -> ((i32, i32), usize, usize) {
self.inner.get_rect()
}
pub fn frame<'a>(&'a mut self, _timeout_ms: u32) -> io::Result<Frame<'a>> {
pub fn frame<'a>(&'a mut self, _timeout_ms: Duration) -> io::Result<Frame<'a>> {
self.inner.frame(&mut self.data)?;
Ok(Frame(&self.data))
}
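A minimal sketch of the polling contract after this change: a zero Duration polls once, TimedOut surfaces as WouldBlock, and callers spin (the sleep interval is illustrative):

use scrap::Capturer;
use std::io::ErrorKind::WouldBlock;
use std::time::Duration;

fn capture_loop(capturer: &mut Capturer) -> std::io::Result<()> {
    loop {
        match capturer.frame(Duration::from_millis(0)) {
            Ok(frame) => println!("got {} bytes", frame.len()),
            Err(e) if e.kind() == WouldBlock => {
                // No new frame yet; back off briefly and retry.
                std::thread::sleep(Duration::from_millis(1));
            }
            Err(e) => return Err(e),
        }
    }
}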

View File

@@ -0,0 +1,344 @@
use crate::{
codec::{EncoderApi, EncoderCfg},
hw, HW_STRIDE_ALIGN,
};
use hbb_common::{
anyhow::{anyhow, Context},
config::HwCodecConfig,
lazy_static, log,
message_proto::{EncodedVideoFrame, EncodedVideoFrames, Message, VideoFrame},
ResultType,
};
use hwcodec::{
decode::{DecodeContext, DecodeFrame, Decoder},
encode::{EncodeContext, EncodeFrame, Encoder},
ffmpeg::{CodecInfo, CodecInfos, DataFormat},
AVPixelFormat,
Quality::{self, *},
RateContorl::{self, *},
};
use std::sync::{Arc, Mutex};
lazy_static::lazy_static! {
static ref HW_ENCODER_NAME: Arc<Mutex<Option<String>>> = Default::default();
}
const CFG_KEY_ENCODER: &str = "bestHwEncoders";
const CFG_KEY_DECODER: &str = "bestHwDecoders";
const DEFAULT_PIXFMT: AVPixelFormat = AVPixelFormat::AV_PIX_FMT_YUV420P;
const DEFAULT_TIME_BASE: [i32; 2] = [1, 30];
const DEFAULT_GOP: i32 = 60;
const DEFAULT_HW_QUALITY: Quality = Quality_Default;
const DEFAULT_RC: RateContorl = RC_DEFAULT;
pub struct HwEncoder {
encoder: Encoder,
yuv: Vec<u8>,
pub format: DataFormat,
pub pixfmt: AVPixelFormat,
}
impl EncoderApi for HwEncoder {
fn new(cfg: EncoderCfg) -> ResultType<Self>
where
Self: Sized,
{
match cfg {
EncoderCfg::HW(config) => {
let ctx = EncodeContext {
name: config.codec_name.clone(),
width: config.width as _,
height: config.height as _,
pixfmt: DEFAULT_PIXFMT,
align: HW_STRIDE_ALIGN as _,
bitrate: config.bitrate * 1000,
timebase: DEFAULT_TIME_BASE,
gop: DEFAULT_GOP,
quality: DEFAULT_HW_QUALITY,
rc: DEFAULT_RC,
};
let format = match Encoder::format_from_name(config.codec_name.clone()) {
Ok(format) => format,
Err(_) => {
return Err(anyhow!(
"failed to get format from name: {}",
config.codec_name
))
}
};
match Encoder::new(ctx.clone()) {
Ok(encoder) => Ok(HwEncoder {
encoder,
yuv: vec![],
format,
pixfmt: ctx.pixfmt,
}),
Err(_) => Err(anyhow!(format!("Failed to create encoder"))),
}
}
_ => Err(anyhow!("encoder type mismatch")),
}
}
fn encode_to_message(
&mut self,
frame: &[u8],
_ms: i64,
) -> ResultType<hbb_common::message_proto::Message> {
let mut msg_out = Message::new();
let mut vf = VideoFrame::new();
let mut frames = Vec::new();
for frame in self.encode(frame).with_context(|| "Failed to encode")? {
frames.push(EncodedVideoFrame {
data: frame.data,
pts: frame.pts as _,
..Default::default()
});
}
if frames.len() > 0 {
let frames = EncodedVideoFrames {
frames: frames.into(),
..Default::default()
};
match self.format {
DataFormat::H264 => vf.set_h264s(frames),
DataFormat::H265 => vf.set_h265s(frames),
}
msg_out.set_video_frame(vf);
Ok(msg_out)
} else {
Err(anyhow!("no valid frame"))
}
}
fn use_yuv(&self) -> bool {
false
}
fn set_bitrate(&mut self, bitrate: u32) -> ResultType<()> {
self.encoder.set_bitrate((bitrate * 1000) as _).ok();
Ok(())
}
}
impl HwEncoder {
/// Get the best encoders.
///
/// # Parameters
/// `force_reset`: force a refresh of the cached config.
/// `write`: write the probed result to the config file.
///
/// # Returns
/// `CodecInfos`: the best encoder infos.
/// `bool`: whether the config was refreshed.
pub fn best(force_reset: bool, write: bool) -> (CodecInfos, bool) {
let config = get_config(CFG_KEY_ENCODER);
if !force_reset && config.is_ok() {
(config.unwrap(), false)
} else {
let ctx = EncodeContext {
name: String::from(""),
width: 1920,
height: 1080,
pixfmt: DEFAULT_PIXFMT,
align: HW_STRIDE_ALIGN as _,
bitrate: 0,
timebase: DEFAULT_TIME_BASE,
gop: DEFAULT_GOP,
quality: DEFAULT_HW_QUALITY,
rc: DEFAULT_RC,
};
let encoders = CodecInfo::score(Encoder::avaliable_encoders(ctx));
if write {
set_config(CFG_KEY_ENCODER, &encoders)
.map_err(|e| log::error!("{:?}", e))
.ok();
}
(encoders, true)
}
}
pub fn current_name() -> Arc<Mutex<Option<String>>> {
HW_ENCODER_NAME.clone()
}
pub fn encode(&mut self, bgra: &[u8]) -> ResultType<Vec<EncodeFrame>> {
match self.pixfmt {
AVPixelFormat::AV_PIX_FMT_YUV420P => hw::hw_bgra_to_i420(
self.encoder.ctx.width as _,
self.encoder.ctx.height as _,
&self.encoder.linesize,
&self.encoder.offset,
self.encoder.length,
bgra,
&mut self.yuv,
),
AVPixelFormat::AV_PIX_FMT_NV12 => hw::hw_bgra_to_nv12(
self.encoder.ctx.width as _,
self.encoder.ctx.height as _,
&self.encoder.linesize,
&self.encoder.offset,
self.encoder.length,
bgra,
&mut self.yuv,
),
}
match self.encoder.encode(&self.yuv) {
Ok(v) => {
let mut data = Vec::<EncodeFrame>::new();
data.append(v);
Ok(data)
}
Err(_) => Ok(Vec::<EncodeFrame>::new()),
}
}
}
pub struct HwDecoder {
decoder: Decoder,
pub info: CodecInfo,
}
pub struct HwDecoders {
pub h264: Option<HwDecoder>,
pub h265: Option<HwDecoder>,
}
impl HwDecoder {
/// See HwEncoder::best
fn best(force_reset: bool, write: bool) -> (CodecInfos, bool) {
let config = get_config(CFG_KEY_DECODER);
if !force_reset && config.is_ok() {
(config.unwrap(), false)
} else {
let decoders = CodecInfo::score(Decoder::avaliable_decoders());
if write {
set_config(CFG_KEY_DECODER, &decoders)
.map_err(|e| log::error!("{:?}", e))
.ok();
}
(decoders, true)
}
}
pub fn new_decoders() -> HwDecoders {
let (best, _) = HwDecoder::best(false, true);
let mut h264: Option<HwDecoder> = None;
let mut h265: Option<HwDecoder> = None;
let mut fail = false;
if let Some(info) = best.h264 {
h264 = HwDecoder::new(info).ok();
if h264.is_none() {
fail = true;
}
}
if let Some(info) = best.h265 {
h265 = HwDecoder::new(info).ok();
if h265.is_none() {
fail = true;
}
}
if fail {
HwDecoder::best(true, true);
}
HwDecoders { h264, h265 }
}
pub fn new(info: CodecInfo) -> ResultType<Self> {
let ctx = DecodeContext {
name: info.name.clone(),
device_type: info.hwdevice.clone(),
};
match Decoder::new(ctx) {
Ok(decoder) => Ok(HwDecoder { decoder, info }),
Err(_) => Err(anyhow!(format!("Failed to create decoder"))),
}
}
pub fn decode(&mut self, data: &[u8]) -> ResultType<Vec<HwDecoderImage>> {
match self.decoder.decode(data) {
Ok(v) => Ok(v.iter().map(|f| HwDecoderImage { frame: f }).collect()),
Err(_) => Ok(vec![]),
}
}
}
pub struct HwDecoderImage<'a> {
frame: &'a DecodeFrame,
}
impl HwDecoderImage<'_> {
pub fn bgra(&self, bgra: &mut Vec<u8>, i420: &mut Vec<u8>) -> ResultType<()> {
let frame = self.frame;
match frame.pixfmt {
AVPixelFormat::AV_PIX_FMT_NV12 => hw::hw_nv12_to_bgra(
frame.width as _,
frame.height as _,
&frame.data[0],
&frame.data[1],
frame.linesize[0] as _,
frame.linesize[1] as _,
bgra,
i420,
HW_STRIDE_ALIGN,
),
AVPixelFormat::AV_PIX_FMT_YUV420P => {
hw::hw_i420_to_bgra(
frame.width as _,
frame.height as _,
&frame.data[0],
&frame.data[1],
&frame.data[2],
frame.linesize[0] as _,
frame.linesize[1] as _,
frame.linesize[2] as _,
bgra,
);
return Ok(());
}
}
}
}
fn get_config(k: &str) -> ResultType<CodecInfos> {
let v = HwCodecConfig::load()
.options
.get(k)
.unwrap_or(&"".to_owned())
.to_owned();
match CodecInfos::deserialize(&v) {
Ok(v) => Ok(v),
Err(_) => Err(anyhow!("Failed to get config:{}", k)),
}
}
fn set_config(k: &str, v: &CodecInfos) -> ResultType<()> {
match v.serialize() {
Ok(v) => {
let mut config = HwCodecConfig::load();
config.options.insert(k.to_owned(), v);
config.store();
Ok(())
}
Err(_) => Err(anyhow!("Failed to set config:{}", k)),
}
}
pub fn check_config() {
let (encoders, update_encoders) = HwEncoder::best(false, false);
let (decoders, update_decoders) = HwDecoder::best(false, false);
if update_encoders || update_decoders {
if let Ok(encoders) = encoders.serialize() {
if let Ok(decoders) = decoders.serialize() {
let mut config = HwCodecConfig::load();
config.options.insert(CFG_KEY_ENCODER.to_owned(), encoders);
config.options.insert(CFG_KEY_DECODER.to_owned(), decoders);
config.store();
return;
}
}
log::error!("Failed to serialize codec info");
}
}
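A minimal sketch of how a session picks the hardware backend, reading the name that the scoring in codec.rs published (the fallback to VPX is the encode_one sketch shown earlier; the function name is illustrative):

use scrap::codec::{EncoderCfg, HwEncoderConfig};
use scrap::hwcodec::HwEncoder;

fn hw_encoder_cfg(width: usize, height: usize, bitrate: i32) -> Option<EncoderCfg> {
    // None means the scoring chose VPX or no peer advertised a hardware codec.
    let codec_name = HwEncoder::current_name().lock().unwrap().clone()?;
    Some(EncoderCfg::HW(HwEncoderConfig {
        codec_name,
        width,
        height,
        bitrate, // kbit/s; the encoder multiplies by 1000 internally
    }))
}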

View File

@@ -1,4 +1,4 @@
pub use self::codec::*;
pub use self::vpxcodec::*;
cfg_if! {
if #[cfg(quartz)] {
@@ -29,8 +29,12 @@ cfg_if! {
pub mod codec;
mod convert;
#[cfg(feature = "hwcodec")]
pub mod hwcodec;
pub mod vpxcodec;
pub use self::convert::*;
pub const STRIDE_ALIGN: usize = 64; // stride alignment commonly used by libvpx vpx_img_alloc callers
pub const HW_STRIDE_ALIGN: usize = 0; // recommended by av_frame_get_buffer
mod vpx;

View File

@@ -51,7 +51,7 @@ impl Capturer {
self.inner.height()
}
pub fn frame<'a>(&'a mut self, _timeout_ms: u32) -> io::Result<Frame<'a>> {
pub fn frame<'a>(&'a mut self, _timeout_ms: std::time::Duration) -> io::Result<Frame<'a>> {
match self.frame.try_lock() {
Ok(mut handle) => {
let mut frame = None;

View File

@@ -0,0 +1,599 @@
// https://github.com/astraw/vpx-encode
// https://github.com/astraw/env-libvpx-sys
// https://github.com/rust-av/vpx-rs/blob/master/src/decoder.rs
use hbb_common::anyhow::{anyhow, Context};
use hbb_common::message_proto::{EncodedVideoFrame, EncodedVideoFrames, Message, VideoFrame};
use hbb_common::ResultType;
use crate::codec::EncoderApi;
use crate::STRIDE_ALIGN;
use super::vpx::{vp8e_enc_control_id::*, vpx_codec_err_t::*, *};
use std::os::raw::{c_int, c_uint};
use std::{ptr, slice};
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
pub enum VpxVideoCodecId {
VP8,
VP9,
}
impl Default for VpxVideoCodecId {
fn default() -> VpxVideoCodecId {
VpxVideoCodecId::VP9
}
}
pub struct VpxEncoder {
ctx: vpx_codec_ctx_t,
width: usize,
height: usize,
}
pub struct VpxDecoder {
ctx: vpx_codec_ctx_t,
}
#[derive(Debug)]
pub enum Error {
FailedCall(String),
BadPtr(String),
}
impl std::fmt::Display for Error {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::result::Result<(), std::fmt::Error> {
write!(f, "{:?}", self)
}
}
impl std::error::Error for Error {}
pub type Result<T> = std::result::Result<T, Error>;
macro_rules! call_vpx {
($x:expr) => {{
let result = unsafe { $x }; // original expression
let result_int = unsafe { std::mem::transmute::<_, i32>(result) };
if result_int != 0 {
return Err(Error::FailedCall(format!(
"errcode={} {}:{}:{}:{}",
result_int,
module_path!(),
file!(),
line!(),
column!()
))
.into());
}
result
}};
}
macro_rules! call_vpx_ptr {
($x:expr) => {{
let result = unsafe { $x }; // original expression
let result_int = unsafe { std::mem::transmute::<_, isize>(result) };
if result_int == 0 {
return Err(Error::BadPtr(format!(
"errcode={} {}:{}:{}:{}",
result_int,
module_path!(),
file!(),
line!(),
column!()
))
.into());
}
result
}};
}
impl EncoderApi for VpxEncoder {
fn new(cfg: crate::codec::EncoderCfg) -> ResultType<Self>
where
Self: Sized,
{
match cfg {
crate::codec::EncoderCfg::VPX(config) => {
let i;
if cfg!(feature = "VP8") {
i = match config.codec {
VpxVideoCodecId::VP8 => call_vpx_ptr!(vpx_codec_vp8_cx()),
VpxVideoCodecId::VP9 => call_vpx_ptr!(vpx_codec_vp9_cx()),
};
} else {
i = call_vpx_ptr!(vpx_codec_vp9_cx());
}
let mut c = unsafe { std::mem::MaybeUninit::zeroed().assume_init() };
call_vpx!(vpx_codec_enc_config_default(i, &mut c, 0));
// https://www.webmproject.org/docs/encoder-parameters/
// default: c.rc_min_quantizer = 0, c.rc_max_quantizer = 63
// try rc_resize_allowed later
c.g_w = config.width;
c.g_h = config.height;
c.g_timebase.num = config.timebase[0];
c.g_timebase.den = config.timebase[1];
c.rc_target_bitrate = config.bitrate;
c.rc_undershoot_pct = 95;
c.rc_dropframe_thresh = 25;
c.g_threads = if config.num_threads == 0 {
num_cpus::get() as _
} else {
config.num_threads
};
c.g_error_resilient = VPX_ERROR_RESILIENT_DEFAULT;
// https://developers.google.com/media/vp9/bitrate-modes/
// Constant Bitrate mode (CBR) is recommended for live streaming with VP9.
c.rc_end_usage = vpx_rc_mode::VPX_CBR;
// c.kf_min_dist = 0;
// c.kf_max_dist = 999999;
c.kf_mode = vpx_kf_mode::VPX_KF_DISABLED; // reduce bandwidth a lot
/*
The VPX encoder supports two-pass encoding, which exists for rate control:
the whole encode runs twice, and the first pass produces control parameters
that steer the second pass, giving the best PSNR at the same bitrate.
*/
let mut ctx = Default::default();
call_vpx!(vpx_codec_enc_init_ver(
&mut ctx,
i,
&c,
0,
VPX_ENCODER_ABI_VERSION as _
));
if config.codec == VpxVideoCodecId::VP9 {
// set encoder internal speed settings
// in ffmpeg, it is --speed option
/*
set to 0 or a positive value 1-16, the codec will try to adapt its
complexity depending on the time it spends encoding. Increasing this
number will make the speed go up and the quality go down.
Negative values mean strict enforcement of this
while positive values are adaptive
*/
/* https://developers.google.com/media/vp9/live-encoding
Speed 5 to 8 should be used for live / real-time encoding.
Lower numbers (5 or 6) are higher quality but require more CPU power.
Higher numbers (7 or 8) will be lower quality but more manageable for lower latency
use cases and also for lower CPU power devices such as mobile.
*/
call_vpx!(vpx_codec_control_(&mut ctx, VP8E_SET_CPUUSED as _, 7,));
// set row level multi-threading
/*
as some people in comments and below have already commented,
more recent versions of libvpx support -row-mt 1 to enable tile row
multi-threading. This can increase the number of tiles by up to 4x in VP9
(since the max number of tile rows is 4, regardless of video height).
To enable this, use -tile-rows N where N is the number of tile rows in
log2 units (so -tile-rows 1 means 2 tile rows and -tile-rows 2 means 4 tile
rows). The total number of active threads will then be equal to
$tile_rows * $tile_columns
*/
call_vpx!(vpx_codec_control_(
&mut ctx,
VP9E_SET_ROW_MT as _,
1 as c_int
));
call_vpx!(vpx_codec_control_(
&mut ctx,
VP9E_SET_TILE_COLUMNS as _,
4 as c_int
));
}
Ok(Self {
ctx,
width: config.width as _,
height: config.height as _,
})
}
_ => Err(anyhow!("encoder type mismatch")),
}
}
fn encode_to_message(&mut self, frame: &[u8], ms: i64) -> ResultType<Message> {
let mut frames = Vec::new();
for ref frame in self
.encode(ms, frame, STRIDE_ALIGN)
.with_context(|| "Failed to encode")?
{
frames.push(VpxEncoder::create_frame(frame));
}
for ref frame in self.flush().with_context(|| "Failed to flush")? {
frames.push(VpxEncoder::create_frame(frame));
}
// to-do: flush periodically, e.g. 1 second
if frames.len() > 0 {
Ok(VpxEncoder::create_msg(frames))
} else {
Err(anyhow!("no valid frame"))
}
}
fn use_yuv(&self) -> bool {
true
}
fn set_bitrate(&mut self, bitrate: u32) -> ResultType<()> {
let mut new_enc_cfg = unsafe { *self.ctx.config.enc.to_owned() };
new_enc_cfg.rc_target_bitrate = bitrate;
call_vpx!(vpx_codec_enc_config_set(&mut self.ctx, &new_enc_cfg));
return Ok(());
}
}
impl VpxEncoder {
pub fn encode(&mut self, pts: i64, data: &[u8], stride_align: usize) -> Result<EncodeFrames> {
assert!(2 * data.len() >= 3 * self.width * self.height);
let mut image = Default::default();
call_vpx_ptr!(vpx_img_wrap(
&mut image,
vpx_img_fmt::VPX_IMG_FMT_I420,
self.width as _,
self.height as _,
stride_align as _,
data.as_ptr() as _,
));
call_vpx!(vpx_codec_encode(
&mut self.ctx,
&image,
pts as _,
1, // Duration
0, // Flags
VPX_DL_REALTIME as _,
));
Ok(EncodeFrames {
ctx: &mut self.ctx,
iter: ptr::null(),
})
}
/// Notify the encoder to return any pending packets
pub fn flush(&mut self) -> Result<EncodeFrames> {
call_vpx!(vpx_codec_encode(
&mut self.ctx,
ptr::null(),
-1, // PTS
1, // Duration
0, // Flags
VPX_DL_REALTIME as _,
));
Ok(EncodeFrames {
ctx: &mut self.ctx,
iter: ptr::null(),
})
}
#[inline]
fn create_msg(vp9s: Vec<EncodedVideoFrame>) -> Message {
let mut msg_out = Message::new();
let mut vf = VideoFrame::new();
vf.set_vp9s(EncodedVideoFrames {
frames: vp9s.into(),
..Default::default()
});
msg_out.set_video_frame(vf);
msg_out
}
#[inline]
fn create_frame(frame: &EncodeFrame) -> EncodedVideoFrame {
EncodedVideoFrame {
data: frame.data.to_vec(),
key: frame.key,
pts: frame.pts,
..Default::default()
}
}
}
impl Drop for VpxEncoder {
fn drop(&mut self) {
unsafe {
let result = vpx_codec_destroy(&mut self.ctx);
if result != VPX_CODEC_OK {
panic!("failed to destroy vpx codec");
}
}
}
}
#[derive(Clone, Copy, Debug)]
pub struct EncodeFrame<'a> {
/// Compressed data.
pub data: &'a [u8],
/// Whether the frame is a keyframe.
pub key: bool,
/// Presentation timestamp (in timebase units).
pub pts: i64,
}
#[derive(Clone, Copy, Debug)]
pub struct VpxEncoderConfig {
/// The width (in pixels).
pub width: c_uint,
/// The height (in pixels).
pub height: c_uint,
/// The timebase numerator and denominator (in seconds).
pub timebase: [c_int; 2],
/// The target bitrate (in kilobits per second).
pub bitrate: c_uint,
/// The codec
pub codec: VpxVideoCodecId,
pub num_threads: u32,
}
#[derive(Clone, Copy, Debug)]
pub struct VpxDecoderConfig {
pub codec: VpxVideoCodecId,
pub num_threads: u32,
}
pub struct EncodeFrames<'a> {
ctx: &'a mut vpx_codec_ctx_t,
iter: vpx_codec_iter_t,
}
impl<'a> Iterator for EncodeFrames<'a> {
type Item = EncodeFrame<'a>;
fn next(&mut self) -> Option<Self::Item> {
loop {
unsafe {
let pkt = vpx_codec_get_cx_data(self.ctx, &mut self.iter);
if pkt.is_null() {
return None;
} else if (*pkt).kind == vpx_codec_cx_pkt_kind::VPX_CODEC_CX_FRAME_PKT {
let f = &(*pkt).data.frame;
return Some(Self::Item {
data: slice::from_raw_parts(f.buf as _, f.sz as _),
key: (f.flags & VPX_FRAME_IS_KEY) != 0,
pts: f.pts,
});
} else {
// Ignore the packet.
}
}
}
}
}
impl VpxDecoder {
/// Create a new decoder
///
/// # Errors
///
/// The function may fail if the underlying libvpx does not provide
/// the VP9 decoder.
pub fn new(config: VpxDecoderConfig) -> Result<Self> {
// This is sound because `vpx_codec_ctx` is a repr(C) struct without any field that can
// cause UB if uninitialized.
let i;
if cfg!(feature = "VP8") {
i = match config.codec {
VpxVideoCodecId::VP8 => call_vpx_ptr!(vpx_codec_vp8_dx()),
VpxVideoCodecId::VP9 => call_vpx_ptr!(vpx_codec_vp9_dx()),
};
} else {
i = call_vpx_ptr!(vpx_codec_vp9_dx());
}
let mut ctx = Default::default();
let cfg = vpx_codec_dec_cfg_t {
threads: if config.num_threads == 0 {
num_cpus::get() as _
} else {
config.num_threads
},
w: 0,
h: 0,
};
/*
unsafe {
println!("{}", vpx_codec_get_caps(i));
}
*/
call_vpx!(vpx_codec_dec_init_ver(
&mut ctx,
i,
&cfg,
0,
VPX_DECODER_ABI_VERSION as _,
));
Ok(Self { ctx })
}
pub fn decode2rgb(&mut self, data: &[u8], rgba: bool) -> Result<Vec<u8>> {
let mut img = Image::new();
for frame in self.decode(data)? {
drop(img);
img = frame;
}
for frame in self.flush()? {
drop(img);
img = frame;
}
if img.is_null() {
Ok(Vec::new())
} else {
let mut out = Default::default();
img.rgb(1, rgba, &mut out);
Ok(out)
}
}
/// Feed some compressed data to the decoder
///
/// The `data` slice is sent to the decoder
///
/// It matches a call to `vpx_codec_decode`.
pub fn decode(&mut self, data: &[u8]) -> Result<DecodeFrames> {
call_vpx!(vpx_codec_decode(
&mut self.ctx,
data.as_ptr(),
data.len() as _,
ptr::null_mut(),
0,
));
Ok(DecodeFrames {
ctx: &mut self.ctx,
iter: ptr::null(),
})
}
/// Notify the decoder to return any pending frame
pub fn flush(&mut self) -> Result<DecodeFrames> {
call_vpx!(vpx_codec_decode(
&mut self.ctx,
ptr::null(),
0,
ptr::null_mut(),
0
));
Ok(DecodeFrames {
ctx: &mut self.ctx,
iter: ptr::null(),
})
}
}
impl Drop for VpxDecoder {
fn drop(&mut self) {
unsafe {
let result = vpx_codec_destroy(&mut self.ctx);
if result != VPX_CODEC_OK {
panic!("failed to destroy vpx codec");
}
}
}
}
pub struct DecodeFrames<'a> {
ctx: &'a mut vpx_codec_ctx_t,
iter: vpx_codec_iter_t,
}
impl<'a> Iterator for DecodeFrames<'a> {
type Item = Image;
fn next(&mut self) -> Option<Self::Item> {
let img = unsafe { vpx_codec_get_frame(self.ctx, &mut self.iter) };
if img.is_null() {
return None;
} else {
return Some(Image(img));
}
}
}
// https://chromium.googlesource.com/webm/libvpx/+/bali/vpx/src/vpx_image.c
pub struct Image(*mut vpx_image_t);
impl Image {
#[inline]
pub fn new() -> Self {
Self(std::ptr::null_mut())
}
#[inline]
pub fn is_null(&self) -> bool {
self.0.is_null()
}
#[inline]
pub fn width(&self) -> usize {
self.inner().d_w as _
}
#[inline]
pub fn height(&self) -> usize {
self.inner().d_h as _
}
#[inline]
pub fn format(&self) -> vpx_img_fmt_t {
// VPX_IMG_FMT_I420
self.inner().fmt
}
#[inline]
pub fn inner(&self) -> &vpx_image_t {
unsafe { &*self.0 }
}
#[inline]
pub fn stride(&self, iplane: usize) -> i32 {
self.inner().stride[iplane]
}
pub fn rgb(&self, stride_align: usize, rgba: bool, dst: &mut Vec<u8>) {
let h = self.height();
let mut w = self.width();
let bps = if rgba { 4 } else { 3 };
w = (w + stride_align - 1) & !(stride_align - 1);
dst.resize(h * w * bps, 0);
let img = self.inner();
unsafe {
if rgba {
super::I420ToARGB(
img.planes[0],
img.stride[0],
img.planes[1],
img.stride[1],
img.planes[2],
img.stride[2],
dst.as_mut_ptr(),
(w * bps) as _,
self.width() as _,
self.height() as _,
);
} else {
super::I420ToRAW(
img.planes[0],
img.stride[0],
img.planes[1],
img.stride[1],
img.planes[2],
img.stride[2],
dst.as_mut_ptr(),
(w * bps) as _,
self.width() as _,
self.height() as _,
);
}
}
}
#[inline]
pub fn data(&self) -> (&[u8], &[u8], &[u8]) {
unsafe {
let img = self.inner();
let h = (img.d_h as usize + 1) & !1;
let n = img.stride[0] as usize * h;
let y = slice::from_raw_parts(img.planes[0], n);
let n = img.stride[1] as usize * (h >> 1);
let u = slice::from_raw_parts(img.planes[1], n);
let v = slice::from_raw_parts(img.planes[2], n);
(y, u, v)
}
}
}
impl Drop for Image {
fn drop(&mut self) {
if !self.0.is_null() {
unsafe { vpx_img_free(self.0) };
}
}
}
unsafe impl Send for vpx_codec_ctx_t {}
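A minimal sketch of standalone use of the renamed decoder, assuming complete VP9 packets; decode2rgb drains decode() and flush() and converts the last frame to RGBA:

use scrap::vpxcodec::{VpxDecoder, VpxDecoderConfig, VpxVideoCodecId};

fn decode_to_rgba(packet: &[u8]) -> Vec<u8> {
    let mut dec = VpxDecoder::new(VpxDecoderConfig {
        codec: VpxVideoCodecId::VP9,
        num_threads: 0, // 0 falls back to num_cpus
    })
    .unwrap();
    dec.decode2rgb(packet, true).unwrap()
}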

View File

@@ -1,5 +1,5 @@
use crate::x11;
use std::{io, ops};
use std::{io, ops, time::Duration};
pub struct Capturer(x11::Capturer);
@@ -16,7 +16,7 @@ impl Capturer {
self.0.display().rect().h as usize
}
pub fn frame<'a>(&'a mut self, _timeout_ms: u32) -> io::Result<Frame<'a>> {
pub fn frame<'a>(&'a mut self, _timeout: Duration) -> io::Result<Frame<'a>> {
Ok(Frame(self.0.frame()?))
}
}

View File

@@ -282,7 +282,11 @@ impl CapturerMag {
let y = GetSystemMetrics(SM_YVIRTUALSCREEN);
let w = GetSystemMetrics(SM_CXVIRTUALSCREEN);
let h = GetSystemMetrics(SM_CYVIRTUALSCREEN);
if !(origin.0 == x as _ && origin.1 == y as _ && width == w as _ && height == h as _) {
if !(origin.0 == x as i32
&& origin.1 == y as i32
&& width == w as usize
&& height == h as usize)
{
return Err(Error::new(
ErrorKind::Other,
format!(
@@ -510,10 +514,10 @@ impl CapturerMag {
let y = GetSystemMetrics(SM_YVIRTUALSCREEN);
let w = GetSystemMetrics(SM_CXVIRTUALSCREEN);
let h = GetSystemMetrics(SM_CYVIRTUALSCREEN);
if !(self.rect.left == x as _
&& self.rect.top == y as _
&& self.rect.right == (x + w) as _
&& self.rect.bottom == (y + h) as _)
if !(self.rect.left == x as i32
&& self.rect.top == y as i32
&& self.rect.right == (x + w) as i32
&& self.rect.bottom == (y + h) as i32)
{
return Err(Error::new(
ErrorKind::Other,