add zero copy mode hardware codec for windows (#6778)

Signed-off-by: 21pages <pages21@163.com>
This commit is contained in:
21pages
2024-01-02 16:58:10 +08:00
committed by GitHub
parent f47faa548b
commit 89150317e1
55 changed files with 2540 additions and 429 deletions

View File

@@ -729,6 +729,7 @@ message Misc {
int32 refresh_video_display = 31;
ToggleVirtualDisplay toggle_virtual_display = 32;
TogglePrivacyMode toggle_privacy_mode = 33;
SupportedEncoding supported_encoding = 34;
}
}

View File

@@ -492,7 +492,6 @@ impl Config {
suffix: &str,
) -> T {
let file = Self::file_(suffix);
log::debug!("Configuration path: {}", file.display());
let cfg = load_path(file);
if suffix.is_empty() {
log::trace!("{:?}", cfg);
@@ -1488,6 +1487,26 @@ impl HwCodecConfig {
}
}
/// Persisted availability cache for the gpucodec (zero-copy GPU) encoders/decoders,
/// stored in its own "_gpucodec" config file (parallel to `HwCodecConfig`).
#[derive(Debug, Default, Serialize, Deserialize, Clone)]
pub struct GpucodecConfig {
// Serialized availability description; the exact format is produced by the
// gpucodec availability check — TODO confirm against the writer side.
#[serde(default, deserialize_with = "deserialize_string")]
pub available: String,
}
impl GpucodecConfig {
    /// Read the persisted gpucodec config from the "_gpucodec" config file.
    pub fn load() -> GpucodecConfig {
        Config::load_::<GpucodecConfig>("_gpucodec")
    }

    /// Write this config back to the "_gpucodec" config file.
    pub fn store(&self) {
        Config::store_(self, "_gpucodec");
    }

    /// Reset the persisted config by storing a default (empty) value.
    pub fn clear() {
        Self::default().store();
    }
}
#[derive(Debug, Default, Serialize, Deserialize, Clone)]
pub struct UserDefaultConfig {
#[serde(default, deserialize_with = "deserialize_hashmap_string_string")]

View File

@@ -58,3 +58,6 @@ gstreamer-video = { version = "0.16", optional = true }
[target.'cfg(any(target_os = "windows", target_os = "linux"))'.dependencies]
hwcodec = { git = "https://github.com/21pages/hwcodec", branch = "stable", optional = true }
[target.'cfg(target_os = "windows")'.dependencies]
gpucodec = { git = "https://github.com/21pages/gpucodec", optional = true }

View File

@@ -6,8 +6,7 @@ use hbb_common::{
use scrap::{
aom::{AomDecoder, AomEncoder, AomEncoderConfig},
codec::{EncoderApi, EncoderCfg, Quality as Q},
convert_to_yuv, Capturer, Display, TraitCapturer, VpxDecoder, VpxDecoderConfig, VpxEncoder,
VpxEncoderConfig,
Capturer, Display, TraitCapturer, VpxDecoder, VpxDecoderConfig, VpxEncoder, VpxEncoderConfig,
VpxVideoCodecId::{self, *},
STRIDE_ALIGN,
};
@@ -122,7 +121,8 @@ fn test_vpx(
match c.frame(std::time::Duration::from_millis(30)) {
Ok(frame) => {
let tmp_timer = Instant::now();
convert_to_yuv(&frame, encoder.yuvfmt(), &mut yuv, &mut mid_data);
let frame = frame.to(encoder.yuvfmt(), &mut yuv, &mut mid_data).unwrap();
let yuv = frame.yuv().unwrap();
for ref frame in encoder
.encode(start.elapsed().as_millis() as _, &yuv, STRIDE_ALIGN)
.unwrap()
@@ -199,7 +199,8 @@ fn test_av1(
match c.frame(std::time::Duration::from_millis(30)) {
Ok(frame) => {
let tmp_timer = Instant::now();
convert_to_yuv(&frame, encoder.yuvfmt(), &mut yuv, &mut mid_data);
let frame = frame.to(encoder.yuvfmt(), &mut yuv, &mut mid_data).unwrap();
let yuv = frame.yuv().unwrap();
for ref frame in encoder
.encode(start.elapsed().as_millis() as _, &yuv, STRIDE_ALIGN)
.unwrap()
@@ -239,10 +240,7 @@ fn test_av1(
#[cfg(feature = "hwcodec")]
mod hw {
use hwcodec::ffmpeg::CodecInfo;
use scrap::{
codec::HwEncoderConfig,
hwcodec::{HwDecoder, HwEncoder},
};
use scrap::hwcodec::{HwDecoder, HwEncoder, HwEncoderConfig};
use super::*;
@@ -295,7 +293,8 @@ mod hw {
match c.frame(std::time::Duration::from_millis(30)) {
Ok(frame) => {
let tmp_timer = Instant::now();
convert_to_yuv(&frame, encoder.yuvfmt(), &mut yuv, &mut mid_data);
let frame = frame.to(encoder.yuvfmt(), &mut yuv, &mut mid_data).unwrap();
let yuv = frame.yuv().unwrap();
for ref frame in encoder.encode(&yuv).unwrap() {
size += frame.data.len();

View File

@@ -24,7 +24,7 @@ fn get_display(i: usize) -> Display {
fn record(i: usize) {
use std::time::Duration;
use scrap::TraitFrame;
use scrap::{Frame, TraitPixelBuffer};
for d in Display::all().unwrap() {
println!("{:?} {} {}", d.origin(), d.width(), d.height());
@@ -44,8 +44,11 @@ fn record(i: usize) {
println!("Filter window for cls {} name {}", wnd_cls, wnd_name);
}
let captured_frame = capture_mag.frame(Duration::from_millis(0)).unwrap();
let frame = captured_frame.data();
let frame = capture_mag.frame(Duration::from_millis(0)).unwrap();
let Frame::PixelBuffer(frame) = frame else {
return;
};
let frame = frame.data();
println!("Capture data len: {}, Saving...", frame.len());
let mut bitflipped = Vec::with_capacity(w * h * 4);
@@ -81,6 +84,9 @@ fn record(i: usize) {
}
let frame = capture_mag.frame(Duration::from_millis(0)).unwrap();
let Frame::PixelBuffer(frame) = frame else {
return;
};
println!("Capture data len: {}, Saving...", frame.data().len());
let mut raw = Vec::new();

View File

@@ -1,6 +1,6 @@
use std::time::Duration;
use scrap::TraitFrame;
use scrap::{Frame, TraitPixelBuffer};
extern crate scrap;
@@ -36,6 +36,9 @@ fn main() {
match capturer.frame(Duration::from_millis(0)) {
Ok(frame) => {
// Write the frame, removing end-of-row padding.
let Frame::PixelBuffer(frame) = frame else {
return;
};
let stride = frame.stride()[0];
let rowlen = 4 * w;
for row in frame.data().chunks(stride) {

View File

@@ -17,7 +17,7 @@ use scrap::codec::{EncoderApi, EncoderCfg, Quality as Q};
use webm::mux;
use webm::mux::Track;
use scrap::{convert_to_yuv, vpxcodec as vpx_encode};
use scrap::vpxcodec as vpx_encode;
use scrap::{Capturer, Display, TraitCapturer, STRIDE_ALIGN};
const USAGE: &'static str = "
@@ -152,7 +152,7 @@ fn main() -> io::Result<()> {
if let Ok(frame) = c.frame(Duration::from_millis(0)) {
let ms = time.as_secs() * 1000 + time.subsec_millis() as u64;
convert_to_yuv(&frame, vpx.yuvfmt(), &mut yuv, &mut mid_data);
frame.to(vpx.yuvfmt(), &mut yuv, &mut mid_data).unwrap();
for frame in vpx.encode(ms as i64, &yuv, STRIDE_ALIGN).unwrap() {
vt.add_frame(frame.data, frame.pts as u64 * 1_000_000, frame.key);
}

View File

@@ -6,7 +6,7 @@ use std::io::ErrorKind::WouldBlock;
use std::thread;
use std::time::Duration;
use scrap::{Capturer, Display, TraitCapturer, TraitFrame};
use scrap::{Capturer, Display, Frame, TraitCapturer, TraitPixelBuffer};
fn main() {
let n = Display::all().unwrap().len();
@@ -46,6 +46,9 @@ fn record(i: usize) {
}
}
};
let Frame::PixelBuffer(frame) = frame else {
return;
};
let buffer = frame.data();
println!("Captured data len: {}, Saving...", buffer.len());
@@ -96,6 +99,9 @@ fn record(i: usize) {
}
}
};
let Frame::PixelBuffer(frame) = frame else {
return;
};
let buffer = frame.data();
println!("Captured data len: {}, Saving...", buffer.len());

View File

@@ -1,5 +1,5 @@
use crate::android::ffi::*;
use crate::Pixfmt;
use crate::{Frame, Pixfmt};
use lazy_static::lazy_static;
use serde_json::Value;
use std::collections::HashMap;
@@ -43,26 +43,30 @@ impl crate::TraitCapturer for Capturer {
unsafe {
std::ptr::copy_nonoverlapping(buf.as_ptr(), self.rgba.as_mut_ptr(), buf.len())
};
Ok(Frame::new(&self.rgba, self.width(), self.height()))
Ok(Frame::PixelBuffer(PixelBuffer::new(
&self.rgba,
self.width(),
self.height(),
)))
} else {
return Err(io::ErrorKind::WouldBlock.into());
}
}
}
pub struct Frame<'a> {
pub struct PixelBuffer<'a> {
data: &'a [u8],
width: usize,
height: usize,
stride: Vec<usize>,
}
impl<'a> Frame<'a> {
impl<'a> PixelBuffer<'a> {
pub fn new(data: &'a [u8], width: usize, height: usize) -> Self {
let stride0 = data.len() / height;
let mut stride = Vec::new();
stride.push(stride0);
Frame {
PixelBuffer {
data,
width,
height,
@@ -71,7 +75,7 @@ impl<'a> Frame<'a> {
}
}
impl<'a> crate::TraitFrame for Frame<'a> {
impl<'a> crate::TraitPixelBuffer for PixelBuffer<'a> {
fn data(&self) -> &[u8] {
self.data
}

View File

@@ -9,7 +9,7 @@ include!(concat!(env!("OUT_DIR"), "/aom_ffi.rs"));
use crate::codec::{base_bitrate, codec_thread_num, Quality};
use crate::{codec::EncoderApi, EncodeFrame, STRIDE_ALIGN};
use crate::{common::GoogleImage, generate_call_macro, generate_call_ptr_macro, Error, Result};
use crate::{EncodeYuvFormat, Pixfmt};
use crate::{EncodeInput, EncodeYuvFormat, Pixfmt};
use hbb_common::{
anyhow::{anyhow, Context},
bytes::Bytes,
@@ -249,10 +249,10 @@ impl EncoderApi for AomEncoder {
}
}
fn encode_to_message(&mut self, frame: &[u8], ms: i64) -> ResultType<VideoFrame> {
fn encode_to_message(&mut self, input: EncodeInput, ms: i64) -> ResultType<VideoFrame> {
let mut frames = Vec::new();
for ref frame in self
.encode(ms, frame, STRIDE_ALIGN)
.encode(ms, input.yuv()?, STRIDE_ALIGN)
.with_context(|| "Failed to encode")?
{
frames.push(Self::create_frame(frame));
@@ -268,6 +268,11 @@ impl EncoderApi for AomEncoder {
self.yuvfmt.clone()
}
#[cfg(feature = "gpucodec")]
fn input_texture(&self) -> bool {
false
}
fn set_quality(&mut self, quality: Quality) -> ResultType<()> {
let mut c = unsafe { *self.ctx.config.enc.to_owned() };
let (q_min, q_max, b) = Self::convert_quality(quality);
@@ -287,6 +292,10 @@ impl EncoderApi for AomEncoder {
let c = unsafe { *self.ctx.config.enc.to_owned() };
c.rc_target_bitrate
}
fn support_abr(&self) -> bool {
true
}
}
impl AomEncoder {

View File

@@ -1,9 +1,12 @@
use std::{
collections::HashMap,
ffi::c_void,
ops::{Deref, DerefMut},
sync::{Arc, Mutex},
};
#[cfg(feature = "gpucodec")]
use crate::gpucodec::*;
#[cfg(feature = "hwcodec")]
use crate::hwcodec::*;
#[cfg(feature = "mediacodec")]
@@ -14,7 +17,7 @@ use crate::{
aom::{self, AomDecoder, AomEncoder, AomEncoderConfig},
common::GoogleImage,
vpxcodec::{self, VpxDecoder, VpxDecoderConfig, VpxEncoder, VpxEncoderConfig, VpxVideoCodecId},
CodecName, EncodeYuvFormat, ImageRgb,
CodecName, EncodeInput, EncodeYuvFormat, ImageRgb,
};
use hbb_common::{
@@ -30,29 +33,25 @@ use hbb_common::{
tokio::time::Instant,
ResultType,
};
#[cfg(any(feature = "hwcodec", feature = "mediacodec"))]
#[cfg(any(feature = "hwcodec", feature = "mediacodec", feature = "gpucodec"))]
use hbb_common::{config::Config2, lazy_static};
lazy_static::lazy_static! {
static ref PEER_DECODINGS: Arc<Mutex<HashMap<i32, SupportedDecoding>>> = Default::default();
static ref CODEC_NAME: Arc<Mutex<CodecName>> = Arc::new(Mutex::new(CodecName::VP9));
static ref ENCODE_CODEC_NAME: Arc<Mutex<CodecName>> = Arc::new(Mutex::new(CodecName::VP9));
static ref THREAD_LOG_TIME: Arc<Mutex<Option<Instant>>> = Arc::new(Mutex::new(None));
}
#[derive(Debug, Clone)]
pub struct HwEncoderConfig {
pub name: String,
pub width: usize,
pub height: usize,
pub quality: Quality,
pub keyframe_interval: Option<usize>,
}
pub const ENCODE_NEED_SWITCH: &'static str = "ENCODE_NEED_SWITCH";
#[derive(Debug, Clone)]
pub enum EncoderCfg {
VPX(VpxEncoderConfig),
AOM(AomEncoderConfig),
#[cfg(feature = "hwcodec")]
HW(HwEncoderConfig),
#[cfg(feature = "gpucodec")]
GPU(GpuEncoderConfig),
}
pub trait EncoderApi {
@@ -60,13 +59,18 @@ pub trait EncoderApi {
where
Self: Sized;
fn encode_to_message(&mut self, frame: &[u8], ms: i64) -> ResultType<VideoFrame>;
fn encode_to_message(&mut self, frame: EncodeInput, ms: i64) -> ResultType<VideoFrame>;
fn yuvfmt(&self) -> EncodeYuvFormat;
#[cfg(feature = "gpucodec")]
fn input_texture(&self) -> bool;
fn set_quality(&mut self, quality: Quality) -> ResultType<()>;
fn bitrate(&self) -> u32;
fn support_abr(&self) -> bool;
}
pub struct Encoder {
@@ -93,6 +97,8 @@ pub struct Decoder {
av1: Option<AomDecoder>,
#[cfg(feature = "hwcodec")]
hw: HwDecoders,
#[cfg(feature = "gpucodec")]
gpu: GpuDecoders,
#[cfg(feature = "hwcodec")]
i420: Vec<u8>,
#[cfg(feature = "mediacodec")]
@@ -101,9 +107,10 @@ pub struct Decoder {
#[derive(Debug, Clone)]
pub enum EncodingUpdate {
New(SupportedDecoding),
Remove,
NewOnlyVP9,
Update(i32, SupportedDecoding),
Remove(i32),
NewOnlyVP9(i32),
Check,
}
impl Encoder {
@@ -123,26 +130,38 @@ impl Encoder {
codec: Box::new(hw),
}),
Err(e) => {
check_config_process();
*CODEC_NAME.lock().unwrap() = CodecName::VP9;
log::error!("new hw encoder failed: {e:?}, clear config");
hbb_common::config::HwCodecConfig::clear();
*ENCODE_CODEC_NAME.lock().unwrap() = CodecName::VP9;
Err(e)
}
},
#[cfg(feature = "gpucodec")]
EncoderCfg::GPU(_) => match GpuEncoder::new(config, i444) {
Ok(tex) => Ok(Encoder {
codec: Box::new(tex),
}),
Err(e) => {
log::error!("new gpu encoder failed: {e:?}, clear config");
hbb_common::config::GpucodecConfig::clear();
*ENCODE_CODEC_NAME.lock().unwrap() = CodecName::VP9;
Err(e)
}
},
#[cfg(not(feature = "hwcodec"))]
_ => Err(anyhow!("unsupported encoder type")),
}
}
pub fn update(id: i32, update: EncodingUpdate) {
pub fn update(update: EncodingUpdate) {
log::info!("update:{:?}", update);
let mut decodings = PEER_DECODINGS.lock().unwrap();
match update {
EncodingUpdate::New(decoding) => {
EncodingUpdate::Update(id, decoding) => {
decodings.insert(id, decoding);
}
EncodingUpdate::Remove => {
EncodingUpdate::Remove(id) => {
decodings.remove(&id);
}
EncodingUpdate::NewOnlyVP9 => {
EncodingUpdate::NewOnlyVP9(id) => {
decodings.insert(
id,
SupportedDecoding {
@@ -151,32 +170,51 @@ impl Encoder {
},
);
}
EncodingUpdate::Check => {}
}
let vp8_useable = decodings.len() > 0 && decodings.iter().all(|(_, s)| s.ability_vp8 > 0);
let av1_useable = decodings.len() > 0 && decodings.iter().all(|(_, s)| s.ability_av1 > 0);
let _all_support_h264_decoding =
decodings.len() > 0 && decodings.iter().all(|(_, s)| s.ability_h264 > 0);
let _all_support_h265_decoding =
decodings.len() > 0 && decodings.iter().all(|(_, s)| s.ability_h265 > 0);
#[allow(unused_mut)]
let mut h264_name = None;
let mut h264gpu_encoding = false;
#[allow(unused_mut)]
let mut h265_name = None;
#[cfg(feature = "hwcodec")]
{
if enable_hwcodec_option() {
let best = HwEncoder::best();
let h264_useable =
decodings.len() > 0 && decodings.iter().all(|(_, s)| s.ability_h264 > 0);
let h265_useable =
decodings.len() > 0 && decodings.iter().all(|(_, s)| s.ability_h265 > 0);
if h264_useable {
h264_name = best.h264.map_or(None, |c| Some(c.name));
let mut h265gpu_encoding = false;
#[cfg(feature = "gpucodec")]
if enable_gpucodec_option() {
if _all_support_h264_decoding {
if GpuEncoder::available(CodecName::H264GPU).len() > 0 {
h264gpu_encoding = true;
}
if h265_useable {
h265_name = best.h265.map_or(None, |c| Some(c.name));
}
if _all_support_h265_decoding {
if GpuEncoder::available(CodecName::H265GPU).len() > 0 {
h265gpu_encoding = true;
}
}
}
let mut name = CODEC_NAME.lock().unwrap();
#[allow(unused_mut)]
let mut h264hw_encoding = None;
#[allow(unused_mut)]
let mut h265hw_encoding = None;
#[cfg(feature = "hwcodec")]
if enable_hwcodec_option() {
let best = HwEncoder::best();
if _all_support_h264_decoding {
h264hw_encoding = best.h264.map_or(None, |c| Some(c.name));
}
if _all_support_h265_decoding {
h265hw_encoding = best.h265.map_or(None, |c| Some(c.name));
}
}
let h264_useable =
_all_support_h264_decoding && (h264gpu_encoding || h264hw_encoding.is_some());
let h265_useable =
_all_support_h265_decoding && (h265gpu_encoding || h265hw_encoding.is_some());
let mut name = ENCODE_CODEC_NAME.lock().unwrap();
let mut preference = PreferCodec::Auto;
let preferences: Vec<_> = decodings
.iter()
@@ -184,8 +222,8 @@ impl Encoder {
s.prefer == PreferCodec::VP9.into()
|| s.prefer == PreferCodec::VP8.into() && vp8_useable
|| s.prefer == PreferCodec::AV1.into() && av1_useable
|| s.prefer == PreferCodec::H264.into() && h264_name.is_some()
|| s.prefer == PreferCodec::H265.into() && h265_name.is_some()
|| s.prefer == PreferCodec::H264.into() && h264_useable
|| s.prefer == PreferCodec::H265.into() && h265_useable
})
.map(|(_, s)| s.prefer)
.collect();
@@ -205,26 +243,46 @@ impl Encoder {
auto_codec = CodecName::VP8
}
match preference {
PreferCodec::VP8 => *name = CodecName::VP8,
PreferCodec::VP9 => *name = CodecName::VP9,
PreferCodec::AV1 => *name = CodecName::AV1,
PreferCodec::H264 => *name = h264_name.map_or(auto_codec, |c| CodecName::H264(c)),
PreferCodec::H265 => *name = h265_name.map_or(auto_codec, |c| CodecName::H265(c)),
PreferCodec::Auto => *name = auto_codec,
*name = match preference {
PreferCodec::VP8 => CodecName::VP8,
PreferCodec::VP9 => CodecName::VP9,
PreferCodec::AV1 => CodecName::AV1,
PreferCodec::H264 => {
if h264gpu_encoding {
CodecName::H264GPU
} else if let Some(v) = h264hw_encoding {
CodecName::H264HW(v)
} else {
auto_codec
}
}
PreferCodec::H265 => {
if h265gpu_encoding {
CodecName::H265GPU
} else if let Some(v) = h265hw_encoding {
CodecName::H265HW(v)
} else {
auto_codec
}
}
PreferCodec::Auto => auto_codec,
};
if decodings.len() > 0 {
log::info!(
"usable: vp8={vp8_useable}, av1={av1_useable}, h264={h264_useable}, h265={h265_useable}",
);
log::info!(
"connection count: {}, used preference: {:?}, encoder: {:?}",
decodings.len(),
preference,
*name
)
}
log::info!(
"connection count: {}, used preference: {:?}, encoder: {:?}",
decodings.len(),
preference,
*name
)
}
#[inline]
pub fn negotiated_codec() -> CodecName {
CODEC_NAME.lock().unwrap().clone()
ENCODE_CODEC_NAME.lock().unwrap().clone()
}
pub fn supported_encoding() -> SupportedEncoding {
@@ -243,12 +301,52 @@ impl Encoder {
#[cfg(feature = "hwcodec")]
if enable_hwcodec_option() {
let best = HwEncoder::best();
encoding.h264 = best.h264.is_some();
encoding.h265 = best.h265.is_some();
encoding.h264 |= best.h264.is_some();
encoding.h265 |= best.h265.is_some();
}
#[cfg(feature = "gpucodec")]
if enable_gpucodec_option() {
encoding.h264 |= GpuEncoder::available(CodecName::H264GPU).len() > 0;
encoding.h265 |= GpuEncoder::available(CodecName::H265GPU).len() > 0;
}
encoding
}
/// Record that the encoder actually in use differs from the negotiated one
/// (called after an encoder was created from `config`, e.g. when a hw/gpu
/// encoder failed and a software codec took over), by overwriting
/// ENCODE_CODEC_NAME with the codec `config` represents.
pub fn set_fallback(config: &EncoderCfg) {
let name = match config {
EncoderCfg::VPX(vpx) => match vpx.codec {
VpxVideoCodecId::VP8 => CodecName::VP8,
VpxVideoCodecId::VP9 => CodecName::VP9,
},
EncoderCfg::AOM(_) => CodecName::AV1,
#[cfg(feature = "hwcodec")]
EncoderCfg::HW(hw) => {
// Classify hw encoders by name substring; anything not h264 is treated as h265.
if hw.name.to_lowercase().contains("h264") {
CodecName::H264HW(hw.name.clone())
} else {
CodecName::H265HW(hw.name.clone())
}
}
#[cfg(feature = "gpucodec")]
EncoderCfg::GPU(gpu) => match gpu.feature.data_format {
gpucodec::gpu_common::DataFormat::H264 => CodecName::H264GPU,
gpucodec::gpu_common::DataFormat::H265 => CodecName::H265GPU,
_ => {
// Only H264/H265 are valid gpucodec outputs here; log and keep the current name.
log::error!(
"should not reach here, gpucodec not support {:?}",
gpu.feature.data_format
);
return;
}
},
};
let current = ENCODE_CODEC_NAME.lock().unwrap().clone();
if current != name {
log::info!("codec fallback: {:?} -> {:?}", current, name);
// NOTE(review): the mutex is taken twice (read above, write here), so a
// concurrent update could interleave — confirm callers serialize this path.
*ENCODE_CODEC_NAME.lock().unwrap() = name;
}
}
pub fn use_i444(config: &EncoderCfg) -> bool {
let decodings = PEER_DECODINGS.lock().unwrap().clone();
let prefer_i444 = decodings
@@ -260,14 +358,21 @@ impl Encoder {
VpxVideoCodecId::VP9 => decodings.iter().all(|d| d.1.i444.vp9),
},
EncoderCfg::AOM(_) => decodings.iter().all(|d| d.1.i444.av1),
#[cfg(feature = "hwcodec")]
EncoderCfg::HW(_) => false,
#[cfg(feature = "gpucodec")]
EncoderCfg::GPU(_) => false,
};
prefer_i444 && i444_useable && !decodings.is_empty()
}
}
impl Decoder {
pub fn supported_decodings(id_for_perfer: Option<&str>) -> SupportedDecoding {
pub fn supported_decodings(
id_for_perfer: Option<&str>,
_flutter: bool,
_luid: Option<i64>,
) -> SupportedDecoding {
let (prefer, prefer_chroma) = Self::preference(id_for_perfer);
#[allow(unused_mut)]
@@ -288,8 +393,21 @@ impl Decoder {
#[cfg(feature = "hwcodec")]
if enable_hwcodec_option() {
let best = HwDecoder::best();
decoding.ability_h264 = if best.h264.is_some() { 1 } else { 0 };
decoding.ability_h265 = if best.h265.is_some() { 1 } else { 0 };
decoding.ability_h264 |= if best.h264.is_some() { 1 } else { 0 };
decoding.ability_h265 |= if best.h265.is_some() { 1 } else { 0 };
}
#[cfg(feature = "gpucodec")]
if enable_gpucodec_option() && _flutter {
decoding.ability_h264 |= if GpuDecoder::available(CodecName::H264GPU, _luid).len() > 0 {
1
} else {
0
};
decoding.ability_h265 |= if GpuDecoder::available(CodecName::H265GPU, _luid).len() > 0 {
1
} else {
0
};
}
#[cfg(feature = "mediacodec")]
if enable_hwcodec_option() {
@@ -309,7 +427,33 @@ impl Decoder {
decoding
}
pub fn new() -> Decoder {
/// Report which codecs this Decoder instance actually instantiated.
/// `_flutter`: gpu (texture) decoders are only counted for the flutter UI.
pub fn exist_codecs(&self, _flutter: bool) -> CodecAbility {
#[allow(unused_mut)]
let mut ability = CodecAbility {
vp8: self.vp8.is_some(),
vp9: self.vp9.is_some(),
av1: self.av1.is_some(),
..Default::default()
};
#[cfg(feature = "hwcodec")]
{
ability.h264 |= self.hw.h264.is_some();
ability.h265 |= self.hw.h265.is_some();
}
#[cfg(feature = "gpucodec")]
if _flutter {
ability.h264 |= self.gpu.h264.is_some();
ability.h265 |= self.gpu.h265.is_some();
}
#[cfg(feature = "mediacodec")]
{
// NOTE(review): plain `=` here overwrites any hw/gpu results above;
// presumably those features are never enabled together with mediacodec — confirm.
ability.h264 = self.media_codec.h264.is_some();
ability.h265 = self.media_codec.h265.is_some();
}
ability
}
pub fn new(_luid: Option<i64>) -> Decoder {
let vp8 = VpxDecoder::new(VpxDecoderConfig {
codec: VpxVideoCodecId::VP8,
})
@@ -329,6 +473,12 @@ impl Decoder {
} else {
HwDecoders::default()
},
#[cfg(feature = "gpucodec")]
gpu: if enable_gpucodec_option() && _luid.clone().unwrap_or_default() != 0 {
GpuDecoder::new_decoders(_luid)
} else {
GpuDecoders::default()
},
#[cfg(feature = "hwcodec")]
i420: vec![],
#[cfg(feature = "mediacodec")]
@@ -345,6 +495,8 @@ impl Decoder {
&mut self,
frame: &video_frame::Union,
rgb: &mut ImageRgb,
_texture: &mut *mut c_void,
_pixelbuffer: &mut bool,
chroma: &mut Option<Chroma>,
) -> ResultType<bool> {
match frame {
@@ -369,23 +521,33 @@ impl Decoder {
bail!("av1 decoder not available");
}
}
#[cfg(feature = "hwcodec")]
#[cfg(any(feature = "hwcodec", feature = "gpucodec"))]
video_frame::Union::H264s(h264s) => {
*chroma = Some(Chroma::I420);
if let Some(decoder) = &mut self.hw.h264 {
Decoder::handle_hw_video_frame(decoder, h264s, rgb, &mut self.i420)
} else {
Err(anyhow!("don't support h264!"))
#[cfg(feature = "gpucodec")]
if let Some(decoder) = &mut self.gpu.h264 {
*_pixelbuffer = false;
return Decoder::handle_gpu_video_frame(decoder, h264s, _texture);
}
#[cfg(feature = "hwcodec")]
if let Some(decoder) = &mut self.hw.h264 {
return Decoder::handle_hw_video_frame(decoder, h264s, rgb, &mut self.i420);
}
Err(anyhow!("don't support h264!"))
}
#[cfg(feature = "hwcodec")]
#[cfg(any(feature = "hwcodec", feature = "gpucodec"))]
video_frame::Union::H265s(h265s) => {
*chroma = Some(Chroma::I420);
if let Some(decoder) = &mut self.hw.h265 {
Decoder::handle_hw_video_frame(decoder, h265s, rgb, &mut self.i420)
} else {
Err(anyhow!("don't support h265!"))
#[cfg(feature = "gpucodec")]
if let Some(decoder) = &mut self.gpu.h265 {
*_pixelbuffer = false;
return Decoder::handle_gpu_video_frame(decoder, h265s, _texture);
}
#[cfg(feature = "hwcodec")]
if let Some(decoder) = &mut self.hw.h265 {
return Decoder::handle_hw_video_frame(decoder, h265s, rgb, &mut self.i420);
}
Err(anyhow!("don't support h265!"))
}
#[cfg(feature = "mediacodec")]
video_frame::Union::H264s(h264s) => {
@@ -483,6 +645,22 @@ impl Decoder {
return Ok(ret);
}
#[cfg(feature = "gpucodec")]
// Feed every encoded packet to the gpu decoder; the last decoded image's
// texture pointer is handed back through `texture`. Returns true when at
// least one image was produced.
fn handle_gpu_video_frame(
    decoder: &mut GpuDecoder,
    frames: &EncodedVideoFrames,
    texture: &mut *mut c_void,
) -> ResultType<bool> {
    let mut produced = false;
    for packet in frames.frames.iter() {
        for image in decoder.decode(&packet.data)? {
            *texture = image.frame.texture;
            produced = true;
        }
    }
    Ok(produced)
}
// rgb [in/out] fmt and stride must be set in ImageRgb
#[cfg(feature = "mediacodec")]
fn handle_mediacodec_video_frame(
@@ -529,7 +707,14 @@ impl Decoder {
}
#[cfg(any(feature = "hwcodec", feature = "mediacodec"))]
fn enable_hwcodec_option() -> bool {
/// Whether hardware codecs are enabled: on by default, off only when the
/// user explicitly set the "enable-hwcodec" option to "N".
pub fn enable_hwcodec_option() -> bool {
    match Config2::get().options.get("enable-hwcodec") {
        Some(v) => v != "N",
        None => true, // unset means enabled
    }
}
#[cfg(feature = "gpucodec")]
pub fn enable_gpucodec_option() -> bool {
if let Some(v) = Config2::get().options.get("enable-hwcodec") {
return v != "N";
}

View File

@@ -7,8 +7,8 @@
include!(concat!(env!("OUT_DIR"), "/yuv_ffi.rs"));
#[cfg(not(target_os = "ios"))]
use crate::Frame;
use crate::{generate_call_macro, EncodeYuvFormat, TraitFrame};
use crate::PixelBuffer;
use crate::{generate_call_macro, EncodeYuvFormat, TraitPixelBuffer};
use hbb_common::{bail, log, ResultType};
generate_call_macro!(call_yuv, false);
@@ -195,7 +195,7 @@ pub mod hw {
}
#[cfg(not(target_os = "ios"))]
pub fn convert_to_yuv(
captured: &Frame,
captured: &PixelBuffer,
dst_fmt: EncodeYuvFormat,
dst: &mut Vec<u8>,
mid_data: &mut Vec<u8>,

View File

@@ -1,4 +1,6 @@
use crate::{common::TraitCapturer, dxgi, Pixfmt};
#[cfg(feature = "gpucodec")]
use crate::AdapterDevice;
use crate::{common::TraitCapturer, dxgi, Frame, Pixfmt};
use std::{
io::{
self,
@@ -41,7 +43,7 @@ impl Capturer {
impl TraitCapturer for Capturer {
fn frame<'a>(&'a mut self, timeout: Duration) -> io::Result<Frame<'a>> {
match self.inner.frame(timeout.as_millis() as _) {
Ok(frame) => Ok(Frame::new(frame, self.width, self.height)),
Ok(frame) => Ok(frame),
Err(ref error) if error.kind() == TimedOut => Err(WouldBlock.into()),
Err(error) => Err(error),
}
@@ -54,21 +56,31 @@ impl TraitCapturer for Capturer {
fn set_gdi(&mut self) -> bool {
self.inner.set_gdi()
}
#[cfg(feature = "gpucodec")]
fn device(&self) -> AdapterDevice {
self.inner.device()
}
#[cfg(feature = "gpucodec")]
fn set_output_texture(&mut self, texture: bool) {
self.inner.set_output_texture(texture);
}
}
pub struct Frame<'a> {
pub struct PixelBuffer<'a> {
data: &'a [u8],
width: usize,
height: usize,
stride: Vec<usize>,
}
impl<'a> Frame<'a> {
impl<'a> PixelBuffer<'a> {
pub fn new(data: &'a [u8], width: usize, height: usize) -> Self {
let stride0 = data.len() / height;
let mut stride = Vec::new();
stride.push(stride0);
Frame {
PixelBuffer {
data,
width,
height,
@@ -77,7 +89,7 @@ impl<'a> Frame<'a> {
}
}
impl<'a> crate::TraitFrame for Frame<'a> {
impl<'a> crate::TraitPixelBuffer for PixelBuffer<'a> {
fn data(&self) -> &[u8] {
self.data
}
@@ -184,6 +196,11 @@ impl Display {
// https://docs.microsoft.com/en-us/windows/win32/api/wingdi/ns-wingdi-devmodea
self.origin() == (0, 0)
}
#[cfg(feature = "gpucodec")]
pub fn adapter_luid(&self) -> Option<i64> {
self.0.adapter_luid()
}
}
pub struct CapturerMag {
@@ -215,11 +232,11 @@ impl CapturerMag {
impl TraitCapturer for CapturerMag {
fn frame<'a>(&'a mut self, _timeout_ms: Duration) -> io::Result<Frame<'a>> {
self.inner.frame(&mut self.data)?;
Ok(Frame::new(
Ok(Frame::PixelBuffer(PixelBuffer::new(
&self.data,
self.inner.get_rect().1,
self.inner.get_rect().2,
))
)))
}
fn is_gdi(&self) -> bool {
@@ -229,4 +246,12 @@ impl TraitCapturer for CapturerMag {
fn set_gdi(&mut self) -> bool {
false
}
#[cfg(feature = "gpucodec")]
fn device(&self) -> AdapterDevice {
AdapterDevice::default()
}
#[cfg(feature = "gpucodec")]
fn set_output_texture(&mut self, _texture: bool) {}
}

View File

@@ -0,0 +1,451 @@
use std::{
collections::HashMap,
ffi::c_void,
sync::{Arc, Mutex},
};
use crate::{
codec::{base_bitrate, enable_gpucodec_option, EncoderApi, EncoderCfg, Quality},
AdapterDevice, CodecName, EncodeInput, EncodeYuvFormat, Pixfmt,
};
use gpucodec::gpu_common::{
self, Available, DecodeContext, DynamicContext, EncodeContext, FeatureContext, MAX_GOP,
};
use gpucodec::{
decode::{self, DecodeFrame, Decoder},
encode::{self, EncodeFrame, Encoder},
};
use hbb_common::{
allow_err,
anyhow::{anyhow, bail, Context},
bytes::Bytes,
log,
message_proto::{EncodedVideoFrame, EncodedVideoFrames, VideoFrame},
ResultType,
};
// Whether decoded output should be exposed via a shared texture handle.
// NOTE(review): defined here but not referenced in this chunk — confirm its use site.
const OUTPUT_SHARED_HANDLE: bool = false;
// https://www.reddit.com/r/buildapc/comments/d2m4ny/two_graphics_cards_two_monitors/
// https://www.reddit.com/r/techsupport/comments/t2v9u6/dual_monitor_setup_with_dual_gpu/
// https://cybersided.com/two-monitors-two-gpus/
// https://learn.microsoft.com/en-us/windows/win32/api/d3d12/nf-d3d12-id3d12device-getadapterluid#remarks
lazy_static::lazy_static! {
// Per-display flag map: `true` means "do not use the gpucodec encoder for this display"
// (see GpuEncoder::set_not_use / not_use / available).
// NOTE(review): identifier misspells ENCODE ("ENOCDE"); renaming would touch all uses.
static ref ENOCDE_NOT_USE: Arc<Mutex<HashMap<usize, bool>>> = Default::default();
}
/// Static configuration used to construct a `GpuEncoder`.
#[derive(Debug, Clone)]
pub struct GpuEncoderConfig {
// Adapter device handle; its raw device is passed to DynamicContext.device.
pub device: AdapterDevice,
pub width: usize,
pub height: usize,
// Quality preset, converted to a bitrate percentage by GpuEncoder::convert_quality.
pub quality: Quality,
// Selected codec feature (driver + data format) for this adapter.
pub feature: gpucodec::gpu_common::FeatureContext,
// Keyframe interval in frames; `None` falls back to MAX_GOP.
pub keyframe_interval: Option<usize>,
}
/// Zero-copy GPU encoder: consumes GPU textures (`*mut c_void`) and emits
/// H264/H265 encoded frames.
pub struct GpuEncoder {
encoder: Encoder,
// Output stream format (H264 or H265).
pub format: gpu_common::DataFormat,
// Creation context; retained for set_quality / support_abr.
ctx: EncodeContext,
// Current target bitrate (kbitrate units per DynamicContext — TODO confirm kbps).
bitrate: u32,
// Length of the previous encoded frame; used to detect the AMD stuck-frame bug.
last_frame_len: usize,
// Count of consecutive frames with the same suspiciously small length.
same_bad_len_counter: usize,
}
impl EncoderApi for GpuEncoder {
/// Build a GpuEncoder from an `EncoderCfg::GPU` config. `_i444` is ignored:
/// this encoder takes textures, not planar YUV.
fn new(cfg: EncoderCfg, _i444: bool) -> ResultType<Self>
where
Self: Sized,
{
match cfg {
EncoderCfg::GPU(config) => {
// Quality preset -> percentage of the resolution-based base bitrate.
let b = Self::convert_quality(config.quality, &config.feature);
let base_bitrate = base_bitrate(config.width as _, config.height as _);
let mut bitrate = base_bitrate * b / 100;
// NOTE(review): this tests base_bitrate (u32, so `<= 0` means `== 0`),
// in which case the assignment is a no-op; possibly `bitrate <= 0` was
// intended as a fallback — confirm intent before changing.
if base_bitrate <= 0 {
bitrate = base_bitrate;
}
let gop = config.keyframe_interval.unwrap_or(MAX_GOP as _) as i32;
let ctx = EncodeContext {
f: config.feature.clone(),
d: DynamicContext {
device: Some(config.device.device),
width: config.width as _,
height: config.height as _,
kbitrate: bitrate as _,
framerate: 30,
gop,
},
};
match Encoder::new(ctx.clone()) {
Ok(encoder) => Ok(GpuEncoder {
encoder,
ctx,
format: config.feature.data_format,
bitrate,
last_frame_len: 0,
same_bad_len_counter: 0,
}),
Err(_) => Err(anyhow!(format!("Failed to create encoder"))),
}
}
_ => Err(anyhow!("encoder type mismatch")),
}
}
/// Encode one texture frame into a protobuf VideoFrame (H264s/H265s).
/// Bails with ENCODE_NEED_SWITCH when the AMD stuck-frame heuristic trips,
/// telling the caller to switch encoders.
fn encode_to_message(
&mut self,
frame: EncodeInput,
_ms: i64,
) -> ResultType<hbb_common::message_proto::VideoFrame> {
// Texture input only; EncodeInput::texture() errors for pixel-buffer input.
let texture = frame.texture()?;
let mut vf = VideoFrame::new();
let mut frames = Vec::new();
for frame in self.encode(texture).with_context(|| "Failed to encode")? {
frames.push(EncodedVideoFrame {
data: Bytes::from(frame.data),
pts: frame.pts as _,
key: frame.key == 1,
..Default::default()
});
}
if frames.len() > 0 {
// This kind of problem occurs after a period of time when using AMD
// encoding: the encoded length gets stuck at about 40 bytes while the
// picture no longer updates. Detect the pattern and ask for a switch.
const MIN_BAD_LEN: usize = 100;
const MAX_BAD_COUNTER: usize = 30;
let this_frame_len = frames[0].data.len();
if this_frame_len < MIN_BAD_LEN && this_frame_len == self.last_frame_len {
self.same_bad_len_counter += 1;
if self.same_bad_len_counter >= MAX_BAD_COUNTER {
log::info!(
"{} times encoding len is {}, switch",
self.same_bad_len_counter,
self.last_frame_len
);
bail!(crate::codec::ENCODE_NEED_SWITCH);
}
} else {
self.same_bad_len_counter = 0;
}
self.last_frame_len = this_frame_len;
let frames = EncodedVideoFrames {
frames: frames.into(),
..Default::default()
};
match self.format {
gpu_common::DataFormat::H264 => vf.set_h264s(frames),
gpu_common::DataFormat::H265 => vf.set_h265s(frames),
_ => bail!("{:?} not supported", self.format),
}
Ok(vf)
} else {
Err(anyhow!("no valid frame"))
}
}
/// Placeholder only — this encoder consumes textures (see input_texture()),
/// so no YUV conversion format is actually used.
fn yuvfmt(&self) -> EncodeYuvFormat {
// useless
EncodeYuvFormat {
pixfmt: Pixfmt::BGRA,
w: self.ctx.d.width as _,
h: self.ctx.d.height as _,
stride: Vec::new(),
u: 0,
v: 0,
}
}
#[cfg(feature = "gpucodec")]
fn input_texture(&self) -> bool {
true
}
/// Re-derive the bitrate from the quality preset and apply it; only records
/// the new value if the underlying encoder accepted it.
fn set_quality(&mut self, quality: Quality) -> ResultType<()> {
let b = Self::convert_quality(quality, &self.ctx.f);
let bitrate = base_bitrate(self.ctx.d.width as _, self.ctx.d.height as _) * b / 100;
if bitrate > 0 {
if self.encoder.set_bitrate((bitrate) as _).is_ok() {
self.bitrate = bitrate;
}
}
Ok(())
}
fn bitrate(&self) -> u32 {
self.bitrate
}
/// Adaptive bitrate is supported except on the VPL driver.
fn support_abr(&self) -> bool {
self.ctx.f.driver != gpu_common::EncodeDriver::VPL
}
}
impl GpuEncoder {
pub fn try_get(device: &AdapterDevice, name: CodecName) -> Option<FeatureContext> {
let v: Vec<_> = Self::available(name)
.drain(..)
.filter(|e| e.luid == device.luid)
.collect();
if v.len() > 0 {
Some(v[0].clone())
} else {
None
}
}
pub fn available(name: CodecName) -> Vec<FeatureContext> {
let not_use = ENOCDE_NOT_USE.lock().unwrap().clone();
if not_use.values().any(|not_use| *not_use) {
log::info!("currently not use gpucodec encoders: {not_use:?}");
return vec![];
}
let data_format = match name {
CodecName::H264GPU => gpu_common::DataFormat::H264,
CodecName::H265GPU => gpu_common::DataFormat::H265,
_ => return vec![],
};
let Ok(displays) = crate::Display::all() else {
log::error!("failed to get displays");
return vec![];
};
if displays.is_empty() {
log::error!("no display found");
return vec![];
}
let luids = displays
.iter()
.map(|d| d.adapter_luid())
.collect::<Vec<_>>();
let v: Vec<_> = get_available_config()
.map(|c| c.e)
.unwrap_or_default()
.drain(..)
.filter(|c| c.data_format == data_format)
.collect();
if luids
.iter()
.all(|luid| v.iter().any(|f| Some(f.luid) == *luid))
{
v
} else {
log::info!("not all adapters support {data_format:?}, luids = {luids:?}");
vec![]
}
}
pub fn encode(&mut self, texture: *mut c_void) -> ResultType<Vec<EncodeFrame>> {
match self.encoder.encode(texture) {
Ok(v) => {
let mut data = Vec::<EncodeFrame>::new();
data.append(v);
Ok(data)
}
Err(_) => Ok(Vec::<EncodeFrame>::new()),
}
}
pub fn convert_quality(quality: Quality, f: &FeatureContext) -> u32 {
match quality {
Quality::Best => {
if f.driver == gpu_common::EncodeDriver::VPL
&& f.data_format == gpu_common::DataFormat::H264
{
200
} else {
150
}
}
Quality::Balanced => {
if f.driver == gpu_common::EncodeDriver::VPL
&& f.data_format == gpu_common::DataFormat::H264
{
150
} else {
100
}
}
Quality::Low => {
if f.driver == gpu_common::EncodeDriver::VPL
&& f.data_format == gpu_common::DataFormat::H264
{
75
} else {
50
}
}
Quality::Custom(b) => b,
}
}
pub fn set_not_use(display: usize, not_use: bool) {
log::info!("set display#{display} not use gpucodec encode to {not_use}");
ENOCDE_NOT_USE.lock().unwrap().insert(display, not_use);
}
pub fn not_use() -> bool {
ENOCDE_NOT_USE.lock().unwrap().iter().any(|v| *v.1)
}
}
/// GPU video decoder wrapping a `gpucodec` `Decoder`.
pub struct GpuDecoder {
    decoder: Decoder,
}
/// Pair of optional GPU decoders, one per supported codec.
/// A field is `None` when the corresponding decoder could not be created.
#[derive(Default)]
pub struct GpuDecoders {
    pub h264: Option<GpuDecoder>,
    pub h265: Option<GpuDecoder>,
}
impl GpuDecoder {
pub fn try_get(name: CodecName, luid: Option<i64>) -> Option<DecodeContext> {
let v: Vec<_> = Self::available(name, luid);
if v.len() > 0 {
Some(v[0].clone())
} else {
None
}
}
pub fn available(name: CodecName, luid: Option<i64>) -> Vec<DecodeContext> {
let luid = luid.unwrap_or_default();
let data_format = match name {
CodecName::H264GPU => gpu_common::DataFormat::H264,
CodecName::H265GPU => gpu_common::DataFormat::H265,
_ => return vec![],
};
get_available_config()
.map(|c| c.d)
.unwrap_or_default()
.drain(..)
.filter(|c| c.data_format == data_format && c.luid == luid)
.collect()
}
pub fn possible_available_without_check() -> (bool, bool) {
if !enable_gpucodec_option() {
return (false, false);
}
let v = get_available_config().map(|c| c.d).unwrap_or_default();
(
v.iter()
.any(|d| d.data_format == gpu_common::DataFormat::H264),
v.iter()
.any(|d| d.data_format == gpu_common::DataFormat::H265),
)
}
pub fn new_decoders(luid: Option<i64>) -> GpuDecoders {
let mut h264: Option<GpuDecoder> = None;
let mut h265: Option<GpuDecoder> = None;
if let Ok(decoder) = GpuDecoder::new(CodecName::H264GPU, luid) {
h264 = Some(decoder);
}
if let Ok(decoder) = GpuDecoder::new(CodecName::H265GPU, luid) {
h265 = Some(decoder);
}
log::info!(
"new gpu decoders, support h264: {}, h265: {}",
h264.is_some(),
h265.is_some()
);
GpuDecoders { h264, h265 }
}
pub fn new(name: CodecName, luid: Option<i64>) -> ResultType<Self> {
let ctx = Self::try_get(name, luid).ok_or(anyhow!("Failed to get decode context"))?;
match Decoder::new(ctx) {
Ok(decoder) => Ok(Self { decoder }),
Err(_) => Err(anyhow!(format!("Failed to create decoder"))),
}
}
pub fn decode(&mut self, data: &[u8]) -> ResultType<Vec<GpuDecoderImage>> {
match self.decoder.decode(data) {
Ok(v) => Ok(v.iter().map(|f| GpuDecoderImage { frame: f }).collect()),
Err(e) => Err(anyhow!(e)),
}
}
}
/// A decoded frame borrowed from the decoder's internal buffers; valid only
/// until the next `GpuDecoder::decode` call.
pub struct GpuDecoderImage<'a> {
    pub frame: &'a DecodeFrame,
}
// Intentionally empty for now; kept as an extension point for frame helpers.
impl GpuDecoderImage<'_> {}
/// Load the serialized encode/decode capability cache written by
/// `check_available_gpucodec` and deserialize it.
fn get_available_config() -> ResultType<Available> {
    let available = hbb_common::config::GpucodecConfig::load().available;
    Available::deserialize(&available)
        .map_err(|_| anyhow!("Failed to deserialize:{}", available))
}
pub fn check_available_gpucodec() {
let d = DynamicContext {
device: None,
width: 1920,
height: 1080,
kbitrate: 5000,
framerate: 60,
gop: MAX_GOP as _,
};
let encoders = encode::available(d);
let decoders = decode::available(OUTPUT_SHARED_HANDLE);
let available = Available {
e: encoders,
d: decoders,
};
if let Ok(available) = available.serialize() {
let mut config = hbb_common::config::GpucodecConfig::load();
config.available = available;
config.store();
return;
}
log::error!("Failed to serialize gpucodec");
}
/// Spawn a one-shot child process (`--check-gpucodec-config`) that probes
/// GPU codec capabilities and writes them to the config file. Runs at most
/// once per process; the caller is never blocked (work happens on a thread).
pub fn gpucodec_new_check_process() {
    use std::sync::Once;
    static ONCE: Once = Once::new();
    ONCE.call_once(|| {
        std::thread::spawn(move || {
            // Remove to avoid checking process errors
            // But when the program is just started, the configuration file has not been updated, and the new connection will read an empty configuration
            hbb_common::config::GpucodecConfig::clear();
            if let Ok(exe) = std::env::current_exe() {
                let arg = "--check-gpucodec-config";
                if let Ok(mut child) = std::process::Command::new(exe).arg(arg).spawn() {
                    // wait up to 30 seconds, polling once per second for the
                    // probe to finish on its own
                    for _ in 0..30 {
                        std::thread::sleep(std::time::Duration::from_secs(1));
                        if let Ok(Some(_)) = child.try_wait() {
                            break;
                        }
                    }
                    // Force-kill a probe still running after the timeout
                    // (it may have hung inside a driver call); killing an
                    // already-exited child just returns an error we ignore.
                    allow_err!(child.kill());
                    std::thread::sleep(std::time::Duration::from_millis(30));
                    // Reap the child so it doesn't linger as a zombie, and
                    // log how the probe ended.
                    match child.try_wait() {
                        Ok(Some(status)) => {
                            log::info!("Check gpucodec config, exit with: {status}")
                        }
                        Ok(None) => {
                            log::info!(
                                "Check gpucodec config, status not ready yet, let's really wait"
                            );
                            let res = child.wait();
                            log::info!("Check gpucodec config, wait result: {res:?}");
                        }
                        Err(e) => {
                            log::error!("Check gpucodec config, error attempting to wait: {e}")
                        }
                    }
                }
            };
        });
    });
}

View File

@@ -1,6 +1,6 @@
use crate::{
codec::{base_bitrate, codec_thread_num, EncoderApi, EncoderCfg},
hw, ImageFormat, ImageRgb, Pixfmt, HW_STRIDE_ALIGN,
codec::{base_bitrate, codec_thread_num, EncoderApi, EncoderCfg, Quality as Q},
hw, EncodeInput, ImageFormat, ImageRgb, Pixfmt, HW_STRIDE_ALIGN,
};
use hbb_common::{
allow_err,
@@ -29,8 +29,18 @@ const DEFAULT_GOP: i32 = i32::MAX;
const DEFAULT_HW_QUALITY: Quality = Quality_Default;
const DEFAULT_RC: RateControl = RC_DEFAULT;
#[derive(Debug, Clone)]
pub struct HwEncoderConfig {
pub name: String,
pub width: usize,
pub height: usize,
pub quality: Q,
pub keyframe_interval: Option<usize>,
}
pub struct HwEncoder {
encoder: Encoder,
name: String,
pub format: DataFormat,
pub pixfmt: AVPixelFormat,
width: u32,
@@ -77,6 +87,7 @@ impl EncoderApi for HwEncoder {
match Encoder::new(ctx.clone()) {
Ok(encoder) => Ok(HwEncoder {
encoder,
name: config.name,
format,
pixfmt: ctx.pixfmt,
width: ctx.width as _,
@@ -90,10 +101,13 @@ impl EncoderApi for HwEncoder {
}
}
fn encode_to_message(&mut self, frame: &[u8], _ms: i64) -> ResultType<VideoFrame> {
fn encode_to_message(&mut self, input: EncodeInput, _ms: i64) -> ResultType<VideoFrame> {
let mut vf = VideoFrame::new();
let mut frames = Vec::new();
for frame in self.encode(frame).with_context(|| "Failed to encode")? {
for frame in self
.encode(input.yuv()?)
.with_context(|| "Failed to encode")?
{
frames.push(EncodedVideoFrame {
data: Bytes::from(frame.data),
pts: frame.pts as _,
@@ -143,6 +157,11 @@ impl EncoderApi for HwEncoder {
}
}
#[cfg(feature = "gpucodec")]
fn input_texture(&self) -> bool {
false
}
fn set_quality(&mut self, quality: crate::codec::Quality) -> ResultType<()> {
let b = Self::convert_quality(quality);
let bitrate = base_bitrate(self.width as _, self.height as _) * b / 100;
@@ -156,6 +175,10 @@ impl EncoderApi for HwEncoder {
fn bitrate(&self) -> u32 {
self.bitrate
}
fn support_abr(&self) -> bool {
!self.name.contains("qsv")
}
}
impl HwEncoder {
@@ -226,7 +249,7 @@ impl HwDecoder {
}
}
if fail {
check_config_process();
hwcodec_new_check_process();
}
HwDecoders { h264, h265 }
}
@@ -320,7 +343,7 @@ fn get_config(k: &str) -> ResultType<CodecInfos> {
}
}
pub fn check_config() {
pub fn check_available_hwcodec() {
let ctx = EncodeContext {
name: String::from(""),
width: 1920,
@@ -357,7 +380,7 @@ pub fn check_config() {
log::error!("Failed to serialize codec info");
}
pub fn check_config_process() {
pub fn hwcodec_new_check_process() {
use std::sync::Once;
let f = || {
// Clear to avoid checking process errors

View File

@@ -1,7 +1,10 @@
use crate::common::{
wayland,
x11::{self, Frame},
TraitCapturer,
use crate::{
common::{
wayland,
x11::{self},
TraitCapturer,
},
Frame,
};
use std::{io, time::Duration};

View File

@@ -1,9 +1,10 @@
pub use self::vpxcodec::*;
use hbb_common::{
log,
bail, log,
message_proto::{video_frame, Chroma, VideoFrame},
ResultType,
};
use std::slice;
use std::{ffi::c_void, slice};
cfg_if! {
if #[cfg(quartz)] {
@@ -16,8 +17,8 @@ cfg_if! {
mod wayland;
mod x11;
pub use self::linux::*;
pub use self::x11::Frame;
pub use self::wayland::set_map_err;
pub use self::x11::PixelBuffer;
} else {
mod x11;
pub use self::x11::*;
@@ -36,6 +37,8 @@ cfg_if! {
pub mod codec;
pub mod convert;
#[cfg(feature = "gpucodec")]
pub mod gpucodec;
#[cfg(feature = "hwcodec")]
pub mod hwcodec;
#[cfg(feature = "mediacodec")]
@@ -107,9 +110,32 @@ pub trait TraitCapturer {
fn is_gdi(&self) -> bool;
#[cfg(windows)]
fn set_gdi(&mut self) -> bool;
#[cfg(feature = "gpucodec")]
fn device(&self) -> AdapterDevice;
#[cfg(feature = "gpucodec")]
fn set_output_texture(&mut self, texture: bool);
}
pub trait TraitFrame {
#[derive(Debug, Clone, Copy)]
pub struct AdapterDevice {
pub device: *mut c_void,
pub vendor_id: ::std::os::raw::c_uint,
pub luid: i64,
}
impl Default for AdapterDevice {
fn default() -> Self {
Self {
device: std::ptr::null_mut(),
vendor_id: Default::default(),
luid: Default::default(),
}
}
}
pub trait TraitPixelBuffer {
fn data(&self) -> &[u8];
fn width(&self) -> usize;
@@ -120,6 +146,59 @@ pub trait TraitFrame {
fn pixfmt(&self) -> Pixfmt;
}
#[cfg(not(any(target_os = "ios")))]
pub enum Frame<'a> {
PixelBuffer(PixelBuffer<'a>),
Texture(*mut c_void),
}
#[cfg(not(any(target_os = "ios")))]
impl Frame<'_> {
pub fn valid<'a>(&'a self) -> bool {
match self {
Frame::PixelBuffer(pixelbuffer) => !pixelbuffer.data().is_empty(),
Frame::Texture(texture) => !texture.is_null(),
}
}
pub fn to<'a>(
&'a self,
yuvfmt: EncodeYuvFormat,
yuv: &'a mut Vec<u8>,
mid_data: &mut Vec<u8>,
) -> ResultType<EncodeInput> {
match self {
Frame::PixelBuffer(pixelbuffer) => {
convert_to_yuv(&pixelbuffer, yuvfmt, yuv, mid_data)?;
Ok(EncodeInput::YUV(yuv))
}
Frame::Texture(texture) => Ok(EncodeInput::Texture(*texture)),
}
}
}
pub enum EncodeInput<'a> {
YUV(&'a [u8]),
Texture(*mut c_void),
}
impl<'a> EncodeInput<'a> {
pub fn yuv(&self) -> ResultType<&'_ [u8]> {
match self {
Self::YUV(f) => Ok(f),
_ => bail!("not pixelfbuffer frame"),
}
}
pub fn texture(&self) -> ResultType<*mut c_void> {
match self {
Self::Texture(f) => Ok(*f),
_ => bail!("not texture frame"),
}
}
}
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
pub enum Pixfmt {
BGRA,
@@ -166,8 +245,10 @@ pub enum CodecName {
VP8,
VP9,
AV1,
H264(String),
H265(String),
H264HW(String),
H265HW(String),
H264GPU,
H265GPU,
}
#[derive(PartialEq, Debug, Clone)]
@@ -199,8 +280,8 @@ impl From<&CodecName> for CodecFormat {
CodecName::VP8 => Self::VP8,
CodecName::VP9 => Self::VP9,
CodecName::AV1 => Self::AV1,
CodecName::H264(_) => Self::H264,
CodecName::H265(_) => Self::H265,
CodecName::H264HW(_) | CodecName::H264GPU => Self::H264,
CodecName::H265HW(_) | CodecName::H265GPU => Self::H265,
}
}
}

View File

@@ -1,4 +1,4 @@
use crate::{quartz, Pixfmt};
use crate::{quartz, Frame, Pixfmt};
use std::marker::PhantomData;
use std::sync::{Arc, Mutex, TryLockError};
use std::{io, mem};
@@ -55,12 +55,12 @@ impl crate::TraitCapturer for Capturer {
Some(mut frame) => {
crate::would_block_if_equal(&mut self.saved_raw_data, frame.inner())?;
frame.surface_to_bgra(self.height());
Ok(Frame {
Ok(Frame::PixelBuffer(PixelBuffer {
frame,
data: PhantomData,
width: self.width(),
height: self.height(),
})
}))
}
None => Err(io::ErrorKind::WouldBlock.into()),
@@ -74,14 +74,14 @@ impl crate::TraitCapturer for Capturer {
}
}
pub struct Frame<'a> {
pub struct PixelBuffer<'a> {
frame: quartz::Frame,
data: PhantomData<&'a [u8]>,
width: usize,
height: usize,
}
impl<'a> crate::TraitFrame for Frame<'a> {
impl<'a> crate::TraitPixelBuffer for PixelBuffer<'a> {
fn data(&self) -> &[u8] {
&*self.frame
}

View File

@@ -8,7 +8,7 @@ use hbb_common::message_proto::{Chroma, EncodedVideoFrame, EncodedVideoFrames, V
use hbb_common::ResultType;
use crate::codec::{base_bitrate, codec_thread_num, EncoderApi, Quality};
use crate::{EncodeYuvFormat, GoogleImage, Pixfmt, STRIDE_ALIGN};
use crate::{EncodeInput, EncodeYuvFormat, GoogleImage, Pixfmt, STRIDE_ALIGN};
use super::vpx::{vp8e_enc_control_id::*, vpx_codec_err_t::*, *};
use crate::{generate_call_macro, generate_call_ptr_macro, Error, Result};
@@ -183,10 +183,10 @@ impl EncoderApi for VpxEncoder {
}
}
fn encode_to_message(&mut self, frame: &[u8], ms: i64) -> ResultType<VideoFrame> {
fn encode_to_message(&mut self, input: EncodeInput, ms: i64) -> ResultType<VideoFrame> {
let mut frames = Vec::new();
for ref frame in self
.encode(ms, frame, STRIDE_ALIGN)
.encode(ms, input.yuv()?, STRIDE_ALIGN)
.with_context(|| "Failed to encode")?
{
frames.push(VpxEncoder::create_frame(frame));
@@ -207,6 +207,11 @@ impl EncoderApi for VpxEncoder {
self.yuvfmt.clone()
}
#[cfg(feature = "gpucodec")]
fn input_texture(&self) -> bool {
false
}
fn set_quality(&mut self, quality: Quality) -> ResultType<()> {
let mut c = unsafe { *self.ctx.config.enc.to_owned() };
let (q_min, q_max, b) = Self::convert_quality(quality);
@@ -226,6 +231,10 @@ impl EncoderApi for VpxEncoder {
let c = unsafe { *self.ctx.config.enc.to_owned() };
c.rc_target_bitrate
}
fn support_abr(&self) -> bool {
true
}
}
impl VpxEncoder {

View File

@@ -1,7 +1,11 @@
use crate::common::{x11::Frame, TraitCapturer};
use crate::wayland::{capturable::*, *};
use crate::{
wayland::{capturable::*, *},
Frame, TraitCapturer,
};
use std::{io, sync::RwLock, time::Duration};
use super::x11::PixelBuffer;
pub struct Capturer(Display, Box<dyn Recorder>, Vec<u8>);
@@ -39,8 +43,18 @@ impl Capturer {
impl TraitCapturer for Capturer {
fn frame<'a>(&'a mut self, timeout: Duration) -> io::Result<Frame<'a>> {
match self.1.capture(timeout.as_millis() as _).map_err(map_err)? {
PixelProvider::BGR0(w, h, x) => Ok(Frame::new(x, crate::Pixfmt::BGRA, w, h)),
PixelProvider::RGB0(w, h, x) => Ok(Frame::new(x, crate::Pixfmt::RGBA, w,h)),
PixelProvider::BGR0(w, h, x) => Ok(Frame::PixelBuffer(PixelBuffer::new(
x,
crate::Pixfmt::BGRA,
w,
h,
))),
PixelProvider::RGB0(w, h, x) => Ok(Frame::PixelBuffer(PixelBuffer::new(
x,
crate::Pixfmt::RGBA,
w,
h,
))),
PixelProvider::NONE => Err(std::io::ErrorKind::WouldBlock.into()),
_ => Err(map_err("Invalid data")),
}

View File

@@ -1,4 +1,4 @@
use crate::{common::TraitCapturer, x11, Pixfmt, TraitFrame};
use crate::{common::TraitCapturer, x11, Frame, Pixfmt, TraitPixelBuffer};
use std::{io, time::Duration};
pub struct Capturer(x11::Capturer);
@@ -21,20 +21,27 @@ impl Capturer {
impl TraitCapturer for Capturer {
fn frame<'a>(&'a mut self, _timeout: Duration) -> io::Result<Frame<'a>> {
Ok(self.0.frame()?)
let width = self.width();
let height = self.height();
Ok(Frame::PixelBuffer(PixelBuffer::new(
self.0.frame()?,
Pixfmt::BGRA,
width,
height,
)))
}
}
pub struct Frame<'a> {
pub data: &'a [u8],
pub pixfmt: Pixfmt,
pub width: usize,
pub height: usize,
pub stride: Vec<usize>,
pub struct PixelBuffer<'a> {
data: &'a [u8],
pixfmt: Pixfmt,
width: usize,
height: usize,
stride: Vec<usize>,
}
impl<'a> Frame<'a> {
pub fn new(data: &'a [u8], pixfmt: Pixfmt, width: usize, height: usize) -> Self {
impl<'a> PixelBuffer<'a> {
pub fn new(data: &'a [u8], pixfmt: Pixfmt, width:usize, height: usize) -> Self {
let stride0 = data.len() / height;
let mut stride = Vec::new();
stride.push(stride0);
@@ -48,7 +55,7 @@ impl<'a> Frame<'a> {
}
}
impl<'a> TraitFrame for Frame<'a> {
impl<'a> TraitPixelBuffer for PixelBuffer<'a> {
fn data(&self) -> &[u8] {
self.data
}

View File

@@ -22,6 +22,9 @@ use winapi::{
use crate::RotationMode::*;
use crate::{AdapterDevice, Frame, PixelBuffer};
use std::ffi::c_void;
pub struct ComPtr<T>(*mut T);
impl<T> ComPtr<T> {
fn is_null(&self) -> bool {
@@ -45,12 +48,15 @@ pub struct Capturer {
duplication: ComPtr<IDXGIOutputDuplication>,
fastlane: bool,
surface: ComPtr<IDXGISurface>,
texture: ComPtr<ID3D11Texture2D>,
width: usize,
height: usize,
rotated: Vec<u8>,
gdi_capturer: Option<CapturerGDI>,
gdi_buffer: Vec<u8>,
saved_raw_data: Vec<u8>, // for faster compare and copy
output_texture: bool,
adapter_desc1: DXGI_ADAPTER_DESC1,
}
impl Capturer {
@@ -60,12 +66,14 @@ impl Capturer {
let mut duplication = ptr::null_mut();
#[allow(invalid_value)]
let mut desc = unsafe { mem::MaybeUninit::uninit().assume_init() };
#[allow(invalid_value)]
let mut adapter_desc1 = unsafe { mem::MaybeUninit::uninit().assume_init() };
let mut gdi_capturer = None;
let mut res = if display.gdi {
wrap_hresult(1)
} else {
wrap_hresult(unsafe {
let res = wrap_hresult(unsafe {
D3D11CreateDevice(
display.adapter.0 as *mut _,
D3D_DRIVER_TYPE_UNKNOWN,
@@ -78,7 +86,12 @@ impl Capturer {
ptr::null_mut(),
&mut context,
)
})
});
if res.is_ok() {
wrap_hresult(unsafe { (*display.adapter.0).GetDesc1(&mut adapter_desc1) })
} else {
res
}
};
let device = ComPtr(device);
let context = ComPtr(context);
@@ -145,6 +158,7 @@ impl Capturer {
duplication: ComPtr(duplication),
fastlane: desc.DesktopImageInSystemMemory == TRUE,
surface: ComPtr(ptr::null_mut()),
texture: ComPtr(ptr::null_mut()),
width: display.width() as usize,
height: display.height() as usize,
display,
@@ -152,6 +166,8 @@ impl Capturer {
gdi_capturer,
gdi_buffer: Vec::new(),
saved_raw_data: Vec::new(),
output_texture: false,
adapter_desc1,
})
}
@@ -169,6 +185,11 @@ impl Capturer {
self.gdi_capturer.take();
}
#[cfg(feature = "gpucodec")]
pub fn set_output_texture(&mut self, texture: bool) {
self.output_texture = texture;
}
unsafe fn load_frame(&mut self, timeout: UINT) -> io::Result<(*const u8, i32)> {
let mut frame = ptr::null_mut();
#[allow(invalid_value)]
@@ -230,7 +251,21 @@ impl Capturer {
Ok(surface)
}
pub fn frame<'a>(&'a mut self, timeout: UINT) -> io::Result<&'a [u8]> {
pub fn frame<'a>(&'a mut self, timeout: UINT) -> io::Result<Frame<'a>> {
if self.output_texture {
Ok(Frame::Texture(self.get_texture(timeout)?))
} else {
let width = self.width;
let height = self.height;
Ok(Frame::PixelBuffer(PixelBuffer::new(
self.get_pixelbuffer(timeout)?,
width,
height,
)))
}
}
fn get_pixelbuffer<'a>(&'a mut self, timeout: UINT) -> io::Result<&'a [u8]> {
unsafe {
// Release last frame.
// No error checking needed because we don't care.
@@ -293,6 +328,34 @@ impl Capturer {
}
}
fn get_texture(&mut self, timeout: UINT) -> io::Result<*mut c_void> {
unsafe {
if self.duplication.0.is_null() {
return Err(std::io::ErrorKind::AddrNotAvailable.into());
}
(*self.duplication.0).ReleaseFrame();
let mut frame = ptr::null_mut();
#[allow(invalid_value)]
let mut info = mem::MaybeUninit::uninit().assume_init();
wrap_hresult((*self.duplication.0).AcquireNextFrame(timeout, &mut info, &mut frame))?;
let frame = ComPtr(frame);
if info.AccumulatedFrames == 0 || *info.LastPresentTime.QuadPart() == 0 {
return Err(std::io::ErrorKind::WouldBlock.into());
}
let mut texture: *mut ID3D11Texture2D = ptr::null_mut();
(*frame.0).QueryInterface(
&IID_ID3D11Texture2D,
&mut texture as *mut *mut _ as *mut *mut _,
);
let texture = ComPtr(texture);
self.texture = texture;
Ok(self.texture.0 as *mut c_void)
}
}
fn unmap(&self) {
unsafe {
(*self.duplication.0).ReleaseFrame();
@@ -305,6 +368,15 @@ impl Capturer {
}
}
}
pub fn device(&self) -> AdapterDevice {
AdapterDevice {
device: self.device.0 as _,
vendor_id: self.adapter_desc1.VendorId,
luid: ((self.adapter_desc1.AdapterLuid.HighPart as i64) << 32)
| self.adapter_desc1.AdapterLuid.LowPart as i64,
}
}
}
impl Drop for Capturer {
@@ -547,6 +619,22 @@ impl Display {
self.desc.DesktopCoordinates.top,
)
}
#[cfg(feature = "gpucodec")]
pub fn adapter_luid(&self) -> Option<i64> {
unsafe {
if !self.adapter.is_null() {
#[allow(invalid_value)]
let mut adapter_desc1 = mem::MaybeUninit::uninit().assume_init();
if wrap_hresult((*self.adapter.0).GetDesc1(&mut adapter_desc1)).is_ok() {
let luid = ((adapter_desc1.AdapterLuid.HighPart as i64) << 32)
| adapter_desc1.AdapterLuid.LowPart as i64;
return Some(luid);
}
}
None
}
}
}
fn wrap_hresult(x: HRESULT) -> io::Result<()> {

View File

@@ -1,11 +1,7 @@
use std::{io, ptr, slice};
use hbb_common::libc;
use crate::Frame;
use super::ffi::*;
use super::Display;
use hbb_common::libc;
use std::{io, ptr, slice};
pub struct Capturer {
display: Display,
@@ -97,13 +93,11 @@ impl Capturer {
}
}
pub fn frame<'b>(&'b mut self) -> std::io::Result<Frame> {
pub fn frame<'b>(&'b mut self) -> std::io::Result<&'b [u8]> {
self.get_image();
let result = unsafe { slice::from_raw_parts(self.buffer, self.size) };
crate::would_block_if_equal(&mut self.saved_raw_data, result)?;
Ok(
Frame::new(result, crate::Pixfmt::BGRA, self.display.w(), self.display.h())
)
Ok(result)
}
}