Mirror of https://github.com/weyne85/rustdesk.git (synced 2025-10-29 17:00:05 +00:00)
libs/scrap/src/bindings/yuv_ffi.h (new normal file, 6 lines)
@@ -0,0 +1,6 @@
#include <libyuv/convert.h>
#include <libyuv/convert_argb.h>
#include <libyuv/convert_from.h>
#include <libyuv/convert_from_argb.h>
#include <libyuv/rotate.h>
#include <libyuv/rotate_argb.h>
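This header is presumably consumed by a bindgen step in the crate's build script, since a generated OUT_DIR/yuv_ffi.rs is include!-d further down in this diff. A minimal sketch of what such a step could look like, assuming the bindgen crate and a system libyuv on the include path; this build.rs is not part of the commit and its names are illustrative:

// build.rs (illustrative sketch only, not from this diff)
fn main() {
    println!("cargo:rustc-link-lib=yuv");
    let bindings = bindgen::Builder::default()
        .header("src/bindings/yuv_ffi.h")
        .allowlist_function("(ARGB|ABGR|I420|I444|NV12).*")
        .generate()
        .expect("failed to generate libyuv bindings");
    let out = std::path::PathBuf::from(std::env::var("OUT_DIR").unwrap());
    bindings
        .write_to_file(out.join("yuv_ffi.rs"))
        .expect("failed to write yuv_ffi.rs");
}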
@@ -1,5 +1,5 @@
|
||||
use crate::android::ffi::*;
|
||||
use crate::rgba_to_i420;
|
||||
use crate::Pixfmt;
|
||||
use lazy_static::lazy_static;
|
||||
use serde_json::Value;
|
||||
use std::collections::HashMap;
|
||||
@@ -12,15 +12,15 @@ lazy_static! {
|
||||
|
||||
pub struct Capturer {
|
||||
display: Display,
|
||||
bgra: Vec<u8>,
|
||||
rgba: Vec<u8>,
|
||||
saved_raw_data: Vec<u8>, // for faster compare and copy
|
||||
}
|
||||
|
||||
impl Capturer {
|
||||
pub fn new(display: Display, _yuv: bool) -> io::Result<Capturer> {
|
||||
pub fn new(display: Display) -> io::Result<Capturer> {
|
||||
Ok(Capturer {
|
||||
display,
|
||||
bgra: Vec::new(),
|
||||
rgba: Vec::new(),
|
||||
saved_raw_data: Vec::new(),
|
||||
})
|
||||
}
|
||||
@@ -35,22 +35,47 @@ impl Capturer {
|
||||
}
|
||||
|
||||
impl crate::TraitCapturer for Capturer {
|
||||
fn set_use_yuv(&mut self, _use_yuv: bool) {}
|
||||
|
||||
fn frame<'a>(&'a mut self, _timeout: Duration) -> io::Result<Frame<'a>> {
|
||||
if let Some(buf) = get_video_raw() {
|
||||
crate::would_block_if_equal(&mut self.saved_raw_data, buf)?;
|
||||
rgba_to_i420(self.width(), self.height(), buf, &mut self.bgra);
|
||||
Ok(Frame::RAW(&self.bgra))
|
||||
// Is it safe to directly return buf without copy?
|
||||
self.rgba.resize(buf.len(), 0);
|
||||
unsafe {
|
||||
std::ptr::copy_nonoverlapping(buf.as_ptr(), self.rgba.as_mut_ptr(), buf.len())
|
||||
};
|
||||
Ok(Frame::new(&self.rgba, self.height()))
|
||||
} else {
|
||||
return Err(io::ErrorKind::WouldBlock.into());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub enum Frame<'a> {
RAW(&'a [u8]),
Empty,
pub struct Frame<'a> {
pub data: &'a [u8],
pub stride: Vec<usize>,
}

impl<'a> Frame<'a> {
pub fn new(data: &'a [u8], h: usize) -> Self {
let stride = data.len() / h;
let mut v = Vec::new();
v.push(stride);
Frame { data, stride: v }
}
}

impl<'a> crate::TraitFrame for Frame<'a> {
fn data(&self) -> &[u8] {
self.data
}

fn stride(&self) -> Vec<usize> {
self.stride.clone()
}

fn pixfmt(&self) -> Pixfmt {
Pixfmt::RGBA
}
}
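// Note on Frame::new above (the Windows wrapper later in this diff uses the same
// constructor): the single stride is derived as data.len() / h, i.e. the buffer is
// treated as `h` rows of equal length, so any per-row padding added by the producer
// is folded into stride[0].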
|
||||
|
||||
pub struct Display {
|
||||
|
||||
@@ -7,13 +7,14 @@
|
||||
include!(concat!(env!("OUT_DIR"), "/aom_ffi.rs"));
|
||||
|
||||
use crate::codec::{base_bitrate, codec_thread_num, Quality};
|
||||
use crate::Pixfmt;
|
||||
use crate::{codec::EncoderApi, EncodeFrame, STRIDE_ALIGN};
|
||||
use crate::{common::GoogleImage, generate_call_macro, generate_call_ptr_macro, Error, Result};
|
||||
use hbb_common::{
|
||||
anyhow::{anyhow, Context},
|
||||
bytes::Bytes,
|
||||
log,
|
||||
message_proto::{EncodedVideoFrame, EncodedVideoFrames, VideoFrame},
|
||||
message_proto::{Chroma, EncodedVideoFrame, EncodedVideoFrames, VideoFrame},
|
||||
ResultType,
|
||||
};
|
||||
use std::{ptr, slice};
|
||||
@@ -52,6 +53,7 @@ pub struct AomEncoder {
|
||||
ctx: aom_codec_ctx_t,
|
||||
width: usize,
|
||||
height: usize,
|
||||
i444: bool,
|
||||
}
|
||||
|
||||
// https://webrtc.googlesource.com/src/+/refs/heads/main/modules/video_coding/codecs/av1/libaom_av1_encoder.cc
|
||||
@@ -95,6 +97,7 @@ mod webrtc {
|
||||
pub fn enc_cfg(
|
||||
i: *const aom_codec_iface,
|
||||
cfg: AomEncoderConfig,
|
||||
i444: bool,
|
||||
) -> ResultType<aom_codec_enc_cfg> {
|
||||
let mut c = unsafe { std::mem::MaybeUninit::zeroed().assume_init() };
|
||||
call_aom!(aom_codec_enc_config_default(i, &mut c, kUsageProfile));
|
||||
@@ -139,6 +142,9 @@ mod webrtc {
|
||||
c.g_pass = aom_enc_pass::AOM_RC_ONE_PASS; // One-pass rate control
|
||||
c.g_lag_in_frames = kLagInFrames; // No look ahead when lag equals 0.
|
||||
|
||||
// https://aomedia.googlesource.com/aom/+/refs/tags/v3.6.0/av1/common/enums.h#82
|
||||
c.g_profile = if i444 { 1 } else { 0 };
|
||||
|
||||
Ok(c)
|
||||
}
|
||||
|
||||
@@ -210,14 +216,14 @@ mod webrtc {
|
||||
}
|
||||
|
||||
impl EncoderApi for AomEncoder {
|
||||
fn new(cfg: crate::codec::EncoderCfg) -> ResultType<Self>
|
||||
fn new(cfg: crate::codec::EncoderCfg, i444: bool) -> ResultType<Self>
|
||||
where
|
||||
Self: Sized,
|
||||
{
|
||||
match cfg {
|
||||
crate::codec::EncoderCfg::AOM(config) => {
|
||||
let i = call_aom_ptr!(aom_codec_av1_cx());
|
||||
let c = webrtc::enc_cfg(i, config)?;
|
||||
let c = webrtc::enc_cfg(i, config, i444)?;
|
||||
|
||||
let mut ctx = Default::default();
|
||||
// Flag options: AOM_CODEC_USE_PSNR and AOM_CODEC_USE_HIGHBITDEPTH
|
||||
@@ -234,6 +240,7 @@ impl EncoderApi for AomEncoder {
|
||||
ctx,
|
||||
width: config.width as _,
|
||||
height: config.height as _,
|
||||
i444,
|
||||
})
|
||||
}
|
||||
_ => Err(anyhow!("encoder type mismatch")),
|
||||
@@ -255,8 +262,36 @@ impl EncoderApi for AomEncoder {
|
||||
}
|
||||
}
|
||||
|
||||
fn use_yuv(&self) -> bool {
|
||||
true
|
||||
fn yuvfmt(&self) -> crate::EncodeYuvFormat {
|
||||
let mut img = Default::default();
|
||||
let fmt = if self.i444 {
|
||||
aom_img_fmt::AOM_IMG_FMT_I444
|
||||
} else {
|
||||
aom_img_fmt::AOM_IMG_FMT_I420
|
||||
};
|
||||
unsafe {
|
||||
aom_img_wrap(
|
||||
&mut img,
|
||||
fmt,
|
||||
self.width as _,
|
||||
self.height as _,
|
||||
crate::STRIDE_ALIGN as _,
|
||||
0x1 as _,
|
||||
);
|
||||
}
|
||||
let pixfmt = if self.i444 {
|
||||
Pixfmt::I444
|
||||
} else {
|
||||
Pixfmt::I420
|
||||
};
|
||||
crate::EncodeYuvFormat {
|
||||
pixfmt,
|
||||
w: img.w as _,
|
||||
h: img.h as _,
|
||||
stride: img.stride.map(|s| s as usize).to_vec(),
|
||||
u: img.planes[1] as usize - img.planes[0] as usize,
|
||||
v: img.planes[2] as usize - img.planes[0] as usize,
|
||||
}
|
||||
}
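// Note: aom_img_wrap() is given a dummy non-null buffer pointer (0x1) here solely so
// that libaom computes plane strides and offsets for the chosen format; only the
// differences between plane pointers and the stride array are used afterwards, and no
// image memory is allocated or written through that pointer.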
|
||||
|
||||
fn set_quality(&mut self, quality: Quality) -> ResultType<()> {
|
||||
@@ -282,14 +317,20 @@ impl EncoderApi for AomEncoder {
|
||||
|
||||
impl AomEncoder {
|
||||
pub fn encode(&mut self, pts: i64, data: &[u8], stride_align: usize) -> Result<EncodeFrames> {
|
||||
if 2 * data.len() < 3 * self.width * self.height {
|
||||
let bpp = if self.i444 { 24 } else { 12 };
|
||||
if data.len() < self.width * self.height * bpp / 8 {
|
||||
return Err(Error::FailedCall("len not enough".to_string()));
|
||||
}
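// Size-check rationale (explanatory note, not from the diff): I420 carries 12 bits per
// pixel (full-resolution Y plus quarter-resolution U and V), while I444 carries 24 bits
// per pixel, so e.g. a 1920x1080 I420 input must be at least
// 1920 * 1080 * 12 / 8 = 3_110_400 bytes.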
|
||||
let fmt = if self.i444 {
|
||||
aom_img_fmt::AOM_IMG_FMT_I444
|
||||
} else {
|
||||
aom_img_fmt::AOM_IMG_FMT_I420
|
||||
};
|
||||
|
||||
let mut image = Default::default();
|
||||
call_aom_ptr!(aom_img_wrap(
|
||||
&mut image,
|
||||
aom_img_fmt::AOM_IMG_FMT_I420,
|
||||
fmt,
|
||||
self.width as _,
|
||||
self.height as _,
|
||||
stride_align as _,
|
||||
@@ -524,6 +565,13 @@ impl GoogleImage for Image {
|
||||
fn planes(&self) -> Vec<*mut u8> {
|
||||
self.inner().planes.iter().map(|p| *p as *mut u8).collect()
|
||||
}
|
||||
|
||||
fn chroma(&self) -> Chroma {
|
||||
match self.inner().fmt {
|
||||
aom_img_fmt::AOM_IMG_FMT_I444 => Chroma::I444,
|
||||
_ => Chroma::I420,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Drop for Image {
|
||||
|
||||
@@ -14,7 +14,7 @@ use crate::{
|
||||
aom::{self, AomDecoder, AomEncoder, AomEncoderConfig},
|
||||
common::GoogleImage,
|
||||
vpxcodec::{self, VpxDecoder, VpxDecoderConfig, VpxEncoder, VpxEncoderConfig, VpxVideoCodecId},
|
||||
CodecName, ImageRgb,
|
||||
CodecName, EncodeYuvFormat, ImageRgb,
|
||||
};
|
||||
|
||||
use hbb_common::{
|
||||
@@ -23,7 +23,7 @@ use hbb_common::{
|
||||
config::PeerConfig,
|
||||
log,
|
||||
message_proto::{
|
||||
supported_decoding::PreferCodec, video_frame, EncodedVideoFrames,
|
||||
supported_decoding::PreferCodec, video_frame, Chroma, CodecAbility, EncodedVideoFrames,
|
||||
SupportedDecoding, SupportedEncoding, VideoFrame,
|
||||
},
|
||||
sysinfo::{System, SystemExt},
|
||||
@@ -56,13 +56,13 @@ pub enum EncoderCfg {
|
||||
}
|
||||
|
||||
pub trait EncoderApi {
|
||||
fn new(cfg: EncoderCfg) -> ResultType<Self>
|
||||
fn new(cfg: EncoderCfg, i444: bool) -> ResultType<Self>
|
||||
where
|
||||
Self: Sized;
|
||||
|
||||
fn encode_to_message(&mut self, frame: &[u8], ms: i64) -> ResultType<VideoFrame>;
|
||||
|
||||
fn use_yuv(&self) -> bool;
|
||||
fn yuvfmt(&self) -> EncodeYuvFormat;
|
||||
|
||||
fn set_quality(&mut self, quality: Quality) -> ResultType<()>;
|
||||
|
||||
@@ -107,18 +107,18 @@ pub enum EncodingUpdate {
|
||||
}
|
||||
|
||||
impl Encoder {
|
||||
pub fn new(config: EncoderCfg) -> ResultType<Encoder> {
|
||||
log::info!("new encoder:{:?}", config);
|
||||
pub fn new(config: EncoderCfg, i444: bool) -> ResultType<Encoder> {
|
||||
log::info!("new encoder:{config:?}, i444:{i444}");
|
||||
match config {
|
||||
EncoderCfg::VPX(_) => Ok(Encoder {
|
||||
codec: Box::new(VpxEncoder::new(config)?),
|
||||
codec: Box::new(VpxEncoder::new(config, i444)?),
|
||||
}),
|
||||
EncoderCfg::AOM(_) => Ok(Encoder {
|
||||
codec: Box::new(AomEncoder::new(config)?),
|
||||
codec: Box::new(AomEncoder::new(config, i444)?),
|
||||
}),
|
||||
|
||||
#[cfg(feature = "hwcodec")]
|
||||
EncoderCfg::HW(_) => match HwEncoder::new(config) {
|
||||
EncoderCfg::HW(_) => match HwEncoder::new(config, i444) {
|
||||
Ok(hw) => Ok(Encoder {
|
||||
codec: Box::new(hw),
|
||||
}),
|
||||
@@ -230,6 +230,12 @@ impl Encoder {
|
||||
let mut encoding = SupportedEncoding {
|
||||
vp8: true,
|
||||
av1: true,
|
||||
i444: Some(CodecAbility {
|
||||
vp9: true,
|
||||
av1: true,
|
||||
..Default::default()
|
||||
})
|
||||
.into(),
|
||||
..Default::default()
|
||||
};
|
||||
#[cfg(feature = "hwcodec")]
|
||||
@@ -240,18 +246,41 @@ impl Encoder {
|
||||
}
|
||||
encoding
|
||||
}

pub fn use_i444(config: &EncoderCfg) -> bool {
let decodings = PEER_DECODINGS.lock().unwrap().clone();
let prefer_i444 = decodings
.iter()
.all(|d| d.1.prefer_chroma == Chroma::I444.into());
let i444_useable = match config {
EncoderCfg::VPX(vpx) => match vpx.codec {
VpxVideoCodecId::VP8 => false,
VpxVideoCodecId::VP9 => decodings.iter().all(|d| d.1.i444.vp9),
},
EncoderCfg::AOM(_) => decodings.iter().all(|d| d.1.i444.av1),
EncoderCfg::HW(_) => false,
};
prefer_i444 && i444_useable && !decodings.is_empty()
}
}
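For orientation, a hedged sketch of how a caller is expected to combine the new use_i444 helper with the changed Encoder::new signature; the actual call site lives in the server's video service, outside this diff, and the variable names here are assumptions:

// Illustrative only.
let i444 = Encoder::use_i444(&config);
let mut encoder = Encoder::new(config, i444)?;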
|
||||
|
||||
impl Decoder {
|
||||
pub fn supported_decodings(id_for_perfer: Option<&str>) -> SupportedDecoding {
|
||||
let (prefer, prefer_chroma) = Self::preference(id_for_perfer);
|
||||
|
||||
#[allow(unused_mut)]
|
||||
let mut decoding = SupportedDecoding {
|
||||
ability_vp8: 1,
|
||||
ability_vp9: 1,
|
||||
ability_av1: 1,
|
||||
prefer: id_for_perfer
|
||||
.map_or(PreferCodec::Auto, |id| Self::codec_preference(id))
|
||||
.into(),
|
||||
i444: Some(CodecAbility {
|
||||
vp9: true,
|
||||
av1: true,
|
||||
..Default::default()
|
||||
})
|
||||
.into(),
|
||||
prefer: prefer.into(),
|
||||
prefer_chroma: prefer_chroma.into(),
|
||||
..Default::default()
|
||||
};
|
||||
#[cfg(feature = "hwcodec")]
|
||||
@@ -314,31 +343,33 @@ impl Decoder {
|
||||
&mut self,
|
||||
frame: &video_frame::Union,
|
||||
rgb: &mut ImageRgb,
|
||||
chroma: &mut Option<Chroma>,
|
||||
) -> ResultType<bool> {
|
||||
match frame {
|
||||
video_frame::Union::Vp8s(vp8s) => {
|
||||
if let Some(vp8) = &mut self.vp8 {
|
||||
Decoder::handle_vpxs_video_frame(vp8, vp8s, rgb)
|
||||
Decoder::handle_vpxs_video_frame(vp8, vp8s, rgb, chroma)
|
||||
} else {
|
||||
bail!("vp8 decoder not available");
|
||||
}
|
||||
}
|
||||
video_frame::Union::Vp9s(vp9s) => {
|
||||
if let Some(vp9) = &mut self.vp9 {
|
||||
Decoder::handle_vpxs_video_frame(vp9, vp9s, rgb)
|
||||
Decoder::handle_vpxs_video_frame(vp9, vp9s, rgb, chroma)
|
||||
} else {
|
||||
bail!("vp9 decoder not available");
|
||||
}
|
||||
}
|
||||
video_frame::Union::Av1s(av1s) => {
|
||||
if let Some(av1) = &mut self.av1 {
|
||||
Decoder::handle_av1s_video_frame(av1, av1s, rgb)
|
||||
Decoder::handle_av1s_video_frame(av1, av1s, rgb, chroma)
|
||||
} else {
|
||||
bail!("av1 decoder not available");
|
||||
}
|
||||
}
|
||||
#[cfg(feature = "hwcodec")]
|
||||
video_frame::Union::H264s(h264s) => {
|
||||
*chroma = Some(Chroma::I420);
|
||||
if let Some(decoder) = &mut self.hw.h264 {
|
||||
Decoder::handle_hw_video_frame(decoder, h264s, rgb, &mut self.i420)
|
||||
} else {
|
||||
@@ -347,6 +378,7 @@ impl Decoder {
|
||||
}
|
||||
#[cfg(feature = "hwcodec")]
|
||||
video_frame::Union::H265s(h265s) => {
|
||||
*chroma = Some(Chroma::I420);
|
||||
if let Some(decoder) = &mut self.hw.h265 {
|
||||
Decoder::handle_hw_video_frame(decoder, h265s, rgb, &mut self.i420)
|
||||
} else {
|
||||
@@ -355,6 +387,7 @@ impl Decoder {
|
||||
}
|
||||
#[cfg(feature = "mediacodec")]
|
||||
video_frame::Union::H264s(h264s) => {
|
||||
*chroma = Some(Chroma::I420);
|
||||
if let Some(decoder) = &mut self.media_codec.h264 {
|
||||
Decoder::handle_mediacodec_video_frame(decoder, h264s, rgb)
|
||||
} else {
|
||||
@@ -363,6 +396,7 @@ impl Decoder {
|
||||
}
|
||||
#[cfg(feature = "mediacodec")]
|
||||
video_frame::Union::H265s(h265s) => {
|
||||
*chroma = Some(Chroma::I420);
|
||||
if let Some(decoder) = &mut self.media_codec.h265 {
|
||||
Decoder::handle_mediacodec_video_frame(decoder, h265s, rgb)
|
||||
} else {
|
||||
@@ -378,6 +412,7 @@ impl Decoder {
|
||||
decoder: &mut VpxDecoder,
|
||||
vpxs: &EncodedVideoFrames,
|
||||
rgb: &mut ImageRgb,
|
||||
chroma: &mut Option<Chroma>,
|
||||
) -> ResultType<bool> {
|
||||
let mut last_frame = vpxcodec::Image::new();
|
||||
for vpx in vpxs.frames.iter() {
|
||||
@@ -393,6 +428,7 @@ impl Decoder {
|
||||
if last_frame.is_null() {
|
||||
Ok(false)
|
||||
} else {
|
||||
*chroma = Some(last_frame.chroma());
|
||||
last_frame.to(rgb);
|
||||
Ok(true)
|
||||
}
|
||||
@@ -403,6 +439,7 @@ impl Decoder {
|
||||
decoder: &mut AomDecoder,
|
||||
av1s: &EncodedVideoFrames,
|
||||
rgb: &mut ImageRgb,
|
||||
chroma: &mut Option<Chroma>,
|
||||
) -> ResultType<bool> {
|
||||
let mut last_frame = aom::Image::new();
|
||||
for av1 in av1s.frames.iter() {
|
||||
@@ -418,6 +455,7 @@ impl Decoder {
|
||||
if last_frame.is_null() {
|
||||
Ok(false)
|
||||
} else {
|
||||
*chroma = Some(last_frame.chroma());
|
||||
last_frame.to(rgb);
|
||||
Ok(true)
|
||||
}
|
||||
@@ -457,12 +495,16 @@ impl Decoder {
|
||||
return Ok(false);
|
||||
}
|
||||
|
||||
fn codec_preference(id: &str) -> PreferCodec {
|
||||
let codec = PeerConfig::load(id)
|
||||
.options
|
||||
fn preference(id: Option<&str>) -> (PreferCodec, Chroma) {
|
||||
let id = id.unwrap_or_default();
|
||||
if id.is_empty() {
|
||||
return (PreferCodec::Auto, Chroma::I420);
|
||||
}
|
||||
let options = PeerConfig::load(id).options;
|
||||
let codec = options
|
||||
.get("codec-preference")
|
||||
.map_or("".to_owned(), |c| c.to_owned());
|
||||
if codec == "vp8" {
|
||||
let codec = if codec == "vp8" {
|
||||
PreferCodec::VP8
|
||||
} else if codec == "vp9" {
|
||||
PreferCodec::VP9
|
||||
@@ -474,7 +516,13 @@ impl Decoder {
|
||||
PreferCodec::H265
|
||||
} else {
|
||||
PreferCodec::Auto
|
||||
}
|
||||
};
|
||||
let chroma = if options.get("i444") == Some(&"Y".to_string()) {
|
||||
Chroma::I444
|
||||
} else {
|
||||
Chroma::I420
|
||||
};
|
||||
(codec, chroma)
|
||||
}
|
||||
}
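A brief, hedged illustration of what the new preference helper returns, based only on the code above; the peer options shown are hypothetical:

// No id, or a peer with no saved options:
//   Decoder::preference(None) == (PreferCodec::Auto, Chroma::I420)
// A peer whose saved options contain codec-preference = "vp9" and i444 = "Y":
//   Decoder::preference(Some(id)) == (PreferCodec::VP9, Chroma::I444)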
|
||||
|
||||
|
||||
@@ -1,367 +1,25 @@
|
||||
use super::vpx::*;
|
||||
use std::os::raw::c_int;
|
||||
#![allow(non_camel_case_types)]
|
||||
#![allow(non_snake_case)]
|
||||
#![allow(non_upper_case_globals)]
|
||||
#![allow(improper_ctypes)]
|
||||
#![allow(dead_code)]
|
||||
|
||||
extern "C" {
|
||||
// seems libyuv uses reverse byte order compared with our view
|
||||
include!(concat!(env!("OUT_DIR"), "/yuv_ffi.rs"));
|
||||
|
||||
pub fn ARGBRotate(
|
||||
src_argb: *const u8,
|
||||
src_stride_argb: c_int,
|
||||
dst_argb: *mut u8,
|
||||
dst_stride_argb: c_int,
|
||||
src_width: c_int,
|
||||
src_height: c_int,
|
||||
mode: c_int,
|
||||
) -> c_int;
|
||||
#[cfg(not(target_os = "ios"))]
|
||||
use crate::Frame;
|
||||
use crate::{generate_call_macro, EncodeYuvFormat, TraitFrame};
|
||||
use hbb_common::{bail, log, ResultType};
|
||||
|
||||
pub fn ARGBMirror(
|
||||
src_argb: *const u8,
|
||||
src_stride_argb: c_int,
|
||||
dst_argb: *mut u8,
|
||||
dst_stride_argb: c_int,
|
||||
width: c_int,
|
||||
height: c_int,
|
||||
) -> c_int;
|
||||
|
||||
pub fn ARGBToI420(
|
||||
src_bgra: *const u8,
|
||||
src_stride_bgra: c_int,
|
||||
dst_y: *mut u8,
|
||||
dst_stride_y: c_int,
|
||||
dst_u: *mut u8,
|
||||
dst_stride_u: c_int,
|
||||
dst_v: *mut u8,
|
||||
dst_stride_v: c_int,
|
||||
width: c_int,
|
||||
height: c_int,
|
||||
) -> c_int;
|
||||
|
||||
pub fn ABGRToI420(
|
||||
src_rgba: *const u8,
|
||||
src_stride_rgba: c_int,
|
||||
dst_y: *mut u8,
|
||||
dst_stride_y: c_int,
|
||||
dst_u: *mut u8,
|
||||
dst_stride_u: c_int,
|
||||
dst_v: *mut u8,
|
||||
dst_stride_v: c_int,
|
||||
width: c_int,
|
||||
height: c_int,
|
||||
) -> c_int;
|
||||
|
||||
pub fn ARGBToNV12(
|
||||
src_bgra: *const u8,
|
||||
src_stride_bgra: c_int,
|
||||
dst_y: *mut u8,
|
||||
dst_stride_y: c_int,
|
||||
dst_uv: *mut u8,
|
||||
dst_stride_uv: c_int,
|
||||
width: c_int,
|
||||
height: c_int,
|
||||
) -> c_int;
|
||||
|
||||
pub fn NV12ToI420(
|
||||
src_y: *const u8,
|
||||
src_stride_y: c_int,
|
||||
src_uv: *const u8,
|
||||
src_stride_uv: c_int,
|
||||
dst_y: *mut u8,
|
||||
dst_stride_y: c_int,
|
||||
dst_u: *mut u8,
|
||||
dst_stride_u: c_int,
|
||||
dst_v: *mut u8,
|
||||
dst_stride_v: c_int,
|
||||
width: c_int,
|
||||
height: c_int,
|
||||
) -> c_int;
|
||||
|
||||
// I420ToRGB24: RGB little endian (bgr in memory)
|
||||
// I420ToRaw: RGB big endian (rgb in memory) to RGBA.
|
||||
pub fn I420ToRAW(
|
||||
src_y: *const u8,
|
||||
src_stride_y: c_int,
|
||||
src_u: *const u8,
|
||||
src_stride_u: c_int,
|
||||
src_v: *const u8,
|
||||
src_stride_v: c_int,
|
||||
dst_rgba: *mut u8,
|
||||
dst_stride_raw: c_int,
|
||||
width: c_int,
|
||||
height: c_int,
|
||||
) -> c_int;
|
||||
|
||||
pub fn I420ToARGB(
|
||||
src_y: *const u8,
|
||||
src_stride_y: c_int,
|
||||
src_u: *const u8,
|
||||
src_stride_u: c_int,
|
||||
src_v: *const u8,
|
||||
src_stride_v: c_int,
|
||||
dst_rgba: *mut u8,
|
||||
dst_stride_rgba: c_int,
|
||||
width: c_int,
|
||||
height: c_int,
|
||||
) -> c_int;
|
||||
|
||||
pub fn I420ToABGR(
|
||||
src_y: *const u8,
|
||||
src_stride_y: c_int,
|
||||
src_u: *const u8,
|
||||
src_stride_u: c_int,
|
||||
src_v: *const u8,
|
||||
src_stride_v: c_int,
|
||||
dst_rgba: *mut u8,
|
||||
dst_stride_rgba: c_int,
|
||||
width: c_int,
|
||||
height: c_int,
|
||||
) -> c_int;
|
||||
|
||||
pub fn NV12ToARGB(
|
||||
src_y: *const u8,
|
||||
src_stride_y: c_int,
|
||||
src_uv: *const u8,
|
||||
src_stride_uv: c_int,
|
||||
dst_rgba: *mut u8,
|
||||
dst_stride_rgba: c_int,
|
||||
width: c_int,
|
||||
height: c_int,
|
||||
) -> c_int;
|
||||
|
||||
pub fn NV12ToABGR(
|
||||
src_y: *const u8,
|
||||
src_stride_y: c_int,
|
||||
src_uv: *const u8,
|
||||
src_stride_uv: c_int,
|
||||
dst_rgba: *mut u8,
|
||||
dst_stride_rgba: c_int,
|
||||
width: c_int,
|
||||
height: c_int,
|
||||
) -> c_int;
|
||||
}
|
||||
|
||||
// https://github.com/webmproject/libvpx/blob/master/vpx/src/vpx_image.c
|
||||
#[inline]
|
||||
fn get_vpx_i420_stride(
|
||||
width: usize,
|
||||
height: usize,
|
||||
stride_align: usize,
|
||||
) -> (usize, usize, usize, usize, usize, usize) {
|
||||
let mut img = Default::default();
|
||||
unsafe {
|
||||
vpx_img_wrap(
|
||||
&mut img,
|
||||
vpx_img_fmt::VPX_IMG_FMT_I420,
|
||||
width as _,
|
||||
height as _,
|
||||
stride_align as _,
|
||||
0x1 as _,
|
||||
);
|
||||
}
|
||||
(
|
||||
img.w as _,
|
||||
img.h as _,
|
||||
img.stride[0] as _,
|
||||
img.stride[1] as _,
|
||||
img.planes[1] as usize - img.planes[0] as usize,
|
||||
img.planes[2] as usize - img.planes[0] as usize,
|
||||
)
|
||||
}
|
||||
|
||||
pub fn i420_to_rgb(width: usize, height: usize, src: &[u8], dst: &mut Vec<u8>) {
|
||||
let (_, _, src_stride_y, src_stride_uv, u, v) =
|
||||
get_vpx_i420_stride(width, height, super::STRIDE_ALIGN);
|
||||
let src_y = src.as_ptr();
|
||||
let src_u = src[u..].as_ptr();
|
||||
let src_v = src[v..].as_ptr();
|
||||
dst.resize(width * height * 3, 0);
|
||||
unsafe {
|
||||
super::I420ToRAW(
|
||||
src_y,
|
||||
src_stride_y as _,
|
||||
src_u,
|
||||
src_stride_uv as _,
|
||||
src_v,
|
||||
src_stride_uv as _,
|
||||
dst.as_mut_ptr(),
|
||||
(width * 3) as _,
|
||||
width as _,
|
||||
height as _,
|
||||
);
|
||||
};
|
||||
}
|
||||
|
||||
pub fn i420_to_bgra(width: usize, height: usize, src: &[u8], dst: &mut Vec<u8>) {
|
||||
let (_, _, src_stride_y, src_stride_uv, u, v) =
|
||||
get_vpx_i420_stride(width, height, super::STRIDE_ALIGN);
|
||||
let src_y = src.as_ptr();
|
||||
let src_u = src[u..].as_ptr();
|
||||
let src_v = src[v..].as_ptr();
|
||||
dst.resize(width * height * 4, 0);
|
||||
unsafe {
|
||||
super::I420ToARGB(
|
||||
src_y,
|
||||
src_stride_y as _,
|
||||
src_u,
|
||||
src_stride_uv as _,
|
||||
src_v,
|
||||
src_stride_uv as _,
|
||||
dst.as_mut_ptr(),
|
||||
(width * 3) as _,
|
||||
width as _,
|
||||
height as _,
|
||||
);
|
||||
};
|
||||
}
|
||||
|
||||
pub fn bgra_to_i420(width: usize, height: usize, src: &[u8], dst: &mut Vec<u8>) {
|
||||
let (_, h, dst_stride_y, dst_stride_uv, u, v) =
|
||||
get_vpx_i420_stride(width, height, super::STRIDE_ALIGN);
|
||||
dst.resize(h * dst_stride_y * 2, 0); // waste some memory to ensure memory safety
|
||||
let dst_y = dst.as_mut_ptr();
|
||||
let dst_u = dst[u..].as_mut_ptr();
|
||||
let dst_v = dst[v..].as_mut_ptr();
|
||||
unsafe {
|
||||
ARGBToI420(
|
||||
src.as_ptr(),
|
||||
(src.len() / height) as _,
|
||||
dst_y,
|
||||
dst_stride_y as _,
|
||||
dst_u,
|
||||
dst_stride_uv as _,
|
||||
dst_v,
|
||||
dst_stride_uv as _,
|
||||
width as _,
|
||||
height as _,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
pub fn rgba_to_i420(width: usize, height: usize, src: &[u8], dst: &mut Vec<u8>) {
|
||||
let (_, h, dst_stride_y, dst_stride_uv, u, v) =
|
||||
get_vpx_i420_stride(width, height, super::STRIDE_ALIGN);
|
||||
dst.resize(h * dst_stride_y * 2, 0); // waste some memory to ensure memory safety
|
||||
let dst_y = dst.as_mut_ptr();
|
||||
let dst_u = dst[u..].as_mut_ptr();
|
||||
let dst_v = dst[v..].as_mut_ptr();
|
||||
unsafe {
|
||||
ABGRToI420(
|
||||
src.as_ptr(),
|
||||
(src.len() / height) as _,
|
||||
dst_y,
|
||||
dst_stride_y as _,
|
||||
dst_u,
|
||||
dst_stride_uv as _,
|
||||
dst_v,
|
||||
dst_stride_uv as _,
|
||||
width as _,
|
||||
height as _,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
pub unsafe fn nv12_to_i420(
|
||||
src_y: *const u8,
|
||||
src_stride_y: c_int,
|
||||
src_uv: *const u8,
|
||||
src_stride_uv: c_int,
|
||||
width: usize,
|
||||
height: usize,
|
||||
dst: &mut Vec<u8>,
|
||||
) {
|
||||
let (_, h, dst_stride_y, dst_stride_uv, u, v) =
|
||||
get_vpx_i420_stride(width, height, super::STRIDE_ALIGN);
|
||||
dst.resize(h * dst_stride_y * 2, 0); // waste some memory to ensure memory safety
|
||||
let dst_y = dst.as_mut_ptr();
|
||||
let dst_u = dst[u..].as_mut_ptr();
|
||||
let dst_v = dst[v..].as_mut_ptr();
|
||||
NV12ToI420(
|
||||
src_y,
|
||||
src_stride_y,
|
||||
src_uv,
|
||||
src_stride_uv,
|
||||
dst_y,
|
||||
dst_stride_y as _,
|
||||
dst_u,
|
||||
dst_stride_uv as _,
|
||||
dst_v,
|
||||
dst_stride_uv as _,
|
||||
width as _,
|
||||
height as _,
|
||||
);
|
||||
}
|
||||
generate_call_macro!(call_yuv, false);
|
||||
|
||||
#[cfg(feature = "hwcodec")]
|
||||
pub mod hw {
|
||||
use super::*;
|
||||
use crate::ImageFormat;
|
||||
use hbb_common::{anyhow::anyhow, ResultType};
|
||||
#[cfg(target_os = "windows")]
|
||||
use hwcodec::{ffmpeg::ffmpeg_linesize_offset_length, AVPixelFormat};
|
||||
|
||||
pub fn hw_bgra_to_i420(
|
||||
width: usize,
|
||||
height: usize,
|
||||
stride: &[i32],
|
||||
offset: &[i32],
|
||||
length: i32,
|
||||
src: &[u8],
|
||||
dst: &mut Vec<u8>,
|
||||
) {
|
||||
let stride_y = stride[0] as usize;
|
||||
let stride_u = stride[1] as usize;
|
||||
let stride_v = stride[2] as usize;
|
||||
let offset_u = offset[0] as usize;
|
||||
let offset_v = offset[1] as usize;
|
||||
|
||||
dst.resize(length as _, 0);
|
||||
let dst_y = dst.as_mut_ptr();
|
||||
let dst_u = dst[offset_u..].as_mut_ptr();
|
||||
let dst_v = dst[offset_v..].as_mut_ptr();
|
||||
unsafe {
|
||||
super::ARGBToI420(
|
||||
src.as_ptr(),
|
||||
(src.len() / height) as _,
|
||||
dst_y,
|
||||
stride_y as _,
|
||||
dst_u,
|
||||
stride_u as _,
|
||||
dst_v,
|
||||
stride_v as _,
|
||||
width as _,
|
||||
height as _,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
pub fn hw_bgra_to_nv12(
|
||||
width: usize,
|
||||
height: usize,
|
||||
stride: &[i32],
|
||||
offset: &[i32],
|
||||
length: i32,
|
||||
src: &[u8],
|
||||
dst: &mut Vec<u8>,
|
||||
) {
|
||||
let stride_y = stride[0] as usize;
|
||||
let stride_uv = stride[1] as usize;
|
||||
let offset_uv = offset[0] as usize;
|
||||
|
||||
dst.resize(length as _, 0);
|
||||
let dst_y = dst.as_mut_ptr();
|
||||
let dst_uv = dst[offset_uv..].as_mut_ptr();
|
||||
unsafe {
|
||||
super::ARGBToNV12(
|
||||
src.as_ptr(),
|
||||
(src.len() / height) as _,
|
||||
dst_y,
|
||||
stride_y as _,
|
||||
dst_uv,
|
||||
stride_uv as _,
|
||||
width as _,
|
||||
height as _,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
pub fn hw_nv12_to(
|
||||
fmt: ImageFormat,
|
||||
@@ -386,61 +44,59 @@ pub mod hw {
|
||||
let i420_stride_v = linesize_i420[2];
|
||||
i420.resize(i420_len as _, 0);
|
||||
|
||||
unsafe {
|
||||
let i420_offset_y = i420.as_ptr().add(0) as _;
|
||||
let i420_offset_u = i420.as_ptr().add(offset_i420[0] as _) as _;
|
||||
let i420_offset_v = i420.as_ptr().add(offset_i420[1] as _) as _;
|
||||
super::NV12ToI420(
|
||||
src_y.as_ptr(),
|
||||
nv12_stride_y as _,
|
||||
src_uv.as_ptr(),
|
||||
nv12_stride_uv as _,
|
||||
i420_offset_y,
|
||||
i420_stride_y,
|
||||
i420_offset_u,
|
||||
i420_stride_u,
|
||||
i420_offset_v,
|
||||
i420_stride_v,
|
||||
width as _,
|
||||
height as _,
|
||||
);
|
||||
match fmt {
|
||||
ImageFormat::ARGB => {
|
||||
super::I420ToARGB(
|
||||
i420_offset_y,
|
||||
i420_stride_y,
|
||||
i420_offset_u,
|
||||
i420_stride_u,
|
||||
i420_offset_v,
|
||||
i420_stride_v,
|
||||
dst.as_mut_ptr(),
|
||||
(width * 4) as _,
|
||||
width as _,
|
||||
height as _,
|
||||
);
|
||||
}
|
||||
ImageFormat::ABGR => {
|
||||
super::I420ToABGR(
|
||||
i420_offset_y,
|
||||
i420_stride_y,
|
||||
i420_offset_u,
|
||||
i420_stride_u,
|
||||
i420_offset_v,
|
||||
i420_stride_v,
|
||||
dst.as_mut_ptr(),
|
||||
(width * 4) as _,
|
||||
width as _,
|
||||
height as _,
|
||||
);
|
||||
}
|
||||
_ => {
|
||||
return Err(anyhow!("unsupported image format"));
|
||||
}
|
||||
let i420_offset_y = unsafe { i420.as_ptr().add(0) as _ };
|
||||
let i420_offset_u = unsafe { i420.as_ptr().add(offset_i420[0] as _) as _ };
|
||||
let i420_offset_v = unsafe { i420.as_ptr().add(offset_i420[1] as _) as _ };
|
||||
call_yuv!(NV12ToI420(
|
||||
src_y.as_ptr(),
|
||||
nv12_stride_y as _,
|
||||
src_uv.as_ptr(),
|
||||
nv12_stride_uv as _,
|
||||
i420_offset_y,
|
||||
i420_stride_y,
|
||||
i420_offset_u,
|
||||
i420_stride_u,
|
||||
i420_offset_v,
|
||||
i420_stride_v,
|
||||
width as _,
|
||||
height as _,
|
||||
));
|
||||
match fmt {
|
||||
ImageFormat::ARGB => {
|
||||
call_yuv!(I420ToARGB(
|
||||
i420_offset_y,
|
||||
i420_stride_y,
|
||||
i420_offset_u,
|
||||
i420_stride_u,
|
||||
i420_offset_v,
|
||||
i420_stride_v,
|
||||
dst.as_mut_ptr(),
|
||||
(width * 4) as _,
|
||||
width as _,
|
||||
height as _,
|
||||
));
|
||||
}
|
||||
return Ok(());
|
||||
};
|
||||
ImageFormat::ABGR => {
|
||||
call_yuv!(I420ToABGR(
|
||||
i420_offset_y,
|
||||
i420_stride_y,
|
||||
i420_offset_u,
|
||||
i420_stride_u,
|
||||
i420_offset_v,
|
||||
i420_stride_v,
|
||||
dst.as_mut_ptr(),
|
||||
(width * 4) as _,
|
||||
width as _,
|
||||
height as _,
|
||||
));
|
||||
}
|
||||
_ => {
|
||||
bail!("unsupported image format");
|
||||
}
|
||||
}
|
||||
return Ok(());
|
||||
}
|
||||
return Err(anyhow!("get linesize offset failed"));
|
||||
bail!("get linesize offset failed");
|
||||
}
|
||||
|
||||
#[cfg(not(target_os = "windows"))]
|
||||
@@ -457,41 +113,34 @@ pub mod hw {
|
||||
_align: usize,
|
||||
) -> ResultType<()> {
|
||||
dst.resize(width * height * 4, 0);
|
||||
unsafe {
|
||||
match fmt {
|
||||
ImageFormat::ARGB => {
|
||||
match super::NV12ToARGB(
|
||||
src_y.as_ptr(),
|
||||
src_stride_y as _,
|
||||
src_uv.as_ptr(),
|
||||
src_stride_uv as _,
|
||||
dst.as_mut_ptr(),
|
||||
(width * 4) as _,
|
||||
width as _,
|
||||
height as _,
|
||||
) {
|
||||
0 => Ok(()),
|
||||
_ => Err(anyhow!("NV12ToARGB failed")),
|
||||
}
|
||||
}
|
||||
ImageFormat::ABGR => {
|
||||
match super::NV12ToABGR(
|
||||
src_y.as_ptr(),
|
||||
src_stride_y as _,
|
||||
src_uv.as_ptr(),
|
||||
src_stride_uv as _,
|
||||
dst.as_mut_ptr(),
|
||||
(width * 4) as _,
|
||||
width as _,
|
||||
height as _,
|
||||
) {
|
||||
0 => Ok(()),
|
||||
_ => Err(anyhow!("NV12ToABGR failed")),
|
||||
}
|
||||
}
|
||||
_ => Err(anyhow!("unsupported image format")),
|
||||
match fmt {
|
||||
ImageFormat::ARGB => {
|
||||
call_yuv!(NV12ToARGB(
|
||||
src_y.as_ptr(),
|
||||
src_stride_y as _,
|
||||
src_uv.as_ptr(),
|
||||
src_stride_uv as _,
|
||||
dst.as_mut_ptr(),
|
||||
(width * 4) as _,
|
||||
width as _,
|
||||
height as _,
|
||||
));
|
||||
}
|
||||
ImageFormat::ABGR => {
|
||||
call_yuv!(NV12ToABGR(
|
||||
src_y.as_ptr(),
|
||||
src_stride_y as _,
|
||||
src_uv.as_ptr(),
|
||||
src_stride_uv as _,
|
||||
dst.as_mut_ptr(),
|
||||
(width * 4) as _,
|
||||
width as _,
|
||||
height as _,
|
||||
));
|
||||
}
|
||||
_ => bail!("unsupported image format"),
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn hw_i420_to(
|
||||
@@ -505,43 +154,153 @@ pub mod hw {
|
||||
src_stride_u: usize,
|
||||
src_stride_v: usize,
|
||||
dst: &mut Vec<u8>,
|
||||
) {
|
||||
) -> ResultType<()> {
|
||||
let src_y = src_y.as_ptr();
|
||||
let src_u = src_u.as_ptr();
|
||||
let src_v = src_v.as_ptr();
|
||||
dst.resize(width * height * 4, 0);
|
||||
unsafe {
|
||||
match fmt {
|
||||
ImageFormat::ARGB => {
|
||||
super::I420ToARGB(
|
||||
src_y,
|
||||
src_stride_y as _,
|
||||
src_u,
|
||||
src_stride_u as _,
|
||||
src_v,
|
||||
src_stride_v as _,
|
||||
dst.as_mut_ptr(),
|
||||
(width * 4) as _,
|
||||
width as _,
|
||||
height as _,
|
||||
);
|
||||
}
|
||||
ImageFormat::ABGR => {
|
||||
super::I420ToABGR(
|
||||
src_y,
|
||||
src_stride_y as _,
|
||||
src_u,
|
||||
src_stride_u as _,
|
||||
src_v,
|
||||
src_stride_v as _,
|
||||
dst.as_mut_ptr(),
|
||||
(width * 4) as _,
|
||||
width as _,
|
||||
height as _,
|
||||
);
|
||||
}
|
||||
_ => {}
|
||||
match fmt {
|
||||
ImageFormat::ARGB => {
|
||||
call_yuv!(I420ToARGB(
|
||||
src_y,
|
||||
src_stride_y as _,
|
||||
src_u,
|
||||
src_stride_u as _,
|
||||
src_v,
|
||||
src_stride_v as _,
|
||||
dst.as_mut_ptr(),
|
||||
(width * 4) as _,
|
||||
width as _,
|
||||
height as _,
|
||||
));
|
||||
}
|
||||
ImageFormat::ABGR => {
|
||||
call_yuv!(I420ToABGR(
|
||||
src_y,
|
||||
src_stride_y as _,
|
||||
src_u,
|
||||
src_stride_u as _,
|
||||
src_v,
|
||||
src_stride_v as _,
|
||||
dst.as_mut_ptr(),
|
||||
(width * 4) as _,
|
||||
width as _,
|
||||
height as _,
|
||||
));
|
||||
}
|
||||
_ => bail!("unsupported image format"),
|
||||
};
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
#[cfg(not(target_os = "ios"))]
|
||||
pub fn convert_to_yuv(
|
||||
captured: &Frame,
|
||||
dst_fmt: EncodeYuvFormat,
|
||||
dst: &mut Vec<u8>,
|
||||
mid_data: &mut Vec<u8>,
|
||||
) -> ResultType<()> {
|
||||
let src = captured.data();
|
||||
let src_stride = captured.stride();
|
||||
let captured_pixfmt = captured.pixfmt();
|
||||
if captured_pixfmt == crate::Pixfmt::BGRA || captured_pixfmt == crate::Pixfmt::RGBA {
|
||||
if src.len() < src_stride[0] * dst_fmt.h {
|
||||
bail!(
|
||||
"length not enough: {} < {}",
|
||||
src.len(),
|
||||
src_stride[0] * dst_fmt.h
|
||||
);
|
||||
}
|
||||
}
|
||||
match (captured_pixfmt, dst_fmt.pixfmt) {
|
||||
(crate::Pixfmt::BGRA, crate::Pixfmt::I420) | (crate::Pixfmt::RGBA, crate::Pixfmt::I420) => {
|
||||
let dst_stride_y = dst_fmt.stride[0];
|
||||
let dst_stride_uv = dst_fmt.stride[1];
|
||||
dst.resize(dst_fmt.h * dst_stride_y * 2, 0); // waste some memory to ensure memory safety
|
||||
let dst_y = dst.as_mut_ptr();
|
||||
let dst_u = dst[dst_fmt.u..].as_mut_ptr();
|
||||
let dst_v = dst[dst_fmt.v..].as_mut_ptr();
|
||||
let f = if captured_pixfmt == crate::Pixfmt::BGRA {
|
||||
ARGBToI420
|
||||
} else {
|
||||
ABGRToI420
|
||||
};
|
||||
call_yuv!(f(
|
||||
src.as_ptr(),
|
||||
src_stride[0] as _,
|
||||
dst_y,
|
||||
dst_stride_y as _,
|
||||
dst_u,
|
||||
dst_stride_uv as _,
|
||||
dst_v,
|
||||
dst_stride_uv as _,
|
||||
dst_fmt.w as _,
|
||||
dst_fmt.h as _,
|
||||
));
|
||||
}
|
||||
(crate::Pixfmt::BGRA, crate::Pixfmt::NV12) | (crate::Pixfmt::RGBA, crate::Pixfmt::NV12) => {
|
||||
let dst_stride_y = dst_fmt.stride[0];
|
||||
let dst_stride_uv = dst_fmt.stride[1];
|
||||
dst.resize(dst_fmt.h * (dst_stride_y + dst_stride_uv / 2), 0);
|
||||
let dst_y = dst.as_mut_ptr();
|
||||
let dst_uv = dst[dst_fmt.u..].as_mut_ptr();
|
||||
let f = if captured_pixfmt == crate::Pixfmt::BGRA {
|
||||
ARGBToNV12
|
||||
} else {
|
||||
ABGRToNV12
|
||||
};
|
||||
call_yuv!(f(
|
||||
src.as_ptr(),
|
||||
src_stride[0] as _,
|
||||
dst_y,
|
||||
dst_stride_y as _,
|
||||
dst_uv,
|
||||
dst_stride_uv as _,
|
||||
dst_fmt.w as _,
|
||||
dst_fmt.h as _,
|
||||
));
|
||||
}
|
||||
(crate::Pixfmt::BGRA, crate::Pixfmt::I444) | (crate::Pixfmt::RGBA, crate::Pixfmt::I444) => {
|
||||
let dst_stride_y = dst_fmt.stride[0];
|
||||
let dst_stride_u = dst_fmt.stride[1];
|
||||
let dst_stride_v = dst_fmt.stride[2];
|
||||
dst.resize(dst_fmt.h * (dst_stride_y + dst_stride_u + dst_stride_v), 0);
|
||||
let dst_y = dst.as_mut_ptr();
|
||||
let dst_u = dst[dst_fmt.u..].as_mut_ptr();
|
||||
let dst_v = dst[dst_fmt.v..].as_mut_ptr();
|
||||
let src = if captured_pixfmt == crate::Pixfmt::BGRA {
|
||||
src
|
||||
} else {
|
||||
mid_data.resize(src.len(), 0);
|
||||
call_yuv!(ABGRToARGB(
|
||||
src.as_ptr(),
|
||||
src_stride[0] as _,
|
||||
mid_data.as_mut_ptr(),
|
||||
src_stride[0] as _,
|
||||
dst_fmt.w as _,
|
||||
dst_fmt.h as _,
|
||||
));
|
||||
mid_data
|
||||
};
|
||||
call_yuv!(ARGBToI444(
|
||||
src.as_ptr(),
|
||||
src_stride[0] as _,
|
||||
dst_y,
|
||||
dst_stride_y as _,
|
||||
dst_u,
|
||||
dst_stride_u as _,
|
||||
dst_v,
|
||||
dst_stride_v as _,
|
||||
dst_fmt.w as _,
|
||||
dst_fmt.h as _,
|
||||
));
|
||||
}
|
||||
_ => {
|
||||
bail!(
|
||||
"convert not support, {captured_pixfmt:?} -> {:?}",
|
||||
dst_fmt.pixfmt
|
||||
);
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
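A hedged sketch of the capture-to-encode flow this function enables; the real call site is in the server's video service, which is not part of this diff, and the surrounding names (capturer, encoder, pts, timeout) are assumptions:

// Illustrative only.
let frame = capturer.frame(timeout)?;                  // BGRA/RGBA TraitFrame
let mut yuv = Vec::new();
let mut mid_data = Vec::new();                         // scratch for RGBA -> ARGB
convert_to_yuv(&frame, encoder.yuvfmt(), &mut yuv, &mut mid_data)?;
let frames = encoder.encode(pts, &yuv, STRIDE_ALIGN)?; // e.g. VpxEncoder / AomEncoder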
|
||||
|
||||
@@ -1,10 +1,9 @@
|
||||
use crate::{common::TraitCapturer, dxgi};
|
||||
use crate::{common::TraitCapturer, dxgi, Pixfmt};
|
||||
use std::{
|
||||
io::{
|
||||
self,
|
||||
ErrorKind::{NotFound, TimedOut, WouldBlock},
|
||||
},
|
||||
ops,
|
||||
time::Duration,
|
||||
};
|
||||
|
||||
@@ -15,10 +14,10 @@ pub struct Capturer {
|
||||
}
|
||||
|
||||
impl Capturer {
|
||||
pub fn new(display: Display, yuv: bool) -> io::Result<Capturer> {
|
||||
pub fn new(display: Display) -> io::Result<Capturer> {
|
||||
let width = display.width();
|
||||
let height = display.height();
|
||||
let inner = dxgi::Capturer::new(display.0, yuv)?;
|
||||
let inner = dxgi::Capturer::new(display.0)?;
|
||||
Ok(Capturer {
|
||||
inner,
|
||||
width,
|
||||
@@ -40,13 +39,9 @@ impl Capturer {
|
||||
}
|
||||
|
||||
impl TraitCapturer for Capturer {
|
||||
fn set_use_yuv(&mut self, use_yuv: bool) {
|
||||
self.inner.set_use_yuv(use_yuv);
|
||||
}
|
||||
|
||||
fn frame<'a>(&'a mut self, timeout: Duration) -> io::Result<Frame<'a>> {
|
||||
match self.inner.frame(timeout.as_millis() as _) {
|
||||
Ok(frame) => Ok(Frame(frame)),
|
||||
Ok(frame) => Ok(Frame::new(frame, self.height)),
|
||||
Err(ref error) if error.kind() == TimedOut => Err(WouldBlock.into()),
|
||||
Err(error) => Err(error),
|
||||
}
|
||||
@@ -61,12 +56,31 @@ impl TraitCapturer for Capturer {
|
||||
}
|
||||
}
|
||||
|
||||
pub struct Frame<'a>(pub &'a [u8]);
|
||||
pub struct Frame<'a> {
|
||||
data: &'a [u8],
|
||||
stride: Vec<usize>,
|
||||
}
|
||||
|
||||
impl<'a> ops::Deref for Frame<'a> {
|
||||
type Target = [u8];
|
||||
fn deref(&self) -> &[u8] {
|
||||
self.0
|
||||
impl<'a> Frame<'a> {
|
||||
pub fn new(data: &'a [u8], h: usize) -> Self {
|
||||
let stride = data.len() / h;
|
||||
let mut v = Vec::new();
|
||||
v.push(stride);
|
||||
Frame { data, stride: v }
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> crate::TraitFrame for Frame<'a> {
|
||||
fn data(&self) -> &[u8] {
|
||||
self.data
|
||||
}
|
||||
|
||||
fn stride(&self) -> Vec<usize> {
|
||||
self.stride.clone()
|
||||
}
|
||||
|
||||
fn pixfmt(&self) -> Pixfmt {
|
||||
Pixfmt::BGRA
|
||||
}
|
||||
}
|
||||
|
||||
@@ -134,9 +148,9 @@ impl CapturerMag {
|
||||
dxgi::mag::CapturerMag::is_supported()
|
||||
}
|
||||
|
||||
pub fn new(origin: (i32, i32), width: usize, height: usize, use_yuv: bool) -> io::Result<Self> {
|
||||
pub fn new(origin: (i32, i32), width: usize, height: usize) -> io::Result<Self> {
|
||||
Ok(CapturerMag {
|
||||
inner: dxgi::mag::CapturerMag::new(origin, width, height, use_yuv)?,
|
||||
inner: dxgi::mag::CapturerMag::new(origin, width, height)?,
|
||||
data: Vec::new(),
|
||||
})
|
||||
}
|
||||
@@ -151,13 +165,9 @@ impl CapturerMag {
|
||||
}
|
||||
|
||||
impl TraitCapturer for CapturerMag {
|
||||
fn set_use_yuv(&mut self, use_yuv: bool) {
|
||||
self.inner.set_use_yuv(use_yuv)
|
||||
}
|
||||
|
||||
fn frame<'a>(&'a mut self, _timeout_ms: Duration) -> io::Result<Frame<'a>> {
|
||||
self.inner.frame(&mut self.data)?;
|
||||
Ok(Frame(&self.data))
|
||||
Ok(Frame::new(&self.data, self.inner.get_rect().2))
|
||||
}
|
||||
|
||||
fn is_gdi(&self) -> bool {
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
use crate::{
|
||||
codec::{base_bitrate, codec_thread_num, EncoderApi, EncoderCfg},
|
||||
hw, ImageFormat, ImageRgb, HW_STRIDE_ALIGN,
|
||||
hw, ImageFormat, ImageRgb, Pixfmt, HW_STRIDE_ALIGN,
|
||||
};
|
||||
use hbb_common::{
|
||||
allow_err,
|
||||
@@ -31,7 +31,6 @@ const DEFAULT_RC: RateControl = RC_DEFAULT;
|
||||
|
||||
pub struct HwEncoder {
|
||||
encoder: Encoder,
|
||||
yuv: Vec<u8>,
|
||||
pub format: DataFormat,
|
||||
pub pixfmt: AVPixelFormat,
|
||||
width: u32,
|
||||
@@ -40,7 +39,7 @@ pub struct HwEncoder {
|
||||
}
|
||||
|
||||
impl EncoderApi for HwEncoder {
|
||||
fn new(cfg: EncoderCfg) -> ResultType<Self>
|
||||
fn new(cfg: EncoderCfg, _i444: bool) -> ResultType<Self>
|
||||
where
|
||||
Self: Sized,
|
||||
{
|
||||
@@ -78,7 +77,6 @@ impl EncoderApi for HwEncoder {
|
||||
match Encoder::new(ctx.clone()) {
|
||||
Ok(encoder) => Ok(HwEncoder {
|
||||
encoder,
|
||||
yuv: vec![],
|
||||
format,
|
||||
pixfmt: ctx.pixfmt,
|
||||
width: ctx.width as _,
|
||||
@@ -118,8 +116,31 @@ impl EncoderApi for HwEncoder {
|
||||
}
|
||||
}
|
||||
|
||||
fn use_yuv(&self) -> bool {
|
||||
false
|
||||
fn yuvfmt(&self) -> crate::EncodeYuvFormat {
|
||||
let pixfmt = if self.pixfmt == AVPixelFormat::AV_PIX_FMT_NV12 {
|
||||
Pixfmt::NV12
|
||||
} else {
|
||||
Pixfmt::I420
|
||||
};
|
||||
let stride = self
|
||||
.encoder
|
||||
.linesize
|
||||
.clone()
|
||||
.drain(..)
|
||||
.map(|i| i as usize)
|
||||
.collect();
|
||||
crate::EncodeYuvFormat {
|
||||
pixfmt,
|
||||
w: self.encoder.ctx.width as _,
|
||||
h: self.encoder.ctx.height as _,
|
||||
stride,
|
||||
u: self.encoder.offset[0] as _,
|
||||
v: if pixfmt == Pixfmt::NV12 {
|
||||
0
|
||||
} else {
|
||||
self.encoder.offset[1] as _
|
||||
},
|
||||
}
|
||||
}
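// Note: NV12 stores U and V interleaved in a single chroma plane, so only the
// chroma-plane offset `u` is meaningful and `v` is left at 0 above; for I420 both
// `u` and `v` are taken from the encoder's plane offsets.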
|
||||
|
||||
fn set_quality(&mut self, quality: crate::codec::Quality) -> ResultType<()> {
|
||||
@@ -145,29 +166,8 @@ impl HwEncoder {
|
||||
})
|
||||
}
|
||||
|
||||
pub fn encode(&mut self, bgra: &[u8]) -> ResultType<Vec<EncodeFrame>> {
|
||||
match self.pixfmt {
|
||||
AVPixelFormat::AV_PIX_FMT_YUV420P => hw::hw_bgra_to_i420(
|
||||
self.encoder.ctx.width as _,
|
||||
self.encoder.ctx.height as _,
|
||||
&self.encoder.linesize,
|
||||
&self.encoder.offset,
|
||||
self.encoder.length,
|
||||
bgra,
|
||||
&mut self.yuv,
|
||||
),
|
||||
AVPixelFormat::AV_PIX_FMT_NV12 => hw::hw_bgra_to_nv12(
|
||||
self.encoder.ctx.width as _,
|
||||
self.encoder.ctx.height as _,
|
||||
&self.encoder.linesize,
|
||||
&self.encoder.offset,
|
||||
self.encoder.length,
|
||||
bgra,
|
||||
&mut self.yuv,
|
||||
),
|
||||
}
|
||||
|
||||
match self.encoder.encode(&self.yuv) {
|
||||
pub fn encode(&mut self, yuv: &[u8]) -> ResultType<Vec<EncodeFrame>> {
|
||||
match self.encoder.encode(yuv) {
|
||||
Ok(v) => {
|
||||
let mut data = Vec::<EncodeFrame>::new();
|
||||
data.append(v);
|
||||
@@ -245,7 +245,7 @@ impl HwDecoder {
|
||||
pub fn decode(&mut self, data: &[u8]) -> ResultType<Vec<HwDecoderImage>> {
|
||||
match self.decoder.decode(data) {
|
||||
Ok(v) => Ok(v.iter().map(|f| HwDecoderImage { frame: f }).collect()),
|
||||
Err(_) => Ok(vec![]),
|
||||
Err(e) => Err(anyhow!(e)),
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -274,7 +274,7 @@ impl HwDecoderImage<'_> {
|
||||
&mut rgb.raw as _,
|
||||
i420,
|
||||
HW_STRIDE_ALIGN,
|
||||
),
|
||||
)?,
|
||||
AVPixelFormat::AV_PIX_FMT_YUV420P => {
|
||||
hw::hw_i420_to(
|
||||
rgb.fmt(),
|
||||
@@ -287,10 +287,10 @@ impl HwDecoderImage<'_> {
|
||||
frame.linesize[1] as _,
|
||||
frame.linesize[2] as _,
|
||||
&mut rgb.raw as _,
|
||||
);
|
||||
return Ok(());
|
||||
)?;
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn bgra(&self, bgra: &mut Vec<u8>, i420: &mut Vec<u8>) -> ResultType<()> {
|
||||
|
||||
@@ -11,10 +11,10 @@ pub enum Capturer {
|
||||
}
|
||||
|
||||
impl Capturer {
|
||||
pub fn new(display: Display, yuv: bool) -> io::Result<Capturer> {
|
||||
pub fn new(display: Display) -> io::Result<Capturer> {
|
||||
Ok(match display {
|
||||
Display::X11(d) => Capturer::X11(x11::Capturer::new(d, yuv)?),
|
||||
Display::WAYLAND(d) => Capturer::WAYLAND(wayland::Capturer::new(d, yuv)?),
|
||||
Display::X11(d) => Capturer::X11(x11::Capturer::new(d)?),
|
||||
Display::WAYLAND(d) => Capturer::WAYLAND(wayland::Capturer::new(d)?),
|
||||
})
|
||||
}
|
||||
|
||||
@@ -34,13 +34,6 @@ impl Capturer {
|
||||
}
|
||||
|
||||
impl TraitCapturer for Capturer {
|
||||
fn set_use_yuv(&mut self, use_yuv: bool) {
|
||||
match self {
|
||||
Capturer::X11(d) => d.set_use_yuv(use_yuv),
|
||||
Capturer::WAYLAND(d) => d.set_use_yuv(use_yuv),
|
||||
}
|
||||
}
|
||||
|
||||
fn frame<'a>(&'a mut self, timeout: Duration) -> io::Result<Frame<'a>> {
|
||||
match self {
|
||||
Capturer::X11(d) => d.frame(timeout),
|
||||
|
||||
@@ -1,5 +1,8 @@
|
||||
pub use self::vpxcodec::*;
|
||||
use hbb_common::message_proto::{video_frame, VideoFrame};
|
||||
use hbb_common::{
|
||||
log,
|
||||
message_proto::{video_frame, Chroma, VideoFrame},
|
||||
};
|
||||
use std::slice;
|
||||
|
||||
cfg_if! {
|
||||
@@ -96,8 +99,6 @@ pub fn would_block_if_equal(old: &mut Vec<u8>, b: &[u8]) -> std::io::Result<()>
|
||||
}
|
||||
|
||||
pub trait TraitCapturer {
|
||||
fn set_use_yuv(&mut self, use_yuv: bool);
|
||||
|
||||
// Frame capture is not supported on iOS.
|
||||
#[cfg(not(any(target_os = "ios")))]
|
||||
fn frame<'a>(&'a mut self, timeout: std::time::Duration) -> std::io::Result<Frame<'a>>;
|
||||
@@ -108,6 +109,31 @@ pub trait TraitCapturer {
|
||||
fn set_gdi(&mut self) -> bool;
|
||||
}
|
||||
|
||||
pub trait TraitFrame {
fn data(&self) -> &[u8];

fn stride(&self) -> Vec<usize>;

fn pixfmt(&self) -> Pixfmt;
}
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
pub enum Pixfmt {
BGRA,
RGBA,
I420,
NV12,
I444,
}

pub struct EncodeYuvFormat {
pub pixfmt: Pixfmt,
pub w: usize,
pub h: usize,
pub stride: Vec<usize>,
pub u: usize,
pub v: usize,
}

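As a worked illustration of these fields, a hypothetical tightly packed 1920x1080 I420 target; the real values come from vpx_img_wrap/aom_img_wrap or the hardware encoder and may include alignment padding:

// Hypothetical example, not produced by this diff:
let fmt = EncodeYuvFormat {
    pixfmt: Pixfmt::I420,
    w: 1920,
    h: 1080,
    stride: vec![1920, 960, 960],
    u: 1920 * 1080,               // U plane starts after the full-resolution Y plane
    v: 1920 * 1080 + 960 * 540,   // V plane starts after the Y and U planes
};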
#[cfg(x11)]
|
||||
#[inline]
|
||||
pub fn is_x11() -> bool {
|
||||
@@ -260,6 +286,7 @@ pub trait GoogleImage {
|
||||
fn height(&self) -> usize;
|
||||
fn stride(&self) -> Vec<i32>;
|
||||
fn planes(&self) -> Vec<*mut u8>;
|
||||
fn chroma(&self) -> Chroma;
|
||||
fn get_bytes_per_row(w: usize, fmt: ImageFormat, stride: usize) -> usize {
|
||||
let bytes_per_pixel = match fmt {
|
||||
ImageFormat::Raw => 3,
|
||||
@@ -278,8 +305,8 @@ pub trait GoogleImage {
|
||||
let stride = self.stride();
|
||||
let planes = self.planes();
|
||||
unsafe {
|
||||
match rgb.fmt() {
|
||||
ImageFormat::Raw => {
|
||||
match (self.chroma(), rgb.fmt()) {
|
||||
(Chroma::I420, ImageFormat::Raw) => {
|
||||
super::I420ToRAW(
|
||||
planes[0],
|
||||
stride[0],
|
||||
@@ -293,7 +320,7 @@ pub trait GoogleImage {
|
||||
self.height() as _,
|
||||
);
|
||||
}
|
||||
ImageFormat::ARGB => {
|
||||
(Chroma::I420, ImageFormat::ARGB) => {
|
||||
super::I420ToARGB(
|
||||
planes[0],
|
||||
stride[0],
|
||||
@@ -307,7 +334,7 @@ pub trait GoogleImage {
|
||||
self.height() as _,
|
||||
);
|
||||
}
|
||||
ImageFormat::ABGR => {
|
||||
(Chroma::I420, ImageFormat::ABGR) => {
|
||||
super::I420ToABGR(
|
||||
planes[0],
|
||||
stride[0],
|
||||
@@ -321,6 +348,36 @@ pub trait GoogleImage {
|
||||
self.height() as _,
|
||||
);
|
||||
}
|
||||
(Chroma::I444, ImageFormat::ARGB) => {
|
||||
super::I444ToARGB(
|
||||
planes[0],
|
||||
stride[0],
|
||||
planes[1],
|
||||
stride[1],
|
||||
planes[2],
|
||||
stride[2],
|
||||
rgb.raw.as_mut_ptr(),
|
||||
bytes_per_row as _,
|
||||
self.width() as _,
|
||||
self.height() as _,
|
||||
);
|
||||
}
|
||||
(Chroma::I444, ImageFormat::ABGR) => {
|
||||
super::I444ToABGR(
|
||||
planes[0],
|
||||
stride[0],
|
||||
planes[1],
|
||||
stride[1],
|
||||
planes[2],
|
||||
stride[2],
|
||||
rgb.raw.as_mut_ptr(),
|
||||
bytes_per_row as _,
|
||||
self.width() as _,
|
||||
self.height() as _,
|
||||
);
|
||||
}
|
||||
// (Chroma::I444, ImageFormat::Raw) is not handled here; newer libyuv versions provide I444ToRAW.
|
||||
_ => log::error!("unsupported pixfmt:{:?}", self.chroma()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,18 +1,16 @@
|
||||
use crate::quartz;
|
||||
use crate::{quartz, Pixfmt};
|
||||
use std::marker::PhantomData;
|
||||
use std::sync::{Arc, Mutex, TryLockError};
|
||||
use std::{io, mem, ops};
|
||||
use std::{io, mem};
|
||||
|
||||
pub struct Capturer {
|
||||
inner: quartz::Capturer,
|
||||
frame: Arc<Mutex<Option<quartz::Frame>>>,
|
||||
use_yuv: bool,
|
||||
i420: Vec<u8>,
|
||||
saved_raw_data: Vec<u8>, // for faster compare and copy
|
||||
}
|
||||
|
||||
impl Capturer {
|
||||
pub fn new(display: Display, use_yuv: bool) -> io::Result<Capturer> {
|
||||
pub fn new(display: Display) -> io::Result<Capturer> {
|
||||
let frame = Arc::new(Mutex::new(None));
|
||||
|
||||
let f = frame.clone();
|
||||
@@ -20,11 +18,7 @@ impl Capturer {
|
||||
display.0,
|
||||
display.width(),
|
||||
display.height(),
|
||||
if use_yuv {
|
||||
quartz::PixelFormat::YCbCr420Video
|
||||
} else {
|
||||
quartz::PixelFormat::Argb8888
|
||||
},
|
||||
quartz::PixelFormat::Argb8888,
|
||||
Default::default(),
|
||||
move |inner| {
|
||||
if let Ok(mut f) = f.lock() {
|
||||
@@ -37,8 +31,6 @@ impl Capturer {
|
||||
Ok(Capturer {
|
||||
inner,
|
||||
frame,
|
||||
use_yuv,
|
||||
i420: Vec::new(),
|
||||
saved_raw_data: Vec::new(),
|
||||
})
|
||||
}
|
||||
@@ -53,10 +45,6 @@ impl Capturer {
|
||||
}
|
||||
|
||||
impl crate::TraitCapturer for Capturer {
|
||||
fn set_use_yuv(&mut self, use_yuv: bool) {
|
||||
self.use_yuv = use_yuv;
|
||||
}
|
||||
|
||||
fn frame<'a>(&'a mut self, _timeout_ms: std::time::Duration) -> io::Result<Frame<'a>> {
|
||||
match self.frame.try_lock() {
|
||||
Ok(mut handle) => {
|
||||
@@ -66,9 +54,7 @@ impl crate::TraitCapturer for Capturer {
|
||||
match frame {
|
||||
Some(mut frame) => {
|
||||
crate::would_block_if_equal(&mut self.saved_raw_data, frame.inner())?;
|
||||
if self.use_yuv {
|
||||
frame.nv12_to_i420(self.width(), self.height(), &mut self.i420);
|
||||
}
|
||||
frame.surface_to_bgra(self.height());
|
||||
Ok(Frame(frame, PhantomData))
|
||||
}
|
||||
|
||||
@@ -85,11 +71,20 @@ impl crate::TraitCapturer for Capturer {
|
||||
|
||||
pub struct Frame<'a>(pub quartz::Frame, PhantomData<&'a [u8]>);
|
||||
|
||||
impl<'a> ops::Deref for Frame<'a> {
|
||||
type Target = [u8];
|
||||
fn deref(&self) -> &[u8] {
|
||||
impl<'a> crate::TraitFrame for Frame<'a> {
|
||||
fn data(&self) -> &[u8] {
|
||||
&*self.0
|
||||
}
|
||||
|
||||
fn stride(&self) -> Vec<usize> {
|
||||
let mut v = Vec::new();
|
||||
v.push(self.0.stride());
|
||||
v
|
||||
}
|
||||
|
||||
fn pixfmt(&self) -> Pixfmt {
|
||||
Pixfmt::BGRA
|
||||
}
|
||||
}
|
||||
|
||||
pub struct Display(quartz::Display);
|
||||
|
||||
@@ -4,11 +4,11 @@
|
||||
|
||||
use hbb_common::anyhow::{anyhow, Context};
|
||||
use hbb_common::log;
|
||||
use hbb_common::message_proto::{EncodedVideoFrame, EncodedVideoFrames, VideoFrame};
|
||||
use hbb_common::message_proto::{Chroma, EncodedVideoFrame, EncodedVideoFrames, VideoFrame};
|
||||
use hbb_common::ResultType;
|
||||
|
||||
use crate::codec::{base_bitrate, codec_thread_num, EncoderApi, Quality};
|
||||
use crate::{GoogleImage, STRIDE_ALIGN};
|
||||
use crate::{GoogleImage, Pixfmt, STRIDE_ALIGN};
|
||||
|
||||
use super::vpx::{vp8e_enc_control_id::*, vpx_codec_err_t::*, *};
|
||||
use crate::{generate_call_macro, generate_call_ptr_macro, Error, Result};
|
||||
@@ -39,6 +39,7 @@ pub struct VpxEncoder {
|
||||
width: usize,
|
||||
height: usize,
|
||||
id: VpxVideoCodecId,
|
||||
i444: bool,
|
||||
}
|
||||
|
||||
pub struct VpxDecoder {
|
||||
@@ -46,7 +47,7 @@ pub struct VpxDecoder {
|
||||
}
|
||||
|
||||
impl EncoderApi for VpxEncoder {
|
||||
fn new(cfg: crate::codec::EncoderCfg) -> ResultType<Self>
|
||||
fn new(cfg: crate::codec::EncoderCfg, i444: bool) -> ResultType<Self>
|
||||
where
|
||||
Self: Sized,
|
||||
{
|
||||
@@ -98,6 +99,13 @@ impl EncoderApi for VpxEncoder {
|
||||
} else {
|
||||
c.rc_target_bitrate = base_bitrate;
|
||||
}
|
||||
// https://chromium.googlesource.com/webm/libvpx/+/refs/heads/main/vp9/common/vp9_enums.h#29
|
||||
// https://chromium.googlesource.com/webm/libvpx/+/refs/heads/main/vp8/vp8_cx_iface.c#282
|
||||
c.g_profile = if i444 && config.codec == VpxVideoCodecId::VP9 {
|
||||
1
|
||||
} else {
|
||||
0
|
||||
};
|
||||
|
||||
/*
|
||||
The VPX encoder supports two-pass encoding for rate control purposes.
|
||||
@@ -166,6 +174,7 @@ impl EncoderApi for VpxEncoder {
|
||||
width: config.width as _,
|
||||
height: config.height as _,
|
||||
id: config.codec,
|
||||
i444,
|
||||
})
|
||||
}
|
||||
_ => Err(anyhow!("encoder type mismatch")),
|
||||
@@ -192,8 +201,36 @@ impl EncoderApi for VpxEncoder {
|
||||
}
|
||||
}
|
||||
|
||||
fn use_yuv(&self) -> bool {
|
||||
true
|
||||
fn yuvfmt(&self) -> crate::EncodeYuvFormat {
|
||||
let mut img = Default::default();
|
||||
let fmt = if self.i444 {
|
||||
vpx_img_fmt::VPX_IMG_FMT_I444
|
||||
} else {
|
||||
vpx_img_fmt::VPX_IMG_FMT_I420
|
||||
};
|
||||
unsafe {
|
||||
vpx_img_wrap(
|
||||
&mut img,
|
||||
fmt,
|
||||
self.width as _,
|
||||
self.height as _,
|
||||
crate::STRIDE_ALIGN as _,
|
||||
0x1 as _,
|
||||
);
|
||||
}
|
||||
let pixfmt = if self.i444 {
|
||||
Pixfmt::I444
|
||||
} else {
|
||||
Pixfmt::I420
|
||||
};
|
||||
crate::EncodeYuvFormat {
|
||||
pixfmt,
|
||||
w: img.w as _,
|
||||
h: img.h as _,
|
||||
stride: img.stride.map(|s| s as usize).to_vec(),
|
||||
u: img.planes[1] as usize - img.planes[0] as usize,
|
||||
v: img.planes[2] as usize - img.planes[0] as usize,
|
||||
}
|
||||
}
|
||||
|
||||
fn set_quality(&mut self, quality: Quality) -> ResultType<()> {
|
||||
@@ -219,14 +256,20 @@ impl EncoderApi for VpxEncoder {
|
||||
|
||||
impl VpxEncoder {
|
||||
pub fn encode(&mut self, pts: i64, data: &[u8], stride_align: usize) -> Result<EncodeFrames> {
|
||||
if 2 * data.len() < 3 * self.width * self.height {
|
||||
let bpp = if self.i444 { 24 } else { 12 };
|
||||
if data.len() < self.width * self.height * bpp / 8 {
|
||||
return Err(Error::FailedCall("len not enough".to_string()));
|
||||
}
|
||||
let fmt = if self.i444 {
|
||||
vpx_img_fmt::VPX_IMG_FMT_I444
|
||||
} else {
|
||||
vpx_img_fmt::VPX_IMG_FMT_I420
|
||||
};
|
||||
|
||||
let mut image = Default::default();
|
||||
call_vpx_ptr!(vpx_img_wrap(
|
||||
&mut image,
|
||||
vpx_img_fmt::VPX_IMG_FMT_I420,
|
||||
fmt,
|
||||
self.width as _,
|
||||
self.height as _,
|
||||
stride_align as _,
|
||||
@@ -533,6 +576,13 @@ impl GoogleImage for Image {
|
||||
fn planes(&self) -> Vec<*mut u8> {
|
||||
self.inner().planes.iter().map(|p| *p as *mut u8).collect()
|
||||
}
|
||||
|
||||
fn chroma(&self) -> Chroma {
|
||||
match self.inner().fmt {
|
||||
vpx_img_fmt::VPX_IMG_FMT_I444 => Chroma::I444,
|
||||
_ => Chroma::I420,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Drop for Image {
|
||||
|
||||
@@ -2,7 +2,7 @@ use crate::common::{x11::Frame, TraitCapturer};
|
||||
use crate::wayland::{capturable::*, *};
|
||||
use std::{io, sync::RwLock, time::Duration};
|
||||
|
||||
pub struct Capturer(Display, Box<dyn Recorder>, bool, Vec<u8>);
|
||||
pub struct Capturer(Display, Box<dyn Recorder>, Vec<u8>);
|
||||
|
||||
static mut IS_CURSOR_EMBEDDED: Option<bool> = None;
|
||||
|
||||
@@ -45,9 +45,9 @@ fn map_err<E: ToString>(err: E) -> io::Error {
|
||||
}
|
||||
|
||||
impl Capturer {
|
||||
pub fn new(display: Display, yuv: bool) -> io::Result<Capturer> {
|
||||
pub fn new(display: Display) -> io::Result<Capturer> {
|
||||
let r = display.0.recorder(false).map_err(map_err)?;
|
||||
Ok(Capturer(display, r, yuv, Default::default()))
|
||||
Ok(Capturer(display, r, Default::default()))
|
||||
}
|
||||
|
||||
pub fn width(&self) -> usize {
|
||||
@@ -60,24 +60,10 @@ impl Capturer {
|
||||
}
|
||||
|
||||
impl TraitCapturer for Capturer {
|
||||
fn set_use_yuv(&mut self, use_yuv: bool) {
|
||||
self.2 = use_yuv;
|
||||
}
|
||||
|
||||
fn frame<'a>(&'a mut self, timeout: Duration) -> io::Result<Frame<'a>> {
|
||||
match self.1.capture(timeout.as_millis() as _).map_err(map_err)? {
|
||||
PixelProvider::BGR0(w, h, x) => Ok(Frame(if self.2 {
|
||||
crate::common::bgra_to_i420(w as _, h as _, &x, &mut self.3);
|
||||
&self.3[..]
|
||||
} else {
|
||||
x
|
||||
})),
|
||||
PixelProvider::RGB0(w, h, x) => Ok(Frame(if self.2 {
|
||||
crate::common::rgba_to_i420(w as _, h as _, &x, &mut self.3);
|
||||
&self.3[..]
|
||||
} else {
|
||||
x
|
||||
})),
|
||||
PixelProvider::BGR0(_w, h, x) => Ok(Frame::new(x, crate::Pixfmt::BGRA, h)),
|
||||
PixelProvider::RGB0(_w, h, x) => Ok(Frame::new(x, crate::Pixfmt::RGBA, h)),
|
||||
PixelProvider::NONE => Err(std::io::ErrorKind::WouldBlock.into()),
|
||||
_ => Err(map_err("Invalid data")),
|
||||
}
|
||||
|
||||
@@ -1,13 +1,13 @@
|
||||
use crate::{common::TraitCapturer, x11};
|
||||
use std::{io, ops, time::Duration};
|
||||
use crate::{common::TraitCapturer, x11, TraitFrame, Pixfmt};
|
||||
use std::{io, time::Duration};
|
||||
|
||||
pub struct Capturer(x11::Capturer);
|
||||
|
||||
pub const IS_CURSOR_EMBEDDED: bool = false;
|
||||
|
||||
impl Capturer {
|
||||
pub fn new(display: Display, yuv: bool) -> io::Result<Capturer> {
|
||||
x11::Capturer::new(display.0, yuv).map(Capturer)
|
||||
pub fn new(display: Display) -> io::Result<Capturer> {
|
||||
x11::Capturer::new(display.0).map(Capturer)
|
||||
}
|
||||
|
||||
pub fn width(&self) -> usize {
|
||||
@@ -20,21 +20,37 @@ impl Capturer {
|
||||
}
|
||||
|
||||
impl TraitCapturer for Capturer {
|
||||
fn set_use_yuv(&mut self, use_yuv: bool) {
|
||||
self.0.set_use_yuv(use_yuv);
|
||||
}
|
||||
|
||||
fn frame<'a>(&'a mut self, _timeout: Duration) -> io::Result<Frame<'a>> {
|
||||
Ok(Frame(self.0.frame()?))
|
||||
Ok(self.0.frame()?)
|
||||
}
|
||||
}
|
||||
|
||||
pub struct Frame<'a>(pub &'a [u8]);
pub struct Frame<'a> {
pub data: &'a [u8],
pub pixfmt: Pixfmt,
pub stride: Vec<usize>,
}

impl<'a> ops::Deref for Frame<'a> {
type Target = [u8];
fn deref(&self) -> &[u8] {
self.0
impl<'a> Frame<'a> {
pub fn new(data: &'a [u8], pixfmt: Pixfmt, h: usize) -> Self {
let stride = data.len() / h;
let mut v = Vec::new();
v.push(stride);
Self { data, pixfmt, stride: v }
}
}
|
||||
|
||||
impl<'a> TraitFrame for Frame<'a> {
|
||||
fn data(&self) -> &[u8] {
|
||||
self.data
|
||||
}
|
||||
|
||||
fn stride(&self) -> Vec<usize> {
|
||||
self.stride.clone()
|
||||
}
|
||||
|
||||
fn pixfmt(&self) -> crate::Pixfmt {
|
||||
self.pixfmt
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -160,7 +160,7 @@ impl CapturerGDI {
stride,
self.width,
self.height,
180,
crate::RotationMode::kRotate180,
);
Ok(())
}

@@ -245,9 +245,6 @@ pub struct CapturerMag {
rect: RECT,
width: usize,
height: usize,

use_yuv: bool,
data: Vec<u8>,
}

impl Drop for CapturerMag {
@@ -262,12 +259,7 @@ impl CapturerMag {
MagInterface::new().is_ok()
}

pub(crate) fn new(
origin: (i32, i32),
width: usize,
height: usize,
use_yuv: bool,
) -> Result<Self> {
pub(crate) fn new(origin: (i32, i32), width: usize, height: usize) -> Result<Self> {
unsafe {
let x = GetSystemMetrics(SM_XVIRTUALSCREEN);
let y = GetSystemMetrics(SM_YVIRTUALSCREEN);
@@ -311,8 +303,6 @@ impl CapturerMag {
},
width,
height,
use_yuv,
data: Vec::new(),
};

unsafe {
@@ -437,10 +427,6 @@ impl CapturerMag {
Ok(s)
}

pub(crate) fn set_use_yuv(&mut self, use_yuv: bool) {
self.use_yuv = use_yuv;
}

pub(crate) fn exclude(&mut self, cls: &str, name: &str) -> Result<bool> {
let name_c = CString::new(name)?;
unsafe {
@@ -579,22 +565,9 @@ impl CapturerMag {
));
}

if self.use_yuv {
self.data.resize(lock.1.len(), 0);
unsafe {
std::ptr::copy_nonoverlapping(&mut lock.1[0], &mut self.data[0], self.data.len());
}
crate::common::bgra_to_i420(
self.width as usize,
self.height as usize,
&self.data,
data,
);
} else {
data.resize(lock.1.len(), 0);
unsafe {
std::ptr::copy_nonoverlapping(&mut lock.1[0], &mut data[0], data.len());
}
data.resize(lock.1.len(), 0);
unsafe {
std::ptr::copy_nonoverlapping(&mut lock.1[0], &mut data[0], data.len());
}

Ok(())

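The surviving branch above hands the locked BGRA bytes to the caller via resize plus ptr::copy_nonoverlapping. For reference, a safe sketch of the same hand-off (plain slices, not the CapturerMag internals):

// Safe equivalent of the resize + ptr::copy_nonoverlapping pattern above:
// dst ends up as an exact byte-for-byte copy of src.
fn copy_out(src: &[u8], dst: &mut Vec<u8>) {
    dst.resize(src.len(), 0);
    dst.copy_from_slice(src);
}

fn main() {
    let locked = vec![1u8, 2, 3, 4];
    let mut out = Vec::new();
    copy_out(&locked, &mut out);
    assert_eq!(out, locked);
}
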
@@ -651,7 +624,7 @@ mod tests {
use super::*;
#[test]
fn test() {
let mut capture_mag = CapturerMag::new((0, 0), 1920, 1080, false).unwrap();
let mut capture_mag = CapturerMag::new((0, 0), 1920, 1080).unwrap();
capture_mag.exclude("", "RustDeskPrivacyWindow").unwrap();
std::thread::sleep(std::time::Duration::from_millis(1000 * 10));
let mut data = Vec::new();

@@ -20,6 +20,8 @@ use winapi::{
},
};

use crate::RotationMode::*;

pub struct ComPtr<T>(*mut T);
impl<T> ComPtr<T> {
fn is_null(&self) -> bool {
@@ -45,8 +47,6 @@ pub struct Capturer {
surface: ComPtr<IDXGISurface>,
width: usize,
height: usize,
use_yuv: bool,
yuv: Vec<u8>,
rotated: Vec<u8>,
gdi_capturer: Option<CapturerGDI>,
gdi_buffer: Vec<u8>,
@@ -54,7 +54,7 @@ pub struct Capturer {
}

impl Capturer {
pub fn new(display: Display, use_yuv: bool) -> io::Result<Capturer> {
pub fn new(display: Display) -> io::Result<Capturer> {
let mut device = ptr::null_mut();
let mut context = ptr::null_mut();
let mut duplication = ptr::null_mut();
@@ -148,8 +148,6 @@ impl Capturer {
width: display.width() as usize,
height: display.height() as usize,
display,
use_yuv,
yuv: Vec::new(),
rotated: Vec::new(),
gdi_capturer,
gdi_buffer: Vec::new(),
@@ -157,10 +155,6 @@ impl Capturer {
})
}

pub fn set_use_yuv(&mut self, use_yuv: bool) {
self.use_yuv = use_yuv;
}

pub fn is_gdi(&self) -> bool {
self.gdi_capturer.is_some()
}
@@ -259,10 +253,10 @@ impl Capturer {
self.unmap();
let r = self.load_frame(timeout)?;
let rotate = match self.display.rotation() {
DXGI_MODE_ROTATION_IDENTITY | DXGI_MODE_ROTATION_UNSPECIFIED => 0,
DXGI_MODE_ROTATION_ROTATE90 => 90,
DXGI_MODE_ROTATION_ROTATE180 => 180,
DXGI_MODE_ROTATION_ROTATE270 => 270,
DXGI_MODE_ROTATION_IDENTITY | DXGI_MODE_ROTATION_UNSPECIFIED => kRotate0,
DXGI_MODE_ROTATION_ROTATE90 => kRotate90,
DXGI_MODE_ROTATION_ROTATE180 => kRotate180,
DXGI_MODE_ROTATION_ROTATE270 => kRotate270,
_ => {
return Err(io::Error::new(
io::ErrorKind::Other,
@@ -270,7 +264,7 @@ impl Capturer {
));
}
};
if rotate == 0 {
if rotate == kRotate0 {
slice::from_raw_parts(r.0, r.1 as usize * self.height)
} else {
self.rotated.resize(self.width * self.height * 4, 0);
@@ -279,12 +273,12 @@ impl Capturer {
r.1,
self.rotated.as_mut_ptr(),
4 * self.width as i32,
if rotate == 180 {
if rotate == kRotate180 {
self.width
} else {
self.height
} as _,
if rotate != 180 {
if rotate != kRotate180 {
self.width
} else {
self.height
@@ -295,19 +289,7 @@ impl Capturer {
}
}
};
Ok({
if self.use_yuv {
crate::common::bgra_to_i420(
self.width as usize,
self.height as usize,
&result,
&mut self.yuv,
);
&self.yuv[..]
} else {
result
}
})
Ok(result)
}
}

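The rotation hunk above now yields libyuv-style rotation constants directly instead of raw degrees, so the value can be passed straight to the rotation routine. A standalone sketch of that mapping, with a local enum standing in for the crate's bindgen-generated RotationMode (the DXGI constants are not reproduced here):

// Sketch: map a display rotation in degrees to a libyuv-style RotationMode.
#[allow(non_camel_case_types)]
#[derive(Clone, Copy, Debug, PartialEq)]
enum RotationMode {
    kRotate0,
    kRotate90,
    kRotate180,
    kRotate270,
}

fn rotation_from_degrees(deg: u32) -> Option<RotationMode> {
    // Only the four right-angle rotations are representable; anything else
    // is rejected, mirroring the `_ =>` error arm in the match above.
    match deg {
        0 => Some(RotationMode::kRotate0),
        90 => Some(RotationMode::kRotate90),
        180 => Some(RotationMode::kRotate180),
        270 => Some(RotationMode::kRotate270),
        _ => None,
    }
}

fn main() {
    assert_eq!(rotation_from_degrees(180), Some(RotationMode::kRotate180));
    assert!(rotation_from_degrees(45).is_none());
}

Staying in the enum domain removes the magic numbers 0/90/180/270 and makes the later comparisons such as rotate == kRotate180 self-describing.
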
@@ -5,8 +5,8 @@ use super::ffi::*;
pub struct Frame {
surface: IOSurfaceRef,
inner: &'static [u8],
i420: *mut u8,
i420_len: usize,
bgra: Vec<u8>,
bgra_stride: usize,
}

impl Frame {
@@ -24,8 +24,8 @@ impl Frame {
Frame {
surface,
inner,
i420: ptr::null_mut(),
i420_len: 0,
bgra: Vec::new(),
bgra_stride: 0,
}
}

@@ -34,23 +34,20 @@ impl Frame {
self.inner
}

pub fn nv12_to_i420<'a>(&'a mut self, w: usize, h: usize, i420: &'a mut Vec<u8>) {
pub fn stride(&self) -> usize {
self.bgra_stride
}

pub fn surface_to_bgra<'a>(&'a mut self, h: usize) {
unsafe {
let plane0 = IOSurfaceGetBaseAddressOfPlane(self.surface, 0);
let stride0 = IOSurfaceGetBytesPerRowOfPlane(self.surface, 0);
let plane1 = IOSurfaceGetBaseAddressOfPlane(self.surface, 1);
let stride1 = IOSurfaceGetBytesPerRowOfPlane(self.surface, 1);
crate::common::nv12_to_i420(
self.bgra_stride = IOSurfaceGetBytesPerRowOfPlane(self.surface, 0);
self.bgra.resize(self.bgra_stride * h, 0);
std::ptr::copy_nonoverlapping(
plane0 as _,
stride0 as _,
plane1 as _,
stride1 as _,
w,
h,
i420,
self.bgra.as_mut_ptr(),
self.bgra_stride * h,
);
self.i420 = i420.as_mut_ptr() as _;
self.i420_len = i420.len();
}
}
}
@@ -58,14 +55,7 @@ impl Frame {
impl ops::Deref for Frame {
type Target = [u8];
fn deref<'a>(&'a self) -> &'a [u8] {
if self.i420.is_null() {
self.inner
} else {
unsafe {
let inner = slice::from_raw_parts(self.i420 as *const u8, self.i420_len);
inner
}
}
&self.bgra
}
}

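The macOS change above copies IOSurface plane 0 (BGRA) verbatim and keeps the surface's bytes-per-row as the frame stride. A small arithmetic sketch of why the copy is sized bytes_per_row * height rather than width * 4 * height (the 64-byte row alignment below is an illustrative assumption, not something IOSurface guarantees):

fn main() {
    let width = 1366usize;
    let height = 768usize;
    // Assume each row is padded up to a 64-byte boundary (illustration only).
    let bytes_per_row = ((width * 4 + 63) / 64) * 64;
    let packed = width * 4 * height; // what width * 4 * height would allocate
    let padded = bytes_per_row * height; // what the surface actually occupies
    assert!(padded >= packed);
    println!("packed = {packed} bytes, padded = {padded} bytes, stride = {bytes_per_row}");
}
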
@@ -2,6 +2,8 @@ use std::{io, ptr, slice};

use hbb_common::libc;

use crate::Frame;

use super::ffi::*;
use super::Display;

@@ -12,13 +14,11 @@ pub struct Capturer {
buffer: *const u8,

size: usize,
use_yuv: bool,
yuv: Vec<u8>,
saved_raw_data: Vec<u8>, // for faster compare and copy
}

impl Capturer {
pub fn new(display: Display, use_yuv: bool) -> io::Result<Capturer> {
pub fn new(display: Display) -> io::Result<Capturer> {
// Calculate dimensions.

let pixel_width = 4;
@@ -67,17 +67,11 @@ impl Capturer {
xcbid,
buffer,
size,
use_yuv,
yuv: Vec::new(),
saved_raw_data: Vec::new(),
};
Ok(c)
}

pub fn set_use_yuv(&mut self, use_yuv: bool) {
self.use_yuv = use_yuv;
}

pub fn display(&self) -> &Display {
&self.display
}
@@ -103,16 +97,13 @@ impl Capturer {
}
}

pub fn frame<'b>(&'b mut self) -> std::io::Result<&'b [u8]> {
pub fn frame<'b>(&'b mut self) -> std::io::Result<Frame> {
self.get_image();
let result = unsafe { slice::from_raw_parts(self.buffer, self.size) };
crate::would_block_if_equal(&mut self.saved_raw_data, result)?;
Ok(if self.use_yuv {
crate::common::bgra_to_i420(self.display.w(), self.display.h(), &result, &mut self.yuv);
&self.yuv[..]
} else {
result
})
Ok(
Frame::new(result, crate::Pixfmt::BGRA, self.display.h())
)
}
}

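Because would_block_if_equal turns an unchanged screen into io::ErrorKind::WouldBlock instead of returning stale data, callers of frame() are expected to treat that error as "try again later". A self-contained sketch of such a polling loop, using a dummy capturer rather than the real scrap types:

use std::{io, thread, time::Duration};

struct DummyCapturer {
    tick: u32,
}

impl DummyCapturer {
    fn frame(&mut self) -> io::Result<Vec<u8>> {
        self.tick += 1;
        if self.tick % 2 == 0 {
            // Screen content unchanged: signal WouldBlock instead of data.
            Err(io::ErrorKind::WouldBlock.into())
        } else {
            Ok(vec![0u8; 16])
        }
    }
}

fn main() {
    let mut cap = DummyCapturer { tick: 0 };
    for _ in 0..4 {
        match cap.frame() {
            Ok(data) => println!("got frame, {} bytes", data.len()),
            Err(e) if e.kind() == io::ErrorKind::WouldBlock => {
                // Nothing new; back off briefly before polling again.
                thread::sleep(Duration::from_millis(5));
            }
            Err(e) => panic!("capture error: {e}"),
        }
    }
}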