
Learning OpenH264 in WebRTC

胡厉刚
2023-12-01

Enabling OpenH264

gn gen out/project --ide=vs --args="proprietary_codecs=true rtc_use_h264=true use_openh264=true"

ninja -C out/project

Using OpenH264

h264_encoder_impl

This file is WebRTC's wrapper around OpenH264.
The class H264EncoderImpl publicly inherits from H264Encoder.

Member functions

  1. H264EncoderImpl(const cricket::VideoCodec& codec);
    codec is the externally supplied configuration; it is mainly used to set the packetization mode, which will be configured as NonInterleaved (see the sketch below).
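    For context, a trimmed sketch of the constructor, assuming the upstream layout (member initializers omitted): the SDP fmtp parameter packetization-mode selects NonInterleaved when it equals "1".

    H264EncoderImpl::H264EncoderImpl(const cricket::VideoCodec& codec)
        : packetization_mode_(H264PacketizationMode::SingleNalUnit) {
      std::string packetization_mode_string;
      if (codec.GetParam(cricket::kH264FmtpPacketizationMode,
                         &packetization_mode_string) &&
          packetization_mode_string == "1") {
        // "packetization-mode=1" in the SDP selects NonInterleaved.
        packetization_mode_ = H264PacketizationMode::NonInterleaved;
      }
    }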

  2. ~H264EncoderImpl()
    Destructor; nothing special to note.

  3. InitEncode
    int32_t InitEncode(const VideoCodec* codec_settings,
    const VideoEncoder::Settings& settings) override;
    1) The codec type must be kVideoCodecH264.
    2) NumberOfSimulcastStreams
    With simulcast, the bitrates configured for all the streams are summed.
    This step yields the stream count, number_of_streams (a sketch follows).
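    For reference, a minimal sketch of how the stream count and the summed target bitrate could be obtained. SimulcastUtility is WebRTC's helper class; the summing loop is my illustration, not a verbatim quote:

    int number_of_streams =
        SimulcastUtility::NumberOfSimulcastStreams(*codec_settings);
    // Illustrative: sum the per-stream target bitrates (kbit/s).
    uint32_t total_target_kbps = 0;
    for (int i = 0; i < number_of_streams; ++i) {
      total_target_kbps += codec_settings->simulcastStream[i].targetBitrate;
    }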
    3) Resolution
    Even when the simulcast count is 0, a resolution must still be set.
    4) Loop to create number_of_streams SVC encoders
    1】 WelsCreateSVCEncoder
    2】 Set the log level
    if (kOpenH264EncoderDetailedLogging) {
      int trace_level = WELS_LOG_DETAIL;
      openh264_encoder->SetOption(ENCODER_OPTION_TRACE_LEVEL, &trace_level);
    }
    3】 Store the OpenH264 encoder handle
    encoders_[i] = openh264_encoder;
    4】 Populate the internal parameters from the external codec settings
    configurations_[i].simulcast_idx = idx;
    configurations_[i].sending = false;
    configurations_[i].width = codec_.simulcastStream[idx].width;  // width of the idx-th simulcast stream
    configurations_[i].height = codec_.simulcastStream[idx].height;
    configurations_[i].max_frame_rate = static_cast<float>(codec_.maxFramerate);  // so all simulcast streams share one frame rate
    configurations_[i].frame_dropping_on = codec_.H264()->frameDroppingOn;  // whether frame dropping is enabled
    configurations_[i].key_frame_interval = codec_.H264()->keyFrameInterval;  // effectively the GOP size
    configurations_[i].num_temporal_layers =
    codec_.simulcastStream[idx].numberOfTemporalLayers;  // number of temporal layers within this stream
    This confirms that the simulcast and temporal-SVC (svc_t) handling in my own OpenH264 tool is correct.
    WebRTC most likely does not use OpenH264's spatial SVC layering; instead it creates multiple OpenH264 encoder handles to produce streams at different resolutions, and applies temporal SVC layering within each stream.

// Create buffers for the downscaled images.
if (i > 0) {
  downscaled_buffers_[i - 1] = I420Buffer::Create(
      configurations_[i].width, configurations_[i].height,
      configurations_[i].width, configurations_[i].width / 2,
      configurations_[i].width / 2);
}
// Arguments: width, height, then the strides of the Y, U and V planes of the I420 (YUV 4:2:0) buffer.
// Set the max bitrate and the start bitrate.
Both are given in kbit/s; a sketch of the unit conversion follows.
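A small sketch of that conversion, assuming the wrapper's LayerConfig fields target_bps and max_bps:

// Codec settings use kbit/s; the encoder configuration stores bit/s.
configurations_[i].max_bps = codec_.maxBitrate * 1000;
configurations_[i].target_bps = codec_.startBitrate * 1000;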

5】 Create the encoder parameters from the layer configuration
SEncParamExt encoder_params = CreateEncoderParams(i);
// Similar to the parameter-filling step in the standalone tool.
1. First, call the API to fetch the default parameters:
encoders_[i]->GetDefaultParams(&encoder_params);
2. Then set the usage type, which distinguishes between a real-time camera stream and a real-time screen-content stream.
3. Set the resolution and the target bitrate, which is also the start bitrate.
This value lives in configurations_[i] and deserves close attention.
4. Max bitrate
Because WebRTC has its own congestion control, the max bitrate is left at the default, i.e. not set explicitly.
5. Rate control mode
RCMode = RC_BITRATE_MODE
In this mode the output bitrate stays stable.
With frame skipping enabled, the encoder tries hard not to overshoot the bitrate when the picture changes drastically.
encoder_params.bEnableFrameSkip = configurations_[i].frame_dropping_on;
6. Max frame rate
7. Key frame interval
encoder_params.uiIntraPeriod = configurations_[i].key_frame_interval;
8. Number of threads

encoder_params.iMultipleThreadIdc = NumberOfThreads(
      encoder_params.iPicWidth, encoder_params.iPicHeight, number_of_cores_);
        // TODO(hbos): In Chromium, multiple threads do not work with sandbox on Mac,
  // see crbug.com/583348. Until further investigated, only use one thread.
  //  if (width * height >= 1920 * 1080 && number_of_cores > 8) {
  //    return 8;  // 8 threads for 1080p on high perf machines.
  //  } else if (width * height > 1280 * 960 && number_of_cores >= 6) {
  //    return 3;  // 3 threads for 1080p.
  //  } else if (width * height > 640 * 480 && number_of_cores >= 3) {
  //    return 2;  // 2 threads for qHD/HD.
  //  } else {
  //    return 1;  // 1 thread for VGA or less.
  //  }
  // TODO(sprang): Also check sSliceArgument.uiSliceNum om GetEncoderPrams(),
  //               before enabling multithreading here.

In WebRTC the default is 1, i.e. single-threaded mode.

// Spatial layer configuration
In WebRTC only layer 0 is used.
1. Set the width and height of this layer, same as above.
2. Set the frame rate, the layer bitrate, and the max bitrate.
3. Temporal layer configuration
If the number of temporal layers is greater than 1, the number of reference frames is set to 1, as sketched below.
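Putting the items above together, a sketch of the layer-0 setup using OpenH264's SEncParamExt fields (close to, but not a verbatim quote of, the upstream code):

encoder_params.sSpatialLayers[0].iVideoWidth = encoder_params.iPicWidth;
encoder_params.sSpatialLayers[0].iVideoHeight = encoder_params.iPicHeight;
encoder_params.sSpatialLayers[0].fFrameRate = encoder_params.fMaxFrameRate;
encoder_params.sSpatialLayers[0].iSpatialBitrate = encoder_params.iTargetBitrate;
encoder_params.sSpatialLayers[0].iMaxSpatialBitrate = encoder_params.iMaxBitrate;
encoder_params.iTemporalLayerNum = configurations_[i].num_temporal_layers;
if (encoder_params.iTemporalLayerNum > 1) {
  // With temporal layering enabled, restrict to a single reference frame.
  encoder_params.iNumRefFrame = 1;
}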

// Packetization mode
1. SingleNalUnit
Constraints: SliceNum = 1, SliceMode = SM_SIZELIMITED_SLICE,
uiSliceSizeConstraint = max_payload_size_
This bounds the payload size; a sketch of this branch follows.
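A sketch of the SingleNalUnit branch under these constraints (the NonInterleaved branch is quoted right below):

encoder_params.sSpatialLayers[0].sSliceArgument.uiSliceNum = 1;
encoder_params.sSpatialLayers[0].sSliceArgument.uiSliceMode =
    SM_SIZELIMITED_SLICE;
// Cap each slice at the RTP payload size so no NAL unit exceeds one packet.
encoder_params.sSpatialLayers[0].sSliceArgument.uiSliceSizeConstraint =
    static_cast<unsigned int>(max_payload_size_);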
2. NonInterleaved

   // When uiSliceMode = SM_FIXEDSLCNUM_SLICE, uiSliceNum = 0 means auto
  // design it with cpu core number.
  // TODO(sprang): Set to 0 when we understand why the rate controller borks
  //               when uiSliceNum > 1.
  encoder_params.sSpatialLayers[0].sSliceArgument.uiSliceNum = 1;
  encoder_params.sSpatialLayers[0].sSliceArgument.uiSliceMode =
      SM_FIXEDSLCNUM_SLICE;

At this point, the parameter setup is complete.

5) InitializeExt
6) Set the encoder's input data format

   int video_format = EVideoFormatType::videoFormatI420;
    openh264_encoder->SetOption(ENCODER_OPTION_DATAFORMAT, &video_format);

7) Initialize the encoded picture buffer size
By default, the size of the raw (unencoded) frame is used.

const size_t new_capacity =
    CalcBufferSize(VideoType::kI420, codec_.simulcastStream[idx].width,
                   codec_.simulcastStream[idx].height);
encoded_images_[i].SetEncodedData(EncodedImageBuffer::Create(new_capacity));
encoded_images_[i]._completeFrame = true;
encoded_images_[i]._encodedWidth = codec_.simulcastStream[idx].width;
encoded_images_[i]._encodedHeight = codec_.simulcastStream[idx].height;
encoded_images_[i].set_size(0);

In other words, it is just the size of one frame after conversion to YUV; a quick illustration follows.
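An I420 frame is one full-resolution luma plane plus two quarter-resolution chroma planes, i.e. roughly width * height * 3 / 2 bytes (the helper name here is mine):

size_t I420FrameSize(int width, int height) {
  // Chroma planes are half width and half height, rounded up.
  int chroma_width = (width + 1) / 2;
  int chroma_height = (height + 1) / 2;
  return static_cast<size_t>(width) * height +
         2 * static_cast<size_t>(chroma_width) * chroma_height;
}
// For example, 1280x720 -> 1382400 bytes.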

  8) init_allocator
  SimulcastRateAllocator init_allocator(codec_);
  VideoBitrateAllocation allocation =
      init_allocator.Allocate(VideoBitrateAllocationParameters(
          DataRate::KilobitsPerSec(codec_.startBitrate), codec_.maxFramerate));
  SetRates(RateControlParameters(allocation, codec_.maxFramerate));

This part deserves attention, since it ties into the bandwidth-driven (BW) bitrate adjustment later on.

  4. Release
    int32_t Release() override;

This involves releasing and resetting the encoder's resources; a sketch follows.
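A hedged sketch of the release path, following the upstream pattern: uninitialize and destroy each per-stream OpenH264 handle, then clear the cached buffers and images.

int32_t H264EncoderImpl::Release() {
  while (!encoders_.empty()) {
    ISVCEncoder* openh264_encoder = encoders_.back();
    if (openh264_encoder) {
      RTC_CHECK_EQ(0, openh264_encoder->Uninitialize());
      WelsDestroySVCEncoder(openh264_encoder);
    }
    encoders_.pop_back();
  }
  downscaled_buffers_.clear();
  configurations_.clear();
  encoded_images_.clear();
  pictures_.clear();
  return WEBRTC_VIDEO_CODEC_OK;
}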

  5. RegisterEncodeCompleteCallback
    int32_t RegisterEncodeCompleteCallback(
      EncodedImageCallback* callback) override;
  6. SetRates
    void SetRates(const RateControlParameters& parameters) override;

This is used to notify the encoder to adjust its bitrate; a sketch of the per-stream update follows.
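A sketch of the per-stream update inside SetRates, using OpenH264's SBitrateInfo and SetOption API (the surrounding loop over encoders_ is omitted):

// Push the newly allocated bitrate and frame rate for stream i to OpenH264.
SBitrateInfo target_bitrate;
memset(&target_bitrate, 0, sizeof(SBitrateInfo));
target_bitrate.iLayer = SPATIAL_LAYER_ALL;
target_bitrate.iTargetBitrate = configurations_[i].target_bps;
encoders_[i]->SetOption(ENCODER_OPTION_BITRATE, &target_bitrate);
encoders_[i]->SetOption(ENCODER_OPTION_FRAME_RATE,
                        &configurations_[i].max_frame_rate);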

  7. Encode
  // The result of encoding - an EncodedImage and CodecSpecificInfo - are
  // passed to the encode complete callback.
  int32_t Encode(const VideoFrame& frame,
                 const std::vector<VideoFrameType>* frame_types) override;

It mainly constructs an SSourcePicture:

   /**
*  @brief Structure for source picture
*/
typedef struct Source_Picture_s {
  int       iColorFormat;          ///< color space type
  int       iStride[4];            ///< stride for each plane pData
  unsigned char*  pData[4];        ///< plane pData
  int       iPicWidth;             ///< luma picture width in x coordinate
  int       iPicHeight;            ///< luma picture height in y coordinate
  long long uiTimeStamp;           ///< timestamp of the source picture, unit: millisecond
} SSourcePicture;
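A sketch of how the full-resolution picture is wired to the input frame's I420 buffer; pData points directly at the frame's planes, so no copy is made (frame_buffer stands for the input frame's I420 buffer):

pictures_[0] = {};
pictures_[0].iPicWidth = frame_buffer->width();
pictures_[0].iPicHeight = frame_buffer->height();
pictures_[0].iColorFormat = EVideoFormatType::videoFormatI420;
pictures_[0].uiTimeStamp = input_frame.ntp_time_ms();
pictures_[0].iStride[0] = frame_buffer->StrideY();
pictures_[0].iStride[1] = frame_buffer->StrideU();
pictures_[0].iStride[2] = frame_buffer->StrideV();
pictures_[0].pData[0] = const_cast<uint8_t*>(frame_buffer->DataY());
pictures_[0].pData[1] = const_cast<uint8_t*>(frame_buffer->DataU());
pictures_[0].pData[2] = const_cast<uint8_t*>(frame_buffer->DataV());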
// Scale the image down a number of times by downsampling factor.
libyuv::I420Scale(pictures_[i - 1].pData[0], pictures_[i - 1].iStride[0],
                  pictures_[i - 1].pData[1], pictures_[i - 1].iStride[1],
                  pictures_[i - 1].pData[2], pictures_[i - 1].iStride[2],
                  configurations_[i - 1].width,
                  configurations_[i - 1].height, pictures_[i].pData[0],
                  pictures_[i].iStride[0], pictures_[i].pData[1],
                  pictures_[i].iStride[1], pictures_[i].pData[2],
                  pictures_[i].iStride[2], configurations_[i].width,
                  configurations_[i].height, libyuv::kFilterBilinear);
// EncodeFrame output.
SFrameBSInfo info;
memset(&info, 0, sizeof(SFrameBSInfo));
// Encode!
int enc_ret = encoders_[i]->EncodeFrame(&pictures_[i], &info);
if (enc_ret != 0) {
  RTC_LOG(LS_ERROR)
      << "OpenH264 frame encoding failed, EncodeFrame returned " << enc_ret
      << ".";
  ReportError();
  return WEBRTC_VIDEO_CODEC_ERROR;
}

// Next, the encoded output is packaged together with information from the input frame

encoded_images_[i]._encodedWidth = configurations_[i].width;
encoded_images_[i]._encodedHeight = configurations_[i].height;
encoded_images_[i].SetTimestamp(input_frame.timestamp());
encoded_images_[i]._frameType = ConvertToVideoFrameType(info.eFrameType);
encoded_images_[i].SetSpatialIndex(configurations_[i].simulcast_idx);

// Split encoded image up into fragments. This also updates
// |encoded_image_|.
RtpFragmentize(&encoded_images_[i], &info);
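A hedged sketch of what RtpFragmentize does internally: walk every layer and NAL unit in the SFrameBSInfo, compute the required capacity, then copy the NAL data into one contiguous EncodedImage buffer.

// First pass: size the output buffer from the per-NAL lengths.
size_t required_capacity = 0;
for (int layer = 0; layer < info.iLayerNum; ++layer) {
  const SLayerBSInfo& layer_info = info.sLayerInfo[layer];
  for (int nal = 0; nal < layer_info.iNalCount; ++nal) {
    required_capacity += layer_info.pNalLengthInByte[nal];
  }
}
// Second pass (not shown): each layer's NAL data is contiguous starting at
// layer_info.pBsBuf, so one memcpy per layer appends it to the image buffer.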
// Deliver the encoded frame
    // Encoder can skip frames to save bandwidth in which case
    // |encoded_images_[i]._length| == 0.
    if (encoded_images_[i].size() > 0) {
      // Parse QP.
      h264_bitstream_parser_.ParseBitstream(encoded_images_[i].data(),
                                            encoded_images_[i].size());
      h264_bitstream_parser_.GetLastSliceQp(&encoded_images_[i].qp_);

      // Deliver encoded image.
      CodecSpecificInfo codec_specific;
      codec_specific.codecType = kVideoCodecH264;
      codec_specific.codecSpecific.H264.packetization_mode =
          packetization_mode_;
      codec_specific.codecSpecific.H264.temporal_idx = kNoTemporalIdx;
      codec_specific.codecSpecific.H264.idr_frame =
          info.eFrameType == videoFrameTypeIDR;
      codec_specific.codecSpecific.H264.base_layer_sync = false;
      if (configurations_[i].num_temporal_layers > 1) {
        const uint8_t tid = info.sLayerInfo[0].uiTemporalId;
        codec_specific.codecSpecific.H264.temporal_idx = tid;
        codec_specific.codecSpecific.H264.base_layer_sync =
            tid > 0 && tid < tl0sync_limit_[i];
        if (codec_specific.codecSpecific.H264.base_layer_sync) {
          tl0sync_limit_[i] = tid;
        }
        if (tid == 0) {
          tl0sync_limit_[i] = configurations_[i].num_temporal_layers;
        }
      }
      encoded_image_callback_->OnEncodedImage(encoded_images_[i],
                                              &codec_specific);
    }
  8. GetEncoderInfo
VideoEncoder::EncoderInfo H264EncoderImpl::GetEncoderInfo() const {
  EncoderInfo info;
  info.supports_native_handle = false;
  info.implementation_name = "OpenH264";
  info.scaling_settings =
      VideoEncoder::ScalingSettings(kLowH264QpThreshold, kHighH264QpThreshold);
  info.is_hardware_accelerated = false;
  info.has_internal_source = false;
  info.supports_simulcast = true;
  return info;
}
  9. PacketizationModeForTesting
  // Exposed for testing.
  H264PacketizationMode PacketizationModeForTesting() const {
    return packetization_mode_;
  }