Skip to content
This repository has been archived by the owner on Oct 25, 2024. It is now read-only.

Commit

Permalink
Fix AVC decoding crash with renderer I444 support.
Browse files Browse the repository at this point in the history
  • Loading branch information
taste1981 committed Nov 3, 2023
1 parent d431bb6 commit e176876
Show file tree
Hide file tree
Showing 4 changed files with 6 additions and 62 deletions.
30 changes: 1 addition & 29 deletions talk/owt/sdk/base/webrtcvideorendererimpl.cc
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,6 @@
#include <d3d9.h>
#include <dxva2api.h>
#endif
#include "rtc_base/logging.h"
#include "talk/owt/sdk/base/nativehandlebuffer.h"
#include "talk/owt/sdk/base/webrtcvideorendererimpl.h"
#if defined(WEBRTC_WIN)
Expand Down Expand Up @@ -72,18 +71,13 @@ void WebrtcVideoRendererImpl::OnFrame(const webrtc::VideoFrame& frame) {
(uint8_t*)render_ptr, resolution, VideoBufferType::kD3D11});

renderer_.RenderFrame(std::move(video_buffer));
return;
#else
return;
#endif
}

// Non-native buffer. Only for I444 buffer and I444 renderer, we do a
// direct copy. Otherwise we convert to renderer type.
VideoRendererType renderer_type = renderer_.Type();
if (renderer_type != VideoRendererType::kI420 &&
renderer_type != VideoRendererType::kARGB &&
renderer_type != VideoRendererType::kI444)
renderer_type != VideoRendererType::kARGB)
return;
Resolution resolution(frame.width(), frame.height());
if (renderer_type == VideoRendererType::kARGB) {
Expand All @@ -93,28 +87,6 @@ void WebrtcVideoRendererImpl::OnFrame(const webrtc::VideoFrame& frame) {
std::unique_ptr<VideoBuffer> video_buffer(
new VideoBuffer{buffer, resolution, VideoBufferType::kARGB});
renderer_.RenderFrame(std::move(video_buffer));
} else if (renderer_type == VideoRendererType::kI444 &&
frame.video_frame_buffer()->type() ==
webrtc::VideoFrameBuffer::Type::kI444) {
// Assume stride equals to width(might not stand here?)
uint8_t* buffer = new uint8_t[resolution.width * resolution.height * 3];
rtc::scoped_refptr<VideoFrameBuffer> frame_buffer =
frame.video_frame_buffer();
const webrtc::PlanarYuv8Buffer* planar_yuv_buffer =
reinterpret_cast<const webrtc::PlanarYuv8Buffer*>(
frame_buffer->GetI444());
size_t data_ptr = 0;
memcpy(buffer, planar_yuv_buffer->DataY(),
planar_yuv_buffer->StrideY() * planar_yuv_buffer->height());
data_ptr += planar_yuv_buffer->StrideY() * planar_yuv_buffer->height();
memcpy(buffer + data_ptr, planar_yuv_buffer->DataU(),
planar_yuv_buffer->StrideU() * planar_yuv_buffer->height());
data_ptr += planar_yuv_buffer->StrideU() * planar_yuv_buffer->height();
memcpy(buffer + data_ptr, planar_yuv_buffer->DataV(),
planar_yuv_buffer->StrideV() * planar_yuv_buffer->height());
std::unique_ptr<VideoBuffer> video_buffer(
new VideoBuffer{buffer, resolution, VideoBufferType::kI444});
renderer_.RenderFrame(std::move(video_buffer));
} else {
uint8_t* buffer = new uint8_t[resolution.width * resolution.height * 3 / 2];
webrtc::ConvertFromI420(frame, webrtc::VideoType::kI420, 0,
Expand Down
33 changes: 3 additions & 30 deletions talk/owt/sdk/base/win/d3d11_video_decoder.cc
Original file line number Diff line number Diff line change
Expand Up @@ -310,24 +310,11 @@ int32_t D3D11VideoDecoder::Decode(const webrtc::EncodedImage& input_image,
goto fail;
}

if (!frame) {
RTC_LOG(LS_ERROR) << "Failed to decode current frame.";
goto fail;
}

int width = frame->width;
int height = frame->height;

VideoFrame* input_frame =
static_cast<VideoFrame*>(av_buffer_get_opaque(frame->buf[0]));
RTC_DCHECK(input_frame);

rtc::scoped_refptr<VideoFrameBuffer> frame_buffer =
input_frame->video_frame_buffer();

// We get one frame from the decoder.
if (frame->format != AV_PIX_FMT_YUV444P) {
if (frame != nullptr && frame->format == hw_pix_fmt) {
ID3D11Texture2D* texture = (ID3D11Texture2D*)frame->data[0];
int width = frame->width;
int height = frame->height;
int index = (intptr_t)frame->data[1];
D3D11_TEXTURE2D_DESC texture_desc;

Expand Down Expand Up @@ -393,20 +380,6 @@ int32_t D3D11VideoDecoder::Decode(const webrtc::EncodedImage& input_image,

}
av_frame_free(&frame);
} else { // YUV444P. Which will be a software frame.
rtc::scoped_refptr<webrtc::VideoFrameBuffer> dst_buffer;
dst_buffer =
WrapI444Buffer(width, height, frame->data[0], frame->linesize[0],
frame->data[1], frame->linesize[1], frame->data[2],
frame->linesize[2], [frame_buffer] {});
webrtc::VideoFrame decoded_frame(dst_buffer, input_image.Timestamp(), 0,
webrtc::kVideoRotation_0);
decoded_frame.set_ntp_time_ms(input_image.ntp_time_ms_);
decoded_frame.set_timestamp(input_image.Timestamp());
if (decoded_image_callback_) {
decoded_image_callback_->Decoded(decoded_frame);
}
av_frame_free(&frame);
}
}
return WEBRTC_VIDEO_CODEC_OK;
Expand Down
2 changes: 1 addition & 1 deletion talk/owt/sdk/base/win/externalvideodecoderfactory.cc
Original file line number Diff line number Diff line change
Expand Up @@ -113,7 +113,7 @@ ExternalVideoDecoderFactory::CreateVideoDecoder(
}
if (vp8_hw || vp9_hw || h264_hw || h265_hw || av1_hw) {
#if defined(OWT_USE_FFMPEG)
if (range_extension_enabled_) {
if (range_extension_enabled_ && h265_hw) {
return std::make_unique<FFMpegDecoderImpl>();
} else {
return owt::base::D3D11VideoDecoder::Create(cricket::VideoCodec(format));
Expand Down
3 changes: 1 addition & 2 deletions talk/owt/sdk/include/cpp/owt/base/videorendererinterface.h
Original file line number Diff line number Diff line change
Expand Up @@ -21,15 +21,14 @@ enum class VideoBufferType {
kI420,
kARGB,
kD3D11, // Format self-described.
kI444,
};
// Pixel layouts a renderer sink can accept. The sink reports one of these
// (via renderer_.Type() in WebrtcVideoRendererImpl::OnFrame) so incoming
// frames can be copied or converted to the matching VideoBufferType.
enum class VideoRendererType {
  kI420,   // Planar YUV 4:2:0.
  kARGB,   // 32-bit interleaved ARGB.
  kD3D11, // Format self-described.
  kI444,   // Planar YUV 4:4:4. NOTE(review): this commit appears to remove
           // I444 renderer support; confirm this enumerator is deleted.
};


#if defined(WEBRTC_WIN)
struct OWT_EXPORT D3D11ImageHandle {
ID3D11Device* d3d11_device;
Expand Down

0 comments on commit e176876

Please sign in to comment.