VideoPixelFormatToString(src_frame.format()), "dst_format",
VideoPixelFormatToString(dst_frame.format()));
constexpr auto kDefaultFiltering = libyuv::kFilterBox;
- if (!src_frame.IsMappable() || !dst_frame.IsMappable())
+ if ((!src_frame.IsMappable()
+#if defined(TIZEN_TBM_SUPPORT)
+ && src_frame.format() != PIXEL_FORMAT_TBM_INTER_PROCESS_BUFFER
+#endif
+ ) ||
+ !dst_frame.IsMappable())
return EncoderStatus::Codes::kUnsupportedFrameFormat;
if ((dst_frame.format() == PIXEL_FORMAT_I420A ||
if ((dst_frame.format() == PIXEL_FORMAT_NV12 ||
dst_frame.format() == PIXEL_FORMAT_NV12A) &&
(src_frame.format() == PIXEL_FORMAT_NV12 ||
+#if defined(TIZEN_TBM_SUPPORT)
+ src_frame.format() == PIXEL_FORMAT_TBM_INTER_PROCESS_BUFFER ||
+#endif
src_frame.format() == PIXEL_FORMAT_NV12A)) {
if (dst_frame.format() == PIXEL_FORMAT_NV12A) {
libyuv::ScalePlane(
}
if (dst_frame.format() == PIXEL_FORMAT_I420 &&
- src_frame.format() == PIXEL_FORMAT_NV12) {
+ (
+#if defined(TIZEN_TBM_SUPPORT)
+ src_frame.format() == PIXEL_FORMAT_TBM_INTER_PROCESS_BUFFER ||
+#endif
+ src_frame.format() == PIXEL_FORMAT_NV12)) {
if (src_frame.visible_rect().size() == dst_frame.visible_rect().size()) {
// Both frames have the same size, only NV12-to-I420 conversion is
// required.
case media::PIXEL_FORMAT_NV12:
return rtc::scoped_refptr<webrtc::VideoFrameBuffer>(
new rtc::RefCountedObject<NV12FrameAdapter>(std::move(video_frame)));
+#if defined(TIZEN_TBM_SUPPORT)
+ case media::PIXEL_FORMAT_TBM_INTER_PROCESS_BUFFER:
+ return rtc::scoped_refptr<webrtc::VideoFrameBuffer>(
+ new rtc::RefCountedObject<NV12FrameAdapter>(std::move(video_frame)));
+#endif
default:
NOTREACHED();
return nullptr;
media::VideoPixelFormat dst_format = media::PIXEL_FORMAT_UNKNOWN;
bool tmp_buffer_needed = false;
if (source_is_nv12) {
+#if !defined(TIZEN_TBM_SUPPORT)
DCHECK_EQ(source_frame->format(), media::PIXEL_FORMAT_NV12);
+#endif
dst_format = media::PIXEL_FORMAT_NV12;
} else {
// ARGB pixel format may be produced by readback of texture backed frames.
source_frame->format() == media::PIXEL_FORMAT_ARGB ||
source_frame->format() == media::PIXEL_FORMAT_XRGB ||
source_frame->format() == media::PIXEL_FORMAT_ABGR ||
+#if defined(TIZEN_TBM_SUPPORT)
+ source_frame->format() ==
+ media::PIXEL_FORMAT_TBM_INTER_PROCESS_BUFFER ||
+#endif
source_frame->format() == media::PIXEL_FORMAT_XBGR);
RTC_DCHECK(shared_resources);
source_frame->format() == media::PIXEL_FORMAT_I420 ||
source_frame->format() == media::PIXEL_FORMAT_I420A;
const bool source_is_nv12 =
+#if defined(TIZEN_TBM_SUPPORT)
+ source_frame->format() == media::PIXEL_FORMAT_TBM_INTER_PROCESS_BUFFER ||
+#endif
source_frame->format() == media::PIXEL_FORMAT_NV12;
const bool no_scaling_needed =
source_frame->natural_size() == source_frame->visible_rect().size();
#if defined(TIZEN_VIDEO_HOLE)
frame->storage_type() == media::VideoFrame::STORAGE_HOLE ||
#endif
- frame->format() == media::PIXEL_FORMAT_ENCODED || frame->HasTextures();
+#if BUILDFLAG(IS_TIZEN)
+ frame->format() == media::PIXEL_FORMAT_ENCODED ||
+#if defined(TIZEN_TBM_SUPPORT)
+ frame->format() ==
+ media::VideoPixelFormat::PIXEL_FORMAT_TBM_INTER_PROCESS_BUFFER ||
+#endif
+#endif
+ frame->HasTextures();
}
// static
video_frame->timestamp()));
}
return MakeFrameAdapter(std::move(converted_frame));
+#if defined(TIZEN_TBM_SUPPORT)
+ } else if (video_frame->format() ==
+ media::VideoPixelFormat::PIXEL_FORMAT_TBM_INTER_PROCESS_BUFFER) {
+    // A video frame backed by a TBM inter-process buffer is a native frame;
+    // convert/scale it here before wrapping it in a frame adapter.
+ scoped_refptr<media::VideoFrame> scaled_frame =
+ MaybeConvertAndScaleFrame(video_frame, shared_resources);
+ if (!scaled_frame) {
+ DLOG(ERROR) << "Make scaled frame failed.";
+ return MakeFrameAdapter(std::move(video_frame));
+ }
+ return MakeFrameAdapter(std::move(scaled_frame));
+#endif
} else if (video_frame->HasTextures()) {
auto converted_frame =
shared_resources