webkit2gtk3/SOURCES/gstreamer-1.16.patch

diff --git a/Source/WebCore/platform/audio/gstreamer/AudioSourceProviderGStreamer.cpp b/Source/WebCore/platform/audio/gstreamer/AudioSourceProviderGStreamer.cpp
index 51547b0226c0..2ab2d0c8688c 100644
--- a/Source/WebCore/platform/audio/gstreamer/AudioSourceProviderGStreamer.cpp
+++ b/Source/WebCore/platform/audio/gstreamer/AudioSourceProviderGStreamer.cpp
@@ -124,7 +124,8 @@ AudioSourceProviderGStreamer::AudioSourceProviderGStreamer(MediaStreamTrackPriva
g_signal_connect_swapped(decodebin, "pad-added", G_CALLBACK(+[](AudioSourceProviderGStreamer* provider, GstPad* pad) {
auto padCaps = adoptGRef(gst_pad_query_caps(pad, nullptr));
bool isAudio = doCapsHaveType(padCaps.get(), "audio");
- RELEASE_ASSERT(isAudio);
+ if (!isAudio)
+ return;
auto sinkPad = adoptGRef(gst_element_get_static_pad(provider->m_audioSinkBin.get(), "sink"));
gst_pad_link(pad, sinkPad.get());
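
For illustration only (not part of the patch): a minimal pad-added handler showing the behaviour the hunk above introduces, where non-audio pads are skipped instead of tripping RELEASE_ASSERT. doCapsHaveType() is WebKit-internal, so this sketch approximates it with a caps-name prefix check; the function name is hypothetical.

#include <gst/gst.h>

static void onDecodebinPadAdded(GstElement*, GstPad* pad, gpointer userData)
{
    GstElement* audioSinkBin = GST_ELEMENT(userData);

    GstCaps* caps = gst_pad_query_caps(pad, nullptr);
    gboolean isAudio = FALSE;
    if (caps && gst_caps_get_size(caps) > 0) {
        GstStructure* structure = gst_caps_get_structure(caps, 0);
        isAudio = g_str_has_prefix(gst_structure_get_name(structure), "audio/");
    }
    if (caps)
        gst_caps_unref(caps);

    if (!isAudio)
        return; // e.g. a video pad exposed by the same decodebin: simply ignore it

    GstPad* sinkPad = gst_element_get_static_pad(audioSinkBin, "sink");
    gst_pad_link(pad, sinkPad);
    gst_object_unref(sinkPad);
}
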
diff --git a/Source/WebCore/platform/graphics/gstreamer/DMABufUtilities.h b/Source/WebCore/platform/graphics/gstreamer/DMABufUtilities.h
index da16adf3b556..7a78145f6228 100644
--- a/Source/WebCore/platform/graphics/gstreamer/DMABufUtilities.h
+++ b/Source/WebCore/platform/graphics/gstreamer/DMABufUtilities.h
@@ -53,12 +53,6 @@ inline uint32_t dmaBufFourccValue(GstVideoFormat format)
return uint32_t(DMABufFormat::FourCC::BGRA8888);
case GST_VIDEO_FORMAT_ABGR:
return uint32_t(DMABufFormat::FourCC::RGBA8888);
- case GST_VIDEO_FORMAT_P010_10LE:
- case GST_VIDEO_FORMAT_P010_10BE:
- return uint32_t(DMABufFormat::FourCC::P010);
- case GST_VIDEO_FORMAT_P016_LE:
- case GST_VIDEO_FORMAT_P016_BE:
- return uint32_t(DMABufFormat::FourCC::P016);
default:
break;
}
diff --git a/Source/WebCore/platform/graphics/gstreamer/GLVideoSinkGStreamer.cpp b/Source/WebCore/platform/graphics/gstreamer/GLVideoSinkGStreamer.cpp
index f8840e3e31e0..ce209d21fb69 100644
--- a/Source/WebCore/platform/graphics/gstreamer/GLVideoSinkGStreamer.cpp
+++ b/Source/WebCore/platform/graphics/gstreamer/GLVideoSinkGStreamer.cpp
@@ -88,7 +88,19 @@ static void webKitGLVideoSinkConstructed(GObject* object)
ASSERT(colorconvert);
gst_bin_add_many(GST_BIN_CAST(sink), upload, colorconvert, sink->priv->appSink.get(), nullptr);
- GRefPtr<GstCaps> caps = adoptGRef(gst_caps_from_string("video/x-raw, format = (string) " GST_GL_CAPS_FORMAT));
+ // Workaround until we can depend on GStreamer 1.16.2.
+ // https://gitlab.freedesktop.org/gstreamer/gst-plugins-base/commit/8d32de090554cf29fe359f83aa46000ba658a693
+ // Forcing a color conversion to RGBA here allows glupload to internally use
+ // an uploader that adds a VideoMeta, through the TextureUploadMeta caps
+ // feature, without needing the patch above. However this specific caps
+ // feature is going to be removed from GStreamer so it is considered a
+ // short-term workaround. This code path most likely will have a negative
+ // performance impact on embedded platforms as well. Downstream embedders
+ // are highly encouraged to cherry-pick the patch linked above in their BSP
+ // and set the WEBKIT_GST_NO_RGBA_CONVERSION environment variable until
+ // GStreamer 1.16.2 is released.
+ // See also https://bugs.webkit.org/show_bug.cgi?id=201422
+ GRefPtr<GstCaps> caps = adoptGRef(gst_caps_from_string("video/x-raw, format = (string) RGBA"));
gst_caps_set_features(caps.get(), 0, gst_caps_features_new(GST_CAPS_FEATURE_MEMORY_GL_MEMORY, nullptr));
g_object_set(sink->priv->appSink.get(), "caps", caps.get(), nullptr);
@@ -186,12 +198,8 @@ static void webKitGLVideoSinkGetProperty(GObject* object, guint propertyId, GVal
WebKitGLVideoSink* sink = WEBKIT_GL_VIDEO_SINK(object);
switch (propertyId) {
- case PROP_STATS: {
- GUniqueOutPtr<GstStructure> stats;
- g_object_get(sink->priv->appSink.get(), "stats", &stats.outPtr(), nullptr);
- gst_value_set_structure(value, stats.get());
+ case PROP_STATS:
break;
- }
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID(object, propertyId, paramSpec);
RELEASE_ASSERT_NOT_REACHED();
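
The comment above asks downstream embedders to backport the glupload fix and set WEBKIT_GST_NO_RGBA_CONVERSION. A hedged sketch of how such a gate could look; the env-var check is not part of this hunk, the function name is illustrative, and GST_GL_CAPS_FORMAT is the WebKit-internal format-list macro used by the removed line.

#include <gst/gst.h>
#include <gst/gl/gl.h>

// Sketch only: keep the wider format list when the embedder has backported the
// glupload VideoMeta fix and opted out of the forced RGBA conversion.
GstCaps* makeGLSinkCaps()
{
    const char* capsString = g_getenv("WEBKIT_GST_NO_RGBA_CONVERSION")
        ? "video/x-raw, format = (string) " GST_GL_CAPS_FORMAT // original behaviour
        : "video/x-raw, format = (string) RGBA";               // forced conversion, as above
    GstCaps* caps = gst_caps_from_string(capsString);
    gst_caps_set_features(caps, 0, gst_caps_features_new(GST_CAPS_FEATURE_MEMORY_GL_MEMORY, nullptr));
    return caps;
}
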
diff --git a/Source/WebCore/platform/graphics/gstreamer/GStreamerAudioMixer.cpp b/Source/WebCore/platform/graphics/gstreamer/GStreamerAudioMixer.cpp
index 8b30e0f14b6a..2d587f68a3b2 100644
--- a/Source/WebCore/platform/graphics/gstreamer/GStreamerAudioMixer.cpp
+++ b/Source/WebCore/platform/graphics/gstreamer/GStreamerAudioMixer.cpp
@@ -32,7 +32,7 @@ GST_DEBUG_CATEGORY_STATIC(webkit_media_gst_audio_mixer_debug);
bool GStreamerAudioMixer::isAvailable()
{
- return isGStreamerPluginAvailable("inter") && isGStreamerPluginAvailable("audiomixer");
+ return false;
}
GStreamerAudioMixer& GStreamerAudioMixer::singleton()
diff --git a/Source/WebCore/platform/graphics/gstreamer/GStreamerCommon.cpp b/Source/WebCore/platform/graphics/gstreamer/GStreamerCommon.cpp
index a7392908eabd..4171e640de22 100644
--- a/Source/WebCore/platform/graphics/gstreamer/GStreamerCommon.cpp
+++ b/Source/WebCore/platform/graphics/gstreamer/GStreamerCommon.cpp
@@ -117,6 +117,24 @@ GstPad* webkitGstGhostPadFromStaticTemplate(GstStaticPadTemplate* staticPadTempl
return pad;
}
+#if !GST_CHECK_VERSION(1, 18, 0)
+void webkitGstVideoFormatInfoComponent(const GstVideoFormatInfo* info, guint plane, gint components[GST_VIDEO_MAX_COMPONENTS])
+{
+ guint c, i = 0;
+
+ /* Reverse mapping of info->plane. */
+ for (c = 0; c < GST_VIDEO_FORMAT_INFO_N_COMPONENTS(info); c++) {
+ if (GST_VIDEO_FORMAT_INFO_PLANE(info, c) == plane) {
+ components[i] = c;
+ i++;
+ }
+ }
+
+ for (c = i; c < GST_VIDEO_MAX_COMPONENTS; c++)
+ components[c] = -1;
+}
+#endif
+
#if ENABLE(VIDEO)
bool getVideoSizeAndFormatFromCaps(const GstCaps* caps, WebCore::IntSize& size, GstVideoFormat& format, int& pixelAspectRatioNumerator, int& pixelAspectRatioDenominator, int& stride)
{
@@ -566,31 +584,6 @@ void deinitializeGStreamer()
teardownVideoEncoderSingleton();
teardownGStreamerImageDecoders();
#endif
-
- bool isLeaksTracerActive = false;
- auto activeTracers = gst_tracing_get_active_tracers();
- while (activeTracers) {
- auto tracer = adoptGRef(GST_TRACER_CAST(activeTracers->data));
- if (!isLeaksTracerActive && !g_strcmp0(G_OBJECT_TYPE_NAME(G_OBJECT(tracer.get())), "GstLeaksTracer"))
- isLeaksTracerActive = true;
- activeTracers = g_list_delete_link(activeTracers, activeTracers);
- }
-
- if (!isLeaksTracerActive)
- return;
-
- // Make sure there is no active pipeline left. Those might trigger deadlocks during gst_deinit().
- {
- Locker locker { s_activePipelinesMapLock };
- for (auto& pipeline : activePipelinesMap().values()) {
- GST_DEBUG("Pipeline %" GST_PTR_FORMAT " was left running. Forcing clean-up.", pipeline.get());
- disconnectSimpleBusMessageCallback(pipeline.get());
- gst_element_set_state(pipeline.get(), GST_STATE_NULL);
- }
- activePipelinesMap().clear();
- }
-
- gst_deinit();
}
unsigned getGstPlayFlag(const char* nick)
@@ -1239,6 +1232,36 @@ String gstStructureToJSONString(const GstStructure* structure)
return value->toJSONString();
}
+#if !GST_CHECK_VERSION(1, 18, 0)
+GstClockTime webkitGstElementGetCurrentRunningTime(GstElement* element)
+{
+ g_return_val_if_fail(GST_IS_ELEMENT(element), GST_CLOCK_TIME_NONE);
+
+ auto baseTime = gst_element_get_base_time(element);
+ if (!GST_CLOCK_TIME_IS_VALID(baseTime)) {
+ GST_DEBUG_OBJECT(element, "Could not determine base time");
+ return GST_CLOCK_TIME_NONE;
+ }
+
+ auto clock = adoptGRef(gst_element_get_clock(element));
+ if (!clock) {
+ GST_DEBUG_OBJECT(element, "Element has no clock");
+ return GST_CLOCK_TIME_NONE;
+ }
+
+ auto clockTime = gst_clock_get_time(clock.get());
+ if (!GST_CLOCK_TIME_IS_VALID(clockTime))
+ return GST_CLOCK_TIME_NONE;
+
+ if (clockTime < baseTime) {
+ GST_DEBUG_OBJECT(element, "Got negative current running time");
+ return GST_CLOCK_TIME_NONE;
+ }
+
+ return clockTime - baseTime;
+}
+#endif
+
GstClockTime webkitGstInitTime()
{
return s_webkitGstInitTime;
@@ -1296,6 +1319,7 @@ PlatformVideoColorSpace videoColorSpaceFromInfo(const GstVideoInfo& info)
case GST_VIDEO_TRANSFER_BT709:
colorSpace.transfer = PlatformVideoTransferCharacteristics::Bt709;
break;
+#if GST_CHECK_VERSION(1, 18, 0)
case GST_VIDEO_TRANSFER_BT601:
colorSpace.transfer = PlatformVideoTransferCharacteristics::Smpte170m;
break;
@@ -1308,6 +1332,7 @@ PlatformVideoColorSpace videoColorSpaceFromInfo(const GstVideoInfo& info)
case GST_VIDEO_TRANSFER_BT2020_10:
colorSpace.transfer = PlatformVideoTransferCharacteristics::Bt2020_10bit;
break;
+#endif
case GST_VIDEO_TRANSFER_BT2020_12:
colorSpace.transfer = PlatformVideoTransferCharacteristics::Bt2020_12bit;
break;
@@ -1426,6 +1451,7 @@ void fillVideoInfoColorimetryFromColorSpace(GstVideoInfo* info, const PlatformVi
case PlatformVideoTransferCharacteristics::Bt709:
GST_VIDEO_INFO_COLORIMETRY(info).transfer = GST_VIDEO_TRANSFER_BT709;
break;
+#if GST_CHECK_VERSION(1, 18, 0)
case PlatformVideoTransferCharacteristics::Smpte170m:
GST_VIDEO_INFO_COLORIMETRY(info).transfer = GST_VIDEO_TRANSFER_BT601;
break;
@@ -1438,6 +1464,7 @@ void fillVideoInfoColorimetryFromColorSpace(GstVideoInfo* info, const PlatformVi
case PlatformVideoTransferCharacteristics::Bt2020_10bit:
GST_VIDEO_INFO_COLORIMETRY(info).transfer = GST_VIDEO_TRANSFER_BT2020_10;
break;
+#endif
case PlatformVideoTransferCharacteristics::Bt2020_12bit:
GST_VIDEO_INFO_COLORIMETRY(info).transfer = GST_VIDEO_TRANSFER_BT2020_12;
break;
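
A standalone usage sketch of the vendored helper above, assuming the declarations from GStreamerCommon.h (patched below) are visible; the function name is illustrative. For NV12, plane 0 carries only Y (component 0) and plane 1 carries the interleaved U/V pair, so the reverse mapping for plane 1 yields { 1, 2, -1, -1 }.

#include <gst/video/video.h>
#include "GStreamerCommon.h" // assumed include path for the vendored declaration

void printNV12Plane1Components()
{
    const GstVideoFormatInfo* info = gst_video_format_get_info(GST_VIDEO_FORMAT_NV12);
    gint components[GST_VIDEO_MAX_COMPONENTS];

    // Reverse-map plane 1 to its components; prints "1" and "2" for NV12.
    webkitGstVideoFormatInfoComponent(info, 1, components);
    for (guint i = 0; i < GST_VIDEO_MAX_COMPONENTS && components[i] >= 0; i++)
        g_print("plane 1 carries component %d\n", components[i]);
}
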
diff --git a/Source/WebCore/platform/graphics/gstreamer/GStreamerCommon.h b/Source/WebCore/platform/graphics/gstreamer/GStreamerCommon.h
index f9f42a940a58..766ebaf45b38 100644
--- a/Source/WebCore/platform/graphics/gstreamer/GStreamerCommon.h
+++ b/Source/WebCore/platform/graphics/gstreamer/GStreamerCommon.h
@@ -57,6 +57,15 @@ inline bool webkitGstCheckVersion(guint major, guint minor, guint micro)
return true;
}
+#if !GST_CHECK_VERSION(1, 18, 0)
+// gst_video_format_info_component() is GStreamer 1.18 API, so for older versions we use a local
+// vendored copy of the function.
+#define GST_VIDEO_MAX_COMPONENTS 4
+void webkitGstVideoFormatInfoComponent(const GstVideoFormatInfo*, guint, gint components[GST_VIDEO_MAX_COMPONENTS]);
+#endif
+
+#define gst_video_format_info_component webkitGstVideoFormatInfoComponent
+
#define GST_VIDEO_CAPS_TYPE_PREFIX "video/"
#define GST_AUDIO_CAPS_TYPE_PREFIX "audio/"
#define GST_TEXT_CAPS_TYPE_PREFIX "text/"
@@ -287,6 +296,13 @@ Vector<T> gstStructureGetArray(const GstStructure*, ASCIILiteral key);
String gstStructureToJSONString(const GstStructure*);
+#if !GST_CHECK_VERSION(1, 18, 0)
+// gst_element_get_current_running_time() is GStreamer 1.18 API, so for older versions we use a local
+// vendored copy of the function.
+GstClockTime webkitGstElementGetCurrentRunningTime(GstElement*);
+#define gst_element_get_current_running_time webkitGstElementGetCurrentRunningTime
+#endif
+
GstClockTime webkitGstInitTime();
PlatformVideoColorSpace videoColorSpaceFromCaps(const GstCaps*);
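
A small usage sketch of the running-time shim declared above (function name illustrative): on GStreamer >= 1.18 the call resolves to the upstream gst_element_get_current_running_time(), on 1.16 the macro routes it to the vendored copy, and in both cases the result is clock time minus base time, or GST_CLOCK_TIME_NONE when either is unavailable.

#include <gst/gst.h>

GstClockTime queryRunningTime(GstElement* pipeline)
{
    GstClockTime runningTime = gst_element_get_current_running_time(pipeline);
    if (!GST_CLOCK_TIME_IS_VALID(runningTime))
        g_warning("running time unavailable (element has no clock or no base time yet)");
    return runningTime; // clock time minus base time, per the vendored implementation above
}
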
diff --git a/Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamer.cpp b/Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamer.cpp
index 9b30c5cfac68..012241d680e3 100644
--- a/Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamer.cpp
+++ b/Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamer.cpp
@@ -604,7 +604,6 @@ bool MediaPlayerPrivateGStreamer::doSeek(const SeekTarget& target, float rate)
auto seekStart = toGstClockTime(startTime);
auto seekStop = toGstClockTime(endTime);
- GST_DEBUG_OBJECT(pipeline(), "[Seek] Performing actual seek to %" GST_TIMEP_FORMAT " (endTime: %" GST_TIMEP_FORMAT ") at rate %f", &seekStart, &seekStop, rate);
return gst_element_seek(m_pipeline.get(), rate, GST_FORMAT_TIME, m_seekFlags, GST_SEEK_TYPE_SET, seekStart, GST_SEEK_TYPE_SET, seekStop);
}
@@ -4369,7 +4368,27 @@ GstElement* MediaPlayerPrivateGStreamer::createVideoSink()
g_signal_connect_swapped(m_videoSink.get(), "repaint-cancelled", G_CALLBACK(repaintCancelledCallback), this);
}
- return m_videoSink.get();
+ GstElement* videoSink = nullptr;
+ m_fpsSink = makeGStreamerElement("fpsdisplaysink", "sink");
+ if (m_fpsSink) {
+ g_object_set(m_fpsSink.get(), "silent", TRUE , nullptr);
+
+ // Turn off text overlay unless tracing is enabled.
+ if (gst_debug_category_get_threshold(webkit_media_player_debug) < GST_LEVEL_TRACE)
+ g_object_set(m_fpsSink.get(), "text-overlay", FALSE , nullptr);
+
+ if (gstObjectHasProperty(m_fpsSink.get(), "video-sink")) {
+ g_object_set(m_fpsSink.get(), "video-sink", m_videoSink.get(), nullptr);
+ videoSink = m_fpsSink.get();
+ } else
+ m_fpsSink = nullptr;
+ }
+
+ if (!m_fpsSink)
+ videoSink = m_videoSink.get();
+
+ ASSERT(videoSink);
+ return videoSink;
}
void MediaPlayerPrivateGStreamer::setStreamVolumeElement(GstStreamVolume* volume)
@@ -4399,25 +4418,18 @@ void MediaPlayerPrivateGStreamer::setStreamVolumeElement(GstStreamVolume* volume
bool MediaPlayerPrivateGStreamer::updateVideoSinkStatistics()
{
- if (!m_videoSink)
- return false;
-
- GUniqueOutPtr<GstStructure> stats;
- g_object_get(m_videoSink.get(), "stats", &stats.outPtr(), nullptr);
- if (!stats)
+ if (!m_videoSink || !m_fpsSink)
return false;
- auto totalVideoFrames = gstStructureGet<uint64_t>(stats.get(), "rendered"_s);
- auto droppedVideoFrames = gstStructureGet<uint64_t>(stats.get(), "dropped"_s);
-
- if (!totalVideoFrames || !droppedVideoFrames)
- return false;
+ unsigned totalVideoFrames = 0;
+ unsigned droppedVideoFrames = 0;
+ g_object_get(m_fpsSink.get(), "frames-rendered", &totalVideoFrames, "frames-dropped", &droppedVideoFrames, nullptr);
// Caching is required so that metrics queries performed after EOS still return valid values.
- if (*totalVideoFrames)
- m_totalVideoFrames = *totalVideoFrames;
- if (*droppedVideoFrames)
- m_droppedVideoFrames = *droppedVideoFrames;
+ if (totalVideoFrames)
+ m_totalVideoFrames = totalVideoFrames;
+ if (droppedVideoFrames)
+ m_droppedVideoFrames = droppedVideoFrames;
return true;
}
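
A minimal sketch of the fpsdisplaysink statistics path the hunks above switch to: the wrapper sink (from gst-plugins-bad) counts frames on behalf of the real video sink and exposes them as the frames-rendered / frames-dropped properties read in updateVideoSinkStatistics(). The function name is illustrative; the element and property names are the ones used in the patch.

#include <gst/gst.h>

void readFrameStats(GstElement* fpsSink, GstElement* realVideoSink)
{
    g_object_set(fpsSink,
        "silent", TRUE,        // keep it from printing FPS to the console
        "text-overlay", FALSE, // no on-video overlay (the patch only enables it at TRACE level)
        "video-sink", realVideoSink,
        nullptr);

    guint rendered = 0, dropped = 0;
    g_object_get(fpsSink, "frames-rendered", &rendered, "frames-dropped", &dropped, nullptr);
    g_print("rendered=%u dropped=%u\n", rendered, dropped);
}
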
diff --git a/Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamer.h b/Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamer.h
index 687bb4648aef..53f1f7ab3dc9 100644
--- a/Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamer.h
+++ b/Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamer.h
@@ -637,6 +637,7 @@ private:
uint64_t m_networkReadPosition { 0 };
mutable uint64_t m_readPositionAtLastDidLoadingProgress { 0 };
+ GRefPtr<GstElement> m_fpsSink { nullptr };
uint64_t m_totalVideoFrames { 0 };
uint64_t m_droppedVideoFrames { 0 };
uint64_t m_decodedVideoFrames { 0 };
diff --git a/Source/WebCore/platform/gstreamer/GStreamerCodecUtilities.cpp b/Source/WebCore/platform/gstreamer/GStreamerCodecUtilities.cpp
index c701a84d2316..ec4c4b24347c 100644
--- a/Source/WebCore/platform/gstreamer/GStreamerCodecUtilities.cpp
+++ b/Source/WebCore/platform/gstreamer/GStreamerCodecUtilities.cpp
@@ -256,7 +256,7 @@ static std::pair<GRefPtr<GstCaps>, GRefPtr<GstCaps>> vpxCapsFromCodecString(cons
else if (transfer == VPConfigurationTransferCharacteristics::BT_470_7_BG)
GST_VIDEO_INFO_COLORIMETRY(&info).transfer = GST_VIDEO_TRANSFER_GAMMA28;
else if (transfer == VPConfigurationTransferCharacteristics::BT_601_7)
- GST_VIDEO_INFO_COLORIMETRY(&info).transfer = GST_VIDEO_TRANSFER_BT601;
+ GST_VIDEO_INFO_COLORIMETRY(&info).transfer = GST_VIDEO_TRANSFER_UNKNOWN;
else if (transfer == VPConfigurationTransferCharacteristics::SMPTE_ST_240)
GST_VIDEO_INFO_COLORIMETRY(&info).transfer = GST_VIDEO_TRANSFER_SMPTE240M;
else if (transfer == VPConfigurationTransferCharacteristics::Linear)
@@ -271,16 +271,16 @@ static std::pair<GRefPtr<GstCaps>, GRefPtr<GstCaps>> vpxCapsFromCodecString(cons
GST_WARNING("VPConfigurationTransferCharacteristics::IEC_61966_2_1 not supported");
GST_VIDEO_INFO_COLORIMETRY(&info).transfer = GST_VIDEO_TRANSFER_UNKNOWN;
} else if (transfer == VPConfigurationTransferCharacteristics::BT_2020_10bit)
- GST_VIDEO_INFO_COLORIMETRY(&info).transfer = GST_VIDEO_TRANSFER_BT2020_10;
+ GST_VIDEO_INFO_COLORIMETRY(&info).transfer = GST_VIDEO_TRANSFER_UNKNOWN;
else if (transfer == VPConfigurationTransferCharacteristics::BT_2020_12bit)
GST_VIDEO_INFO_COLORIMETRY(&info).transfer = GST_VIDEO_TRANSFER_BT2020_12;
else if (transfer == VPConfigurationTransferCharacteristics::SMPTE_ST_2084)
- GST_VIDEO_INFO_COLORIMETRY(&info).transfer = GST_VIDEO_TRANSFER_SMPTE2084;
+ GST_VIDEO_INFO_COLORIMETRY(&info).transfer = GST_VIDEO_TRANSFER_UNKNOWN;
else if (transfer == VPConfigurationTransferCharacteristics::SMPTE_ST_428_1) {
GST_WARNING("VPConfigurationTransferCharacteristics::SMPTE_ST_428_1 not supported");
GST_VIDEO_INFO_COLORIMETRY(&info).transfer = GST_VIDEO_TRANSFER_UNKNOWN;
} else if (transfer == VPConfigurationTransferCharacteristics::BT_2100_HLG)
- GST_VIDEO_INFO_COLORIMETRY(&info).transfer = GST_VIDEO_TRANSFER_ARIB_STD_B67;
+ GST_VIDEO_INFO_COLORIMETRY(&info).transfer = GST_VIDEO_TRANSFER_UNKNOWN;
auto matrix = parameters->matrixCoefficients;
if (matrix == VPConfigurationMatrixCoefficients::Identity)
@@ -421,7 +421,7 @@ static std::pair<GRefPtr<GstCaps>, GRefPtr<GstCaps>> av1CapsFromCodecString(cons
GST_VIDEO_INFO_COLORIMETRY(&info).transfer = GST_VIDEO_TRANSFER_GAMMA28;
break;
case AV1ConfigurationTransferCharacteristics::BT_601_7:
- GST_VIDEO_INFO_COLORIMETRY(&info).transfer = GST_VIDEO_TRANSFER_BT601;
+ GST_VIDEO_INFO_COLORIMETRY(&info).transfer = GST_VIDEO_TRANSFER_UNKNOWN;
break;
case AV1ConfigurationTransferCharacteristics::SMPTE_ST_240:
GST_VIDEO_INFO_COLORIMETRY(&info).transfer = GST_VIDEO_TRANSFER_SMPTE240M;
@@ -445,20 +445,20 @@ static std::pair<GRefPtr<GstCaps>, GRefPtr<GstCaps>> av1CapsFromCodecString(cons
GST_VIDEO_INFO_COLORIMETRY(&info).transfer = GST_VIDEO_TRANSFER_UNKNOWN;
break;
case AV1ConfigurationTransferCharacteristics::BT_2020_10bit:
- GST_VIDEO_INFO_COLORIMETRY(&info).transfer = GST_VIDEO_TRANSFER_BT2020_10;
+ GST_VIDEO_INFO_COLORIMETRY(&info).transfer = GST_VIDEO_TRANSFER_UNKNOWN;
break;
case AV1ConfigurationTransferCharacteristics::BT_2020_12bit:
GST_VIDEO_INFO_COLORIMETRY(&info).transfer = GST_VIDEO_TRANSFER_BT2020_12;
break;
case AV1ConfigurationTransferCharacteristics::SMPTE_ST_2084:
- GST_VIDEO_INFO_COLORIMETRY(&info).transfer = GST_VIDEO_TRANSFER_SMPTE2084;
+ GST_VIDEO_INFO_COLORIMETRY(&info).transfer = GST_VIDEO_TRANSFER_UNKNOWN;
break;
case AV1ConfigurationTransferCharacteristics::SMPTE_ST_428_1:
GST_WARNING("AV1ConfigurationTransferCharacteristics::SMPTE_ST_428_1 not supported");
GST_VIDEO_INFO_COLORIMETRY(&info).transfer = GST_VIDEO_TRANSFER_UNKNOWN;
break;
case AV1ConfigurationTransferCharacteristics::BT_2100_HLG:
- GST_VIDEO_INFO_COLORIMETRY(&info).transfer = GST_VIDEO_TRANSFER_ARIB_STD_B67;
+ GST_VIDEO_INFO_COLORIMETRY(&info).transfer = GST_VIDEO_TRANSFER_UNKNOWN;
break;
};
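
The transfer characteristics replaced with GST_VIDEO_TRANSFER_UNKNOWN above (BT601, BT2020_10, SMPTE2084, ARIB_STD_B67) are enum values that only exist from GStreamer 1.18 onwards. A hypothetical alternative, shown purely for comparison and not what this patch does, would guard them at compile time instead of dropping them unconditionally; the helper name is illustrative.

#include <gst/video/video.h>

static GstVideoTransferFunction pqOrHlgTransfer(bool usePQ)
{
#if GST_CHECK_VERSION(1, 18, 0)
    return usePQ ? GST_VIDEO_TRANSFER_SMPTE2084 : GST_VIDEO_TRANSFER_ARIB_STD_B67;
#else
    (void)usePQ;
    return GST_VIDEO_TRANSFER_UNKNOWN; // degrade on 1.16, as the hunks above do unconditionally
#endif
}
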
diff --git a/Source/WebCore/platform/gstreamer/VideoEncoderPrivateGStreamer.cpp b/Source/WebCore/platform/gstreamer/VideoEncoderPrivateGStreamer.cpp
index 655115564aa2..82204d5ff6d4 100644
--- a/Source/WebCore/platform/gstreamer/VideoEncoderPrivateGStreamer.cpp
+++ b/Source/WebCore/platform/gstreamer/VideoEncoderPrivateGStreamer.cpp
@@ -754,7 +754,9 @@ static void webkit_video_encoder_class_init(WebKitVideoEncoderClass* klass)
gst_util_set_object_arg(G_OBJECT(encoder), "end-usage", "cq");
break;
};
- }, [](GstElement* encoder, const WebKitVideoEncoderBitRateAllocation& bitRateAllocation) {
+ }
+#if 0
+ , [](GstElement* encoder, const WebKitVideoEncoderBitRateAllocation& bitRateAllocation) {
// Allow usage of deprecated GValueArray API.
ALLOW_DEPRECATED_DECLARATIONS_BEGIN;
GUniquePtr<GValueArray> bitrates(g_value_array_new(3));
@@ -887,7 +889,9 @@ static void webkit_video_encoder_class_init(WebKitVideoEncoderClass* klass)
}
ALLOW_DEPRECATED_DECLARATIONS_END;
- });
+ }
+#endif
+ );
Encoders::registerEncoder(Vp9, "vp9enc"_s, nullptr, "video/x-vp9"_s, nullptr,
[&](WebKitVideoEncoder* self) {
diff --git a/Source/cmake/GStreamerChecks.cmake b/Source/cmake/GStreamerChecks.cmake
index 63f183fa6e30..f26a924e9d02 100644
--- a/Source/cmake/GStreamerChecks.cmake
+++ b/Source/cmake/GStreamerChecks.cmake
@@ -1,7 +1,7 @@
if (ENABLE_VIDEO OR ENABLE_WEB_AUDIO)
SET_AND_EXPOSE_TO_BUILD(USE_GSTREAMER TRUE)
if (USE_GSTREAMER_FULL)
- find_package(GStreamer 1.18.4 REQUIRED COMPONENTS full)
+ find_package(GStreamer 1.16.1 REQUIRED COMPONENTS full)
if (NOT PC_GSTREAMER_FULL_FOUND)
message(FATAL_ERROR "GStreamer static library libgstreamer-full-1.0 not found")
else ()
@@ -25,7 +25,7 @@ if (ENABLE_VIDEO OR ENABLE_WEB_AUDIO)
list(APPEND GSTREAMER_COMPONENTS webrtc)
endif ()
- find_package(GStreamer 1.18.4 REQUIRED COMPONENTS ${GSTREAMER_COMPONENTS})
+ find_package(GStreamer 1.16.1 REQUIRED COMPONENTS ${GSTREAMER_COMPONENTS})
if (ENABLE_WEB_AUDIO)
if (NOT PC_GSTREAMER_AUDIO_FOUND OR NOT PC_GSTREAMER_FFT_FOUND)