about summary refs log tree commit diff stats
diff options
context:
space:
mode:
-rw-r--r--recipes-multimedia/gstreamer/gst-va-intel.bb54
-rw-r--r--recipes-multimedia/gstreamer/gstreamer-vaapi-0.10_0.5.8.bb14
-rw-r--r--recipes-multimedia/gstreamer/gstreamer-vaapi-1.0_0.5.8.bb5
-rw-r--r--recipes-multimedia/gstreamer/gstreamer-vaapi.inc41
-rw-r--r--recipes-multimedia/gstreamer/gstreamer-vaapi/gstvideoencoder.c1712
-rw-r--r--recipes-multimedia/gstreamer/gstreamer-vaapi/gstvideoencoder.h308
-rw-r--r--recipes-multimedia/gstreamer/gstreamer-vaapi/install-tests.patch31
-rw-r--r--recipes-multimedia/gstreamer/gstreamer-vaapi_0.4.3.bb24
-rw-r--r--recipes-multimedia/libva/libva-intel-driver/0001-Workaround-for-concurrently-playing-VC1-and-H264-vid.patch440
-rw-r--r--recipes-multimedia/libva/libva-intel-driver/wayland-include.patch30
-rw-r--r--recipes-multimedia/libva/libva-intel-driver_1.3.2.bb31
-rw-r--r--recipes-multimedia/libva/libva.inc44
-rw-r--r--recipes-multimedia/libva/libva_1.0.16.bb9
-rw-r--r--recipes-multimedia/libva/libva_1.3.1.bb6
-rw-r--r--recipes-multimedia/libva/va-intel.bb28
15 files changed, 2777 insertions, 0 deletions
diff --git a/recipes-multimedia/gstreamer/gst-va-intel.bb b/recipes-multimedia/gstreamer/gst-va-intel.bb
new file mode 100644
index 0000000..9e0d0ca
--- /dev/null
+++ b/recipes-multimedia/gstreamer/gst-va-intel.bb
@@ -0,0 +1,54 @@
+DESCRIPTION = "GStreamer Video Acceleration Add-ons for Intel BSPs"
+LICENSE = "MIT"
+DEPENDS = "gst-meta-base"
+LIC_FILES_CHKSUM = "file://${COREBASE}/LICENSE;md5=4d92cd373abda3937c2bc47fbc49d690 \
+ file://${COREBASE}/meta/COPYING.MIT;md5=3da9cfbcb788c80a0384361b4de20420"
+
+def map_gst_vaapi(d):
+ if base_contains('MACHINE_FEATURES', 'va-impl-mixvideo', "1", "0", d) == "1":
+ return "gst-va-mixvideo-vaapi"
+ if base_contains('MACHINE_FEATURES', 'va-impl-intel', "1", "0", d) == "1":
+ return "gst-va-intel-vaapi"
+ return ""
+
+VAAPI_IMPL = "${@map_gst_vaapi(d)}"
+
+PACKAGES = "\
+ gst-va-intel \
+ gst-va-intel-general \
+ gst-va-intel-video \
+ ${VAAPI_IMPL} \
+ "
+
+ALLOW_EMPTY_gst-va-intel = "1"
+ALLOW_EMPTY_gst-va-intel-general = "1"
+ALLOW_EMPTY_gst-va-intel-video = "1"
+ALLOW_EMPTY_gst-va-intel-vaapi = "1"
+ALLOW_EMPTY_gst-va-mixvideo-vaapi = "1"
+
+RDEPENDS_gst-va-intel = "\
+ gst-va-intel-general \
+ gst-va-intel-video \
+ ${VAAPI_IMPL} \
+ "
+
+RDEPENDS_gst-va-intel-general = "\
+ ${@bb.utils.contains("LICENSE_FLAGS_WHITELIST", \
+ "commercial", "gst-ffmpeg", "", d)} \
+ "
+
+RDEPENDS_gst-va-intel-video = "\
+ gst-plugins-good-isomp4 \
+ "
+
+# The gstreamer-vaapi package contains the vaapi implementation
+#
+RDEPENDS_gst-va-intel-vaapi = "\
+ gstreamer-vaapi \
+ "
+
+# The emgd driver contains the vaapi implementation
+#
+RDEPENDS_gst-va-mixvideo-vaapi = "\
+ emgd-driver-bin \
+ "
diff --git a/recipes-multimedia/gstreamer/gstreamer-vaapi-0.10_0.5.8.bb b/recipes-multimedia/gstreamer/gstreamer-vaapi-0.10_0.5.8.bb
new file mode 100644
index 0000000..98e475d
--- /dev/null
+++ b/recipes-multimedia/gstreamer/gstreamer-vaapi-0.10_0.5.8.bb
@@ -0,0 +1,14 @@
+require gstreamer-vaapi.inc
+
+DEPENDS += "gstreamer gst-plugins-base gst-plugins-bad"
+
+GST_API_VERSION = "0.10"
+
+SRC_URI += "file://gstvideoencoder.c file://gstvideoencoder.h"
+
+PACKAGECONFIG_remove = "wayland"
+
+# The SRC_URI 'subdir' parameter is broken for plain files; once fixed, move these files there instead
+do_compile_prepend() {
+ cp -f ${WORKDIR}/gstvideoencoder.[ch] ${S}/ext/videoutils/gst-libs/gst/video/
+}
diff --git a/recipes-multimedia/gstreamer/gstreamer-vaapi-1.0_0.5.8.bb b/recipes-multimedia/gstreamer/gstreamer-vaapi-1.0_0.5.8.bb
new file mode 100644
index 0000000..886b3c1
--- /dev/null
+++ b/recipes-multimedia/gstreamer/gstreamer-vaapi-1.0_0.5.8.bb
@@ -0,0 +1,5 @@
+require gstreamer-vaapi.inc
+
+DEPENDS += "gstreamer1.0 gstreamer1.0-plugins-base gstreamer1.0-plugins-bad"
+
+GST_API_VERSION = "1.2"
diff --git a/recipes-multimedia/gstreamer/gstreamer-vaapi.inc b/recipes-multimedia/gstreamer/gstreamer-vaapi.inc
new file mode 100644
index 0000000..2f5b6f1
--- /dev/null
+++ b/recipes-multimedia/gstreamer/gstreamer-vaapi.inc
@@ -0,0 +1,41 @@
+SUMMARY = "VA-API support to GStreamer"
+DESCRIPTION = "gstreamer-vaapi consists of a collection of VA-API \
+based plugins for GStreamer and helper libraries: `vaapidecode', \
+`vaapiconvert', and `vaapisink'."
+
+REALPN = "gstreamer-vaapi"
+FILESPATH = "${@base_set_filespath(["${FILE_DIRNAME}/${REALPN}"], d)}"
+
+LICENSE = "LGPLv2.1+"
+LIC_FILES_CHKSUM = "file://COPYING.LIB;md5=4fbd65380cdd255951079008b364516c"
+
+DEPENDS = "libva"
+
+SRC_URI = "http://www.freedesktop.org/software/vaapi/releases/${REALPN}/${REALPN}-${PV}.tar.bz2 \
+ file://install-tests.patch"
+
+SRC_URI[md5sum] = "375ddbab556e53ccc311b792f2c649a7"
+SRC_URI[sha256sum] = "24fee8a1ca4cb99ed7739d876b17a4085e81b28550350867dee5105300d343c6"
+
+S = "${WORKDIR}/${REALPN}-${PV}"
+
+inherit autotools pkgconfig gtk-doc
+
+PACKAGES =+ "${PN}-tests"
+
+EXTRA_OECONF += "--with-gstreamer-api=${GST_API_VERSION}"
+
+PACKAGECONFIG ??= "drm \
+ ${@base_contains("DISTRO_FEATURES", "opengl x11", "glx", "", d)} \
+ ${@base_contains("DISTRO_FEATURES", "wayland", "wayland", "", d)} \
+ ${@base_contains("DISTRO_FEATURES", "x11", "x11", "", d)}"
+
+PACKAGECONFIG[drm] = "--enable-drm,--disable-drm,udev libdrm"
+PACKAGECONFIG[glx] = "--enable-glx,--disable-glx,virtual/mesa"
+PACKAGECONFIG[wayland] = "--enable-wayland,--disable-wayland,wayland"
+PACKAGECONFIG[x11] = "--enable-x11,--disable-x11,virtual/libx11 libxrandr libxrender"
+
+FILES_${PN} += "${libdir}/gstreamer-*/*.so"
+FILES_${PN}-dbg += "${libdir}/gstreamer-*/.debug"
+FILES_${PN}-dev += "${libdir}/gstreamer-*/*.la ${libdir}/gstreamer-*/*.a"
+FILES_${PN}-tests = "${bindir}/*"
diff --git a/recipes-multimedia/gstreamer/gstreamer-vaapi/gstvideoencoder.c b/recipes-multimedia/gstreamer/gstreamer-vaapi/gstvideoencoder.c
new file mode 100644
index 0000000..7ca6aab
--- /dev/null
+++ b/recipes-multimedia/gstreamer/gstreamer-vaapi/gstvideoencoder.c
@@ -0,0 +1,1712 @@
+/* GStreamer
+ * Copyright (C) 2008 David Schleef <ds@schleef.org>
+ * Copyright (C) 2011 Mark Nauwelaerts <mark.nauwelaerts@collabora.co.uk>.
+ * Copyright (C) 2011 Nokia Corporation. All rights reserved.
+ * Contact: Stefan Kost <stefan.kost@nokia.com>
+ * Copyright (C) 2012 Collabora Ltd.
+ * Author : Edward Hervey <edward@collabora.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+/**
+ * SECTION:gstvideoencoder
+ * @short_description: Base class for video encoders
+ * @see_also:
+ *
+ * This base class is for video encoders turning raw video into
+ * encoded video data.
+ *
+ * GstVideoEncoder and subclass should cooperate as follows.
+ * <orderedlist>
+ * <listitem>
+ * <itemizedlist><title>Configuration</title>
+ * <listitem><para>
+ * Initially, GstVideoEncoder calls @start when the encoder element
+ * is activated, which allows subclass to perform any global setup.
+ * </para></listitem>
+ * <listitem><para>
+ * GstVideoEncoder calls @set_format to inform subclass of the format
+ * of input video data that it is about to receive. Subclass should
+ * setup for encoding and configure base class as appropriate
+ * (e.g. latency). While unlikely, it might be called more than once,
+ * if changing input parameters require reconfiguration. Baseclass
+ * will ensure that processing of current configuration is finished.
+ * </para></listitem>
+ * <listitem><para>
+ * GstVideoEncoder calls @stop at end of all processing.
+ * </para></listitem>
+ * </itemizedlist>
+ * </listitem>
+ * <listitem>
+ * <itemizedlist>
+ * <title>Data processing</title>
+ * <listitem><para>
+ * Base class collects input data and metadata into a frame and hands
+ * this to subclass' @handle_frame.
+ * </para></listitem>
+ * <listitem><para>
+ * If codec processing results in encoded data, subclass should call
+ * @gst_video_encoder_finish_frame to have encoded data pushed
+ * downstream.
+ * </para></listitem>
+ * <listitem><para>
+ * If implemented, baseclass calls subclass @pre_push just prior to
+ * pushing to allow subclasses to modify some metadata on the buffer.
+ * If it returns GST_FLOW_OK, the buffer is pushed downstream.
+ * </para></listitem>
+ * <listitem><para>
+ * GstVideoEncoderClass will handle both srcpad and sinkpad events.
+ * Sink events will be passed to subclass if @event callback has been
+ * provided.
+ * </para></listitem>
+ * </itemizedlist>
+ * </listitem>
+ * <listitem>
+ * <itemizedlist><title>Shutdown phase</title>
+ * <listitem><para>
+ * GstVideoEncoder class calls @stop to inform the subclass that data
+ * parsing will be stopped.
+ * </para></listitem>
+ * </itemizedlist>
+ * </listitem>
+ * </orderedlist>
+ *
+ * Subclass is responsible for providing pad template caps for
+ * source and sink pads. The pads need to be named "sink" and "src". It should
+ * also be able to provide fixed src pad caps in @getcaps by the time it calls
+ * @gst_video_encoder_finish_frame.
+ *
+ * Things that subclass need to take care of:
+ * <itemizedlist>
+ * <listitem><para>Provide pad templates</para></listitem>
+ * <listitem><para>
+ * Provide source pad caps before pushing the first buffer
+ * </para></listitem>
+ * <listitem><para>
+ * Accept data in @handle_frame and provide encoded results to
+ * @gst_video_encoder_finish_frame.
+ * </para></listitem>
+ * </itemizedlist>
+ *
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+/* TODO
+ *
+ * * Change _set_output_format() to steal the reference of the provided caps
+ * * Calculate actual latency based on input/output timestamp/frame_number
+ * and if it exceeds the recorded one, save it and emit a GST_MESSAGE_LATENCY
+ */
+
+/* FIXME 0.11: suppress warnings for deprecated API such as GStaticRecMutex
+ * with newer GLib versions (>= 2.31.0) */
+#define GLIB_DISABLE_DEPRECATION_WARNINGS
+
+#include "gstvideoencoder.h"
+#include "gstvideoutils.h"
+
+#include <string.h>
+
+GST_DEBUG_CATEGORY (videoencoder_debug);
+#define GST_CAT_DEFAULT videoencoder_debug
+
+#define GST_VIDEO_ENCODER_GET_PRIVATE(obj) \
+ (G_TYPE_INSTANCE_GET_PRIVATE ((obj), GST_TYPE_VIDEO_ENCODER, \
+ GstVideoEncoderPrivate))
+
+struct _GstVideoEncoderPrivate
+{
+ guint64 presentation_frame_number;
+ int distance_from_sync;
+
+ /* FIXME : (and introduce a context ?) */
+ gboolean drained;
+ gboolean at_eos;
+
+ gint64 min_latency;
+ gint64 max_latency;
+
+ GList *current_frame_events;
+
+ GList *headers;
+ gboolean new_headers; /* Whether new headers were just set */
+
+ GList *force_key_unit; /* List of pending forced keyunits */
+
+ guint32 system_frame_number;
+
+ GList *frames; /* Protected with OBJECT_LOCK */
+ GstVideoCodecState *input_state;
+ GstVideoCodecState *output_state;
+ gboolean output_state_changed;
+
+ gint64 bytes;
+ gint64 time;
+};
+
+typedef struct _ForcedKeyUnitEvent ForcedKeyUnitEvent;
+struct _ForcedKeyUnitEvent
+{
+ GstClockTime running_time;
+ gboolean pending; /* TRUE if this was requested already */
+ gboolean all_headers;
+ guint count;
+};
+
+static void
+forced_key_unit_event_free (ForcedKeyUnitEvent * evt)
+{
+ g_slice_free (ForcedKeyUnitEvent, evt);
+}
+
+static ForcedKeyUnitEvent *
+forced_key_unit_event_new (GstClockTime running_time, gboolean all_headers,
+ guint count)
+{
+ ForcedKeyUnitEvent *evt = g_slice_new0 (ForcedKeyUnitEvent);
+
+ evt->running_time = running_time;
+ evt->all_headers = all_headers;
+ evt->count = count;
+
+ return evt;
+}
+
+static void gst_video_encoder_finalize (GObject * object);
+
+static gboolean gst_video_encoder_sink_setcaps (GstPad * pad, GstCaps * caps);
+static GstCaps *gst_video_encoder_sink_getcaps (GstPad * pad);
+static gboolean gst_video_encoder_src_event (GstPad * pad, GstEvent * event);
+static gboolean gst_video_encoder_sink_event (GstPad * pad, GstEvent * event);
+static GstFlowReturn gst_video_encoder_chain (GstPad * pad, GstBuffer * buf);
+static GstStateChangeReturn gst_video_encoder_change_state (GstElement *
+ element, GstStateChange transition);
+static const GstQueryType *gst_video_encoder_get_query_types (GstPad * pad);
+static gboolean gst_video_encoder_src_query (GstPad * pad, GstQuery * query);
+static GstVideoCodecFrame *gst_video_encoder_new_frame (GstVideoEncoder *
+ encoder, GstBuffer * buf, GstClockTime timestamp, GstClockTime duration);
+
+static void
+_do_init (GType object_type)
+{
+ const GInterfaceInfo preset_interface_info = {
+ NULL, /* interface_init */
+ NULL, /* interface_finalize */
+ NULL /* interface_data */
+ };
+
+ g_type_add_interface_static (object_type, GST_TYPE_PRESET,
+ &preset_interface_info);
+}
+
+GST_BOILERPLATE_FULL (GstVideoEncoder, gst_video_encoder,
+ GstElement, GST_TYPE_ELEMENT, _do_init);
+
+static void
+gst_video_encoder_base_init (gpointer g_class)
+{
+ GST_DEBUG_CATEGORY_INIT (videoencoder_debug, "videoencoder", 0,
+ "Base Video Encoder");
+}
+
+static void
+gst_video_encoder_class_init (GstVideoEncoderClass * klass)
+{
+ GObjectClass *gobject_class;
+ GstElementClass *gstelement_class;
+
+ gobject_class = G_OBJECT_CLASS (klass);
+ gstelement_class = GST_ELEMENT_CLASS (klass);
+
+ g_type_class_add_private (klass, sizeof (GstVideoEncoderPrivate));
+
+ gobject_class->finalize = gst_video_encoder_finalize;
+
+ gstelement_class->change_state =
+ GST_DEBUG_FUNCPTR (gst_video_encoder_change_state);
+}
+
+static void
+gst_video_encoder_reset (GstVideoEncoder * encoder)
+{
+ GstVideoEncoderPrivate *priv = encoder->priv;
+ GList *g;
+
+ GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
+
+ priv->presentation_frame_number = 0;
+ priv->distance_from_sync = 0;
+
+ g_list_foreach (priv->force_key_unit, (GFunc) forced_key_unit_event_free,
+ NULL);
+ g_list_free (priv->force_key_unit);
+ priv->force_key_unit = NULL;
+
+ priv->drained = TRUE;
+ priv->min_latency = 0;
+ priv->max_latency = 0;
+
+ g_list_foreach (priv->headers, (GFunc) gst_event_unref, NULL);
+ g_list_free (priv->headers);
+ priv->headers = NULL;
+ priv->new_headers = FALSE;
+
+ g_list_foreach (priv->current_frame_events, (GFunc) gst_event_unref, NULL);
+ g_list_free (priv->current_frame_events);
+ priv->current_frame_events = NULL;
+
+ for (g = priv->frames; g; g = g->next) {
+ gst_video_codec_frame_unref ((GstVideoCodecFrame *) g->data);
+ }
+ g_list_free (priv->frames);
+ priv->frames = NULL;
+
+ priv->bytes = 0;
+ priv->time = 0;
+
+ if (priv->input_state)
+ gst_video_codec_state_unref (priv->input_state);
+ priv->input_state = NULL;
+ if (priv->output_state)
+ gst_video_codec_state_unref (priv->output_state);
+ priv->output_state = NULL;
+
+ GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
+}
+
+static void
+gst_video_encoder_init (GstVideoEncoder * encoder, GstVideoEncoderClass * klass)
+{
+ GstVideoEncoderPrivate *priv;
+ GstPadTemplate *pad_template;
+ GstPad *pad;
+
+ GST_DEBUG_OBJECT (encoder, "gst_video_encoder_init");
+
+ priv = encoder->priv = GST_VIDEO_ENCODER_GET_PRIVATE (encoder);
+
+ pad_template =
+ gst_element_class_get_pad_template (GST_ELEMENT_CLASS (klass), "sink");
+ g_return_if_fail (pad_template != NULL);
+
+ encoder->sinkpad = pad = gst_pad_new_from_template (pad_template, "sink");
+
+ gst_pad_set_chain_function (pad, GST_DEBUG_FUNCPTR (gst_video_encoder_chain));
+ gst_pad_set_event_function (pad,
+ GST_DEBUG_FUNCPTR (gst_video_encoder_sink_event));
+ gst_pad_set_setcaps_function (pad,
+ GST_DEBUG_FUNCPTR (gst_video_encoder_sink_setcaps));
+ gst_pad_set_getcaps_function (pad,
+ GST_DEBUG_FUNCPTR (gst_video_encoder_sink_getcaps));
+ gst_element_add_pad (GST_ELEMENT (encoder), encoder->sinkpad);
+
+ pad_template =
+ gst_element_class_get_pad_template (GST_ELEMENT_CLASS (klass), "src");
+ g_return_if_fail (pad_template != NULL);
+
+ encoder->srcpad = pad = gst_pad_new_from_template (pad_template, "src");
+
+ gst_pad_set_query_type_function (pad,
+ GST_DEBUG_FUNCPTR (gst_video_encoder_get_query_types));
+ gst_pad_set_query_function (pad,
+ GST_DEBUG_FUNCPTR (gst_video_encoder_src_query));
+ gst_pad_set_event_function (pad,
+ GST_DEBUG_FUNCPTR (gst_video_encoder_src_event));
+ gst_element_add_pad (GST_ELEMENT (encoder), encoder->srcpad);
+
+ gst_segment_init (&encoder->input_segment, GST_FORMAT_TIME);
+ gst_segment_init (&encoder->output_segment, GST_FORMAT_TIME);
+
+ g_static_rec_mutex_init (&encoder->stream_lock);
+
+ priv->at_eos = FALSE;
+ priv->headers = NULL;
+ priv->new_headers = FALSE;
+
+ gst_video_encoder_reset (encoder);
+}
+
+static gboolean
+gst_video_encoded_video_convert (gint64 bytes, gint64 time,
+ GstFormat src_format, gint64 src_value, GstFormat * dest_format,
+ gint64 * dest_value)
+{
+ gboolean res = FALSE;
+
+ g_return_val_if_fail (dest_format != NULL, FALSE);
+ g_return_val_if_fail (dest_value != NULL, FALSE);
+
+ if (G_UNLIKELY (src_format == *dest_format || src_value == 0 ||
+ src_value == -1)) {
+ if (dest_value)
+ *dest_value = src_value;
+ return TRUE;
+ }
+
+ if (bytes <= 0 || time <= 0) {
+ GST_DEBUG ("not enough metadata yet to convert");
+ goto exit;
+ }
+
+ switch (src_format) {
+ case GST_FORMAT_BYTES:
+ switch (*dest_format) {
+ case GST_FORMAT_TIME:
+ *dest_value = gst_util_uint64_scale (src_value, time, bytes);
+ res = TRUE;
+ break;
+ default:
+ res = FALSE;
+ }
+ break;
+ case GST_FORMAT_TIME:
+ switch (*dest_format) {
+ case GST_FORMAT_BYTES:
+ *dest_value = gst_util_uint64_scale (src_value, bytes, time);
+ res = TRUE;
+ break;
+ default:
+ res = FALSE;
+ }
+ break;
+ default:
+ GST_DEBUG ("unhandled conversion from %d to %d", src_format,
+ *dest_format);
+ res = FALSE;
+ }
+
+exit:
+ return res;
+}
+
+/**
+ * gst_video_encoder_set_headers:
+ * @encoder: a #GstVideoEncoder
+ * @headers: (transfer full) (element-type GstBuffer): a list of #GstBuffer containing the codec header
+ *
+ * Set the codec headers to be sent downstream whenever requested.
+ *
+ * Since: 0.10.37
+ */
+void
+gst_video_encoder_set_headers (GstVideoEncoder * video_encoder, GList * headers)
+{
+ GST_VIDEO_ENCODER_STREAM_LOCK (video_encoder);
+
+ GST_DEBUG_OBJECT (video_encoder, "new headers %p", headers);
+ if (video_encoder->priv->headers) {
+ g_list_foreach (video_encoder->priv->headers, (GFunc) gst_buffer_unref,
+ NULL);
+ g_list_free (video_encoder->priv->headers);
+ }
+ video_encoder->priv->headers = headers;
+ video_encoder->priv->new_headers = TRUE;
+
+ GST_VIDEO_ENCODER_STREAM_UNLOCK (video_encoder);
+}
+
+static gboolean
+gst_video_encoder_drain (GstVideoEncoder * enc)
+{
+ GstVideoEncoderPrivate *priv;
+ GstVideoEncoderClass *enc_class;
+ gboolean ret = TRUE;
+
+ enc_class = GST_VIDEO_ENCODER_GET_CLASS (enc);
+ priv = enc->priv;
+
+ GST_DEBUG_OBJECT (enc, "draining");
+
+ if (priv->drained) {
+ GST_DEBUG_OBJECT (enc, "already drained");
+ return TRUE;
+ }
+
+ if (enc_class->reset) {
+ GST_DEBUG_OBJECT (enc, "requesting subclass to finish");
+ ret = enc_class->reset (enc, TRUE);
+ }
+ /* everything should be away now */
+ if (priv->frames) {
+ /* not fatal/impossible though if subclass/enc eats stuff */
+ g_list_foreach (priv->frames, (GFunc) gst_video_codec_frame_unref, NULL);
+ g_list_free (priv->frames);
+ priv->frames = NULL;
+ }
+
+ return ret;
+}
+
+static GstVideoCodecState *
+_new_output_state (GstCaps * caps, GstVideoCodecState * reference)
+{
+ GstVideoCodecState *state;
+
+ state = g_slice_new0 (GstVideoCodecState);
+ state->ref_count = 1;
+ gst_video_info_init (&state->info);
+ gst_video_info_set_format (&state->info, GST_VIDEO_FORMAT_ENCODED, 0, 0);
+
+ state->caps = caps;
+
+ if (reference) {
+ GstVideoInfo *tgt, *ref;
+
+ tgt = &state->info;
+ ref = &reference->info;
+
+ /* Copy over extra fields from reference state */
+ tgt->interlace_mode = ref->interlace_mode;
+ tgt->flags = ref->flags;
+ tgt->width = ref->width;
+ tgt->height = ref->height;
+ tgt->chroma_site = ref->chroma_site;
+ tgt->colorimetry = ref->colorimetry;
+ tgt->par_n = ref->par_n;
+ tgt->par_d = ref->par_d;
+ tgt->fps_n = ref->fps_n;
+ tgt->fps_d = ref->fps_d;
+ }
+
+ return state;
+}
+
+static GstVideoCodecState *
+_new_input_state (GstCaps * caps)
+{
+ GstVideoCodecState *state;
+
+ state = g_slice_new0 (GstVideoCodecState);
+ state->ref_count = 1;
+ gst_video_info_init (&state->info);
+ if (G_UNLIKELY (!gst_video_info_from_caps (&state->info, caps)))
+ goto parse_fail;
+ state->caps = gst_caps_ref (caps);
+
+ return state;
+
+parse_fail:
+ {
+ g_slice_free (GstVideoCodecState, state);
+ return NULL;
+ }
+}
+
+static gboolean
+gst_video_encoder_sink_setcaps (GstPad * pad, GstCaps * caps)
+{
+ GstVideoEncoder *encoder;
+ GstVideoEncoderClass *encoder_class;
+ GstVideoCodecState *state;
+ gboolean ret;
+ gboolean samecaps = FALSE;
+
+ encoder = GST_VIDEO_ENCODER (gst_pad_get_parent (pad));
+ encoder_class = GST_VIDEO_ENCODER_GET_CLASS (encoder);
+
+ /* subclass should do something here ... */
+ g_return_val_if_fail (encoder_class->set_format != NULL, FALSE);
+
+ GST_DEBUG_OBJECT (encoder, "setcaps %" GST_PTR_FORMAT, caps);
+
+ state = _new_input_state (caps);
+ if (G_UNLIKELY (!state))
+ goto parse_fail;
+
+ GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
+
+ if (encoder->priv->input_state)
+ samecaps =
+ gst_video_info_is_equal (&state->info,
+ &encoder->priv->input_state->info);
+
+ if (!samecaps) {
+ /* arrange draining pending frames */
+ gst_video_encoder_drain (encoder);
+
+ /* and subclass should be ready to configure format at any time around */
+ ret = encoder_class->set_format (encoder, state);
+ if (ret) {
+ if (encoder->priv->input_state)
+ gst_video_codec_state_unref (encoder->priv->input_state);
+ encoder->priv->input_state = state;
+ } else
+ gst_video_codec_state_unref (state);
+ } else {
+ /* no need to stir things up */
+ GST_DEBUG_OBJECT (encoder,
+ "new video format identical to configured format");
+ gst_video_codec_state_unref (state);
+ ret = TRUE;
+ }
+
+ GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
+
+ if (!ret)
+ GST_WARNING_OBJECT (encoder, "rejected caps %" GST_PTR_FORMAT, caps);
+
+ gst_object_unref (encoder);
+
+ return ret;
+
+parse_fail:
+ {
+ GST_WARNING_OBJECT (encoder, "Failed to parse caps");
+ gst_object_unref (encoder);
+ return FALSE;
+ }
+}
+
+/**
+ * gst_video_encoder_proxy_getcaps:
+ * @enc: a #GstVideoEncoder
+ * @caps: initial caps
+ *
+ * Returns caps that express @caps (or sink template caps if @caps == NULL)
+ * restricted to resolution/format/... combinations supported by downstream
+ * elements (e.g. muxers).
+ *
+ * Returns: a #GstCaps owned by caller
+ *
+ * Since: 0.10.37
+ */
+GstCaps *
+gst_video_encoder_proxy_getcaps (GstVideoEncoder * encoder, GstCaps * caps)
+{
+ const GstCaps *templ_caps;
+ GstCaps *allowed;
+ GstCaps *fcaps, *filter_caps;
+ gint i, j;
+
+ /* Allow downstream to specify width/height/framerate/PAR constraints
+ * and forward them upstream for video converters to handle
+ */
+ templ_caps = caps ? caps : gst_pad_get_pad_template_caps (encoder->sinkpad);
+ allowed = gst_pad_get_allowed_caps (encoder->srcpad);
+
+ if (!allowed || gst_caps_is_empty (allowed) || gst_caps_is_any (allowed)) {
+ fcaps = gst_caps_copy (templ_caps);
+ goto done;
+ }
+
+ GST_LOG_OBJECT (encoder, "template caps %" GST_PTR_FORMAT, templ_caps);
+ GST_LOG_OBJECT (encoder, "allowed caps %" GST_PTR_FORMAT, allowed);
+
+ filter_caps = gst_caps_new_empty ();
+
+ for (i = 0; i < gst_caps_get_size (templ_caps); i++) {
+ GQuark q_name =
+ gst_structure_get_name_id (gst_caps_get_structure (templ_caps, i));
+
+ for (j = 0; j < gst_caps_get_size (allowed); j++) {
+ const GstStructure *allowed_s = gst_caps_get_structure (allowed, j);
+ const GValue *val;
+ GstStructure *s;
+
+ s = gst_structure_id_empty_new (q_name);
+ if ((val = gst_structure_get_value (allowed_s, "width")))
+ gst_structure_set_value (s, "width", val);
+ if ((val = gst_structure_get_value (allowed_s, "height")))
+ gst_structure_set_value (s, "height", val);
+ if ((val = gst_structure_get_value (allowed_s, "framerate")))
+ gst_structure_set_value (s, "framerate", val);
+ if ((val = gst_structure_get_value (allowed_s, "pixel-aspect-ratio")))
+ gst_structure_set_value (s, "pixel-aspect-ratio", val);
+
+ gst_caps_merge_structure (filter_caps, s);
+ }
+ }
+
+ fcaps = gst_caps_intersect (filter_caps, templ_caps);
+ gst_caps_unref (filter_caps);
+
+done:
+ gst_caps_replace (&allowed, NULL);
+
+ GST_LOG_OBJECT (encoder, "proxy caps %" GST_PTR_FORMAT, fcaps);
+
+ return fcaps;
+}
+
+static GstCaps *
+gst_video_encoder_sink_getcaps (GstPad * pad)
+{
+ GstVideoEncoder *encoder;
+ GstVideoEncoderClass *klass;
+ GstCaps *caps;
+
+ encoder = GST_VIDEO_ENCODER (gst_pad_get_parent (pad));
+ klass = GST_VIDEO_ENCODER_GET_CLASS (encoder);
+
+ if (klass->getcaps)
+ caps = klass->getcaps (encoder);
+ else
+ caps = gst_video_encoder_proxy_getcaps (encoder, NULL);
+
+ GST_LOG_OBJECT (encoder, "Returning caps %" GST_PTR_FORMAT, caps);
+
+ gst_object_unref (encoder);
+
+ return caps;
+}
+
+static void
+gst_video_encoder_finalize (GObject * object)
+{
+ GstVideoEncoder *encoder;
+
+ GST_DEBUG_OBJECT (object, "finalize");
+
+ encoder = GST_VIDEO_ENCODER (object);
+ if (encoder->priv->headers) {
+ g_list_foreach (encoder->priv->headers, (GFunc) gst_buffer_unref, NULL);
+ g_list_free (encoder->priv->headers);
+ }
+ g_static_rec_mutex_free (&encoder->stream_lock);
+
+ G_OBJECT_CLASS (parent_class)->finalize (object);
+}
+
+static gboolean
+gst_video_encoder_push_event (GstVideoEncoder * encoder, GstEvent * event)
+{
+ switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_NEWSEGMENT:
+ {
+ gboolean update;
+ double rate;
+ double applied_rate;
+ GstFormat format;
+ gint64 start;
+ gint64 stop;
+ gint64 position;
+
+ GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
+ gst_event_parse_new_segment_full (event, &update, &rate, &applied_rate,
+ &format, &start, &stop, &position);
+
+ GST_DEBUG_OBJECT (encoder, "newseg rate %g, applied rate %g, "
+ "format %d, start = %" GST_TIME_FORMAT ", stop = %" GST_TIME_FORMAT
+ ", pos = %" GST_TIME_FORMAT, rate, applied_rate, format,
+ GST_TIME_ARGS (start), GST_TIME_ARGS (stop),
+ GST_TIME_ARGS (position));
+
+ if (format != GST_FORMAT_TIME) {
+ GST_DEBUG_OBJECT (encoder, "received non TIME newsegment");
+ GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
+ break;
+ }
+
+ gst_segment_set_newsegment_full (&encoder->output_segment, update, rate,
+ applied_rate, format, start, stop, position);
+ GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
+ break;
+ }
+ default:
+ break;
+ }
+
+ return gst_pad_push_event (encoder->srcpad, event);
+}
+
+static gboolean
+gst_video_encoder_sink_eventfunc (GstVideoEncoder * encoder, GstEvent * event)
+{
+ GstVideoEncoderClass *encoder_class;
+ gboolean ret = FALSE;
+
+ encoder_class = GST_VIDEO_ENCODER_GET_CLASS (encoder);
+
+ switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_EOS:
+ {
+ GstFlowReturn flow_ret;
+
+ GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
+ encoder->priv->at_eos = TRUE;
+
+ if (encoder_class->finish) {
+ flow_ret = encoder_class->finish (encoder);
+ } else {
+ flow_ret = GST_FLOW_OK;
+ }
+
+ ret = (flow_ret == GST_VIDEO_ENCODER_FLOW_DROPPED);
+ GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
+ break;
+ }
+ case GST_EVENT_NEWSEGMENT:
+ {
+ gboolean update;
+ double rate;
+ double applied_rate;
+ GstFormat format;
+ gint64 start;
+ gint64 stop;
+ gint64 position;
+
+ GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
+ gst_event_parse_new_segment_full (event, &update, &rate, &applied_rate,
+ &format, &start, &stop, &position);
+
+ GST_DEBUG_OBJECT (encoder, "newseg rate %g, applied rate %g, "
+ "format %d, start = %" GST_TIME_FORMAT ", stop = %" GST_TIME_FORMAT
+ ", pos = %" GST_TIME_FORMAT, rate, applied_rate, format,
+ GST_TIME_ARGS (start), GST_TIME_ARGS (stop),
+ GST_TIME_ARGS (position));
+
+ if (format != GST_FORMAT_TIME) {
+ GST_DEBUG_OBJECT (encoder, "received non TIME newsegment");
+ GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
+ break;
+ }
+
+ encoder->priv->at_eos = FALSE;
+
+ gst_segment_set_newsegment_full (&encoder->input_segment, update, rate,
+ applied_rate, format, start, stop, position);
+ GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
+ break;
+ }
+ case GST_EVENT_CUSTOM_DOWNSTREAM:
+ {
+ if (gst_video_event_is_force_key_unit (event)) {
+ GstClockTime running_time;
+ gboolean all_headers;
+ guint count;
+
+ if (gst_video_event_parse_downstream_force_key_unit (event,
+ NULL, NULL, &running_time, &all_headers, &count)) {
+ ForcedKeyUnitEvent *fevt;
+
+ GST_OBJECT_LOCK (encoder);
+ fevt = forced_key_unit_event_new (running_time, all_headers, count);
+ encoder->priv->force_key_unit =
+ g_list_append (encoder->priv->force_key_unit, fevt);
+ GST_OBJECT_UNLOCK (encoder);
+
+ GST_DEBUG_OBJECT (encoder,
+ "force-key-unit event: running-time %" GST_TIME_FORMAT
+ ", all_headers %d, count %u",
+ GST_TIME_ARGS (running_time), all_headers, count);
+ }
+ gst_event_unref (event);
+ ret = TRUE;
+ }
+ break;
+ }
+ default:
+ break;
+ }
+
+ return ret;
+}
+
+static gboolean
+gst_video_encoder_sink_event (GstPad * pad, GstEvent * event)
+{
+ GstVideoEncoder *enc;
+ GstVideoEncoderClass *klass;
+ gboolean handled = FALSE;
+ gboolean ret = TRUE;
+
+ enc = GST_VIDEO_ENCODER (gst_pad_get_parent (pad));
+ klass = GST_VIDEO_ENCODER_GET_CLASS (enc);
+
+ GST_DEBUG_OBJECT (enc, "received event %d, %s", GST_EVENT_TYPE (event),
+ GST_EVENT_TYPE_NAME (event));
+
+ if (klass->sink_event)
+ handled = klass->sink_event (enc, event);
+
+ if (!handled)
+ handled = gst_video_encoder_sink_eventfunc (enc, event);
+
+ if (!handled) {
+ /* Forward non-serialized events and EOS/FLUSH_STOP immediately.
+ * For EOS this is required because no buffer or serialized event
+ * will come after EOS and nothing could trigger another
+ * _finish_frame() call.
+ * If the subclass handles sending of EOS manually it can return
+ * _DROPPED from ::finish() and all other subclasses should have
+ * decoded/flushed all remaining data before this
+ *
+ * For FLUSH_STOP this is required because it is expected
+ * to be forwarded immediately and no buffers are queued anyway.
+ */
+ if (!GST_EVENT_IS_SERIALIZED (event)
+ || GST_EVENT_TYPE (event) == GST_EVENT_EOS
+ || GST_EVENT_TYPE (event) == GST_EVENT_FLUSH_STOP) {
+ ret = gst_video_encoder_push_event (enc, event);
+ } else {
+ GST_VIDEO_ENCODER_STREAM_LOCK (enc);
+ enc->priv->current_frame_events =
+ g_list_prepend (enc->priv->current_frame_events, event);
+ GST_VIDEO_ENCODER_STREAM_UNLOCK (enc);
+ ret = TRUE;
+ }
+ }
+
+ GST_DEBUG_OBJECT (enc, "event handled");
+
+ gst_object_unref (enc);
+ return ret;
+}
+
+static gboolean
+gst_video_encoder_src_eventfunc (GstVideoEncoder * encoder, GstEvent * event)
+{
+ gboolean handled = FALSE;
+
+ switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_CUSTOM_UPSTREAM:
+ {
+ if (gst_video_event_is_force_key_unit (event)) {
+ GstClockTime running_time;
+ gboolean all_headers;
+ guint count;
+
+ if (gst_video_event_parse_upstream_force_key_unit (event,
+ &running_time, &all_headers, &count)) {
+ ForcedKeyUnitEvent *fevt;
+
+ GST_OBJECT_LOCK (encoder);
+ fevt = forced_key_unit_event_new (running_time, all_headers, count);
+ encoder->priv->force_key_unit =
+ g_list_append (encoder->priv->force_key_unit, fevt);
+ GST_OBJECT_UNLOCK (encoder);
+
+ GST_DEBUG_OBJECT (encoder,
+ "force-key-unit event: running-time %" GST_TIME_FORMAT
+ ", all_headers %d, count %u",
+ GST_TIME_ARGS (running_time), all_headers, count);
+ }
+ gst_event_unref (event);
+ handled = TRUE;
+ }
+ break;
+ }
+ default:
+ break;
+ }
+
+ return handled;
+}
+
+/* GstPad event function for the src pad.  Gives the subclass'
+ * ::src_event vmethod first chance at the event, then the default
+ * handler above, and finally falls back to gst_pad_event_default().
+ * A handler returning TRUE has taken ownership of the event.
+ * NOTE(review): when the event is handled internally, ret stays FALSE
+ * and that is what gets returned to the peer -- confirm this matches
+ * the intended pad-return semantics. */
+static gboolean
+gst_video_encoder_src_event (GstPad * pad, GstEvent * event)
+{
+  GstVideoEncoder *encoder;
+  GstVideoEncoderClass *klass;
+  gboolean ret = FALSE;
+  gboolean handled = FALSE;
+
+  encoder = GST_VIDEO_ENCODER (gst_pad_get_parent (pad));
+  klass = GST_VIDEO_ENCODER_GET_CLASS (encoder);
+
+  GST_LOG_OBJECT (encoder, "handling event: %" GST_PTR_FORMAT, event);
+
+  if (klass->src_event)
+    handled = klass->src_event (encoder, event);
+
+  if (!handled)
+    handled = gst_video_encoder_src_eventfunc (encoder, event);
+
+  if (!handled)
+    ret = gst_pad_event_default (pad, event);
+
+  gst_object_unref (encoder);
+
+  return ret;
+}
+
+/* Lists the query types the src pad can answer (CONVERT and LATENCY,
+ * zero-terminated).  The unused @pad argument is imposed by the
+ * GstPadQueryTypeFunction signature. */
+static const GstQueryType *
+gst_video_encoder_get_query_types (GstPad * pad)
+{
+  static const GstQueryType query_types[] = {
+    GST_QUERY_CONVERT,
+    GST_QUERY_LATENCY,
+    0
+  };
+
+  return query_types;
+}
+
+/* GstPad query function for the src pad.
+ *
+ * CONVERT queries are answered from the accumulated encoded-byte and
+ * running-time totals; LATENCY queries are forwarded upstream via the
+ * sink pad's peer and the encoder's own min/max latency is added to
+ * the peer's answer.  Everything else goes to the default handler.
+ * NOTE(review): peerpad is NULL when the sink pad is unlinked, in
+ * which case gst_pad_query()/gst_object_unref() would warn -- confirm
+ * a linked sink pad is guaranteed here. */
+static gboolean
+gst_video_encoder_src_query (GstPad * pad, GstQuery * query)
+{
+  GstVideoEncoderPrivate *priv;
+  GstVideoEncoder *enc;
+  gboolean res;
+  GstPad *peerpad;
+
+  enc = GST_VIDEO_ENCODER (gst_pad_get_parent (pad));
+  priv = enc->priv;
+  peerpad = gst_pad_get_peer (enc->sinkpad);
+
+  GST_LOG_OBJECT (enc, "handling query: %" GST_PTR_FORMAT, query);
+
+  switch (GST_QUERY_TYPE (query)) {
+    case GST_QUERY_CONVERT:
+    {
+      GstFormat src_fmt, dest_fmt;
+      gint64 src_val, dest_val;
+
+      gst_query_parse_convert (query, &src_fmt, &src_val, &dest_fmt, &dest_val);
+      /* Conversion is estimated from the running totals of pushed
+       * bytes and consumed time. */
+      res =
+          gst_video_encoded_video_convert (priv->bytes, priv->time, src_fmt,
+          src_val, &dest_fmt, &dest_val);
+      if (!res)
+        goto error;
+      gst_query_set_convert (query, src_fmt, src_val, dest_fmt, dest_val);
+      break;
+    }
+    case GST_QUERY_LATENCY:
+    {
+      gboolean live;
+      GstClockTime min_latency, max_latency;
+
+      res = gst_pad_query (peerpad, query);
+      if (res) {
+        gst_query_parse_latency (query, &live, &min_latency, &max_latency);
+        GST_DEBUG_OBJECT (enc, "Peer latency: live %d, min %"
+            GST_TIME_FORMAT " max %" GST_TIME_FORMAT, live,
+            GST_TIME_ARGS (min_latency), GST_TIME_ARGS (max_latency));
+
+        /* OBJECT_LOCK guards min/max_latency, which are written by
+         * gst_video_encoder_set_latency(). */
+        GST_OBJECT_LOCK (enc);
+        min_latency += priv->min_latency;
+        /* An unbounded encoder max latency makes the total unbounded. */
+        if (enc->priv->max_latency == GST_CLOCK_TIME_NONE) {
+          max_latency = GST_CLOCK_TIME_NONE;
+        } else if (max_latency != GST_CLOCK_TIME_NONE) {
+          max_latency += enc->priv->max_latency;
+        }
+        GST_OBJECT_UNLOCK (enc);
+
+        gst_query_set_latency (query, live, min_latency, max_latency);
+      }
+    }
+      break;
+    default:
+      res = gst_pad_query_default (pad, query);
+  }
+  gst_object_unref (peerpad);
+  gst_object_unref (enc);
+  return res;
+
+error:
+  GST_DEBUG_OBJECT (enc, "query failed");
+  gst_object_unref (peerpad);
+  gst_object_unref (enc);
+  return res;
+}
+
+/* Allocates and initialises a GstVideoCodecFrame for input buffer @buf.
+ *
+ * Takes ownership of @buf.  Events collected since the previous frame
+ * are transferred to the new frame (pushed downstream when the frame
+ * is finished), frame numbers are assigned under the stream lock and,
+ * for interlaced input, the TFF/RFF/ONEFIELD buffer flags are mirrored
+ * onto the frame flags.  Returns a frame with a single reference. */
+static GstVideoCodecFrame *
+gst_video_encoder_new_frame (GstVideoEncoder * encoder, GstBuffer * buf,
+    GstClockTime timestamp, GstClockTime duration)
+{
+  GstVideoEncoderPrivate *priv = encoder->priv;
+  GstVideoCodecFrame *frame;
+
+  frame = g_slice_new0 (GstVideoCodecFrame);
+
+  frame->ref_count = 1;
+
+  /* Frame counters are shared state, protected by the stream lock. */
+  GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
+  frame->system_frame_number = priv->system_frame_number;
+  priv->system_frame_number++;
+
+  frame->presentation_frame_number = priv->presentation_frame_number;
+  priv->presentation_frame_number++;
+  GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
+
+  /* Transfer ownership of the pending serialized events to the frame. */
+  frame->events = priv->current_frame_events;
+  priv->current_frame_events = NULL;
+  frame->input_buffer = buf;
+  frame->pts = timestamp;
+  frame->duration = duration;
+
+  /* Mirror the interlacing buffer flags onto the frame. */
+  if (GST_VIDEO_INFO_IS_INTERLACED (&encoder->priv->input_state->info)) {
+    if (GST_BUFFER_FLAG_IS_SET (buf, GST_VIDEO_BUFFER_TFF)) {
+      GST_VIDEO_CODEC_FRAME_FLAG_SET (frame, GST_VIDEO_CODEC_FRAME_FLAG_TFF);
+    } else {
+      GST_VIDEO_CODEC_FRAME_FLAG_UNSET (frame, GST_VIDEO_CODEC_FRAME_FLAG_TFF);
+    }
+    if (GST_BUFFER_FLAG_IS_SET (buf, GST_VIDEO_BUFFER_RFF)) {
+      GST_VIDEO_CODEC_FRAME_FLAG_SET (frame, GST_VIDEO_CODEC_FRAME_FLAG_RFF);
+    } else {
+      GST_VIDEO_CODEC_FRAME_FLAG_UNSET (frame, GST_VIDEO_CODEC_FRAME_FLAG_RFF);
+    }
+    if (GST_BUFFER_FLAG_IS_SET (buf, GST_VIDEO_BUFFER_ONEFIELD)) {
+      GST_VIDEO_CODEC_FRAME_FLAG_SET (frame,
+          GST_VIDEO_CODEC_FRAME_FLAG_ONEFIELD);
+    } else {
+      GST_VIDEO_CODEC_FRAME_FLAG_UNSET (frame,
+          GST_VIDEO_CODEC_FRAME_FLAG_ONEFIELD);
+    }
+  }
+
+  return frame;
+}
+
+
+/* GstPad chain function for the sink pad.
+ *
+ * Wraps the input buffer in a new GstVideoCodecFrame -- clipped to the
+ * input segment and tagged with any matching pending force-key-unit
+ * request -- appends it to the pending-frames list and hands it to the
+ * subclass ::handle_frame implementation.  Takes ownership of @buf. */
+static GstFlowReturn
+gst_video_encoder_chain (GstPad * pad, GstBuffer * buf)
+{
+  GstVideoEncoder *encoder;
+  GstVideoEncoderPrivate *priv;
+  GstVideoEncoderClass *klass;
+  GstVideoCodecFrame *frame;
+  GstFlowReturn ret = GST_FLOW_OK;
+  gint64 start, stop = GST_CLOCK_TIME_NONE, cstart, cstop;
+
+  encoder = GST_VIDEO_ENCODER (gst_pad_get_parent (pad));
+  priv = encoder->priv;
+  klass = GST_VIDEO_ENCODER_GET_CLASS (encoder);
+
+  g_return_val_if_fail (klass->handle_frame != NULL, GST_FLOW_ERROR);
+
+  GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
+
+  /* Without negotiated sink caps we cannot encode anything yet. */
+  if (!GST_PAD_CAPS (pad)) {
+    ret = GST_FLOW_NOT_NEGOTIATED;
+    goto done;
+  }
+
+  start = GST_BUFFER_TIMESTAMP (buf);
+  if (GST_CLOCK_TIME_IS_VALID (GST_BUFFER_DURATION (buf)))
+    stop = start + GST_BUFFER_DURATION (buf);
+
+  GST_LOG_OBJECT (encoder,
+      "received buffer of size %d with ts %" GST_TIME_FORMAT
+      ", duration %" GST_TIME_FORMAT, GST_BUFFER_SIZE (buf),
+      GST_TIME_ARGS (start), GST_TIME_ARGS (GST_BUFFER_DURATION (buf)));
+
+  /* No more data is accepted once EOS has been seen. */
+  if (priv->at_eos) {
+    ret = GST_FLOW_UNEXPECTED;
+    goto done;
+  }
+
+  /* Drop buffers outside of segment */
+  if (!gst_segment_clip (&encoder->input_segment,
+          GST_FORMAT_TIME, start, stop, &cstart, &cstop)) {
+    GST_DEBUG_OBJECT (encoder, "clipping to segment dropped frame");
+    /* silently dropped: ret stays GST_FLOW_OK */
+    gst_buffer_unref (buf);
+    goto done;
+  }
+
+  /* The frame takes ownership of @buf and of the pending events. */
+  frame = gst_video_encoder_new_frame (encoder, buf, cstart, cstop - cstart);
+
+  /* Check whether a queued force-key-unit request applies to this frame. */
+  GST_OBJECT_LOCK (encoder);
+  if (priv->force_key_unit) {
+    ForcedKeyUnitEvent *fevt = NULL;
+    GstClockTime running_time;
+    GList *l;
+
+    /* NOTE(review): computed against output_segment for an *input*
+     * buffer timestamp -- verify input_segment was not intended. */
+    running_time =
+        gst_segment_to_running_time (&encoder->output_segment, GST_FORMAT_TIME,
+        GST_BUFFER_TIMESTAMP (buf));
+
+    for (l = priv->force_key_unit; l; l = l->next) {
+      ForcedKeyUnitEvent *tmp = l->data;
+
+      /* Skip pending keyunits */
+      if (tmp->pending)
+        continue;
+
+      /* Simple case, keyunit ASAP */
+      if (tmp->running_time == GST_CLOCK_TIME_NONE) {
+        fevt = tmp;
+        break;
+      }
+
+      /* Event for before this frame */
+      if (tmp->running_time <= running_time) {
+        fevt = tmp;
+        break;
+      }
+    }
+
+    if (fevt) {
+      GST_DEBUG_OBJECT (encoder,
+          "Forcing a key unit at running time %" GST_TIME_FORMAT,
+          GST_TIME_ARGS (running_time));
+      GST_VIDEO_CODEC_FRAME_SET_FORCE_KEYFRAME (frame);
+      if (fevt->all_headers)
+        GST_VIDEO_CODEC_FRAME_SET_FORCE_KEYFRAME_HEADERS (frame);
+      /* Marked pending; resolved in finish_frame on the next keyframe. */
+      fevt->pending = TRUE;
+    }
+  }
+  GST_OBJECT_UNLOCK (encoder);
+
+  priv->frames = g_list_append (priv->frames, frame);
+
+  /* new data, more finish needed */
+  priv->drained = FALSE;
+
+  GST_LOG_OBJECT (encoder, "passing frame pfn %d to subclass",
+      frame->presentation_frame_number);
+
+  /* Extra ref for the subclass; the frames list keeps the other one. */
+  gst_video_codec_frame_ref (frame);
+  ret = klass->handle_frame (encoder, frame);
+
+done:
+  GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
+
+  gst_object_unref (encoder);
+
+  return ret;
+}
+
+/* GstElement::change_state implementation.
+ *
+ * Invokes the optional subclass ::open/::start vmethods on the way up
+ * and resets the encoder and calls ::stop/::close on the way down.
+ * Any vmethod failure is reported as an element error and turned into
+ * GST_STATE_CHANGE_FAILURE. */
+static GstStateChangeReturn
+gst_video_encoder_change_state (GstElement * element, GstStateChange transition)
+{
+  GstVideoEncoder *encoder;
+  GstVideoEncoderClass *encoder_class;
+  GstStateChangeReturn ret;
+
+  encoder = GST_VIDEO_ENCODER (element);
+  encoder_class = GST_VIDEO_ENCODER_GET_CLASS (element);
+
+  /* Upward transitions are handled before chaining up to the parent. */
+  switch (transition) {
+    case GST_STATE_CHANGE_NULL_TO_READY:
+      /* open device/library if needed */
+      if (encoder_class->open && !encoder_class->open (encoder))
+        goto open_failed;
+      break;
+    case GST_STATE_CHANGE_READY_TO_PAUSED:
+      /* Initialize device/library if needed */
+      if (encoder_class->start && !encoder_class->start (encoder))
+        goto start_failed;
+      break;
+    default:
+      break;
+  }
+
+  ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
+
+  /* Downward transitions are handled after the parent class. */
+  switch (transition) {
+    case GST_STATE_CHANGE_PAUSED_TO_READY:
+      gst_video_encoder_reset (encoder);
+      if (encoder_class->stop && !encoder_class->stop (encoder))
+        goto stop_failed;
+      break;
+    case GST_STATE_CHANGE_READY_TO_NULL:
+      /* close device/library if needed */
+      if (encoder_class->close && !encoder_class->close (encoder))
+        goto close_failed;
+      break;
+    default:
+      break;
+  }
+
+  return ret;
+
+  /* Errors */
+
+open_failed:
+  {
+    GST_ELEMENT_ERROR (encoder, LIBRARY, INIT, (NULL),
+        ("Failed to open encoder"));
+    return GST_STATE_CHANGE_FAILURE;
+  }
+
+start_failed:
+  {
+    GST_ELEMENT_ERROR (encoder, LIBRARY, INIT, (NULL),
+        ("Failed to start encoder"));
+    return GST_STATE_CHANGE_FAILURE;
+  }
+
+stop_failed:
+  {
+    GST_ELEMENT_ERROR (encoder, LIBRARY, INIT, (NULL),
+        ("Failed to stop encoder"));
+    return GST_STATE_CHANGE_FAILURE;
+  }
+
+close_failed:
+  {
+    GST_ELEMENT_ERROR (encoder, LIBRARY, INIT, (NULL),
+        ("Failed to close encoder"));
+    return GST_STATE_CHANGE_FAILURE;
+  }
+}
+
+/* Pushes the currently configured output state onto the src pad.
+ *
+ * When the output state changed since the last call, the state's caps
+ * are first made writable and completed with resolution, pixel aspect
+ * ratio, framerate (or 0/1 + max-framerate for variable-fps streams)
+ * and codec_data taken from the state's GstVideoInfo.
+ * Returns the result of gst_pad_set_caps(). */
+static gboolean
+gst_video_encoder_set_src_caps (GstVideoEncoder * encoder)
+{
+  gboolean ret;
+  GstVideoCodecState *state = encoder->priv->output_state;
+  GstVideoInfo *info = &state->info;
+
+  g_return_val_if_fail (state->caps != NULL, FALSE);
+
+  if (encoder->priv->output_state_changed) {
+    state->caps = gst_caps_make_writable (state->caps);
+
+    /* Fill caps */
+    gst_caps_set_simple (state->caps, "width", G_TYPE_INT, info->width,
+        "height", G_TYPE_INT, info->height,
+        "pixel-aspect-ratio", GST_TYPE_FRACTION,
+        info->par_n, info->par_d, NULL);
+    if (info->flags & GST_VIDEO_FLAG_VARIABLE_FPS && info->fps_n != 0) {
+      /* variable fps with a max-framerate */
+      gst_caps_set_simple (state->caps, "framerate", GST_TYPE_FRACTION, 0, 1,
+          "max-framerate", GST_TYPE_FRACTION, info->fps_n, info->fps_d, NULL);
+    } else {
+      /* no variable fps or no max-framerate */
+      gst_caps_set_simple (state->caps, "framerate", GST_TYPE_FRACTION,
+          info->fps_n, info->fps_d, NULL);
+    }
+    if (state->codec_data)
+      gst_caps_set_simple (state->caps, "codec_data", GST_TYPE_BUFFER,
+          state->codec_data, NULL);
+    encoder->priv->output_state_changed = FALSE;
+  }
+
+  ret = gst_pad_set_caps (encoder->srcpad, state->caps);
+
+  return ret;
+}
+
+/**
+ * gst_video_encoder_finish_frame:
+ * @encoder: a #GstVideoEncoder
+ * @frame: (transfer full): an encoded #GstVideoCodecFrame
+ *
+ * @frame must have a valid encoded data buffer, whose metadata fields
+ * are then appropriately set according to frame data or no buffer at
+ * all if the frame should be dropped.
+ * It is subsequently pushed downstream or provided to @pre_push.
+ * In any case, the frame is considered finished and released.
+ *
+ * Returns: a #GstFlowReturn resulting from sending data downstream
+ *
+ * Since: 0.10.37
+ */
+GstFlowReturn
+gst_video_encoder_finish_frame (GstVideoEncoder * encoder,
+    GstVideoCodecFrame * frame)
+{
+  GstVideoEncoderPrivate *priv = encoder->priv;
+  GstFlowReturn ret = GST_FLOW_OK;
+  GstVideoEncoderClass *encoder_class;
+  GList *l;
+  gboolean send_headers = FALSE;
+  /* The very first output frame (pfn 0) carries the DISCONT flag,
+   * transferred to the first header buffer if headers are sent. */
+  gboolean discont = (frame->presentation_frame_number == 0);
+
+  encoder_class = GST_VIDEO_ENCODER_GET_CLASS (encoder);
+
+  GST_LOG_OBJECT (encoder,
+      "finish frame fpn %d", frame->presentation_frame_number);
+
+  GST_LOG_OBJECT (encoder, "frame PTS %" GST_TIME_FORMAT
+      ", DTS %" GST_TIME_FORMAT, GST_TIME_ARGS (frame->pts),
+      GST_TIME_ARGS (frame->dts));
+
+  GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
+
+  /* Renegotiate src caps first if the subclass changed the output state. */
+  if (G_UNLIKELY (priv->output_state_changed))
+    gst_video_encoder_set_src_caps (encoder);
+
+  /* NOTE(review): on this error path @frame is neither unreffed nor
+   * removed from priv->frames -- looks like a leak, confirm. */
+  if (G_UNLIKELY (priv->output_state == NULL))
+    goto no_output_state;
+
+  /* Push all pending events that arrived before this frame */
+  for (l = priv->frames; l; l = l->next) {
+    GstVideoCodecFrame *tmp = l->data;
+
+    if (tmp->events) {
+      GList *k;
+
+      /* Events were prepended, so walk from the tail to restore the
+       * original arrival order. */
+      for (k = g_list_last (tmp->events); k; k = k->prev)
+        gst_video_encoder_push_event (encoder, k->data);
+      g_list_free (tmp->events);
+      tmp->events = NULL;
+    }
+
+    if (tmp == frame)
+      break;
+  }
+
+  /* no buffer data means this frame is skipped/dropped */
+  if (!frame->output_buffer) {
+    GST_DEBUG_OBJECT (encoder, "skipping frame %" GST_TIME_FORMAT,
+        GST_TIME_ARGS (frame->pts));
+    goto done;
+  }
+
+  /* A sync point can satisfy a pending forced-key-unit request: emit
+   * the downstream force-key-unit event and drop the request. */
+  if (GST_VIDEO_CODEC_FRAME_IS_SYNC_POINT (frame) && priv->force_key_unit) {
+    GstClockTime stream_time, running_time;
+    GstEvent *ev;
+    ForcedKeyUnitEvent *fevt = NULL;
+    GList *l;
+
+    running_time =
+        gst_segment_to_running_time (&encoder->output_segment, GST_FORMAT_TIME,
+        frame->pts);
+
+    GST_OBJECT_LOCK (encoder);
+    for (l = priv->force_key_unit; l; l = l->next) {
+      ForcedKeyUnitEvent *tmp = l->data;
+
+      /* Skip non-pending keyunits */
+      if (!tmp->pending)
+        continue;
+
+      /* Simple case, keyunit ASAP */
+      if (tmp->running_time == GST_CLOCK_TIME_NONE) {
+        fevt = tmp;
+        break;
+      }
+
+      /* Event for before this frame */
+      if (tmp->running_time <= running_time) {
+        fevt = tmp;
+        break;
+      }
+    }
+
+    if (fevt) {
+      priv->force_key_unit = g_list_remove (priv->force_key_unit, fevt);
+    }
+    GST_OBJECT_UNLOCK (encoder);
+
+    if (fevt) {
+      stream_time =
+          gst_segment_to_stream_time (&encoder->output_segment, GST_FORMAT_TIME,
+          frame->pts);
+
+      ev = gst_video_event_new_downstream_force_key_unit
+          (frame->pts, stream_time, running_time,
+          fevt->all_headers, fevt->count);
+
+      gst_video_encoder_push_event (encoder, ev);
+
+      if (fevt->all_headers)
+        send_headers = TRUE;
+
+      GST_DEBUG_OBJECT (encoder,
+          "Forced key unit: running-time %" GST_TIME_FORMAT
+          ", all_headers %d, count %u",
+          GST_TIME_ARGS (running_time), fevt->all_headers, fevt->count);
+      forced_key_unit_event_free (fevt);
+    }
+  }
+
+  /* Maintain the DELTA_UNIT flag and the keyframe distance counter. */
+  if (GST_VIDEO_CODEC_FRAME_IS_SYNC_POINT (frame)) {
+    priv->distance_from_sync = 0;
+    GST_BUFFER_FLAG_UNSET (frame->output_buffer, GST_BUFFER_FLAG_DELTA_UNIT);
+    /* For keyframes, DTS = PTS */
+    if (!GST_CLOCK_TIME_IS_VALID (frame->dts)) {
+      frame->dts = frame->pts;
+    } else if (GST_CLOCK_TIME_IS_VALID (frame->pts) && frame->pts != frame->dts) {
+      GST_WARNING_OBJECT (encoder, "keyframe PTS != DTS");
+    }
+  } else {
+    GST_BUFFER_FLAG_SET (frame->output_buffer, GST_BUFFER_FLAG_DELTA_UNIT);
+  }
+
+  frame->distance_from_sync = priv->distance_from_sync;
+  priv->distance_from_sync++;
+
+  GST_BUFFER_TIMESTAMP (frame->output_buffer) = frame->pts;
+  GST_BUFFER_DURATION (frame->output_buffer) = frame->duration;
+
+  /* update rate estimate */
+  priv->bytes += GST_BUFFER_SIZE (frame->output_buffer);
+  if (GST_CLOCK_TIME_IS_VALID (frame->duration)) {
+    priv->time += frame->duration;
+  } else {
+    /* better none than nothing valid */
+    priv->time = GST_CLOCK_TIME_NONE;
+  }
+
+  /* Push the stream headers before the frame when requested. */
+  if (G_UNLIKELY (send_headers || priv->new_headers)) {
+    GList *tmp, *copy = NULL;
+
+    GST_DEBUG_OBJECT (encoder, "Sending headers");
+
+    /* First make all buffers metadata-writable */
+    for (tmp = priv->headers; tmp; tmp = tmp->next) {
+      GstBuffer *tmpbuf = GST_BUFFER (tmp->data);
+
+      copy = g_list_append (copy, gst_buffer_make_metadata_writable (tmpbuf));
+    }
+    g_list_free (priv->headers);
+    priv->headers = copy;
+
+    for (tmp = priv->headers; tmp; tmp = tmp->next) {
+      GstBuffer *tmpbuf = GST_BUFFER (tmp->data);
+
+      gst_buffer_set_caps (tmpbuf, GST_PAD_CAPS (encoder->srcpad));
+      /* extra ref: the headers list keeps its reference while
+       * gst_pad_push() consumes one */
+      gst_buffer_ref (tmpbuf);
+      priv->bytes += GST_BUFFER_SIZE (tmpbuf);
+      if (G_UNLIKELY (discont)) {
+        GST_LOG_OBJECT (encoder, "marking discont");
+        GST_BUFFER_FLAG_SET (tmpbuf, GST_BUFFER_FLAG_DISCONT);
+        discont = FALSE;
+      }
+
+      gst_pad_push (encoder->srcpad, tmpbuf);
+    }
+    priv->new_headers = FALSE;
+  }
+
+  if (G_UNLIKELY (discont)) {
+    GST_LOG_OBJECT (encoder, "marking discont");
+    GST_BUFFER_FLAG_SET (frame->output_buffer, GST_BUFFER_FLAG_DISCONT);
+  }
+
+  gst_buffer_set_caps (GST_BUFFER (frame->output_buffer),
+      GST_PAD_CAPS (encoder->srcpad));
+
+  if (encoder_class->pre_push)
+    ret = encoder_class->pre_push (encoder, frame);
+
+  if (ret == GST_FLOW_OK)
+    ret = gst_pad_push (encoder->srcpad, frame->output_buffer);
+
+  /* buffer ownership was handed downstream (or to pre_push) */
+  frame->output_buffer = NULL;
+
+done:
+  /* handed out */
+
+  /* unref once from the list */
+  l = g_list_find (priv->frames, frame);
+  if (l) {
+    gst_video_codec_frame_unref (frame);
+    priv->frames = g_list_delete_link (priv->frames, l);
+  }
+  /* unref because this function takes ownership */
+  gst_video_codec_frame_unref (frame);
+
+  GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
+
+  return ret;
+
+  /* ERRORS */
+no_output_state:
+  {
+    GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
+    GST_ERROR_OBJECT (encoder, "Output state was not configured");
+    return GST_FLOW_ERROR;
+  }
+}
+
+/**
+ * gst_video_encoder_get_output_state:
+ * @encoder: a #GstVideoEncoder
+ *
+ * Get the current #GstVideoCodecState
+ *
+ * Returns: (transfer full): #GstVideoCodecState describing format of video data.
+ *
+ * Since: 0.10.37
+ */
+GstVideoCodecState *
+gst_video_encoder_get_output_state (GstVideoEncoder * encoder)
+{
+  GstVideoCodecState *state;
+
+  GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
+  /* NOTE(review): assumes an output state has already been set via
+   * gst_video_encoder_set_output_state() -- no NULL check here. */
+  state = gst_video_codec_state_ref (encoder->priv->output_state);
+  GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
+
+  return state;
+}
+
+/**
+ * gst_video_encoder_set_output_state:
+ * @encoder: a #GstVideoEncoder
+ * @caps: (transfer full): the #GstCaps to use for the output
+ * @reference: (allow-none) (transfer none): An optional reference @GstVideoCodecState
+ *
+ * Creates a new #GstVideoCodecState with the specified caps as the output state
+ * for the encoder.
+ * Any previously set output state on @encoder will be replaced by the newly
+ * created one.
+ *
+ * The specified @caps should not contain any resolution, pixel-aspect-ratio,
+ * framerate, codec-data, .... Those should be specified instead in the returned
+ * #GstVideoCodecState.
+ *
+ * If the subclass wishes to copy over existing fields (like pixel aspect ratio,
+ * or framerate) from an existing #GstVideoCodecState, it can be provided as a
+ * @reference.
+ *
+ * If the subclass wishes to override some fields from the output state (like
+ * pixel-aspect-ratio or framerate) it can do so on the returned #GstVideoCodecState.
+ *
+ * The new output state will only take effect (set on pads and buffers) starting
+ * from the next call to #gst_video_encoder_finish_frame().
+ *
+ * Returns: (transfer full): the newly configured output state.
+ *
+ * Since: 0.10.37
+ */
+GstVideoCodecState *
+gst_video_encoder_set_output_state (GstVideoEncoder * encoder, GstCaps * caps,
+    GstVideoCodecState * reference)
+{
+  GstVideoEncoderPrivate *priv = encoder->priv;
+  GstVideoCodecState *state;
+
+  g_return_val_if_fail (caps != NULL, NULL);
+
+  state = _new_output_state (caps, reference);
+
+  GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
+  if (priv->output_state)
+    gst_video_codec_state_unref (priv->output_state);
+  /* Keep one ref in priv, return the other to the caller. */
+  priv->output_state = gst_video_codec_state_ref (state);
+
+  /* Makes finish_frame() renegotiate the src caps before pushing. */
+  priv->output_state_changed = TRUE;
+  GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
+
+  return state;
+}
+
+/**
+ * gst_video_encoder_set_latency:
+ * @encoder: a #GstVideoEncoder
+ * @min_latency: minimum latency
+ * @max_latency: maximum latency
+ *
+ * Informs baseclass of encoding latency.
+ *
+ * Since: 0.10.37
+ */
+void
+gst_video_encoder_set_latency (GstVideoEncoder * encoder,
+    GstClockTime min_latency, GstClockTime max_latency)
+{
+  g_return_if_fail (GST_CLOCK_TIME_IS_VALID (min_latency));
+  /* GST_CLOCK_TIME_NONE (all bits set) also satisfies max >= min,
+   * so an unbounded maximum latency is accepted here. */
+  g_return_if_fail (max_latency >= min_latency);
+
+  /* Same lock the LATENCY query handler takes when reading these. */
+  GST_OBJECT_LOCK (encoder);
+  encoder->priv->min_latency = min_latency;
+  encoder->priv->max_latency = max_latency;
+  GST_OBJECT_UNLOCK (encoder);
+
+  /* Tell the pipeline to redistribute latency. */
+  gst_element_post_message (GST_ELEMENT_CAST (encoder),
+      gst_message_new_latency (GST_OBJECT_CAST (encoder)));
+}
+
+/**
+ * gst_video_encoder_get_latency:
+ * @encoder: a #GstVideoEncoder
+ * @min_latency: (out) (allow-none): the configured minimum latency
+ * @max_latency: (out) (allow-none): the configured maximum latency
+ *
+ * Returns the configured encoding latency.
+ *
+ * Since: 0.10.37
+ */
+void
+gst_video_encoder_get_latency (GstVideoEncoder * encoder,
+    GstClockTime * min_latency, GstClockTime * max_latency)
+{
+  /* OBJECT_LOCK pairs with gst_video_encoder_set_latency(). */
+  GST_OBJECT_LOCK (encoder);
+  if (min_latency)
+    *min_latency = encoder->priv->min_latency;
+  if (max_latency)
+    *max_latency = encoder->priv->max_latency;
+  GST_OBJECT_UNLOCK (encoder);
+}
+
+/**
+ * gst_video_encoder_get_oldest_frame:
+ * @encoder: a #GstVideoEncoder
+ *
+ * Get the oldest unfinished pending #GstVideoCodecFrame
+ *
+ * Returns: (transfer full): oldest unfinished pending #GstVideoCodecFrame,
+ *   or %NULL if there is none.
+ *
+ * Since: 0.10.37
+ */
+GstVideoCodecFrame *
+gst_video_encoder_get_oldest_frame (GstVideoEncoder * encoder)
+{
+  GstVideoCodecFrame *frame = NULL;
+
+  GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
+  /* Frames are appended in arrival order, so the head is the oldest. */
+  if (encoder->priv->frames)
+    frame = gst_video_codec_frame_ref (encoder->priv->frames->data);
+  GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
+
+  return (GstVideoCodecFrame *) frame;
+}
+
+/**
+ * gst_video_encoder_get_frame:
+ * @encoder: a #GstVideoEncoder
+ * @frame_number: system_frame_number of a frame
+ *
+ * Get a pending unfinished #GstVideoCodecFrame
+ *
+ * Returns: (transfer full): pending unfinished #GstVideoCodecFrame identified
+ *   by @frame_number, or %NULL if no such frame is pending.
+ *
+ * Since: 0.10.37
+ */
+GstVideoCodecFrame *
+gst_video_encoder_get_frame (GstVideoEncoder * encoder, int frame_number)
+{
+  GList *g;
+  GstVideoCodecFrame *frame = NULL;
+
+  GST_DEBUG_OBJECT (encoder, "frame_number : %d", frame_number);
+
+  /* Linear scan of the pending-frames list under the stream lock. */
+  GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
+  for (g = encoder->priv->frames; g; g = g->next) {
+    GstVideoCodecFrame *tmp = g->data;
+
+    if (tmp->system_frame_number == frame_number) {
+      frame = tmp;
+      gst_video_codec_frame_ref (frame);
+      break;
+    }
+  }
+  GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
+
+  return frame;
+}
+
+/**
+ * gst_video_encoder_get_frames:
+ * @encoder: a #GstVideoEncoder
+ *
+ * Get all pending unfinished #GstVideoCodecFrame
+ *
+ * Returns: (transfer full) (element-type GstVideoCodecFrame): pending unfinished #GstVideoCodecFrame.
+ *
+ * Since: 0.10.37
+ */
+GList *
+gst_video_encoder_get_frames (GstVideoEncoder * encoder)
+{
+  GList *frames;
+
+  /* Shallow-copy the list and take a ref on every frame so the caller
+   * owns both the list and the frame references. */
+  GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
+  frames = g_list_copy (encoder->priv->frames);
+  g_list_foreach (frames, (GFunc) gst_video_codec_frame_ref, NULL);
+  GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
+
+  return frames;
+}
diff --git a/recipes-multimedia/gstreamer/gstreamer-vaapi/gstvideoencoder.h b/recipes-multimedia/gstreamer/gstreamer-vaapi/gstvideoencoder.h
new file mode 100644
index 0000000..9ae3516
--- /dev/null
+++ b/recipes-multimedia/gstreamer/gstreamer-vaapi/gstvideoencoder.h
@@ -0,0 +1,308 @@
+/* GStreamer
+ * Copyright (C) 2008 David Schleef <ds@schleef.org>
+ * Copyright (C) 2011 Mark Nauwelaerts <mark.nauwelaerts@collabora.co.uk>.
+ * Copyright (C) 2011 Nokia Corporation. All rights reserved.
+ * Contact: Stefan Kost <stefan.kost@nokia.com>
+ * Copyright (C) 2012 Collabora Ltd.
+ * Author : Edward Hervey <edward@collabora.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+#ifndef _GST_VIDEO_ENCODER_H_
+#define _GST_VIDEO_ENCODER_H_
+
+#include <gst/video/gstvideoutils.h>
+
+G_BEGIN_DECLS
+
+/* Standard GObject boilerplate for the GstVideoEncoder type. */
+#define GST_TYPE_VIDEO_ENCODER \
+  (gst_video_encoder_get_type())
+#define GST_VIDEO_ENCODER(obj) \
+  (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_VIDEO_ENCODER,GstVideoEncoder))
+#define GST_VIDEO_ENCODER_CLASS(klass) \
+  (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_VIDEO_ENCODER,GstVideoEncoderClass))
+#define GST_VIDEO_ENCODER_GET_CLASS(obj) \
+  (G_TYPE_INSTANCE_GET_CLASS((obj),GST_TYPE_VIDEO_ENCODER,GstVideoEncoderClass))
+#define GST_IS_VIDEO_ENCODER(obj) \
+  (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_VIDEO_ENCODER))
+/* Parameter renamed from 'obj' to 'klass': the macro body expands
+ * 'klass', so the original '(obj)' parameter left 'klass' undefined
+ * at every use site of this macro. */
+#define GST_IS_VIDEO_ENCODER_CLASS(klass) \
+  (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_VIDEO_ENCODER))
+#define GST_VIDEO_ENCODER_CAST(enc) ((GstVideoEncoder*)enc)
+
+/**
+ * GST_VIDEO_ENCODER_SINK_NAME:
+ *
+ * The name of the templates for the sink pad.
+ *
+ * Since: 0.10.37
+ */
+#define GST_VIDEO_ENCODER_SINK_NAME "sink"
+/**
+ * GST_VIDEO_ENCODER_SRC_NAME:
+ *
+ * The name of the templates for the source pad.
+ *
+ * Since: 0.10.37
+ */
+#define GST_VIDEO_ENCODER_SRC_NAME "src"
+
+/**
+ * GST_VIDEO_ENCODER_FLOW_DROPPED:
+ *
+ * Returned when the event/buffer should be dropped.
+ *
+ * Since: 0.10.37
+ */
+#define GST_VIDEO_ENCODER_FLOW_DROPPED GST_FLOW_CUSTOM_SUCCESS_1
+
+/**
+ * GST_VIDEO_ENCODER_SRC_PAD:
+ * @obj: a #GstVideoEncoder
+ *
+ * Gives the pointer to the source #GstPad object of the element.
+ *
+ * Since: 0.10.37
+ */
+#define GST_VIDEO_ENCODER_SRC_PAD(obj) (((GstVideoEncoder *) (obj))->srcpad)
+
+/**
+ * GST_VIDEO_ENCODER_SINK_PAD:
+ * @obj: a #GstVideoEncoder
+ *
+ * Gives the pointer to the sink #GstPad object of the element.
+ *
+ * Since: 0.10.37
+ */
+#define GST_VIDEO_ENCODER_SINK_PAD(obj) (((GstVideoEncoder *) (obj))->sinkpad)
+
+/**
+ * GST_VIDEO_ENCODER_FLOW_NEED_DATA:
+ *
+ * Returned while parsing to indicate more data is needed.
+ *
+ * Since: 0.10.37
+ */
+#define GST_VIDEO_ENCODER_FLOW_NEED_DATA GST_FLOW_CUSTOM_SUCCESS
+
+/* A second, identical definition of GST_VIDEO_ENCODER_FLOW_DROPPED
+ * (a benign but redundant duplicate) was removed here. */
+
+/**
+ * GST_VIDEO_ENCODER_INPUT_SEGMENT:
+ * @obj: a #GstVideoEncoder
+ *
+ * Gives the input segment of the element.
+ *
+ * Since: 0.10.37
+ */
+#define GST_VIDEO_ENCODER_INPUT_SEGMENT(obj) (GST_VIDEO_ENCODER_CAST (obj)->input_segment)
+
+/**
+ * GST_VIDEO_ENCODER_OUTPUT_SEGMENT:
+ * @obj: a #GstVideoEncoder
+ *
+ * Gives the output segment of the element.
+ *
+ * Since: 0.10.37
+ */
+#define GST_VIDEO_ENCODER_OUTPUT_SEGMENT(obj) (GST_VIDEO_ENCODER_CAST (obj)->output_segment)
+
+/**
+ * GST_VIDEO_ENCODER_STREAM_LOCK:
+ * @encoder: video encoder instance
+ *
+ * Obtain a lock to protect the encoder function from concurrent access.
+ *
+ * Since: 0.10.37
+ */
+#define GST_VIDEO_ENCODER_STREAM_LOCK(encoder) g_static_rec_mutex_lock (&GST_VIDEO_ENCODER (encoder)->stream_lock)
+
+/**
+ * GST_VIDEO_ENCODER_STREAM_UNLOCK:
+ * @encoder: video encoder instance
+ *
+ * Release the lock that protects the encoder function from concurrent access.
+ *
+ * Since: 0.10.37
+ */
+#define GST_VIDEO_ENCODER_STREAM_UNLOCK(encoder) g_static_rec_mutex_unlock (&GST_VIDEO_ENCODER (encoder)->stream_lock)
+
+typedef struct _GstVideoEncoder GstVideoEncoder;
+typedef struct _GstVideoEncoderPrivate GstVideoEncoderPrivate;
+typedef struct _GstVideoEncoderClass GstVideoEncoderClass;
+
+/**
+ * GstVideoEncoder:
+ *
+ * The opaque #GstVideoEncoder data structure.
+ *
+ * Since: 0.10.37
+ */
+struct _GstVideoEncoder
+{
+  /*< private >*/
+  GstElement element;
+
+  /*< protected >*/
+  GstPad *sinkpad;
+  GstPad *srcpad;
+
+  /* protects all data processing, i.e. is locked
+   * in the chain function, finish_frame and when
+   * processing serialized events */
+  GStaticRecMutex stream_lock;
+
+  /* MT-protected (with STREAM_LOCK) */
+  GstSegment input_segment;
+  GstSegment output_segment;
+
+  /* instance-private data, defined in gstvideoencoder.c */
+  GstVideoEncoderPrivate *priv;
+
+  /*< private >*/
+  gpointer _gst_reserved[GST_PADDING_LARGE];
+};
+
+/**
+ * GstVideoEncoderClass:
+ * @open: Optional.
+ * Called when the element changes to GST_STATE_READY.
+ * Allows opening external resources. Since: 0.10.37.
+ * @close: Optional.
+ * Called when the element changes to GST_STATE_NULL.
+ * Allows closing external resources. Since: 0.10.37.
+ * @start: Optional.
+ * Called when the element starts processing.
+ * Allows opening external resources.
+ * @stop: Optional.
+ * Called when the element stops processing.
+ * Allows closing external resources.
+ * @set_format: Optional.
+ * Notifies subclass of incoming data format.
+ * GstVideoCodecState fields have already been
+ * set according to provided caps.
+ * @handle_frame: Provides input frame to subclass.
+ * @reset: Optional.
+ * Allows subclass (encoder) to perform post-seek semantics reset.
+ * @finish: Optional.
+ * Called to request subclass to dispatch any pending remaining
+ * data (e.g. at EOS).
+ * @pre_push: Optional.
+ * Allows subclass to push frame downstream in whatever
+ * shape or form it deems appropriate. If not provided,
+ * provided encoded frame data is simply pushed downstream.
+ * @getcaps: Optional.
+ * Allows for a custom sink getcaps implementation (e.g.
+ * for multichannel input specification). If not implemented,
+ * default returns gst_video_encoder_proxy_getcaps
+ * applied to sink template caps.
+ * @sink_event: Optional.
+ * Event handler on the sink pad. This function should return
+ * TRUE if the event was handled and should be discarded
+ * (i.e. not unref'ed).
+ * @src_event: Optional.
+ * Event handler on the source pad. This function should return
+ * TRUE if the event was handled and should be discarded
+ * (i.e. not unref'ed).
+ *
+ * Subclasses can override any of the available virtual methods or not, as
+ * needed. At minimum @handle_frame needs to be overridden, and @set_format
+ * and @getcaps are likely needed as well.
+ *
+ * Since: 0.10.37
+ */
+struct _GstVideoEncoderClass
+{
+  /*< private >*/
+  GstElementClass element_class;
+
+  /*< public >*/
+  /* virtual methods for subclasses */
+  gboolean      (*open)         (GstVideoEncoder *encoder);
+
+  gboolean      (*close)        (GstVideoEncoder *encoder);
+
+  gboolean      (*start)        (GstVideoEncoder *encoder);
+
+  gboolean      (*stop)         (GstVideoEncoder *encoder);
+
+  gboolean      (*set_format)   (GstVideoEncoder *encoder,
+                                 GstVideoCodecState *state);
+
+  GstFlowReturn (*handle_frame) (GstVideoEncoder *encoder,
+                                 GstVideoCodecFrame *frame);
+
+  gboolean      (*reset)        (GstVideoEncoder *encoder,
+                                 gboolean hard);
+
+  GstFlowReturn (*finish)       (GstVideoEncoder *encoder);
+
+  GstFlowReturn (*pre_push)     (GstVideoEncoder *encoder,
+                                 GstVideoCodecFrame *frame);
+
+  GstCaps *     (*getcaps)      (GstVideoEncoder *enc);
+
+  gboolean      (*sink_event)   (GstVideoEncoder *encoder,
+                                 GstEvent *event);
+
+  gboolean      (*src_event)    (GstVideoEncoder *encoder,
+                                 GstEvent *event);
+
+  /*< private >*/
+  gpointer       _gst_reserved[GST_PADDING_LARGE];
+};
+
+GType    gst_video_encoder_get_type (void);
+
+/* Output state (src caps) management */
+GstVideoCodecState* gst_video_encoder_get_output_state (GstVideoEncoder *encoder);
+
+GstVideoCodecState* gst_video_encoder_set_output_state (GstVideoEncoder * encoder,
+                                                        GstCaps * caps,
+                                                        GstVideoCodecState * reference);
+
+/* Pending (unfinished) frame lookup */
+GstVideoCodecFrame* gst_video_encoder_get_frame        (GstVideoEncoder *encoder,
+                                                        int frame_number);
+GstVideoCodecFrame* gst_video_encoder_get_oldest_frame (GstVideoEncoder *encoder);
+
+GList *             gst_video_encoder_get_frames       (GstVideoEncoder *encoder);
+
+/* Hands a finished (encoded or dropped) frame back to the base class */
+GstFlowReturn       gst_video_encoder_finish_frame     (GstVideoEncoder *encoder,
+                                                        GstVideoCodecFrame *frame);
+
+GstCaps *           gst_video_encoder_proxy_getcaps    (GstVideoEncoder * enc,
+                                                        GstCaps * caps);
+void                gst_video_encoder_set_discont      (GstVideoEncoder *encoder);
+gboolean            gst_video_encoder_get_discont      (GstVideoEncoder *encoder);
+
+/* Latency reported to / from the pipeline */
+void                gst_video_encoder_set_latency      (GstVideoEncoder *encoder,
+                                                        GstClockTime min_latency,
+                                                        GstClockTime max_latency);
+void                gst_video_encoder_get_latency      (GstVideoEncoder *encoder,
+                                                        GstClockTime *min_latency,
+                                                        GstClockTime *max_latency);
+
+/* Stream headers to be pushed before the next encoded frame */
+void                gst_video_encoder_set_headers      (GstVideoEncoder *encoder,
+                                                        GList *headers);
+G_END_DECLS
+
+#endif
+
diff --git a/recipes-multimedia/gstreamer/gstreamer-vaapi/install-tests.patch b/recipes-multimedia/gstreamer/gstreamer-vaapi/install-tests.patch
new file mode 100644
index 0000000..9281b04
--- /dev/null
+++ b/recipes-multimedia/gstreamer/gstreamer-vaapi/install-tests.patch
@@ -0,0 +1,31 @@
+Install tests.
+
+Upstream-Status: Inappropriate
+Signed-off-by: Ross Burton <ross.burton@intel.com>
+
+---
+ tests/Makefile.am | 4 ++--
+ 1 file changed, 2 insertions(+), 2 deletions(-)
+
+diff --git a/tests/Makefile.am b/tests/Makefile.am
+index 2e7f191..8f65446 100644
+--- a/tests/Makefile.am
++++ b/tests/Makefile.am
+@@ -1,4 +1,4 @@
+-noinst_PROGRAMS = \
++bin_PROGRAMS = \
+ simple-decoder \
+ test-decode \
+ test-display \
+@@ -9,7 +9,7 @@ noinst_PROGRAMS = \
+ $(NULL)
+
+ if USE_GLX
+-noinst_PROGRAMS += \
++bin_PROGRAMS += \
+ test-textures \
+ $(NULL)
+ endif
+--
+1.7.10.4
+
diff --git a/recipes-multimedia/gstreamer/gstreamer-vaapi_0.4.3.bb b/recipes-multimedia/gstreamer/gstreamer-vaapi_0.4.3.bb
new file mode 100644
index 0000000..93e0fbc
--- /dev/null
+++ b/recipes-multimedia/gstreamer/gstreamer-vaapi_0.4.3.bb
@@ -0,0 +1,24 @@
+SUMMARY = "VA-API support to GStreamer"
+DESCRIPTION = "gstreamer-vaapi consists of a collection of VA-API \
+based plugins for GStreamer and helper libraries: `vaapidecode', \
+`vaapiconvert', and `vaapisink'."
+
+LICENSE = "LGPLv2.1+"
+LIC_FILES_CHKSUM = "file://COPYING.LIB;md5=4fbd65380cdd255951079008b364516c"
+
+DEPENDS = "gstreamer gst-plugins-base gst-plugins-bad libva"
+
+SRC_URI = "http://www.freedesktop.org/software/vaapi/releases/${BPN}/${BPN}-${PV}.tar.bz2"
+SRC_URI[md5sum] = "7b1ede7193bc5a0aca921c490684f7b5"
+SRC_URI[sha256sum] = "68e0598456fe17085f6b8b1ce3da066322cc02c363955fb319776a5404d2b0da"
+
+inherit autotools pkgconfig gtk-doc
+
+PACKAGECONFIG ??= "${@base_contains("DISTRO_FEATURES", "x11", "x11", "", d)} \
+ ${@base_contains("DISTRO_FEATURES", "wayland", "wayland", "", d)}"
+PACKAGECONFIG[x11] = "--enable-x11,--disable-x11,virtual/libx11 libxrandr"
+PACKAGECONFIG[wayland] = "--enable-wayland,--disable-wayland,wayland"
+
+FILES_${PN} += "${libdir}/gstreamer-0.10/*.so"
+FILES_${PN}-dbg += "${libdir}/gstreamer-0.10/.debug"
+FILES_${PN}-dev += "${libdir}/gstreamer-0.10/*.la ${libdir}/gstreamer-0.10/*.a"
diff --git a/recipes-multimedia/libva/libva-intel-driver/0001-Workaround-for-concurrently-playing-VC1-and-H264-vid.patch b/recipes-multimedia/libva/libva-intel-driver/0001-Workaround-for-concurrently-playing-VC1-and-H264-vid.patch
new file mode 100644
index 0000000..e000632
--- /dev/null
+++ b/recipes-multimedia/libva/libva-intel-driver/0001-Workaround-for-concurrently-playing-VC1-and-H264-vid.patch
@@ -0,0 +1,440 @@
+Upstream-Status: Pending
+
+From 43c3fd3ea485a0b9ad12c248a0a94a959ab4d5ee Mon Sep 17 00:00:00 2001
+From: "Xiang, Haihao" <haihao.xiang@intel.com>
+Date: Mon, 29 Oct 2012 10:01:16 +0800
+Subject: [PATCH] Workaround for concurrently playing VC1 and H264 video on SNB
+
+Signed-off-by: Xiang, Haihao <haihao.xiang@intel.com>
+---
+ src/gen6_mfd.c | 379 +++++++++++++++++++++++++++++++++++++++++++++++++++++++-
+ src/gen6_mfd.h | 3 +
+ 2 files changed, 380 insertions(+), 2 deletions(-)
+
+diff --git a/src/gen6_mfd.c b/src/gen6_mfd.c
+index fa2f128..b8c671b 100755
+--- a/src/gen6_mfd.c
++++ b/src/gen6_mfd.c
+@@ -50,6 +50,377 @@ static const uint32_t zigzag_direct[64] = {
+ 53, 60, 61, 54, 47, 55, 62, 63
+ };
+
++/* Workaround for VC1 decoding */
++
++VAStatus
++i965_DestroySurfaces(VADriverContextP ctx,
++ VASurfaceID *surface_list,
++ int num_surfaces);
++VAStatus
++i965_CreateSurfaces(VADriverContextP ctx,
++ int width,
++ int height,
++ int format,
++ int num_surfaces,
++ VASurfaceID *surfaces);
++
++static struct {
++ int width;
++ int height;
++ int mb_count;
++ unsigned char data[32];
++ int data_size;
++ int data_bit_offset;
++
++ unsigned int f_code:16;
++ unsigned int intra_dc_precision:2;
++ unsigned int picture_structure:2;
++ unsigned int top_field_first:1;
++ unsigned int frame_pred_frame_dct:1;
++ unsigned int concealment_motion_vectors:1;
++ unsigned int q_scale_type:1;
++ unsigned int intra_vlc_format:1;
++ unsigned int alternate_scan:1;
++ unsigned int picture_coding_type:1;
++ unsigned int pad0: 5;
++
++ unsigned int quantiser_scale_code;
++
++ unsigned char qm[2][64];
++} gen6_dwa_clip = {
++ width: 32,
++ height: 16,
++ mb_count: 2,
++ data: {
++ 0x00, 0x00, 0x01, 0x01, 0x1b, 0xfb, 0xfd, 0xf8,
++ 0x02, 0x97, 0xef, 0xf8, 0x8b, 0x97, 0xe0, 0x0a,
++ 0x5f, 0xbf, 0xe2, 0x20, 0x00, 0x00, 0x01, 0x00
++ },
++ data_size: 20,
++ data_bit_offset: 38,
++
++ f_code: 0xffff,
++ intra_dc_precision: 0,
++ picture_structure: 3,
++ top_field_first: 0,
++ frame_pred_frame_dct: 1,
++ concealment_motion_vectors: 0,
++ q_scale_type: 0,
++ intra_vlc_format: 0,
++ alternate_scan: 0,
++ picture_coding_type: 1, /* I frame */
++
++ quantiser_scale_code: 3,
++
++ qm: {
++ {
++ 8, 16, 19, 22, 26, 27, 29, 34,
++ 16, 16, 22, 24, 27, 29, 34, 37,
++ 19, 22, 26, 27, 29, 34, 34, 38,
++ 22, 22, 26, 27, 29, 34, 37, 40,
++ 22, 26, 27, 29, 32, 35, 40, 48,
++ 26, 27, 29, 32, 35, 40, 48, 58,
++ 26, 27, 29, 34, 38, 46, 56, 69,
++ 27, 29, 35, 38, 46, 56, 69, 83
++ },
++
++ {
++ 16, 16, 16, 16, 16, 16, 16, 16,
++ 16, 16, 16, 16, 16, 16, 16, 16,
++ 16, 16, 16, 16, 16, 16, 16, 16,
++ 16, 16, 16, 16, 16, 16, 16, 16,
++ 16, 16, 16, 16, 16, 16, 16, 16,
++ 16, 16, 16, 16, 16, 16, 16, 16,
++ 16, 16, 16, 16, 16, 16, 16, 16,
++ 16, 16, 16, 16, 16, 16, 16, 16,
++ }
++ },
++};
++
++static void
++gen6_dwa_init(VADriverContextP ctx,
++ struct gen6_mfd_context *gen6_mfd_context)
++{
++ struct i965_driver_data *i965 = i965_driver_data(ctx);
++ VAStatus status;
++ struct object_surface *obj_surface;
++
++ if (gen6_mfd_context->dwa_surface_id != VA_INVALID_SURFACE)
++ i965_DestroySurfaces(ctx,
++ &gen6_mfd_context->dwa_surface_id,
++ 1);
++
++ status = i965_CreateSurfaces(ctx,
++ gen6_dwa_clip.width,
++ gen6_dwa_clip.height,
++ VA_RT_FORMAT_YUV420,
++ 1,
++ &gen6_mfd_context->dwa_surface_id);
++ assert(status == VA_STATUS_SUCCESS);
++
++ obj_surface = SURFACE(gen6_mfd_context->dwa_surface_id);
++ assert(obj_surface);
++ i965_check_alloc_surface_bo(ctx, obj_surface, 1, VA_FOURCC('N', 'V', '1', '2'), SUBSAMPLE_YUV420);
++
++ if (gen6_mfd_context->dwa_slice_data_bo)
++ dri_bo_unreference(gen6_mfd_context->dwa_slice_data_bo);
++
++ gen6_mfd_context->dwa_slice_data_bo = dri_bo_alloc(i965->intel.bufmgr,
++ "WA data",
++ 0x1000,
++ 0x1000);
++ dri_bo_subdata(gen6_mfd_context->dwa_slice_data_bo,
++ 0,
++ gen6_dwa_clip.data_size,
++ gen6_dwa_clip.data);
++}
++
++static void
++gen6_dwa_pipe_mode_select(VADriverContextP ctx,
++ struct gen6_mfd_context *gen6_mfd_context)
++{
++ struct intel_batchbuffer *batch = gen6_mfd_context->base.batch;
++
++ BEGIN_BCS_BATCH(batch, 4);
++ OUT_BCS_BATCH(batch, MFX_PIPE_MODE_SELECT | (4 - 2));
++ OUT_BCS_BATCH(batch,
++ (MFD_MODE_VLD << 16) | /* VLD mode */
++ (0 << 10) | /* disable Stream-Out */
++ (0 << 9) | /* Post Deblocking Output */
++ (1 << 8) | /* Pre Deblocking Output */
++ (0 << 7) | /* disable TLB prefectch */
++ (0 << 5) | /* not in stitch mode */
++ (MFX_CODEC_DECODE << 4) | /* decoding mode */
++ (MFX_FORMAT_MPEG2 << 0));
++ OUT_BCS_BATCH(batch,
++ (0 << 20) | /* round flag in PB slice */
++ (0 << 19) | /* round flag in Intra8x8 */
++ (0 << 7) | /* expand NOA bus flag */
++ (1 << 6) | /* must be 1 */
++ (0 << 5) | /* disable clock gating for NOA */
++ (0 << 4) | /* terminate if AVC motion and POC table error occurs */
++ (0 << 3) | /* terminate if AVC mbdata error occurs */
++ (0 << 2) | /* terminate if AVC CABAC/CAVLC decode error occurs */
++ (0 << 1) | /* AVC long field motion vector */
++ (0 << 0)); /* always calculate AVC ILDB boundary strength */
++ OUT_BCS_BATCH(batch, 0);
++ ADVANCE_BCS_BATCH(batch);
++}
++
++static void
++gen6_dwa_surface_state(VADriverContextP ctx,
++ struct gen6_mfd_context *gen6_mfd_context)
++{
++ struct intel_batchbuffer *batch = gen6_mfd_context->base.batch;
++ struct i965_driver_data *i965 = i965_driver_data(ctx);
++ struct object_surface *obj_surface = SURFACE(gen6_mfd_context->dwa_surface_id);
++
++ BEGIN_BCS_BATCH(batch, 6);
++ OUT_BCS_BATCH(batch, MFX_SURFACE_STATE | (6 - 2));
++ OUT_BCS_BATCH(batch, 0);
++ OUT_BCS_BATCH(batch,
++ ((obj_surface->orig_width - 1) << 19) |
++ ((obj_surface->orig_height - 1) << 6));
++ OUT_BCS_BATCH(batch,
++ (MFX_SURFACE_PLANAR_420_8 << 28) | /* 420 planar YUV surface */
++ (1 << 27) | /* interleave chroma */
++ (0 << 22) | /* surface object control state, ignored */
++ ((obj_surface->width - 1) << 3) | /* pitch */
++ (0 << 2) | /* must be 0 */
++ (1 << 1) | /* must be tiled */
++ (I965_TILEWALK_YMAJOR << 0)); /* tile walk, must be 1 */
++ OUT_BCS_BATCH(batch,
++ (0 << 16) | /* X offset for U(Cb), must be 0 */
++ (obj_surface->y_cb_offset << 0)); /* Y offset for U(Cb) */
++ OUT_BCS_BATCH(batch,
++ (0 << 16) | /* X offset for V(Cr), must be 0 */
++ (0 << 0)); /* Y offset for V(Cr), must be 0 for video codec */
++ ADVANCE_BCS_BATCH(batch);
++}
++
++static void
++gen6_dwa_pipe_buf_addr_state(VADriverContextP ctx,
++ struct gen6_mfd_context *gen6_mfd_context)
++{
++ struct intel_batchbuffer *batch = gen6_mfd_context->base.batch;
++ struct i965_driver_data *i965 = i965_driver_data(ctx);
++ struct object_surface *obj_surface = SURFACE(gen6_mfd_context->dwa_surface_id);
++ dri_bo *intra_bo;
++ int i;
++
++ intra_bo = dri_bo_alloc(i965->intel.bufmgr,
++ "intra row store",
++ 128 * 64,
++ 0x1000);
++
++ BEGIN_BCS_BATCH(batch, 24);
++ OUT_BCS_BATCH(batch, MFX_PIPE_BUF_ADDR_STATE | (24 - 2));
++ OUT_BCS_RELOC(batch,
++ obj_surface->bo,
++ I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
++ 0);
++
++ OUT_BCS_BATCH(batch, 0); /* post deblocking */
++
++ OUT_BCS_BATCH(batch, 0); /* ignore for decoding */
++ OUT_BCS_BATCH(batch, 0); /* ignore for decoding */
++
++ OUT_BCS_RELOC(batch,
++ intra_bo,
++ I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
++ 0);
++
++ OUT_BCS_BATCH(batch, 0);
++
++ /* DW 7..22 */
++ for (i = 0; i < MAX_GEN_REFERENCE_FRAMES; i++) {
++ OUT_BCS_BATCH(batch, 0);
++ }
++
++ OUT_BCS_BATCH(batch, 0); /* ignore DW23 for decoding */
++ ADVANCE_BCS_BATCH(batch);
++
++ dri_bo_unreference(intra_bo);
++}
++
++static void
++gen6_dwa_bsp_buf_base_addr_state(VADriverContextP ctx,
++ struct gen6_mfd_context *gen6_mfd_context)
++{
++ struct i965_driver_data *i965 = i965_driver_data(ctx);
++ struct intel_batchbuffer *batch = gen6_mfd_context->base.batch;
++ dri_bo *bsd_mpc_bo;
++
++ bsd_mpc_bo = dri_bo_alloc(i965->intel.bufmgr,
++ "bsd mpc row store",
++ 11520, /* 1.5 * 120 * 64 */
++ 0x1000);
++
++ BEGIN_BCS_BATCH(batch, 4);
++ OUT_BCS_BATCH(batch, MFX_BSP_BUF_BASE_ADDR_STATE | (4 - 2));
++
++ OUT_BCS_RELOC(batch,
++ bsd_mpc_bo,
++ I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
++ 0);
++ OUT_BCS_BATCH(batch, 0);
++ OUT_BCS_BATCH(batch, 0);
++ ADVANCE_BCS_BATCH(batch);
++
++ dri_bo_unreference(bsd_mpc_bo);
++}
++
++static void
++gen6_dwa_mpeg2_pic_state(VADriverContextP ctx,
++ struct gen6_mfd_context *gen6_mfd_context)
++
++{
++ struct intel_batchbuffer *batch = gen6_mfd_context->base.batch;
++ unsigned int width_in_mbs = ALIGN(gen6_dwa_clip.width, 16) / 16;
++ unsigned int height_in_mbs = ALIGN(gen6_dwa_clip.height, 16) / 16;
++
++ BEGIN_BCS_BATCH(batch, 4);
++ OUT_BCS_BATCH(batch, MFX_MPEG2_PIC_STATE | (4 - 2));
++ OUT_BCS_BATCH(batch,
++ gen6_dwa_clip.f_code << 16 |
++ gen6_dwa_clip.intra_dc_precision << 14 |
++ gen6_dwa_clip.picture_structure << 12 |
++ gen6_dwa_clip.top_field_first << 11 |
++ gen6_dwa_clip.frame_pred_frame_dct << 10 |
++ gen6_dwa_clip.concealment_motion_vectors << 9 |
++ gen6_dwa_clip.q_scale_type << 8 |
++ gen6_dwa_clip.intra_vlc_format << 7 |
++ gen6_dwa_clip.alternate_scan << 6);
++ OUT_BCS_BATCH(batch,
++ gen6_dwa_clip.picture_coding_type << 9);
++ OUT_BCS_BATCH(batch,
++ height_in_mbs << 16 |
++ width_in_mbs);
++ ADVANCE_BCS_BATCH(batch);
++}
++
++static void
++gen6_dwa_mpeg2_qm_state(VADriverContextP ctx,
++ struct gen6_mfd_context *gen6_mfd_context)
++{
++ struct intel_batchbuffer *batch = gen6_mfd_context->base.batch;
++ int i;
++
++ for (i = 0; i < 2; i++) {
++ BEGIN_BCS_BATCH(batch, 18);
++ OUT_BCS_BATCH(batch, MFX_MPEG2_QM_STATE | (18 - 2));
++ OUT_BCS_BATCH(batch, i);
++ intel_batchbuffer_data(batch, gen6_dwa_clip.qm[i], 64);
++ ADVANCE_BCS_BATCH(batch);
++ }
++}
++
++static void
++gen6_dwa_ind_obj_base_addr_state(VADriverContextP ctx,
++ struct gen6_mfd_context *gen6_mfd_context)
++{
++ struct intel_batchbuffer *batch = gen6_mfd_context->base.batch;
++
++ BEGIN_BCS_BATCH(batch, 11);
++ OUT_BCS_BATCH(batch, MFX_IND_OBJ_BASE_ADDR_STATE | (11 - 2));
++ OUT_BCS_RELOC(batch,
++ gen6_mfd_context->dwa_slice_data_bo,
++ I915_GEM_DOMAIN_INSTRUCTION, 0,
++ 0);
++ OUT_BCS_BATCH(batch, 0);
++ OUT_BCS_BATCH(batch, 0); /* ignore for VLD mode */
++ OUT_BCS_BATCH(batch, 0);
++ OUT_BCS_BATCH(batch, 0); /* ignore for VLD mode */
++ OUT_BCS_BATCH(batch, 0);
++ OUT_BCS_BATCH(batch, 0); /* ignore for VLD mode */
++ OUT_BCS_BATCH(batch, 0);
++ OUT_BCS_BATCH(batch, 0); /* ignore for VLD mode */
++ OUT_BCS_BATCH(batch, 0);
++ ADVANCE_BCS_BATCH(batch);
++}
++
++static void
++gen6_dwa_mpeg2_bsd_object(VADriverContextP ctx,
++ struct gen6_mfd_context *gen6_mfd_context)
++{
++ struct intel_batchbuffer *batch = gen6_mfd_context->base.batch;
++
++ BEGIN_BCS_BATCH(batch, 5);
++ OUT_BCS_BATCH(batch, MFD_MPEG2_BSD_OBJECT | (5 - 2));
++ OUT_BCS_BATCH(batch,
++ gen6_dwa_clip.data_size - (gen6_dwa_clip.data_bit_offset >> 3));
++ OUT_BCS_BATCH(batch, gen6_dwa_clip.data_bit_offset >> 3);
++ OUT_BCS_BATCH(batch,
++ (0 << 24) |
++ (0 << 16) |
++ (gen6_dwa_clip.mb_count << 8) |
++ (1 << 5) |
++ (1 << 3) |
++ (gen6_dwa_clip.data_bit_offset & 0x7));
++ OUT_BCS_BATCH(batch,
++ gen6_dwa_clip.quantiser_scale_code << 24);
++ ADVANCE_BCS_BATCH(batch);
++}
++
++static void
++gen6_mfd_dwa(VADriverContextP ctx,
++ struct gen6_mfd_context *gen6_mfd_context)
++{
++ struct intel_batchbuffer *batch = gen6_mfd_context->base.batch;
++ gen6_dwa_init(ctx, gen6_mfd_context);
++ intel_batchbuffer_emit_mi_flush(batch);
++ gen6_dwa_pipe_mode_select(ctx, gen6_mfd_context);
++ gen6_dwa_surface_state(ctx, gen6_mfd_context);
++ gen6_dwa_pipe_buf_addr_state(ctx, gen6_mfd_context);
++ gen6_dwa_bsp_buf_base_addr_state(ctx, gen6_mfd_context);
++ gen6_dwa_mpeg2_qm_state(ctx, gen6_mfd_context);
++ gen6_dwa_mpeg2_pic_state(ctx, gen6_mfd_context);
++ gen6_dwa_ind_obj_base_addr_state(ctx, gen6_mfd_context);
++ gen6_dwa_mpeg2_bsd_object(ctx, gen6_mfd_context);
++}
++
++/* end of workaround */
++
+ static void
+ gen6_mfd_avc_frame_store_index(VADriverContextP ctx,
+ VAPictureParameterBufferH264 *pic_param,
+@@ -1055,7 +1426,8 @@ gen6_mfd_avc_decode_picture(VADriverContextP ctx,
+ }
+ }
+
+- gen6_mfd_avc_phantom_slice(ctx, pic_param, gen6_mfd_context);
++ gen6_mfd_dwa(ctx, gen6_mfd_context);
++
+ intel_batchbuffer_end_atomic(batch);
+ intel_batchbuffer_flush(batch);
+ }
+@@ -1944,6 +2316,8 @@ gen6_mfd_vc1_decode_picture(VADriverContextP ctx,
+ }
+ }
+
++ gen6_mfd_dwa(ctx, gen6_mfd_context);
++
+ intel_batchbuffer_end_atomic(batch);
+ intel_batchbuffer_flush(batch);
+ }
+@@ -2031,6 +2405,7 @@ gen6_dec_hw_context_init(VADriverContextP ctx, VAProfile profile)
+ }
+
+ gen6_mfd_context->wa_mpeg2_slice_vertical_position = -1;
+-
++ gen6_mfd_context->dwa_surface_id = VA_INVALID_ID;
++
+ return (struct hw_context *)gen6_mfd_context;
+ }
+diff --git a/src/gen6_mfd.h b/src/gen6_mfd.h
+index de131d6..7c4a619 100644
+--- a/src/gen6_mfd.h
++++ b/src/gen6_mfd.h
+@@ -72,6 +72,9 @@ struct gen6_mfd_context
+ GenBuffer bitplane_read_buffer;
+
+ int wa_mpeg2_slice_vertical_position;
++
++ VASurfaceID dwa_surface_id;
++ dri_bo *dwa_slice_data_bo;
+ };
+
+ #endif /* _GEN6_MFD_H_ */
+--
+1.7.9.5
+
diff --git a/recipes-multimedia/libva/libva-intel-driver/wayland-include.patch b/recipes-multimedia/libva/libva-intel-driver/wayland-include.patch
new file mode 100644
index 0000000..4a46773
--- /dev/null
+++ b/recipes-multimedia/libva/libva-intel-driver/wayland-include.patch
@@ -0,0 +1,30 @@
+Fix a compile error when building without X11.
+
+Upstream-Status: Submitted
+Signed-off-by: Ross Burton <ross.burton@intel.com>
+
+From a5c44e48dc73b2892f161bd21591c7ee0a3b7842 Mon Sep 17 00:00:00 2001
+From: Ross Burton <ross.burton@intel.com>
+Date: Mon, 21 Oct 2013 16:25:18 +0100
+Subject: [PATCH] i965_output_wayland: add missing include
+
+Signed-off-by: Ross Burton <ross.burton@intel.com>
+---
+ src/i965_output_wayland.h | 1 +
+ 1 file changed, 1 insertion(+)
+
+diff --git a/src/i965_output_wayland.h b/src/i965_output_wayland.h
+index 61ca39f..acea8d9 100644
+--- a/src/i965_output_wayland.h
++++ b/src/i965_output_wayland.h
+@@ -26,6 +26,7 @@
+ #define I965_OUTPUT_WAYLAND_H
+
+ #include <stdbool.h>
++#include <va/va_backend.h>
+
+ bool
+ i965_output_wayland_init(VADriverContextP ctx);
+--
+1.7.10.4
+
diff --git a/recipes-multimedia/libva/libva-intel-driver_1.3.2.bb b/recipes-multimedia/libva/libva-intel-driver_1.3.2.bb
new file mode 100644
index 0000000..9a44669
--- /dev/null
+++ b/recipes-multimedia/libva/libva-intel-driver_1.3.2.bb
@@ -0,0 +1,31 @@
+SUMMARY = "VA driver for Intel G45 & HD Graphics family"
+DESCRIPTION = "libva-driver-intel is the VA-API implementation \
+for Intel G45 chipsets and Intel HD Graphics for Intel Core \
+processor family."
+
+HOMEPAGE = "http://www.freedesktop.org/wiki/Software/vaapi"
+BUGTRACKER = "https://bugs.freedesktop.org"
+
+LICENSE = "MIT"
+LIC_FILES_CHKSUM = "file://COPYING;md5=2e48940f94acb0af582e5ef03537800f"
+
+COMPATIBLE_HOST = '(i.86|x86_64).*-linux'
+
+DEPENDS = "libva libdrm"
+
+SRC_URI = "http://www.freedesktop.org/software/vaapi/releases/${BPN}/${BPN}-${PV}.tar.bz2"
+SRC_URI += "file://wayland-include.patch"
+
+SRC_URI[md5sum] = "3f4f08f1d42ee451b2fb9c239ee0b8d7"
+SRC_URI[sha256sum] = "11d956ce11cb85bb009c8eb1ebb768403da2cb82675a6d66649f9c49ef4908d1"
+
+inherit autotools pkgconfig
+
+PACKAGECONFIG ??= "${@base_contains("DISTRO_FEATURES", "x11", "x11", "", d)} \
+ ${@base_contains("DISTRO_FEATURES", "opengl wayland", "wayland", "", d)}"
+PACKAGECONFIG[x11] = "--enable-x11,--disable-x11"
+PACKAGECONFIG[wayland] = "--enable-wayland,--disable-wayland,wayland virtual/egl"
+
+FILES_${PN} += "${libdir}/dri/*.so"
+FILES_${PN}-dev += "${libdir}/dri/*.la"
+FILES_${PN}-dbg += "${libdir}/dri/.debug"
diff --git a/recipes-multimedia/libva/libva.inc b/recipes-multimedia/libva/libva.inc
new file mode 100644
index 0000000..fd8ef97
--- /dev/null
+++ b/recipes-multimedia/libva/libva.inc
@@ -0,0 +1,44 @@
+SUMMARY = "Video Acceleration (VA) API for Linux"
+DESCRIPTION = "Video Acceleration API (VA API) is a library (libVA) \
+and API specification which enables and provides access to graphics \
+hardware (GPU) acceleration for video processing on Linux and UNIX \
+based operating systems. Accelerated processing includes video \
+decoding, video encoding, subpicture blending and rendering. The \
+specification was originally designed by Intel for its GMA (Graphics \
+Media Accelerator) series of GPU hardware, the API is however not \
+limited to GPUs or Intel specific hardware, as other hardware and \
+manufacturers can also freely use this API for hardware accelerated \
+video decoding."
+
+HOMEPAGE = "http://www.freedesktop.org/wiki/Software/vaapi"
+BUGTRACKER = "https://bugs.freedesktop.org"
+
+SECTION = "x11"
+LICENSE = "MIT"
+LIC_FILES_CHKSUM = "file://COPYING;md5=2e48940f94acb0af582e5ef03537800f"
+
+DEPENDS = "libdrm virtual/mesa virtual/libgles1 virtual/libgles2 virtual/egl"
+
+inherit autotools-brokensep pkgconfig
+
+EXTRA_OECONF = "--disable-dummy-driver"
+
+PACKAGECONFIG ??= "${@base_contains("DISTRO_FEATURES", "x11", "x11", "", d)} \
+ ${@base_contains("DISTRO_FEATURES", "wayland", "wayland", "", d)}"
+PACKAGECONFIG[x11] = "--enable-x11,--disable-x11,virtual/libx11 libxext libxfixes"
+PACKAGECONFIG[wayland] = "--enable-wayland,--disable-wayland,wayland"
+
+PACKAGES =+ "${PN}-x11 ${PN}-tpi ${PN}-glx ${PN}-egl ${PN}-wayland"
+
+RDEPENDS_${PN}-tpi =+ "${PN}"
+RDEPENDS_${PN}-x11 =+ "${PN}"
+RDEPENDS_${PN}-glx =+ "${PN}-x11"
+RDEPENDS_${PN}-egl =+ "${PN}-x11"
+
+FILES_${PN}-dbg += "${libdir}/dri/.debug"
+
+FILES_${PN}-x11 =+ "${libdir}/libva-x11*${SOLIBS}"
+FILES_${PN}-tpi =+ "${libdir}/libva-tpi*${SOLIBS}"
+FILES_${PN}-glx =+ "${libdir}/libva-glx*${SOLIBS}"
+FILES_${PN}-egl =+ "${libdir}/libva-egl*${SOLIBS}"
+FILES_${PN}-wayland =+ "${libdir}/libva-wayland*${SOLIBS}"
diff --git a/recipes-multimedia/libva/libva_1.0.16.bb b/recipes-multimedia/libva/libva_1.0.16.bb
new file mode 100644
index 0000000..98475f2
--- /dev/null
+++ b/recipes-multimedia/libva/libva_1.0.16.bb
@@ -0,0 +1,9 @@
+require libva.inc
+
+PACKAGECONFIG[x11] = ",,virtual/libx11 libxext libxfixes"
+PACKAGECONFIG[wayland] = ",,"
+
+SRC_URI = "http://www.freedesktop.org/software/vaapi/releases/libva/libva-${PV}.tar.bz2"
+
+SRC_URI[md5sum] = "99343b27cf24e99abc0c5db2d09e30c8"
+SRC_URI[sha256sum] = "03e46f8f48f252e6b6112c495745100bc217ddded801fdb393384aab1fafeaa2"
diff --git a/recipes-multimedia/libva/libva_1.3.1.bb b/recipes-multimedia/libva/libva_1.3.1.bb
new file mode 100644
index 0000000..9f80b6d
--- /dev/null
+++ b/recipes-multimedia/libva/libva_1.3.1.bb
@@ -0,0 +1,6 @@
+require libva.inc
+
+SRC_URI = "http://www.freedesktop.org/software/vaapi/releases/libva/${BP}.tar.bz2"
+
+SRC_URI[md5sum] = "eb4db967f068854444b597071c66b480"
+SRC_URI[sha256sum] = "8789b1f1ca9cbc2c1ae477323ec8f5fb269b9360410d4d5e9ec96d679b3cc297"
\ No newline at end of file
diff --git a/recipes-multimedia/libva/va-intel.bb b/recipes-multimedia/libva/va-intel.bb
new file mode 100644
index 0000000..9bc6939
--- /dev/null
+++ b/recipes-multimedia/libva/va-intel.bb
@@ -0,0 +1,28 @@
+DESCRIPTION = "Video Acceleration Add-ons for Intel BSPs"
+LICENSE = "MIT"
+LIC_FILES_CHKSUM = "file://${COREBASE}/LICENSE;md5=4d92cd373abda3937c2bc47fbc49d690 \
+ file://${COREBASE}/meta/COPYING.MIT;md5=3da9cfbcb788c80a0384361b4de20420"
+
+def map_valibs(d):
+ # The emgd mixvideo implementation requires libva-x11/tpi/glx/egl
+ if base_contains('MACHINE_FEATURES', 'va-impl-mixvideo', "1", "0", d) == "1":
+ return "libva libva-x11 libva-tpi libva-glx libva-egl"
+ # The intel implementation requires the libva-intel-driver package
+ if base_contains('MACHINE_FEATURES', 'va-impl-intel', "1", "0", d) == "1":
+ return "libva libva-intel-driver"
+ # All meta-intel video acceleration requires libva
+ return "libva"
+
+VA_IMPL = "${@map_valibs(d)}"
+
+PACKAGES = "\
+ va-intel \
+ "
+
+ALLOW_EMPTY_va-intel = "1"
+
+RDEPENDS_va-intel = " \
+ ${VA_IMPL} \
+ "
+
+COMPATIBLE_HOST = '(i.86|x86_64).*-linux*'