Added qmafw-gst-subtitles-renderer-0.0.55 for Meego Harmattan 1.2
diff --git a/qmafw-gst-subtitles-renderer/src/mafw-gst-renderer-worker.c b/qmafw-gst-subtitles-renderer/src/mafw-gst-renderer-worker.c
new file mode 100644 (file)
index 0000000..eeceb39
--- /dev/null
@@ -0,0 +1,3105 @@
+/*
+ * This file is a part of MAFW
+ *
+ * Copyright (C) 2007, 2008, 2009 Nokia Corporation, all rights reserved.
+ *
+ * Contact: Visa Smolander <visa.smolander@nokia.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public License
+ * as published by the Free Software Foundation; version 2.1 of
+ * the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful, but
+ * WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this library; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+ * 02110-1301 USA
+ *
+ */
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <string.h>
+#include <glib.h>
+#include <glib/gstdio.h>
+#include <unistd.h>
+#include <X11/Xlib.h>
+#include <gst/interfaces/xoverlay.h>
+#include <gst/pbutils/missing-plugins.h>
+#include <gst/base/gstbasesink.h>
+#include <context_provider.h>
+
+#include "mafw-gst-renderer-worker.h"
+#include "mafw-gst-renderer-utils.h"
+
+#define UNUSED(x) (void)(x)
+
+/* The context provider D-Bus name must match the .context file name without
+ * the .context suffix; the service name inside the .context file must match too. */
+#define CONTEXT_PROVIDER_BUS_NAME       "com.nokia.mafw.context_provider.libqmafw_gst_renderer"
+#define CONTEXT_PROVIDER_KEY_NOWPLAYING "Media.NowPlaying"
+#define CONTEXT_PROVIDER_KEY_NOWPLAYING_TITLE    "title"
+#define CONTEXT_PROVIDER_KEY_NOWPLAYING_ALBUM    "album"
+#define CONTEXT_PROVIDER_KEY_NOWPLAYING_ARTIST   "artist"
+#define CONTEXT_PROVIDER_KEY_NOWPLAYING_GENRE    "genre"
+#define CONTEXT_PROVIDER_KEY_NOWPLAYING_RESOURCE "resource"
+#define CONTEXT_PROVIDER_KEY_NOWPLAYING_DURATION "duration"
+
+#define WORKER_ERROR g_quark_from_static_string("com.nokia.mafw.error.renderer")
+
+#define MAFW_GST_RENDERER_WORKER_DURATION_AND_SEEKABILITY_LAZY_TIMEOUT 4000
+#define MAFW_GST_RENDERER_WORKER_DURATION_AND_SEEKABILITY_FAST_TIMEOUT 200
+#define MAFW_GST_RENDERER_WORKER_DURATION_AND_SEEKABILITY_LOOP_LIMIT 10
+#define MAFW_GST_MISSING_TYPE_DECODER "decoder"
+#define MAFW_GST_MISSING_TYPE_ENCODER "encoder"
+
+#define MAFW_TMP_URI_LEN 2048
+
+#define STREAM_TYPE_MMS "mms://"
+#define STREAM_TYPE_MMSH "mmsh://"
+#define MAFW_GST_MMSH_CONNECTION_SPEED "2000"       /* kbit/s */
+#define MAFW_GST_MMSH_TCP_TIMEOUT "30000000"        /* microseconds */
+
+/* struct needed when emitting renderer art/frames as image files */
+typedef struct {
+    MafwGstRendererWorker *worker;
+    gint metadata_key;
+    const gchar *filename;
+} SaveGraphicData;
+
+/* Forward declarations. */
+static void _do_play(MafwGstRendererWorker *worker);
+
+static void _do_seek(MafwGstRendererWorker *worker,
+                     GstSeekType seek_type,
+                     gint position,
+                     gboolean key_frame_seek,
+                     GError **error);
+
+static gboolean _set_value(GValue *v, GType type, gconstpointer value);
+
+static void _emit_metadatas(MafwGstRendererWorker *worker);
+
+static gboolean _current_metadata_add(MafwGstRendererWorker *worker,
+                                      const gint key,
+                                      GType type,
+                                      const gpointer value);
+
+static gpointer _set_context_map_value(gpointer map,
+                                       const gchar *tag,
+                                       const gchar *value);
+
+/*
+ * Used to prevent a critical log from the context framework in case of
+ * multiple initialisations.  Common to all renderers in the process.
+ */
+static gboolean _context_fw_initialised = FALSE;
+
+/*
+ * Sends @err to MafwGstRenderer.  Only call this from the glib main thread,
+ * or face the consequences.  @err is freed.
+ */
+static void _send_error(MafwGstRendererWorker *worker, GError *err)
+{
+    worker->is_error = TRUE;
+    if (worker->notify_error_handler)
+    {
+        /* remap a possible gst ecode to worker ecode */
+        err->code = remap_gst_error_code(err);
+        worker->notify_error_handler(worker, worker->owner, err);
+    }
+    g_error_free(err);
+}
+
+configuration* _create_default_configuration()
+{
+    configuration *config = g_malloc0(sizeof(configuration));
+    config->asink = g_strdup("pulsesink");
+    config->vsink = g_strdup("omapxvsink");
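+    /* presumably the playbin2 "flags" bitmask: 71 = 0x47
+     * = video | audio | text | native-video */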
+    config->flags = 71;
+    config->buffer_time = 600000; /* microseconds */
+    config->latency_time = 100000; /* microseconds */
+    config->autoload_subtitles = TRUE;
+    config->subtitle_encoding = NULL;
+    config->subtitle_font = g_strdup("Sans Bold 18");
+
+    /* timers */
+    config->milliseconds_to_pause_frame = 700; /* milliseconds */
+    config->seconds_to_pause_to_ready = 3; /* seconds */
+
+    /* dhmmixer */
+    config->use_dhmmixer = TRUE;
+
+    config->mobile_surround_music.state = 0;
+    config->mobile_surround_music.room = 2;
+    config->mobile_surround_music.color = 2;
+    config->mobile_surround_video.state = 0;
+    config->mobile_surround_video.room = 2;
+    config->mobile_surround_video.color = 2;
+
+    return config;
+}
+
+void _free_configuration(configuration* config)
+{
+    g_free(config->asink);
+    g_free(config->vsink);
+    g_free(config->subtitle_encoding);
+    g_free(config->subtitle_font);
+
+    g_free(config);
+}
+
+/*
+ * Posts @err as an error message on the gst bus.  _async_bus_handler will then
+ * pick it up and forward it to MafwGstRenderer.  @err is freed.
+ */
+static void _post_error(MafwGstRendererWorker *worker, GError *err)
+{
+    gst_bus_post(worker->bus,
+                 gst_message_new_error(GST_OBJECT(worker->pipeline),
+                                       err,
+                                       NULL));
+    g_error_free(err);
+}
+
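+/*
+ * Initialises @v to @type and copies @value into it.  Returns FALSE for NULL
+ * input or unsupported types.
+ */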
+static gboolean _set_value(GValue *v, GType type, gconstpointer value)
+{
+
+    gboolean ret = TRUE;
+
+    if (v && value)
+    {
+        memset(v, 0, sizeof(GValue));
+        g_value_init(v, type);
+
+        if (type == G_TYPE_STRING) {
+            g_value_set_string(v, (const gchar*)value);
+        }
+        else if (type == G_TYPE_INT) {
+            g_value_set_int(v, *(gint*)value);
+        }
+        else if (type == G_TYPE_UINT) {
+            g_value_set_uint(v, *(guint*)value);
+        }
+        else if (type == G_TYPE_DOUBLE) {
+            g_value_set_double(v, *(gdouble*)value);
+        }
+        else if (type == G_TYPE_BOOLEAN) {
+            g_value_set_boolean(v, *(gboolean*)value);
+        }
+        else if (type == G_TYPE_INT64) {
+            g_value_set_int64(v, *(gint64*)value);
+        }
+        else if (type == G_TYPE_FLOAT) {
+            g_value_set_float(v, *(gfloat*)value);
+        }
+        else if (type == G_TYPE_VALUE_ARRAY) {
+            g_value_copy((GValue*)value,v);
+        }
+        else {
+            g_warning("%s: unknown g_type", G_STRFUNC);
+            ret = FALSE;
+        }
+    }
+    else
+    {
+        ret = FALSE;
+    }
+
+    return ret;
+
+}
+
+static void _emit_metadata(MafwGstRendererWorker *worker,
+                           gint metadata_key,
+                           GType type,
+                           gconstpointer value)
+{
+
+    GValue v;
+
+    if (worker && worker->notify_metadata_handler &&
+        _set_value(&v, type, value))
+    {
+        GValueArray *array = g_value_array_new(0);
+        g_value_array_append(array, &v);
+        worker->notify_metadata_handler(worker,
+                                        worker->owner,
+                                        metadata_key,
+                                        G_TYPE_VALUE_ARRAY,
+                                        array);
+        g_value_array_free(array);
+        g_value_unset(&v);
+    }
+
+}
+
+static void _emit_property(MafwGstRendererWorker *worker,
+                           gint property,
+                           GType type,
+                           gconstpointer value)
+{
+
+    GValue v;
+
+    if (worker && worker->notify_property_handler &&
+        _set_value(&v, type, value))
+    {
+        worker->notify_property_handler(worker, worker->owner, property, &v);
+        g_value_unset(&v);
+    }
+
+}
+
+static gchar *_init_tmp_file(void)
+{
+    gint fd;
+    gchar *path = NULL;
+
+    fd = g_file_open_tmp("mafw-gst-renderer-XXXXXX.picture", &path, NULL);
+    if (fd >= 0 )
+    {
+        close(fd);
+    }
+
+    return path;
+}
+
+static void _destroy_tmp_file(MafwGstRendererWorker *worker, guint index)
+{
+    g_unlink(worker->tmp_files_pool[index]);
+    g_free(worker->tmp_files_pool[index]);
+    worker->tmp_files_pool[index] = NULL;
+}
+
+static void _init_tmp_files_pool(MafwGstRendererWorker *worker)
+{
+    guint8 i;
+
+    worker->tmp_files_pool_index = 0;
+
+    for (i = 0; i < MAFW_GST_RENDERER_MAX_TMP_FILES; i++) {
+        worker->tmp_files_pool[i] = NULL;
+    }
+}
+
+static void _destroy_tmp_files_pool(MafwGstRendererWorker *worker)
+{
+    guint8 i;
+
+    for (i = 0; (i < MAFW_GST_RENDERER_MAX_TMP_FILES) &&
+         (worker->tmp_files_pool[i] != NULL); i++) {
+        g_unlink(worker->tmp_files_pool[i]);
+        g_free(worker->tmp_files_pool[i]);
+    }
+}
+
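+/*
+ * Returns the next path from a small round-robin pool of temporary files,
+ * recreating the file when a slot is reused.
+ */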
+static const gchar *_get_tmp_file_from_pool(MafwGstRendererWorker *worker)
+{
+    gchar *path = worker->tmp_files_pool[worker->tmp_files_pool_index];
+
+    if (path == NULL) {
+        path = _init_tmp_file();
+        worker->tmp_files_pool[worker->tmp_files_pool_index] = path;
+    }
+    else
+    {
+        _destroy_tmp_file(worker, worker->tmp_files_pool_index);
+        path = _init_tmp_file();
+        worker->tmp_files_pool[worker->tmp_files_pool_index] = path;
+    }
+
+    if (++(worker->tmp_files_pool_index) >= MAFW_GST_RENDERER_MAX_TMP_FILES) {
+        worker->tmp_files_pool_index = 0;
+    }
+
+    return path;
+}
+
+static void _emit_gst_buffer_as_graphic_file_cb(GError *error,
+                                                gpointer user_data)
+{
+    SaveGraphicData *sgd = user_data;
+
+    if (error == NULL) {
+        /* Add the info to the current metadata. */
+        _current_metadata_add(sgd->worker,
+                              sgd->metadata_key,
+                              G_TYPE_STRING,
+                              (const gpointer)sgd->filename);
+
+        /* Emit the metadata. */
+        _emit_metadata(sgd->worker,
+                       sgd->metadata_key,
+                       G_TYPE_STRING,
+                       sgd->filename);
+    }
+    else
+    {
+        g_warning("could not emit graphic file: %s", error->message);
+    }
+
+    g_free(sgd);
+}
+
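+/*
+ * Saves @buffer as an image file and emits its path as @metadata_key: raw
+ * video frames go through the screenshot handler, embedded image tags are
+ * written to a temp file directly.
+ */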
+static void _emit_gst_buffer_as_graphic_file(MafwGstRendererWorker *worker,
+                                             GstBuffer *buffer,
+                                             const gint metadata_key)
+{
+    GstStructure *structure;
+    const gchar *mime = NULL;
+    GError *error = NULL;
+    SaveGraphicData *sgd;
+
+    g_return_if_fail((buffer != NULL) && GST_IS_BUFFER(buffer));
+
+    structure = gst_caps_get_structure(GST_BUFFER_CAPS(buffer), 0);
+    mime = gst_structure_get_name(structure);
+
+    /* video pause frame related branch */
+    if (g_str_has_prefix(mime, "video/x-raw")) {
+        const gchar *filename = _get_tmp_file_from_pool(worker);
+
+        if(worker->taking_screenshot)
+        {
+            worker->screenshot_handler(worker, worker->owner, NULL, NULL, TRUE);
+        }
+        worker->taking_screenshot = TRUE;
+        worker->screenshot_handler(worker, worker->owner, buffer, filename, FALSE);
+
+    /* gst image tag related branch */
+    } else if (g_str_has_prefix(mime, "image/")) {
+
+        sgd = g_new0(SaveGraphicData, 1);
+        sgd->worker = worker;
+        sgd->metadata_key = metadata_key;
+        sgd->filename = _get_tmp_file_from_pool(worker);
+
+        g_debug("dumping gst image %s directly to a file", mime);
+        g_file_set_contents(sgd->filename,
+                            (const gchar*)GST_BUFFER_DATA(buffer),
+                            GST_BUFFER_SIZE(buffer),
+                            &error);
+        _emit_gst_buffer_as_graphic_file_cb(error, sgd);
+        if (error) {
+            g_error_free(error);
+        }
+    } else {
+        g_warning("Mime type not supported, will not create a thumbnail");
+        gst_buffer_unref(buffer);
+    }
+}
+
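+/*
+ * Timeout callback: stores the current playback position and drops the
+ * pipeline to GST_STATE_READY (worker->in_ready).
+ */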
+static gboolean _go_to_gst_ready(gpointer user_data)
+{
+    g_debug("_go_to_gst_ready");
+    MafwGstRendererWorker *worker = user_data;
+
+    g_return_val_if_fail(worker->state == GST_STATE_PAUSED ||
+                         worker->prerolling, FALSE);
+
+    worker->seek_position = mafw_gst_renderer_worker_get_position(worker);
+
+    g_debug("going to GST_STATE_READY");
+    gst_element_set_state(worker->pipeline, GST_STATE_READY);
+    worker->in_ready = TRUE;
+    return FALSE;
+}
+
+static void _add_ready_timeout(MafwGstRendererWorker *worker)
+{
+    if( worker->ready_timeout == 0 )
+    {
+        g_debug("Adding timeout to go to GST_STATE_READY");
+        worker->ready_timeout =
+                g_timeout_add_seconds(
+                    worker->config->seconds_to_pause_to_ready,
+                    _go_to_gst_ready,
+                    worker);
+    }
+}
+
+static void _remove_ready_timeout(MafwGstRendererWorker *worker)
+{
+    if( worker->ready_timeout != 0 )
+    {
+        g_debug("removing timeout for READY");
+        g_source_remove(worker->ready_timeout);
+        worker->ready_timeout = 0;
+    }
+}
+
+static gboolean _take_pause_frame(gpointer user_data)
+{
+    MafwGstRendererWorker *worker = user_data;
+
+    if( worker->pause_frame_taken && worker->pause_frame_buffer )
+    {
+        gst_buffer_unref(worker->pause_frame_buffer);
+        worker->pause_frame_buffer = NULL;
+        return FALSE;
+    }
+
+    if (worker->pause_frame_buffer != NULL) {
+        worker->pause_frame_taken = TRUE;
+        _emit_gst_buffer_as_graphic_file(
+            worker,
+            worker->pause_frame_buffer,
+            WORKER_METADATA_KEY_PAUSED_THUMBNAIL_URI);
+        worker->pause_frame_buffer = NULL;
+    }
+    return FALSE;
+}
+
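+/*
+ * Grabs the current frame from the pipeline's "frame" property and schedules
+ * _take_pause_frame() to emit it as the paused-thumbnail after a short delay.
+ */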
+static void _add_pause_frame_timeout(MafwGstRendererWorker *worker)
+{
+    if (worker->media.has_visual_content && worker->current_frame_on_pause &&
+        worker->seek_position == -1)
+    {
+        if (!worker->pause_frame_timeout)
+        {
+            GstBuffer *buffer = NULL;
+            g_object_get(worker->pipeline, "frame", &buffer, NULL);
+            if( buffer )
+            {
+                GstBuffer *copy = gst_buffer_copy(buffer);
+                gst_buffer_copy_metadata(copy, buffer, GST_BUFFER_COPY_ALL);
+                worker->pause_frame_buffer = copy;
+                gst_buffer_unref(buffer);
+
+                g_debug("Adding timeout to go to current frame capture");
+                worker->pause_frame_timeout =
+                        g_timeout_add_full(
+                                G_PRIORITY_DEFAULT,
+                                worker->config->milliseconds_to_pause_frame,
+                                _take_pause_frame,
+                                worker, NULL);
+            }
+            else
+            {
+                g_warning("MafwGstRenderer Worker: Could not get buffer from pipeline! Maybe at EOS?");
+            }
+        }
+    } else {
+        g_debug("Not adding timeout to take pause frame.");
+        worker->pause_frame_timeout = 0;
+    }
+}
+
+static void _remove_pause_frame_timeout(MafwGstRendererWorker *worker)
+{
+    if (worker->pause_frame_timeout != 0) {
+        g_debug("removing timeout for pause frame!");
+        g_source_remove(worker->pause_frame_timeout);
+        worker->pause_frame_timeout = 0;
+    }
+
+    if(worker->taking_screenshot)
+    {
+        worker->screenshot_handler(worker, worker->owner, NULL, NULL, TRUE);
+        worker->taking_screenshot = FALSE;
+    }
+    else
+    {
+        /* in this case the buffer has not been given to the
+         * screenshot component to be processed */
+        if(worker->pause_frame_buffer)
+        {
+            gst_buffer_unref(worker->pause_frame_buffer);
+            worker->pause_frame_buffer = NULL;
+        }
+    }
+}
+
+static gboolean _emit_video_info(MafwGstRendererWorker *worker)
+{
+
+    _emit_metadata(worker,
+                   WORKER_METADATA_KEY_RES_X,
+                   G_TYPE_INT,
+                   &worker->media.video_width);
+
+    _emit_metadata(worker,
+                   WORKER_METADATA_KEY_RES_Y,
+                   G_TYPE_INT,
+                   &worker->media.video_height);
+
+    _emit_metadata(worker,
+                   WORKER_METADATA_KEY_VIDEO_FRAMERATE,
+                   G_TYPE_DOUBLE,
+                   &worker->media.fps);
+
+    return FALSE;
+
+}
+
+/*
+ * Checks if the video details are supported.  It also extracts other useful
+ * information (such as PAR and framerate) from the caps, if available.  NOTE:
+ * this will be called from a different thread than glib's mainloop (when
+ * invoked via _stream_info_cb);  don't call MafwGstRenderer directly.
+ *
+ * Returns: TRUE if video details are acceptable.
+ */
+static gboolean _handle_video_info(MafwGstRendererWorker *worker,
+                                   const GstStructure *structure)
+{
+    gint width, height;
+    gdouble fps;
+
+    width = height = 0;
+    gst_structure_get_int(structure, "width", &width);
+    gst_structure_get_int(structure, "height", &height);
+    g_debug("video size: %d x %d", width, height);
+    if (gst_structure_has_field(structure, "pixel-aspect-ratio"))
+    {
+        gst_structure_get_fraction(structure,
+                                   "pixel-aspect-ratio",
+                                   &worker->media.par_n,
+                                   &worker->media.par_d);
+        g_debug("video PAR: %d:%d", worker->media.par_n, worker->media.par_d);
+        width = width * worker->media.par_n / worker->media.par_d;
+    }
+
+    fps = 1.0;
+    if (gst_structure_has_field(structure, "framerate"))
+    {
+        gint fps_n, fps_d;
+
+        gst_structure_get_fraction(structure, "framerate", &fps_n, &fps_d);
+        if (fps_d > 0) {
+            fps = (gdouble)fps_n / (gdouble)fps_d;
+        }
+        g_debug("video fps: %f", fps);
+    }
+
+    worker->media.video_width = width;
+    worker->media.video_height = height;
+    worker->media.fps = fps;
+
+    _current_metadata_add(worker, WORKER_METADATA_KEY_RES_X, G_TYPE_INT,
+                          (const gpointer)&width);
+    _current_metadata_add(worker, WORKER_METADATA_KEY_RES_Y, G_TYPE_INT,
+                          (const gpointer)&height);
+    _current_metadata_add(worker, WORKER_METADATA_KEY_VIDEO_FRAMERATE,
+                          G_TYPE_DOUBLE, (const gpointer)&fps);
+
+    /* Emit the metadata.*/
+    g_idle_add((GSourceFunc)_emit_video_info, worker);
+    return TRUE;
+}
+
+static void _parse_stream_info_item(MafwGstRendererWorker *worker, GObject *obj)
+{
+    GParamSpec *pspec;
+    GEnumValue *val;
+    gint type;
+
+    g_object_get(obj, "type", &type, NULL);
+    pspec = g_object_class_find_property(G_OBJECT_GET_CLASS(obj), "type");
+    if(!pspec)
+        return;
+    val = g_enum_get_value(G_PARAM_SPEC_ENUM(pspec)->enum_class, type);
+    if (!val)
+        return;
+    if (!g_ascii_strcasecmp(val->value_nick, "video") ||
+        !g_ascii_strcasecmp(val->value_name, "video"))
+    {
+        GstCaps *vcaps;
+        GstObject *object;
+
+        object = NULL;
+        g_object_get(obj, "object", &object, NULL);
+        vcaps = NULL;
+        if (object) {
+            vcaps = gst_pad_get_caps(GST_PAD_CAST(object));
+        } else {
+            g_object_get(obj, "caps", &vcaps, NULL);
+            gst_caps_ref(vcaps);
+        }
+        if (vcaps) {
+            if (gst_caps_is_fixed(vcaps))
+            {
+                _handle_video_info(worker, gst_caps_get_structure(vcaps, 0));
+            }
+            gst_caps_unref(vcaps);
+        }
+    }
+}
+
+/* It always returns FALSE, because it is used as an idle callback as well. */
+static gboolean _parse_stream_info(MafwGstRendererWorker *worker)
+{
+    GList *stream_info, *s;
+
+    stream_info = NULL;
+    if (g_object_class_find_property(G_OBJECT_GET_CLASS(worker->pipeline),
+                                     "stream-info"))
+    {
+        g_object_get(worker->pipeline, "stream-info", &stream_info, NULL);
+    }
+    for (s = stream_info; s; s = g_list_next(s))
+        _parse_stream_info_item(worker, G_OBJECT(s->data));
+    return FALSE;
+}
+
+static void mafw_gst_renderer_worker_apply_xid(MafwGstRendererWorker *worker)
+{
+    /* Set the sink to render on the provided XID if we do have an XID,
+       a valid video sink and we are rendering video content */
+    if (worker->xid &&
+        worker->vsink &&
+        worker->media.has_visual_content)
+    {
+        g_debug ("Setting overlay, window id: %x", (gint) worker->xid);
+        gst_x_overlay_set_xwindow_id(GST_X_OVERLAY(worker->vsink), worker->xid);
+
+
+        /* Ask the gst to redraw the frame if we are paused */
+        /* TODO: in MTG this works only in non-fs -> fs way. */
+        if (worker->state == GST_STATE_PAUSED)
+        {
+            gst_x_overlay_expose(GST_X_OVERLAY(worker->vsink));
+        }
+    } else {
+        g_debug("Not setting overlay for window id: %x", (gint) worker->xid);
+    }
+}
+
+static void mafw_gst_renderer_worker_apply_render_rectangle(MafwGstRendererWorker *worker)
+{
+    /* Set the render rectangle on the video sink if we do have an XID,
+       a valid video sink, visual content and a valid rectangle */
+    if (worker->xid &&
+        worker->vsink &&
+        worker->media.has_visual_content
+        &&
+        (worker->x_overlay_rectangle.x >= 0 &&
+         worker->x_overlay_rectangle.y >= 0 &&
+         worker->x_overlay_rectangle.width >= 0 &&
+         worker->x_overlay_rectangle.height >= 0) )
+    {
+        g_debug("Applying render rectangle: X:%d,Y:%d  Width:%d, Height:%d",
+                worker->x_overlay_rectangle.x,
+                worker->x_overlay_rectangle.y,
+                worker->x_overlay_rectangle.width,
+                worker->x_overlay_rectangle.height);
+
+        gst_x_overlay_set_render_rectangle(GST_X_OVERLAY(worker->vsink),
+                                           worker->x_overlay_rectangle.x,
+                                           worker->x_overlay_rectangle.y,
+                                           worker->x_overlay_rectangle.width,
+                                           worker->x_overlay_rectangle.height);
+        /* Ask the gst to redraw the frame if we are paused */
+        /* TODO: in MTG this works only in non-fs -> fs way. */
+        if (worker->state == GST_STATE_PAUSED)
+        {
+            gst_x_overlay_expose(GST_X_OVERLAY(worker->vsink));
+        }
+
+    } else {
+        g_debug("Not setting render rectangle for window id: %x", (gint) worker->xid);
+    }
+}
+
+/*
+ * GstBus synchronous message handler.  NOTE that this handler is NOT invoked
+ * from the glib thread, so be careful what you do here.
+ */
+static GstBusSyncReply _sync_bus_handler(GstBus *bus,
+                                         GstMessage *msg,
+                                         MafwGstRendererWorker *worker)
+{
+
+    UNUSED(bus);
+
+    if (GST_MESSAGE_TYPE(msg) == GST_MESSAGE_ELEMENT &&
+        gst_structure_has_name(msg->structure, "prepare-xwindow-id"))
+    {
+        g_debug("got prepare-xwindow-id");
+        worker->media.has_visual_content = TRUE;
+        set_dolby_video_property(worker, worker->config->mobile_surround_video.state);
+        set_dolby_video_sound_property(worker, worker->config->mobile_surround_video.room, TRUE);
+        set_dolby_video_sound_property(worker, worker->config->mobile_surround_video.color, FALSE);
+        /* The user has to preset the XID, we don't create windows by
+         * ourselves. */
+        if (!worker->xid || !worker->vsink) {
+            /* We must post an error message to the bus that will be picked up
+             * by _async_bus_handler.  Calling the notification function
+             * directly from here (different thread) is not healthy. */
+            g_warning("No video window or video-sink set!");
+            _post_error(worker,
+                        g_error_new_literal(WORKER_ERROR,
+                                            WORKER_ERROR_PLAYBACK,
+                                            "No video window XID or video-sink set"));
+            gst_message_unref (msg);
+            return GST_BUS_DROP;
+        } else {
+            g_debug ("Video window to use is: %x", (gint)worker->xid);
+        }
+
+        /* Instruct vsink to use the client-provided window */
+        mafw_gst_renderer_worker_apply_xid(worker);
+        /* Instruct vsink to use the required render rectangle */
+        mafw_gst_renderer_worker_apply_render_rectangle(worker);
+
+        /* Handle colorkey and autopaint */
+        mafw_gst_renderer_worker_set_autopaint(worker, worker->autopaint);
+        g_object_get(worker->vsink, "colorkey", &worker->colorkey, NULL);
+        /* Defer the signal emission to the thread running the mainloop. */
+        if (worker->colorkey != -1) {
+            gst_bus_post(worker->bus,
+                         gst_message_new_application(
+                             GST_OBJECT(worker->vsink),
+                             gst_structure_empty_new("ckey")));
+        }
+        gst_message_unref (msg);
+        return GST_BUS_DROP;
+    }
+    /* do not unref message when returning PASS */
+    return GST_BUS_PASS;
+}
+
+static void _free_taglist_item(GstMessage *msg, gpointer data)
+{
+    UNUSED(data);
+
+    gst_message_unref(msg);
+}
+
+static void _free_taglist(MafwGstRendererWorker *worker)
+{
+    if (worker->tag_list != NULL)
+    {
+        g_ptr_array_foreach(worker->tag_list, (GFunc)_free_taglist_item, NULL);
+        g_ptr_array_free(worker->tag_list, TRUE);
+        worker->tag_list = NULL;
+    }
+}
+
+static gboolean _seconds_duration_equal(gint64 duration1, gint64 duration2)
+{
+    gint64 duration1_seconds, duration2_seconds;
+
+    duration1_seconds = duration1 / GST_SECOND;
+    duration2_seconds = duration2 / GST_SECOND;
+
+    return duration1_seconds == duration2_seconds;
+}
+
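+/*
+ * Updates the cached media duration (querying the pipeline when @value is -1),
+ * emits the DURATION metadata when it changes and publishes it to the context
+ * framework.
+ */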
+static void _check_duration(MafwGstRendererWorker *worker, gint64 value)
+{
+    gboolean right_query = TRUE;
+
+    if (value == -1) {
+        GstFormat format = GST_FORMAT_TIME;
+        right_query =
+            gst_element_query_duration(worker->pipeline, &format, &value);
+    }
+
+    if (right_query && value > 0
+            && !_seconds_duration_equal(worker->media.length_nanos, value))
+    {
+        gint64 duration = (value + (GST_SECOND/2)) / GST_SECOND;
+
+        /* Add the duration to the current metadata. */
+        if( _current_metadata_add(worker,
+                                  WORKER_METADATA_KEY_DURATION,
+                                  G_TYPE_INT64,
+                                  (const gpointer)&duration) )
+        {
+            _emit_metadata(worker,
+                           WORKER_METADATA_KEY_DURATION,
+                           G_TYPE_INT64,
+                           &duration);
+        }
+
+         /* Publish to context FW */
+        if( worker->context_nowplaying == NULL )
+        {
+            worker->context_nowplaying = context_provider_map_new();
+        }
+        context_provider_map_set_integer(worker->context_nowplaying,
+                                         CONTEXT_PROVIDER_KEY_NOWPLAYING_DURATION,
+                                         duration);
+        context_provider_set_map(CONTEXT_PROVIDER_KEY_NOWPLAYING,
+                                 worker->context_nowplaying, FALSE);
+         /* end of publishing to context FW */
+    }
+
+    if( right_query )
+    {
+        worker->media.length_nanos = value;
+    }
+
+    g_debug("media duration: %lld", worker->media.length_nanos);
+}
+
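+/*
+ * Queries the pipeline for seekability and emits the IS_SEEKABLE metadata
+ * when it is first resolved or when it changes.
+ */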
+static void _check_seekability(MafwGstRendererWorker *worker)
+{
+    SeekabilityType seekable = SEEKABILITY_UNKNOWN;
+    if (worker->media.length_nanos >= 0 )
+    {
+        g_debug("Quering GStreamer for seekability");
+        GstQuery *seek_query;
+        GstFormat format = GST_FORMAT_TIME;
+        /* Query the seekability of the stream */
+        seek_query = gst_query_new_seeking(format);
+        if (gst_element_query(worker->pipeline, seek_query)) {
+            gboolean renderer_seekable = FALSE;
+            gst_query_parse_seeking(seek_query,
+                                    NULL,
+                                    &renderer_seekable,
+                                    NULL, NULL);
+            g_debug("GStreamer seekability %d", renderer_seekable);
+            seekable = renderer_seekable ? SEEKABILITY_SEEKABLE : SEEKABILITY_NO_SEEKABLE;
+        }
+        else
+        {
+            g_debug("Could not query pipeline for seekability! Using old value!");
+            seekable = worker->media.seekable;
+        }
+        gst_query_unref(seek_query);
+    }
+    else if( worker->media.length_nanos == DURATION_INDEFINITE )
+    {
+        /* duration indefinite, "clearly" not seekable */
+        seekable = SEEKABILITY_NO_SEEKABLE;
+    }
+    else
+    {
+        /* otherwise we'll use last known/guessed value */
+        seekable = worker->media.seekable;
+    }
+
+    g_debug("media seekable: %d", seekable);
+
+    /* If the seekability is unknown it is set as false and sent; after that it
+       is sent only if it changes to true. */
+    if( (seekable == SEEKABILITY_UNKNOWN && worker->media.seekable == SEEKABILITY_UNKNOWN)
+        || seekable != worker->media.seekable )
+    {
+        if( seekable != SEEKABILITY_NO_SEEKABLE )
+        {
+            worker->media.seekable = SEEKABILITY_SEEKABLE;
+        }
+        else
+        {
+            worker->media.seekable =  SEEKABILITY_NO_SEEKABLE;
+        }
+
+        gboolean is_seekable = (worker->media.seekable == SEEKABILITY_SEEKABLE);
+        _current_metadata_add(worker,
+                              WORKER_METADATA_KEY_IS_SEEKABLE,
+                              G_TYPE_BOOLEAN,
+                              (const gpointer)&is_seekable);
+        _emit_metadata(worker,
+                       WORKER_METADATA_KEY_IS_SEEKABLE,
+                       G_TYPE_BOOLEAN,
+                       &is_seekable);
+    }
+}
+
+static gboolean _query_duration_and_seekability_timeout(gpointer data)
+{
+    MafwGstRendererWorker *worker = data;
+
+    if (!worker->in_ready)
+    {
+        _check_duration(worker, -1);
+        worker->duration_seek_timeout_loop_count += 1;
+
+        /* For the worker's internal logic, mark the duration as indefinite
+         * once the loop limit has been reached; this affects the seekability
+         * resolution. */
+        if( worker->duration_seek_timeout_loop_count >= MAFW_GST_RENDERER_WORKER_DURATION_AND_SEEKABILITY_LOOP_LIMIT
+            && worker->media.length_nanos == DURATION_UNQUERIED )
+        {
+            worker->media.length_nanos = DURATION_INDEFINITE;
+        }
+
+        _check_seekability(worker);
+
+        if( worker->media.length_nanos >= DURATION_INDEFINITE )
+        {
+            worker->duration_seek_timeout = 0;
+            /* we've got a valid duration value, no need to ask for more */
+            return FALSE;
+        }
+        else
+        {
+            return TRUE;
+        }
+    }
+    else
+    {
+        g_warning("_query_duration_and_seekability_timeout: We are in ready state, duration and seekability not checked.");
+        return FALSE;
+    }
+}
+
+/*
+ * Resets the media information.
+ */
+static void _reset_media_info(MafwGstRendererWorker *worker)
+{
+    if (worker->media.location) {
+        g_free(worker->media.location);
+        worker->media.location = NULL;
+    }
+    worker->media.length_nanos = DURATION_UNQUERIED;
+    worker->media.has_visual_content = FALSE;
+    worker->media.seekable = SEEKABILITY_UNKNOWN;
+    worker->media.video_width = 0;
+    worker->media.video_height = 0;
+    worker->media.fps = 0.0;
+}
+
+static void _reset_pipeline_and_worker(MafwGstRendererWorker *worker)
+{
+
+    if (worker->pipeline) {
+        g_debug("destroying pipeline");
+        if (worker->async_bus_id) {
+            g_source_remove(worker->async_bus_id);
+            worker->async_bus_id = 0;
+        }
+        gst_element_set_state(worker->pipeline, GST_STATE_NULL);
+        if (worker->bus) {
+            gst_bus_set_sync_handler(worker->bus, NULL, NULL);
+            gst_object_unref(GST_OBJECT_CAST(worker->bus));
+            worker->bus = NULL;
+        }
+        gst_object_unref(worker->pipeline);
+        worker->pipeline = NULL;
+    }
+
+    worker->report_statechanges = TRUE;
+    worker->state = GST_STATE_NULL;
+    worker->prerolling = FALSE;
+    worker->is_live = FALSE;
+    worker->buffering = FALSE;
+    worker->is_stream = FALSE;
+    worker->is_error = FALSE;
+    worker->eos = FALSE;
+    worker->seek_position = -1;
+    worker->stay_paused = FALSE;
+    worker->playback_speed = 1;
+    worker->in_ready = FALSE;
+    _remove_ready_timeout(worker);
+    _remove_pause_frame_timeout(worker);
+    _free_taglist(worker);
+    if (worker->current_metadata) {
+        g_hash_table_destroy(worker->current_metadata);
+        worker->current_metadata = NULL;
+    }
+
+    if (worker->duration_seek_timeout != 0) {
+        g_source_remove(worker->duration_seek_timeout);
+        worker->duration_seek_timeout = 0;
+    }
+    worker->duration_seek_timeout_loop_count = 0;
+
+    _reset_media_info(worker);
+
+    /* removes all idle timeouts with this worker as data */
+    while(g_idle_remove_by_data(worker));
+}
+
+
+/*
+ * Called when the pipeline transitions into PAUSED state.  It extracts more
+ * information from Gst.
+ */
+static void _finalize_startup(MafwGstRendererWorker *worker)
+{
+    /* Check video caps */
+    if (worker->media.has_visual_content && worker->vsink) {
+        GstPad *pad = GST_BASE_SINK_PAD(worker->vsink);
+        GstCaps *caps = GST_PAD_CAPS(pad);
+        if (caps && gst_caps_is_fixed(caps)) {
+            GstStructure *structure;
+            structure = gst_caps_get_structure(caps, 0);
+            if (!_handle_video_info(worker, structure))
+                return;
+        }
+    }
+
+    /* Something might have gone wrong at this point already. */
+    if (worker->is_error) {
+        g_debug("Error occured during preroll");
+        return;
+    }
+
+    /* Streaminfo might reveal the media to be unsupported.  Therefore we
+     * need to check the error again. */
+    _parse_stream_info(worker);
+    if (worker->is_error) {
+        g_debug("Error occured. Leaving");
+        return;
+    }
+
+    /* Check duration and seekability */
+    if (worker->duration_seek_timeout != 0) {
+        g_source_remove(worker->duration_seek_timeout);
+        worker->duration_seek_timeout = 0;
+    }
+
+    _check_duration(worker, -1);
+    _check_seekability(worker);
+}
+
+static void _add_duration_seek_query_timeout(MafwGstRendererWorker *worker)
+{
+    if(worker->duration_seek_timeout == 0)
+    {
+        gint timeout = 0;
+        if( worker->duration_seek_timeout_loop_count >= MAFW_GST_RENDERER_WORKER_DURATION_AND_SEEKABILITY_LOOP_LIMIT
+            || worker->media.length_nanos >= DURATION_INDEFINITE )
+        {
+            /* this is just for verifying the duration later on if it was received in PAUSED state early on */
+            timeout = MAFW_GST_RENDERER_WORKER_DURATION_AND_SEEKABILITY_LAZY_TIMEOUT;
+        }
+        else
+        {
+            timeout = MAFW_GST_RENDERER_WORKER_DURATION_AND_SEEKABILITY_FAST_TIMEOUT;
+        }
+
+        worker->duration_seek_timeout = g_timeout_add(
+                timeout,
+                _query_duration_and_seekability_timeout,
+                worker);
+    }
+}
+
+static void _do_pause_postprocessing(MafwGstRendererWorker *worker)
+{
+    if (worker->notify_pause_handler) {
+        worker->notify_pause_handler(worker, worker->owner);
+    }
+
+    _add_pause_frame_timeout(worker);
+    _add_ready_timeout(worker);
+}
+
+static void _report_playing_state(MafwGstRendererWorker * worker)
+{
+    if (worker->report_statechanges && worker->notify_play_handler)
+    {
+        worker->notify_play_handler( worker,
+                                     worker->owner);
+    }
+}
+
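+/*
+ * Handles state-changed bus messages: finalizes prerolling, performs deferred
+ * and live-source seeks, reports play/pause state changes and manages the
+ * READY-state and pause-frame timeouts.
+ */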
+static void _handle_state_changed(GstMessage *msg,
+                                  MafwGstRendererWorker *worker)
+{
+    GstState newstate, oldstate;
+    GstStateChange statetrans;
+
+    gst_message_parse_state_changed(msg, &oldstate, &newstate, NULL);
+    statetrans = GST_STATE_TRANSITION(oldstate, newstate);
+    g_debug("State changed: %d: %d -> %d", worker->state, oldstate, newstate);
+
+    /* If the state is the same we do nothing, otherwise, we keep it */
+    if (worker->state == newstate)
+    {
+        /* This is used for saving the correct pause frame after pauseAt.
+         * If we are doing a normal seek we don't want to save a pause frame.
+         * We use gst_element_get_state to check if the state change is
+         * completed: if it returns GST_STATE_CHANGE_SUCCESS we know it is
+         * safe to do the pause postprocessing */
+        if (newstate == GST_STATE_PAUSED && worker->stay_paused &&
+            gst_element_get_state(worker->pipeline, NULL, NULL, 0) == GST_STATE_CHANGE_SUCCESS)
+        {
+            worker->seek_position = mafw_gst_renderer_seeker_process(worker->seeker);
+
+            /* has seeking ended successfully? */
+            if( worker->seek_position < 0 )
+            {
+                /* we do pause_postprocessing for pauseAt */
+                _do_pause_postprocessing(worker);
+            }
+        }
+
+        /* The EOS flag should only be cleared if it has been set and seeking
+         * has been done; a PAUSED -> PAUSED transition should only happen
+         * when seeking.
+         */
+        if( newstate == GST_STATE_PAUSED && worker->eos )
+        {
+            worker->eos = FALSE;
+        }
+        return;
+    }
+
+    worker->state = newstate;
+
+    switch (statetrans) {
+        case GST_STATE_CHANGE_READY_TO_PAUSED:
+            if (worker->in_ready) {
+                /* Woken up from READY, resume stream position and playback */
+
+                /*live sources can be sought only in PLAYING state*/
+                if( !worker->is_live ) {
+                    _do_seek(worker,
+                             GST_SEEK_TYPE_SET,
+                             worker->seek_position,
+                             FALSE,
+                             NULL);
+                }
+
+                /* While buffering, we have to wait in PAUSED until we reach 100% before
+                 * doing anything */
+                if (worker->buffering) {
+                    return;
+                } else {
+                    _do_play(worker);
+                }
+            } else if (worker->prerolling && worker->report_statechanges && !worker->buffering) {
+                /* PAUSED after the pipeline has been constructed.  We check
+                 * caps, duration and seekability, and if we need to stay
+                 * paused we perform the pause operations: capturing the
+                 * current frame, signalling the state change and adding the
+                 * timeout to go to READY */
+                g_debug ("Prerolling done, finalizaing startup");
+                _finalize_startup(worker);
+
+                if (worker->stay_paused) {
+                    /* then we can tell we're paused */
+                    _do_pause_postprocessing(worker);
+                }
+
+                if( worker->seek_position > 0 )
+                {
+                    g_debug("Immediate seek from READY state to: %d", worker->seek_position);
+                    _do_seek(worker, GST_SEEK_TYPE_SET,
+                             worker->seek_position, FALSE, NULL);
+
+                    if(worker->vsink)
+                    {
+                        g_object_set(worker->vsink, "show-preroll-frame",
+                                     TRUE, NULL);
+                    }
+
+                    /* do_seek will set this to false, but we'll want to report state changes
+                       when doing immediate seek from start */
+                    worker->report_statechanges = TRUE;
+                }
+                worker->prerolling = FALSE;
+                _do_play(worker);
+            }
+            break;
+        case GST_STATE_CHANGE_PLAYING_TO_PAUSED:
+            /* When pausing we signal the state change, capture the current
+             * frame on pause and add the timeout to go to READY */
+            if (worker->report_statechanges) {
+                _do_pause_postprocessing(worker);
+            }
+            break;
+        case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
+
+            /* Live sources can be sought only in PLAYING state.  This seek
+             * should happen only after the READY -> PAUSED -> PLAYING
+             * transitions. */
+            if( worker->report_statechanges
+                    && worker->seek_position > -1
+                    && worker->is_live )
+            {
+                g_debug("Seeking live source in PLAYING state!");
+                _do_seek(worker,
+                         GST_SEEK_TYPE_SET,
+                         worker->seek_position,
+                         FALSE,
+                         NULL);
+                /* this has to be set as do_seek sets statechanges to FALSE
+                  but we still want to inform that we're in PLAYING state */
+                worker->report_statechanges = TRUE;
+                /* seek position needs to be reset here for a live stream */
+                worker->seek_position = -1;
+            }
+
+            /* Because live streams are sought in PLAYING state, we reset
+               seek_position after all state transitions are completed.  For
+               normal streams resetting seek_position here is OK. */
+            if(worker->report_statechanges == FALSE || !worker->is_live)
+            {
+                /* if seek was called, at this point it is really ended */
+                worker->seek_position = mafw_gst_renderer_seeker_process(worker->seeker);
+            }
+
+            /* Signal state change if needed */
+            _report_playing_state(worker);
+
+            /* Prevent blanking if we are playing video */
+            if (worker->media.has_visual_content &&
+                worker->blanking__control_handler)
+            {
+                worker->blanking__control_handler(worker, worker->owner, TRUE);
+            }
+
+            /* Back to playing no longer in_ready (if ever was) */
+            worker->in_ready = FALSE;
+
+            /* context framework adaptation starts */
+            worker->context_nowplaying =
+                _set_context_map_value(worker->context_nowplaying,
+                                       GST_TAG_LOCATION,
+                                       worker->media.location);
+            context_provider_set_map(CONTEXT_PROVIDER_KEY_NOWPLAYING,
+                                     worker->context_nowplaying, FALSE);
+            /* context framework adaptation ends */
+
+            /* Emit metadata. We wait until we reach the playing state because
+             * this speeds up playback start time */
+            _emit_metadatas(worker);
+
+            /* in any case the duration is verified, it may change with VBR media */
+            _add_duration_seek_query_timeout(worker);
+
+            /* We've reached playing state; state changes are not reported
+             * unless explicitly requested (e.g. by a PAUSE request).  Seeking
+             * in PLAYING does not cause state change reports.
+             */
+            worker->report_statechanges = FALSE;
+
+            /* Delayed pause e.g. because of seek */
+            if (worker->stay_paused) {
+                mafw_gst_renderer_worker_pause(worker);
+            }
+
+            break;
+        case GST_STATE_CHANGE_PAUSED_TO_READY:
+            /* If we went to READY, we free the taglist and clear the ready
+             * timeout */
+            if (worker->in_ready) {
+                g_debug("changed to GST_STATE_READY");
+                worker->ready_timeout = 0;
+                _free_taglist(worker);
+
+                if( worker->notify_ready_state_handler )
+                {
+                    worker->notify_ready_state_handler(worker, worker->owner);
+                }
+            }
+            break;
+        case GST_STATE_CHANGE_NULL_TO_READY:
+            if(g_str_has_prefix(worker->media.location, STREAM_TYPE_MMSH) ||
+               g_str_has_prefix(worker->media.location, STREAM_TYPE_MMS))
+            {
+                GstElement *source = NULL;
+                g_object_get(worker->pipeline, "source", &source, NULL);
+                if(source)
+                {
+                    gst_util_set_object_arg(G_OBJECT(source), "tcp-timeout", MAFW_GST_MMSH_TCP_TIMEOUT);
+                    gst_object_unref(source);
+                }
+                else
+                    g_warning("Failed to get source element from pipeline");
+            }
+            break;
+        default:
+            break;
+    }
+}
+
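+/*
+ * Schedules a quick duration/seekability re-query when the duration may have
+ * changed; only in PAUSED/PLAYING and when no query loop is already running.
+ */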
+static void _handle_duration(MafwGstRendererWorker *worker)
+{
+    /* If a duration/seekability query timeout is already running, ignore this;
+     * in any case, only do it in PAUSED/PLAYING */
+    if( worker->duration_seek_timeout == 0
+        && ( worker->state == GST_STATE_PAUSED || worker->state == GST_STATE_PLAYING) )
+    {
+        /* We want to check this quickly but not immediately */
+        /* store the id so that we do not schedule parallel query loops */
+        worker->duration_seek_timeout = g_timeout_add_full(
+                G_PRIORITY_DEFAULT,
+                MAFW_GST_RENDERER_WORKER_DURATION_AND_SEEKABILITY_FAST_TIMEOUT,
+                _query_duration_and_seekability_timeout,
+                worker,
+                NULL);
+    }
+}
+
+static void _emit_renderer_art(MafwGstRendererWorker *worker,
+                               const GstTagList *list)
+{
+    GstBuffer *buffer = NULL;
+    const GValue *value = NULL;
+
+    g_return_if_fail(gst_tag_list_get_tag_size(list, GST_TAG_IMAGE) > 0);
+
+    value = gst_tag_list_get_value_index(list, GST_TAG_IMAGE, 0);
+
+    g_return_if_fail((value != NULL) && G_VALUE_HOLDS(value, GST_TYPE_BUFFER));
+
+    buffer = g_value_peek_pointer(value);
+
+    g_return_if_fail((buffer != NULL) && GST_IS_BUFFER(buffer));
+
+    _emit_gst_buffer_as_graphic_file(worker,
+                                     buffer,
+                                     WORKER_METADATA_KEY_RENDERER_ART_URI);
+}
+
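+/*
+ * GDestroyNotify for current_metadata values, which are either a single
+ * GValue or a GValueArray.
+ */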
+static void value_dtor(gpointer value)
+{
+
+    if (G_IS_VALUE(value)) {
+        g_value_unset(value);
+        g_free(value);
+    } else {
+        g_value_array_free(value);
+    }
+
+}
+
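+/*
+ * Stores @value under @key in worker->current_metadata.  Returns TRUE when
+ * the stored value actually changed, so callers can avoid re-emitting
+ * identical metadata.
+ */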
+static gboolean _current_metadata_add(MafwGstRendererWorker *worker,
+                                      const gint key,
+                                      GType type,
+                                      const gpointer value)
+{
+    GValue *new_gval;
+    gboolean was_updated = FALSE;
+
+    if( value == NULL )
+    {
+        g_warning("Null value for metadata was tried to be set!");
+        return was_updated;
+    }
+
+    if (!worker->current_metadata) {
+        worker->current_metadata = g_hash_table_new_full(g_direct_hash,
+                                                         g_direct_equal,
+                                                         NULL,
+                                                         value_dtor);
+    }
+
+    if (type == G_TYPE_VALUE_ARRAY) {
+        GValueArray *values = (GValueArray *) value;
+
+        if (values->n_values == 1) {
+            GValue *gval = g_value_array_get_nth(values, 0);
+            new_gval = g_new0(GValue, 1);
+            g_value_init(new_gval, G_VALUE_TYPE(gval));
+            g_value_copy(gval, new_gval);
+
+            GValue *existing = (GValue*)g_hash_table_lookup(worker->current_metadata, (gpointer)key);
+            if(!existing || (GST_VALUE_EQUAL != gst_value_compare(existing, new_gval)) )
+            {
+                was_updated = TRUE;
+            }
+            g_hash_table_insert(worker->current_metadata,
+                                (gpointer)key,
+                                new_gval);
+        } else {
+            GValueArray *new_gvalues = g_value_array_copy(values);
+
+            GValueArray *existing = (GValueArray*)g_hash_table_lookup(worker->current_metadata, (gpointer)key);
+
+            if( existing
+                && new_gvalues->n_values == existing->n_values )
+            {
+                guint size = new_gvalues->n_values;
+
+                guint i = 0;
+                for( ; i < size; ++i )
+                {
+                    GValue *newVal = g_value_array_get_nth(new_gvalues, i);
+                    GValue *existingVal = g_value_array_get_nth(existing, i);
+                    if( GST_VALUE_EQUAL != gst_value_compare(newVal, existingVal) )
+                    {
+                        was_updated = TRUE;
+                        break;
+                    }
+                }
+            }
+            else
+            {
+                was_updated = TRUE;
+            }
+
+            g_hash_table_insert(worker->current_metadata,
+                                (gpointer)key,
+                                new_gvalues);
+        }
+
+        return was_updated;
+    }
+
+    new_gval = g_new0(GValue, 1);
+
+    if (_set_value(new_gval, type, value) == FALSE)
+    {
+        g_warning("Metadata type: %i is not being handled", type);
+        return was_updated;
+    }
+
+    GValue *existing = (GValue*)g_hash_table_lookup(worker->current_metadata, (gpointer)key);
+    if(!existing || (GST_VALUE_EQUAL != gst_value_compare(existing, new_gval)) )
+    {
+        was_updated = TRUE;
+    }
+    g_hash_table_insert(worker->current_metadata, (gpointer)key, new_gval);
+
+    return was_updated;
+
+}
+
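+/*
+ * Builds the mapping from GStreamer tag names to MAFW worker metadata keys
+ * used by _emit_tag().
+ */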
+static GHashTable* _build_tagmap(void)
+{
+    GHashTable *hash_table = NULL;
+
+    hash_table = g_hash_table_new_full(g_str_hash, g_str_equal, g_free, g_free);
+
+    g_hash_table_insert(hash_table, g_strdup(GST_TAG_TITLE),
+                        (gpointer)WORKER_METADATA_KEY_TITLE);
+    g_hash_table_insert(hash_table, g_strdup(GST_TAG_ARTIST),
+                        (gpointer)WORKER_METADATA_KEY_ARTIST);
+    g_hash_table_insert(hash_table, g_strdup(GST_TAG_AUDIO_CODEC),
+                        (gpointer)WORKER_METADATA_KEY_AUDIO_CODEC);
+    g_hash_table_insert(hash_table, g_strdup(GST_TAG_VIDEO_CODEC),
+                        (gpointer)WORKER_METADATA_KEY_VIDEO_CODEC);
+    g_hash_table_insert(hash_table, g_strdup(GST_TAG_BITRATE),
+                        (gpointer)WORKER_METADATA_KEY_BITRATE);
+    g_hash_table_insert(hash_table, g_strdup(GST_TAG_LANGUAGE_CODE),
+                        (gpointer)WORKER_METADATA_KEY_ENCODING);
+    g_hash_table_insert(hash_table, g_strdup(GST_TAG_ALBUM),
+                        (gpointer)WORKER_METADATA_KEY_ALBUM);
+    g_hash_table_insert(hash_table, g_strdup(GST_TAG_GENRE),
+                        (gpointer)WORKER_METADATA_KEY_GENRE);
+    g_hash_table_insert(hash_table, g_strdup(GST_TAG_TRACK_NUMBER),
+                        (gpointer)WORKER_METADATA_KEY_TRACK);
+    g_hash_table_insert(hash_table, g_strdup(GST_TAG_ORGANIZATION),
+                        (gpointer)WORKER_METADATA_KEY_ORGANIZATION);
+    g_hash_table_insert(hash_table, g_strdup(GST_TAG_IMAGE),
+                        (gpointer)WORKER_METADATA_KEY_RENDERER_ART_URI);
+
+    return hash_table;
+}
+
+/*
+ * Sets a value in the given context framework map, allocating the map when it
+ * is NULL.
+ */
+static gpointer _set_context_map_value(gpointer map,
+                                       const gchar *tag,
+                                       const gchar *value)
+{
+
+    if (map == NULL)
+    {
+        map = context_provider_map_new();
+    }
+
+    if (g_str_equal(tag, GST_TAG_LOCATION))
+    {
+        context_provider_map_set_string(map,
+                                        CONTEXT_PROVIDER_KEY_NOWPLAYING_RESOURCE,
+                                        value);
+    }
+    else if (g_str_equal(tag, GST_TAG_TITLE))
+    {
+        context_provider_map_set_string(map,
+                                        CONTEXT_PROVIDER_KEY_NOWPLAYING_TITLE,
+                                        value);
+    }
+    else if (g_str_equal(tag, GST_TAG_ARTIST))
+    {
+        context_provider_map_set_string(map,
+                                        CONTEXT_PROVIDER_KEY_NOWPLAYING_ARTIST,
+                                        value);
+    }
+    else if (g_str_equal(tag, GST_TAG_ALBUM))
+    {
+        context_provider_map_set_string(map,
+                                        CONTEXT_PROVIDER_KEY_NOWPLAYING_ALBUM,
+                                        value);
+    }
+    else if (g_str_equal(tag, GST_TAG_GENRE))
+    {
+        context_provider_map_set_string(map,
+                                        CONTEXT_PROVIDER_KEY_NOWPLAYING_GENRE,
+                                        value);
+    }
+
+    return map;
+
+}
+
+/*
+ * Emits metadata-changed signals for gst tags.
+ */
+static void _emit_tag(const GstTagList *list,
+                      const gchar *tag,
+                      MafwGstRendererWorker *worker)
+{
+    /* Mapping between Gst <-> MAFW metadata tags
+     * NOTE: This assumes that GTypes matches between GST and MAFW. */
+    static GHashTable *tagmap = NULL;
+    gint i, count;
+    gint mafwtag;
+    GType type;
+    GValueArray *values;
+
+    if (tagmap == NULL) {
+        tagmap = _build_tagmap();
+    }
+
+    g_debug("tag: '%s' (type: %s)", tag, g_type_name(gst_tag_get_type(tag)));
+    /* Is there a mapping for this tag? */
+    mafwtag = (gint)g_hash_table_lookup(tagmap, tag);
+    if (!mafwtag)
+        return;
+
+    if (mafwtag == WORKER_METADATA_KEY_RENDERER_ART_URI) {
+        _emit_renderer_art(worker, list);
+        return;
+    }
+
+    /* Build a value array of this tag.  We need to make sure that strings
+     * are UTF-8.  GstTagList API says that the value is always UTF8, but it
+     * looks like the ID3 demuxer still might sometimes produce non-UTF-8
+     * strings. */
+    count = gst_tag_list_get_tag_size(list, tag);
+
+    type = gst_tag_get_type(tag);
+    values = g_value_array_new(count);
+    for (i = 0; i < count; ++i) {
+        GValue *v = (GValue *)
+            gst_tag_list_get_value_index(list, tag, i);
+        if (type == G_TYPE_STRING) {
+            gchar *orig = NULL, *utf8 = NULL;
+
+            /* Skip this index if the tag string cannot be fetched. */
+            if (!gst_tag_list_get_string_index(list, tag, i, &orig)) {
+                continue;
+            }
+            if (convert_utf8(orig, &utf8)) {
+                GValue utf8gval;
+                memset(&utf8gval, 0, sizeof(utf8gval));
+
+                g_value_init(&utf8gval, G_TYPE_STRING);
+                g_value_take_string(&utf8gval, utf8);
+                g_value_array_append(values, &utf8gval);
+                g_value_unset(&utf8gval);
+            }
+            /* context framework adaptation starts */
+            worker->context_nowplaying =
+                _set_context_map_value(worker->context_nowplaying,
+                                       tag,
+                                       orig);
+            /* context framework adaptation ends */
+            g_free(orig);
+        } else if (type == G_TYPE_UINT) {
+            GValue intgval;
+            memset(&intgval, 0, sizeof(intgval));
+
+            g_value_init(&intgval, G_TYPE_INT);
+            g_value_transform(v, &intgval);
+            g_value_array_append(values, &intgval);
+            g_value_unset(&intgval);
+        } else {
+            g_value_array_append(values, v);
+        }
+    }
+
+    /* context framework adaptation starts */
+    context_provider_set_map(CONTEXT_PROVIDER_KEY_NOWPLAYING,
+                             worker->context_nowplaying, FALSE);
+    /* context framework adaptation ends */
+
+    /* Add the info to the current metadata. */
+    gboolean changed = _current_metadata_add(worker,
+                                             mafwtag,
+                                             G_TYPE_VALUE_ARRAY,
+                                             (const gpointer) values);
+
+    /* Emit the metadata. */
+    if (changed && worker->notify_metadata_handler)
+    {
+        worker->notify_metadata_handler(worker,
+                                        worker->owner,
+                                        mafwtag,
+                                        G_TYPE_VALUE_ARRAY,
+                                        values);
+    }
+
+    g_value_array_free(values);
+}
+
+/**
+ * Collects tag messages; they are parsed later, once playback is ongoing
+ */
+static void _handle_tag(MafwGstRendererWorker *worker, GstMessage *msg)
+{
+    /* Do not emit metadata until we get to PLAYING state to speed up playback
+     * start */
+    if (worker->tag_list == NULL)
+        worker->tag_list = g_ptr_array_new();
+    g_ptr_array_add(worker->tag_list, gst_message_ref(msg));
+
+    /* Some tags come in playing state, so in this case we have to emit them
+     * right away (example: radio stations) */
+    if (worker->state == GST_STATE_PLAYING) {
+        _emit_metadatas(worker);
+    }
+}
+
+/**
+ * Parses a single collected tag message
+ */
+static void _parse_tagmsg(GstMessage *msg, MafwGstRendererWorker *worker)
+{
+    GstTagList *new_tags;
+
+    gst_message_parse_tag(msg, &new_tags);
+    gst_tag_list_foreach(new_tags, (GstTagForeachFunc)_emit_tag, worker);
+    gst_tag_list_free(new_tags);
+    gst_message_unref(msg);
+}
+
+/**
+ * Parses the collected tag messages and emits the metadata
+ */
+static void _emit_metadatas(MafwGstRendererWorker *worker)
+{
+    if (worker->tag_list != NULL)
+    {
+        g_ptr_array_foreach(worker->tag_list, (GFunc)_parse_tagmsg, worker);
+        g_ptr_array_free(worker->tag_list, TRUE);
+        worker->tag_list = NULL;
+    }
+}
+
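+/*
+ * Overview (informal sketch of the tag handling above): GST_MESSAGE_TAG
+ * messages are queued in worker->tag_list by _handle_tag() and only flushed
+ * by _emit_metadatas() once the pipeline is PLAYING, so prerolling is not
+ * slowed down by metadata emission.  Roughly:
+ *
+ *     bus: GST_MESSAGE_TAG --> _handle_tag()   (queue, flush if PLAYING)
+ *                                   |
+ *                                   v
+ *                          _emit_metadatas() --> _parse_tagmsg() per message
+ *                                                    --> _emit_tag() per tag
+ */
+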
+static void _handle_buffering(MafwGstRendererWorker *worker, GstMessage *msg)
+{
+    /* We set a smaller buffer for MMS streams */
+    if((g_str_has_prefix(worker->media.location, STREAM_TYPE_MMSH) ||
+        g_str_has_prefix(worker->media.location, STREAM_TYPE_MMS))
+        && worker->state != GST_STATE_PLAYING && !worker->buffering)
+    {
+        if(worker->queue)
+        {
+            g_object_set(worker->queue, "high-percent", 30, NULL);
+        }
+        else
+        {
+            g_warning("Queue2 element doesn't exist!");
+        }
+    }
+
+    /* We can ignore buffering messages when we are in READY state or when going to it */
+    if(worker->state == GST_STATE_READY || worker->ready_timeout != 0 )
+    {
+        worker->buffering = TRUE;
+        return;
+    }
+
+    gint percent;
+    gst_message_parse_buffering(msg, &percent);
+    g_debug("buffering: %d", percent);
+
+    /* No state management needed for live pipelines */
+    if (!worker->is_live) {
+        worker->buffering = TRUE;
+        if (percent < 100 && worker->state == GST_STATE_PLAYING) {
+            /* If we need to buffer more, we set a larger buffer */
+            if(g_str_has_prefix(worker->media.location, STREAM_TYPE_MMSH) ||
+               g_str_has_prefix(worker->media.location, STREAM_TYPE_MMS))
+            {
+                if(worker->queue)
+                {
+                    g_object_set(worker->queue, "high-percent", 100, NULL);
+                }
+                else
+                {
+                    g_warning("Queue2 element doesn't exist!");
+                }
+            }
+            g_debug("setting pipeline to PAUSED not to wolf the buffer down");
+
+            /* if there are no requested state transitions (i.e. resume/pause), let's keep this quiet */
+            if( gst_element_get_state(worker->pipeline, NULL, NULL, 0) == GST_STATE_CHANGE_SUCCESS )
+            {
+                worker->report_statechanges = FALSE;
+            }
+
+            /* We can't call _pause() here, since it sets the
+             * "report_statechanges" to TRUE.  We don't want that, application
+             * doesn't need to know that internally the state changed to PAUSED.
+             */
+            gst_element_set_state(worker->pipeline, GST_STATE_PAUSED);
+        }
+
+        if (percent >= 100) {
+            /* On buffering we go to PAUSED, so here we move back to PLAYING */
+            worker->buffering = FALSE;
+            if (worker->state == GST_STATE_PAUSED)
+            {
+                /* If buffering more than once, do this only the first time we
+                 * are done with buffering */
+                if (worker->prerolling)
+                {
+                    g_debug("buffering concluded during prerolling");
+                    _finalize_startup(worker);
+                    _do_play(worker);
+                    /* Send the paused notification */
+                    if (worker->stay_paused &&
+                        worker->notify_pause_handler)
+                    {
+                        worker->notify_pause_handler(worker, worker->owner);
+                    }
+                    worker->prerolling = FALSE;
+                }
+                /* If EOS has been reached, no automatic playing should be
+                   done; resume only on request. E.g. EOS reached -> seek
+                   requested => stays paused after seek & buffering complete */
+                else if (!worker->stay_paused && !worker->eos)
+                {
+                    g_debug("buffering concluded, setting "
+                            "pipeline to PLAYING again");
+                    worker->report_statechanges = TRUE;
+                    gst_element_set_state(worker->pipeline, GST_STATE_PLAYING);
+                }
+            }
+            /* if there are no pending state changes and we're really in PLAYING state... */
+            else if (gst_element_get_state(worker->pipeline, NULL, NULL, 0) == GST_STATE_CHANGE_SUCCESS
+                       && worker->state == GST_STATE_PLAYING)
+            {
+                g_debug("buffering concluded, signalling state change");
+                /* In this case we got a PLAY command while buffering, likely
+                 * because it was issued before we got the first buffering
+                 * signal.  The UI should not do this, but if it does, we have
+                 * to signal that we have executed the state change, since in
+                 * _handle_state_changed we do not do anything if we are
+                 * buffering */
+                if (worker->report_statechanges &&
+                    worker->notify_play_handler) {
+                    worker->notify_play_handler(worker, worker->owner);
+                }
+                _add_duration_seek_query_timeout(worker);
+
+            }
+            /* nobody has requested to stay paused, but a transition to
+               PAUSED may still be pending */
+            else if( !worker->stay_paused )
+            {
+                /* we're in PLAYING but a PAUSED state change is pending;
+                   request a new state change to override the pause */
+                gst_element_set_state(worker->pipeline, GST_STATE_PLAYING);
+            }
+        }
+    }
+
+    /* Send buffer percentage */
+    if (worker->notify_buffer_status_handler)
+        worker->notify_buffer_status_handler(worker, worker->owner, percent);
+}
+
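+/*
+ * Informal summary of the buffering handling above (a reading of the code,
+ * not a specification):
+ *
+ *   percent < 100 while PLAYING  -> silently drop the pipeline to PAUSED so
+ *                                   the buffer is not consumed faster than it
+ *                                   fills (the state change is not reported).
+ *   percent >= 100 while PAUSED  -> go back to PLAYING, unless stay_paused or
+ *                                   EOS was reached; during prerolling this is
+ *                                   also where startup is finalized.
+ *   mms/mmsh streams             -> the queue2 "high-percent" is lowered to 30
+ *                                   for the initial fill and raised back to
+ *                                   100 when rebuffering during playback.
+ */
+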
+static void _handle_element_msg(MafwGstRendererWorker *worker, GstMessage *msg)
+{
+    /* Only HelixBin sends "resolution" messages. */
+    if (gst_structure_has_name(msg->structure, "resolution") &&
+        _handle_video_info(worker, msg->structure))
+    {
+        worker->media.has_visual_content = TRUE;
+        set_dolby_video_property(worker, worker->config->mobile_surround_video.state);
+        set_dolby_video_sound_property(worker, worker->config->mobile_surround_video.room, TRUE);
+        set_dolby_video_sound_property(worker, worker->config->mobile_surround_video.color, FALSE);
+    }
+    /* We do RTSP redirect when we try to play .sdp streams */
+    else if(gst_structure_has_name(msg->structure, "redirect"))
+    {
+        /* "new-location" contains the rtsp uri what we are going to play */
+        mafw_gst_renderer_worker_play(worker, gst_structure_get_string(msg->structure, "new-location"));
+    }
+
+}
+
+static GError * _get_specific_missing_plugin_error(GstMessage *msg)
+{
+    const GstStructure *gst_struct;
+    const gchar *type;
+
+    GError *error;
+    gchar *desc;
+
+    desc = gst_missing_plugin_message_get_description(msg);
+
+    gst_struct = gst_message_get_structure(msg);
+    type = gst_structure_get_string(gst_struct, "type");
+
+    if ((type) && ((strcmp(type, MAFW_GST_MISSING_TYPE_DECODER) == 0) ||
+                   (strcmp(type, MAFW_GST_MISSING_TYPE_ENCODER) == 0))) {
+
+        /* Missing codec error. */
+        const GValue *val;
+        const GstCaps *caps;
+        GstStructure *caps_struct;
+        const gchar *mime;
+
+        val = gst_structure_get_value(gst_struct, "detail");
+        caps = gst_value_get_caps(val);
+        caps_struct = gst_caps_get_structure(caps, 0);
+        mime = gst_structure_get_name(caps_struct);
+
+        if (g_strrstr(mime, "video")) {
+            error = g_error_new_literal(WORKER_ERROR,
+                                        WORKER_ERROR_VIDEO_CODEC_NOT_FOUND,
+                                        desc);
+        } else if (g_strrstr(mime, "audio")) {
+            error = g_error_new_literal(WORKER_ERROR,
+                                        WORKER_ERROR_AUDIO_CODEC_NOT_FOUND,
+                                        desc);
+        } else {
+            error = g_error_new_literal(WORKER_ERROR,
+                                        WORKER_ERROR_CODEC_NOT_FOUND,
+                                        desc);
+        }
+    } else {
+        /* Unsupported type error. */
+        error = g_error_new(WORKER_ERROR,
+                            WORKER_ERROR_UNSUPPORTED_TYPE,
+                            "missing plugin: %s", desc);
+    }
+
+    g_free(desc);
+
+    return error;
+}
+
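+/*
+ * Mapping used above (informal): a missing "decoder"/"encoder" plugin is
+ * turned into WORKER_ERROR_VIDEO_CODEC_NOT_FOUND or
+ * WORKER_ERROR_AUDIO_CODEC_NOT_FOUND depending on whether the "detail" caps'
+ * MIME type contains "video" or "audio" (WORKER_ERROR_CODEC_NOT_FOUND
+ * otherwise); any other missing plugin becomes WORKER_ERROR_UNSUPPORTED_TYPE.
+ */
+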
+/*
+ * Asynchronous message handler.  It gets removed from the bus watch if it
+ * returns FALSE.
+ */
+static gboolean _async_bus_handler(GstBus *bus,
+                                   GstMessage *msg,
+                                   MafwGstRendererWorker *worker)
+{
+
+    UNUSED(bus);
+
+    /* No need to handle the message if an error has already occurred. */
+    if (worker->is_error)
+        return TRUE;
+
+    /* Handle missing-plugin (element) messages separately, relaying more
+     * details. */
+    if (gst_is_missing_plugin_message(msg)) {
+        GError *err = _get_specific_missing_plugin_error(msg);
+        /* FIXME?: for some reason, calling the error handler directly
+         * (_send_error) causes problems.  On the other hand, turning
+         * the error into a new GstMessage and letting the next
+         * iteration handle it seems to work. */
+        _post_error(worker, err);
+        return TRUE;
+    }
+    switch (GST_MESSAGE_TYPE(msg)) {
+        case GST_MESSAGE_ERROR:
+            if (!worker->is_error) {
+                gchar *debug;
+                GError *err;
+                debug = NULL;
+                gst_message_parse_error(msg, &err, &debug);
+                g_debug("gst error: domain = %s, code = %d, "
+                        "message = '%s', debug = '%s'",
+                        g_quark_to_string(err->domain), err->code, err->message, debug);
+                if (debug)
+                {
+                    g_free(debug);
+                }
+                /* decodebin2 uses this error only to report text files */
+                if (err->code == GST_STREAM_ERROR_WRONG_TYPE &&
+                    g_str_has_prefix(GST_MESSAGE_SRC_NAME(msg), "decodebin2"))
+                {
+                    err->code = WORKER_ERROR_POSSIBLY_PLAYLIST_TYPE;
+                }
+
+                _send_error(worker, err);
+            }
+            break;
+        case GST_MESSAGE_EOS:
+            if (!worker->is_error) {
+                worker->eos = TRUE;
+                worker->seek_position = -1;
+                if (worker->notify_eos_handler)
+                {
+                    worker->notify_eos_handler(worker, worker->owner);
+                }
+            }
+            break;
+        case GST_MESSAGE_TAG:
+            _handle_tag(worker, msg);
+            break;
+        case GST_MESSAGE_BUFFERING:
+            _handle_buffering(worker, msg);
+            break;
+        case GST_MESSAGE_DURATION:
+            /* in ready state we might not get correct seekability info */
+            if (!worker->in_ready)
+            {
+                _handle_duration(worker);
+            }
+            break;
+        case GST_MESSAGE_ELEMENT:
+            _handle_element_msg(worker, msg);
+            break;
+        case GST_MESSAGE_STATE_CHANGED:
+            if ((GstElement *)GST_MESSAGE_SRC(msg) == worker->pipeline)
+                _handle_state_changed(msg, worker);
+            break;
+        case GST_MESSAGE_APPLICATION:
+            if (gst_structure_has_name(gst_message_get_structure(msg), "ckey"))
+            {
+                _emit_property(worker,
+                               WORKER_PROPERTY_COLORKEY,
+                               G_TYPE_INT,
+                               &worker->colorkey);
+            }
+            break;
+        default:
+            break;
+    }
+    return TRUE;
+}
+
+/* NOTE this function will possibly be called from a different thread than the
+ * glib main thread. */
+static void _stream_info_cb(GstObject *pipeline, GParamSpec *unused,
+                            MafwGstRendererWorker *worker)
+{
+    UNUSED(pipeline);
+    UNUSED(unused);
+
+    g_debug("stream-info changed");
+    _parse_stream_info(worker);
+}
+
+static void _element_added_cb(GstBin *bin, GstElement *element,
+                              MafwGstRendererWorker *worker)
+{
+    UNUSED(bin);
+    gchar *element_name;
+
+    element_name = gst_element_get_name(element);
+    if(g_str_has_prefix(element_name, "uridecodebin") ||
+       g_str_has_prefix(element_name, "decodebin2"))
+    {
+        g_signal_connect(element, "element-added",
+                         G_CALLBACK(_element_added_cb), worker);
+    }
+    else if(g_str_has_prefix(element_name, "sdpdemux"))
+    {
+        g_object_set(element, "redirect", FALSE, NULL);
+    }
+    else if(g_str_has_prefix(element_name, "queue2"))
+    {
+        worker->queue = element;
+    }
+    g_free(element_name);
+}
+
+/*
+ * Start to play the media
+ */
+static void _start_play(MafwGstRendererWorker *worker)
+{
+    GstStateChangeReturn state_change_info;
+    gchar *autoload_sub = NULL;
+
+    worker->stay_paused = FALSE;
+
+    g_assert(worker->pipeline);
+    g_object_set(G_OBJECT(worker->pipeline),
+                 "uri", worker->media.location, NULL);
+
+    if (worker->config->autoload_subtitles) {
+        autoload_sub = uri_get_subtitle_uri(worker->media.location);
+        if (autoload_sub) {
+            g_debug("SUBURI: %s", autoload_sub);
+            g_object_set(G_OBJECT(worker->pipeline),
+                         "suburi", autoload_sub,
+                         "subtitle-font-desc", worker->config->subtitle_font,
+                         "subtitle-encoding", worker->config->subtitle_encoding,
+                         NULL);
+
+            gst_element_set_state(worker->pipeline, GST_STATE_READY);
+            g_free(autoload_sub);
+        }
+    } else {
+        g_object_set(G_OBJECT(worker->pipeline), "suburi", NULL, NULL);
+    }
+
+    g_debug("URI: %s", worker->media.location);
+    g_debug("setting pipeline to PAUSED");
+
+    worker->report_statechanges = TRUE;
+    state_change_info = gst_element_set_state(worker->pipeline,
+                                              GST_STATE_PAUSED);
+    if (state_change_info == GST_STATE_CHANGE_NO_PREROLL) {
+        /* FIXME:  for live sources we may have to handle buffering and
+         * prerolling differently */
+        g_debug ("Source is live!");
+        worker->is_live = TRUE;
+    }
+    worker->prerolling = TRUE;
+
+    worker->is_stream = uri_is_stream(worker->media.location);
+
+}
+
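+/*
+ * Example (an assumption, for illustration only): with autoload_subtitles
+ * enabled, uri_get_subtitle_uri() is expected to map a media URI to a sibling
+ * subtitle file, e.g. "file:///home/user/clip.avi" ->
+ * "file:///home/user/clip.srt", which _start_play() then feeds to playbin2
+ * through the "suburi" property together with the configured font and
+ * encoding.
+ */
+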
+/*
+ * Constructs gst pipeline
+ *
+ * FIXME: Could the same pipeline be used for playing all media instead of
+ *  constantly deleting and reconstructing it again?
+ */
+static void _construct_pipeline(MafwGstRendererWorker *worker, configuration *config)
+{
+    g_debug("constructing pipeline");
+    g_assert(worker != NULL);
+
+    /* Return if we have already one */
+    if (worker->pipeline)
+        return;
+
+    _free_taglist(worker);
+
+    g_debug("Creating a new instance of playbin2");
+    worker->pipeline = gst_element_factory_make("playbin2", "playbin");
+    if (worker->pipeline == NULL)
+    {
+        /* Let's try with playbin */
+        g_warning ("playbin2 failed, falling back to playbin");
+        worker->pipeline = gst_element_factory_make("playbin", "playbin");
+
+        if (worker->pipeline) {
+            /* Use nwqueue only for non-rtsp and non-mms(h) streams. */
+            gboolean use_nw;
+            use_nw = worker->media.location &&
+                !g_str_has_prefix(worker->media.location, "rtsp://") &&
+                !g_str_has_prefix(worker->media.location, "mms://") &&
+                !g_str_has_prefix(worker->media.location, "mmsh://");
+
+            g_debug("playbin using network queue: %d", use_nw);
+
+            gst_object_ref_sink(worker->pipeline);
+            /* These need a modified version of playbin. */
+            g_object_set(G_OBJECT(worker->pipeline),
+                         "nw-queue", use_nw,
+                         "no-video-transform", TRUE,
+                         NULL);
+
+        }
+    }
+
+    if (!worker->pipeline) {
+        g_critical("failed to create playback pipeline");
+        _send_error(worker,
+                    g_error_new(WORKER_ERROR,
+                                WORKER_ERROR_UNABLE_TO_PERFORM,
+                                "Could not create pipeline"));
+        g_assert_not_reached();
+    }
+
+    worker->bus = gst_pipeline_get_bus(GST_PIPELINE(worker->pipeline));
+    gst_bus_set_sync_handler(worker->bus,
+                             (GstBusSyncHandler)_sync_bus_handler,
+                             worker);
+    worker->async_bus_id = gst_bus_add_watch_full(
+        worker->bus,G_PRIORITY_HIGH,
+        (GstBusFunc)_async_bus_handler,
+        worker, NULL);
+
+    /* Listen for changes in the stream-info object to find out whether the
+     * media contains video, and throw an error if the application has not
+     * provided a video window. */
+    g_signal_connect(worker->pipeline, "notify::stream-info",
+                     G_CALLBACK(_stream_info_cb), worker);
+
+    g_signal_connect(worker->pipeline, "element-added",
+                     G_CALLBACK(_element_added_cb), worker);
+
+    /* Set audio and video sinks ourselves. We create and configure them only
+     * once. */
+    if (!worker->asink) {
+        const gchar *sink = g_getenv("AUDIO_SINK");
+        worker->asink = gst_element_factory_make(sink?sink: worker->config->asink, NULL);
+        if (!worker->asink){
+            worker->asink = gst_element_factory_make("alsasink", NULL);
+        }
+        if (!worker->asink) {
+            g_critical("Failed to create pipeline audio sink");
+            _send_error(worker,
+                        g_error_new(WORKER_ERROR,
+                                    WORKER_ERROR_UNABLE_TO_PERFORM,
+                                    "Could not create audio sink"));
+            g_assert_not_reached();
+        }
+        g_debug("MafwGstRendererWorker: Using following buffer-time: %lld and latency-time: %lld",
+                config->buffer_time,
+                config->latency_time);
+        gst_object_ref_sink(worker->asink);
+        g_object_set(worker->asink,
+                     "buffer-time", config->buffer_time,
+                     "latency-time", config->latency_time,
+                     NULL);
+    }
+
+    if (worker->config->use_dhmmixer && !worker->amixer)
+    {
+        worker->amixer = gst_element_factory_make("nokiadhmmix", NULL);
+        if( !worker->amixer )
+        {
+            g_warning("Could not create dhmmixer, falling back to basic audiosink!");
+        }
+    }
+
+    if( worker->config->use_dhmmixer && worker->amixer && !worker->audiobin )
+    {
+        worker->audiobin = gst_bin_new("audiobin");
+        if( worker->audiobin )
+        {
+            gst_bin_add(GST_BIN (worker->audiobin), worker->amixer);
+            gst_bin_add(GST_BIN (worker->audiobin), worker->asink);
+            gst_element_link(worker->amixer, worker->asink);
+            GstPad *pad;
+            pad = gst_element_get_static_pad (worker->amixer, "sink");
+            gst_element_add_pad (worker->audiobin, gst_ghost_pad_new ("sink", pad));
+            gst_object_unref (GST_OBJECT (pad));
+
+            gst_object_ref(worker->audiobin);
+
+            /* Use Dolby music settings by default */
+            set_dolby_music_property(worker, worker->config->mobile_surround_music.state);
+            set_dolby_music_sound_property(worker, worker->config->mobile_surround_music.room, TRUE);
+            set_dolby_music_sound_property(worker, worker->config->mobile_surround_music.color, FALSE);
+        }
+        else
+        {
+            gst_object_ref_sink(worker->asink);
+            gst_object_sink(worker->amixer);
+            g_warning("Could not create audiobin! Falling back to basic audio-sink!");
+        }
+    }
+
+
+    if( worker->config->use_dhmmixer && worker->amixer && worker->audiobin )
+    {
+        g_object_set(worker->pipeline,
+                 "audio-sink", worker->audiobin,
+                 "flags", worker->config->flags,
+                  NULL);
+    }
+    else
+    {
+        g_object_set(worker->pipeline,
+                 "audio-sink", worker->asink,
+                 "flags", worker->config->flags,
+                  NULL);
+    }
+
+    if( worker->pipeline )
+    {
+        mafw_gst_renderer_seeker_set_pipeline(worker->seeker, worker->pipeline);
+
+        if( worker->vsink && worker->xid != 0 )
+        {
+            g_object_set(worker->pipeline,
+                         "video-sink", worker->vsink,
+                         NULL);
+        }
+    }
+
+    if (!worker->tsink) {
+        worker->tsink = gst_element_factory_make("textoverlay", NULL);
+        if (!worker->tsink) {
+            g_critical("Failed to create pipeline text sink");
+            _send_error(worker,
+                        g_error_new(WORKER_ERROR,
+                                    WORKER_ERROR_UNABLE_TO_PERFORM,
+                                    "Could not create text sink"));
+            g_assert_not_reached();
+        }
+        gst_object_ref(worker->tsink);
+    }
+    g_object_set(worker->pipeline, "text-sink", worker->tsink, NULL);
+}
+
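+/*
+ * Resulting topology (a sketch of what _construct_pipeline() sets up):
+ *
+ *   playbin2 "playbin"
+ *     audio-sink = audiobin ( nokiadhmmix ! <configured audio sink> ),
+ *                  or just the configured audio sink if dhmmixer is unused
+ *     video-sink = <configured video sink>, attached once an XID is known
+ *     text-sink  = textoverlay
+ *
+ * The bus gets a sync handler (_sync_bus_handler) and a high-priority async
+ * watch (_async_bus_handler) for message processing.
+ */
+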
+guint check_dolby_audioroute(MafwGstRendererWorker *worker, guint prop) {
+    if (g_slist_find(worker->destinations,
+                     GINT_TO_POINTER(WORKER_OUTPUT_BLUETOOTH_AUDIO)) ||
+        g_slist_find(worker->destinations,
+                     GINT_TO_POINTER(WORKER_OUTPUT_HEADPHONE_JACK)))
+    {
+        return prop;
+    }
+    else
+    {
+        return 0;
+    }
+}
+
+void set_dolby_music_property(MafwGstRendererWorker *worker, guint prop) {
+    worker->config->mobile_surround_music.state = prop;
+    if (worker->amixer && !worker->media.has_visual_content) {
+        GValue a;
+        guint tempprop = check_dolby_audioroute(worker, prop);
+        if (_set_value(&a, G_TYPE_UINT, &tempprop))
+        {
+            g_object_set_property(G_OBJECT(worker->amixer), "mobile-surround", &a);
+            g_value_unset (&a);
+        }
+    }
+}
+
+void set_dolby_music_sound_property(MafwGstRendererWorker *worker, gint prop, gboolean isRoomProperty) {
+    if (isRoomProperty) {
+        worker->config->mobile_surround_music.room = prop;
+    } else {
+        worker->config->mobile_surround_music.color = prop;
+    }
+    if (worker->amixer && !worker->media.has_visual_content) {
+        GValue a;
+
+        if (_set_value(&a, G_TYPE_UINT, &prop))
+        {
+            if (isRoomProperty) {
+                g_object_set_property(G_OBJECT(worker->amixer), "room-size", &a);
+            } else {
+                g_object_set_property(G_OBJECT(worker->amixer), "brightness", &a);
+            }
+            g_value_unset (&a);
+        }
+    }
+}
+
+void set_dolby_video_property(MafwGstRendererWorker *worker, guint prop) {
+    worker->config->mobile_surround_video.state = prop;
+    if (worker->amixer && worker->media.has_visual_content) {
+        GValue a;
+        guint tempprop = check_dolby_audioroute(worker, prop);
+        if (_set_value(&a, G_TYPE_UINT, &tempprop))
+        {
+            g_object_set_property(G_OBJECT(worker->amixer), "mobile-surround", &a);
+            g_value_unset (&a);
+        }
+    }
+}
+
+void set_dolby_video_sound_property(MafwGstRendererWorker *worker, gint prop, gboolean isRoomProperty) {
+    if (isRoomProperty) {
+        worker->config->mobile_surround_video.room = prop;
+    } else {
+        worker->config->mobile_surround_video.color = prop;
+    }
+    if (worker->amixer && worker->media.has_visual_content) {
+        GValue a;
+
+        if (_set_value(&a, G_TYPE_UINT, &prop))
+        {
+            if (isRoomProperty) {
+                g_object_set_property(G_OBJECT(worker->amixer), "room-size", &a);
+            } else {
+                g_object_set_property(G_OBJECT(worker->amixer), "brightness", &a);
+            }
+            g_value_unset (&a);
+        }
+    }
+}
+
+/*
+ * @seek_type: GstSeekType
+ * @position: Position in seconds to seek to
+ * @key_frame_seek: TRUE if this is a key-frame-based seek
+ * @error: Return location for a possible error
+ */
+static void _do_seek(MafwGstRendererWorker *worker,
+                     GstSeekType seek_type,
+                     gint position,
+                     gboolean key_frame_seek,
+                     GError **error)
+{
+    gboolean ret;
+    gint64 spos;
+    GstSeekFlags flags = GST_SEEK_FLAG_FLUSH;
+
+    g_assert(worker != NULL);
+
+    if (worker->media.seekable != SEEKABILITY_SEEKABLE)
+    {
+        goto err;
+    }
+
+    /* According to the docs, relative seeking is not so easy:
+    GST_SEEK_TYPE_CUR - change relative to currently configured segment.
+    This can't be used to seek relative to the current playback position -
+    do a position query, calculate the desired position and then do an
+    absolute position seek instead if that's what you want to do. */
+    if (seek_type == GST_SEEK_TYPE_CUR)
+    {
+        gint curpos = mafw_gst_renderer_worker_get_position(worker);
+        position = curpos + position;
+        seek_type = GST_SEEK_TYPE_SET;
+    }
+
+    if (position < 0) {
+        position = 0;
+    }
+
+    worker->seek_position = position;
+
+    if (worker->state != GST_STATE_PLAYING && worker->state != GST_STATE_PAUSED )
+    {
+        g_debug("_do_seek: Not in playing or paused state, seeking delayed.");
+        return;
+    }
+    else if( worker->is_live && worker->state == GST_STATE_PAUSED )
+    {
+        g_debug("_do_seek: Live source can be seeked only in playing state, seeking delayed!");
+        return;
+    }
+
+    worker->report_statechanges = FALSE;
+
+    if (key_frame_seek == TRUE)
+    {
+        /* tries to do key frame seeks at least with some change */
+        ret = mafw_gst_renderer_seeker_seek_to(worker->seeker, worker->seek_position);
+    }
+    else
+    {
+        spos = (gint64)position * GST_SECOND;
+        g_debug("seek: type = %d, offset = %lld", seek_type, spos);
+
+        /* exact seek */
+        ret = gst_element_seek(worker->pipeline,
+                               1.0,
+                               GST_FORMAT_TIME,
+                               flags,
+                               seek_type,
+                               spos,
+                               GST_SEEK_TYPE_NONE,
+                               GST_CLOCK_TIME_NONE);
+    }
+
+    if (ret)
+    {
+        /* Seeking is async, so seek_position should not be invalidated here */
+        return;
+    }
+
+err:
+    g_set_error(error,
+                WORKER_ERROR,
+                WORKER_ERROR_CANNOT_SET_POSITION,
+                "Seeking to %d failed", position);
+    worker->report_statechanges = TRUE;
+    worker->seek_position = -1;
+    mafw_gst_renderer_seeker_cancel(worker->seeker);
+}
+
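+/*
+ * Example of the relative-seek conversion in _do_seek(): with the stream at
+ * 100 s, a GST_SEEK_TYPE_CUR request with position = +15 is turned into an
+ * absolute GST_SEEK_TYPE_SET seek to 115 s; with position = -200 the result
+ * is clamped to 0 s.  The exact seek is then performed at
+ * position * GST_SECOND nanoseconds with GST_SEEK_FLAG_FLUSH.
+ */
+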
+void mafw_gst_renderer_worker_set_current_frame_on_pause(
+    MafwGstRendererWorker *worker,
+    gboolean current_frame_on_pause)
+{
+
+    worker->current_frame_on_pause = current_frame_on_pause;
+
+    _emit_property(worker,
+                   WORKER_PROPERTY_CURRENT_FRAME_ON_PAUSE,
+                   G_TYPE_BOOLEAN,
+                   &worker->current_frame_on_pause);
+}
+
+gboolean mafw_gst_renderer_worker_get_current_frame_on_pause(
+    MafwGstRendererWorker *worker)
+{
+    return worker->current_frame_on_pause;
+}
+
+configuration *mafw_gst_renderer_worker_create_default_configuration(MafwGstRendererWorker *worker)
+{
+    UNUSED(worker);
+    return _create_default_configuration();
+}
+
+void mafw_gst_renderer_worker_set_configuration(MafwGstRendererWorker *worker,
+                                                configuration *config)
+{
+    if( config == NULL )
+    {
+        g_warning("NULL config was tried to be set!");
+        return;
+    }
+
+    if( worker->config )
+    {
+        _free_configuration(worker->config);
+    }
+    worker->config = config;
+
+    if( (worker->pipeline == NULL)
+        || (worker->state == GST_STATE_NULL && gst_element_get_state(worker->pipeline, NULL, NULL, 0) == GST_STATE_CHANGE_SUCCESS) )
+    {
+        _reset_pipeline_and_worker(worker);
+        _construct_pipeline(worker, worker->config);
+    }
+}
+
+/*
+ * Sets the pipeline PAUSED-to-READY timeout to the given value (in seconds).
+ * If the pipeline is already in PAUSED state and this is called with a zero
+ * value, the pipeline gets set to READY state immediately.
+ */
+void mafw_gst_renderer_worker_set_ready_timeout(MafwGstRendererWorker *worker,
+                                                guint seconds)
+{
+    g_debug(G_STRFUNC);
+
+    worker->config->seconds_to_pause_to_ready = seconds;
+
+    /* zero is a special case: if we are already in PAUSED state, a pending
+     * ready timeout has not yet elapsed and we are asked to set the timeout
+     * value to zero --> remove the pending timeout and go immediately to
+     * READY.  This forces GStreamer to release all pipeline resources while
+     * to the outside it looks like we are still in the PAUSED state. */
+    if (seconds == 0 && worker->ready_timeout && worker->state == GST_STATE_PAUSED)
+    {
+        _remove_ready_timeout(worker);
+        _add_ready_timeout(worker);
+    }
+
+}
+
+void mafw_gst_renderer_worker_set_position(MafwGstRendererWorker *worker,
+                                           GstSeekType seek_type,
+                                           gint position, GError **error)
+{
+    _do_seek(worker, seek_type, position, TRUE, error);
+    if (worker->notify_seek_handler)
+        worker->notify_seek_handler(worker, worker->owner);
+}
+
+static gint64 _get_duration(MafwGstRendererWorker *worker)
+{
+    gint64 value = DURATION_UNQUERIED;
+    GstFormat format = GST_FORMAT_TIME;
+
+    gboolean right_query = gst_element_query_duration(worker->pipeline, &format, &value);
+    if( !right_query )
+    {
+        /* just in case gstreamer messes with the value */
+        value = DURATION_UNQUERIED;
+    }
+    return value;
+}
+
+/*
+ * Gets the current position, rounded down to one-second precision.  If a seek
+ * is pending, returns the position we are going to seek to.  Returns -1 on
+ * failure.
+ */
+gint mafw_gst_renderer_worker_get_position(MafwGstRendererWorker *worker)
+{
+    GstFormat format;
+    gint64 time = 0;
+    g_assert(worker != NULL);
+
+    /* If seek is ongoing, return the position where we are seeking. */
+    if (worker->seek_position != -1)
+    {
+        return worker->seek_position;
+    }
+
+    /* Otherwise query position from pipeline. */
+    format = GST_FORMAT_TIME;
+    if (worker->pipeline &&
+        gst_element_query_position(worker->pipeline, &format, &time))
+    {
+        return (gint)(time / GST_SECOND);
+    }
+    /* let's return the duration if we're in EOS and the pipeline cannot return a position */
+    else if( worker->pipeline && worker->eos )
+    {
+        gint64 duration = _get_duration(worker);
+        if( duration > 0 )
+        {
+            return (gint)(duration / GST_SECOND);
+        }
+    }
+    return -1;
+}
+
+/*
+ * Returns the duration of the current media in seconds
+ */
+gint64 mafw_gst_renderer_worker_get_duration(MafwGstRendererWorker *worker)
+{
+    gint64 duration = _get_duration(worker);
+    if( duration >= 0 )
+    {
+        gint64 second_precision = (duration + (GST_SECOND/2)) / GST_SECOND;
+
+        if( !_seconds_duration_equal(duration, worker->media.length_nanos) )
+        {            
+            worker->media.length_nanos = duration;
+
+            if( _current_metadata_add(worker,
+                                      WORKER_METADATA_KEY_DURATION,
+                                      G_TYPE_INT64,
+                                      (const gpointer)&second_precision) )
+            {
+                _emit_metadata(worker,
+                               WORKER_METADATA_KEY_DURATION,
+                               G_TYPE_INT64,
+                               &second_precision);
+            }
+        }
+        return second_precision;
+    }
+    else
+    {
+        return -1;
+    }
+}
+
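+/*
+ * Rounding example for mafw_gst_renderer_worker_get_duration(): a queried
+ * duration of 4.6 s (4600000000 ns) becomes
+ * (4600000000 + 500000000) / 1000000000 = 5 s in integer arithmetic, i.e. the
+ * value is rounded to the nearest whole second before being cached and
+ * emitted as WORKER_METADATA_KEY_DURATION.
+ */
+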
+gint64 mafw_gst_renderer_worker_get_last_known_duration(MafwGstRendererWorker *worker)
+{
+    if( worker->media.length_nanos <= 0 )
+    {
+        return worker->media.length_nanos;
+    }
+    else
+    {
+        return (worker->media.length_nanos + (GST_SECOND/2)) / GST_SECOND;
+    }
+}
+
+GHashTable *mafw_gst_renderer_worker_get_current_metadata(
+    MafwGstRendererWorker *worker)
+{
+    return worker->current_metadata;
+}
+
+void mafw_gst_renderer_worker_set_xid(MafwGstRendererWorker *worker, XID xid)
+{
+    /* Store the target window id */
+    g_debug("Setting xid: %x", (guint)xid);
+    worker->xid = xid;
+
+    if( !worker->vsink )
+    {
+        g_debug("Creating video-sink as XID has been set, %s", worker->config->vsink);
+        worker->vsink = gst_element_factory_make(worker->config->vsink, NULL);
+        if (!worker->vsink) {
+            worker->vsink = gst_element_factory_make("xvimagesink", NULL);
+        }
+        if (!worker->vsink) {
+            g_critical("Failed to create pipeline video sink");
+            _send_error(worker,
+                        g_error_new(WORKER_ERROR,
+                                    WORKER_ERROR_UNABLE_TO_PERFORM,
+                                    "Could not create video sink"));
+            g_assert_not_reached();
+        }
+        gst_object_ref_sink(worker->vsink);
+
+        /* colorkey setup, plus xvimagesink-specific tuning */
+        {
+            gchar* element_name = gst_element_get_name(worker->vsink);
+            g_object_set(G_OBJECT(worker->vsink),
+                         "colorkey", 0x080810,
+                         NULL);
+            if (g_str_has_prefix(element_name, "xvimagesink"))
+            {
+                g_object_set(G_OBJECT(worker->vsink),
+                             "handle-events", TRUE,
+                             "force-aspect-ratio", TRUE,
+                             NULL);
+            }
+            g_free(element_name);
+        }
+
+        //do not dare to set video-sink in any other state
+        if( worker->pipeline && worker->state == GST_STATE_NULL )
+        {
+             g_object_set(worker->pipeline,
+                          "video-sink", worker->vsink,
+                          NULL);
+        }
+    }
+
+    /* We don't want to set XID to video sink here when in READY state, because
+       it prevents "prepare-xwindow-id" message. Setting it when we are
+       PAUSED or PLAYING is fine, because we already got "prepare-xwindow-id". */
+    if(worker->state == GST_STATE_PLAYING ||
+       worker->state == GST_STATE_PAUSED)
+    {
+        /* Check if we should use it right away */
+        mafw_gst_renderer_worker_apply_xid(worker);
+    }
+
+    _emit_property(worker, WORKER_PROPERTY_XID, G_TYPE_UINT, &worker->xid);
+
+}
+
+XID mafw_gst_renderer_worker_get_xid(MafwGstRendererWorker *worker)
+{
+    return worker->xid;
+}
+
+void mafw_gst_renderer_worker_set_render_rectangle(MafwGstRendererWorker *worker, render_rectangle *rect)
+{
+    /* Store the target window id */
+    g_debug("Setting render rectangle: X:%d,Y:%d  Width:%d, Height:%d",
+            rect->x, rect->y, rect->width, rect->height);
+
+    worker->x_overlay_rectangle.x = rect->x;
+    worker->x_overlay_rectangle.y = rect->y;
+    worker->x_overlay_rectangle.width = rect->width;
+    worker->x_overlay_rectangle.height = rect->height;
+
+    /* Check if we should use it right away */
+    mafw_gst_renderer_worker_apply_render_rectangle(worker);
+
+    GValueArray *rect_array = g_value_array_new(4);
+    GValue x;
+    GValue y;
+    GValue width;
+    GValue height;
+
+    _set_value(&x, G_TYPE_INT, &(rect->x));
+    _set_value(&y, G_TYPE_INT, &(rect->y));
+    _set_value(&width, G_TYPE_INT, &(rect->width));
+    _set_value(&height, G_TYPE_INT, &(rect->height));
+
+    g_value_array_insert(rect_array, 0, &x );
+    g_value_array_insert(rect_array, 1, &y );
+    g_value_array_insert(rect_array, 2, &width );
+    g_value_array_insert(rect_array, 3, &height );
+
+    GValue value;
+    memset(&value, 0, sizeof(value));
+    g_value_init(&value, G_TYPE_VALUE_ARRAY);
+    g_value_take_boxed(&value, rect_array);
+
+    _emit_property(worker, WORKER_PROPERTY_RENDER_RECTANGLE, G_TYPE_VALUE_ARRAY, &value);
+
+    g_value_unset(&value);
+}
+
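+/*
+ * Usage sketch (illustrative values only): a UI embedding video into a
+ * sub-area of its window would pass the area in window coordinates, e.g.
+ *
+ *     render_rectangle rect = { .x = 0, .y = 60, .width = 854, .height = 420 };
+ *     mafw_gst_renderer_worker_set_render_rectangle(worker, &rect);
+ *
+ * The rectangle is stored, applied to the X overlay when possible, and also
+ * emitted as the WORKER_PROPERTY_RENDER_RECTANGLE property (x, y, width,
+ * height as a GValueArray of four ints).
+ */
+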
+const render_rectangle* mafw_gst_renderer_worker_get_render_rectangle(MafwGstRendererWorker *worker)
+{
+    return &worker->x_overlay_rectangle;
+}
+
+gboolean mafw_gst_renderer_worker_get_autopaint(MafwGstRendererWorker *worker)
+{
+    return worker->autopaint;
+}
+
+void mafw_gst_renderer_worker_set_autopaint(MafwGstRendererWorker *worker,
+                                            gboolean autopaint)
+{
+    /* TODO Is this a bug or a feature? */
+    worker->autopaint = autopaint;
+    if (worker->vsink)
+        g_object_set(worker->vsink, "autopaint-colorkey", autopaint, NULL);
+
+    _emit_property(worker,
+                   WORKER_PROPERTY_AUTOPAINT,
+                   G_TYPE_BOOLEAN,
+                   &autopaint);
+
+}
+
+gboolean mafw_gst_renderer_worker_set_playback_speed(MafwGstRendererWorker* worker,
+                                                     gfloat speed)
+{
+    gboolean retVal = FALSE;
+
+    if (worker->state == GST_STATE_PLAYING)
+    {
+        worker->playback_speed = speed;
+
+        gint64 current_position;
+        GstFormat format = GST_FORMAT_TIME;
+
+        if (worker->pipeline && gst_element_query_position(worker->pipeline,
+                                                           &format,
+                                                           &current_position))
+        {
+
+            retVal = gst_element_seek(worker->pipeline,
+                                      speed,
+                                      GST_FORMAT_DEFAULT,
+                                      GST_SEEK_FLAG_SKIP | GST_SEEK_FLAG_KEY_UNIT,
+                                      GST_SEEK_TYPE_NONE,
+                                      current_position,
+                                      GST_SEEK_TYPE_NONE,
+                                      GST_CLOCK_TIME_NONE);
+
+            if(retVal)
+            {
+                _emit_property(worker,
+                               WORKER_PROPERTY_PLAYBACK_SPEED,
+                               G_TYPE_FLOAT,
+                               &speed);
+            }
+        }
+    }
+
+    return retVal;
+}
+
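+/*
+ * Note on the rate change above: the speed is applied with a non-flushing
+ * gst_element_seek() that only changes the rate (both seek types are
+ * GST_SEEK_TYPE_NONE), using GST_SEEK_FLAG_SKIP | GST_SEEK_FLAG_KEY_UNIT for
+ * trick-mode playback.  It is only attempted while the pipeline is PLAYING,
+ * e.g. mafw_gst_renderer_worker_set_playback_speed(worker, 2.0) for 2x.
+ */
+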
+gfloat mafw_gst_renderer_worker_get_playback_speed(MafwGstRendererWorker* worker)
+{
+    return worker->playback_speed;
+}
+
+void mafw_gst_renderer_worker_set_force_aspect_ratio(MafwGstRendererWorker *worker, gboolean force)
+{
+
+    worker->force_aspect_ratio = force;
+    if (worker->vsink)
+    {
+        g_object_set(worker->vsink, "force-aspect-ratio", force, NULL);
+    }
+    _emit_property(worker, WORKER_PROPERTY_FORCE_ASPECT_RATIO, G_TYPE_BOOLEAN, &force);
+
+}
+
+gboolean mafw_gst_renderer_worker_get_force_aspect_ratio(MafwGstRendererWorker *worker)
+{
+    return worker->force_aspect_ratio;
+}
+
+gint mafw_gst_renderer_worker_get_colorkey(MafwGstRendererWorker *worker)
+{
+    return worker->colorkey;
+}
+
+gboolean mafw_gst_renderer_worker_get_seekable(MafwGstRendererWorker *worker)
+{
+    return worker->media.seekable == SEEKABILITY_SEEKABLE;
+}
+
+gboolean mafw_gst_renderer_worker_get_streaming(MafwGstRendererWorker *worker)
+{
+    return uri_is_stream(worker->media.location);
+}
+
+const char* mafw_gst_renderer_worker_get_uri(MafwGstRendererWorker *worker)
+{
+    return worker->media.location;
+}
+
+static void _do_play(MafwGstRendererWorker *worker)
+{
+    g_assert(worker != NULL);
+
+    if (worker->pipeline == NULL) {
+        g_debug("play without a pipeline!");
+        return;
+    }
+    worker->report_statechanges = TRUE;
+
+    /* If we have to stay paused, we do so and add the ready timeout.
+     * Otherwise, we move the pipeline onwards. */
+    if (!worker->stay_paused) {
+        /* If pipeline is READY, we move it to PAUSED, otherwise, to PLAYING */
+        if (worker->state == GST_STATE_READY) {
+            gst_element_set_state(worker->pipeline, GST_STATE_PAUSED);
+            g_debug("setting pipeline to PAUSED");
+        } else {
+            gst_element_set_state(worker->pipeline, GST_STATE_PLAYING);
+            g_debug("setting pipeline to PLAYING");
+        }
+    }
+    else {
+        g_debug("staying in PAUSED state");
+        _add_ready_timeout(worker);
+    }
+}
+
+void mafw_gst_renderer_worker_play(MafwGstRendererWorker *worker,
+                                   const gchar *uri)
+{
+    g_assert(uri);
+
+    mafw_gst_renderer_worker_stop(worker);
+    _reset_media_info(worker);
+
+    /* Set the item to be played */
+    worker->media.location = g_strdup(uri);
+
+    _start_play(worker);
+}
+
+/*
+ * Currently, stop destroys the Gst pipeline and resets the worker into
+ * default startup configuration.
+ */
+void mafw_gst_renderer_worker_stop(MafwGstRendererWorker *worker)
+{
+    g_debug("worker stop");
+    g_assert(worker != NULL);
+
+    /* If location is NULL, this is a pre-created pipeline */
+    if (worker->async_bus_id && worker->pipeline && !worker->media.location)
+        return;
+
+    _reset_pipeline_and_worker(worker);
+
+    /* context framework adaptation starts */
+    if (worker->context_nowplaying) {
+        context_provider_map_free(worker->context_nowplaying);
+        worker->context_nowplaying = NULL;
+    }
+    context_provider_set_null(CONTEXT_PROVIDER_KEY_NOWPLAYING);
+    /* context framework adaptation ends */
+
+    /* We are not playing, so we can let the screen blank */
+    if (worker->blanking__control_handler)
+    {
+        worker->blanking__control_handler(worker, worker->owner, FALSE);
+    }
+
+    /* And now get a fresh pipeline ready */
+    _construct_pipeline(worker, worker->config);
+}
+
+void mafw_gst_renderer_worker_pause(MafwGstRendererWorker *worker)
+{
+    g_assert(worker != NULL);
+
+    if (worker->buffering && worker->state == GST_STATE_PAUSED &&
+        !worker->prerolling)
+    {
+        /* If we are buffering and get a pause, we have to
+         * signal state change and stay_paused */
+        g_debug("Pausing while buffering, signalling state change");
+
+        /* We need to make sure that we go into real PAUSE state */
+        if (worker->blanking__control_handler)
+        {
+            worker->blanking__control_handler(worker, worker->owner, FALSE);
+        }
+        _do_pause_postprocessing(worker);
+    }
+    else
+    {
+        worker->report_statechanges = TRUE;
+        if (worker->seek_position == -1 && worker->state == GST_STATE_PLAYING )
+        {
+            gst_element_set_state(worker->pipeline, GST_STATE_PAUSED);
+            if (worker->blanking__control_handler)
+            {
+                worker->blanking__control_handler(worker, worker->owner, FALSE);
+            }
+        }
+    }
+
+    worker->stay_paused = TRUE;
+    worker->pause_frame_taken = FALSE;
+}
+
+/*
+ * Notifier to call when audio/video routing changes
+ */
+void mafw_gst_renderer_worker_notify_media_destination(
+    MafwGstRendererWorker *worker,
+    GSList *destinations)
+{
+    g_assert(worker != NULL);
+    g_assert(destinations != NULL);
+
+    /* 1. update our records of current destinations */
+    g_slist_free(worker->destinations);
+    worker->destinations = g_slist_copy(destinations);
+
+    /* 2. prevent blanking if we are playing video and outputting it on our
+     * own display, otherwise allow the screen to blank */
+    if (worker->blanking__control_handler)
+    {
+        if (worker->media.has_visual_content &&
+            worker->state == GST_STATE_PLAYING &&
+            g_slist_find(worker->destinations,
+                         GINT_TO_POINTER(WORKER_OUTPUT_BUILTIN_DISPLAY)))
+        {
+            worker->blanking__control_handler(worker, worker->owner, TRUE);
+        }
+        else
+        {
+            worker->blanking__control_handler(worker, worker->owner, FALSE);
+        }
+    }
+
+    /* 3. disabling Dolby Headphone effect if not outputting to audio jack or
+     * bluetooth headphones, otherwise using the effect. Actual route check is done
+     * in set_dolby_*****_property function*/
+    set_dolby_music_property(worker, worker->config->mobile_surround_music.state);
+    set_dolby_video_property(worker, worker->config->mobile_surround_video.state);
+
+}
+
+void mafw_gst_renderer_worker_pause_at(MafwGstRendererWorker *worker, guint position)
+{
+    /* the current implementation works only from the READY, i.e. stopped, state */
+    g_assert( worker != NULL);
+    worker->stay_paused = TRUE;
+    worker->pause_frame_taken = FALSE;
+    worker->seek_position = position;
+
+    if( worker->vsink )
+    {
+        g_object_set(worker->vsink, "show-preroll-frame",
+                     FALSE, NULL);
+    }
+}
+
+void mafw_gst_renderer_worker_resume(MafwGstRendererWorker *worker)
+{
+    worker->stay_paused = FALSE;
+    if (worker->buffering && worker->state == GST_STATE_PAUSED &&
+        !worker->prerolling) {
+        /* If we are buffering we cannot resume, but we know that the pipeline
+         * will be moved to PLAYING as stay_paused is FALSE, so we just
+         * activate the state change report, this way as soon as buffering
+         * is finished the pipeline will be set to PLAYING and the state
+         * change will be reported */
+        worker->report_statechanges = TRUE;
+        g_debug("Resumed while buffering, activating pipeline state changes");
+        /* Notice though that we can receive the Resume before we get any
+           buffering information. In that case we go with the "else" branch
+           and set the pipeline to PLAYING. However, it is possible that
+           in this case we get the first buffering signal before the PAUSED ->
+           PLAYING state change. In that case, since we ignore state changes
+           while buffering, we never signal the state change to PLAYING. We
+           can only fix this by checking, when we receive a PAUSED -> PLAYING
+           transition, whether we are buffering, and in that case signal the
+           state change (if we get that transition while buffering is on, it
+           can only mean that the client resumed playback while buffering, and
+           we must notify the state change) */
+    } else {
+        _do_play(worker);
+    }
+
+    /* we want to resume, so there is no use for these timers anymore */
+    _remove_pause_frame_timeout(worker);
+    _remove_ready_timeout(worker);
+}
+
+MafwGstRendererWorker *mafw_gst_renderer_worker_new(gpointer owner)
+{
+    MafwGstRendererWorker *worker;
+
+    g_debug("%s", G_STRFUNC);
+
+    worker = g_new0(MafwGstRendererWorker, 1);
+    worker->owner = owner;
+    worker->report_statechanges = TRUE;
+    worker->state = GST_STATE_NULL;
+    worker->seek_position = -1;
+    worker->ready_timeout = 0;
+    worker->pause_frame_timeout = 0;
+    worker->duration_seek_timeout = 0;
+    worker->duration_seek_timeout_loop_count = 0;
+    worker->in_ready = FALSE;
+    worker->xid = 0;
+    worker->x_overlay_rectangle.x = -1;
+    worker->x_overlay_rectangle.y = -1;
+    worker->x_overlay_rectangle.width = -1;
+    worker->x_overlay_rectangle.height = -1;
+    worker->autopaint = TRUE;
+    worker->playback_speed = 1;
+    worker->colorkey = -1;
+    worker->vsink = NULL;
+    worker->asink = NULL;
+    worker->tsink = NULL;
+    worker->amixer = NULL;
+    worker->audiobin = NULL;
+    worker->tag_list = NULL;
+    worker->current_metadata = NULL;
+    worker->media.seekable = SEEKABILITY_SEEKABLE;
+
+    worker->destinations = NULL;
+
+    worker->current_frame_on_pause = FALSE;
+    worker->taking_screenshot = FALSE;
+    worker->force_aspect_ratio = TRUE;
+    _init_tmp_files_pool(worker);
+    worker->notify_seek_handler = NULL;
+    worker->notify_pause_handler = NULL;
+    worker->notify_play_handler = NULL;
+    worker->notify_buffer_status_handler = NULL;
+    worker->notify_eos_handler = NULL;
+    worker->notify_metadata_handler = NULL;
+    worker->notify_error_handler = NULL;
+    worker->blanking__control_handler = NULL;
+    worker->screenshot_handler = NULL;
+
+    worker->config = _create_default_configuration();
+
+    worker->seeker = mafw_gst_renderer_seeker_new();
+
+    if (!_context_fw_initialised)
+    {
+        /* context framework adaptation starts */
+        if (context_provider_init(DBUS_BUS_SESSION, CONTEXT_PROVIDER_BUS_NAME)) {
+            _context_fw_initialised = TRUE;
+            context_provider_install_key(CONTEXT_PROVIDER_KEY_NOWPLAYING, FALSE,
+                                         NULL, NULL);
+            g_debug("Initialized context framework provider");
+        }
+        else {
+            g_warning("Could not initialize context framework provider");
+        }
+    }
+    /* context framework adaptation ends */
+
+    return worker;
+
+}
+
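+/*
+ * Typical lifecycle of a worker, as a minimal illustrative sketch (setting up
+ * the notify_* and blanking callbacks is omitted here):
+ *
+ *     MafwGstRendererWorker *w = mafw_gst_renderer_worker_new(owner);
+ *     mafw_gst_renderer_worker_set_xid(w, xid);   // video window, if any
+ *     mafw_gst_renderer_worker_play(w, "file:///home/user/clip.avi");
+ *     ...
+ *     mafw_gst_renderer_worker_pause(w);
+ *     mafw_gst_renderer_worker_resume(w);
+ *     mafw_gst_renderer_worker_stop(w);
+ *     mafw_gst_renderer_worker_exit(w);
+ */
+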
+void mafw_gst_renderer_worker_exit(MafwGstRendererWorker *worker)
+{
+    _destroy_tmp_files_pool(worker);
+    _reset_pipeline_and_worker(worker);
+
+    /* We are not playing, so we can let the screen blank */
+    if (worker->blanking__control_handler)
+    {
+        worker->blanking__control_handler(worker, worker->owner, FALSE);
+    }
+
+    /* now finally sinks/bins are released */
+    if( worker->audiobin )
+    {
+        gst_object_unref(worker->audiobin);
+        worker->audiobin = NULL;
+    }
+    else if( worker->asink )
+    {
+        gst_object_unref(worker->asink);
+        worker->asink = NULL;
+    }
+
+    if( worker->vsink )
+    {
+        gst_object_unref(worker->vsink);
+        worker->vsink = NULL;
+    }
+
+    context_provider_stop();
+    _context_fw_initialised = FALSE;
+
+    if( worker->destinations )
+    {
+        g_slist_free(worker->destinations);
+        worker->destinations = NULL;
+    }
+
+    if( worker->config )
+    {
+        _free_configuration(worker->config);
+        worker->config = NULL;
+    }
+
+    if( worker->seeker )
+    {
+        mafw_gst_renderer_seeker_free(worker->seeker);
+        worker->seeker = NULL;
+    }
+}