[Merge] lp:~alfonsosanchezbeato/media-hub/video-desktop-support into lp:media-hub

Alfonso Sanchez-Beato alfonso.sanchez-beato at canonical.com
Thu Feb 16 09:59:03 UTC 2017


Comments addressed & answered, pushing branch now.

Diff comments:

> 
> === modified file 'debian/control'
> --- debian/control	2016-05-04 13:11:22 +0000
> +++ debian/control	2017-02-15 10:21:21 +0000
> @@ -33,6 +33,8 @@
>                 libpulse-dev,
>                 qtbase5-dev,
>                 libtelepathy-qt5-dev,
> +               libegl1-mesa-dev,

Actually, what should happen is that "control" is not under version control at all. But, as it is, it looks like it is kept in sync with "control.in".

> +               libgl1-mesa-dev,
>  Standards-Version: 3.9.6
>  Homepage: https://launchpad.net/media-hub
>  # If you aren't a member of ~phablet-team but need to upload packaging changes,
> 
> === modified file 'debian/usr.bin.media-hub-server'
> --- debian/usr.bin.media-hub-server	2016-08-23 06:54:43 +0000
> +++ debian/usr.bin.media-hub-server	2017-02-15 10:21:21 +0000
> @@ -126,6 +126,14 @@
>    owner @{HOME}/.local/share/** rk,
>    owner /{,var/}run/user/[0-9]*/** rk,
>  
> +  # Permissions for desktop video decoding
> +  unix (bind, send) type=dgram addr="@media-hub-server*",

To send the client the file descriptors for the buffer and notify when we have a new frame.

> +  /sys/devices/**/drm/render** r,
> +  /sys/devices/**/drm/card** r,
> +  /sys/devices/system/node/node*/meminfo r,
> +  /run/user/*/orcexec* rw,
> +  /run/user/**/mir_socket rw,
> +
>    # Site-specific additions and overrides. See local/README for details.
>    #include <local/usr.bin.media-hub-server>
>  }
> 
> === modified file 'src/core/media/audio/pulse_audio_output_observer.cpp'
> --- src/core/media/audio/pulse_audio_output_observer.cpp	2016-04-06 15:28:29 +0000
> +++ src/core/media/audio/pulse_audio_output_observer.cpp	2017-02-15 10:21:21 +0000
> @@ -329,7 +329,7 @@
>              }
>  
>              audio::OutputState state;
> -            if (info->index == primary_sink_index)
> +            if (info->index == static_cast<std::uint32_t>(primary_sink_index))

Yay ;)

>                  state = audio::OutputState::Speaker;
>              else
>                  state = audio::OutputState::External;
> 
> === modified file 'src/core/media/gstreamer/playbin.cpp'
> --- src/core/media/gstreamer/playbin.cpp	2016-07-11 01:21:38 +0000
> +++ src/core/media/gstreamer/playbin.cpp	2017-02-15 10:21:21 +0000
> @@ -14,59 +14,78 @@
>   * along with this program.  If not, see <http://www.gnu.org/licenses/>.
>   *
>   * Authored by: Thomas Voß <thomas.voss at canonical.com>
> + *              Alfonso Sanchez-Beato <alfonso.sanchez-beato at canonical.com>
>   */
>  
>  #include <core/media/gstreamer/playbin.h>
>  #include <core/media/gstreamer/engine.h>
> +#include <core/media/video/socket_types.h>
>  
>  #include <gst/pbutils/missing-plugins.h>
>  
> -#if defined(MEDIA_HUB_HAVE_HYBRIS_MEDIA_COMPAT_LAYER)
>  #include <hybris/media/surface_texture_client_hybris.h>
>  #include <hybris/media/media_codec_layer.h>
>  
>  #include "core/media/logger/logger.h"
>  #include "core/media/util/uri_check.h"
>  
> +#include <sys/socket.h>
> +#include <sys/un.h>
> +
>  #include <utility>
> -
> -namespace
> -{
> -void setup_video_sink_for_buffer_streaming(GstElement* pipeline)
> -{
> -    // Get the service-side BufferQueue (IGraphicBufferProducer) and associate it with
> -    // the SurfaceTextureClientHybris instance
> -    IGBPWrapperHybris igbp = decoding_service_get_igraphicbufferproducer();
> -    SurfaceTextureClientHybris stc = surface_texture_client_create_by_igbp(igbp);
> -
> -    // Because mirsink is being loaded, we are definitely doing * hardware rendering.
> -    surface_texture_client_set_hardware_rendering(stc, TRUE);
> -
> -    GstContext *context = gst_context_new("gst.mir.MirContext", TRUE);
> -    GstStructure *structure = gst_context_writable_structure(context);
> -    gst_structure_set(structure, "gst_mir_context", G_TYPE_POINTER, stc, NULL);
> -
> -    /* Propagate context in pipeline (needed by amchybris and mirsink) */
> -    gst_element_set_context(pipeline, context);
> -}
> -}
> -#else  // MEDIA_HUB_HAVE_HYBRIS_MEDIA_COMPAT_LAYER
> -namespace
> -{
> -void setup_video_sink_for_buffer_streaming(GstElement*)
> -{
> -    throw core::ubuntu::media::Player::Errors::OutOfProcessBufferStreamingNotSupported{};
> -}
> -}
> -#endif // MEDIA_HUB_HAVE_HYBRIS_MEDIA_COMPAT_LAYER
> -
> -namespace
> -{
> -bool is_mir_video_sink()
> -{
> -    return g_strcmp0(::getenv("CORE_UBUNTU_MEDIA_SERVICE_VIDEO_SINK_NAME"), "mirsink") == 0;
> -}
> -}
> +#include <cstring>
> +
> +static const char *PULSE_SINK = "pulsesink";
> +static const char *HYBRIS_SINK = "hybrissink";
> +static const char *MIR_SINK = "mirsink";
> +
> +using namespace std;
> +
> +void gstreamer::Playbin::setup_video_sink_for_buffer_streaming()
> +{
> +    IGBPWrapperHybris igbp;
> +    SurfaceTextureClientHybris stc;
> +    GstContext *context;
> +    GstStructure *structure;
> +
> +    switch (backend) {
> +    case core::ubuntu::media::AVBackend::Backend::hybris:
> +        // Get the service-side BufferQueue (IGraphicBufferProducer) and
> +        // associate with it the SurfaceTextureClientHybris instance.
> +        igbp = decoding_service_get_igraphicbufferproducer();
> +        stc = surface_texture_client_create_by_igbp(igbp);
> +
> +        // Because mirsink is being loaded, we are definitely doing * hardware rendering.
> +        surface_texture_client_set_hardware_rendering(stc, TRUE);
> +
> +        context = gst_context_new("gst.mir.MirContext", TRUE);
> +        structure = gst_context_writable_structure(context);
> +        gst_structure_set(structure, "gst_mir_context", G_TYPE_POINTER, stc, NULL);
> +
> +        /* Propagate context in pipeline (needed by amchybris and mirsink) */
> +        gst_element_set_context(pipeline, context);
> +        break;
> +    case core::ubuntu::media::AVBackend::Backend::mir:
> +        // Connect to buffer consumer socket
> +        connect_to_consumer();
> +        // Configure mirsink so it exports buffers
> +        g_object_set (G_OBJECT (video_sink), "export-buffers", TRUE, nullptr);

The new mirsink has two modes. When the export-buffers property is set, the sink produces messages with the buffer info and when a frame becomes available (see process_message_element()). If it is not set, it creates its own window and displays the frames there. The default is FALSE so mirsink works well with gst-launch. Comment added though.

> +        break;
> +    case core::ubuntu::media::AVBackend::Backend::none:
> +    default:
> +        throw core::ubuntu::media::Player::Errors::
> +            OutOfProcessBufferStreamingNotSupported{};
> +    }
> +}
> +
> +bool gstreamer::Playbin::is_supported_video_sink(void) const
> +{
> +    if (video_sink_name == HYBRIS_SINK || video_sink_name ==  MIR_SINK)
> +        return TRUE;
> +
> +    return FALSE;
> +}
> +
>  // Uncomment to generate a dot file at the time that the pipeline
>  // goes to the PLAYING state. Make sure to export GST_DEBUG_DUMP_DOT_DIR
>  // before starting media-hub-server. To convert the dot file to something
> @@ -253,6 +281,40 @@
>      MH_ERROR("Missing decoder for %s", mime);
>  }
>  
> +void gstreamer::Playbin::process_message_element(GstMessage *message)
> +{
> +    const GstStructure *msg_data = gst_message_get_structure(message);
> +    const gchar *struct_name = gst_structure_get_name(msg_data);
> +
> +    if (g_strcmp0("buffer-export-data", struct_name) == 0)
> +    {
> +        int fd;
> +        core::ubuntu::media::video::BufferMetadata meta;
> +        if (!gst_structure_get(msg_data,
> +                               "fd", G_TYPE_INT, &fd,
> +                               "width", G_TYPE_INT, &meta.width,
> +                               "height", G_TYPE_INT, &meta.height,
> +                               "fourcc", G_TYPE_INT, &meta.fourcc,
> +                               "stride", G_TYPE_INT, &meta.stride,
> +                               "offset", G_TYPE_INT, &meta.offset,
> +                               NULL))
> +        {
> +            MH_ERROR("Wrong buffer-export-data message");

This is one of those things that just should not happen; it would indicate a mismatch between the message that mirsink sends and what media-hub expects. Changed the message a bit to clarify that.

> +            return;
> +        }
> +        MH_DEBUG("Exporting %dx%d buffer (fd %d)", meta.width, meta.height, fd);
> +        send_buffer_data(fd, &meta, sizeof meta);
> +    }
> +    else if (g_strcmp0("frame-ready", struct_name) == 0)
> +    {
> +        send_frame_ready();
> +    }
> +    else
> +    {
> +        MH_ERROR("Unknown GST_MESSAGE_ELEMENT with struct %s", struct_name);
> +    }
> +}
> +
>  void gstreamer::Playbin::on_new_message_async(const Bus::Message& message)
>  {
>      switch (message.type)
> @@ -846,3 +910,92 @@
>      else
>          return true;
>  }
> +
> +bool gstreamer::Playbin::connect_to_consumer(void)
> +{
> +    static const char *local_socket = "media-hub-server";
> +    static const char *consumer_socket = "media-consumer";
> +
> +    int len;
> +    struct sockaddr_un local, remote;
> +
> +    if (sock_consumer != -1) {
> +        MH_DEBUG("Resetting socket");
> +        close(sock_consumer);
> +    }
> +
> +    if ((sock_consumer = socket(AF_UNIX, SOCK_DGRAM, 0)) == -1)
> +    {
> +        MH_ERROR("Cannot create socket: %s (%d)", strerror(errno), errno);
> +        return false;
> +    }
> +
> +    // Bind client to local -abstract- socket (media-hub-server<session>)
> +    ostringstream local_ss;
> +    local_ss << local_socket << key;
> +    local.sun_family = AF_UNIX;
> +    local.sun_path[0] = '\0';
> +    strcpy(local.sun_path + 1, local_ss.str().c_str());
> +    len = sizeof(local.sun_family) + local_ss.str().length() + 1;
> +    if (bind(sock_consumer, (struct sockaddr *) &local, len) == -1)
> +    {
> +        MH_ERROR("Cannot bind socket: %s (%d)", strerror(errno), errno);
> +        close(sock_consumer);
> +        sock_consumer = -1;
> +        return false;
> +    }
> +
> +    // Connect to buffer consumer (media-consumer<session>)
> +    ostringstream remote_ss;
> +    remote_ss << consumer_socket << key;
> +    remote.sun_family = AF_UNIX;
> +    remote.sun_path[0] = '\0';
> +    strcpy(remote.sun_path + 1, remote_ss.str().c_str());
> +    len = sizeof(remote.sun_family) + remote_ss.str().length() + 1;
> +    if (connect(sock_consumer, (struct sockaddr *) &remote, len) == -1)
> +    {
> +        MH_ERROR("Cannot connect to consumer: %s (%d)", strerror(errno), errno);
> +        close(sock_consumer);
> +        sock_consumer = -1;
> +        return false;
> +    }
> +
> +    MH_DEBUG("Connected to buffer consumer socket");
> +
> +    return true;
> +}
> +
> +void gstreamer::Playbin::send_buffer_data(int fd, void *data, size_t len)
> +{
> +    struct msghdr msg{};
> +    char buf[CMSG_SPACE(sizeof fd)]{};
> +    struct cmsghdr *cmsg;
> +    struct iovec io = { .iov_base = data, .iov_len = len };
> +
> +    msg.msg_iov = &io;
> +    msg.msg_iovlen = 1;
> +    msg.msg_control = buf;
> +    msg.msg_controllen = sizeof buf;
> +
> +    cmsg = CMSG_FIRSTHDR(&msg);
> +    cmsg->cmsg_level = SOL_SOCKET;
> +    cmsg->cmsg_type = SCM_RIGHTS;
> +    cmsg->cmsg_len = CMSG_LEN(sizeof fd);
> +
> +    memmove(CMSG_DATA(cmsg), &fd, sizeof fd);
> +
> +    msg.msg_controllen = cmsg->cmsg_len;
> +
> +    if (sendmsg(sock_consumer, &msg, 0) < 0)
> +        MH_ERROR("Failed to send dma_buf fd to consumer: %s (%d)",
> +                 strerror(errno), errno);
> +}
> +
> +void gstreamer::Playbin::send_frame_ready(void)
> +{
> +    const char ready = 'r';
> +
> +    if (send (sock_consumer, &ready, sizeof ready, 0) == -1)
> +        MH_ERROR("Error when sending sync to client: %s (%d)",

Done

> +                 strerror(errno), errno);
> +}
> 
> === modified file 'src/core/media/player_stub.cpp'
> --- src/core/media/player_stub.cpp	2016-08-15 19:27:29 +0000
> +++ src/core/media/player_stub.cpp	2017-02-15 10:21:21 +0000
> @@ -316,17 +316,22 @@
>  
>  media::video::Sink::Ptr media::PlayerStub::create_gl_texture_video_sink(std::uint32_t texture_id)
>  {
> +    // Create first local stub so media-hub can rely on an existing socket
> +    // for the mir/desktop case.
> +    auto sink = d->sink_factory(texture_id);

Done

> +
>      auto op = d->object->transact_method<mpris::Player::CreateVideoSink, void>(texture_id);
>  
>      if (op.is_error())
>      {
> -        if (op.error().name() == mpris::Player::Error::OutOfProcessBufferStreamingNotSupported::name)
> +        if (op.error().name() ==
> +            mpris::Player::Error::OutOfProcessBufferStreamingNotSupported::name)
>              throw media::Player::Errors::OutOfProcessBufferStreamingNotSupported{};
>          else
>              throw std::runtime_error{op.error().print()};
>      }
>  
> -    return d->sink_factory(texture_id);
> +    return sink;
>  }
>  
>  void media::PlayerStub::next()
> 
> === added file 'src/core/media/video/egl_sink.cpp'
> --- src/core/media/video/egl_sink.cpp	1970-01-01 00:00:00 +0000
> +++ src/core/media/video/egl_sink.cpp	2017-02-15 10:21:21 +0000
> @@ -0,0 +1,311 @@
> +/*
> + * Copyright © 2017 Canonical Ltd.
> + *
> + * This program is free software: you can redistribute it and/or modify it
> + * under the terms of the GNU Lesser General Public License version 3,
> + * as published by the Free Software Foundation.
> + *
> + * This program is distributed in the hope that it will be useful,
> + * but WITHOUT ANY WARRANTY; without even the implied warranty of
> + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
> + * GNU Lesser General Public License for more details.
> + *
> + * You should have received a copy of the GNU Lesser General Public License
> + * along with this program.  If not, see <http://www.gnu.org/licenses/>.
> + *
> + * Authored by: Alfonso Sanchez-Beato <alfonso.sanchez-beato at canonical.com>
> + */
> +
> +#include <core/media/video/egl_sink.h>
> +#include <core/media/video/socket_types.h>
> +
> +#include <EGL/egl.h>
> +#include <EGL/eglext.h>
> +#include <GLES2/gl2.h>
> +#include <GLES2/gl2ext.h>
> +
> +#include <sys/types.h>
> +#include <sys/socket.h>
> +#include <sys/un.h>
> +
> +#include <sstream>
> +#include <thread>
> +#include <future>
> +#include <cstring>
> +#include <unistd.h>
> +
> +namespace media = core::ubuntu::media;
> +namespace video = core::ubuntu::media::video;
> +
> +using namespace std;
> +
> +struct video::EglSink::Private
> +{
> +
> +    static bool receive_buff(int socket, BufferData *data)
> +    {
> +        struct msghdr msg{};
> +        struct iovec io = { .iov_base = &data->meta,
> +                            .iov_len = sizeof data->meta };
> +        char c_buffer[256];
> +        ssize_t res;
> +
> +        msg.msg_iov = &io;
> +        msg.msg_iovlen = 1;
> +
> +        msg.msg_control = c_buffer;
> +        msg.msg_controllen = sizeof c_buffer;
> +
> +        if ((res = recvmsg(socket, &msg, 0)) == -1) {
> +            cout << "Failed to receive message\n";
> +            return false;
> +        } else if (res == 0) {
> +            cout << "Socket shutdown while receiving buffer data";
> +            return false;
> +        }
> +
> +        struct cmsghdr *cmsg = CMSG_FIRSTHDR(&msg);
> +
> +        memmove(&data->fd, CMSG_DATA(cmsg), sizeof data->fd);
> +
> +        cout << "Extracted fd " << data->fd << '\n';

Done, not sure why I thought MH_* could not be used by the client library.

> +        cout << "width    " << data->meta.width << '\n';
> +        cout << "height   " << data->meta.height << '\n';
> +        cout << "fourcc 0x" << hex << data->meta.fourcc << dec << '\n';
> +        cout << "stride   " << data->meta.stride << '\n';
> +        cout << "offset   " << data->meta.offset << '\n';
> +
> +        return true;
> +    }
> +
> +    static void read_sock_events(const media::Player::PlayerKey key,
> +                                 int sock_fd,
> +                                 promise<BufferData>& prom_buff,
> +                                 core::Signal<void>& frame_available)
> +    {
> +        static const char *consumer_socket = "media-consumer";
> +
> +        struct sockaddr_un local;
> +        int len;
> +        BufferData buff_data;
> +
> +        if (sock_fd == -1) {
> +            perror("Cannot create buffer consumer socket");
> +            return;
> +        }
> +
> +        ostringstream sock_name_ss;
> +        sock_name_ss << consumer_socket << key;
> +        local.sun_family = AF_UNIX;
> +        local.sun_path[0] = '\0';
> +        strcpy(local.sun_path + 1, sock_name_ss.str().c_str());
> +        len = sizeof(local.sun_family) + sock_name_ss.str().length() + 1;
> +        if (bind(sock_fd, (struct sockaddr *) &local, len) == -1) {
> +            perror("Cannot bind consumer socket");
> +            return;
> +        }
> +
> +        // Wait for buffer descriptions, pass them to rendering thread
> +        if (!receive_buff(sock_fd, &buff_data))
> +            return;
> +
> +        prom_buff.set_value(buff_data);
> +
> +        // Now signal frame syncs
> +        while(true) {
> +            ssize_t res;
> +            char c;
> +
> +            res = recv(sock_fd, &c, sizeof c, 0);
> +            if (res == -1) {
> +                perror("while waiting sync");
> +                return;
> +            } else if (res == 0) {
> +                cout << "Socket shutdown\n";
> +                return;
> +            }
> +
> +            frame_available();
> +        }
> +    }
> +
> +    bool find_extension(const string& extensions, const string& ext)
> +    {
> +        size_t len_all = extensions.length();
> +        size_t len = ext.length();
> +        size_t pos = 0;
> +
> +        while ((pos = extensions.find(ext, pos)) != string::npos) {
> +            if (pos + len == len_all || extensions[pos + len] == ' ')
> +                return true;
> +
> +            pos = pos + len;
> +        }
> +
> +        return false;
> +    }
> +
> +    Private(uint32_t gl_texture, const media::Player::PlayerKey key)

PlayerKey is just a typedef for uint32_t, so it is not really worth passing it by reference (in fact, that would be slower).

> +        : gl_texture{gl_texture},
> +          prom_buff{},
> +          fut_buff{prom_buff.get_future()},
> +          sock_fd{socket(AF_UNIX, SOCK_DGRAM, 0)},
> +          sock_thread{read_sock_events, key, sock_fd,

The best way to do this would be to provide fds so that receiving could be integrated into the app's main loop (the best libraries let you do that). But that would need changes in the client library interface and in its users. And the library is supposed to be client agnostic, so we cannot assume we are being used by a Qt app or a glib app and integrate into their main loops.

So no, there is no easy way to do this any other way. Note nonetheless that in the hybris case there is also a thread sending the frame_available signals, although it gets created in the Android libraries.

> +                      ref(prom_buff), ref(frame_available)},
> +          egl_image{EGL_NO_IMAGE_KHR},
> +          buf_fd{-1}
> +    {
> +        const char *extensions;
> +        const char *egl_needed[] = {"EGL_KHR_image_base",
> +                                    "EGL_EXT_image_dma_buf_import"};
> +        EGLDisplay egl_display = eglGetCurrentDisplay();
> +        size_t i;
> +
> +        extensions = eglQueryString (egl_display, EGL_EXTENSIONS);

Some comments added.

> +        if (!extensions)
> +            throw runtime_error {"Error querying EGL extensions"};
> +
> +        for (i = 0; i < sizeof(egl_needed)/sizeof(egl_needed[0]); ++i) {
> +            if (!find_extension(extensions, egl_needed[i])) {
> +                ostringstream oss;
> +                oss << egl_needed[i] << " not supported";
> +                cout << oss.str() << '\n';
> +                // TODO check why extensions is different from es2_info output
> +                //throw runtime_error {oss.str().c_str()};
> +            }
> +        }
> +
> +        // TODO this returns a NULL pointer, probably same issue as with eglQueryString

It is not a question of functionality, but of doing things in the right way. We must check whether the extensions we need are present to avoid errors when accessing the functionality. There is kind of a second check when we load dynamically the functions defined by the extensions, but note that extensions might also define changes in the behavior of standard EGL/GL calls.

The issue here is that eglQueryString/glGetString return different strings from what the es2_info command returns, with fewer extensions and not including the ones we need. This does not happen in gstreamer/media-hub, where I see the right strings and I perform the needed checks in mirsink, so I think this problem is related to how Qt initializes EGL/GL.

But, fortunately, even though the extensions are not shown in the strings, we can still load the functions defined by the extensions and everything works as expected. So this is probably something worth investigating, but there is no need to spend time on it at the moment.

> +        // extensions = reinterpret_cast<const char *>(glGetString(GL_EXTENSIONS));
> +        // if (!extensions)
> +        //     throw runtime_error {"Error querying OpenGL ES extensions"};
> +
> +        // if (!find_extension(extensions, "GL_OES_EGL_image_external"))
> +        //     throw runtime_error {"GL_OES_EGL_image_external is not supported"};
> +
> +        // Import functions from extensions
> +        _eglCreateImageKHR = (PFNEGLCREATEIMAGEKHRPROC)
> +            eglGetProcAddress("eglCreateImageKHR");
> +        _eglDestroyImageKHR = (PFNEGLDESTROYIMAGEKHRPROC)
> +            eglGetProcAddress("eglDestroyImageKHR");
> +        _glEGLImageTargetTexture2DOES = (PFNGLEGLIMAGETARGETTEXTURE2DOESPROC)
> +            eglGetProcAddress("glEGLImageTargetTexture2DOES");
> +
> +        if (_eglCreateImageKHR == nullptr || _eglDestroyImageKHR == nullptr ||
> +            _glEGLImageTargetTexture2DOES == nullptr)
> +            throw runtime_error {"Error when loading extensions"};
> +    }
> +
> +    ~Private()
> +    {
> +        if (sock_fd != -1) {
> +            shutdown(sock_fd, SHUT_RDWR);
> +            sock_thread.join();
> +            close(sock_fd);
> +        }
> +
> +        if (buf_fd != -1)
> +            close(buf_fd);
> +
> +        if (egl_image != EGL_NO_IMAGE_KHR)
> +            _eglDestroyImageKHR(eglGetCurrentDisplay(), egl_image);
> +    }
> +
> +    bool import_buffer(const BufferData *buf_data)

Done.

> +    {
> +        GLenum err;
> +        EGLDisplay egl_display = eglGetCurrentDisplay();
> +        EGLint image_attrs[] = {
> +            EGL_WIDTH, buf_data->meta.width,
> +            EGL_HEIGHT, buf_data->meta.height,
> +            EGL_LINUX_DRM_FOURCC_EXT, buf_data->meta.fourcc,
> +            EGL_DMA_BUF_PLANE0_FD_EXT, buf_data->fd,
> +            EGL_DMA_BUF_PLANE0_OFFSET_EXT, buf_data->meta.offset,
> +            EGL_DMA_BUF_PLANE0_PITCH_EXT, buf_data->meta.stride,
> +            EGL_NONE
> +        };
> +
> +        buf_fd = buf_data->fd;
> +        egl_image = _eglCreateImageKHR(egl_display, EGL_NO_CONTEXT,
> +                                       EGL_LINUX_DMA_BUF_EXT, NULL, image_attrs);
> +        if (egl_image == EGL_NO_IMAGE_KHR) {
> +            cout << "eglCreateImageKHR error 0x" << hex
> +                 << eglGetError() << dec << '\n';
> +            return false;
> +        }
> +
> +        // TODO Do this when swapping if we end up importing more than one buffer
> +        glBindTexture(GL_TEXTURE_2D, gl_texture);
> +        _glEGLImageTargetTexture2DOES(GL_TEXTURE_2D, egl_image);
> +
> +        while((err = glGetError()) != GL_NO_ERROR)
> +            cout << "OpenGL error 0x" << hex << err << dec << '\n';
> +
> +        cout << "Image successfully imported\n";
> +
> +        return true;
> +    }
> +
> +    uint32_t gl_texture;
> +    promise<BufferData> prom_buff;
> +    future<BufferData> fut_buff;
> +    core::Signal<void> frame_available;
> +    int sock_fd;
> +    thread sock_thread;
> +    EGLImageKHR egl_image;
> +    int buf_fd;
> +    PFNEGLCREATEIMAGEKHRPROC _eglCreateImageKHR;
> +    PFNEGLDESTROYIMAGEKHRPROC _eglDestroyImageKHR;
> +    PFNGLEGLIMAGETARGETTEXTURE2DOESPROC _glEGLImageTargetTexture2DOES;
> +};
> +
> +function<video::Sink::Ptr(uint32_t)>
> +video::EglSink::factory_for_key(const media::Player::PlayerKey& key)
> +{
> +    return [key](uint32_t texture)
> +    {
> +        return video::Sink::Ptr{new video::EglSink{texture, key}};
> +    };
> +}
> +
> +video::EglSink::EglSink(uint32_t gl_texture,
> +                        const media::Player::PlayerKey key)

PlayerKey is uint32_t typedef, see previous comment.

> +    : d{new Private{gl_texture, key}}
> +{
> +}
> +
> +video::EglSink::~EglSink()
> +{
> +}
> +
> +const core::Signal<void>& video::EglSink::frame_available() const
> +{
> +    return d->frame_available;
> +}
> +
> +bool video::EglSink::transformation_matrix(float *matrix) const
> +{
> +    // TODO: Can we get orientation on unity8 desktop somehow?
> +    static const float identity_4x4[] = { 1, 0, 0, 0,
> +                                          0, 1, 0, 0,
> +                                          0, 0, 1, 0,
> +                                          0, 0, 0, 1 };
> +
> +    memcpy(matrix, identity_4x4, sizeof identity_4x4);
> +    return true;
> +}
> +
> +bool video::EglSink::swap_buffers() const
> +{
> +    // First time called, import buffers
> +    if (d->egl_image == EGL_NO_IMAGE_KHR) {
> +        BufferData buf_data = d->fut_buff.get();
> +        if (!d->import_buffer(&buf_data))
> +            return false;
> +    }
> +
> +    // We need to do nothing here, as the only buffer has already been mapped.
> +    // TODO Change when we implement a buffer queue.
> +
> +    return true;
> +}


-- 
https://code.launchpad.net/~alfonsosanchezbeato/media-hub/video-desktop-support/+merge/317181
Your team Ubuntu Phablet Team is subscribed to branch lp:media-hub.



More information about the Ubuntu-reviews mailing list