[Merge] lp:~alfonsosanchezbeato/media-hub/video-desktop-support into lp:media-hub
Jim Hodapp
jim.hodapp at canonical.com
Wed Feb 15 20:44:13 UTC 2017
Review: Needs Fixing (review type: code)
Looks very good overall. Some comments inline below.
Diff comments:
>
> === modified file 'debian/control'
> --- debian/control 2016-05-04 13:11:22 +0000
> +++ debian/control 2017-02-15 10:21:21 +0000
> @@ -33,6 +33,8 @@
> libpulse-dev,
> qtbase5-dev,
> libtelepathy-qt5-dev,
> + libegl1-mesa-dev,
You should only have to change this in control.in below and not this one.
> + libgl1-mesa-dev,
> Standards-Version: 3.9.6
> Homepage: https://launchpad.net/media-hub
> # If you aren't a member of ~phablet-team but need to upload packaging changes,
>
> === modified file 'debian/usr.bin.media-hub-server'
> --- debian/usr.bin.media-hub-server 2016-08-23 06:54:43 +0000
> +++ debian/usr.bin.media-hub-server 2017-02-15 10:21:21 +0000
> @@ -126,6 +126,14 @@
> owner @{HOME}/.local/share/** rk,
> owner /{,var/}run/user/[0-9]*/** rk,
>
> + # Permissions for desktop video decoding
> + unix (bind, send) type=dgram addr="@media-hub-server*",
Remind me again what this unix socket is used for?
> + /sys/devices/**/drm/render** r,
> + /sys/devices/**/drm/card** r,
> + /sys/devices/system/node/node*/meminfo r,
> + /run/user/*/orcexec* rw,
> + /run/user/**/mir_socket rw,
> +
> # Site-specific additions and overrides. See local/README for details.
> #include <local/usr.bin.media-hub-server>
> }
>
> === modified file 'src/core/media/audio/pulse_audio_output_observer.cpp'
> --- src/core/media/audio/pulse_audio_output_observer.cpp 2016-04-06 15:28:29 +0000
> +++ src/core/media/audio/pulse_audio_output_observer.cpp 2017-02-15 10:21:21 +0000
> @@ -329,7 +329,7 @@
> }
>
> audio::OutputState state;
> - if (info->index == primary_sink_index)
> + if (info->index == static_cast<std::uint32_t>(primary_sink_index))
Fixing a long-standing compile warning? :)
> state = audio::OutputState::Speaker;
> else
> state = audio::OutputState::External;
>
> === modified file 'src/core/media/backend.cpp'
> --- src/core/media/backend.cpp 2016-08-15 19:27:29 +0000
> +++ src/core/media/backend.cpp 2017-02-15 10:21:21 +0000
> @@ -39,5 +39,12 @@
> return media::AVBackend::Backend::hybris;
> }
>
> + plugin = gst_registry_lookup(registry, "libgstmirsink.so");
I really like this approach, clean and simple. Nice work.
> + if (plugin)
> + {
> + gst_object_unref(plugin);
> + return media::AVBackend::Backend::mir;
> + }
> +
> return media::AVBackend::Backend::none;
> }
>
> === modified file 'src/core/media/gstreamer/playbin.cpp'
> --- src/core/media/gstreamer/playbin.cpp 2016-07-11 01:21:38 +0000
> +++ src/core/media/gstreamer/playbin.cpp 2017-02-15 10:21:21 +0000
> @@ -14,59 +14,78 @@
> * along with this program. If not, see <http://www.gnu.org/licenses/>.
> *
> * Authored by: Thomas Voß <thomas.voss at canonical.com>
> + * Alfonso Sanchez-Beato <alfonso.sanchez-beato at canonical.com>
> */
>
> #include <core/media/gstreamer/playbin.h>
> #include <core/media/gstreamer/engine.h>
> +#include <core/media/video/socket_types.h>
>
> #include <gst/pbutils/missing-plugins.h>
>
> -#if defined(MEDIA_HUB_HAVE_HYBRIS_MEDIA_COMPAT_LAYER)
> #include <hybris/media/surface_texture_client_hybris.h>
> #include <hybris/media/media_codec_layer.h>
>
> #include "core/media/logger/logger.h"
> #include "core/media/util/uri_check.h"
>
> +#include <sys/socket.h>
> +#include <sys/un.h>
> +
> #include <utility>
> -
> -namespace
> -{
> -void setup_video_sink_for_buffer_streaming(GstElement* pipeline)
> -{
> - // Get the service-side BufferQueue (IGraphicBufferProducer) and associate it with
> - // the SurfaceTextureClientHybris instance
> - IGBPWrapperHybris igbp = decoding_service_get_igraphicbufferproducer();
> - SurfaceTextureClientHybris stc = surface_texture_client_create_by_igbp(igbp);
> -
> - // Because mirsink is being loaded, we are definitely doing * hardware rendering.
> - surface_texture_client_set_hardware_rendering(stc, TRUE);
> -
> - GstContext *context = gst_context_new("gst.mir.MirContext", TRUE);
> - GstStructure *structure = gst_context_writable_structure(context);
> - gst_structure_set(structure, "gst_mir_context", G_TYPE_POINTER, stc, NULL);
> -
> - /* Propagate context in pipeline (needed by amchybris and mirsink) */
> - gst_element_set_context(pipeline, context);
> -}
> -}
> -#else // MEDIA_HUB_HAVE_HYBRIS_MEDIA_COMPAT_LAYER
> -namespace
> -{
> -void setup_video_sink_for_buffer_streaming(GstElement*)
> -{
> - throw core::ubuntu::media::Player::Errors::OutOfProcessBufferStreamingNotSupported{};
> -}
> -}
> -#endif // MEDIA_HUB_HAVE_HYBRIS_MEDIA_COMPAT_LAYER
> -
> -namespace
> -{
> -bool is_mir_video_sink()
> -{
> - return g_strcmp0(::getenv("CORE_UBUNTU_MEDIA_SERVICE_VIDEO_SINK_NAME"), "mirsink") == 0;
> -}
> -}
> +#include <cstring>
> +
> +static const char *PULSE_SINK = "pulsesink";
> +static const char *HYBRIS_SINK = "hybrissink";
> +static const char *MIR_SINK = "mirsink";
> +
> +using namespace std;
> +
> +void gstreamer::Playbin::setup_video_sink_for_buffer_streaming()
> +{
> + IGBPWrapperHybris igbp;
> + SurfaceTextureClientHybris stc;
> + GstContext *context;
> + GstStructure *structure;
> +
> + switch (backend) {
> + case core::ubuntu::media::AVBackend::Backend::hybris:
> + // Get the service-side BufferQueue (IGraphicBufferProducer) and
> + // associate with it the SurfaceTextureClientHybris instance.
> + igbp = decoding_service_get_igraphicbufferproducer();
> + stc = surface_texture_client_create_by_igbp(igbp);
> +
> + // Because mirsink is being loaded, we are definitely doing * hardware rendering.
> + surface_texture_client_set_hardware_rendering(stc, TRUE);
> +
> + context = gst_context_new("gst.mir.MirContext", TRUE);
> + structure = gst_context_writable_structure(context);
> + gst_structure_set(structure, "gst_mir_context", G_TYPE_POINTER, stc, NULL);
> +
> + /* Propagate context in pipeline (needed by amchybris and mirsink) */
> + gst_element_set_context(pipeline, context);
> + break;
> + case core::ubuntu::media::AVBackend::Backend::mir:
> + // Connect to buffer consumer socket
> + connect_to_consumer();
> + // Configure mirsink so it exports buffers
> + g_object_set (G_OBJECT (video_sink), "export-buffers", TRUE, nullptr);
What exactly does it mean for mirsink to export buffers? Maybe explain that some more here. Also, why doesn't it default to exporting buffers all of the time?
> + break;
> + case core::ubuntu::media::AVBackend::Backend::none:
> + default:
> + throw core::ubuntu::media::Player::Errors::
> + OutOfProcessBufferStreamingNotSupported{};
> + }
> +}
> +
> +bool gstreamer::Playbin::is_supported_video_sink(void) const
> +{
> + if (video_sink_name == HYBRIS_SINK || video_sink_name == MIR_SINK)
> + return TRUE;
> +
> + return FALSE;
> +}
> +
> // Uncomment to generate a dot file at the time that the pipeline
> // goes to the PLAYING state. Make sure to export GST_DEBUG_DUMP_DOT_DIR
> // before starting media-hub-server. To convert the dot file to something
> @@ -253,6 +281,40 @@
> MH_ERROR("Missing decoder for %s", mime);
> }
>
> +void gstreamer::Playbin::process_message_element(GstMessage *message)
> +{
> + const GstStructure *msg_data = gst_message_get_structure(message);
> + const gchar *struct_name = gst_structure_get_name(msg_data);
> +
> + if (g_strcmp0("buffer-export-data", struct_name) == 0)
> + {
> + int fd;
> + core::ubuntu::media::video::BufferMetadata meta;
> + if (!gst_structure_get(msg_data,
> + "fd", G_TYPE_INT, &fd,
> + "width", G_TYPE_INT, &meta.width,
> + "height", G_TYPE_INT, &meta.height,
> + "fourcc", G_TYPE_INT, &meta.fourcc,
> + "stride", G_TYPE_INT, &meta.stride,
> + "offset", G_TYPE_INT, &meta.offset,
> + NULL))
> + {
> + MH_ERROR("Wrong buffer-export-data message");
What does this mean? This wouldn't be a helpful error message if I were looking at the media-hub log output.
> + return;
> + }
> + MH_DEBUG("Exporting %dx%d buffer (fd %d)", meta.width, meta.height, fd);
> + send_buffer_data(fd, &meta, sizeof meta);
> + }
> + else if (g_strcmp0("frame-ready", struct_name) == 0)
> + {
> + send_frame_ready();
> + }
> + else
> + {
> + MH_ERROR("Unknown GST_MESSAGE_ELEMENT with struct %s", struct_name);
> + }
> +}
> +
> void gstreamer::Playbin::on_new_message_async(const Bus::Message& message)
> {
> switch (message.type)
> @@ -846,3 +910,92 @@
> else
> return true;
> }
> +
> +bool gstreamer::Playbin::connect_to_consumer(void)
> +{
> + static const char *local_socket = "media-hub-server";
> + static const char *consumer_socket = "media-consumer";
> +
> + int len;
> + struct sockaddr_un local, remote;
> +
> + if (sock_consumer != -1) {
> + MH_DEBUG("Resetting socket");
> + close(sock_consumer);
> + }
> +
> + if ((sock_consumer = socket(AF_UNIX, SOCK_DGRAM, 0)) == -1)
> + {
> + MH_ERROR("Cannot create socket: %s (%d)", strerror(errno), errno);
> + return false;
> + }
> +
> + // Bind client to local -abstract- socket (media-hub-server<session>)
> + ostringstream local_ss;
> + local_ss << local_socket << key;
> + local.sun_family = AF_UNIX;
> + local.sun_path[0] = '\0';
> + strcpy(local.sun_path + 1, local_ss.str().c_str());
> + len = sizeof(local.sun_family) + local_ss.str().length() + 1;
> + if (bind(sock_consumer, (struct sockaddr *) &local, len) == -1)
> + {
> + MH_ERROR("Cannot bind socket: %s (%d)", strerror(errno), errno);
> + close(sock_consumer);
> + sock_consumer = -1;
> + return false;
> + }
> +
> + // Connect to buffer consumer (media-consumer<session>)
> + ostringstream remote_ss;
> + remote_ss << consumer_socket << key;
> + remote.sun_family = AF_UNIX;
> + remote.sun_path[0] = '\0';
> + strcpy(remote.sun_path + 1, remote_ss.str().c_str());
> + len = sizeof(remote.sun_family) + remote_ss.str().length() + 1;
> + if (connect(sock_consumer, (struct sockaddr *) &remote, len) == -1)
> + {
> + MH_ERROR("Cannot connect to consumer: %s (%d)", strerror(errno), errno);
> + close(sock_consumer);
> + sock_consumer = -1;
> + return false;
> + }
> +
> + MH_DEBUG("Connected to buffer consumer socket");
> +
> + return true;
> +}
> +
> +void gstreamer::Playbin::send_buffer_data(int fd, void *data, size_t len)
> +{
> + struct msghdr msg{};
> + char buf[CMSG_SPACE(sizeof fd)]{};
> + struct cmsghdr *cmsg;
> + struct iovec io = { .iov_base = data, .iov_len = len };
> +
> + msg.msg_iov = &io;
> + msg.msg_iovlen = 1;
> + msg.msg_control = buf;
> + msg.msg_controllen = sizeof buf;
> +
> + cmsg = CMSG_FIRSTHDR(&msg);
> + cmsg->cmsg_level = SOL_SOCKET;
> + cmsg->cmsg_type = SCM_RIGHTS;
> + cmsg->cmsg_len = CMSG_LEN(sizeof fd);
> +
> + memmove(CMSG_DATA(cmsg), &fd, sizeof fd);
> +
> + msg.msg_controllen = cmsg->cmsg_len;
> +
> + if (sendmsg(sock_consumer, &msg, 0) < 0)
> + MH_ERROR("Failed to send dma_buf fd to consumer: %s (%d)",
> + strerror(errno), errno);
> +}
> +
> +void gstreamer::Playbin::send_frame_ready(void)
> +{
> + const char ready = 'r';
> +
> + if (send (sock_consumer, &ready, sizeof ready, 0) == -1)
> + MH_ERROR("Error when sending sync to client: %s (%d)",
Change to "Error when sending frame ready sync flag to client"
> + strerror(errno), errno);
> +}
>
> === modified file 'src/core/media/player_stub.cpp'
> --- src/core/media/player_stub.cpp 2016-08-15 19:27:29 +0000
> +++ src/core/media/player_stub.cpp 2017-02-15 10:21:21 +0000
> @@ -316,17 +316,22 @@
>
> media::video::Sink::Ptr media::PlayerStub::create_gl_texture_video_sink(std::uint32_t texture_id)
> {
> + // Create first local stub so media-hub can rely on an existing socket
> + // for the mir/desktop case.
> + auto sink = d->sink_factory(texture_id);
Make this const, since it is never reassigned: `const auto sink = d->sink_factory(texture_id);`
> +
> auto op = d->object->transact_method<mpris::Player::CreateVideoSink, void>(texture_id);
>
> if (op.is_error())
> {
> - if (op.error().name() == mpris::Player::Error::OutOfProcessBufferStreamingNotSupported::name)
> + if (op.error().name() ==
> + mpris::Player::Error::OutOfProcessBufferStreamingNotSupported::name)
> throw media::Player::Errors::OutOfProcessBufferStreamingNotSupported{};
> else
> throw std::runtime_error{op.error().print()};
> }
>
> - return d->sink_factory(texture_id);
> + return sink;
> }
>
> void media::PlayerStub::next()
>
> === added file 'src/core/media/video/egl_sink.cpp'
> --- src/core/media/video/egl_sink.cpp 1970-01-01 00:00:00 +0000
> +++ src/core/media/video/egl_sink.cpp 2017-02-15 10:21:21 +0000
> @@ -0,0 +1,311 @@
> +/*
> + * Copyright © 2017 Canonical Ltd.
> + *
> + * This program is free software: you can redistribute it and/or modify it
> + * under the terms of the GNU Lesser General Public License version 3,
> + * as published by the Free Software Foundation.
> + *
> + * This program is distributed in the hope that it will be useful,
> + * but WITHOUT ANY WARRANTY; without even the implied warranty of
> + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
> + * GNU Lesser General Public License for more details.
> + *
> + * You should have received a copy of the GNU Lesser General Public License
> + * along with this program. If not, see <http://www.gnu.org/licenses/>.
> + *
> + * Authored by: Alfonso Sanchez-Beato <alfonso.sanchez-beato at canonical.com>
> + */
> +
> +#include <core/media/video/egl_sink.h>
> +#include <core/media/video/socket_types.h>
> +
> +#include <EGL/egl.h>
> +#include <EGL/eglext.h>
> +#include <GLES2/gl2.h>
> +#include <GLES2/gl2ext.h>
> +
> +#include <sys/types.h>
> +#include <sys/socket.h>
> +#include <sys/un.h>
> +
> +#include <sstream>
> +#include <thread>
> +#include <future>
> +#include <cstring>
> +#include <unistd.h>
> +
> +namespace media = core::ubuntu::media;
> +namespace video = core::ubuntu::media::video;
> +
> +using namespace std;
> +
> +struct video::EglSink::Private
> +{
> +
> + static bool receive_buff(int socket, BufferData *data)
> + {
> + struct msghdr msg{};
> + struct iovec io = { .iov_base = &data->meta,
> + .iov_len = sizeof data->meta };
> + char c_buffer[256];
> + ssize_t res;
> +
> + msg.msg_iov = &io;
> + msg.msg_iovlen = 1;
> +
> + msg.msg_control = c_buffer;
> + msg.msg_controllen = sizeof c_buffer;
> +
> + if ((res = recvmsg(socket, &msg, 0)) == -1) {
> + cout << "Failed to receive message\n";
> + return false;
> + } else if (res == 0) {
> + cout << "Socket shutdown while receiving buffer data";
> + return false;
> + }
> +
> + struct cmsghdr *cmsg = CMSG_FIRSTHDR(&msg);
> +
> + memmove(&data->fd, CMSG_DATA(cmsg), sizeof data->fd);
> +
> + cout << "Extracted fd " << data->fd << '\n';
Use the MH_* debug macros here.
> + cout << "width " << data->meta.width << '\n';
> + cout << "height " << data->meta.height << '\n';
> + cout << "fourcc 0x" << hex << data->meta.fourcc << dec << '\n';
> + cout << "stride " << data->meta.stride << '\n';
> + cout << "offset " << data->meta.offset << '\n';
> +
> + return true;
> + }
> +
> + static void read_sock_events(const media::Player::PlayerKey key,
> + int sock_fd,
> + promise<BufferData>& prom_buff,
> + core::Signal<void>& frame_available)
> + {
> + static const char *consumer_socket = "media-consumer";
> +
> + struct sockaddr_un local;
> + int len;
> + BufferData buff_data;
> +
> + if (sock_fd == -1) {
> + perror("Cannot create buffer consumer socket");
> + return;
> + }
> +
> + ostringstream sock_name_ss;
> + sock_name_ss << consumer_socket << key;
> + local.sun_family = AF_UNIX;
> + local.sun_path[0] = '\0';
> + strcpy(local.sun_path + 1, sock_name_ss.str().c_str());
> + len = sizeof(local.sun_family) + sock_name_ss.str().length() + 1;
> + if (bind(sock_fd, (struct sockaddr *) &local, len) == -1) {
> + perror("Cannot bind consumer socket");
Use the MH_* debug macros here.
> + return;
> + }
> +
> + // Wait for buffer descriptions, pass them to rendering thread
> + if (!receive_buff(sock_fd, &buff_data))
> + return;
> +
> + prom_buff.set_value(buff_data);
> +
> + // Now signal frame syncs
> + while(true) {
> + ssize_t res;
> + char c;
> +
> + res = recv(sock_fd, &c, sizeof c, 0);
> + if (res == -1) {
> + perror("while waiting sync");
Use the MH_* debug macros here.
> + return;
> + } else if (res == 0) {
> + cout << "Socket shutdown\n";
Use the MH_* debug macros here.
> + return;
> + }
> +
> + frame_available();
> + }
> + }
> +
> + bool find_extension(const string& extensions, const string& ext)
> + {
> + size_t len_all = extensions.length();
> + size_t len = ext.length();
> + size_t pos = 0;
> +
> + while ((pos = extensions.find(ext, pos)) != string::npos) {
> + if (pos + len == len_all || extensions[pos + len] == ' ')
> + return true;
> +
> + pos = pos + len;
> + }
> +
> + return false;
> + }
> +
> + Private(uint32_t gl_texture, const media::Player::PlayerKey key)
Pass key by const reference instead of by value: `const media::Player::PlayerKey& key`
> + : gl_texture{gl_texture},
> + prom_buff{},
> + fut_buff{prom_buff.get_future()},
> + sock_fd{socket(AF_UNIX, SOCK_DGRAM, 0)},
> + sock_thread{read_sock_events, key, sock_fd,
Was there no way to do this using select instead of a thread?
> + ref(prom_buff), ref(frame_available)},
> + egl_image{EGL_NO_IMAGE_KHR},
> + buf_fd{-1}
> + {
> + const char *extensions;
> + const char *egl_needed[] = {"EGL_KHR_image_base",
> + "EGL_EXT_image_dma_buf_import"};
> + EGLDisplay egl_display = eglGetCurrentDisplay();
> + size_t i;
> +
> + extensions = eglQueryString (egl_display, EGL_EXTENSIONS);
I'd like to see some comments with some of the code blocks in here. It's not entirely obvious why each block is needed. A high level overview would suffice.
> + if (!extensions)
> + throw runtime_error {"Error querying EGL extensions"};
> +
> + for (i = 0; i < sizeof(egl_needed)/sizeof(egl_needed[0]); ++i) {
> + if (!find_extension(extensions, egl_needed[i])) {
> + ostringstream oss;
> + oss << egl_needed[i] << " not supported";
> + cout << oss.str() << '\n';
> + // TODO check why extensions is different from es2_info output
> + //throw runtime_error {oss.str().c_str()};
> + }
> + }
> +
> + // TODO this returns a NULL pointer, probably same issue as with eglQueryString
Is this really needed? What functionality do we miss without the extensions lookup?
> + // extensions = reinterpret_cast<const char *>(glGetString(GL_EXTENSIONS));
> + // if (!extensions)
> + // throw runtime_error {"Error querying OpenGL ES extensions"};
> +
> + // if (!find_extension(extensions, "GL_OES_EGL_image_external"))
> + // throw runtime_error {"GL_OES_EGL_image_external is not supported"};
> +
> + // Import functions from extensions
> + _eglCreateImageKHR = (PFNEGLCREATEIMAGEKHRPROC)
> + eglGetProcAddress("eglCreateImageKHR");
> + _eglDestroyImageKHR = (PFNEGLDESTROYIMAGEKHRPROC)
> + eglGetProcAddress("eglDestroyImageKHR");
> + _glEGLImageTargetTexture2DOES = (PFNGLEGLIMAGETARGETTEXTURE2DOESPROC)
> + eglGetProcAddress("glEGLImageTargetTexture2DOES");
> +
> + if (_eglCreateImageKHR == nullptr || _eglDestroyImageKHR == nullptr ||
> + _glEGLImageTargetTexture2DOES == nullptr)
> + throw runtime_error {"Error when loading extensions"};
> + }
> +
> + ~Private()
> + {
> + if (sock_fd != -1) {
> + shutdown(sock_fd, SHUT_RDWR);
> + sock_thread.join();
> + close(sock_fd);
> + }
> +
> + if (buf_fd != -1)
> + close(buf_fd);
> +
> + if (egl_image != EGL_NO_IMAGE_KHR)
> + _eglDestroyImageKHR(eglGetCurrentDisplay(), egl_image);
> + }
> +
> + bool import_buffer(const BufferData *buf_data)
Please add a comment for this method explaining at a high level what it's used for and how it functions.
> + {
> + GLenum err;
> + EGLDisplay egl_display = eglGetCurrentDisplay();
> + EGLint image_attrs[] = {
> + EGL_WIDTH, buf_data->meta.width,
> + EGL_HEIGHT, buf_data->meta.height,
> + EGL_LINUX_DRM_FOURCC_EXT, buf_data->meta.fourcc,
> + EGL_DMA_BUF_PLANE0_FD_EXT, buf_data->fd,
> + EGL_DMA_BUF_PLANE0_OFFSET_EXT, buf_data->meta.offset,
> + EGL_DMA_BUF_PLANE0_PITCH_EXT, buf_data->meta.stride,
> + EGL_NONE
> + };
> +
> + buf_fd = buf_data->fd;
> + egl_image = _eglCreateImageKHR(egl_display, EGL_NO_CONTEXT,
> + EGL_LINUX_DMA_BUF_EXT, NULL, image_attrs);
> + if (egl_image == EGL_NO_IMAGE_KHR) {
> + cout << "eglCreateImageKHR error 0x" << hex
> + << eglGetError() << dec << '\n';
> + return false;
> + }
> +
> + // TODO Do this when swapping if we end up importing more than one buffer
> + glBindTexture(GL_TEXTURE_2D, gl_texture);
> + _glEGLImageTargetTexture2DOES(GL_TEXTURE_2D, egl_image);
> +
> + while((err = glGetError()) != GL_NO_ERROR)
> + cout << "OpenGL error 0x" << hex << err << dec << '\n';
> +
> + cout << "Image successfully imported\n";
> +
> + return true;
> + }
> +
> + uint32_t gl_texture;
> + promise<BufferData> prom_buff;
> + future<BufferData> fut_buff;
> + core::Signal<void> frame_available;
> + int sock_fd;
> + thread sock_thread;
> + EGLImageKHR egl_image;
> + int buf_fd;
> + PFNEGLCREATEIMAGEKHRPROC _eglCreateImageKHR;
> + PFNEGLDESTROYIMAGEKHRPROC _eglDestroyImageKHR;
> + PFNGLEGLIMAGETARGETTEXTURE2DOESPROC _glEGLImageTargetTexture2DOES;
> +};
> +
> +function<video::Sink::Ptr(uint32_t)>
> +video::EglSink::factory_for_key(const media::Player::PlayerKey& key)
> +{
> + return [key](uint32_t texture)
> + {
> + return video::Sink::Ptr{new video::EglSink{texture, key}};
> + };
> +}
> +
> +video::EglSink::EglSink(uint32_t gl_texture,
> + const media::Player::PlayerKey key)
Pass key by const reference here as well: `const media::Player::PlayerKey& key`
> + : d{new Private{gl_texture, key}}
> +{
> +}
> +
> +video::EglSink::~EglSink()
> +{
> +}
> +
> +const core::Signal<void>& video::EglSink::frame_available() const
> +{
> + return d->frame_available;
> +}
> +
> +bool video::EglSink::transformation_matrix(float *matrix) const
> +{
> + // TODO: Can we get orientation on unity8 desktop somehow?
> + static const float identity_4x4[] = { 1, 0, 0, 0,
> + 0, 1, 0, 0,
> + 0, 0, 1, 0,
> + 0, 0, 0, 1 };
> +
> + memcpy(matrix, identity_4x4, sizeof identity_4x4);
> + return true;
> +}
> +
> +bool video::EglSink::swap_buffers() const
> +{
> + // First time called, import buffers
> + if (d->egl_image == EGL_NO_IMAGE_KHR) {
> + BufferData buf_data = d->fut_buff.get();
> + if (!d->import_buffer(&buf_data))
> + return false;
> + }
> +
> + // We need to do nothing here, as the only buffer has already been mapped.
> + // TODO Change when we implement a buffer queue.
> +
> + return true;
> +}
--
https://code.launchpad.net/~alfonsosanchezbeato/media-hub/video-desktop-support/+merge/317181
Your team Ubuntu Phablet Team is subscribed to branch lp:media-hub.
More information about the Ubuntu-reviews
mailing list