//! `RenderAndroid` is a `Render` implementation for the Android
//! platform. It only implements an OpenGL ES mechanism.
//!
//! Internally it uses GStreamer's *glsinkbin* element as *videosink*,
//! wrapping the *appsink* from the Player, and the shared frames are
//! mapped as texture IDs.
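//!
//! Roughly, the sink topology that `build_video_sink` sets up looks like
//! this (a sketch for orientation, not literal `gst-launch` syntax):
//!
//! ```text
//! playbin video-sink=glsinkbin
//!     glsinkbin sink=appsink
//!         appsink caps="video/x-raw(memory:GLMemory), format=RGBA,
//!                       texture-target={ 2D, external-oes }"
//! ```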

extern crate gstreamer as gst;
extern crate gstreamer_gl as gst_gl;
extern crate gstreamer_video as gst_video;

extern crate servo_media_gstreamer_render as sm_gst_render;
extern crate servo_media_player as sm_player;

use gst::prelude::*;
use gst_gl::prelude::*;
use sm_gst_render::Render;
use sm_player::context::{GlApi, GlContext, NativeDisplay, PlayerGLContext};
use sm_player::frame::{Buffer, Frame, FrameData};
use sm_player::PlayerError;
use std::sync::{Arc, Mutex};

struct GStreamerBuffer {
    is_external_oes: bool,
    frame: gst_video::VideoFrame<gst_video::video_frame::Readable>,
}

impl Buffer for GStreamerBuffer {
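    // Despite the `to_vec` name, no pixel data is copied here: the mapped GL
    // frame is handed to the consumer as a texture identifier, either a
    // regular 2D texture or an Android external-oes texture.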
    fn to_vec(&self) -> Result<FrameData, ()> {
        // packed formats are guaranteed to be in a single plane
        if self.frame.format() == gst_video::VideoFormat::Rgba {
            let tex_id = self.frame.get_texture_id(0).ok_or_else(|| ())?;
            Ok(if self.is_external_oes {
                FrameData::OESTexture(tex_id)
            } else {
                FrameData::Texture(tex_id)
            })
        } else {
            Err(())
        }
    }
}

pub struct RenderAndroid {
    display: gst_gl::GLDisplay,
    app_context: gst_gl::GLContext,
    gst_context: Arc<Mutex<Option<gst_gl::GLContext>>>,
    gl_upload: Arc<Mutex<Option<gst::Element>>>,
}

impl RenderAndroid {
    /// Tries to create a new instance of `RenderAndroid`.
    ///
    /// # Arguments
    ///
    /// * `app_gl_context` - the `PlayerGLContext` trait object provided
    ///   by the application.
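    ///
    /// # Example
    ///
    /// A rough, non-compiled sketch (hence the `ignore` fence) of how an
    /// application might provide its GL handles. `DummyGlContext` and the raw
    /// EGL handle values are hypothetical, and it assumes `PlayerGLContext`
    /// only requires the three accessors used by this constructor:
    ///
    /// ```ignore
    /// use sm_player::context::{GlApi, GlContext, NativeDisplay, PlayerGLContext};
    ///
    /// struct DummyGlContext {
    ///     raw_egl_context: usize,
    ///     raw_egl_display: usize,
    /// }
    ///
    /// impl PlayerGLContext for DummyGlContext {
    ///     fn get_gl_context(&self) -> GlContext {
    ///         GlContext::Egl(self.raw_egl_context)
    ///     }
    ///
    ///     fn get_native_display(&self) -> NativeDisplay {
    ///         NativeDisplay::Egl(self.raw_egl_display)
    ///     }
    ///
    ///     fn get_gl_api(&self) -> GlApi {
    ///         GlApi::Gles2
    ///     }
    /// }
    ///
    /// let render = RenderAndroid::new(Box::new(DummyGlContext {
    ///     raw_egl_context: egl_context_handle,
    ///     raw_egl_display: egl_display_handle,
    /// }));
    /// ```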
    pub fn new(app_gl_context: Box<dyn PlayerGLContext>) -> Option<RenderAndroid> {
        // Check that we actually have the elements that we
        // need to make this work.
        if gst::ElementFactory::find("glsinkbin").is_none() {
            return None;
        }

        let display_native = app_gl_context.get_native_display();
        let gl_context = app_gl_context.get_gl_context();
        let gl_api = match app_gl_context.get_gl_api() {
            GlApi::OpenGL => gst_gl::GLAPI::OPENGL,
            GlApi::OpenGL3 => gst_gl::GLAPI::OPENGL3,
            GlApi::Gles1 => gst_gl::GLAPI::GLES1,
            GlApi::Gles2 => gst_gl::GLAPI::GLES2,
            GlApi::None => gst_gl::GLAPI::NONE,
        };

        let (wrapped_context, display) = match gl_context {
            GlContext::Egl(context) => {
                let display = match display_native {
                    NativeDisplay::Egl(display_native) => {
                        unsafe { gst_gl::GLDisplayEGL::new_with_egl_display(display_native) }
                            .and_then(|display| Some(display.upcast()))
                    }
                    _ => None,
                };

                if let Some(display) = display {
                    let wrapped_context = unsafe {
                        gst_gl::GLContext::new_wrapped(
                            &display,
                            context,
                            gst_gl::GLPlatform::EGL,
                            gl_api,
                        )
                    };
                    (wrapped_context, Some(display))
                } else {
                    (None, None)
                }
            }
            _ => (None, None),
        };

        if let Some(app_context) = wrapped_context {
            Some(RenderAndroid {
                display: display.unwrap(),
                app_context,
                gst_context: Arc::new(Mutex::new(None)),
                gl_upload: Arc::new(Mutex::new(None)),
            })
        } else {
            None
        }
    }
}

impl Render for RenderAndroid {
    fn is_gl(&self) -> bool {
        true
    }

    fn build_frame(&self, sample: gst::Sample) -> Result<Frame, ()> {
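        // The GStreamer GL context is created by the pipeline's glupload
        // element once it is running, so it is fetched lazily from that
        // element's "context" property the first time a frame arrives.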
        if self.gst_context.lock().unwrap().is_none() && self.gl_upload.lock().unwrap().is_some() {
            *self.gst_context.lock().unwrap() =
                if let Some(glupload) = self.gl_upload.lock().unwrap().as_ref() {
                    glupload
                        .get_property("context")
                        .or_else(|_| Err(()))?
                        .get::<gst_gl::GLContext>()
                } else {
                    None
                };
        }

        let buffer = sample.get_buffer_owned().ok_or_else(|| ())?;
        let caps = sample.get_caps().ok_or_else(|| ())?;

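        // The negotiated caps tell us whether the texture uses the regular 2D
        // target or Android's external-oes target (GL_TEXTURE_EXTERNAL_OES).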
        let is_external_oes = caps
            .get_structure(0)
            .and_then(|s| {
                s.get::<&str>("texture-target").and_then(|target| {
                    if target == "external-oes" {
                        Some(s)
                    } else {
                        None
                    }
                })
            })
            .is_some();

        let info = gst_video::VideoInfo::from_caps(caps).ok_or_else(|| ())?;

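        // If the pipeline's GL context is known, add a sync point so that the
        // texture is fully produced before it is consumed.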
        if self.gst_context.lock().unwrap().is_some() {
            if let Some(sync_meta) = buffer.get_meta::<gst_gl::GLSyncMeta>() {
                sync_meta.set_sync_point(self.gst_context.lock().unwrap().as_ref().unwrap());
            }
        }

        let frame =
            gst_video::VideoFrame::from_buffer_readable_gl(buffer, &info).or_else(|_| Err(()))?;

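        // Make the wrapped application GL context wait on that sync point
        // before the texture id is handed out.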
        if self.gst_context.lock().unwrap().is_some() {
            if let Some(sync_meta) = frame.buffer().get_meta::<gst_gl::GLSyncMeta>() {
                sync_meta.wait(&self.app_context);
            }
        }

        Frame::new(
            info.width() as i32,
            info.height() as i32,
            Arc::new(GStreamerBuffer {
                is_external_oes,
                frame,
            }),
        )
    }

    fn build_video_sink(
        &self,
        appsink: &gst::Element,
        pipeline: &gst::Element,
    ) -> Result<(), PlayerError> {
        if self.gl_upload.lock().unwrap().is_some() {
            return Err(PlayerError::Backend(
                "render android already set up the video sink".to_owned(),
            ));
        }

        let vsinkbin = gst::ElementFactory::make("glsinkbin", Some("servo-media-vsink"))
            .ok_or(PlayerError::Backend("glsinkbin creation failed".to_owned()))?;

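        // Restrict the appsink to RGBA frames in GL memory; serialized, these
        // caps read roughly as:
        //   video/x-raw(memory:GLMemory), format=RGBA,
        //   texture-target={ 2D, external-oes }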
        let caps = gst::Caps::builder("video/x-raw")
            .features(&[&gst_gl::CAPS_FEATURE_MEMORY_GL_MEMORY])
            .field("format", &gst_video::VideoFormat::Rgba.to_string())
            .field("texture-target", &gst::List::new(&[&"2D", &"external-oes"]))
            .build();
        appsink
            .set_property("caps", &caps)
            .expect("appsink doesn't have expected 'caps' property");

        vsinkbin
            .set_property("sink", &appsink)
            .expect("glsinkbin doesn't have expected 'sink' property");

        pipeline
            .set_property("video-sink", &vsinkbin)
            .expect("playbin doesn't have expected 'video-sink' property");

        let bus = pipeline.get_bus().expect("pipeline with no bus");
        let display_ = self.display.clone();
        let context_ = self.app_context.clone();
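        // Answer the elements' NEED_CONTEXT queries on the bus so that the
        // pipeline's GL elements use our GLDisplay and share the wrapped
        // application GL context instead of creating their own.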
        bus.set_sync_handler(move |_, msg| {
            match msg.view() {
                gst::MessageView::NeedContext(ctxt) => {
                    if let Some(el) = msg.get_src().map(|s| s.downcast::<gst::Element>().unwrap()) {
                        let context_type = ctxt.get_context_type();
                        if context_type == *gst_gl::GL_DISPLAY_CONTEXT_TYPE {
                            let ctxt = gst::Context::new(context_type, true);
                            ctxt.set_gl_display(&display_);
                            el.set_context(&ctxt);
                        } else if context_type == "gst.gl.app_context" {
                            let mut ctxt = gst::Context::new(context_type, true);
                            {
                                let s = ctxt.get_mut().unwrap().get_mut_structure();
                                s.set_value("context", context_.to_send_value());
                            }
                            el.set_context(&ctxt);
                        }
                    }
                }
                _ => (),
            }

            gst::BusSyncReply::Pass
        });

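        // Find the glupload element inside glsinkbin; build_frame later reads
        // the pipeline's GL context from its "context" property.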
        let mut iter = vsinkbin
            .dynamic_cast::<gst::Bin>()
            .unwrap()
            .iterate_elements();
        *self.gl_upload.lock().unwrap() = loop {
            match iter.next() {
                Ok(Some(element)) => {
                    if "glupload" == element.get_factory().unwrap().get_name() {
                        break Some(element);
                    }
                }
                Err(gst::IteratorError::Resync) => iter.resync(),
                _ => break None,
            }
        };

        Ok(())
    }
}