How to show GStreamer video in a frameless Qt window?


I am trying to show GStreamer video in a frameless Qt window. The GStreamer pipeline I am writing code for is:
gst-launch-1.0 -v rtspsrc location=rtsp://192.168.1.15:8554/test ! rtpjitterbuffer ! rtph264depay ! avdec_h264 ! d3dvideosink sync=false
This is my first question here, so please bear with me. My code is below. It works with a normal Qt window and shows the video received from the RTSP link, but I have two major issues with it:
1. When I minimize the window, it loses the video output and shows a blank screen.
2. I want to show the video in a frameless window, but when I do so, nothing is displayed.


I am using Qt 4.8.12 and GStreamer 1.4.5 on Windows 7 64-bit. Any help with these two issues is highly appreciated. Thanks in advance.
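
For the first issue, one idea is to ask the sink to redraw its last frame whenever the widget is shown or repainted again, via gst_video_overlay_expose(). Below is a minimal sketch of that idea only; the VideoWidget class name and its stored sink pointer are illustrative and not part of my current code, and I have not verified that this actually cures the blank screen after a minimize/restore.

#include <QWidget>
#include <QShowEvent>
#include <QPaintEvent>
#include <gst/gst.h>
#include <gst/video/videooverlay.h>

/* Illustrative widget: asks the overlay sink to redraw whenever Qt shows
   or repaints the widget (e.g. after a minimize/restore). */
class VideoWidget : public QWidget
{
public:
  explicit VideoWidget(QWidget *parent = 0) : QWidget(parent), m_sink(0) {}

  void setVideoSink(GstElement *sink) { m_sink = sink; }

protected:
  void showEvent(QShowEvent *event)
  {
    QWidget::showEvent(event);
    if (m_sink)
      gst_video_overlay_expose (GST_VIDEO_OVERLAY (m_sink));
  }

  void paintEvent(QPaintEvent *event)
  {
    QWidget::paintEvent(event);
    if (m_sink)
      gst_video_overlay_expose (GST_VIDEO_OVERLAY (m_sink));
  }

private:
  GstElement *m_sink;
};

My full code is below: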

#include <glib.h>
#include <gst/gst.h>
#include <gst/video/videooverlay.h>

#include <QApplication>
#include <QTimer>
#include <QWidget>
#include <stdio.h>
#include "qmainwindow.h"


static void on_pad_added (GstElement *element, GstPad *pad, gpointer data);
static gboolean bus_call (GstBus *bus, GstMessage *msg, gpointer data);
int main(int argc, char *argv[])
{
  

  if (!g_thread_supported ())
    g_thread_init (NULL);

  /* Initialize GStreamer */


  gst_init (&argc, &argv);
  QApplication app(argc, argv);
  app.connect(&app, SIGNAL(lastWindowClosed()), &app, SLOT(quit ()));

  /* Creating Elements */

  //GstElement *pipeLine = gst_pipeline_new ("xvoverlay");

 QWidget window;
 // QMainWindow window;
  window.resize(1024,768);
  WId xwinid=window.winId();
  
  GMainLoop *loop;

  GstElement *pipeLine, *rtspSrc, *rtpJitterBuffer, *rtpH264Depay, *avDecH264, *videoSink;

  rtspSrc =  gst_element_factory_make("rtspsrc", NULL);

  rtpJitterBuffer =  gst_element_factory_make("rtpjitterbuffer", NULL);

  rtpH264Depay =  gst_element_factory_make("rtph264depay", NULL);

  avDecH264 = gst_element_factory_make("avdec_h264", NULL);

  videoSink =  gst_element_factory_make("d3dvideosink", NULL);

  loop = g_main_loop_new (NULL, FALSE); 

  if (!rtspSrc || !rtpJitterBuffer || !rtpH264Depay || !avDecH264 || !videoSink) 
  {
        g_printerr ("Not all elements could be created.\n");
        return -1;
  }
  

  /* Set element properties */

   g_object_set( rtspSrc, "location", "rtsp://192.168.1.16:8554/test" , NULL);
   g_object_set( videoSink, "sync", false, NULL);
  
   /*Initializing Pipeline*/

   pipeLine = gst_pipeline_new ("TestPipeLine");

   if (!pipeLine)
   {
        g_printerr ("Pipeline could not be created.\n");
        return -1;
   }

   /* we add a message handler */ 
  GstBus *bus = gst_pipeline_get_bus (GST_PIPELINE (pipeLine)); 
  gst_bus_add_watch (bus, bus_call, loop); 
  gst_object_unref (bus); 


   /*Adding Components to the pipeline */

   gst_bin_add_many (GST_BIN (pipeLine),
                     rtspSrc,
                     rtpJitterBuffer,
                     rtpH264Depay,
                     avDecH264,
                     videoSink,
                     NULL);
   /* rtspsrc creates its source pad dynamically, so rtspSrc cannot be linked
      here; it is linked to rtpJitterBuffer in the "pad-added" callback below. */
   

   if (gst_element_link (rtpJitterBuffer, rtpH264Depay) != TRUE) 
   {
        g_printerr ("rtpJitterBuffer and rtpH264Depay could not be linked.\n");
        gst_object_unref (pipeLine);
        return -1;
   }
   
   if (gst_element_link (rtpH264Depay, avDecH264) != TRUE) 
   {
        g_printerr ("rtpH264Depay and avDecH264 could not be linked.\n");
        gst_object_unref (pipeLine);
        return -1;
   }
   if (gst_element_link (avDecH264, videoSink) != TRUE) 
   {
        g_printerr ("avDecH264 and videoSink could not be linked.\n");
        gst_object_unref (pipeLine);
        return -1;
   }

   g_signal_connect (rtspSrc, "pad-added", G_CALLBACK (on_pad_added), rtpJitterBuffer);
   window.setWindowFlags(Qt::FramelessWindowHint);

   gst_video_overlay_set_window_handle (GST_VIDEO_OVERLAY(videoSink), guintptr(xwinid));

   window.show();
  
   /* Set the pipeline to "playing" state*/ 
  g_print ("Now playing: %s\n", argv[1]); 
  gst_element_set_state (pipeLine, GST_STATE_PLAYING); 
    app.exec();

  /* Iterate */ 
  g_print ("Running...\n"); 
  g_main_loop_run (loop); 
  

  /* Out of the main loop, clean up nicely */ 
  g_print ("Returned, stopping playback\n"); 
  gst_element_set_state (pipeLine, GST_STATE_NULL); 
  
  g_print ("Deleting pipeline\n"); 
  gst_object_unref (GST_OBJECT (pipeLine)); 

  return 0; 

}

static void on_pad_added (GstElement *element, GstPad *pad, gpointer data) 
{ 
  GstPad *sinkpad; 
  GstElement *jitterBuffer = (GstElement *) data; 

  /* Link the dynamically created rtspsrc pad to rtpjitterbuffer's sink pad */ 
  g_print ("Dynamic pad created, linking rtspsrc to rtpjitterbuffer\n"); 

  sinkpad = gst_element_get_static_pad (jitterBuffer, "sink"); 

  gst_pad_link (pad, sinkpad); 

  gst_object_unref (sinkpad); 
} 

static gboolean bus_call (GstBus *bus, GstMessage *msg, gpointer data) 
{ 
  GMainLoop *loop = (GMainLoop *) data; 

  switch (GST_MESSAGE_TYPE (msg)) { 

    case GST_MESSAGE_EOS: 
      g_print ("End of stream\n"); 
      g_main_loop_quit (loop); 
      break; 

    case GST_MESSAGE_ERROR: { 
      gchar  *debug; 
      GError *error; 

      gst_message_parse_error (msg, &error, &debug); 
      g_free (debug); 

      g_printerr ("Error: %s\n", error->message); 
      g_error_free (error); 

      g_main_loop_quit (loop); 
      break; 
    } 
    default: 
      break; 
  } 

  return TRUE; 
} 

1 Answer

Answer by David Netherwood:

If you switch back to using QWindow, this will work:

window.setWindowFlags (Qt::FramelessWindowHint);

I'm not sure how, but I think something similar is available for QWidget.
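
With a plain QWidget, the order of the calls may also matter: setWindowFlags() recreates the native window, so a winId() taken before the flag change refers to a window that no longer exists by the time the sink tries to draw into it. A sketch of that ordering, reusing the variable names from the question (not tested against this exact setup):

QWidget window;
window.setWindowFlags(Qt::FramelessWindowHint);   /* set the flags first */
window.resize(1024, 768);

WId xwinid = window.winId();                      /* handle of the final native window */
gst_video_overlay_set_window_handle (GST_VIDEO_OVERLAY (videoSink), guintptr (xwinid));

window.show();
gst_element_set_state (pipeLine, GST_STATE_PLAYING);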