2012-10-25

I am trying to stream video over the network using gstreamer appsrc.

I found a good example here:

gstreamer appsrc test application

http://amarghosh.blogspot.kr/2012/01/gstreamer-appsrc-in-action.html

Using the example above, I can play a video in an X window created with Xlib. When the pipeline is set to the PLAYING state, the "need-data" signal is emitted, the start_feed callback reads data from the video file into a GstBuffer and pushes it into appsrc, and the sample video plays.

I want to get the data from the network instead of from a file, so my idea is a simple echo server that reads the video file in exactly the same way and sends the data to a client once it connects; the client would then take that data and feed it into appsrc. A rough sketch of such a sender is below.
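For reference, here is a minimal sketch of that sender, not a definitive implementation: the port number (5000), the chunk size, and the missing error handling are all my own simplifications; the file name is taken from argv[1].

// Sketch: TCP sender that streams a file to the first client that 
// connects (port 5000 is an arbitrary choice; no error handling). 
#include <stdio.h> 
#include <string.h> 
#include <unistd.h> 
#include <arpa/inet.h> 
#include <sys/socket.h> 

int main(int argc, char *argv[]) 
{ 
    FILE *file = fopen(argv[1], "rb"); 
    int srv = socket(AF_INET, SOCK_STREAM, 0); 
    struct sockaddr_in addr; 
    char chunk[4096]; 
    size_t n; 

    memset(&addr, 0, sizeof(addr)); 
    addr.sin_family = AF_INET; 
    addr.sin_addr.s_addr = INADDR_ANY; 
    addr.sin_port = htons(5000); 

    bind(srv, (struct sockaddr *)&addr, sizeof(addr)); 
    listen(srv, 1); 

    int client = accept(srv, NULL, NULL); // blocks until a client connects 

    // send the file in chunks, the same way read_data() below reads it 
    while ((n = fread(chunk, 1, sizeof(chunk), file)) > 0) 
     write(client, chunk, n); 

    close(client); 
    close(srv); 
    fclose(file); 
    return 0; 
} 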

My question is: how do I get this streamed data into the appsrc pipeline? Does anyone have suggestions or comments?

Below is the working example code based on the links above.

// http://amarghosh.blogspot.kr/2012/01/gstreamer-appsrc-in-action.html 
// http://www.cs.odu.edu/~cs476/Xlib/xlines.c 
#include <X11/Xlib.h> 
#include <X11/Xutil.h> 
#include <stdio.h> 
#include <stdlib.h> // exit() 
#include <unistd.h> // sleep() 
#include <stdbool.h> 

#include <gst/gst.h> 
#include <gst/app/gstappsrc.h> 
#include <gst/app/gstappbuffer.h> 
#include <gst/interfaces/xoverlay.h> 

#define BUFF_SIZE (640*480*3) // (1024) 
#define BORDER_WIDTH 2 

#define DEBUG printf 

typedef unsigned int uint32; 
typedef unsigned char uint8; 

typedef struct { 
    GstPipeline *pipeline; 
    GstAppSrc *src; 
    GstElement *sink; 
    GstElement *decoder; 
    GstElement *ffmpeg; 
    GstElement *videosink; 
    GMainLoop *loop; 
    guint sourceid; 
    FILE *file; 
} gst_app_t; 

static gst_app_t gst_app; 

static Window child_window = 0; 
static Window window = 0; 

/* Idle handler: read one chunk of the file into a GstBuffer and push 
 * it into appsrc; returns FALSE (stop) on EOF or push failure. */ 
static gboolean read_data(gst_app_t *app) 
{ 
    GstBuffer *buffer; 
    guint8 *ptr; 
    gint size; 
    GstFlowReturn ret; 

    ptr = g_malloc(BUFF_SIZE); 
    g_assert(ptr); 

    size = fread(ptr, 1, BUFF_SIZE, app->file); 

    if(size == 0){ 
     g_free(ptr); /* nothing was read; free the unused buffer */ 
     ret = gst_app_src_end_of_stream(app->src); 
     DEBUG("eos returned %d at %d\n", ret, __LINE__); 
     return FALSE; 
    } 

    buffer = gst_buffer_new(); 
    GST_BUFFER_MALLOCDATA(buffer) = ptr; 
    GST_BUFFER_SIZE(buffer) = size; 
    GST_BUFFER_DATA(buffer) = GST_BUFFER_MALLOCDATA(buffer); 

    ret = gst_app_src_push_buffer(app->src, buffer); 

    if(ret != GST_FLOW_OK){ 
     DEBUG("push buffer returned %d for %d bytes \n", ret, size); 
     return FALSE; 
    } 

    if(size != BUFF_SIZE){ 
     ret = gst_app_src_end_of_stream(app->src); 
     DEBUG("eos returned %d at %d\n", ret, __LINE__); 
     return FALSE; 
    } 

    return TRUE; 
} 

/* "need-data" handler: appsrc wants data, so attach read_data() as an 
 * idle source that pushes one buffer per iteration. */ 
static void start_feed (GstElement * pipeline, guint size, gst_app_t *app) 
{ 
    if (app->sourceid == 0) { 
     DEBUG ("start feeding\n"); 
     app->sourceid = g_idle_add ((GSourceFunc) read_data, app); 
    } 
} 

/* "enough-data" handler: appsrc's queue is full, so detach the idle 
 * source until "need-data" fires again. */ 
static void stop_feed (GstElement * pipeline, gst_app_t *app) 
{ 
    if (app->sourceid != 0) { 
     DEBUG ("stop feeding\n"); 
     g_source_remove (app->sourceid); 
     app->sourceid = 0; 
    } 
} 

/* "pad-added" handler: decodebin2 exposed a new pad; link video pads 
 * to the ffmpegcolorspace sink (passed in as user data). */ 
static void on_pad_added(GstElement *element, GstPad *pad, GstElement *ffmpeg) 
{ 
    GstCaps *caps; 
    GstStructure *str; 
    gchar *name; 
    GstPad *ffmpegsink; 
    GstPadLinkReturn ret; 

    DEBUG("pad added\n"); 

    caps = gst_pad_get_caps(pad); 
    str = gst_caps_get_structure(caps, 0); 

    g_assert(str); 

    name = (gchar*)gst_structure_get_name(str); 

    DEBUG("pad name %s\n", name); 

    if(g_strrstr(name, "video")){ 

     ffmpegsink = gst_element_get_static_pad(ffmpeg, "sink"); 
     g_assert(ffmpegsink); 
     ret = gst_pad_link(pad, ffmpegsink); 
     DEBUG("pad_link returned %d\n", ret); 
     gst_object_unref(ffmpegsink); 
    } 
    gst_caps_unref(caps); 
} 

/* Bus watch: handles window-handle requests, errors, warnings and EOS. */ 
static gboolean bus_callback(GstBus *bus, GstMessage *message, gpointer ptr) 
{ 
    gst_app_t *app = (gst_app_t*)ptr; 

    switch(GST_MESSAGE_TYPE(message)) 
    { 
    case GST_MESSAGE_ELEMENT: 
     /* only the "prepare-xwindow-id" message carries an x-overlay sink */ 
     if (gst_structure_has_name(message->structure, "prepare-xwindow-id")) 
      gst_x_overlay_set_window_handle (GST_X_OVERLAY (GST_MESSAGE_SRC(message)), child_window); 
     break; 

    case GST_MESSAGE_ERROR: 
     { 
     gchar *debug; 
     GError *err; 

     gst_message_parse_error(message, &err, &debug); 
     DEBUG("Error %s\n", err->message); 
     g_error_free(err); 
     g_free(debug); 
     g_main_loop_quit(app->loop); 
     } 
     break; 

    case GST_MESSAGE_WARNING: 
     { 
     gchar *debug; 
     GError *err; 
     gchar *name; 

     gst_message_parse_warning(message, &err, &debug); 
     DEBUG("Warning %s\nDebug %s\n", err->message, debug); 

     name = GST_MESSAGE_SRC_NAME(message); 

     DEBUG("Name of src %s\n", name ? name : "nil"); 
     g_error_free(err); 
     g_free(debug); 
     } 
     break; 

    case GST_MESSAGE_EOS: 
     DEBUG("End of stream\n"); 
     g_main_loop_quit(app->loop); 
     break; 

    case GST_MESSAGE_STATE_CHANGED: 
     break; 

    default: 
     DEBUG("got message %s\n", \ 
     gst_message_type_get_name (GST_MESSAGE_TYPE (message))); 
     break; 
    } 

    return TRUE; 
} 

static gboolean terminate_playback (GstElement * loop) 
{ 
    DEBUG ("Terminating playback\n"); 
    g_main_loop_quit ((GMainLoop *)loop); 
    return FALSE; 
} 

int gstreamer_init(int argc, char *argv[]) 
{ 
    gst_app_t *app = &gst_app; 

    GstBus *bus; 
    GstStateChangeReturn state_ret; 

    app->file = fopen(argv[1], "rb"); 

    g_assert(app->file); 

    /* initialization */ 
    gst_init(&argc, &argv); 

    app->loop = g_main_loop_new(NULL, FALSE); 

    /* create elements */ 
    app->pipeline = (GstPipeline *)gst_pipeline_new("my_pipeline"); 

    app->src = (GstAppSrc *)gst_element_factory_make("appsrc", "myappsrc"); 
    app->decoder = gst_element_factory_make("decodebin2", "mydecoder"); 
    app->ffmpeg = gst_element_factory_make("ffmpegcolorspace", "myffmpeg"); 
    app->videosink = gst_element_factory_make("autovideosink", "myvideosink"); 

    if (!app->videosink) { 
     DEBUG ("output could not be found - check your install\n"); 
    } 

    g_assert(app->src); 
    g_assert(app->decoder); 
    g_assert(app->ffmpeg); 
    g_assert(app->videosink); 

    bus = gst_pipeline_get_bus(GST_PIPELINE(app->pipeline)); 
    gst_bus_add_watch(bus, (GstBusFunc)bus_callback, app); 
    gst_object_unref(bus); 

    g_signal_connect(app->decoder, "pad-added", 
       G_CALLBACK(on_pad_added), app->ffmpeg); 

    //gst_app_src_set_emit_signals(app->src, true); 
    g_signal_connect(app->src, "need-data", G_CALLBACK(start_feed), app); 
    g_signal_connect(app->src, "enough-data", G_CALLBACK(stop_feed), app); 

    gst_bin_add_many (GST_BIN (app->pipeline), (GstElement *)app->src, 
       app->decoder, app->ffmpeg, app->videosink, NULL); 

    /* link everything together */ 
    if (!gst_element_link((GstElement *)app->src, app->decoder)) { 

     DEBUG ("Failed to link one or more elements!\n"); 
     return -1; 
    } 

    if(!gst_element_link(app->ffmpeg, app->videosink)){ 
     DEBUG("failed to link ffmpeg and videosink"); 
     return -1; 
    } 

    state_ret = gst_element_set_state((GstElement *)app->pipeline, GST_STATE_PLAYING); 
    if (state_ret == GST_STATE_CHANGE_FAILURE) { 

     DEBUG("Failed to start up pipeline!\n"); 
     return 1; 
    } 

    DEBUG("set state returned %d\n", state_ret); 

    //g_timeout_add (15000, (GSourceFunc) terminate_playback, app->loop); 

    g_main_loop_run(app->loop); 

    state_ret = gst_element_set_state((GstElement *)app->pipeline, GST_STATE_NULL); 
    DEBUG("set state null returned %d\n", state_ret); 
    gst_object_unref(app->pipeline); 

    return 1; 
} 

/* 
* gst-launch filesrc location=test.avi ! decodebin2 ! ffmpegcolorspace ! autovideosink 
* 
* 1. dependency library install 
* $ sudo apt-get install gstreamer0.10-plugins-bad 
* $ sudo apt-get install gstreamer0.10-ffmpeg 
* 
* 2. compile 
* $ gcc hello.c -o hello -lX11 `pkg-config --cflags --libs gstreamer-0.10 gstreamer-app-0.10` -lgstinterfaces-0.10 
* 
* 3. how to run program 
* $ ./hello <video_file_name> 
* $ GST_DEBUG=appsrc:5 ./hello ./hbo_dtc_sd.ts 
*/ 

int main(int argc, char *argv[]) 
{ 
    Display *disp; 
    Window root; 
    long fgcolor, bgcolor; 

    GC gc; 
    XGCValues gc_val; 
    XEvent event; 
    char *msg = "Hello, World!"; 
    int screen; 

    disp = XOpenDisplay(NULL); 
    if (disp == NULL) { 
     fprintf(stderr, "Cannot open display\n"); 
     exit(1); 
    } 

    screen = DefaultScreen(disp); 

    root = RootWindow(disp, screen); 
    fgcolor = BlackPixel(disp, screen); 
    bgcolor = WhitePixel(disp, screen); 

    window = XCreateSimpleWindow(disp, root, 100, 100, 1000, 840, 1, 
        fgcolor, bgcolor); 

    child_window = XCreateSimpleWindow(disp, window, 100, 100, 800, 600, 1, 
        fgcolor, bgcolor); 

    gc_val.foreground = fgcolor; 
    gc_val.background = bgcolor; 
    gc = XCreateGC(disp, child_window, GCForeground|GCBackground, &gc_val); 

    XSelectInput(disp, child_window, ExposureMask | KeyPressMask); 

    g_warning("map xwindow"); 
    //XMapWindow(disp, window); 
    XMapWindow(disp, window); 
    XMapWindow(disp, child_window); 
    XSync(disp, FALSE); 

    //XDrawLine (disp, window, gc, 0, 0, 1000, 800); 
    //XDrawLine (disp, child_window, gc, 0, 0, 800, 600); 

    gstreamer_init(argc, argv); 

    XDestroyWindow(disp, window); 
    XDestroyWindow(disp, child_window); 

    XCloseDisplay(disp); 

    return 0; 
} 

Answer


You will want at least one other thread (on each end) handling communication over a socket (TCP, or UDP if you are on a local network). These are typically blocking calls that wait for packets. To send data, you can tee and queue off your gstreamer pipeline into an appsink that buffers/sends the data out over the socket. To receive, you can pull the data from the socket into a buffer of your own. Keep in mind that the OS's socket buffers are relatively small: they will drop packets if you do not pull from them fast enough, or if you push into them too fast. Hence your own buffer; see the sketch below.
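As an illustration, the receiving thread might look like the following sketch. The names recv_thread and packet_queue and the choice of a GAsyncQueue are my own assumptions, not part of this answer; it targets the GStreamer 0.10 API used in the question, assumes sock is an already-connected TCP socket, and is meant as a drop-in addition to the question's file (it additionally needs <sys/socket.h> and <string.h>).

// Sketch: receiver thread that drains the socket into an application 
// buffer (a GAsyncQueue of GstBuffer*). Draining quickly keeps the 
// kernel's small socket buffer from overflowing and dropping packets. 
static GAsyncQueue *packet_queue; // holds GstBuffer* 

static gpointer recv_thread(gpointer data) 
{ 
    int sock = GPOINTER_TO_INT(data); // connected TCP socket (assumed) 
    guint8 chunk[4096]; 
    gssize n; 

    while ((n = recv(sock, chunk, sizeof(chunk), 0)) > 0) { // blocks 
     GstBuffer *buf = gst_buffer_new_and_alloc(n); 
     memcpy(GST_BUFFER_DATA(buf), chunk, n); 
     g_async_queue_push(packet_queue, buf); 
    } 
    return NULL; 
} 

// started once, e.g. before setting the pipeline to GST_STATE_PLAYING: 
// packet_queue = g_async_queue_new(); 
// g_thread_create(recv_thread, GINT_TO_POINTER(sock), FALSE, NULL); 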

On a need-data signal, use push_buffer() to pull from that buffer into the pipeline. On an enough-data signal, keep buffering, or handle it however your application needs. A sketch of that need-data handler follows.
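Tying it together, the question's read_data() idle handler could pop from that queue instead of calling fread(). This is only a sketch under the same assumptions as above:

// Sketch: feed appsrc from the queue filled by recv_thread() above, 
// replacing the fread() in the question's read_data(). 
static gboolean read_data(gst_app_t *app) 
{ 
    // g_async_queue_timeout_pop() needs GLib >= 2.32; with older GLib, 
    // g_async_queue_try_pop() works but busy-polls the idle source. 
    GstBuffer *buffer = g_async_queue_timeout_pop(packet_queue, 100 * 1000); 

    if (buffer == NULL) 
     return TRUE; // nothing received yet; stay scheduled and retry 

    // push_buffer() takes ownership of the buffer, so no unref here 
    if (gst_app_src_push_buffer(app->src, buffer) != GST_FLOW_OK) 
     return FALSE; // pipeline is flushing or in error; stop feeding 

    return TRUE; // keep feeding until "enough-data" removes this source 
} 

On enough-data, stop_feed() already removes the idle source, so data simply accumulates in packet_queue until need-data fires again.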