您当前的位置:首页 > IT编程 > C++
| C语言 | Java | VB | VC | python | Android | TensorFlow | C++ | oracle | 学术与代码 | cnn卷积神经网络 | gnn | 图像修复 | Keras | 数据集 | Neo4j | 自然语言处理 | 深度学习 | 医学CAD | 医学影像 | 超参数 | pointnet | pytorch | 异常检测 | Transformers | 情感分类 | 知识图谱 |

自学教程:C++ GST_ELEMENT函数代码示例

51自学网 2021-06-01 20:56:18
  C++
这篇教程C++ GST_ELEMENT函数代码示例写得很实用,希望能帮到您。

本文整理汇总了C++中GST_ELEMENT函数的典型用法代码示例。如果您正苦于以下问题:C++ GST_ELEMENT函数的具体用法?C++ GST_ELEMENT怎么用?C++ GST_ELEMENT使用的例子?那么恭喜您, 这里精选的函数代码示例或许可以为您提供帮助。

在下文中一共展示了GST_ELEMENT函数的27个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于我们的系统推荐出更棒的C++代码示例。

示例1: OpenDecoder

/***************************************************************************** * OpenDecoder: probe the decoder and return score *****************************************************************************/static int OpenDecoder( vlc_object_t *p_this ){    decoder_t *p_dec = ( decoder_t* )p_this;    decoder_sys_t *p_sys;    GstStateChangeReturn i_ret;    gboolean b_ret;    sink_src_caps_t caps = { NULL, NULL };    GstStructure *p_str;    GstAppSrcCallbacks cb;    int i_rval = VLC_SUCCESS;    GList *p_list;    bool dbin;#define VLC_GST_CHECK( r, v, s, t ) /    { if( r == v ){ msg_Err( p_dec, s ); i_rval = t; goto fail; } }    if( !vlc_gst_init( ))    {        msg_Err( p_dec, "failed to register vlcvideosink" );        return VLC_EGENERIC;    }    p_str = vlc_to_gst_fmt( &p_dec->fmt_in );    if( !p_str )        return VLC_EGENERIC;    /* Allocate the memory needed to store the decoder's structure */    p_sys = p_dec->p_sys = calloc( 1, sizeof( *p_sys ) );    if( p_sys == NULL )    {        gst_structure_free( p_str );        return VLC_ENOMEM;    }    dbin = var_CreateGetBool( p_dec, "use-decodebin" );    msg_Dbg( p_dec, "Using decodebin? %s", dbin ? 
"yes ":"no" );    caps.p_sinkcaps = gst_caps_new_empty( );    gst_caps_append_structure( caps.p_sinkcaps, p_str );    /* Currently supports only system memory raw output format */    caps.p_srccaps = gst_caps_new_empty_simple( "video/x-raw" );    /* Get the list of all the available gstreamer decoders */    p_list = gst_element_factory_list_get_elements(            GST_ELEMENT_FACTORY_TYPE_DECODER, GST_RANK_MARGINAL );    VLC_GST_CHECK( p_list, NULL, "no decoder list found", VLC_ENOMOD );    if( !dbin )    {        GList *p_l;        /* Sort them as per ranks */        p_list = g_list_sort( p_list, gst_plugin_feature_rank_compare_func );        VLC_GST_CHECK( p_list, NULL, "failed to sort decoders list",                VLC_ENOMOD );        p_l = g_list_find_custom( p_list, &caps, find_decoder_func );        VLC_GST_CHECK( p_l, NULL, "no suitable decoder found",                VLC_ENOMOD );        /* create the decoder with highest rank */        p_sys->p_decode_in = gst_element_factory_create(                ( GstElementFactory* )p_l->data, NULL );        VLC_GST_CHECK( p_sys->p_decode_in, NULL,                "failed to create decoder", VLC_ENOMOD );    }    else    {        GList *p_l;        /* Just check if any suitable decoder exists, rest will be         * handled by decodebin */        p_l = g_list_find_custom( p_list, &caps, find_decoder_func );        VLC_GST_CHECK( p_l, NULL, "no suitable decoder found",                VLC_ENOMOD );    }    gst_plugin_feature_list_free( p_list );    p_list = NULL;    gst_caps_unref( caps.p_srccaps );    caps.p_srccaps = NULL;    p_sys->b_prerolled = false;    p_sys->b_running = false;    /* Queue: GStreamer thread will dump buffers into this queue,     * DecodeBlock() will pop out the buffers from the queue */    p_sys->p_que = gst_atomic_queue_new( 0 );    VLC_GST_CHECK( p_sys->p_que, NULL, "failed to create queue",            VLC_ENOMEM );    p_sys->p_decode_src = gst_element_factory_make( "appsrc", NULL );    
VLC_GST_CHECK( p_sys->p_decode_src, NULL, "appsrc not found",            VLC_ENOMOD );    g_object_set( G_OBJECT( p_sys->p_decode_src ), "caps", caps.p_sinkcaps,            "emit-signals", TRUE, "format", GST_FORMAT_BYTES,            "stream-type", GST_APP_STREAM_TYPE_SEEKABLE,            /* Making DecodeBlock() to block on appsrc with max queue size of 1 byte.             * This will make the push_buffer() tightly coupled with the buffer             * flow from appsrc -> decoder. push_buffer() will only return when             * the same buffer it just fed to appsrc has also been fed to the             * decoder element as well */            "block", TRUE, "max-bytes", ( guint64 )1, NULL );//.........这里部分代码省略.........
开发者ID:CityFire,项目名称:vlc,代码行数:101,


示例2: close

bool CvCapture_GStreamer::open( int type, const char* filename ){    close();    CV_FUNCNAME("cvCaptureFromCAM_GStreamer");    __BEGIN__;    gst_initializer::init();//    if(!isInited) {//        printf("gst_init/n");//        gst_init (NULL, NULL);//        gst_debug_set_active(TRUE);//        gst_debug_set_colored(TRUE);//        gst_debug_set_default_threshold(GST_LEVEL_WARNING);//        isInited = true;//    }    bool stream = false;    bool manualpipeline = false;    char *uri = NULL;    uridecodebin = NULL;    if(type != CV_CAP_GSTREAMER_FILE) {        close();        return false;    }    if(!gst_uri_is_valid(filename)) {        uri = realpath(filename, NULL);        stream=false;        if(uri) {            uri = g_filename_to_uri(uri, NULL, NULL);            if(!uri) {                CV_WARN("GStreamer: Error opening file/n");                close();                return false;            }        } else {            GError *err = NULL;            //uridecodebin = gst_parse_bin_from_description(filename, FALSE, &err);            uridecodebin = gst_parse_launch(filename, &err);            if(!uridecodebin) {                CV_WARN("GStreamer: Error opening bin/n");                close();                return false;            }            stream = true;            manualpipeline = true;        }    } else {        stream = true;        uri = g_strdup(filename);    }    if(!uridecodebin) {        uridecodebin = gst_element_factory_make ("uridecodebin", NULL);        g_object_set(G_OBJECT(uridecodebin),"uri",uri, NULL);        if(!uridecodebin) {            CV_WARN("GStreamer: Failed to create uridecodebin/n");            close();            return false;        }    }    if(manualpipeline) {        GstIterator *it = gst_bin_iterate_sinks(GST_BIN(uridecodebin));        if(gst_iterator_next(it, (gpointer *)&sink) != GST_ITERATOR_OK) {        CV_ERROR(CV_StsError, "GStreamer: cannot find appsink in manual pipeline/n");        return false;        }    
pipeline = uridecodebin;    } else {    pipeline = gst_pipeline_new (NULL);        color = gst_element_factory_make("ffmpegcolorspace", NULL);        sink = gst_element_factory_make("appsink", NULL);        gst_bin_add_many(GST_BIN(pipeline), uridecodebin, color, sink, NULL);        g_signal_connect(uridecodebin, "pad-added", G_CALLBACK(newPad), color);        if(!gst_element_link(color, sink)) {            CV_ERROR(CV_StsError, "GStreamer: cannot link color -> sink/n");            gst_object_unref(pipeline);            return false;        }    }    gst_app_sink_set_max_buffers (GST_APP_SINK(sink), 1);    gst_app_sink_set_drop (GST_APP_SINK(sink), stream);    caps = gst_caps_new_simple("video/x-raw-rgb",                               "red_mask",   G_TYPE_INT, 0x0000FF,                               "green_mask", G_TYPE_INT, 0x00FF00,                               "blue_mask",  G_TYPE_INT, 0xFF0000,                               NULL);    gst_app_sink_set_caps(GST_APP_SINK(sink), caps);    gst_caps_unref(caps);    if(gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_READY) ==//.........这里部分代码省略.........
开发者ID:4auka,项目名称:opencv,代码行数:101,


示例3: CV_WARN

/* Set a capture property on the GStreamer pipeline.
 *
 * propId is a CV_CAP_PROP_* identifier; value is interpreted per property.
 * Position properties trigger a flushing, accurate seek; width/height/fps
 * are applied by (re)configuring a caps filter via setFilter()/removeFilter().
 *
 * NOTE(review): this function returns false on every path, including
 * successful ones — callers apparently cannot rely on the return value.
 * Confirm before changing. */
bool CvCapture_GStreamer::setProperty( int propId, double value )
{
    GstFormat format;
    GstSeekFlags flags;

    /* No pipeline yet (open() not called or failed): nothing to set. */
    if(!pipeline) {
        CV_WARN("GStreamer: no pipeline");
        return false;
    }

    switch(propId) {
    case CV_CAP_PROP_POS_MSEC:
        /* Seek by wall-clock time; value is in milliseconds. */
        format = GST_FORMAT_TIME;
        flags = (GstSeekFlags) (GST_SEEK_FLAG_FLUSH|GST_SEEK_FLAG_ACCURATE);
        if(!gst_element_seek_simple(GST_ELEMENT(pipeline), format,
                        flags, (gint64) (value * GST_MSECOND))) {
            CV_WARN("GStreamer: unable to seek");
        }
        break;
    case CV_CAP_PROP_POS_FRAMES:
        /* Seek by frame index (GST_FORMAT_DEFAULT is frames for video). */
        format = GST_FORMAT_DEFAULT;
        flags = (GstSeekFlags) (GST_SEEK_FLAG_FLUSH|GST_SEEK_FLAG_ACCURATE);
        if(!gst_element_seek_simple(GST_ELEMENT(pipeline), format,
                        flags, (gint64) value)) {
            CV_WARN("GStreamer: unable to seek");
        }
        break;
    case CV_CAP_PROP_POS_AVI_RATIO:
        /* Seek by relative position; value in [0,1] scaled to percent range. */
        format = GST_FORMAT_PERCENT;
        flags = (GstSeekFlags) (GST_SEEK_FLAG_FLUSH|GST_SEEK_FLAG_ACCURATE);
        if(!gst_element_seek_simple(GST_ELEMENT(pipeline), format,
                        flags, (gint64) (value * GST_FORMAT_PERCENT_MAX))) {
            CV_WARN("GStreamer: unable to seek");
        }
        break;
    case CV_CAP_PROP_FRAME_WIDTH:
        /* Non-positive value removes the constraint instead of setting it. */
        if(value > 0)
            setFilter("width", G_TYPE_INT, (int) value, 0);
        else
            removeFilter("width");
        break;
    case CV_CAP_PROP_FRAME_HEIGHT:
        if(value > 0)
            setFilter("height", G_TYPE_INT, (int) value, 0);
        else
            removeFilter("height");
        break;
    case CV_CAP_PROP_FPS:
        if(value > 0) {
            int num, denom;
            num = (int) value;
            if(value != num) { // FIXME this supports only fractions x/1 and x/2
                num = (int) (value * 2);
                denom = 2;
            } else
                denom = 1;
            setFilter("framerate", GST_TYPE_FRACTION, num, denom);
        } else
            removeFilter("framerate");
        break;
    /* The following properties are recognized but intentionally ignored. */
    case CV_CAP_PROP_FOURCC:
    case CV_CAP_PROP_FRAME_COUNT:
    case CV_CAP_PROP_FORMAT:
    case CV_CAP_PROP_MODE:
    case CV_CAP_PROP_BRIGHTNESS:
    case CV_CAP_PROP_CONTRAST:
    case CV_CAP_PROP_SATURATION:
    case CV_CAP_PROP_HUE:
    case CV_CAP_PROP_GAIN:
    case CV_CAP_PROP_CONVERT_RGB:
        break;
    case CV_CAP_GSTREAMER_QUEUE_LENGTH:
        /* Limit how many buffers appsink may queue before dropping/blocking. */
        if(!sink)
            break;
        gst_app_sink_set_max_buffers(GST_APP_SINK(sink), (guint) value);
        break;
    default:
        CV_WARN("GStreamer: unhandled property");
    }
    return false;
}
开发者ID:4auka,项目名称:opencv,代码行数:82,


示例4: _update_caps

/* Return the possible output caps based on inputs and downstream prefs.
 *
 * Scans every sink pad of the aggregator (under the object lock) to find
 * the largest input width/height and the highest input frame rate, builds
 * a 2-view separated RGBA GstVideoInfo from those, converts it to caps
 * with GLMemory features, and intersects the result with @caps.
 * Caller owns the returned caps reference. */
static GstCaps *
_update_caps (GstVideoAggregator * vagg, GstCaps * caps, GstCaps * filter)
{
  GstGLStereoMix *mix = GST_GL_STEREO_MIX (vagg);
  GList *l;
  gint best_width = -1, best_height = -1;
  gdouble best_fps = -1, cur_fps;
  gint best_fps_n = 0, best_fps_d = 1;
  GstVideoInfo *mix_info;
  GstCaps *blend_caps, *tmp_caps;
  GstCaps *out_caps;

  /* Lock while walking the pad list: pads may be added/removed concurrently. */
  GST_OBJECT_LOCK (vagg);
  for (l = GST_ELEMENT (vagg)->sinkpads; l; l = l->next) {
    GstVideoAggregatorPad *pad = l->data;
    GstVideoInfo tmp = pad->info;
    gint this_width, this_height;
    gint fps_n, fps_d;

    /* Pad has no negotiated format info yet: skip it. */
    if (!pad->info.finfo)
      continue;

    /* This can happen if we release a pad and another pad hasn't been negotiated_caps yet */
    if (GST_VIDEO_INFO_FORMAT (&pad->info) == GST_VIDEO_FORMAT_UNKNOWN)
      continue;

    /* Convert to per-view width/height for unpacked forms */
    gst_video_multiview_video_info_change_mode (&tmp,
        GST_VIDEO_MULTIVIEW_MODE_SEPARATED, GST_VIDEO_MULTIVIEW_FLAGS_NONE);

    this_width = GST_VIDEO_INFO_WIDTH (&tmp);
    this_height = GST_VIDEO_INFO_HEIGHT (&tmp);
    fps_n = GST_VIDEO_INFO_FPS_N (&tmp);
    fps_d = GST_VIDEO_INFO_FPS_D (&tmp);

    GST_INFO_OBJECT (vagg, "Input pad %" GST_PTR_FORMAT
        " w %u h %u", pad, this_width, this_height);

    if (this_width == 0 || this_height == 0)
      continue;

    /* Track the maximum dimensions over all inputs. */
    if (best_width < this_width)
      best_width = this_width;
    if (best_height < this_height)
      best_height = this_height;

    /* fps_d == 0 would divide by zero; treat as 0 fps. */
    if (fps_d == 0)
      cur_fps = 0.0;
    else
      gst_util_fraction_to_double (fps_n, fps_d, &cur_fps);

    /* Keep the fastest input frame rate (as an exact fraction). */
    if (best_fps < cur_fps) {
      best_fps = cur_fps;
      best_fps_n = fps_n;
      best_fps_d = fps_d;
    }

    /* FIXME: Preserve PAR for at least one input when different sized inputs */
  }
  GST_OBJECT_UNLOCK (vagg);

  /* Fix the blended format: RGBA at the best size/rate, two separated views. */
  mix_info = &mix->mix_info;
  gst_video_info_set_format (mix_info, GST_VIDEO_FORMAT_RGBA, best_width,
      best_height);

  GST_VIDEO_INFO_FPS_N (mix_info) = best_fps_n;
  GST_VIDEO_INFO_FPS_D (mix_info) = best_fps_d;
  GST_VIDEO_INFO_MULTIVIEW_MODE (mix_info) = GST_VIDEO_MULTIVIEW_MODE_SEPARATED;
  GST_VIDEO_INFO_VIEWS (mix_info) = 2;
  /* FIXME: If input is marked as flipped or flopped, preserve those flags */
  GST_VIDEO_INFO_MULTIVIEW_FLAGS (mix_info) = GST_VIDEO_MULTIVIEW_FLAGS_NONE;

  /* Choose our output format based on downstream preferences */
  blend_caps = gst_video_info_to_caps (mix_info);
  gst_caps_set_features (blend_caps, 0,
      gst_caps_features_from_string (GST_CAPS_FEATURE_MEMORY_GL_MEMORY));

  tmp_caps = get_converted_caps (GST_GL_STEREO_MIX (vagg), blend_caps);
  gst_caps_unref (blend_caps);

  /* Restrict to what the caller said is acceptable. */
  out_caps = gst_caps_intersect (caps, tmp_caps);
  gst_caps_unref (tmp_caps);

  GST_DEBUG_OBJECT (vagg, "Possible output caps %" GST_PTR_FORMAT, out_caps);

  return out_caps;
}
开发者ID:CogentEmbedded,项目名称:gst-plugins-bad,代码行数:92,


示例5: gst_opus_dec_parse_header

static GstFlowReturngst_opus_dec_parse_header (GstOpusDec * dec, GstBuffer * buf){  const guint8 *data;  GstAudioChannelPosition pos[64];  const GstAudioChannelPosition *posn = NULL;  GstMapInfo map;  if (!gst_opus_header_is_id_header (buf)) {    GST_ERROR_OBJECT (dec, "Header is not an Opus ID header");    return GST_FLOW_ERROR;  }  gst_buffer_map (buf, &map, GST_MAP_READ);  data = map.data;  if (!(dec->n_channels == 0 || dec->n_channels == data[9])) {    gst_buffer_unmap (buf, &map);    GST_ERROR_OBJECT (dec, "Opus ID header has invalid channels");    return GST_FLOW_ERROR;  }  dec->n_channels = data[9];  dec->sample_rate = GST_READ_UINT32_LE (data + 12);  dec->pre_skip = GST_READ_UINT16_LE (data + 10);  dec->r128_gain = GST_READ_UINT16_LE (data + 16);  dec->r128_gain_volume = gst_opus_dec_get_r128_volume (dec->r128_gain);  GST_INFO_OBJECT (dec,      "Found pre-skip of %u samples, R128 gain %d (volume %f)",      dec->pre_skip, dec->r128_gain, dec->r128_gain_volume);  dec->channel_mapping_family = data[18];  if (dec->channel_mapping_family == 0) {    /* implicit mapping */    GST_INFO_OBJECT (dec, "Channel mapping family 0, implicit mapping");    dec->n_streams = dec->n_stereo_streams = 1;    dec->channel_mapping[0] = 0;    dec->channel_mapping[1] = 1;  } else {    dec->n_streams = data[19];    dec->n_stereo_streams = data[20];    memcpy (dec->channel_mapping, data + 21, dec->n_channels);    if (dec->channel_mapping_family == 1) {      GST_INFO_OBJECT (dec, "Channel mapping family 1, Vorbis mapping");      switch (dec->n_channels) {        case 1:        case 2:          /* nothing */          break;        case 3:        case 4:        case 5:        case 6:        case 7:        case 8:          posn = gst_opus_channel_positions[dec->n_channels - 1];          break;        default:{          gint i;          GST_ELEMENT_WARNING (GST_ELEMENT (dec), STREAM, DECODE,              (NULL), ("Using NONE channel layout for more than 8 channels"));          for (i = 0; i 
< dec->n_channels; i++)            pos[i] = GST_AUDIO_CHANNEL_POSITION_NONE;          posn = pos;        }      }    } else {      GST_INFO_OBJECT (dec, "Channel mapping family %d",          dec->channel_mapping_family);    }  }  gst_opus_dec_negotiate (dec, posn);  gst_buffer_unmap (buf, &map);  return GST_FLOW_OK;}
开发者ID:vanechipi,项目名称:gst-plugins-bad,代码行数:82,


示例6: webkitVideoSinkNew

/* Create a WebKit video sink element bound to the given GStreamer GWorld.
 * The caller owns the returned (floating) element reference. */
GstElement* webkitVideoSinkNew(WebCore::GStreamerGWorld* gstGWorld)
{
    GstElement* sink = GST_ELEMENT(g_object_new(WEBKIT_TYPE_VIDEO_SINK, 0));
    WEBKIT_VIDEO_SINK(sink)->priv->gstGWorld = gstGWorld;
    return sink;
}
开发者ID:Metrological,项目名称:qtwebkit,代码行数:6,


示例7: gst_curl_smtp_sink_set_property

/* GObject set_property handler for the curl SMTP sink.
 *
 * Most properties may only be changed while the element is NOT in
 * PLAYING/PAUSED; once streaming, only content-type can still be set.
 * All sink-field accesses are done under the element's object lock. */
static void
gst_curl_smtp_sink_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec)
{
  GstCurlSmtpSink *sink;
  GstState cur_state;

  g_return_if_fail (GST_IS_CURL_SMTP_SINK (object));
  sink = GST_CURL_SMTP_SINK (object);
  /* Query the current state (timeout 0: return immediately). */
  gst_element_get_state (GST_ELEMENT (sink), &cur_state, NULL, 0);

  if (cur_state != GST_STATE_PLAYING && cur_state != GST_STATE_PAUSED) {
    GST_OBJECT_LOCK (sink);

    switch (prop_id) {
      case PROP_MAIL_RCPT:
        /* g_free of the previous value avoids leaking on repeated sets. */
        g_free (sink->mail_rcpt);
        sink->mail_rcpt = g_value_dup_string (value);
        GST_DEBUG_OBJECT (sink, "mail-rcpt set to %s", sink->mail_rcpt);
        break;
      case PROP_MAIL_FROM:
        g_free (sink->mail_from);
        sink->mail_from = g_value_dup_string (value);
        GST_DEBUG_OBJECT (sink, "mail-from set to %s", sink->mail_from);
        break;
      case PROP_SUBJECT:
        g_free (sink->subject);
        sink->subject = g_value_dup_string (value);
        GST_DEBUG_OBJECT (sink, "subject set to %s", sink->subject);
        break;
      case PROP_MESSAGE_BODY:
        g_free (sink->message_body);
        sink->message_body = g_value_dup_string (value);
        GST_DEBUG_OBJECT (sink, "message-body set to %s", sink->message_body);
        break;
      case PROP_CONTENT_TYPE:
        g_free (sink->content_type);
        sink->content_type = g_value_dup_string (value);
        GST_DEBUG_OBJECT (sink, "content-type set to %s", sink->content_type);
        break;
      case PROP_USE_SSL:
        sink->use_ssl = g_value_get_boolean (value);
        GST_DEBUG_OBJECT (sink, "use-ssl set to %d", sink->use_ssl);
        break;
      case PROP_NBR_ATTACHMENTS:
        /* Setting the total also resets the remaining-attachments counter. */
        sink->nbr_attachments = g_value_get_int (value);
        sink->nbr_attachments_left = sink->nbr_attachments;
        GST_DEBUG_OBJECT (sink, "nbr-attachments set to %d",
            sink->nbr_attachments);
        break;
      case PROP_POP_USER_NAME:
        g_free (sink->pop_user);
        sink->pop_user = g_value_dup_string (value);
        GST_DEBUG_OBJECT (sink, "pop-user set to %s", sink->pop_user);
        break;
      case PROP_POP_USER_PASSWD:
        g_free (sink->pop_passwd);
        sink->pop_passwd = g_value_dup_string (value);
        GST_DEBUG_OBJECT (sink, "pop-passwd set to %s", sink->pop_passwd);
        break;
      case PROP_POP_LOCATION:
        g_free (sink->pop_location);
        sink->pop_location = g_value_dup_string (value);
        GST_DEBUG_OBJECT (sink, "pop-location set to %s", sink->pop_location);
        break;
      default:
        GST_DEBUG_OBJECT (sink, "invalid property id %d", prop_id);
        break;
    }

    GST_OBJECT_UNLOCK (sink);

    return;
  }

  /* in PLAYING or PAUSED state */
  GST_OBJECT_LOCK (sink);

  switch (prop_id) {
    case PROP_CONTENT_TYPE:
      /* content-type is the only property that may change mid-stream. */
      g_free (sink->content_type);
      sink->content_type = g_value_dup_string (value);
      GST_DEBUG_OBJECT (sink, "content type set to %s", sink->content_type);
      break;
    default:
      GST_WARNING_OBJECT (sink, "cannot set property when PLAYING");
      break;
  }

  GST_OBJECT_UNLOCK (sink);
}
开发者ID:lubing521,项目名称:gst-embedded-builder,代码行数:92,


示例8: gst_curl_http_sink_set_property

/* GObject set_property handler for the curl HTTP sink.
 *
 * Mirrors the SMTP sink handler: proxy/transfer settings may only be
 * changed while the element is NOT in PLAYING/PAUSED; once streaming,
 * only content-type can still be set. Fields are updated under the
 * element's object lock. */
static void
gst_curl_http_sink_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec)
{
  GstCurlHttpSink *sink;
  GstState cur_state;

  g_return_if_fail (GST_IS_CURL_HTTP_SINK (object));
  sink = GST_CURL_HTTP_SINK (object);
  /* Query the current state (timeout 0: return immediately). */
  gst_element_get_state (GST_ELEMENT (sink), &cur_state, NULL, 0);

  if (cur_state != GST_STATE_PLAYING && cur_state != GST_STATE_PAUSED) {
    GST_OBJECT_LOCK (sink);

    switch (prop_id) {
      case PROP_PROXY:
        /* Free the previous string to avoid leaking on repeated sets. */
        g_free (sink->proxy);
        sink->proxy = g_value_dup_string (value);
        GST_DEBUG_OBJECT (sink, "proxy set to %s", sink->proxy);
        break;
      case PROP_PROXY_PORT:
        sink->proxy_port = g_value_get_int (value);
        GST_DEBUG_OBJECT (sink, "proxy port set to %d", sink->proxy_port);
        break;
      case PROP_PROXY_USER_NAME:
        g_free (sink->proxy_user);
        sink->proxy_user = g_value_dup_string (value);
        GST_DEBUG_OBJECT (sink, "proxy user set to %s", sink->proxy_user);
        break;
      case PROP_PROXY_USER_PASSWD:
        g_free (sink->proxy_passwd);
        sink->proxy_passwd = g_value_dup_string (value);
        GST_DEBUG_OBJECT (sink, "proxy password set to %s", sink->proxy_passwd);
        break;
      case PROP_USE_CONTENT_LENGTH:
        sink->use_content_length = g_value_get_boolean (value);
        GST_DEBUG_OBJECT (sink, "use_content_length set to %d",
            sink->use_content_length);
        break;
      case PROP_CONTENT_TYPE:
        g_free (sink->content_type);
        sink->content_type = g_value_dup_string (value);
        GST_DEBUG_OBJECT (sink, "content type set to %s", sink->content_type);
        break;
      default:
        GST_DEBUG_OBJECT (sink, "invalid property id %d", prop_id);
        break;
    }

    GST_OBJECT_UNLOCK (sink);

    return;
  }

  /* in PLAYING or PAUSED state */
  GST_OBJECT_LOCK (sink);

  switch (prop_id) {
    case PROP_CONTENT_TYPE:
      /* content-type is the only property that may change mid-stream. */
      g_free (sink->content_type);
      sink->content_type = g_value_dup_string (value);
      GST_DEBUG_OBJECT (sink, "content type set to %s", sink->content_type);
      break;
    default:
      GST_WARNING_OBJECT (sink, "cannot set property when PLAYING");
      break;
  }

  GST_OBJECT_UNLOCK (sink);
}
开发者ID:ylatuya,项目名称:gst-plugins-bad,代码行数:70,


示例9: gst_opus_dec_parse_header

/* Parse an Opus ID header from @buf and configure @dec from it
 * (GStreamer 0.10-era variant: uses GST_BUFFER_DATA and sets caps on the
 * source pad directly).
 *
 * Reads channel count, pre-skip, R128 gain and the channel mapping table,
 * negotiates output caps, and for family-1 mappings attaches channel
 * positions to those caps. Returns GST_FLOW_ERROR on an invalid header. */
static GstFlowReturn
gst_opus_dec_parse_header (GstOpusDec * dec, GstBuffer * buf)
{
  const guint8 *data = GST_BUFFER_DATA (buf);
  GstCaps *caps;
  const GstAudioChannelPosition *pos = NULL;

  if (!gst_opus_header_is_id_header (buf)) {
    GST_ERROR_OBJECT (dec, "Header is not an Opus ID header");
    return GST_FLOW_ERROR;
  }

  /* A non-zero configured channel count must match the header's byte 9. */
  if (!(dec->n_channels == 0 || dec->n_channels == data[9])) {
    GST_ERROR_OBJECT (dec, "Opus ID header has invalid channels");
    return GST_FLOW_ERROR;
  }

  dec->n_channels = data[9];
  dec->pre_skip = GST_READ_UINT16_LE (data + 10);
  dec->r128_gain = GST_READ_UINT16_LE (data + 16);
  dec->r128_gain_volume = gst_opus_dec_get_r128_volume (dec->r128_gain);
  GST_INFO_OBJECT (dec,
      "Found pre-skip of %u samples, R128 gain %d (volume %f)",
      dec->pre_skip, dec->r128_gain, dec->r128_gain_volume);

  dec->channel_mapping_family = data[18];
  if (dec->channel_mapping_family == 0) {
    /* implicit mapping */
    GST_INFO_OBJECT (dec, "Channel mapping family 0, implicit mapping");
    dec->n_streams = dec->n_stereo_streams = 1;
    dec->channel_mapping[0] = 0;
    dec->channel_mapping[1] = 1;
  } else {
    dec->n_streams = data[19];
    dec->n_stereo_streams = data[20];
    memcpy (dec->channel_mapping, data + 21, dec->n_channels);

    if (dec->channel_mapping_family == 1) {
      GST_INFO_OBJECT (dec, "Channel mapping family 1, Vorbis mapping");
      switch (dec->n_channels) {
        case 1:
        case 2:
          /* nothing: mono/stereo need no explicit position table */
          break;
        case 3:
        case 4:
        case 5:
        case 6:
        case 7:
        case 8:
          /* Standard Vorbis channel order for 3..8 channels. */
          pos = gst_opus_channel_positions[dec->n_channels - 1];
          break;
        default:{
          gint i;
          /* Heap-allocated because n_channels may be up to 255;
           * freed below via the n_channels > 8 check. */
          GstAudioChannelPosition *posn =
              g_new (GstAudioChannelPosition, dec->n_channels);

          GST_ELEMENT_WARNING (GST_ELEMENT (dec), STREAM, DECODE,
              (NULL), ("Using NONE channel layout for more than 8 channels"));

          for (i = 0; i < dec->n_channels; i++)
            posn[i] = GST_AUDIO_CHANNEL_POSITION_NONE;
          pos = posn;
        }
      }
    } else {
      GST_INFO_OBJECT (dec, "Channel mapping family %d",
          dec->channel_mapping_family);
    }
  }

  caps = gst_opus_dec_negotiate (dec);

  if (pos) {
    GST_DEBUG_OBJECT (dec, "Setting channel positions on caps");
    gst_audio_set_channel_positions (gst_caps_get_structure (caps, 0), pos);
  }

  /* Only the >8-channel path allocated pos; the 3..8 path points at a
   * static table and must not be freed. */
  if (dec->n_channels > 8) {
    g_free ((GstAudioChannelPosition *) pos);
  }

  GST_INFO_OBJECT (dec, "Setting src caps to %" GST_PTR_FORMAT, caps);
  gst_pad_set_caps (GST_AUDIO_DECODER_SRC_PAD (dec), caps);
  gst_caps_unref (caps);

  return GST_FLOW_OK;
}
开发者ID:freedesktop-unofficial-mirror,项目名称:gstreamer-sdk__gst-plugins-bad,代码行数:88,


示例10: GStreamer_init

int GStreamer_init(const char *mplayer){	GError* error;	GstBus *bus;	GstElement *videosink, *audiosink;	int err;	if (g_initialized)		g_error("GStreamer: already initialized, call destroy first!/n");	g_state_callback = NULL;	g_duration = 0;	g_position = 0;	/* pthread synchronization */	pthread_mutex_init(&g_mutex, NULL);	err = pthread_cond_init(&g_main_cond, NULL);	if (err) {		g_error("GStreamer: failed to initialize main condition %s/n",				strerror(errno));		return -1;	}	/* init gstreamer library */	if (!gst_init_check(NULL, NULL, &error)) {		g_error("GStreamer: failed to initialize gstreamer library: [%d] %s/n",				error->code, error->message);		g_error_free(error);		return -1;	}	/* create pipeline */	g_pipeline = gst_pipeline_new("pipeline");	g_pipeline_name = gst_element_get_name(GST_ELEMENT(g_pipeline));	/* register callback */	bus = gst_pipeline_get_bus(GST_PIPELINE(g_pipeline));	gst_bus_add_watch(bus, my_bus_callback, NULL);	gst_object_unref(bus);#if 0	/* TODO unlinked when removed from pipeline */	/* hardcode audio/video sink */	g_videosink = create_video_sink();	g_audiosink = create_audio_sink();	if (!g_videosink || !g_audiosink) {		/* TODO memory leak */		g_error("GStreamer: failed to create sink elements/n");		return -1;	}#endif	/* prepare http/file src */	g_filesrc = gst_element_factory_make ("filesrc", "filesrc");	g_httpsrc = gst_element_factory_make ("souphttpsrc", "httpsrc");	if (!g_filesrc || !g_httpsrc) {		/* TODO memory leak */		g_error("GStreamer: failed to create src elements %x %x/n", g_filesrc, g_httpsrc);		return -1;	}	g_object_ref(g_filesrc);	g_object_ref(g_httpsrc);	/* initialize pipeline */	/* TODO do for audio/video pipe separately */	if (gst_element_set_state(g_pipeline, GST_STATE_READY) ==		GST_STATE_CHANGE_FAILURE) {	  g_error("GStreamer: could not set pipeline to ready/n");	}	/* start main loop */	g_main_loop = g_main_loop_new(NULL, FALSE);	err = pthread_create(&g_reader_thread, NULL, main_thread_proc, NULL);	if (err) {		
g_error("GStreamer: failed to launch gstreamer main thread %s/n",				strerror(errno));		goto err_pthread;	}	g_print("GStreamer: SUCCESSFULLY INITIALIZED/n");	g_initialized = 1;	return 0;err_pthread:	pthread_cond_destroy(&g_main_cond);	pthread_mutex_destroy(&g_mutex);		return err;}
开发者ID:afenkart,项目名称:ti_gstreamer,代码行数:96,


示例11: gst_auto_video_sink_find_best

/* Probe the registry for a usable video sink.
 *
 * Iterates all sink factories (rank-sorted), optionally rejects those whose
 * sink-pad caps cannot intersect the user-supplied filter caps, and returns
 * the first element that reaches READY. If none works, reposts the first
 * collected error (returning NULL) or falls back to a fakesink with a
 * warning. Caller owns the returned element reference. */
static GstElement *
gst_auto_video_sink_find_best (GstAutoVideoSink * sink)
{
  GList *list, *item;
  GstElement *choice = NULL;
  GstMessage *message = NULL;
  GSList *errors = NULL;
  /* Private bus so error messages from probed elements can be collected
   * without reaching the application. */
  GstBus *bus = gst_bus_new ();
  GstPad *el_pad = NULL;
  GstCaps *el_caps = NULL;
  gboolean no_match = TRUE;

  list = gst_registry_feature_filter (gst_registry_get (),
      (GstPluginFeatureFilter) gst_auto_video_sink_factory_filter, FALSE, sink);
  list = g_list_sort (list, (GCompareFunc) gst_auto_video_sink_compare_ranks);

  GST_LOG_OBJECT (sink, "Trying to find usable video devices ...");

  for (item = list; item != NULL; item = item->next) {
    GstElementFactory *f = GST_ELEMENT_FACTORY (item->data);
    GstElement *el;

    if ((el = gst_auto_video_sink_create_element_with_pretty_name (sink, f))) {
      GstStateChangeReturn ret;

      GST_DEBUG_OBJECT (sink, "Testing %s", GST_OBJECT_NAME (f));

      /* If autovideosink has been provided with filter caps,
       * accept only sinks that match with the filter caps */
      if (sink->filter_caps) {
        el_pad = gst_element_get_static_pad (GST_ELEMENT (el), "sink");
        el_caps = gst_pad_query_caps (el_pad, NULL);
        gst_object_unref (el_pad);
        GST_DEBUG_OBJECT (sink,
            "Checking caps: %" GST_PTR_FORMAT " vs. %" GST_PTR_FORMAT,
            sink->filter_caps, el_caps);
        no_match = !gst_caps_can_intersect (sink->filter_caps, el_caps);
        gst_caps_unref (el_caps);

        if (no_match) {
          GST_DEBUG_OBJECT (sink, "Incompatible caps");
          gst_object_unref (el);
          continue;
        } else {
          GST_DEBUG_OBJECT (sink, "Found compatible caps");
        }
      }

      /* Trial state change: READY typically opens the device, so success
       * here means the sink is actually usable. */
      gst_element_set_bus (el, bus);
      ret = gst_element_set_state (el, GST_STATE_READY);
      if (ret == GST_STATE_CHANGE_SUCCESS) {
        GST_DEBUG_OBJECT (sink, "This worked!");
        choice = el;
        break;
      }

      /* collect all error messages */
      while ((message = gst_bus_pop_filtered (bus, GST_MESSAGE_ERROR))) {
        GST_DEBUG_OBJECT (sink, "error message %" GST_PTR_FORMAT, message);
        errors = g_slist_append (errors, message);
      }

      gst_element_set_state (el, GST_STATE_NULL);
      gst_object_unref (el);
    }
  }

  GST_DEBUG_OBJECT (sink, "done trying");
  if (!choice) {
    if (errors) {
      /* FIXME: we forward the first error for now; but later on it might make
       * sense to actually analyse them */
      gst_message_ref (GST_MESSAGE (errors->data));
      GST_DEBUG_OBJECT (sink, "reposting message %p", errors->data);
      gst_element_post_message (GST_ELEMENT (sink), GST_MESSAGE (errors->data));
    } else {
      /* send warning message to application and use a fakesink */
      GST_ELEMENT_WARNING (sink, RESOURCE, NOT_FOUND, (NULL),
          ("Failed to find a usable video sink"));
      choice = gst_element_factory_make ("fakesink", "fake-video-sink");
      /* Keep timing behavior sink-like: render in sync with the clock. */
      if (g_object_class_find_property (G_OBJECT_GET_CLASS (choice), "sync"))
        g_object_set (choice, "sync", TRUE, NULL);
      gst_element_set_state (choice, GST_STATE_READY);
    }
  }
  gst_object_unref (bus);
  gst_plugin_feature_list_free (list);
  /* Collected GstMessages are mini-objects: unref each, then the list. */
  g_slist_foreach (errors, (GFunc) gst_mini_object_unref, NULL);
  g_slist_free (errors);

  return choice;
}
开发者ID:an146,项目名称:gst-plugins-good,代码行数:92,


示例12: gst_parse_launch_full

GstElement *create_video_sink(){	GstElement *bin, *decoder = NULL;	GstIterator *iter;	GstIteratorResult res;	GError *error = NULL;	GstPad *pad;	gpointer element = NULL;	const char* decoder_name;#ifndef DESKTOP 	/* create pipeline */                                                                                 	decoder_name = "tividdec20";	bin = gst_parse_launch_full("TIViddec2 genTimeStamps=FALSE /			    engineName=decode /			    codecName=h264dec numFrames=-1 /			! videoscale method=0 /			! video/x-raw-yuv, format=(fourcc)I420, width=320, height=240 /			! ffmpegcolorspace /			! video/x-raw-rgb, bpp=16 /			! TIDmaiVideoSink displayStd=fbdev displayDevice=/dev/fb0 videoStd=QVGA /			    videoOutput=LCD resizer=FALSE accelFrameCopy=TRUE",			NULL, 0, &error);                                      #else	decoder_name = "decodebin";	bin = gst_parse_launch_full("decodebin /			! videoscale method=0 /			! video/x-raw-yuv, format=(fourcc)I420, width=320, height=240 /			! xvimagesink",			NULL, 0, &error);                                      #endif	if (!bin) {		g_error("GStreamer: failed to parse video sink pipeline/n");		return NULL;	}              	gst_object_set_name(GST_OBJECT(bin), "video-sink");	iter = gst_bin_iterate_elements(GST_BIN(bin));	res = gst_iterator_next (iter, &element);	while (res == GST_ITERATOR_OK) {		gchar *name;		name = gst_object_get_name(GST_OBJECT (element));		if (name) {			if (!strncmp(name, decoder_name, strlen(decoder_name))) {				decoder = GST_ELEMENT(element); 			}			g_printf("GS: video sink element: %s /n", name);			g_free (name);		}		gst_object_unref (element);		element = NULL;		res = gst_iterator_next (iter, &element);	}	gst_iterator_free (iter);	if (!decoder) {		/* mem leak */		g_printf("decoder element not found/n");		return NULL;	}	/* add ghostpad */	pad = gst_element_get_static_pad (decoder, "sink");	gst_element_add_pad(bin, gst_ghost_pad_new("sink", pad));	gst_object_unref(GST_OBJECT(pad));	return bin;}
开发者ID:afenkart,项目名称:ti_gstreamer,代码行数:73,


示例13: my_bus_callback

/* Pipeline bus watch: dispatches GStreamer bus messages.
 * See the GStreamer manual, section "bus message types".
 *
 * EOS and ERROR tear the pipeline down to NULL and notify the registered
 * state callback; STATE_CHANGED is forwarded only for the pipeline itself.
 * Returns 1 (TRUE) so the watch stays installed.
 *
 * Fixes over the original: restores the '\n' escapes that were garbled to
 * "/n" and drops the unused 'owner' local. NOTE(review): g_error() in the
 * ERROR branch aborts the process, making the cleanup after it dead code —
 * confirm whether g_warning() was intended; left as-is. */
static gboolean my_bus_callback(GstBus *bus, GstMessage *msg,
	gpointer user_data)
{
	GstMessageType msgType;
	GstObject *msgSrc;
	gchar *msgSrcName;

	/* used in switch */
	/* error message */
	gchar *debug;
	GError *err;
	GstState oldstate, newstate, pending;

	msgType = GST_MESSAGE_TYPE(msg);
	msgSrc = GST_MESSAGE_SRC(msg);
	msgSrcName = GST_OBJECT_NAME(msgSrc);

	switch (GST_MESSAGE_TYPE(msg)) {
	case GST_MESSAGE_EOS:
		g_print("GStreamer: end-of-stream\n");
		pthread_mutex_lock(&g_mutex);
		gst_element_set_state(GST_ELEMENT(g_pipeline), GST_STATE_NULL);
		trigger_callback(GST_STATE_NULL);
		pthread_mutex_unlock(&g_mutex);
		break;
	case GST_MESSAGE_ERROR:
		gst_message_parse_error(msg, &err, &debug);
		g_free (debug);
		g_error("GStreamer: error: [%d] %s\n", err->code, err->message);
		g_error_free(err);

		/* TODO no sleep in callback */
		pthread_mutex_lock(&g_mutex);
		/* setting state to null flushes pipeline */
		gst_element_set_state(GST_ELEMENT(g_pipeline), GST_STATE_NULL);
		trigger_callback(GST_STATE_NULL);
		pthread_mutex_unlock(&g_mutex);
		break;
	case GST_MESSAGE_STATE_CHANGED:
		gst_message_parse_state_changed(msg, &oldstate, &newstate, &pending);
#if 0   /* noisy */
		g_print("GStreamer: %s: State change: OLD: '%s', NEW: '%s', PENDING: '%s'\n",
				msgSrcName,
				gststate_get_name(oldstate),
				gststate_get_name(newstate),
				gststate_get_name(pending));
#endif
		/* Only the top-level pipeline's state changes are reported. */
		if (!strcmp(msgSrcName, g_pipeline_name))
			trigger_callback(newstate); /* TODO GstState != GStreamer_state */
		break;
	case GST_MESSAGE_WARNING:
	case GST_MESSAGE_INFO:
		/* TODO */
		break;
	case GST_MESSAGE_APPLICATION:  /* marshal information into the main thread */
	case GST_MESSAGE_ASYNC_START:
	case GST_MESSAGE_ASYNC_DONE:
	case GST_MESSAGE_BUFFERING: /* caching of network streams */
	case GST_MESSAGE_CLOCK_LOST:
	case GST_MESSAGE_CLOCK_PROVIDE:
	case GST_MESSAGE_ELEMENT:  /* custom message, e.g. qtdemux redirect */
	case GST_MESSAGE_LATENCY:
	case GST_MESSAGE_NEW_CLOCK:
	case GST_MESSAGE_REQUEST_STATE:
	case GST_MESSAGE_SEGMENT_DONE:
	case GST_MESSAGE_SEGMENT_START:
	case GST_MESSAGE_STATE_DIRTY:
	case GST_MESSAGE_STEP_DONE:
	case GST_MESSAGE_STRUCTURE_CHANGE:
	case GST_MESSAGE_TAG: /* meta data: artist, title */
		/* ignore */
		break;
	case GST_MESSAGE_DURATION:
	default:
		g_print("GStreamer: BUS_CALL %s %d\n",
				gst_message_type_get_name(GST_MESSAGE_TYPE(msg)),
				GST_MESSAGE_TYPE(msg));
		break;
	}

	return 1;
}
开发者ID:afenkart,项目名称:ti_gstreamer,代码行数:95,


示例14: main

//.........这里部分代码省略.........        strncpy (input, optarg, sizeof (input) / sizeof (input[0]));        break;      case 'f':        frequency = atol (optarg);        break;      case 'h':        printf ("Usage: v4l2src-test [OPTION].../n");        for (c = 0; long_options[c].name; ++c) {          printf ("-%c, --%s/r/t/t/t/t%s/n", long_options[c].val,              long_options[c].name, long_options_desc[c]);        }        exit (0);        break;      case '?':        /* getopt_long already printed an error message. */        printf ("Use -h to see help message./n");        break;      default:        abort ();    }  }  /* Print any remaining command line arguments (not options). */  if (optind < argc) {    printf ("Use -h to see help message./n" "non-option ARGV-elements: ");    while (optind < argc)      printf ("%s ", argv[optind++]);    putchar ('/n');  }  /* init */  gst_init (&argc, &argv);  /* create elements */  if (!(pipeline = gst_pipeline_new ("my_pipeline"))) {    fprintf (stderr, "error: gst_pipeline_new return NULL");    return -1;  }  if (!(source = gst_element_factory_make ("v4l2src", NULL))) {    fprintf (stderr,        "error: gst_element_factory_make (/"v4l2src/", NULL) return NULL");    return -1;  }  if (!(sink = gst_element_factory_make ("xvimagesink", NULL))) {    fprintf (stderr,        "error: gst_element_factory_make (/"xvimagesink/", NULL) return NULL");    return -1;  }  if (numbuffers > -1) {    g_object_set (source, "num-buffers", numbuffers, NULL);  }  if (device[0]) {    g_object_set (source, "device", device, NULL);  }  if (input[0]) {    g_object_set (source, "input", input, NULL);  }  if (frequency) {    g_object_set (source, "frequency", frequency, NULL);  }  /* you would normally check that the elements were created properly */  bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));  gst_bus_add_watch (bus, my_bus_callback, NULL);  /* put together a pipeline */  gst_bin_add_many (GST_BIN (pipeline), source, sink, NULL);  
gst_element_link_pads (source, "src", sink, "sink");  /* start the pipeline */  gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_PLAYING);  loop = g_main_loop_new (NULL, FALSE);  input_thread = g_thread_try_new ("v4l2src-test", read_user, source, NULL);  if (input_thread == NULL) {    fprintf (stderr, "error: g_thread_try_new() failed");    return -1;  }  g_main_loop_run (loop);  g_thread_join (input_thread);  gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_NULL);  gst_object_unref (bus);  gst_object_unref (pipeline);  gst_deinit ();  return 0;}
开发者ID:BigBrother-International,项目名称:gst-plugins-good,代码行数:101,


示例15: gst_hls_demux_cache_fragments

/*
 * Pre-buffer the first fragments of an HLS stream before playback starts.
 *
 * For a variant (master) playlist, first selects a child playlist — the
 * current variant when no connection speed is configured, otherwise the
 * best playlist for the configured bitrate — and fetches it.  For
 * non-live streams it posts the total duration on the bus, then downloads
 * demux->fragments_cache fragments, posting buffering progress messages
 * (0..100%) as it goes.
 *
 * Returns TRUE when caching completed (or the playlist ended), FALSE on
 * download error or when demux->cancelled was set by another thread.
 */
static gboolean
gst_hls_demux_cache_fragments (GstHLSDemux * demux)
{
  gint i;

  /* If this playlist is a variant playlist, select the first one
   * and update it */
  if (gst_m3u8_client_has_variant_playlist (demux->client)) {
    GstM3U8 *child = NULL;

    if (demux->connection_speed == 0) {
      /* no bandwidth hint: stick with the client's current variant */
      GST_M3U8_CLIENT_LOCK (demux->client);
      child = demux->client->main->current_variant->data;
      GST_M3U8_CLIENT_UNLOCK (demux->client);
    } else {
      GList *tmp = gst_m3u8_client_get_playlist_for_bitrate (demux->client,
          demux->connection_speed);

      child = GST_M3U8 (tmp->data);
    }

    gst_m3u8_client_set_current (demux->client, child);
    if (!gst_hls_demux_update_playlist (demux, FALSE)) {
      GST_ERROR_OBJECT (demux, "Could not fetch the child playlist %s",
          child->uri);
      return FALSE;
    }
  }

  /* live streams have no fixed duration, so only post it for VOD */
  if (!gst_m3u8_client_is_live (demux->client)) {
    GstClockTime duration = gst_m3u8_client_get_duration (demux->client);

    GST_DEBUG_OBJECT (demux, "Sending duration message : %" GST_TIME_FORMAT,
        GST_TIME_ARGS (duration));
    if (duration != GST_CLOCK_TIME_NONE)
      gst_element_post_message (GST_ELEMENT (demux),
          gst_message_new_duration (GST_OBJECT (demux),
              GST_FORMAT_TIME, duration));
  }

  /* Cache the first fragments */
  for (i = 0; i < demux->fragments_cache; i++) {
    /* report buffering progress as a percentage of the cache target */
    gst_element_post_message (GST_ELEMENT (demux),
        gst_message_new_buffering (GST_OBJECT (demux),
            100 * i / demux->fragments_cache));
    g_get_current_time (&demux->next_update);
    if (!gst_hls_demux_get_next_fragment (demux, TRUE)) {
      /* reaching the end of the playlist is not an error */
      if (demux->end_of_playlist)
        break;
      if (!demux->cancelled)
        GST_ERROR_OBJECT (demux, "Error caching the first fragments");
      return FALSE;
    }
    /* make sure we stop caching fragments if something cancelled it */
    if (demux->cancelled)
      return FALSE;
    gst_hls_demux_switch_playlist (demux);
  }
  /* caching done: tell downstream we are at 100% */
  gst_element_post_message (GST_ELEMENT (demux),
      gst_message_new_buffering (GST_OBJECT (demux), 100));

  g_get_current_time (&demux->next_update);

  demux->need_cache = FALSE;

  return TRUE;
}
开发者ID:drothlis,项目名称:gst-plugins-bad,代码行数:68,


示例16: opus_dec_chain_parse_data

static GstFlowReturnopus_dec_chain_parse_data (GstOpusDec * dec, GstBuffer * buffer){  GstFlowReturn res = GST_FLOW_OK;  gint size;  guint8 *data;  GstBuffer *outbuf;  gint16 *out_data;  int n, err;  int samples;  unsigned int packet_size;  GstBuffer *buf;  if (dec->state == NULL) {    /* If we did not get any headers, default to 2 channels */    if (dec->n_channels == 0) {      GstCaps *caps;      GST_INFO_OBJECT (dec, "No header, assuming single stream");      dec->n_channels = 2;      dec->sample_rate = 48000;      caps = gst_opus_dec_negotiate (dec);      GST_INFO_OBJECT (dec, "Setting src caps to %" GST_PTR_FORMAT, caps);      gst_pad_set_caps (GST_AUDIO_DECODER_SRC_PAD (dec), caps);      gst_caps_unref (caps);      /* default stereo mapping */      dec->channel_mapping_family = 0;      dec->channel_mapping[0] = 0;      dec->channel_mapping[1] = 1;      dec->n_streams = 1;      dec->n_stereo_streams = 1;    }    GST_DEBUG_OBJECT (dec, "Creating decoder with %d channels, %d Hz",        dec->n_channels, dec->sample_rate);#ifndef GST_DISABLE_DEBUG    gst_opus_common_log_channel_mapping_table (GST_ELEMENT (dec), opusdec_debug,        "Mapping table", dec->n_channels, dec->channel_mapping);#endif    GST_DEBUG_OBJECT (dec, "%d streams, %d stereo", dec->n_streams,        dec->n_stereo_streams);    dec->state =        opus_multistream_decoder_create (dec->sample_rate, dec->n_channels,        dec->n_streams, dec->n_stereo_streams, dec->channel_mapping, &err);    if (!dec->state || err != OPUS_OK)      goto creation_failed;  }  if (buffer) {    GST_DEBUG_OBJECT (dec, "Received buffer of size %u",        GST_BUFFER_SIZE (buffer));  } else {    GST_DEBUG_OBJECT (dec, "Received missing buffer");  }  /* if using in-band FEC, we introdude one extra frame's delay as we need     to potentially wait for next buffer to decode a missing buffer */  if (dec->use_inband_fec && !dec->primed) {    GST_DEBUG_OBJECT (dec, "First buffer received in FEC mode, early out");    
gst_buffer_replace (&dec->last_buffer, buffer);    dec->primed = TRUE;    goto done;  }  /* That's the buffer we'll be sending to the opus decoder. */  buf = (dec->use_inband_fec      && GST_BUFFER_SIZE (dec->last_buffer) > 0) ? dec->last_buffer : buffer;  if (buf && GST_BUFFER_SIZE (buf) > 0) {    data = GST_BUFFER_DATA (buf);    size = GST_BUFFER_SIZE (buf);    GST_DEBUG_OBJECT (dec, "Using buffer of size %u", size);  } else {    /* concealment data, pass NULL as the bits parameters */    GST_DEBUG_OBJECT (dec, "Using NULL buffer");    data = NULL;    size = 0;  }  /* use maximum size (120 ms) as the number of returned samples is     not constant over the stream. */  samples = 120 * dec->sample_rate / 1000;  packet_size = samples * dec->n_channels * 2;  res = gst_pad_alloc_buffer_and_set_caps (GST_AUDIO_DECODER_SRC_PAD (dec),      GST_BUFFER_OFFSET_NONE, packet_size,      GST_PAD_CAPS (GST_AUDIO_DECODER_SRC_PAD (dec)), &outbuf);  if (res != GST_FLOW_OK) {    GST_DEBUG_OBJECT (dec, "buf alloc flow: %s", gst_flow_get_name (res));    return res;  }  out_data = (gint16 *) GST_BUFFER_DATA (outbuf);  if (dec->use_inband_fec) {    if (dec->last_buffer) {      /* normal delayed decode */      GST_LOG_OBJECT (dec, "FEC enabled, decoding last delayed buffer");      n = opus_multistream_decode (dec->state, data, size, out_data, samples,//.........这里部分代码省略.........
开发者ID:freedesktop-unofficial-mirror,项目名称:gstreamer-sdk__gst-plugins-bad,代码行数:101,


示例17: gst_aggregator_iterate_sinkpads

/**
 * gst_aggregator_iterate_sinkpads:
 * @self: The #GstAggregator
 * @func: The function to call.
 * @user_data: The data to pass to @func.
 *
 * Iterate the sinkpads of aggregator to call a function on them.
 *
 * This method guarantees that @func will be called only once for each
 * sink pad, even if the iterator has to resync mid-iteration.
 *
 * Returns: the result of the last @func invocation, or %FALSE if no pad
 * was visited or the iterator could not be obtained.
 */
gboolean
gst_aggregator_iterate_sinkpads (GstAggregator * self,
    GstAggregatorPadForeachFunc func, gpointer user_data)
{
  gboolean result = FALSE;
  GstIterator *iter;
  gboolean done = FALSE;
  GValue item = { 0, };
  /* pads already handed to @func; consulted after a RESYNC so a pad is
   * never visited twice */
  GList *seen_pads = NULL;

  iter = gst_element_iterate_sink_pads (GST_ELEMENT (self));

  if (!iter)
    goto no_iter;

  while (!done) {
    switch (gst_iterator_next (iter, &item)) {
      case GST_ITERATOR_OK:
      {
        GstPad *pad;

        pad = g_value_get_object (&item);

        /* if already pushed, skip. FIXME, find something faster to tag pads */
        if (pad == NULL || g_list_find (seen_pads, pad)) {
          g_value_reset (&item);
          break;
        }

        GST_LOG_OBJECT (self, "calling function on pad %s:%s",
            GST_DEBUG_PAD_NAME (pad));
        result = func (self, pad, user_data);

        /* a FALSE return from @func stops the iteration */
        done = !result;

        seen_pads = g_list_prepend (seen_pads, pad);

        g_value_reset (&item);
        break;
      }
      case GST_ITERATOR_RESYNC:
        /* pad list changed under us; restart (seen_pads prevents repeats) */
        gst_iterator_resync (iter);
        break;
      case GST_ITERATOR_ERROR:
        GST_ERROR_OBJECT (self,
            "Could not iterate over internally linked pads");
        done = TRUE;
        break;
      case GST_ITERATOR_DONE:
        done = TRUE;
        break;
    }
  }
  g_value_unset (&item);
  gst_iterator_free (iter);

  if (seen_pads == NULL) {
    GST_DEBUG_OBJECT (self, "No pad seen");
    return FALSE;
  }

  g_list_free (seen_pads);
no_iter:
  return result;
}
开发者ID:asdlei00,项目名称:gst-plugins-bad,代码行数:77,


示例18: gst_wrapper_camera_bin_reset_video_src_caps

/*
 * Change the caps produced by the camera source at runtime.
 *
 * No-op when the requested caps equal the caps already set on
 * self->src_filter.  Otherwise the source is taken down to NULL,
 * the capsfilter is updated, and the source is brought back up in sync
 * with its parent.  The source's clock and base time are saved across the
 * restart and restored afterwards — including on every child of the
 * source when it is a bin — so running time stays continuous.
 */
static void
gst_wrapper_camera_bin_reset_video_src_caps (GstWrapperCameraBinSrc * self,
    GstCaps * caps)
{
  GstClock *clock;
  gint64 base_time;

  GST_DEBUG_OBJECT (self, "Resetting src caps to %" GST_PTR_FORMAT, caps);
  if (self->src_vid_src) {
    GstCaps *old_caps;

    g_object_get (G_OBJECT (self->src_filter), "caps", &old_caps, NULL);
    if (gst_caps_is_equal (caps, old_caps)) {
      /* nothing to do: avoid a needless NULL->PLAYING cycle */
      GST_DEBUG_OBJECT (self, "old and new caps are same, do not reset it");
      if (old_caps)
        gst_caps_unref (old_caps);
      return;
    }
    if (old_caps)
      gst_caps_unref (old_caps);

    /* remember timing so the restarted source keeps the same running time */
    clock = gst_element_get_clock (self->src_vid_src);
    base_time = gst_element_get_base_time (self->src_vid_src);

    /* Ideally, we should only need to get the source to READY here,
     * but it seems v4l2src isn't happy with this. Putting to NULL makes
     * it work.
     *
     * TODO fix this in v4l2src
     */
    gst_element_set_state (self->src_vid_src, GST_STATE_NULL);
    set_capsfilter_caps (self, caps);

    self->drop_newseg = TRUE;

    GST_DEBUG_OBJECT (self, "Bringing source up");
    if (!gst_element_sync_state_with_parent (self->src_vid_src)) {
      GST_WARNING_OBJECT (self, "Failed to reset source caps");
      gst_element_set_state (self->src_vid_src, GST_STATE_NULL);
    }

    if (clock) {
      gst_element_set_clock (self->src_vid_src, clock);
      gst_element_set_base_time (self->src_vid_src, base_time);

      /* a bin does not propagate base time to children set after the
       * fact, so walk them explicitly */
      if (GST_IS_BIN (self->src_vid_src)) {
        GstIterator *it =
            gst_bin_iterate_elements (GST_BIN (self->src_vid_src));
        gpointer item = NULL;
        gboolean done = FALSE;
        while (!done) {
          switch (gst_iterator_next (it, &item)) {
            case GST_ITERATOR_OK:
              gst_element_set_base_time (GST_ELEMENT (item), base_time);
              gst_object_unref (item);
              break;
            case GST_ITERATOR_RESYNC:
              gst_iterator_resync (it);
              break;
            case GST_ITERATOR_ERROR:
              done = TRUE;
              break;
            case GST_ITERATOR_DONE:
              done = TRUE;
              break;
          }
        }
        gst_iterator_free (it);
      }
      gst_object_unref (clock);
    }
  }
}
开发者ID:pli3,项目名称:gst-plugins-bad,代码行数:74,


示例19: gst_bin_iterate_all_by_interface

/*
 * Find the best color-balance element inside the global pipeline.
 *
 * Iterates every element implementing GstColorBalance and returns a new
 * reference to the preferred one: a HARDWARE-type balance wins over a
 * software one; among equals the first found is kept.  Iteration stops
 * early once a hardware element is held.  On iterator RESYNC all partial
 * results are dropped and the scan restarts.
 *
 * Returns a referenced GstElement (caller must gst_object_unref) or NULL
 * when the pipeline exposes no valid color-balance element.
 */
static GstElement *find_color_balance_element() {
	GstIterator *iterator = gst_bin_iterate_all_by_interface(
		GST_BIN(pipeline),  GST_TYPE_COLOR_BALANCE);
	
	GstElement *color_balance_element = NULL;
	gboolean done = FALSE, hardware = FALSE;
	/* GStreamer 1.x iterators yield GValues, 0.10 yields raw pointers */
#if GST_CHECK_VERSION(1, 0, 0)
	GValue item = G_VALUE_INIT;
#else
	gpointer item;
#endif
	while (!done) {
	switch (gst_iterator_next(iterator, &item)) {
	case GST_ITERATOR_OK : {
#if GST_CHECK_VERSION(1, 0, 0)
		GstElement *element = g_value_get_object(&item);
#else
		GstElement *element = GST_ELEMENT(item);
#endif
		if (is_valid_color_balance_element(element)) {
			if (!color_balance_element) {
				/* first candidate: keep it and note its type */
				color_balance_element = GST_ELEMENT_CAST(
						gst_object_ref(element));
				hardware =
					(gst_color_balance_get_balance_type(GST_COLOR_BALANCE
					(element)) == GST_COLOR_BALANCE_HARDWARE);
			}
			else if (!hardware) {
				/* current pick is software; upgrade to hardware if found */
				gboolean tmp =
					(gst_color_balance_get_balance_type(GST_COLOR_BALANCE
					(element)) == GST_COLOR_BALANCE_HARDWARE);
				if (tmp) {
					if (color_balance_element)
						gst_object_unref(color_balance_element);
					color_balance_element =
						GST_ELEMENT_CAST(gst_object_ref(element));
					hardware = TRUE;
				}
			}
		}
#if GST_CHECK_VERSION(1, 0, 0)
		g_value_reset(&item);
#endif
		/* hardware balance is the best we can get; stop searching */
		if (hardware && color_balance_element)
			done = TRUE;
        	break;
		}
	case GST_ITERATOR_RESYNC :
		/* pipeline changed mid-scan: discard partial result, restart */
		gst_iterator_resync(iterator);
		done = FALSE;
		hardware = FALSE;
		if (color_balance_element)
			gst_object_unref(color_balance_element);
		color_balance_element = NULL;
		break;
	case GST_ITERATOR_DONE:
	case GST_ITERATOR_ERROR:
	default:
		done = TRUE;
	}
	}
#if GST_CHECK_VERSION(1, 0, 0)
	g_value_unset(&item);
#endif
	gst_iterator_free(iterator);
	return color_balance_element;
}
开发者ID:BorodaZizitopa,项目名称:gstplay,代码行数:68,


示例20: gst_gl_stereo_mix_process_frames

/* called with the object lock held */static gbooleangst_gl_stereo_mix_process_frames (GstGLStereoMix * mixer){  GstVideoAggregator *vagg = GST_VIDEO_AGGREGATOR (mixer);  GstBuffer *converted_buffer, *inbuf;  GstVideoInfo *out_info = &vagg->info;#ifndef G_DISABLE_ASSERT  gint n;#endif  gint v, views;  gint valid_views = 0;  GList *walk;  inbuf = gst_buffer_new ();  walk = GST_ELEMENT (mixer)->sinkpads;  while (walk) {    GstGLStereoMixPad *pad = walk->data;    GstMemory *in_mem;    GST_LOG_OBJECT (mixer, "Handling frame %d", valid_views);    if (!pad || !pad->current_buffer) {      GST_DEBUG ("skipping texture, null frame");      walk = g_list_next (walk);      continue;    }    in_mem = gst_buffer_get_memory (pad->current_buffer, 0);    GST_LOG_OBJECT (mixer,        "Appending memory %" GST_PTR_FORMAT " to intermediate buffer", in_mem);    /* Appending the memory to a 2nd buffer locks it     * exclusive a 2nd time, which will mark it for     * copy-on-write. The ref will keep the memory     * alive but we add a parent_buffer_meta to also     * prevent the input buffer from returning to any buffer     * pool it might belong to     */    gst_buffer_append_memory (inbuf, in_mem);    /* Use parent buffer meta to keep input buffer alive */    gst_buffer_add_parent_buffer_meta (inbuf, pad->current_buffer);    valid_views++;    walk = g_list_next (walk);  }  if (mixer->mix_info.views != valid_views) {    GST_WARNING_OBJECT (mixer, "Not enough input views to process");    return FALSE;  }  if (GST_VIDEO_INFO_MULTIVIEW_MODE (out_info) ==      GST_VIDEO_MULTIVIEW_MODE_SEPARATED)    views = out_info->views;  else    views = 1;  if (gst_gl_view_convert_submit_input_buffer (mixer->viewconvert,          FALSE, inbuf) != GST_FLOW_OK)    return FALSE;  /* Clear any existing buffers, just in case */  gst_buffer_replace (&mixer->primary_out, NULL);  gst_buffer_replace (&mixer->auxilliary_out, NULL);  if (gst_gl_view_convert_get_output (mixer->viewconvert,          
&mixer->primary_out) != GST_FLOW_OK)    return FALSE;  if (GST_VIDEO_INFO_MULTIVIEW_MODE (out_info) ==      GST_VIDEO_MULTIVIEW_MODE_FRAME_BY_FRAME) {    if (gst_gl_view_convert_get_output (mixer->viewconvert,            &mixer->auxilliary_out) != GST_FLOW_OK)      return FALSE;  }  if (mixer->primary_out == NULL)    return FALSE;  converted_buffer = mixer->primary_out;#ifndef G_DISABLE_ASSERT  n = gst_buffer_n_memory (converted_buffer);  g_assert (n == GST_VIDEO_INFO_N_PLANES (out_info) * views);#endif  for (v = 0; v < views; v++) {    gst_buffer_add_video_meta_full (converted_buffer, v,        GST_VIDEO_INFO_FORMAT (out_info),        GST_VIDEO_INFO_WIDTH (out_info),        GST_VIDEO_INFO_HEIGHT (out_info),        GST_VIDEO_INFO_N_PLANES (out_info), out_info->offset, out_info->stride);    if (mixer->auxilliary_out) {      gst_buffer_add_video_meta_full (mixer->auxilliary_out, v,          GST_VIDEO_INFO_FORMAT (out_info),          GST_VIDEO_INFO_WIDTH (out_info),          GST_VIDEO_INFO_HEIGHT (out_info),          GST_VIDEO_INFO_N_PLANES (out_info), out_info->offset,          out_info->stride);//.........这里部分代码省略.........
开发者ID:CogentEmbedded,项目名称:gst-plugins-bad,代码行数:101,


示例21: gst_nle_source_pad_added_cb

/*
 * "pad-added" callback for the internal uridecodebin.
 *
 * Inspects the new pad's caps and, for the first video pad (and first
 * audio pad when audio is enabled and the current item plays at rate 1.0),
 * creates an appsink, wires the per-stream EOS/preroll/buffer callbacks,
 * exposes the corresponding ghost src pad on the GstNleSource the first
 * time, installs a pad probe, and links the decoder pad to the appsink.
 *
 * NOTE(review): caps from gst_pad_get_caps_reffed() is never unreffed
 * here — looks like a reference leak; confirm against the 0.10 API.
 */
static void
gst_nle_source_pad_added_cb (GstElement * element, GstPad * pad,
    GstNleSource * nlesrc)
{
  GstCaps *caps;
  const GstStructure *s;
  const gchar *mime;
  GstElement *appsink = NULL;
  GstPad *sink_pad;
  GstAppSinkCallbacks appsink_cbs;
  GstNleSrcItem *item;

  item = (GstNleSrcItem *) g_list_nth_data (nlesrc->queue, nlesrc->index);

  caps = gst_pad_get_caps_reffed (pad);
  s = gst_caps_get_structure (caps, 0);
  mime = gst_structure_get_name (s);
  GST_DEBUG_OBJECT (nlesrc, "Found mime type: %s", mime);

  if (g_strrstr (mime, "video") && !nlesrc->video_linked) {
    /* first video stream of this item: route it into an appsink */
    appsink = gst_element_factory_make ("appsink", NULL);
    memset (&appsink_cbs, 0, sizeof (appsink_cbs));
    appsink_cbs.eos = gst_nle_source_on_video_eos;
    appsink_cbs.new_preroll = gst_nle_source_on_preroll_buffer;
    appsink_cbs.new_buffer = gst_nle_source_on_video_buffer;
    nlesrc->video_linked = TRUE;
    if (!nlesrc->video_srcpad_added) {
      /* expose the source's video pad the first time only */
      gst_pad_set_active (nlesrc->video_srcpad, TRUE);
      gst_element_add_pad (GST_ELEMENT (nlesrc),
          gst_object_ref (nlesrc->video_srcpad));
      nlesrc->video_srcpad_added = TRUE;
    }
    gst_pad_add_event_probe (GST_BASE_SINK_PAD (GST_BASE_SINK (appsink)),
        (GCallback) gst_nle_source_video_pad_probe_cb, nlesrc);
    nlesrc->video_eos = FALSE;
  } else if (g_strrstr (mime, "audio") && nlesrc->with_audio
      && !nlesrc->audio_linked && (item ? item->rate == 1.0 : TRUE)) {
    /* audio is only consumed at normal rate (no rate != 1.0 resampling) */
    appsink = gst_element_factory_make ("appsink", NULL);
    memset (&appsink_cbs, 0, sizeof (appsink_cbs));
    appsink_cbs.eos = gst_nle_source_on_audio_eos;
    appsink_cbs.new_preroll = gst_nle_source_on_preroll_buffer;
    appsink_cbs.new_buffer = gst_nle_source_on_audio_buffer;
    nlesrc->audio_linked = TRUE;
    if (!nlesrc->audio_srcpad_added) {
      gst_pad_set_active (nlesrc->audio_srcpad, TRUE);
      gst_element_add_pad (GST_ELEMENT (nlesrc),
          gst_object_ref (nlesrc->audio_srcpad));
      nlesrc->audio_srcpad_added = TRUE;
    }
    gst_pad_add_event_probe (GST_BASE_SINK_PAD (GST_BASE_SINK (appsink)),
        (GCallback) gst_nle_source_audio_pad_probe_cb, nlesrc);
    nlesrc->audio_eos = FALSE;
  }
  if (appsink != NULL) {
    /* sync=FALSE: consume as fast as the decoder produces */
    g_object_set (appsink, "sync", FALSE, NULL);
    gst_app_sink_set_callbacks (GST_APP_SINK (appsink), &appsink_cbs, nlesrc,
        NULL);
    gst_bin_add (GST_BIN (nlesrc->decoder), appsink);
    sink_pad = gst_element_get_static_pad (appsink, "sink");
    gst_pad_link (pad, sink_pad);
    gst_element_sync_state_with_parent (appsink);
    gst_object_unref (sink_pad);
  }
}
开发者ID:fluendo,项目名称:VAS,代码行数:64,


示例22: opus_dec_chain_parse_data

static GstFlowReturnopus_dec_chain_parse_data (GstOpusDec * dec, GstBuffer * buffer){  GstFlowReturn res = GST_FLOW_OK;  gsize size;  guint8 *data;  GstBuffer *outbuf;  gint16 *out_data;  int n, err;  int samples;  unsigned int packet_size;  GstBuffer *buf;  GstMapInfo map, omap;  if (dec->state == NULL) {    /* If we did not get any headers, default to 2 channels */    if (dec->n_channels == 0) {      GST_INFO_OBJECT (dec, "No header, assuming single stream");      dec->n_channels = 2;      dec->sample_rate = 48000;      /* default stereo mapping */      dec->channel_mapping_family = 0;      dec->channel_mapping[0] = 0;      dec->channel_mapping[1] = 1;      dec->n_streams = 1;      dec->n_stereo_streams = 1;      gst_opus_dec_negotiate (dec, NULL);    }    GST_DEBUG_OBJECT (dec, "Creating decoder with %d channels, %d Hz",        dec->n_channels, dec->sample_rate);#ifndef GST_DISABLE_GST_DEBUG    gst_opus_common_log_channel_mapping_table (GST_ELEMENT (dec), opusdec_debug,        "Mapping table", dec->n_channels, dec->channel_mapping);#endif    GST_DEBUG_OBJECT (dec, "%d streams, %d stereo", dec->n_streams,        dec->n_stereo_streams);    dec->state =        opus_multistream_decoder_create (dec->sample_rate, dec->n_channels,        dec->n_streams, dec->n_stereo_streams, dec->channel_mapping, &err);    if (!dec->state || err != OPUS_OK)      goto creation_failed;  }  if (buffer) {    GST_DEBUG_OBJECT (dec, "Received buffer of size %" G_GSIZE_FORMAT,        gst_buffer_get_size (buffer));  } else {    GST_DEBUG_OBJECT (dec, "Received missing buffer");  }  /* if using in-band FEC, we introdude one extra frame's delay as we need     to potentially wait for next buffer to decode a missing buffer */  if (dec->use_inband_fec && !dec->primed) {    GST_DEBUG_OBJECT (dec, "First buffer received in FEC mode, early out");    gst_buffer_replace (&dec->last_buffer, buffer);    dec->primed = TRUE;    goto done;  }  /* That's the buffer we'll be sending to the opus decoder. 
*/  buf = (dec->use_inband_fec      && gst_buffer_get_size (dec->last_buffer) >      0) ? dec->last_buffer : buffer;  if (buf && gst_buffer_get_size (buf) > 0) {    gst_buffer_map (buf, &map, GST_MAP_READ);    data = map.data;    size = map.size;    GST_DEBUG_OBJECT (dec, "Using buffer of size %" G_GSIZE_FORMAT, size);  } else {    /* concealment data, pass NULL as the bits parameters */    GST_DEBUG_OBJECT (dec, "Using NULL buffer");    data = NULL;    size = 0;  }  if (gst_buffer_get_size (buffer) == 0) {    GstClockTime const opus_plc_alignment = 2500 * GST_USECOND;    GstClockTime aligned_missing_duration;    GstClockTime missing_duration = GST_BUFFER_DURATION (buffer);    GST_DEBUG_OBJECT (dec,        "missing buffer, doing PLC duration %" GST_TIME_FORMAT        " plus leftover %" GST_TIME_FORMAT, GST_TIME_ARGS (missing_duration),        GST_TIME_ARGS (dec->leftover_plc_duration));    /* add the leftover PLC duration to that of the buffer */    missing_duration += dec->leftover_plc_duration;    /* align the combined buffer and leftover PLC duration to multiples     * of 2.5ms, always rounding down, and store excess duration for later */    aligned_missing_duration =        (missing_duration / opus_plc_alignment) * opus_plc_alignment;    dec->leftover_plc_duration = missing_duration - aligned_missing_duration;    /* Opus' PLC cannot operate with less than 2.5ms; skip PLC     * and accumulate the missing duration in the leftover_plc_duration//.........这里部分代码省略.........
开发者ID:vanechipi,项目名称:gst-plugins-bad,代码行数:101,


示例23: gst_nle_source_next

/*
 * Advance to the next item in the edit queue.
 *
 * Pushes EOS when the queue is exhausted.  Otherwise tears down the
 * previous decoder pipeline (state to NULL, waited on, then unreffed),
 * builds a fresh pipeline around uridecodebin with the item's URI,
 * reconnects all signal handlers and the bus watch, resets the per-item
 * bookkeeping flags/timestamps, and starts playback.  If the item has a
 * valid stop time (and is not a still picture) an accurate segment seek
 * [start, stop] is issued once the pipeline reached its target state.
 *
 * On a state-change failure the item is skipped via
 * gst_nle_source_check_eos().
 */
static void
gst_nle_source_next (GstNleSource * nlesrc)
{
  GstNleSrcItem *item;
  GstStateChangeReturn ret;
  GstElement *uridecodebin;
  GstBus *bus;
  GstState state;

  nlesrc->index++;

  /* no more items: signal end of the whole edit list */
  if (nlesrc->index >= g_list_length (nlesrc->queue)) {
    gst_nle_source_push_eos (nlesrc);
    return;
  }

  if (nlesrc->source != NULL) {
    gst_object_unref (nlesrc->source);
    nlesrc->source = NULL;
  }

  if (nlesrc->decoder != NULL) {
    /* synchronously shut the old pipeline down before dropping it */
    gst_element_set_state (GST_ELEMENT (nlesrc->decoder), GST_STATE_NULL);
    gst_element_get_state (GST_ELEMENT (nlesrc->decoder), NULL, NULL, 0);
    gst_object_unref (nlesrc->decoder);
  }

  nlesrc->decoder = gst_pipeline_new ("decoder");
  uridecodebin = gst_element_factory_make ("uridecodebin", NULL);
  /* Connect signal to recover source element for queries in bytes */
  g_signal_connect (uridecodebin, "source-setup",
      G_CALLBACK (gst_nle_source_on_source_setup), nlesrc);

  gst_bin_add (GST_BIN (nlesrc->decoder), uridecodebin);
  g_signal_connect (uridecodebin, "autoplug-select",
      G_CALLBACK (lgm_filter_video_decoders), nlesrc);
  g_signal_connect (uridecodebin, "pad-added",
      G_CALLBACK (gst_nle_source_pad_added_cb), nlesrc);
  g_signal_connect (uridecodebin, "no-more-pads",
      G_CALLBACK (gst_nle_source_no_more_pads), nlesrc);

  bus = GST_ELEMENT_BUS (nlesrc->decoder);
  gst_bus_add_signal_watch (bus);
  g_signal_connect (bus, "message", G_CALLBACK (gst_nle_source_bus_message),
      nlesrc);
  item = (GstNleSrcItem *) g_list_nth_data (nlesrc->queue, nlesrc->index);
  GST_INFO_OBJECT (nlesrc, "Starting next item with uri:%s", item->file_path);
  GST_INFO_OBJECT (nlesrc, "start:%" GST_TIME_FORMAT " stop:%"
      GST_TIME_FORMAT " rate:%f", GST_TIME_ARGS (item->start),
      GST_TIME_ARGS (item->stop), item->rate);
  g_object_set (uridecodebin, "uri", item->file_path, NULL);
  nlesrc->seek_done = FALSE;
  /* a valid stop time means a trimming seek will follow below */
  if (GST_CLOCK_TIME_IS_VALID (item->stop)) {
    nlesrc->video_seek_done = FALSE;
    nlesrc->audio_seek_done = FALSE;
  } else {
    nlesrc->video_seek_done = TRUE;
    nlesrc->audio_seek_done = TRUE;
  }
  /* reset per-item state; the pad-added callback flips these back */
  nlesrc->audio_eos = TRUE;
  nlesrc->video_eos = TRUE;
  nlesrc->audio_ts = 0;
  nlesrc->video_ts = 0;
  nlesrc->start_ts = nlesrc->accu_time;
  nlesrc->video_linked = FALSE;
  nlesrc->audio_linked = FALSE;
  nlesrc->item_setup = FALSE;
  nlesrc->cached_duration = 0;
  GST_DEBUG_OBJECT (nlesrc, "Start ts:%" GST_TIME_FORMAT,
      GST_TIME_ARGS (nlesrc->start_ts));
  gst_element_set_state (nlesrc->decoder, GST_STATE_PLAYING);
  /* wait up to 5s for preroll before deciding the item is broken */
  ret = gst_element_get_state (nlesrc->decoder, &state, NULL, 5 * GST_SECOND);
  if (ret == GST_STATE_CHANGE_FAILURE) {
    GST_WARNING_OBJECT (nlesrc, "Error changing state, selecting next item.");
    gst_nle_source_check_eos (nlesrc);
    return;
  }
  nlesrc->seek_done = TRUE;
  if (!item->still_picture && GST_CLOCK_TIME_IS_VALID (item->stop)) {
    GST_DEBUG_OBJECT (nlesrc, "Sending seek event");
    gst_element_seek (nlesrc->decoder, 1, GST_FORMAT_TIME,
        GST_SEEK_FLAG_ACCURATE,
        GST_SEEK_TYPE_SET, item->start, GST_SEEK_TYPE_SET, item->stop);
  }
}
开发者ID:fluendo,项目名称:VAS,代码行数:91,


示例24: CV_FUNCNAME

bool CvVideoWriter_GStreamer::open( const char * filename, int fourcc,        double fps, CvSize frameSize, bool is_color ){    CV_FUNCNAME("CvVideoWriter_GStreamer::open");    __BEGIN__;    //actually doesn't support fourcc parameter and encode an avi with jpegenc    //we need to find a common api between backend to support fourcc for avi    //but also to choose in a common way codec and container format (ogg,dirac,matroska)    // check arguments    assert (filename);    assert (fps > 0);    assert (frameSize.width > 0  &&  frameSize.height > 0);    std::map<int,char*>::iterator encit;    encit=encs.find(fourcc);    if (encit==encs.end())        CV_ERROR( CV_StsUnsupportedFormat,"Gstreamer Opencv backend doesn't support this codec acutally.");//    if(!isInited) {//        gst_init (NULL, NULL);//        isInited = true;//    }    gst_initializer::init();    close();    source=gst_element_factory_make("appsrc",NULL);    file=gst_element_factory_make("filesink", NULL);    enc=gst_element_factory_make(encit->second, NULL);    mux=gst_element_factory_make("avimux", NULL);    color = gst_element_factory_make("ffmpegcolorspace", NULL);    if (!enc)        CV_ERROR( CV_StsUnsupportedFormat, "Your version of Gstreamer doesn't support this codec acutally or needed plugin missing.");    g_object_set(G_OBJECT(file), "location", filename, NULL);    pipeline = gst_pipeline_new (NULL);    GstCaps* caps;    if (is_color) {        input_pix_fmt=1;        caps= gst_video_format_new_caps(GST_VIDEO_FORMAT_BGR,                                        frameSize.width,                                        frameSize.height,                                        (int) (fps * 1000),                                        1000,                                        1,                                        1);    }    else  {        input_pix_fmt=0;        caps= gst_caps_new_simple("video/x-raw-gray",                                  "width", G_TYPE_INT, frameSize.width,                
                  "height", G_TYPE_INT, frameSize.height,                                  "framerate", GST_TYPE_FRACTION, int(fps),1,                                  "bpp",G_TYPE_INT,8,                                  "depth",G_TYPE_INT,8,                                  NULL);    }    gst_app_src_set_caps(GST_APP_SRC(source), caps);    if (fourcc==CV_FOURCC_DEFAULT) {        gst_bin_add_many(GST_BIN(pipeline), source, color,mux, file, NULL);        if(!gst_element_link_many(source,color,enc,mux,file,NULL)) {            CV_ERROR(CV_StsError, "GStreamer: cannot link elements/n");        }    }    else {        gst_bin_add_many(GST_BIN(pipeline), source, color,enc,mux, file, NULL);        if(!gst_element_link_many(source,color,enc,mux,file,NULL)) {            CV_ERROR(CV_StsError, "GStreamer: cannot link elements/n");        }    }    if(gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_PLAYING) ==        GST_STATE_CHANGE_FAILURE) {            CV_ERROR(CV_StsError, "GStreamer: cannot put pipeline to play/n");    }    __END__;    return true;}
开发者ID:4auka,项目名称:opencv,代码行数:76,


示例25: main

intmain (int argc, char *argv[]){  GstElement *filesrc, *osssink, *queue, *parse, *decode;  GstElement *bin;  GstElement *thread;  gst_init (&argc, &argv);  if (argc != 2) {    g_print ("usage: %s <filename>/n", argv[0]);    exit (-1);  }  /* create a new thread to hold the elements */  thread = gst_thread_new ("thread");  g_assert (thread != NULL);  /* create a new bin to hold the elements */  bin = gst_bin_new ("bin");  g_assert (bin != NULL);  /* create a disk reader */  filesrc = gst_element_factory_make ("filesrc", "disk_source");  g_assert (filesrc != NULL);  g_object_set (G_OBJECT (filesrc), "location", argv[1], NULL);  g_signal_connect (G_OBJECT (filesrc), "eos", G_CALLBACK (eos), thread);  queue = gst_element_factory_make ("queue", "queue");  /* and an audio sink */  osssink = gst_element_factory_make ("osssink", "play_audio");  g_assert (osssink != NULL);  parse = gst_element_factory_make ("mp3parse", "parse");  decode = gst_element_factory_make ("mpg123", "decode");  /* add objects to the main bin */  gst_bin_add (GST_BIN (bin), filesrc);  gst_bin_add (GST_BIN (bin), queue);  gst_bin_add (GST_BIN (thread), parse);  gst_bin_add (GST_BIN (thread), decode);  gst_bin_add (GST_BIN (thread), osssink);  gst_element_link_many (filesrc, queue, parse, decode, osssink, NULL);  /* make it ready */  gst_element_set_state (GST_ELEMENT (bin), GST_STATE_READY);  /* start playing */  gst_element_set_state (GST_ELEMENT (bin), GST_STATE_PLAYING);  playing = TRUE;  while (playing) {    gst_bin_iterate (GST_BIN (bin));  }  gst_element_set_state (GST_ELEMENT (bin), GST_STATE_NULL);  exit (0);}
开发者ID:WangCrystal,项目名称:gstreamer,代码行数:62,


示例26: daala_handle_data_packet

static GstFlowReturndaala_handle_data_packet (GstDaalaDec * dec, ogg_packet * packet,    GstVideoCodecFrame * frame){  /* normal data packet */  od_img img;  gboolean keyframe;  GstFlowReturn result;  if (G_UNLIKELY (!dec->have_header))    goto not_initialized;  /* the second most significant bit of the first data byte is cleared    * for keyframes. We can only check it if it's not a zero-length packet. */  keyframe = packet->bytes && ((packet->packet[0] & 0x40));  if (G_UNLIKELY (keyframe)) {    GST_DEBUG_OBJECT (dec, "we have a keyframe");    dec->need_keyframe = FALSE;  } else if (G_UNLIKELY (dec->need_keyframe)) {    goto dropping;  }  GST_DEBUG_OBJECT (dec, "parsing data packet");  /* this does the decoding */  if (G_UNLIKELY (daala_decode_packet_in (dec->decoder, &img, packet) < 0))    goto decode_error;  if (frame &&      (gst_video_decoder_get_max_decode_time (GST_VIDEO_DECODER (dec),              frame) < 0))    goto dropping_qos;  if (G_UNLIKELY ((img.width != dec->info.pic_width              || img.height != dec->info.pic_height)))    goto wrong_dimensions;  result = daala_handle_image (dec, &img, frame);  return result;  /* ERRORS */not_initialized:  {    GST_ELEMENT_ERROR (GST_ELEMENT (dec), STREAM, DECODE,        (NULL), ("no header sent yet"));    return GST_FLOW_ERROR;  }dropping:  {    GST_WARNING_OBJECT (dec, "dropping frame because we need a keyframe");    return GST_CUSTOM_FLOW_DROP;  }dropping_qos:  {    GST_WARNING_OBJECT (dec, "dropping frame because of QoS");    return GST_CUSTOM_FLOW_DROP;  }decode_error:  {    GST_ELEMENT_ERROR (GST_ELEMENT (dec), STREAM, DECODE,        (NULL), ("daala decoder did not decode data packet"));    return GST_FLOW_ERROR;  }wrong_dimensions:  {    GST_ELEMENT_ERROR (GST_ELEMENT (dec), STREAM, FORMAT,        (NULL), ("dimensions of image do not match header"));    return GST_FLOW_ERROR;  }}
开发者ID:asrashley,项目名称:gst-plugins-bad,代码行数:71,


示例27: qDebug

//.........这里部分代码省略.........  // Create encoding video pipeline  pipeline = gst_parse_launch(pipelineString.toUtf8(), &error);  if (!pipeline) {    qCritical("Failed to parse pipeline: %s", error->message);    g_error_free(error);    return false;  }  encoder = gst_bin_get_by_name(GST_BIN(pipeline), "encoder");  if (!encoder) {    qCritical("Failed to get encoder");    return false;  }  // Assuming here that X86 uses x264enc  if (hardware->getHardwareName() == "generic_x86") {    g_object_set(G_OBJECT(encoder), "speed-preset", 1, NULL); // ultrafast    g_object_set(G_OBJECT(encoder), "tune", 0x00000004, NULL); // zerolatency  }  if (hardware->getHardwareName() == "tegrak1" ||      hardware->getHardwareName() == "tegrax1") {    //g_object_set(G_OBJECT(encoder), "input-buffers", 2, NULL); // not valid on 1.0    //g_object_set(G_OBJECT(encoder), "output-buffers", 2, NULL); // not valid on 1.0    //g_object_set(G_OBJECT(encoder), "quality-level", 0, NULL);    //g_object_set(G_OBJECT(encoder), "rc-mode", 0, NULL);  }  if (hardware->getHardwareName() == "tegrax2") {    g_object_set(G_OBJECT(encoder), "preset-level", 0, NULL); // 0 == UltraFastPreset for high perf  }  setBitrate(bitrate);  {    GstElement *source;    source = gst_bin_get_by_name(GST_BIN(pipeline), "source");    if (!source) {      qCritical("Failed to get source");      return false;    }    g_object_set(G_OBJECT(source), "do-timestamp", true, NULL);    if (videoSource == "videotestsrc") {      g_object_set(G_OBJECT(source), "is-live", true, NULL);    } else if (videoSource == "v4l2src") {      //g_object_set(G_OBJECT(source), "always-copy", false, NULL);      const char *camera = "/dev/video0";      QByteArray env_camera = qgetenv("PLECO_SLAVE_CAMERA");      if (!env_camera.isNull()) {        camera = env_camera.data();      }      g_object_set(G_OBJECT(source), "device", camera, NULL);    }    if (hardware->getHardwareName() == "tegrak1" ||        hardware->getHardwareName() == "tegrax1") {      
g_object_set(G_OBJECT(source), "io-mode", 1, NULL);    }  }  sink = gst_bin_get_by_name(GST_BIN(pipeline), "sink");  if (!sink) {    qCritical("Failed to get sink");    return false;  }  // Set appsink callbacks  GstAppSinkCallbacks appSinkCallbacks;  appSinkCallbacks.eos             = NULL;  appSinkCallbacks.new_preroll     = NULL;  appSinkCallbacks.new_sample      = &newBufferCB;  gst_app_sink_set_callbacks(GST_APP_SINK(sink), &appSinkCallbacks, this, NULL);#if USE_TEE  // Callbacks for the OB process appsink  ob = gst_bin_get_by_name(GST_BIN(pipeline), "ob");  if (!ob) {    qCritical("Failed to get ob appsink");    return false;  }  // Set appsink callbacks  GstAppSinkCallbacks obCallbacks;  obCallbacks.eos             = NULL;  obCallbacks.new_preroll     = NULL;  obCallbacks.new_sample      = &newBufferOBCB;  gst_app_sink_set_callbacks(GST_APP_SINK(ob), &obCallbacks, this, NULL);#endif  // Start running   gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_PLAYING);  launchObjectDetection();  return true;}
开发者ID:kulve,项目名称:pleco,代码行数:101,



注:本文中的GST_ELEMENT函数示例整理自Github/MSDocs等源码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。


C++ GST_ELEMENT_CLASS函数代码示例
C++ GST_DECKLINK_SRC函数代码示例
万事OK自学网:51自学网_软件自学网_CAD自学网自学excel、自学PS、自学CAD、自学C语言、自学css3实例,是一个通过网络自主学习工作技能的自学平台,网友喜欢的软件自学网站。