您当前的位置:首页 > IT编程 > C++
| C语言 | Java | VB | VC | python | Android | TensorFlow | C++ | oracle | 学术与代码 | cnn卷积神经网络 | gnn | 图像修复 | Keras | 数据集 | Neo4j | 自然语言处理 | 深度学习 | 医学CAD | 医学影像 | 超参数 | pointnet | pytorch | 异常检测 | Transformers | 情感分类 | 知识图谱 |

自学教程:C++ GST_BIN函数代码示例

51自学网 2021-06-01 20:55:53
  C++
这篇教程C++ GST_BIN函数代码示例写得很实用,希望能帮到您。

本文整理汇总了C++中GST_BIN函数的典型用法代码示例。如果您正苦于以下问题:C++ GST_BIN函数的具体用法?C++ GST_BIN怎么用?C++ GST_BIN使用的例子?那么恭喜您, 这里精选的函数代码示例或许可以为您提供帮助。

在下文中一共展示了GST_BIN函数的30个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于我们的系统推荐出更棒的C++代码示例。

示例1: gst_nle_source_setup

static voidgst_nle_source_setup (GstNleSource * nlesrc){  GstElement *rotate, *videorate, *videoscale, *colorspace, *vident, *cairooverlay, *colorspace2;  GstElement *audiorate, *audioconvert, *audioresample, *aident;  GstElement *a_capsfilter, *v_capsfilter, *last;  GstPad *v_pad, *a_pad;  GstCaps *v_caps, *a_caps;  rotate = gst_element_factory_make ("flurotate", NULL);  videorate = gst_element_factory_make ("videorate", NULL);  nlesrc->videocrop = gst_element_factory_make ("videocrop", NULL);  videoscale = gst_element_factory_make ("videoscale", NULL);  colorspace = gst_element_factory_make ("ffmpegcolorspace", NULL);  v_capsfilter = gst_element_factory_make ("capsfilter", "video_capsfilter");  nlesrc->textoverlay = gst_element_factory_make ("textoverlay", NULL);  cairooverlay = gst_element_factory_make ("cairooverlay", "overlay");  colorspace2 = gst_element_factory_make ("ffmpegcolorspace", NULL);  vident = gst_element_factory_make ("identity", NULL);  v_caps = gst_caps_new_simple ("video/x-raw-yuv",      "format", GST_TYPE_FOURCC, GST_STR_FOURCC ("I420"),      "width", G_TYPE_INT, (gint) nlesrc->width,      "height", G_TYPE_INT, (gint) nlesrc->height,      "pixel-aspect-ratio", GST_TYPE_FRACTION, 1, 1,      "framerate", GST_TYPE_FRACTION,      (gint) nlesrc->fps_n, (gint) nlesrc->fps_d, NULL);  if (rotate) {    gst_caps_set_simple (v_caps, "rotation", G_TYPE_INT, (gint) 0, NULL);  } else {    rotate = gst_element_factory_make ("identity", NULL);   }  gst_pad_set_caps (nlesrc->video_srcpad, v_caps);  g_object_set (videoscale, "add-borders", TRUE, NULL);  g_object_set (vident, "single-segment", TRUE, NULL);  g_object_set (v_capsfilter, "caps", v_caps, NULL);  g_object_set (nlesrc->textoverlay, "valignment", 2, "halignment", 0,      "auto-resize", TRUE, "wrap-mode", 0, "silent", !nlesrc->overlay_title,      NULL);  g_signal_connect (cairooverlay, "draw",      G_CALLBACK (gst_nle_source_draw_overlay), nlesrc);  /* As videorate can duplicate a lot of buffers we want 
to put it last in this     transformation bin */  gst_bin_add_many (GST_BIN (nlesrc), rotate, nlesrc->videocrop,      videoscale, colorspace, nlesrc->textoverlay, videorate, v_capsfilter,      vident, NULL);  /* cairooverlay forces a colorpsace conversion ro RGB that we want to avoid   * when we are not rendering the watermark */  if (nlesrc->watermark != NULL) {    gst_bin_add_many (GST_BIN (nlesrc), cairooverlay, colorspace2, NULL);  }  gst_element_link_many (rotate, nlesrc->videocrop, videoscale, colorspace,      nlesrc->textoverlay, NULL);  if (nlesrc->watermark != NULL) {    gst_element_link_many (nlesrc->textoverlay, cairooverlay, colorspace2, NULL);    last = colorspace2;  } else {    last = nlesrc->textoverlay;  }  gst_element_link_many (last, videorate, v_capsfilter, vident, NULL);  /* Ghost source and sink pads */  v_pad = gst_element_get_pad (vident, "src");  gst_ghost_pad_set_target (GST_GHOST_PAD (nlesrc->video_srcpad), v_pad);  gst_object_unref (v_pad);  v_pad = gst_element_get_pad (rotate, "sink");  gst_ghost_pad_set_target (GST_GHOST_PAD (nlesrc->video_sinkpad), v_pad);  gst_object_unref (v_pad);  if (nlesrc->with_audio) {    audiorate = gst_element_factory_make ("audiorate", NULL);    audioconvert = gst_element_factory_make ("audioconvert", NULL);    audioresample = gst_element_factory_make ("audioresample", NULL);    a_capsfilter = gst_element_factory_make ("capsfilter", NULL);    aident = gst_element_factory_make ("identity", NULL);    gst_bin_add_many (GST_BIN (nlesrc), audioresample, audioconvert,        audiorate, a_capsfilter, aident, NULL);    gst_element_link_many (audioconvert, audioresample,        audiorate, a_capsfilter, aident, NULL);    a_caps = gst_nle_source_get_audio_caps (nlesrc);    gst_pad_set_caps (nlesrc->audio_srcpad, a_caps);    g_object_set (a_capsfilter, "caps", a_caps, NULL);    g_object_set (aident, "single-segment", TRUE, NULL);    /* Ghost sink and source pads */    a_pad = gst_element_get_pad (aident, "src");    
gst_ghost_pad_set_target (GST_GHOST_PAD (nlesrc->audio_srcpad), a_pad);//.........这里部分代码省略.........
开发者ID:fluendo,项目名称:VAS,代码行数:101,


示例2: transcode_file

/*
 * transcode_file:
 * @uri: input URI handed to uridecodebin
 * @outputuri: output URI from which the sink element is created
 * @prof: encoding profile applied to encodebin
 *
 * Builds a uridecodebin ! encodebin ! sink pipeline, runs it in a
 * GMainLoop until bus_message_cb() quits the loop (EOS or error),
 * then tears the pipeline down again.  Streams whose caps already
 * match the profile's input caps are passed through un-re-encoded.
 */
static void
transcode_file (gchar * uri, gchar * outputuri, GstEncodingProfile * prof)
{
  GstElement *pipeline;
  GstElement *src;
  GstElement *ebin;
  GstElement *sink;
  GstBus *bus;
  GstCaps *profilecaps, *rescaps;
  GMainLoop *mainloop;

  g_print (" Input URI  : %s\n", uri);
  g_print (" Output URI : %s\n", outputuri);

  sink = gst_element_make_from_uri (GST_URI_SINK, outputuri, "sink");
  if (G_UNLIKELY (sink == NULL)) {
    g_print ("Can't create output sink, most likely invalid output URI !\n");
    return;
  }

  src = gst_element_factory_make ("uridecodebin", NULL);
  if (G_UNLIKELY (src == NULL)) {
    g_print ("Can't create uridecodebin for input URI, aborting!\n");
    gst_object_unref (sink);    /* fix: don't leak the sink on this path */
    return;
  }

  /* Figure out the streams that can be passed as-is to encodebin */
  g_object_get (src, "caps", &rescaps, NULL);
  rescaps = gst_caps_copy (rescaps);
  profilecaps = gst_encoding_profile_get_input_caps (prof);
  gst_caps_append (rescaps, profilecaps);

  /* Set properties */
  g_object_set (src, "uri", uri, "caps", rescaps, NULL);

  ebin = gst_element_factory_make ("encodebin", NULL);
  g_object_set (ebin, "profile", prof, NULL);

  g_signal_connect (src, "autoplug-continue", G_CALLBACK (autoplug_continue_cb),
      ebin);
  g_signal_connect (src, "pad-added", G_CALLBACK (pad_added_cb), ebin);

  pipeline = gst_pipeline_new ("encoding-pipeline");
  gst_bin_add_many (GST_BIN (pipeline), src, ebin, sink, NULL);
  gst_element_link (ebin, sink);

  mainloop = g_main_loop_new (NULL, FALSE);

  bus = gst_pipeline_get_bus ((GstPipeline *) pipeline);
  gst_bus_add_signal_watch (bus);
  g_signal_connect (bus, "message", G_CALLBACK (bus_message_cb), mainloop);

  if (gst_element_set_state (pipeline,
          GST_STATE_PLAYING) == GST_STATE_CHANGE_FAILURE) {
    g_print ("Failed to start the encoding\n");
    /* fix: release everything instead of leaking on the failure path */
    gst_bus_remove_signal_watch (bus);
    gst_object_unref (bus);
    g_main_loop_unref (mainloop);
    gst_object_unref (pipeline);
    return;
  }

  g_main_loop_run (mainloop);

  gst_element_set_state (pipeline, GST_STATE_NULL);
  /* fix: drop the bus/mainloop references the original leaked */
  gst_bus_remove_signal_watch (bus);
  gst_object_unref (bus);
  g_main_loop_unref (mainloop);
  gst_object_unref (pipeline);
}
开发者ID:luisbg,项目名称:gupnp-dlna,代码行数:65,


示例3: tsmf_gstreamer_pipeline_build

BOOL tsmf_gstreamer_pipeline_build(TSMFGstreamerDecoder* mdecoder){#if GST_VERSION_MAJOR > 0	const char* video = "appsrc name=videosource ! queue2 name=videoqueue ! decodebin name=videodecoder !";        const char* audio = "appsrc name=audiosource ! queue2 name=audioqueue ! decodebin name=audiodecoder ! audioconvert ! audiorate ! audioresample ! volume name=audiovolume !";#else	const char* video = "appsrc name=videosource ! queue2 name=videoqueue ! decodebin2 name=videodecoder !";	const char* audio = "appsrc name=audiosource ! queue2 name=audioqueue ! decodebin2 name=audiodecoder ! audioconvert ! audiorate ! audioresample ! volume name=audiovolume !";#endif	char pipeline[1024];	if (!mdecoder)		return FALSE;	/* TODO: Construction of the pipeline from a string allows easy overwrite with arguments.	 *       The only fixed elements necessary are appsrc and the volume element for audio streams.	 *       The rest could easily be provided in gstreamer pipeline notation from command line. */	if (mdecoder->media_type == TSMF_MAJOR_TYPE_VIDEO)		sprintf_s(pipeline, sizeof(pipeline), "%s %s name=videosink", video, tsmf_platform_get_video_sink());	else		sprintf_s(pipeline, sizeof(pipeline), "%s %s name=audiosink", audio, tsmf_platform_get_audio_sink());	DEBUG_TSMF("pipeline=%s", pipeline);	mdecoder->pipe = gst_parse_launch(pipeline, NULL);	if (!mdecoder->pipe)	{		WLog_ERR(TAG, "Failed to create new pipe");		return FALSE;	}	if (mdecoder->media_type == TSMF_MAJOR_TYPE_VIDEO)		mdecoder->src = gst_bin_get_by_name(GST_BIN(mdecoder->pipe), "videosource");	else		mdecoder->src = gst_bin_get_by_name(GST_BIN(mdecoder->pipe), "audiosource");	if (!mdecoder->src)	{		WLog_ERR(TAG, "Failed to get appsrc");		return FALSE;	}	if (mdecoder->media_type == TSMF_MAJOR_TYPE_VIDEO)		mdecoder->queue = gst_bin_get_by_name(GST_BIN(mdecoder->pipe), "videoqueue");	else		mdecoder->queue = gst_bin_get_by_name(GST_BIN(mdecoder->pipe), "audioqueue");	if (!mdecoder->queue)	{		WLog_ERR(TAG, "Failed to get 
queue");		return FALSE;	}	if (mdecoder->media_type == TSMF_MAJOR_TYPE_VIDEO)		mdecoder->outsink = gst_bin_get_by_name(GST_BIN(mdecoder->pipe), "videosink");	else		mdecoder->outsink = gst_bin_get_by_name(GST_BIN(mdecoder->pipe), "audiosink");	if (!mdecoder->outsink)	{		WLog_ERR(TAG, "Failed to get sink");		return FALSE;	}	g_signal_connect(mdecoder->outsink, "child-added", G_CALLBACK(cb_child_added), mdecoder);	if (mdecoder->media_type == TSMF_MAJOR_TYPE_AUDIO)	{		mdecoder->volume = gst_bin_get_by_name(GST_BIN(mdecoder->pipe), "audiovolume");		if (!mdecoder->volume)		{			WLog_ERR(TAG, "Failed to get volume");			return FALSE;		}		tsmf_gstreamer_change_volume((ITSMFDecoder*)mdecoder, mdecoder->gstVolume*((double) 10000), mdecoder->gstMuted);	}	tsmf_platform_register_handler(mdecoder);	/* AppSrc settings */	GstAppSrcCallbacks callbacks =	{		tsmf_gstreamer_need_data,		tsmf_gstreamer_enough_data,		tsmf_gstreamer_seek_data	};	g_object_set(mdecoder->src, "format", GST_FORMAT_TIME, NULL);	g_object_set(mdecoder->src, "is-live", FALSE, NULL);	g_object_set(mdecoder->src, "block", FALSE, NULL);	g_object_set(mdecoder->src, "blocksize", 1024, NULL);	gst_app_src_set_caps((GstAppSrc *) mdecoder->src, mdecoder->gst_caps);	gst_app_src_set_callbacks((GstAppSrc *)mdecoder->src, &callbacks, mdecoder, NULL);	gst_app_src_set_stream_type((GstAppSrc *) mdecoder->src, GST_APP_STREAM_TYPE_SEEKABLE);	gst_app_src_set_latency((GstAppSrc *) mdecoder->src, 0, -1);	gst_app_src_set_max_bytes((GstAppSrc *) mdecoder->src, (guint64) 0);//unlimited	g_object_set(G_OBJECT(mdecoder->queue), "use-buffering", FALSE, NULL);	g_object_set(G_OBJECT(mdecoder->queue), "use-rate-estimate", FALSE, NULL);	g_object_set(G_OBJECT(mdecoder->queue), "max-size-buffers", 0, NULL);	g_object_set(G_OBJECT(mdecoder->queue), "max-size-bytes", 0, NULL);//.........这里部分代码省略.........
开发者ID:JunaidLoonat,项目名称:FreeRDP,代码行数:101,


示例4: CV_FUNCNAME

bool CvVideoWriter_GStreamer::open( const char * filename, int fourcc,        double fps, CvSize frameSize, bool is_color ){    CV_FUNCNAME("CvVideoWriter_GStreamer::open");    __BEGIN__;    //actually doesn't support fourcc parameter and encode an avi with jpegenc    //we need to find a common api between backend to support fourcc for avi    //but also to choose in a common way codec and container format (ogg,dirac,matroska)    // check arguments    assert (filename);    assert (fps > 0);    assert (frameSize.width > 0  &&  frameSize.height > 0);    std::map<int,char*>::iterator encit;    encit=encs.find(fourcc);    if (encit==encs.end())        CV_ERROR( CV_StsUnsupportedFormat,"Gstreamer Opencv backend doesn't support this codec acutally.");//    if(!isInited) {//        gst_init (NULL, NULL);//        isInited = true;//    }    gst_initializer::init();    close();    source=gst_element_factory_make("appsrc",NULL);    file=gst_element_factory_make("filesink", NULL);    enc=gst_element_factory_make(encit->second, NULL);    mux=gst_element_factory_make("avimux", NULL);    color = gst_element_factory_make("ffmpegcolorspace", NULL);    if (!enc)        CV_ERROR( CV_StsUnsupportedFormat, "Your version of Gstreamer doesn't support this codec acutally or needed plugin missing.");    g_object_set(G_OBJECT(file), "location", filename, NULL);    pipeline = gst_pipeline_new (NULL);    GstCaps* caps;    if (is_color) {        input_pix_fmt=1;        caps= gst_video_format_new_caps(GST_VIDEO_FORMAT_BGR,                                        frameSize.width,                                        frameSize.height,                                        (int) (fps * 1000),                                        1000,                                        1,                                        1);    }    else  {        input_pix_fmt=0;        caps= gst_caps_new_simple("video/x-raw-gray",                                  "width", G_TYPE_INT, frameSize.width,                
                  "height", G_TYPE_INT, frameSize.height,                                  "framerate", GST_TYPE_FRACTION, int(fps),1,                                  "bpp",G_TYPE_INT,8,                                  "depth",G_TYPE_INT,8,                                  NULL);    }    gst_app_src_set_caps(GST_APP_SRC(source), caps);    if (fourcc==CV_FOURCC_DEFAULT) {        gst_bin_add_many(GST_BIN(pipeline), source, color,mux, file, NULL);        if(!gst_element_link_many(source,color,enc,mux,file,NULL)) {            CV_ERROR(CV_StsError, "GStreamer: cannot link elements/n");        }    }    else {        gst_bin_add_many(GST_BIN(pipeline), source, color,enc,mux, file, NULL);        if(!gst_element_link_many(source,color,enc,mux,file,NULL)) {            CV_ERROR(CV_StsError, "GStreamer: cannot link elements/n");        }    }    if(gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_PLAYING) ==        GST_STATE_CHANGE_FAILURE) {            CV_ERROR(CV_StsError, "GStreamer: cannot put pipeline to play/n");    }    __END__;    return true;}
开发者ID:4auka,项目名称:opencv,代码行数:76,


示例5: build_pipeline

/*
 * build_pipeline:
 * @data: shared player state; this function serialises on data->mutex
 *
 * Drops any previous pipeline and rebuilds the playback chain
 *   uridecodebin -> queue2 -> typefind -> audioconvert
 *     -> audioresample -> volume -> autoaudiosink
 * then connects the pad-added/have-type handlers and the bus message
 * callbacks, leaving the new pipeline in READY.  The uridecodebin
 * source is linked later from pad_added_handler() once its pads appear.
 */
void build_pipeline(CustomData *data)
{
	GstBus *bus;

	data->count_buffer_fill = 0;
	data->no_buffer_fill = 0;
	data->buffer_is_slow = 0;
	data->counter = 0;
	pthread_mutex_lock(&data->mutex);
	gst_element_set_state(data->pipeline, GST_STATE_NULL);
	kill_object(data->pipeline);
	/* NOTE(review): BUFFER_SLOW is reported on every rebuild — looks
	 * like a deliberate reset notification, but worth confirming. */
	gplayer_error(BUFFER_SLOW, data);
	data->delta_index = 0;
	data->last_buffer_load = 0;
	data->buffering_time = 0;
	data->flow_error = FALSE;
	data->pipeline = gst_pipeline_new("test-pipeline");
	data->allow_seek = FALSE;

	/* Build pipeline */
	data->source = gst_element_factory_make("uridecodebin", "source");
	data->resample = gst_element_factory_make("audioresample", "resample");
	data->typefinder = gst_element_factory_make("typefind", "typefind");
	data->buffer = gst_element_factory_make("queue2", "buffer");
	data->convert = gst_element_factory_make("audioconvert", "convert");
	data->volume = gst_element_factory_make("volume", "volume");
	data->sink = gst_element_factory_make("autoaudiosink", "sink");
	if (!data->pipeline || !data->resample || !data->source || !data->convert || !data->buffer || !data->typefinder || !data->volume || !data->sink)
	{
		gplayer_error(-1, data);
		GPlayerDEBUG("Not all elements could be created.\n");
		pthread_mutex_unlock(&data->mutex);
		return;
	}

	gst_bin_add_many(GST_BIN(data->pipeline), data->source, data->buffer, data->typefinder, data->convert, data->resample, data->volume, data->sink,
	NULL);
	if (!gst_element_link(data->buffer, data->typefinder) || !gst_element_link(data->typefinder, data->convert)
			|| !gst_element_link(data->convert, data->resample) || !gst_element_link(data->resample, data->volume)
			|| !gst_element_link(data->volume, data->sink))
	{
		GPlayerDEBUG("Elements could not be linked.\n");
		kill_object(data->pipeline);
		pthread_mutex_unlock(&data->mutex);
		return;
	}

	g_signal_connect(data->source, "pad-added", (GCallback ) pad_added_handler, data);
	g_signal_connect(data->typefinder, "have-type", (GCallback ) cb_typefound, data);

	data->target_state = GST_STATE_READY;
	gst_element_set_state(data->pipeline, GST_STATE_READY);
	/* NOTE(review): no gst_bus_add_signal_watch() here — the
	 * "message::*" signals only fire if a signal watch is installed
	 * somewhere; verify it happens elsewhere in this player. */
	bus = gst_element_get_bus(data->pipeline);
	g_signal_connect(G_OBJECT(bus), "message::error", (GCallback ) error_cb, data);
	g_signal_connect(G_OBJECT(bus), "message::eos", (GCallback ) eos_cb, data);
	g_signal_connect(G_OBJECT(bus), "message::tag", (GCallback ) tag_cb, data);
	g_signal_connect(G_OBJECT(bus), "message::state-changed", (GCallback ) state_changed_cb, data);
	g_signal_connect(G_OBJECT(bus), "message::clock-lost", (GCallback ) clock_lost_cb, data);
	kill_object(bus);
	pthread_mutex_unlock(&data->mutex);
}
开发者ID:profrook,项目名称:GPlayer,代码行数:69,


示例6: main

int main(int argc, char *argv[]) {  GstElement *pipeline, *audio_source, *tee, *audio_queue, *audio_convert, *audio_resample, *audio_sink;  GstElement *video_queue, *visual, *video_convert, *video_sink;  GstBus *bus;  GstMessage *msg;  GstPadTemplate *tee_src_pad_template;  GstPad *tee_audio_pad, *tee_video_pad;  GstPad *queue_audio_pad, *queue_video_pad;  /* Initialize GStreamer */  gst_init (&argc, &argv);  /* Create the elements */  audio_source = gst_element_factory_make ("audiotestsrc", "audio_source");  tee = gst_element_factory_make ("tee", "tee");  audio_queue = gst_element_factory_make ("queue", "audio_queue");  audio_convert = gst_element_factory_make ("audioconvert", "audio_convert");  audio_resample = gst_element_factory_make ("audioresample", "audio_resample");  audio_sink = gst_element_factory_make ("autoaudiosink", "audio_sink");  video_queue = gst_element_factory_make ("queue", "video_queue");  visual = gst_element_factory_make ("wavescope", "visual");  video_convert = gst_element_factory_make ("videoconvert", "video_convert");  video_sink = gst_element_factory_make ("autovideosink", "video_sink");  /* Create the empty pipeline */  pipeline = gst_pipeline_new ("test-pipeline");  if (!pipeline || !audio_source || !tee || !audio_queue || !audio_convert || !audio_resample || !audio_sink ||      !video_queue || !visual || !video_convert || !video_sink) {    g_printerr ("Not all elements could be created./n");    return -1;  }  /* Configure elements */  g_object_set (audio_source, "freq", 215.0f, NULL);  g_object_set (visual, "shader", 0, "style", 1, NULL);  /* Link all elements that can be automatically linked because they have "Always" pads */  gst_bin_add_many (GST_BIN (pipeline), audio_source, tee, audio_queue, audio_convert, audio_resample, audio_sink,      video_queue, visual, video_convert, video_sink, NULL);  if (gst_element_link_many (audio_source, tee, NULL) != TRUE ||      gst_element_link_many (audio_queue, audio_convert, audio_resample, 
audio_sink, NULL) != TRUE ||      gst_element_link_many (video_queue, visual, video_convert, video_sink, NULL) != TRUE) {    g_printerr ("Elements could not be linked./n");    gst_object_unref (pipeline);    return -1;  }  /* Manually link the Tee, which has "Request" pads */  tee_src_pad_template = gst_element_class_get_pad_template (GST_ELEMENT_GET_CLASS (tee), "src_%u");  tee_audio_pad = gst_element_request_pad (tee, tee_src_pad_template, NULL, NULL);  g_print ("Obtained request pad %s for audio branch./n", gst_pad_get_name (tee_audio_pad));  queue_audio_pad = gst_element_get_static_pad (audio_queue, "sink");  tee_video_pad = gst_element_request_pad (tee, tee_src_pad_template, NULL, NULL);  g_print ("Obtained request pad %s for video branch./n", gst_pad_get_name (tee_video_pad));  queue_video_pad = gst_element_get_static_pad (video_queue, "sink");  if (gst_pad_link (tee_audio_pad, queue_audio_pad) != GST_PAD_LINK_OK ||      gst_pad_link (tee_video_pad, queue_video_pad) != GST_PAD_LINK_OK) {    g_printerr ("Tee could not be linked./n");    gst_object_unref (pipeline);    return -1;  }  gst_object_unref (queue_audio_pad);  gst_object_unref (queue_video_pad);  /* Start playing the pipeline */  gst_element_set_state (pipeline, GST_STATE_PLAYING);  /* Wait until error or EOS */  bus = gst_element_get_bus (pipeline);  msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE, GST_MESSAGE_ERROR | GST_MESSAGE_EOS);  /* Release the request pads from the Tee, and unref them */  gst_element_release_request_pad (tee, tee_audio_pad);  gst_element_release_request_pad (tee, tee_video_pad);  gst_object_unref (tee_audio_pad);  gst_object_unref (tee_video_pad);  /* Free resources */  if (msg != NULL)    gst_message_unref (msg);  gst_object_unref (bus);  gst_element_set_state (pipeline, GST_STATE_NULL);  gst_object_unref (pipeline);  return 0;}
开发者ID:johlim,项目名称:study,代码行数:87,


示例7: gst_nle_source_next

/*
 * gst_nle_source_next:
 * Advance the playlist to the next GstNleSrcItem: tear down the
 * previous decoder pipeline, build a fresh uridecodebin-based one,
 * reset the per-item bookkeeping and, for non-still items with a valid
 * stop time, issue an accurate segment seek.  Pushes EOS downstream
 * once the queue is exhausted.
 */
static void
gst_nle_source_next (GstNleSource * self)
{
  GstNleSrcItem *entry;
  GstStateChangeReturn sret;
  GstElement *decodebin;
  GstBus *bus;
  GstState cur_state;

  self->index++;

  /* Past the last queued item: signal EOS and stop. */
  if (self->index >= g_list_length (self->queue)) {
    gst_nle_source_push_eos (self);
    return;
  }

  if (self->source != NULL) {
    gst_object_unref (self->source);
    self->source = NULL;
  }

  /* Shut the previous decoder down synchronously before dropping it. */
  if (self->decoder != NULL) {
    gst_element_set_state (GST_ELEMENT (self->decoder), GST_STATE_NULL);
    gst_element_get_state (GST_ELEMENT (self->decoder), NULL, NULL, 0);
    gst_object_unref (self->decoder);
  }

  self->decoder = gst_pipeline_new ("decoder");
  decodebin = gst_element_factory_make ("uridecodebin", NULL);
  /* Connect signal to recover source element for queries in bytes */
  g_signal_connect (decodebin, "source-setup",
      G_CALLBACK (gst_nle_source_on_source_setup), self);
  gst_bin_add (GST_BIN (self->decoder), decodebin);

  g_signal_connect (decodebin, "autoplug-select",
      G_CALLBACK (lgm_filter_video_decoders), self);
  g_signal_connect (decodebin, "pad-added",
      G_CALLBACK (gst_nle_source_pad_added_cb), self);
  g_signal_connect (decodebin, "no-more-pads",
      G_CALLBACK (gst_nle_source_no_more_pads), self);

  bus = GST_ELEMENT_BUS (self->decoder);
  gst_bus_add_signal_watch (bus);
  g_signal_connect (bus, "message", G_CALLBACK (gst_nle_source_bus_message),
      self);

  entry = (GstNleSrcItem *) g_list_nth_data (self->queue, self->index);

  GST_INFO_OBJECT (self, "Starting next item with uri:%s", entry->file_path);
  GST_INFO_OBJECT (self, "start:%" GST_TIME_FORMAT " stop:%"
      GST_TIME_FORMAT " rate:%f", GST_TIME_ARGS (entry->start),
      GST_TIME_ARGS (entry->stop), entry->rate);

  g_object_set (decodebin, "uri", entry->file_path, NULL);

  /* Seeks are only pending for items with a bounded stop time. */
  self->seek_done = FALSE;
  if (GST_CLOCK_TIME_IS_VALID (entry->stop)) {
    self->video_seek_done = FALSE;
    self->audio_seek_done = FALSE;
  } else {
    self->video_seek_done = TRUE;
    self->audio_seek_done = TRUE;
  }
  self->audio_eos = TRUE;
  self->video_eos = TRUE;
  self->audio_ts = 0;
  self->video_ts = 0;
  self->start_ts = self->accu_time;
  self->video_linked = FALSE;
  self->audio_linked = FALSE;
  self->item_setup = FALSE;
  self->cached_duration = 0;

  GST_DEBUG_OBJECT (self, "Start ts:%" GST_TIME_FORMAT,
      GST_TIME_ARGS (self->start_ts));

  gst_element_set_state (self->decoder, GST_STATE_PLAYING);
  /* Wait up to 5s for PLAYING; on failure skip ahead to the next item. */
  sret = gst_element_get_state (self->decoder, &cur_state, NULL,
      5 * GST_SECOND);
  if (sret == GST_STATE_CHANGE_FAILURE) {
    GST_WARNING_OBJECT (self, "Error changing state, selecting next item.");
    gst_nle_source_check_eos (self);
    return;
  }

  self->seek_done = TRUE;
  if (!entry->still_picture && GST_CLOCK_TIME_IS_VALID (entry->stop)) {
    GST_DEBUG_OBJECT (self, "Sending seek event");
    gst_element_seek (self->decoder, 1, GST_FORMAT_TIME,
        GST_SEEK_FLAG_ACCURATE,
        GST_SEEK_TYPE_SET, entry->start, GST_SEEK_TYPE_SET, entry->stop);
  }
}
开发者ID:fluendo,项目名称:VAS,代码行数:91,


示例8: main

intmain (int argc, char *argv[]){  GstElement *bin;  GstElement *decodebin, *decconvert;  GstElement *capsfilter, *equalizer, *spectrum, *sinkconvert, *sink;  GstCaps *caps;  GstBus *bus;  GtkWidget *appwindow, *vbox, *hbox, *scale;  int i, num_bands = NBANDS;  GOptionEntry options[] = {    {"bands", 'b', 0, G_OPTION_ARG_INT, &num_bands,        "Number of bands", NULL},    {NULL}  };  GOptionContext *ctx;  GError *err = NULL;  ctx = g_option_context_new ("- demo of audio equalizer");  g_option_context_add_main_entries (ctx, options, NULL);  g_option_context_add_group (ctx, gst_init_get_option_group ());  g_option_context_add_group (ctx, gtk_get_option_group (TRUE));  if (!g_option_context_parse (ctx, &argc, &argv, &err)) {    g_print ("Error initializing: %s/n", err->message);    g_option_context_free (ctx);    g_clear_error (&err);    exit (1);  }  g_option_context_free (ctx);  if (argc < 2) {    g_print ("Usage: %s <uri to play>/n", argv[0]);    g_print ("    For optional arguments: --help/n");    exit (-1);  }  gst_init (&argc, &argv);  gtk_init (&argc, &argv);  bin = gst_pipeline_new ("bin");  /* Uri decoding */  decodebin = gst_element_factory_make ("uridecodebin", "decoder");  g_object_set (G_OBJECT (decodebin), "uri", argv[1], NULL);  /* Force float32 samples */  decconvert = gst_element_factory_make ("audioconvert", "decconvert");  capsfilter = gst_element_factory_make ("capsfilter", "capsfilter");  caps =      gst_caps_new_simple ("audio/x-raw", "format", G_TYPE_STRING, "F32LE",      NULL);  g_object_set (capsfilter, "caps", caps, NULL);  equalizer = gst_element_factory_make ("equalizer-nbands", "equalizer");  g_object_set (G_OBJECT (equalizer), "num-bands", num_bands, NULL);  spectrum = gst_element_factory_make ("spectrum", "spectrum");  g_object_set (G_OBJECT (spectrum), "bands", spect_bands, "threshold", -80,      "post-messages", TRUE, "interval", 500 * GST_MSECOND, NULL);  sinkconvert = gst_element_factory_make ("audioconvert", "sinkconvert");  sink = 
gst_element_factory_make ("autoaudiosink", "sink");  gst_bin_add_many (GST_BIN (bin), decodebin, decconvert, capsfilter, equalizer,      spectrum, sinkconvert, sink, NULL);  if (!gst_element_link_many (decconvert, capsfilter, equalizer, spectrum,          sinkconvert, sink, NULL)) {    fprintf (stderr, "can't link elements/n");    exit (1);  }  /* Handle dynamic pads */  g_signal_connect (G_OBJECT (decodebin), "pad-added",      G_CALLBACK (dynamic_link), gst_element_get_static_pad (decconvert,          "sink"));  bus = gst_element_get_bus (bin);  gst_bus_add_watch (bus, message_handler, NULL);  gst_object_unref (bus);  appwindow = gtk_window_new (GTK_WINDOW_TOPLEVEL);  gtk_window_set_title (GTK_WINDOW (appwindow), "Equalizer Demo");  g_signal_connect (G_OBJECT (appwindow), "destroy",      G_CALLBACK (on_window_destroy), NULL);  vbox = gtk_box_new (GTK_ORIENTATION_VERTICAL, 6);  drawingarea = gtk_drawing_area_new ();  gtk_widget_set_size_request (drawingarea, spect_bands, spect_height);  g_signal_connect (G_OBJECT (drawingarea), "configure-event",      G_CALLBACK (on_configure_event), (gpointer) spectrum);  gtk_box_pack_start (GTK_BOX (vbox), drawingarea, TRUE, TRUE, 0);  hbox = gtk_box_new (GTK_ORIENTATION_HORIZONTAL, 20);  for (i = 0; i < num_bands; i++) {    GObject *band;    gdouble freq;//.........这里部分代码省略.........
开发者ID:GrokImageCompression,项目名称:gst-plugins-good,代码行数:101,


示例9: gst_parse_launch_full

GstElement *create_video_sink(){	GstElement *bin, *decoder = NULL;	GstIterator *iter;	GstIteratorResult res;	GError *error = NULL;	GstPad *pad;	gpointer element = NULL;	const char* decoder_name;#ifndef DESKTOP 	/* create pipeline */                                                                                 	decoder_name = "tividdec20";	bin = gst_parse_launch_full("TIViddec2 genTimeStamps=FALSE /			    engineName=decode /			    codecName=h264dec numFrames=-1 /			! videoscale method=0 /			! video/x-raw-yuv, format=(fourcc)I420, width=320, height=240 /			! ffmpegcolorspace /			! video/x-raw-rgb, bpp=16 /			! TIDmaiVideoSink displayStd=fbdev displayDevice=/dev/fb0 videoStd=QVGA /			    videoOutput=LCD resizer=FALSE accelFrameCopy=TRUE",			NULL, 0, &error);                                      #else	decoder_name = "decodebin";	bin = gst_parse_launch_full("decodebin /			! videoscale method=0 /			! video/x-raw-yuv, format=(fourcc)I420, width=320, height=240 /			! xvimagesink",			NULL, 0, &error);                                      #endif	if (!bin) {		g_error("GStreamer: failed to parse video sink pipeline/n");		return NULL;	}              	gst_object_set_name(GST_OBJECT(bin), "video-sink");	iter = gst_bin_iterate_elements(GST_BIN(bin));	res = gst_iterator_next (iter, &element);	while (res == GST_ITERATOR_OK) {		gchar *name;		name = gst_object_get_name(GST_OBJECT (element));		if (name) {			if (!strncmp(name, decoder_name, strlen(decoder_name))) {				decoder = GST_ELEMENT(element); 			}			g_printf("GS: video sink element: %s /n", name);			g_free (name);		}		gst_object_unref (element);		element = NULL;		res = gst_iterator_next (iter, &element);	}	gst_iterator_free (iter);	if (!decoder) {		/* mem leak */		g_printf("decoder element not found/n");		return NULL;	}	/* add ghostpad */	pad = gst_element_get_static_pad (decoder, "sink");	gst_element_add_pad(bin, gst_ghost_pad_new("sink", pad));	gst_object_unref(GST_OBJECT(pad));	return bin;}
开发者ID:afenkart,项目名称:ti_gstreamer,代码行数:73,


示例10: OpenDecoder

//.........这里部分代码省略.........            VLC_ENOMOD );    g_object_set( G_OBJECT( p_sys->p_decode_src ), "caps", caps.p_sinkcaps,            "emit-signals", TRUE, "format", GST_FORMAT_BYTES,            "stream-type", GST_APP_STREAM_TYPE_SEEKABLE,            /* Making DecodeBlock() to block on appsrc with max queue size of 1 byte.             * This will make the push_buffer() tightly coupled with the buffer             * flow from appsrc -> decoder. push_buffer() will only return when             * the same buffer it just fed to appsrc has also been fed to the             * decoder element as well */            "block", TRUE, "max-bytes", ( guint64 )1, NULL );    gst_caps_unref( caps.p_sinkcaps );    caps.p_sinkcaps = NULL;    cb.enough_data = NULL;    cb.need_data = NULL;    cb.seek_data = seek_data_cb;    gst_app_src_set_callbacks( GST_APP_SRC( p_sys->p_decode_src ),            &cb, p_dec, NULL );    if( dbin )    {        p_sys->p_decode_in = gst_element_factory_make( "decodebin", NULL );        VLC_GST_CHECK( p_sys->p_decode_in, NULL, "decodebin not found",                VLC_ENOMOD );        //g_object_set( G_OBJECT( p_sys->p_decode_in ),        //"max-size-buffers", 2, NULL );        //g_signal_connect( G_OBJECT( p_sys->p_decode_in ), "no-more-pads",                //G_CALLBACK( no_more_pads_cb ), p_dec );        g_signal_connect( G_OBJECT( p_sys->p_decode_in ), "pad-added",                G_CALLBACK( pad_added_cb ), p_dec );    }    /* videosink: will emit signal for every available buffer */    p_sys->p_decode_out = gst_element_factory_make( "vlcvideosink", NULL );    VLC_GST_CHECK( p_sys->p_decode_out, NULL, "vlcvideosink not found",            VLC_ENOMOD );    p_sys->p_allocator = gst_vlc_picture_plane_allocator_new(            (gpointer) p_dec );    g_object_set( G_OBJECT( p_sys->p_decode_out ), "sync", FALSE, "allocator",            p_sys->p_allocator, "id", (gpointer) p_dec, NULL );    g_signal_connect( G_OBJECT( p_sys->p_decode_out ), "new-buffer",     
       G_CALLBACK( frame_handoff_cb ), p_dec );    //FIXME: caps_signal#if 0    g_signal_connect( G_OBJECT( p_sys->p_decode_out ), "new-caps",            G_CALLBACK( caps_handoff_cb ), p_dec );#else    GST_VLC_VIDEO_SINK( p_sys->p_decode_out )->new_caps = caps_handoff_cb;#endif    p_sys->p_decoder = GST_ELEMENT( gst_bin_new( "decoder" ) );    VLC_GST_CHECK( p_sys->p_decoder, NULL, "bin not found", VLC_ENOMOD );    p_sys->p_bus = gst_bus_new( );    VLC_GST_CHECK( p_sys->p_bus, NULL, "failed to create bus",            VLC_ENOMOD );    gst_element_set_bus( p_sys->p_decoder, p_sys->p_bus );    gst_bin_add_many( GST_BIN( p_sys->p_decoder ),            p_sys->p_decode_src, p_sys->p_decode_in,            p_sys->p_decode_out, NULL );    gst_object_ref( p_sys->p_decode_src );    gst_object_ref( p_sys->p_decode_in );    gst_object_ref( p_sys->p_decode_out );    b_ret = gst_element_link( p_sys->p_decode_src, p_sys->p_decode_in );    VLC_GST_CHECK( b_ret, FALSE, "failed to link src <-> in",            VLC_EGENERIC );    if( !dbin )    {        b_ret = gst_element_link( p_sys->p_decode_in, p_sys->p_decode_out );        VLC_GST_CHECK( b_ret, FALSE, "failed to link in <-> out",                VLC_EGENERIC );    }    p_dec->fmt_out.i_cat = p_dec->fmt_in.i_cat;    /* set the pipeline to playing */    i_ret = gst_element_set_state( p_sys->p_decoder, GST_STATE_PLAYING );    VLC_GST_CHECK( i_ret, GST_STATE_CHANGE_FAILURE,            "set state failure", VLC_EGENERIC );    p_sys->b_running = true;    /* Set callbacks */    p_dec->pf_decode_video = DecodeBlock;    p_dec->pf_flush        = Flush;    return VLC_SUCCESS;fail:    if( caps.p_sinkcaps )        gst_caps_unref( caps.p_sinkcaps );    if( caps.p_srccaps )        gst_caps_unref( caps.p_srccaps );    if( p_list )        gst_plugin_feature_list_free( p_list );    CloseDecoder( ( vlc_object_t* )p_dec );    return i_rval;}
开发者ID:CityFire,项目名称:vlc,代码行数:101,


示例11: main

//.........这里部分代码省略.........        strncpy (input, optarg, sizeof (input) / sizeof (input[0]));        break;      case 'f':        frequency = atol (optarg);        break;      case 'h':        printf ("Usage: v4l2src-test [OPTION].../n");        for (c = 0; long_options[c].name; ++c) {          printf ("-%c, --%s/r/t/t/t/t%s/n", long_options[c].val,              long_options[c].name, long_options_desc[c]);        }        exit (0);        break;      case '?':        /* getopt_long already printed an error message. */        printf ("Use -h to see help message./n");        break;      default:        abort ();    }  }  /* Print any remaining command line arguments (not options). */  if (optind < argc) {    printf ("Use -h to see help message./n" "non-option ARGV-elements: ");    while (optind < argc)      printf ("%s ", argv[optind++]);    putchar ('/n');  }  /* init */  gst_init (&argc, &argv);  /* create elements */  if (!(pipeline = gst_pipeline_new ("my_pipeline"))) {    fprintf (stderr, "error: gst_pipeline_new return NULL");    return -1;  }  if (!(source = gst_element_factory_make ("v4l2src", NULL))) {    fprintf (stderr,        "error: gst_element_factory_make (/"v4l2src/", NULL) return NULL");    return -1;  }  if (!(sink = gst_element_factory_make ("xvimagesink", NULL))) {    fprintf (stderr,        "error: gst_element_factory_make (/"xvimagesink/", NULL) return NULL");    return -1;  }  if (numbuffers > -1) {    g_object_set (source, "num-buffers", numbuffers, NULL);  }  if (device[0]) {    g_object_set (source, "device", device, NULL);  }  if (input[0]) {    g_object_set (source, "input", input, NULL);  }  if (frequency) {    g_object_set (source, "frequency", frequency, NULL);  }  /* you would normally check that the elements were created properly */  bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));  gst_bus_add_watch (bus, my_bus_callback, NULL);  /* put together a pipeline */  gst_bin_add_many (GST_BIN (pipeline), source, sink, NULL);  
gst_element_link_pads (source, "src", sink, "sink");  /* start the pipeline */  gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_PLAYING);  loop = g_main_loop_new (NULL, FALSE);  input_thread = g_thread_try_new ("v4l2src-test", read_user, source, NULL);  if (input_thread == NULL) {    fprintf (stderr, "error: g_thread_try_new() failed");    return -1;  }  g_main_loop_run (loop);  g_thread_join (input_thread);  gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_NULL);  gst_object_unref (bus);  gst_object_unref (pipeline);  gst_deinit ();  return 0;}
开发者ID:BigBrother-International,项目名称:gst-plugins-good,代码行数:101,


示例12:

/*
 * Fetch a named child element from the global video bin.
 *
 * Thin wrapper around gst_bin_get_by_name() on ly_ppl_video_bin; returns
 * the matching element (new reference per the GstBin API) or NULL when no
 * child carries that name.
 */
GstElement*
ly_ppl_video_get_element (char *name)
{
	return gst_bin_get_by_name (GST_BIN (ly_ppl_video_bin), name);
}
开发者ID:lovesnow,项目名称:linnya,代码行数:6,


示例13: qDebug

//.........这里部分代码省略.........  pipelineString.append("/"");#if USE_TEE  pipelineString.append(" ! ");  pipelineString.append("tee name=scripttee");  // FIXME: does this case latency?  pipelineString.append(" ! ");  pipelineString.append("queue");#endif  pipelineString.append(" ! ");  pipelineString.append(hardware->getEncodingPipeline());  pipelineString.append(" ! ");  pipelineString.append("rtph264pay name=rtppay config-interval=1 mtu=500");  pipelineString.append(" ! ");  pipelineString.append("appsink name=sink sync=false max-buffers=1 drop=true");#if USE_TEE  // Tee (branch) frames for external components  pipelineString.append(" scripttee. ");  // TODO: downscale to 320x240?  pipelineString.append(" ! ");  pipelineString.append("appsink name=ob sync=false max-buffers=1 drop=true");#endif  qDebug() << "Using pipeline:" << pipelineString;  // Create encoding video pipeline  pipeline = gst_parse_launch(pipelineString.toUtf8(), &error);  if (!pipeline) {    qCritical("Failed to parse pipeline: %s", error->message);    g_error_free(error);    return false;  }  encoder = gst_bin_get_by_name(GST_BIN(pipeline), "encoder");  if (!encoder) {    qCritical("Failed to get encoder");    return false;  }  // Assuming here that X86 uses x264enc  if (hardware->getHardwareName() == "generic_x86") {    g_object_set(G_OBJECT(encoder), "speed-preset", 1, NULL); // ultrafast    g_object_set(G_OBJECT(encoder), "tune", 0x00000004, NULL); // zerolatency  }  if (hardware->getHardwareName() == "tegrak1" ||      hardware->getHardwareName() == "tegrax1") {    //g_object_set(G_OBJECT(encoder), "input-buffers", 2, NULL); // not valid on 1.0    //g_object_set(G_OBJECT(encoder), "output-buffers", 2, NULL); // not valid on 1.0    //g_object_set(G_OBJECT(encoder), "quality-level", 0, NULL);    //g_object_set(G_OBJECT(encoder), "rc-mode", 0, NULL);  }  if (hardware->getHardwareName() == "tegrax2") {    g_object_set(G_OBJECT(encoder), "preset-level", 0, NULL); // 0 == UltraFastPreset for high 
perf  }  setBitrate(bitrate);  {    GstElement *source;    source = gst_bin_get_by_name(GST_BIN(pipeline), "source");    if (!source) {      qCritical("Failed to get source");      return false;    }
开发者ID:kulve,项目名称:pleco,代码行数:67,


示例14: ges_source_create_topbin

/*
 * ges_source_create_topbin:
 * @bin_name: name for the new top-level bin
 * @sub_element: the source element, added first
 * @...: NULL-terminated list of additional elements to chain after it
 *
 * Puts @sub_element plus the varargs elements into a new GstBin, links the
 * varargs elements together in the order given, and exposes a single "src"
 * ghost pad on the bin.  Linking @sub_element to the chain (or ghosting its
 * pad directly when there is no chain) happens immediately if it already has
 * a static "src" pad, otherwise it is deferred to its "pad-added" signal.
 *
 * Returns: the newly created bin.
 */
GstElement *
ges_source_create_topbin (const gchar * bin_name, GstElement * sub_element, ...)
{
  va_list argp;
  GstElement *element;
  GstElement *prev = NULL;   /* last element of the varargs chain */
  GstElement *first = NULL;  /* first element of the varargs chain */
  GstElement *bin;
  GstPad *sub_srcpad;

  va_start (argp, sub_element);
  bin = gst_bin_new (bin_name);
  gst_bin_add (GST_BIN (bin), sub_element);

  /* Add every varargs element and link each one to its predecessor. */
  while ((element = va_arg (argp, GstElement *)) != NULL) {
    gst_bin_add (GST_BIN (bin), element);
    if (prev)
      gst_element_link (prev, element);
    prev = element;
    if (first == NULL)
      first = element;
  }
  va_end (argp);

  /* May be NULL if @sub_element creates its src pad dynamically. */
  sub_srcpad = gst_element_get_static_pad (sub_element, "src");

  if (prev != NULL) {
    GstPad *srcpad, *sinkpad, *ghost;

    /* Chain exists: ghost the last element's src pad on the bin ... */
    srcpad = gst_element_get_static_pad (prev, "src");
    ghost = gst_ghost_pad_new ("src", srcpad);
    gst_pad_set_active (ghost, TRUE);
    gst_element_add_pad (bin, ghost);

    /* ... and hook @sub_element to the head of the chain, now or on
     * "pad-added" if its src pad does not exist yet. */
    sinkpad = gst_element_get_static_pad (first, "sink");
    if (sub_srcpad)
      gst_pad_link (sub_srcpad, sinkpad);
    else
      g_signal_connect (sub_element, "pad-added", G_CALLBACK (_pad_added_cb),
          sinkpad);

    gst_object_unref (srcpad);
    gst_object_unref (sinkpad);
  } else if (sub_srcpad) {
    GstPad *ghost;

    /* No extra elements: ghost @sub_element's src pad directly. */
    ghost = gst_ghost_pad_new ("src", sub_srcpad);
    gst_pad_set_active (ghost, TRUE);
    gst_element_add_pad (bin, ghost);
  } else {
    /* No chain and no static pad yet: ghost whatever pad appears later. */
    g_signal_connect (sub_element, "pad-added",
        G_CALLBACK (_ghost_pad_added_cb), bin);
  }

  if (sub_srcpad)
    gst_object_unref (sub_srcpad);

  return bin;
}
开发者ID:vliaskov,项目名称:gst-editing-services,代码行数:62,


示例15: gst_bin_iterate_all_by_interface

/*
 * Scan the global pipeline for the most suitable element implementing the
 * GstColorBalance interface: an element reporting HARDWARE balance wins over
 * a software one; among equals, the first found is kept.  The iteration is
 * restarted from scratch on GST_ITERATOR_RESYNC.
 *
 * Returns a new reference (caller must gst_object_unref()) or NULL when the
 * pipeline contains no valid color-balance element.
 */
static GstElement *find_color_balance_element() {
	GstIterator *iterator = gst_bin_iterate_all_by_interface(
		GST_BIN(pipeline),  GST_TYPE_COLOR_BALANCE);

	GstElement *color_balance_element = NULL;
	gboolean done = FALSE, hardware = FALSE;
/* GStreamer 1.x iterators hand out GValues; 0.10 hands out raw pointers. */
#if GST_CHECK_VERSION(1, 0, 0)
	GValue item = G_VALUE_INIT;
#else
	gpointer item;
#endif
	while (!done) {
	switch (gst_iterator_next(iterator, &item)) {
	case GST_ITERATOR_OK : {
#if GST_CHECK_VERSION(1, 0, 0)
		GstElement *element = g_value_get_object(&item);
#else
		GstElement *element = GST_ELEMENT(item);
#endif
		if (is_valid_color_balance_element(element)) {
			if (!color_balance_element) {
				/* First candidate: keep it and remember whether
				 * it is a hardware balance. */
				color_balance_element = GST_ELEMENT_CAST(
						gst_object_ref(element));
				hardware =
					(gst_color_balance_get_balance_type(GST_COLOR_BALANCE
					(element)) == GST_COLOR_BALANCE_HARDWARE);
			}
			else if (!hardware) {
				/* Already have a software candidate: replace it
				 * only if this one is hardware. */
				gboolean tmp =
					(gst_color_balance_get_balance_type(GST_COLOR_BALANCE
					(element)) == GST_COLOR_BALANCE_HARDWARE);
				if (tmp) {
					if (color_balance_element)
						gst_object_unref(color_balance_element);
					color_balance_element =
						GST_ELEMENT_CAST(gst_object_ref(element));
					hardware = TRUE;
				}
			}
		}
#if GST_CHECK_VERSION(1, 0, 0)
		g_value_reset(&item);
#endif
		/* Hardware candidate found: nothing better can turn up. */
		if (hardware && color_balance_element)
			done = TRUE;
        	break;
		}
	case GST_ITERATOR_RESYNC :
		/* Pipeline changed under us: drop state and start over. */
		gst_iterator_resync(iterator);
		done = FALSE;
		hardware = FALSE;
		if (color_balance_element)
			gst_object_unref(color_balance_element);
		color_balance_element = NULL;
		break;
	case GST_ITERATOR_DONE:
	case GST_ITERATOR_ERROR:
	default:
		done = TRUE;
	}
	}
#if GST_CHECK_VERSION(1, 0, 0)
	g_value_unset(&item);
#endif
	gst_iterator_free(iterator);

	return color_balance_element;
}
开发者ID:BorodaZizitopa,项目名称:gstplay,代码行数:68,


示例16: pad_block_cb

/*
 * Blocked-pad probe that inserts or removes the ReplayGain "rgvolume"
 * element in the player's audio bin so that the pipeline topology matches
 * player->replaygain_enabled.  Running inside a blocked-pad probe makes the
 * unlink/relink safe while data flow is paused.  Always returns
 * GST_PAD_PROBE_REMOVE so the probe fires once per block.
 */
static GstPadProbeReturn
pad_block_cb (GstPad *srcPad, GstPadProbeInfo *info, gpointer user_data)
{
    BansheePlayer* player;

    player = (BansheePlayer*) user_data;
    g_return_val_if_fail (IS_BANSHEE_PLAYER (player), GST_PAD_PROBE_OK);

    // The pad_block_cb can get triggered multiple times, on different threads.
    // Lock around the link/unlink code, so we don't end up going through here
    // with inconsistent state.
    g_mutex_lock (player->replaygain_mutex);

    if ((player->replaygain_enabled && player->rgvolume_in_pipeline) ||
        (!player->replaygain_enabled && !player->rgvolume_in_pipeline)) {
        // The pipeline is already in the correct state.  Unblock the pad, and return.
        player->rg_pad_block_id = 0;
        g_mutex_unlock (player->replaygain_mutex);
        return GST_PAD_PROBE_REMOVE;
    }

    // Detach the segment that is about to change: either unlink rgvolume on
    // both sides, or break the direct before->after link to make room for it.
    if (player->rgvolume_in_pipeline) {
        gst_element_unlink (player->before_rgvolume, player->rgvolume);
        gst_element_unlink (player->rgvolume, player->after_rgvolume);
    } else {
        gst_element_unlink (player->before_rgvolume, player->after_rgvolume);
    }

    if (player->replaygain_enabled) {
        player->rgvolume = _bp_rgvolume_new (player);
        // Creation can fail; fall back to the passthrough topology below.
        if (!GST_IS_ELEMENT (player->rgvolume)) {
            player->replaygain_enabled = FALSE;
        }
    } else {
        gst_element_set_state (player->rgvolume, GST_STATE_NULL);
        gst_bin_remove (GST_BIN (player->audiobin), player->rgvolume);
    }

    if (player->replaygain_enabled && GST_IS_ELEMENT (player->rgvolume)) {
        g_signal_connect (player->rgvolume, "notify::target-gain", G_CALLBACK (on_target_gain_changed), player);
        gst_bin_add (GST_BIN (player->audiobin), player->rgvolume);
        gst_element_sync_state_with_parent (player->rgvolume);

        // link in rgvolume and connect to the real audio sink
        gst_element_link (player->before_rgvolume, player->rgvolume);
        gst_element_link (player->rgvolume, player->after_rgvolume);
        player->rgvolume_in_pipeline = TRUE;
    } else {
        // link the queue with the real audio sink
        gst_element_link (player->before_rgvolume, player->after_rgvolume);
        player->rgvolume_in_pipeline = FALSE;
    }

    // Our state is now consistent
    player->rg_pad_block_id = 0;
    g_mutex_unlock (player->replaygain_mutex);

    _bp_rgvolume_print_volume (player);

    return GST_PAD_PROBE_REMOVE;
}
开发者ID:3dfxmadscientist,项目名称:gnome-apps,代码行数:61,


示例17: gst_element_make_from_uri

// Enumerate the tracks of an audio CD through GStreamer's cdda source,
// append one Song per track to songs_, then start an asynchronous
// MusicBrainz disc-id lookup whose result arrives via AudioCDTagsLoaded().
// Returns Success, or Error when CD support is compiled out or any
// GStreamer step fails.
SongLoader::Result SongLoader::LoadAudioCD() {
#ifdef HAVE_AUDIOCD
  // Create gstreamer cdda element
  GstElement* cdda = gst_element_make_from_uri (GST_URI_SRC, "cdda://", NULL);
  if (cdda == NULL) {
    qLog(Error) << "Error while creating CDDA GstElement";
    return Error;
  }

  // Change the element's state to ready and paused, to be able to query it
  if (gst_element_set_state(cdda, GST_STATE_READY) == GST_STATE_CHANGE_FAILURE ||
      gst_element_set_state(cdda, GST_STATE_PAUSED) == GST_STATE_CHANGE_FAILURE) {
    qLog(Error) << "Error while changing CDDA GstElement's state";
    gst_element_set_state(cdda, GST_STATE_NULL);
    gst_object_unref(GST_OBJECT(cdda));
    return Error;
  }

  // Get number of tracks: duration queried in the custom "track" format
  // yields the track count rather than a time.
  GstFormat fmt = gst_format_get_by_nick ("track");
  GstFormat out_fmt = fmt;
  gint64 num_tracks = 0;
  if (!gst_element_query_duration (cdda, &out_fmt, &num_tracks) || out_fmt != fmt) {
    qLog(Error) << "Error while querying cdda GstElement";
    gst_object_unref(GST_OBJECT(cdda));
    return Error;
  }

  // Build a placeholder Song per track; real tags come from MusicBrainz later.
  for (int track_number = 1; track_number <= num_tracks; track_number++) {
    // Init song
    Song song;
    guint64 duration = 0;
    // quint64 == ulonglong and guint64 == ulong, therefore we must cast
    if (gst_tag_list_get_uint64 (GST_CDDA_BASE_SRC(cdda)->tracks[track_number-1].tags,
                                 GST_TAG_DURATION, &duration)) {
      song.set_length_nanosec((quint64)duration);
    }
    song.set_valid(true);
    song.set_filetype(Song::Type_Cdda);
    song.set_url(QUrl(QString("cdda://%1").arg(track_number)));
    song.set_title(QString("Track %1").arg(track_number));
    song.set_track(track_number);
    songs_ << song;
  }

  // Generate MusicBrainz DiscId: wrap cdda in a pipeline and pull the first
  // TAG message off the bus (blocks until one arrives).
  gst_tag_register_musicbrainz_tags();
  GstElement *pipe = gst_pipeline_new ("pipeline");
  gst_bin_add (GST_BIN (pipe), cdda);
  gst_element_set_state (pipe, GST_STATE_READY);
  gst_element_set_state (pipe, GST_STATE_PAUSED);
  GstMessage *msg = gst_bus_timed_pop_filtered (GST_ELEMENT_BUS (pipe),
                    GST_CLOCK_TIME_NONE,
                    GST_MESSAGE_TAG);
  GstTagList *tags = NULL;
  gst_message_parse_tag (msg, &tags);
  char *string_mb = NULL;
  if (gst_tag_list_get_string (tags, GST_TAG_CDDA_MUSICBRAINZ_DISCID, &string_mb)) {
    QString musicbrainz_discid(string_mb);
    qLog(Info) << "MusicBrainz discid: " << musicbrainz_discid;

    // Fire-and-forget lookup; the client object is parented to this loader.
    MusicBrainzClient *musicbrainz_client = new MusicBrainzClient(this);
    connect(musicbrainz_client,
            SIGNAL(Finished(const QString&, const QString&, MusicBrainzClient::ResultList)),
            SLOT(AudioCDTagsLoaded(const QString&, const QString&, MusicBrainzClient::ResultList)));
    musicbrainz_client->StartDiscIdRequest(musicbrainz_discid);
    g_free(string_mb);
  }

  // Clean all the Gstreamer objects we have used: we don't need them anymore
  gst_object_unref(GST_OBJECT(cdda));
  gst_element_set_state (pipe, GST_STATE_NULL);
  gst_object_unref(GST_OBJECT(pipe));
  gst_object_unref(GST_OBJECT(msg));
  gst_object_unref(GST_OBJECT(tags));
  return Success;

#else // HAVE_AUDIOCD
  return Error;
#endif
}
开发者ID:celeron55,项目名称:clementine,代码行数:81,


示例18: fs_rtp_dtmf_sound_source_build

static GstElement *fs_rtp_dtmf_sound_source_build (FsRtpSpecialSource *source,    GList *negotiated_codecs,    FsCodec *selected_codec,    GError **error){  FsCodec *telephony_codec = NULL;  GstCaps *caps = NULL;  GstPad *pad = NULL;  GstElement *dtmfsrc = NULL;  GstElement *capsfilter = NULL;  GstPad *ghostpad = NULL;  GstElement *bin = NULL;  GstElement *encoder = NULL;  GstElement *payloader = NULL;  gchar *encoder_name = NULL;  gchar *payloader_name = NULL;  telephony_codec = get_pcm_law_sound_codec (negotiated_codecs,      &encoder_name, &payloader_name);  if (!telephony_codec)  {    g_set_error (error, FS_ERROR, FS_ERROR_INTERNAL,        "Could not find a pcma/pcmu to send dtmf on");    return NULL;  }  bin = gst_bin_new (NULL);  dtmfsrc = gst_element_factory_make ("dtmfsrc", NULL);  if (!dtmfsrc)  {    g_set_error (error, FS_ERROR, FS_ERROR_CONSTRUCTION,        "Could not make rtpdtmfsrc");    goto error;  }  if (!gst_bin_add (GST_BIN (bin), dtmfsrc))  {    g_set_error (error, FS_ERROR, FS_ERROR_CONSTRUCTION,        "Could not add rtpdtmfsrc to bin");    gst_object_unref (dtmfsrc);    goto error;  }  encoder = gst_element_factory_make (encoder_name, NULL);  if (!encoder)  {    g_set_error (error, FS_ERROR, FS_ERROR_CONSTRUCTION,        "Could not make %s", encoder_name);    goto error;  }  if (!gst_bin_add (GST_BIN (bin), encoder))  {    g_set_error (error, FS_ERROR, FS_ERROR_CONSTRUCTION,        "Could not add %s to bin", encoder_name);    gst_object_unref (dtmfsrc);    goto error;  }  if (!gst_element_link_pads (dtmfsrc, "src", encoder, "sink"))  {    g_set_error (error, FS_ERROR, FS_ERROR_CONSTRUCTION,        "Could not link the rtpdtmfsrc and %s", encoder_name);    goto error;  }  payloader = gst_element_factory_make (payloader_name, NULL);  if (!payloader)  {    g_set_error (error, FS_ERROR, FS_ERROR_CONSTRUCTION,        "Could not make %s", payloader_name);    goto error;  }  if (!gst_bin_add (GST_BIN (bin), payloader))  {    g_set_error (error, 
FS_ERROR, FS_ERROR_CONSTRUCTION,        "Could not add %s to bin", payloader_name);    gst_object_unref (dtmfsrc);    goto error;  }  if (!gst_element_link_pads (encoder, "src", payloader, "sink"))  {    g_set_error (error, FS_ERROR, FS_ERROR_CONSTRUCTION,        "Could not link the %s and %s", encoder_name, payloader_name);    goto error;  }  capsfilter = gst_element_factory_make ("capsfilter", NULL);  if (!capsfilter)  {    g_set_error (error, FS_ERROR, FS_ERROR_CONSTRUCTION,        "Could not make capsfilter");    goto error;  }  if (!gst_bin_add (GST_BIN (bin), capsfilter))  {    g_set_error (error, FS_ERROR, FS_ERROR_CONSTRUCTION,        "Could not add capsfilter to bin");//.........这里部分代码省略.........
开发者ID:dksaarth,项目名称:farsight2-msn-plugin,代码行数:101,


示例19: gst_nle_source_pad_added_cb

/*
 * "pad-added" handler for the internal decoder: inspects the new pad's caps
 * and, for the first video pad (and the first audio pad when audio is
 * enabled and the current item plays at normal rate), attaches an appsink
 * with the matching buffer/EOS callbacks, exposes the corresponding source
 * ghost pad on the element, and links the decoder pad to the appsink.
 *
 * Fix vs. original: the caps returned by gst_pad_get_caps_reffed() carry a
 * reference that was never released, leaking caps on every pad-added; they
 * are now unreffed once the mime string is no longer needed.
 */
static void
gst_nle_source_pad_added_cb (GstElement * element, GstPad * pad,
    GstNleSource * nlesrc)
{
  GstCaps *caps;
  const GstStructure *s;
  const gchar *mime;
  GstElement *appsink = NULL;
  GstPad *sink_pad;
  GstAppSinkCallbacks appsink_cbs;
  GstNleSrcItem *item;

  item = (GstNleSrcItem *) g_list_nth_data (nlesrc->queue, nlesrc->index);

  caps = gst_pad_get_caps_reffed (pad);
  s = gst_caps_get_structure (caps, 0);
  mime = gst_structure_get_name (s);
  GST_DEBUG_OBJECT (nlesrc, "Found mime type: %s", mime);

  if (g_strrstr (mime, "video") && !nlesrc->video_linked) {
    /* First video pad: wire up the video appsink callbacks. */
    appsink = gst_element_factory_make ("appsink", NULL);
    memset (&appsink_cbs, 0, sizeof (appsink_cbs));
    appsink_cbs.eos = gst_nle_source_on_video_eos;
    appsink_cbs.new_preroll = gst_nle_source_on_preroll_buffer;
    appsink_cbs.new_buffer = gst_nle_source_on_video_buffer;
    nlesrc->video_linked = TRUE;
    if (!nlesrc->video_srcpad_added) {
      gst_pad_set_active (nlesrc->video_srcpad, TRUE);
      gst_element_add_pad (GST_ELEMENT (nlesrc),
          gst_object_ref (nlesrc->video_srcpad));
      nlesrc->video_srcpad_added = TRUE;
    }
    gst_pad_add_event_probe (GST_BASE_SINK_PAD (GST_BASE_SINK (appsink)),
        (GCallback) gst_nle_source_video_pad_probe_cb, nlesrc);
    nlesrc->video_eos = FALSE;
  } else if (g_strrstr (mime, "audio") && nlesrc->with_audio
      && !nlesrc->audio_linked && (item ? item->rate == 1.0 : TRUE)) {
    /* First audio pad, audio enabled, and the item is not rate-adjusted
     * (audio cannot be resampled for non-1.0 rates here). */
    appsink = gst_element_factory_make ("appsink", NULL);
    memset (&appsink_cbs, 0, sizeof (appsink_cbs));
    appsink_cbs.eos = gst_nle_source_on_audio_eos;
    appsink_cbs.new_preroll = gst_nle_source_on_preroll_buffer;
    appsink_cbs.new_buffer = gst_nle_source_on_audio_buffer;
    nlesrc->audio_linked = TRUE;
    if (!nlesrc->audio_srcpad_added) {
      gst_pad_set_active (nlesrc->audio_srcpad, TRUE);
      gst_element_add_pad (GST_ELEMENT (nlesrc),
          gst_object_ref (nlesrc->audio_srcpad));
      nlesrc->audio_srcpad_added = TRUE;
    }
    gst_pad_add_event_probe (GST_BASE_SINK_PAD (GST_BASE_SINK (appsink)),
        (GCallback) gst_nle_source_audio_pad_probe_cb, nlesrc);
    nlesrc->audio_eos = FALSE;
  }

  /* Done with the caps (and the mime string pointing into them). */
  gst_caps_unref (caps);

  if (appsink != NULL) {
    g_object_set (appsink, "sync", FALSE, NULL);
    gst_app_sink_set_callbacks (GST_APP_SINK (appsink), &appsink_cbs, nlesrc,
        NULL);
    gst_bin_add (GST_BIN (nlesrc->decoder), appsink);
    sink_pad = gst_element_get_static_pad (appsink, "sink");
    gst_pad_link (pad, sink_pad);
    gst_element_sync_state_with_parent (appsink);
    gst_object_unref (sink_pad);
  }
}
开发者ID:fluendo,项目名称:VAS,代码行数:64,


示例20: run_test

static voidrun_test (const GstCaps * caps, gint src_width, gint src_height,    gint dest_width, gint dest_height, gint method,    GCallback src_handoff, gpointer src_handoff_user_data,    GCallback sink_handoff, gpointer sink_handoff_user_data){  GstElement *pipeline;  GstElement *src, *videoconvert, *capsfilter1, *identity, *scale,      *capsfilter2, *sink;  GstMessage *msg;  GstBus *bus;  GstCaps *copy;  guint n_buffers = 0;  /* skip formats that videoconvert can't handle */  if (!videoconvert_supports_caps (caps))    return;  pipeline = gst_element_factory_make ("pipeline", "pipeline");  fail_unless (pipeline != NULL);  src = gst_element_factory_make ("videotestsrc", "src");  fail_unless (src != NULL);  g_object_set (G_OBJECT (src), "num-buffers", 1, NULL);  videoconvert = gst_element_factory_make ("videoconvert", "csp");  fail_unless (videoconvert != NULL);  capsfilter1 = gst_element_factory_make ("capsfilter", "filter1");  fail_unless (capsfilter1 != NULL);  copy = gst_caps_copy (caps);  gst_caps_set_simple (copy, "width", G_TYPE_INT, src_width, "height",      G_TYPE_INT, src_height, "framerate", GST_TYPE_FRACTION, 30, 1, NULL);  g_object_set (G_OBJECT (capsfilter1), "caps", copy, NULL);  gst_caps_unref (copy);  identity = gst_element_factory_make ("identity", "identity");  fail_unless (identity != NULL);  if (src_handoff) {    g_object_set (G_OBJECT (identity), "signal-handoffs", TRUE, NULL);    g_signal_connect (identity, "handoff", G_CALLBACK (src_handoff),        src_handoff_user_data);  }  scale = gst_element_factory_make ("videoscale", "scale");  fail_unless (scale != NULL);  g_object_set (G_OBJECT (scale), "method", method, NULL);  capsfilter2 = gst_element_factory_make ("capsfilter", "filter2");  fail_unless (capsfilter2 != NULL);  copy = gst_caps_copy (caps);  gst_caps_set_simple (copy, "width", G_TYPE_INT, dest_width, "height",      G_TYPE_INT, dest_height, NULL);  g_object_set (G_OBJECT (capsfilter2), "caps", copy, NULL);  gst_caps_unref (copy);  
sink = gst_element_factory_make ("fakesink", "sink");  fail_unless (sink != NULL);  g_object_set (G_OBJECT (sink), "signal-handoffs", TRUE, "async", FALSE, NULL);  g_signal_connect (sink, "handoff", G_CALLBACK (on_sink_handoff), &n_buffers);  if (sink_handoff) {    g_signal_connect (sink, "handoff", G_CALLBACK (sink_handoff),        sink_handoff_user_data);  }  gst_bin_add_many (GST_BIN (pipeline), src, videoconvert, capsfilter1,      identity, scale, capsfilter2, sink, NULL);  fail_unless (gst_element_link_pads_full (src, "src", videoconvert, "sink",          LINK_CHECK_FLAGS));  fail_unless (gst_element_link_pads_full (videoconvert, "src", capsfilter1,          "sink", LINK_CHECK_FLAGS));  fail_unless (gst_element_link_pads_full (capsfilter1, "src", identity, "sink",          LINK_CHECK_FLAGS));  fail_unless (gst_element_link_pads_full (identity, "src", scale, "sink",          LINK_CHECK_FLAGS));  fail_unless (gst_element_link_pads_full (scale, "src", capsfilter2, "sink",          LINK_CHECK_FLAGS));  fail_unless (gst_element_link_pads_full (capsfilter2, "src", sink, "sink",          LINK_CHECK_FLAGS));  bus = gst_element_get_bus (pipeline);  fail_unless (bus != NULL);  fail_unless (gst_element_set_state (pipeline,          GST_STATE_PLAYING) == GST_STATE_CHANGE_SUCCESS);  msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE,      GST_MESSAGE_EOS | GST_MESSAGE_ERROR | GST_MESSAGE_WARNING);  fail_unless_equals_int (GST_MESSAGE_TYPE (msg), GST_MESSAGE_EOS);  fail_unless (gst_element_set_state (pipeline,          GST_STATE_NULL) == GST_STATE_CHANGE_SUCCESS);  fail_unless (n_buffers == 1);  gst_object_unref (pipeline);  gst_message_unref (msg);  gst_object_unref (bus);//.........这里部分代码省略.........
开发者ID:Distrotech,项目名称:gst-plugins-base,代码行数:101,


示例21: main

intmain (int argc, char *argv[]){  GstElement *filesrc, *osssink, *queue, *parse, *decode;  GstElement *bin;  GstElement *thread;  gst_init (&argc, &argv);  if (argc != 2) {    g_print ("usage: %s <filename>/n", argv[0]);    exit (-1);  }  /* create a new thread to hold the elements */  thread = gst_thread_new ("thread");  g_assert (thread != NULL);  /* create a new bin to hold the elements */  bin = gst_bin_new ("bin");  g_assert (bin != NULL);  /* create a disk reader */  filesrc = gst_element_factory_make ("filesrc", "disk_source");  g_assert (filesrc != NULL);  g_object_set (G_OBJECT (filesrc), "location", argv[1], NULL);  g_signal_connect (G_OBJECT (filesrc), "eos", G_CALLBACK (eos), thread);  queue = gst_element_factory_make ("queue", "queue");  /* and an audio sink */  osssink = gst_element_factory_make ("osssink", "play_audio");  g_assert (osssink != NULL);  parse = gst_element_factory_make ("mp3parse", "parse");  decode = gst_element_factory_make ("mpg123", "decode");  /* add objects to the main bin */  gst_bin_add (GST_BIN (bin), filesrc);  gst_bin_add (GST_BIN (bin), queue);  gst_bin_add (GST_BIN (thread), parse);  gst_bin_add (GST_BIN (thread), decode);  gst_bin_add (GST_BIN (thread), osssink);  gst_element_link_many (filesrc, queue, parse, decode, osssink, NULL);  /* make it ready */  gst_element_set_state (GST_ELEMENT (bin), GST_STATE_READY);  /* start playing */  gst_element_set_state (GST_ELEMENT (bin), GST_STATE_PLAYING);  playing = TRUE;  while (playing) {    gst_bin_iterate (GST_BIN (bin));  }  gst_element_set_state (GST_ELEMENT (bin), GST_STATE_NULL);  exit (0);}
开发者ID:WangCrystal,项目名称:gstreamer,代码行数:62,


示例22: _test_negotiation

/*
 * Run one caps-negotiation test: build
 *   videotestsrc ! capsfilter(src_templ) ! videoscale !
 *   capsfilter(sink_templ) ! fakesink
 * play it, and let the bus handler (_test_negotiation_message) quit the
 * main loop once the sink-side capsfilter's negotiated caps have been
 * checked against the expected width/height and pixel-aspect-ratio
 * (par_n/par_d) via the "notify::caps" handler.  Fails the check if the
 * negotiated result does not match (data.ok stays FALSE).
 */
static void
_test_negotiation (const gchar * src_templ, const gchar * sink_templ,
    gint width, gint height, gint par_n, gint par_d)
{
  GstElement *pipeline;
  GstElement *src, *capsfilter1, *scale, *capsfilter2, *sink;
  GstBus *bus;
  GMainLoop *loop;
  GstCaps *caps;
  TestNegotiationData data = { 0, 0, 0, 0, FALSE, NULL };
  GstPad *pad;

  GST_DEBUG ("Running test for src templ caps '%s' and sink templ caps '%s'",
      src_templ, sink_templ);

  pipeline = gst_element_factory_make ("pipeline", "pipeline");
  fail_unless (pipeline != NULL);

  /* Single buffer is enough to trigger negotiation. */
  src = gst_element_factory_make ("videotestsrc", "src");
  fail_unless (src != NULL);
  g_object_set (G_OBJECT (src), "num-buffers", 1, NULL);

  /* Upstream capsfilter pins the source's template caps. */
  capsfilter1 = gst_element_factory_make ("capsfilter", "filter1");
  fail_unless (capsfilter1 != NULL);
  caps = gst_caps_from_string (src_templ);
  fail_unless (caps != NULL);
  g_object_set (G_OBJECT (capsfilter1), "caps", caps, NULL);
  gst_caps_unref (caps);

  scale = gst_element_factory_make ("videoscale", "scale");
  fail_unless (scale != NULL);

  /* Downstream capsfilter constrains what videoscale may negotiate to. */
  capsfilter2 = gst_element_factory_make ("capsfilter", "filter2");
  fail_unless (capsfilter2 != NULL);
  caps = gst_caps_from_string (sink_templ);
  fail_unless (caps != NULL);
  g_object_set (G_OBJECT (capsfilter2), "caps", caps, NULL);
  gst_caps_unref (caps);

  /* Watch the negotiated caps as they land on the sink-side filter. */
  pad = gst_element_get_static_pad (capsfilter2, "sink");
  fail_unless (pad != NULL);
  g_signal_connect (pad, "notify::caps",
      G_CALLBACK (_test_negotiation_notify_caps), &data);
  gst_object_unref (pad);

  /* async=FALSE so state changes complete synchronously. */
  sink = gst_element_factory_make ("fakesink", "sink");
  fail_unless (sink != NULL);
  g_object_set (sink, "async", FALSE, NULL);

  gst_bin_add_many (GST_BIN (pipeline), src, capsfilter1, scale, capsfilter2,
      sink, NULL);

  fail_unless (gst_element_link_pads_full (src, "src", capsfilter1, "sink",
          LINK_CHECK_FLAGS));
  fail_unless (gst_element_link_pads_full (capsfilter1, "src", scale, "sink",
          LINK_CHECK_FLAGS));
  fail_unless (gst_element_link_pads_full (scale, "src", capsfilter2, "sink",
          LINK_CHECK_FLAGS));
  fail_unless (gst_element_link_pads_full (capsfilter2, "src", sink, "sink",
          LINK_CHECK_FLAGS));

  loop = g_main_loop_new (NULL, FALSE);

  bus = gst_element_get_bus (pipeline);
  fail_unless (bus != NULL);
  gst_bus_add_signal_watch (bus);

  /* Expected negotiation results, consumed by the bus/notify handlers. */
  data.loop = loop;
  data.width = width;
  data.height = height;
  data.par_n = par_n;
  data.par_d = par_d;
  data.ok = FALSE;

  g_signal_connect (bus, "message", G_CALLBACK (_test_negotiation_message),
      &data);

  gst_object_unref (bus);

  fail_unless (gst_element_set_state (pipeline,
          GST_STATE_PLAYING) == GST_STATE_CHANGE_SUCCESS);

  /* Runs until _test_negotiation_message() quits the loop. */
  g_main_loop_run (loop);

  fail_unless (data.ok == TRUE);

  fail_unless (gst_element_set_state (pipeline,
          GST_STATE_NULL) == GST_STATE_CHANGE_SUCCESS);

  gst_object_unref (pipeline);
  g_main_loop_unref (loop);
}


示例23: create_encoder_pipeline

/*
 * Build the encoder's GStreamer pipeline from the already-constructed
 * bin/element/link description in @encoder:
 *   1. add every element of every bin to a fresh pipeline;
 *   2. wire appsrc/appsink callbacks, attach an event probe on the sink pad
 *      of each terminal sink element, and link the elements (with optional
 *      caps filters) as described by each bin's link list;
 *   3. install the bus watch and store the pipeline in encoder->pipeline.
 *
 * Returns: 0 on success, 1 if an element could not be added to the pipeline.
 */
static gint create_encoder_pipeline (Encoder *encoder)
{
    GstElement *pipeline, *element;
    Bin *bin;
    Link *link;
    GSList *bins, *links, *elements;
    GstElementFactory *element_factory;
    GType type;
    EncoderStream *stream;
    GstAppSrcCallbacks callbacks = {
        need_data_callback,
        NULL,
        NULL
    };
    GstAppSinkCallbacks encoder_appsink_callbacks = {
        NULL,
        NULL,
        new_sample_callback
    };
    GstCaps *caps;
    GstBus *bus;

    pipeline = gst_pipeline_new (NULL);

    /* add element to pipeline first. */
    bins = encoder->bins;
    while (bins != NULL) {
        bin = bins->data;
        elements = bin->elements;
        while (elements != NULL) {
            element = elements->data;
            if (!gst_bin_add (GST_BIN (pipeline), element)) {
                /* FIX: gst_element_get_name() returns a newly-allocated copy
                 * that the original leaked here; GST_ELEMENT_NAME yields the
                 * same string without a copy.
                 * NOTE(review): the partially-built pipeline is also leaked on
                 * this path — intentional? Unreffing it would destroy elements
                 * the bins still reference, so left as-is; confirm cleanup
                 * happens in the caller. */
                GST_ERROR ("add element %s to bin %s error.", GST_ELEMENT_NAME (element), bin->name);
                return 1;
            }
            elements = g_slist_next (elements);
        }
        bins = g_slist_next (bins);
    }

    /* then links element. */
    bins = encoder->bins;
    while (bins != NULL) {
        bin = bins->data;

        /* if the bin starts with an appsrc, hook its need-data callback up to
         * the matching encoder stream */
        element = bin->first;
        element_factory = gst_element_get_factory (element);
        type = gst_element_factory_get_element_type (element_factory);
        stream = NULL;
        if (g_strcmp0 ("GstAppSrc", g_type_name (type)) == 0) {
            GST_INFO ("Encoder appsrc found.");
            stream = encoder_get_stream (encoder, bin->name);
            gst_app_src_set_callbacks (GST_APP_SRC (element), &callbacks, stream, NULL);
        }

        /* terminal sinks get an event probe (and appsinks a new-sample callback) */
        element = bin->last;
        element_factory = gst_element_get_factory (element);
        type = gst_element_factory_get_element_type (element_factory);
        if ((g_strcmp0 ("GstAppSink", g_type_name (type)) == 0) ||
                (g_strcmp0 ("GstHlsSink", g_type_name (type)) == 0) ||
                (g_strcmp0 ("GstFileSink", g_type_name (type)) == 0)) {
            GstPad *pad;

            if (g_strcmp0 ("GstAppSink", g_type_name (type)) == 0) {
                GST_INFO ("Encoder appsink found.");
                gst_app_sink_set_callbacks (GST_APP_SINK (element), &encoder_appsink_callbacks, encoder, NULL);
            }
            pad = gst_element_get_static_pad (element, "sink");
            gst_pad_add_probe (pad, GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM, encoder_appsink_event_probe, encoder, NULL);
            /* FIX: gst_element_get_static_pad() returns a reference the
             * original never released; the probe does not need us to keep it. */
            gst_object_unref (pad);
        }

        /* link the bin's elements, honouring per-link caps when present */
        links = bin->links;
        while (links != NULL) {
            link = links->data;
            GST_INFO ("link element: %s -> %s", link->src_name, link->sink_name);
            if (link->caps != NULL) {
                caps = gst_caps_from_string (link->caps);
                gst_element_link_filtered (link->src, link->sink, caps);
                gst_caps_unref (caps);
            } else {
                gst_element_link (link->src, link->sink);
            }
            links = g_slist_next (links);
        }
        bins = g_slist_next (bins);
    }

    bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
    gst_bus_add_watch (bus, bus_callback, encoder);
    g_object_unref (bus);
    encoder->pipeline = pipeline;

    return 0;
}
开发者ID:qianbo0423,项目名称:gstreamill,代码行数:91,


示例24: gst_wrapper_camera_bin_reset_video_src_caps

/*
 * Force new capture caps on the wrapped video source.
 *
 * If @caps equal the caps currently set on self->src_filter this is a no-op.
 * Otherwise the source is torn down to NULL, the capsfilter is updated via
 * set_capsfilter_caps(), and the source is brought back up in sync with its
 * parent.  The source's clock and base time are saved before the teardown and
 * restored afterwards (recursively, if the source is itself a bin) so that
 * running time stays continuous across the caps change.
 */
static void
gst_wrapper_camera_bin_reset_video_src_caps (GstWrapperCameraBinSrc * self,
    GstCaps * caps)
{
  GstClock *clock;
  gint64 base_time;

  GST_DEBUG_OBJECT (self, "Resetting src caps to %" GST_PTR_FORMAT, caps);
  if (self->src_vid_src) {
    GstCaps *old_caps;

    /* skip the expensive NULL/up cycle when the caps are unchanged */
    g_object_get (G_OBJECT (self->src_filter), "caps", &old_caps, NULL);
    if (gst_caps_is_equal (caps, old_caps)) {
      GST_DEBUG_OBJECT (self, "old and new caps are same, do not reset it");
      if (old_caps)
        gst_caps_unref (old_caps);
      return;
    }
    if (old_caps)
      gst_caps_unref (old_caps);

    /* remember timing state before tearing the source down; set_state(NULL)
     * clears both the clock and the base time */
    clock = gst_element_get_clock (self->src_vid_src);
    base_time = gst_element_get_base_time (self->src_vid_src);

    /* Ideally, we should only need to get the source to READY here,
     * but it seems v4l2src isn't happy with this. Putting to NULL makes
     * it work.
     *
     * TODO fix this in v4l2src
     */
    gst_element_set_state (self->src_vid_src, GST_STATE_NULL);
    set_capsfilter_caps (self, caps);

    /* the restarted source will emit a new-segment we must swallow */
    self->drop_newseg = TRUE;

    GST_DEBUG_OBJECT (self, "Bringing source up");
    if (!gst_element_sync_state_with_parent (self->src_vid_src)) {
      GST_WARNING_OBJECT (self, "Failed to reset source caps");
      gst_element_set_state (self->src_vid_src, GST_STATE_NULL);
    }

    if (clock) {
      /* restore the saved timing state on the source ... */
      gst_element_set_clock (self->src_vid_src, clock);
      gst_element_set_base_time (self->src_vid_src, base_time);

      /* ... and, if the source is a bin, on each of its children too
       * (gst_element_set_base_time is not recursive) */
      if (GST_IS_BIN (self->src_vid_src)) {
        GstIterator *it =
            gst_bin_iterate_elements (GST_BIN (self->src_vid_src));
        gpointer item = NULL;
        gboolean done = FALSE;

        while (!done) {
          switch (gst_iterator_next (it, &item)) {
            case GST_ITERATOR_OK:
              gst_element_set_base_time (GST_ELEMENT (item), base_time);
              gst_object_unref (item);
              break;
            case GST_ITERATOR_RESYNC:
              /* bin changed while iterating: start over */
              gst_iterator_resync (it);
              break;
            case GST_ITERATOR_ERROR:
              done = TRUE;
              break;
            case GST_ITERATOR_DONE:
              done = TRUE;
              break;
          }
        }
        gst_iterator_free (it);
      }
      gst_object_unref (clock);
    }
  }
}
开发者ID:pli3,项目名称:gst-plugins-bad,代码行数:74,


示例25: close

bool CvCapture_GStreamer::open( int type, const char* filename ){    close();    CV_FUNCNAME("cvCaptureFromCAM_GStreamer");    __BEGIN__;    gst_initializer::init();//    if(!isInited) {//        printf("gst_init/n");//        gst_init (NULL, NULL);//        gst_debug_set_active(TRUE);//        gst_debug_set_colored(TRUE);//        gst_debug_set_default_threshold(GST_LEVEL_WARNING);//        isInited = true;//    }    bool stream = false;    bool manualpipeline = false;    char *uri = NULL;    uridecodebin = NULL;    if(type != CV_CAP_GSTREAMER_FILE) {        close();        return false;    }    if(!gst_uri_is_valid(filename)) {        uri = realpath(filename, NULL);        stream=false;        if(uri) {            uri = g_filename_to_uri(uri, NULL, NULL);            if(!uri) {                CV_WARN("GStreamer: Error opening file/n");                close();                return false;            }        } else {            GError *err = NULL;            //uridecodebin = gst_parse_bin_from_description(filename, FALSE, &err);            uridecodebin = gst_parse_launch(filename, &err);            if(!uridecodebin) {                CV_WARN("GStreamer: Error opening bin/n");                close();                return false;            }            stream = true;            manualpipeline = true;        }    } else {        stream = true;        uri = g_strdup(filename);    }    if(!uridecodebin) {        uridecodebin = gst_element_factory_make ("uridecodebin", NULL);        g_object_set(G_OBJECT(uridecodebin),"uri",uri, NULL);        if(!uridecodebin) {            CV_WARN("GStreamer: Failed to create uridecodebin/n");            close();            return false;        }    }    if(manualpipeline) {        GstIterator *it = gst_bin_iterate_sinks(GST_BIN(uridecodebin));        if(gst_iterator_next(it, (gpointer *)&sink) != GST_ITERATOR_OK) {        CV_ERROR(CV_StsError, "GStreamer: cannot find appsink in manual pipeline/n");        return false;        }    
pipeline = uridecodebin;    } else {    pipeline = gst_pipeline_new (NULL);        color = gst_element_factory_make("ffmpegcolorspace", NULL);        sink = gst_element_factory_make("appsink", NULL);        gst_bin_add_many(GST_BIN(pipeline), uridecodebin, color, sink, NULL);        g_signal_connect(uridecodebin, "pad-added", G_CALLBACK(newPad), color);        if(!gst_element_link(color, sink)) {            CV_ERROR(CV_StsError, "GStreamer: cannot link color -> sink/n");            gst_object_unref(pipeline);            return false;        }    }    gst_app_sink_set_max_buffers (GST_APP_SINK(sink), 1);    gst_app_sink_set_drop (GST_APP_SINK(sink), stream);    caps = gst_caps_new_simple("video/x-raw-rgb",                               "red_mask",   G_TYPE_INT, 0x0000FF,                               "green_mask", G_TYPE_INT, 0x00FF00,                               "blue_mask",  G_TYPE_INT, 0xFF0000,                               NULL);    gst_app_sink_set_caps(GST_APP_SINK(sink), caps);    gst_caps_unref(caps);    if(gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_READY) ==//.........这里部分代码省略.........
开发者ID:4auka,项目名称:opencv,代码行数:101,


示例26: gst_wrapper_camera_bin_src_construct_pipeline

/** * gst_wrapper_camera_bin_src_construct_pipeline: * @bcamsrc: camerasrc object * * This function creates and links the elements of the camerasrc bin * videosrc ! cspconv ! srcfilter ! cspconv ! capsfilter ! crop ! scale ! / * capsfilter ! tee name=t *    t. ! ... (viewfinder pad) *    t. ! output-selector name=outsel *        outsel. ! (image pad) *        outsel. ! (video pad) * * Returns: TRUE, if elements were successfully created, FALSE otherwise */static gbooleangst_wrapper_camera_bin_src_construct_pipeline (GstBaseCameraSrc * bcamsrc){  GstWrapperCameraBinSrc *self = GST_WRAPPER_CAMERA_BIN_SRC (bcamsrc);  GstBin *cbin = GST_BIN (bcamsrc);  GstElement *tee;  GstElement *filter_csp;  GstElement *src_csp;  GstElement *capsfilter;  gboolean ret = FALSE;  GstPad *vf_pad;  GstPad *tee_capture_pad;  GstPad *src_caps_src_pad;  if (!self->elements_created) {    GST_DEBUG_OBJECT (self, "constructing pipeline");    /* Add application set or default video src element */    if (!(self->src_vid_src = gst_camerabin_setup_default_element (cbin,                self->app_vid_src, "autovideosrc", DEFAULT_VIDEOSRC,                "camerasrc-real-src"))) {      self->src_vid_src = NULL;      goto done;    } else {      if (!gst_camerabin_add_element (cbin, self->src_vid_src)) {        goto done;      }    }    /* we lost the reference */    self->app_vid_src = NULL;    /* we listen for changes to max-zoom in the video src so that     * we can proxy them to the basecamerasrc property */    if (g_object_class_find_property (G_OBJECT_GET_CLASS (bcamsrc), "max-zoom")) {      g_signal_connect (G_OBJECT (self->src_vid_src), "notify::max-zoom",          (GCallback) gst_wrapper_camera_bin_src_max_zoom_cb, bcamsrc);    }    /* add a buffer probe to the src elemento to drop EOS from READY->NULL */    {      GstPad *pad;      pad = gst_element_get_static_pad (self->src_vid_src, "src");      self->src_event_probe_id = gst_pad_add_event_probe (pad,          (GCallback) 
gst_wrapper_camera_src_src_event_probe, self);      gst_object_unref (pad);    }    if (!gst_camerabin_create_and_add_element (cbin, "ffmpegcolorspace",            "src-colorspace"))      goto done;    if (!(self->src_filter =            gst_camerabin_create_and_add_element (cbin, "capsfilter",                "src-capsfilter")))      goto done;    /* attach to notify::caps on the first capsfilter and use a callback     * to recalculate the zoom properties when these caps change and to     * propagate the caps to the second capsfilter */    src_caps_src_pad = gst_element_get_static_pad (self->src_filter, "src");    g_signal_connect (src_caps_src_pad, "notify::caps",        G_CALLBACK (gst_wrapper_camera_bin_src_caps_cb), self);    gst_object_unref (src_caps_src_pad);    if (!(self->src_zoom_crop =            gst_camerabin_create_and_add_element (cbin, "videocrop",                "zoom-crop")))      goto done;    if (!(self->src_zoom_scale =            gst_camerabin_create_and_add_element (cbin, "videoscale",                "zoom-scale")))      goto done;    if (!(self->src_zoom_filter =            gst_camerabin_create_and_add_element (cbin, "capsfilter",                "zoom-capsfilter")))      goto done;    if (!(tee =            gst_camerabin_create_and_add_element (cbin, "tee",                "camerasrc-tee")))      goto done;    /* viewfinder pad */    vf_pad = gst_element_get_request_pad (tee, "src%d");//.........这里部分代码省略.........
开发者ID:pli3,项目名称:gst-plugins-bad,代码行数:101,


示例27: main

gint main (gint argc, gchar *argv[]){    gtk_init (&argc, &argv);    gst_init (&argc, &argv);    GstElement* pipeline = gst_pipeline_new ("pipeline");    //window that contains an area where the video is drawn    GtkWidget* window = gtk_window_new(GTK_WINDOW_TOPLEVEL);    gtk_widget_set_size_request (window, 640, 480);    gtk_window_move (GTK_WINDOW (window), 300, 10);    gtk_window_set_title (GTK_WINDOW (window), "glimagesink implement the gstxoverlay interface");    GdkGeometry geometry;    geometry.min_width = 1;    geometry.min_height = 1;    geometry.max_width = -1;    geometry.max_height = -1;    gtk_window_set_geometry_hints (GTK_WINDOW (window), window, &geometry, GDK_HINT_MIN_SIZE);    //window to control the states    GtkWidget* window_control = gtk_window_new (GTK_WINDOW_TOPLEVEL);    geometry.min_width = 1;    geometry.min_height = 1;    geometry.max_width = -1;    geometry.max_height = -1;    gtk_window_set_geometry_hints (GTK_WINDOW (window_control), window_control, &geometry, GDK_HINT_MIN_SIZE);    gtk_window_set_resizable (GTK_WINDOW (window_control), FALSE);    gtk_window_move (GTK_WINDOW (window_control), 10, 10);    GtkWidget* table = gtk_table_new (2, 1, TRUE);    gtk_container_add (GTK_CONTAINER (window_control), table);    //control state null    GtkWidget* button_state_null = gtk_button_new_with_label ("GST_STATE_NULL");    g_signal_connect (G_OBJECT (button_state_null), "clicked",        G_CALLBACK (button_state_null_cb), pipeline);    gtk_table_attach_defaults (GTK_TABLE (table), button_state_null, 0, 1, 0, 1);    gtk_widget_show (button_state_null);    //control state ready    GtkWidget* button_state_ready = gtk_button_new_with_label ("GST_STATE_READY");    g_signal_connect (G_OBJECT (button_state_ready), "clicked",        G_CALLBACK (button_state_ready_cb), pipeline);    gtk_table_attach_defaults (GTK_TABLE (table), button_state_ready, 0, 1, 1, 2);    gtk_widget_show (button_state_ready);    //control state paused    GtkWidget* 
button_state_paused = gtk_button_new_with_label ("GST_STATE_PAUSED");    g_signal_connect (G_OBJECT (button_state_paused), "clicked",        G_CALLBACK (button_state_paused_cb), pipeline);    gtk_table_attach_defaults (GTK_TABLE (table), button_state_paused, 0, 1, 2, 3);    gtk_widget_show (button_state_paused);    //control state playing    GtkWidget* button_state_playing = gtk_button_new_with_label ("GST_STATE_PLAYING");    g_signal_connect (G_OBJECT (button_state_playing), "clicked",        G_CALLBACK (button_state_playing_cb), pipeline);    gtk_table_attach_defaults (GTK_TABLE (table), button_state_playing, 0, 1, 3, 4);    gtk_widget_show (button_state_playing);    //change framerate    GtkWidget* slider_fps = gtk_vscale_new_with_range (1, 30, 2);    g_signal_connect (G_OBJECT (slider_fps), "format-value",        G_CALLBACK (slider_fps_cb), pipeline);    gtk_table_attach_defaults (GTK_TABLE (table), slider_fps, 1, 2, 0, 4);    gtk_widget_show (slider_fps);    gtk_widget_show (table);    gtk_widget_show (window_control);    //configure the pipeline    g_signal_connect(G_OBJECT(window), "delete-event", G_CALLBACK(destroy_cb), pipeline);    GstElement* videosrc = gst_element_factory_make ("videotestsrc", "videotestsrc");    GstElement* glupload = gst_element_factory_make ("glupload", "glupload");    GstElement* glfiltercube = gst_element_factory_make ("glfiltercube", "glfiltercube");    GstElement* glfilterlaplacian = gst_element_factory_make ("glfilterlaplacian", "glfilterlaplacian");    GstElement* videosink = gst_element_factory_make ("glimagesink", "glimagesink");    GstCaps *caps = gst_caps_new_simple("video/x-raw-yuv",                                        "width", G_TYPE_INT, 640,                                        "height", G_TYPE_INT, 480,                                        "framerate", GST_TYPE_FRACTION, 25, 1,                                        "format", GST_TYPE_FOURCC, GST_MAKE_FOURCC ('A', 'Y', 'U', 'V'),                                  
      NULL) ;    gst_bin_add_many (GST_BIN (pipeline), videosrc, glupload, glfiltercube, glfilterlaplacian, videosink, NULL);    gboolean link_ok = gst_element_link_filtered(videosrc, glupload, caps) ;    gst_caps_unref(caps) ;    if(!link_ok)    {        g_warning("Failed to link videosrc to glupload!/n") ;        return -1;    }    if(!gst_element_link_many(glupload, glfiltercube, glfilterlaplacian, videosink, NULL))    {        g_warning("Failed to link glupload to videosink!/n") ;        return -1;    }//.........这里部分代码省略.........
开发者ID:ChinnaSuhas,项目名称:ossbuild,代码行数:101,


示例28: main

intmain (int argc, char *argv[]){  GstElement *bin, *filesrc, *decoder, *audiosink;  GstElement *conv, *resample;	xmlfile = "helloworld_logs";  std_log(LOG_FILENAME_LINE, "Test Started hellowworld");    gst_init (&argc, &argv);   if (argc != 2) {    g_print ("usage: %s <mp3 file>/n", argv[0]);    std_log(LOG_FILENAME_LINE, "Test Failed argument need to be passed");    create_xml(1);     exit (-1);  }  /* create a new bin to hold the elements */  bin = gst_pipeline_new ("pipeline");  g_assert (bin);  /* create a disk reader */  filesrc = gst_element_factory_make (/*"filesrc"*/"audiotestsrc", "disk_source");  g_assert (filesrc);  g_object_set (G_OBJECT (filesrc), "location", argv[1], NULL);  /* now it's time to get the decoder */  //decoder = gst_element_factory_make ("mad", "decode");//  if (!decoder) {//    std_log(LOG_FILENAME_LINE, "could not find plugin mad");//    g_print ("could not find plugin /"mad/"");//    return -1;//  }  /* also, we need to add some converters to make sure the audio stream   * from the decoder is converted into a format the audio sink can   * understand (if necessary) *///  conv = gst_element_factory_make ("audioconvert", "audioconvert");//  if (!conv) {//    std_log(LOG_FILENAME_LINE, "could not create /"audioconvert/" element!");//    g_print ("could not create /"audioconvert/" element!");//    return -1;//  }//  resample = gst_element_factory_make ("audioresample", "audioresample");//  if (!conv) {//    std_log(LOG_FILENAME_LINE, "could not create /"audioresample/" element!");//    g_print ("could not create /"audioresample/" element!");//    return -1;//  }  /* and an audio sink */  audiosink = gst_element_factory_make ("devsoundsink", "play_audio");  g_assert (audiosink);  /* add objects to the main pipeline */  gst_bin_add_many (GST_BIN (bin), filesrc/*, decoder, conv,      resample*/, audiosink, NULL);  /* link the elements */  gst_element_link_many (filesrc, /*decoder, conv, resample,*/ audiosink, NULL);  /* start playing */  
std_log(LOG_FILENAME_LINE, "START PLAYING");  gst_element_set_state (bin, GST_STATE_PLAYING);  std_log(LOG_FILENAME_LINE, "STOP PLAYING ");  /* Run event loop listening for bus messages until EOS or ERROR */  event_loop (bin);  /* stop the bin */  std_log(LOG_FILENAME_LINE, "START BIN");  gst_element_set_state (bin, GST_STATE_NULL);  std_log(LOG_FILENAME_LINE, "START BIN");	std_log(LOG_FILENAME_LINE, "Test Successful");  create_xml(0);   exit (0);}
开发者ID:kuailexs,项目名称:symbiandump-mw1,代码行数:77,


示例29: gst_segment_init

bool GstEnginePipeline::Init() {  // Here we create all the parts of the gstreamer pipeline - from the source  // to the sink.  The parts of the pipeline are split up into bins:  //   uri decode bin -> audio bin  // The uri decode bin is a gstreamer builtin that automatically picks the  // right type of source and decoder for the URI.  // The audio bin gets created here and contains:  //   queue ! audioconvert ! <caps32>  //         ! ( rgvolume ! rglimiter ! audioconvert2 ) ! tee  // rgvolume and rglimiter are only created when replaygain is enabled.  // After the tee the pipeline splits.  One split is converted to 16-bit int  // samples for the scope, the other is kept as float32 and sent to the  // speaker.  //   tee1 ! probe_queue ! probe_converter ! <caps16> ! probe_sink  //   tee2 ! audio_queue ! equalizer_preamp ! equalizer ! volume ! audioscale  //        ! convert ! audiosink  gst_segment_init(&last_decodebin_segment_, GST_FORMAT_TIME);  // Audio bin  audiobin_ = gst_bin_new("audiobin");  gst_bin_add(GST_BIN(pipeline_), audiobin_);  // Create the sink  if (!(audiosink_ = engine_->CreateElement(sink_, audiobin_))) return false;  if (g_object_class_find_property(G_OBJECT_GET_CLASS(audiosink_), "device") &&      !device_.toString().isEmpty()) {    switch (device_.type()) {      case QVariant::Int:        g_object_set(G_OBJECT(audiosink_), "device", device_.toInt(), nullptr);        break;      case QVariant::String:        g_object_set(G_OBJECT(audiosink_), "device",                     device_.toString().toUtf8().constData(), nullptr);        break;#ifdef Q_OS_WIN32      case QVariant::ByteArray: {        GUID guid = QUuid(device_.toByteArray());        g_object_set(G_OBJECT(audiosink_), "device", &guid, nullptr);        break;      }#endif  // Q_OS_WIN32      default:        qLog(Warning) << "Unknown device type" << device_;        break;    }  }  // Create all the other elements  GstElement* tee, *probe_queue, *probe_converter, *probe_sink, *audio_queue,   
   *convert;  queue_ = engine_->CreateElement("queue2", audiobin_);  audioconvert_ = engine_->CreateElement("audioconvert", audiobin_);  tee = engine_->CreateElement("tee", audiobin_);  probe_queue = engine_->CreateElement("queue", audiobin_);  probe_converter = engine_->CreateElement("audioconvert", audiobin_);  probe_sink = engine_->CreateElement("fakesink", audiobin_);  audio_queue = engine_->CreateElement("queue", audiobin_);  equalizer_preamp_ = engine_->CreateElement("volume", audiobin_);  equalizer_ = engine_->CreateElement("equalizer-nbands", audiobin_);  stereo_panorama_ = engine_->CreateElement("audiopanorama", audiobin_);  volume_ = engine_->CreateElement("volume", audiobin_);  audioscale_ = engine_->CreateElement("audioresample", audiobin_);  convert = engine_->CreateElement("audioconvert", audiobin_);  if (!queue_ || !audioconvert_ || !tee || !probe_queue || !probe_converter ||      !probe_sink || !audio_queue || !equalizer_preamp_ || !equalizer_ ||      !stereo_panorama_ || !volume_ || !audioscale_ || !convert) {    return false;  }  // Create the replaygain elements if it's enabled.  event_probe is the  // audioconvert element we attach the probe to, which will change depending  // on whether replaygain is enabled.  convert_sink is the element after the  // first audioconvert, which again will change.  GstElement* event_probe = audioconvert_;  GstElement* convert_sink = tee;  if (rg_enabled_) {    rgvolume_ = engine_->CreateElement("rgvolume", audiobin_);    rglimiter_ = engine_->CreateElement("rglimiter", audiobin_);    audioconvert2_ = engine_->CreateElement("audioconvert", audiobin_);    event_probe = audioconvert2_;    convert_sink = rgvolume_;    if (!rgvolume_ || !rglimiter_ || !audioconvert2_) {      return false;    }    // Set replaygain settings    g_object_set(G_OBJECT(rgvolume_), "album-mode", rg_mode_, nullptr);    g_object_set(G_OBJECT(rgvolume_), "pre-amp", double(rg_preamp_), nullptr);//.........这里部分代码省略.........
开发者ID:jonathanchristison,项目名称:Clementine,代码行数:101,


示例30: main

gintmain (gint argc, gchar * argv[]){  GstStateChangeReturn ret;  GstElement *pipeline;  GstElement *filter, *sink;  GstElement *sourcebin;  GError *error = NULL;  GtkWidget *window;  GtkWidget *screen;  GtkWidget *vbox, *combo;  GtkWidget *hbox;  GtkWidget *play, *pause, *null, *ready;  gchar **source_desc_array = NULL;  gchar *source_desc = NULL;  GOptionContext *context;  GOptionEntry options[] = {    {"source-bin", 's', 0, G_OPTION_ARG_STRING_ARRAY, &source_desc_array,        "Use a custom source bin description (gst-launch style)", NULL}    ,    {NULL}  };  context = g_option_context_new (NULL);  g_option_context_add_main_entries (context, options, NULL);  g_option_context_add_group (context, gst_init_get_option_group ());  g_option_context_add_group (context, gtk_get_option_group (TRUE));  if (!g_option_context_parse (context, &argc, &argv, &error)) {    g_print ("Inizialization error: %s/n", GST_STR_NULL (error->message));    return -1;  }  g_option_context_free (context);  if (source_desc_array != NULL) {    source_desc = g_strjoinv (" ", source_desc_array);    g_strfreev (source_desc_array);  }  if (source_desc == NULL) {    source_desc =        g_strdup        ("videotestsrc ! video/x-raw, width=352, height=288 ! 
identity");  }  sourcebin =      gst_parse_bin_from_description (g_strdup (source_desc), TRUE, &error);  g_free (source_desc);  if (error) {    g_print ("Error while parsing source bin description: %s/n",        GST_STR_NULL (error->message));    return -1;  }  g_set_application_name ("gst-gl-effects test app");  window = gtk_window_new (GTK_WINDOW_TOPLEVEL);  gtk_container_set_border_width (GTK_CONTAINER (window), 3);  pipeline = gst_pipeline_new ("pipeline");  filter = gst_element_factory_make ("gleffects", "flt");  sink = gst_element_factory_make ("glimagesink", "glsink");  gst_bin_add_many (GST_BIN (pipeline), sourcebin, filter, sink, NULL);  if (!gst_element_link_many (sourcebin, filter, sink, NULL)) {    g_print ("Failed to link one or more elements!/n");    return -1;  }  g_signal_connect (G_OBJECT (window), "delete-event",      G_CALLBACK (destroy_cb), pipeline);  g_signal_connect (G_OBJECT (window), "destroy-event",      G_CALLBACK (destroy_cb), pipeline);  screen = gtk_drawing_area_new ();  gtk_widget_set_size_request (screen, 640, 480);       // 500 x 376  vbox = gtk_box_new (GTK_ORIENTATION_VERTICAL, 2);  gtk_box_pack_start (GTK_BOX (vbox), screen, TRUE, TRUE, 0);  combo = gtk_combo_box_text_new ();  gtk_combo_box_text_append_text (GTK_COMBO_BOX_TEXT (combo), "identity");  gtk_combo_box_text_append_text (GTK_COMBO_BOX_TEXT (combo), "mirror");  gtk_combo_box_text_append_text (GTK_COMBO_BOX_TEXT (combo), "squeeze");  gtk_combo_box_text_append_text (GTK_COMBO_BOX_TEXT (combo), "stretch");  gtk_combo_box_text_append_text (GTK_COMBO_BOX_TEXT (combo), "fisheye");  gtk_combo_box_text_append_text (GTK_COMBO_BOX_TEXT (combo), "twirl");  gtk_combo_box_text_append_text (GTK_COMBO_BOX_TEXT (combo), "bulge");  gtk_combo_box_text_append_text (GTK_COMBO_BOX_TEXT (combo), "tunnel");  gtk_combo_box_text_append_text (GTK_COMBO_BOX_TEXT (combo), "square");  gtk_combo_box_text_append_text (GTK_COMBO_BOX_TEXT (combo), "heat");  gtk_combo_box_text_append_text 
(GTK_COMBO_BOX_TEXT (combo), "xpro");  gtk_combo_box_text_append_text (GTK_COMBO_BOX_TEXT (combo), "lumaxpro");  gtk_combo_box_text_append_text (GTK_COMBO_BOX_TEXT (combo), "sepia");//.........这里部分代码省略.........
开发者ID:freedesktop-unofficial-mirror,项目名称:gstreamer__attic__gst-plugins-gl,代码行数:101,



注:本文中的GST_BIN函数示例整理自Github/MSDocs等源码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。


C++ GST_BUFFER_CAPS函数代码示例
C++ GST_BASE_TRANSFORM_CLASS函数代码示例
万事OK自学网:51自学网_软件自学网_CAD自学网自学excel、自学PS、自学CAD、自学C语言、自学css3实例,是一个通过网络自主学习工作技能的自学平台,网友喜欢的软件自学网站。