您当前的位置:首页 > IT编程 > C++
| C语言 | Java | VB | VC | python | Android | TensorFlow | C++ | oracle | 学术与代码 | cnn卷积神经网络 | gnn | 图像修复 | Keras | 数据集 | Neo4j | 自然语言处理 | 深度学习 | 医学CAD | 医学影像 | 超参数 | pointnet | pytorch | 异常检测 | Transformers | 情感分类 | 知识图谱 |

自学教程:C++ GST_BUFFER_DTS函数代码示例

51自学网 2021-06-01 20:55:57
  C++
这篇教程C++ GST_BUFFER_DTS函数代码示例写得很实用,希望能帮到您。

本文整理汇总了C++中GST_BUFFER_DTS函数的典型用法代码示例。如果您正苦于以下问题:C++ GST_BUFFER_DTS函数的具体用法?C++ GST_BUFFER_DTS怎么用?C++ GST_BUFFER_DTS使用的例子?那么恭喜您, 这里精选的函数代码示例或许可以为您提供帮助。

在下文中一共展示了GST_BUFFER_DTS函数的29个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于我们的系统推荐出更棒的C++代码示例。

示例1: gst_adapter_prev_dts_at_offset

/**
 * gst_adapter_prev_dts_at_offset:
 * @adapter: a #GstAdapter
 * @offset: the offset in the adapter at which to get timestamp
 * @distance: (out) (allow-none): pointer to location for distance, or %NULL
 *
 * Get the dts that was before the byte at offset @offset in the adapter. When
 * @distance is given, the amount of bytes between the dts and the current
 * position is returned.
 *
 * The dts is reset to GST_CLOCK_TIME_NONE and the distance is set to 0 when
 * the adapter is first created or when it is cleared. This also means that before
 * the first byte with a dts is removed from the adapter, the dts
 * and distance returned are GST_CLOCK_TIME_NONE and 0 respectively.
 *
 * Since: 1.2
 * Returns: The previously seen dts at given offset.
 */
GstClockTime
gst_adapter_prev_dts_at_offset (GstAdapter * adapter, gsize offset,
    guint64 * distance)
{
  GstBuffer *cur;
  GSList *g;
  gsize read_offset = 0;
  GstClockTime dts = adapter->dts;

  g_return_val_if_fail (GST_IS_ADAPTER (adapter), GST_CLOCK_TIME_NONE);

  g = adapter->buflist;
  /* Walk the buffer list until @offset bytes (plus the skipped prefix) are
   * covered, remembering the last valid DTS seen along the way. */
  while (g && read_offset < offset + adapter->skip) {
    cur = g->data;
    read_offset += gst_buffer_get_size (cur);
    if (GST_CLOCK_TIME_IS_VALID (GST_BUFFER_DTS (cur))) {
      dts = GST_BUFFER_DTS (cur);
    }
    g = g_slist_next (g);
  }

  if (distance)
    *distance = adapter->dts_distance + offset;

  return dts;
}
开发者ID:Kurento,项目名称:gstreamer,代码行数:47,


示例2: gst_rtp_opus_pay_handle_buffer

/* Payload one Opus buffer: wrap it in a fresh RTP buffer, copy metadata via
 * foreach_metadata, restore the original timestamps, and push downstream. */
static GstFlowReturn
gst_rtp_opus_pay_handle_buffer (GstRTPBasePayload * basepayload,
    GstBuffer * buffer)
{
  GstBuffer *outbuf;
  GstClockTime pts, dts, duration;
  CopyMetaData data;

  /* Save the timestamps before @buffer is appended (and thus consumed). */
  pts = GST_BUFFER_PTS (buffer);
  dts = GST_BUFFER_DTS (buffer);
  duration = GST_BUFFER_DURATION (buffer);

  outbuf = gst_rtp_buffer_new_allocate (0, 0, 0);

  data.pay = GST_RTP_OPUS_PAY (basepayload);
  data.outbuf = outbuf;
  gst_buffer_foreach_meta (buffer, foreach_metadata, &data);
  outbuf = gst_buffer_append (outbuf, buffer);

  GST_BUFFER_PTS (outbuf) = pts;
  GST_BUFFER_DTS (outbuf) = dts;
  GST_BUFFER_DURATION (outbuf) = duration;

  /* Push out */
  return gst_rtp_base_payload_push (basepayload, outbuf);
}
开发者ID:Kurento,项目名称:gst-plugins-good,代码行数:25,


示例3: process_buffer_locked

/* Stamp sequence number, SSRC and readjusted RTP timestamp onto @rtpbuffer,
 * then map its buffer PTS/DTS into running time against the pad's segment.
 * Returns FALSE if the subclass rejects the buffer.  Caller holds the lock. */
static gboolean
process_buffer_locked (GstRTPMux * rtp_mux, GstRTPMuxPadPrivate * padpriv,
    GstRTPBuffer * rtpbuffer)
{
  GstRTPMuxClass *klass = GST_RTP_MUX_GET_CLASS (rtp_mux);

  /* Give the subclass a chance to refuse this buffer. */
  if (klass->accept_buffer_locked)
    if (!klass->accept_buffer_locked (rtp_mux, padpriv, rtpbuffer))
      return FALSE;

  rtp_mux->seqnum++;
  gst_rtp_buffer_set_seq (rtpbuffer, rtp_mux->seqnum);

  gst_rtp_buffer_set_ssrc (rtpbuffer, rtp_mux->current_ssrc);
  gst_rtp_mux_readjust_rtp_timestamp_locked (rtp_mux, padpriv, rtpbuffer);
  GST_LOG_OBJECT (rtp_mux,
      "Pushing packet size %" G_GSIZE_FORMAT ", seq=%d, ts=%u",
      rtpbuffer->map[0].size, rtp_mux->seqnum,
      gst_rtp_buffer_get_timestamp (rtpbuffer));

  if (padpriv) {
    if (padpriv->segment.format == GST_FORMAT_TIME) {
      /* Convert PTS/DTS to running time for a TIME-format segment. */
      GST_BUFFER_PTS (rtpbuffer->buffer) =
          gst_segment_to_running_time (&padpriv->segment, GST_FORMAT_TIME,
          GST_BUFFER_PTS (rtpbuffer->buffer));
      GST_BUFFER_DTS (rtpbuffer->buffer) =
          gst_segment_to_running_time (&padpriv->segment, GST_FORMAT_TIME,
          GST_BUFFER_DTS (rtpbuffer->buffer));
    }
  }

  return TRUE;
}
开发者ID:hizukiayaka,项目名称:gst-plugins-good,代码行数:33,


示例4: mpegpsmux_queue_buffer_for_stream

/* Peek the next buffer for @ps_data from the collect pads, run any
 * prepare/conversion function, and compute running-time PTS/DTS plus a
 * combined queue timestamp.  Returns the (possibly prepared) buffer, or
 * NULL when the pad has no data queued. */
static GstBuffer *
mpegpsmux_queue_buffer_for_stream (MpegPsMux * mux, MpegPsPadData * ps_data)
{
  GstCollectData *c_data = (GstCollectData *) ps_data;
  GstBuffer *buf;

  g_assert (ps_data->queued.buf == NULL);

  buf = gst_collect_pads_peek (mux->collect, c_data);
  if (buf == NULL)
    return NULL;

  ps_data->queued.buf = buf;

  /* do any raw -> byte-stream format conversions (e.g. for H.264, AAC) */
  if (ps_data->prepare_func) {
    buf = ps_data->prepare_func (buf, ps_data, mux);
    if (buf) {                  /* Take the prepared buffer instead */
      gst_buffer_unref (ps_data->queued.buf);
      ps_data->queued.buf = buf;
    } else {                    /* If data preparation returned NULL, use unprepared one */
      buf = ps_data->queued.buf;
    }
  }

  /* Translate the buffer PTS/DTS into running time where valid. */
  ps_data->queued.pts = GST_BUFFER_PTS (buf);
  if (GST_CLOCK_TIME_IS_VALID (ps_data->queued.pts)) {
    ps_data->queued.pts = gst_segment_to_running_time (&c_data->segment,
        GST_FORMAT_TIME, ps_data->queued.pts);
  }
  ps_data->queued.dts = GST_BUFFER_DTS (buf);
  if (GST_CLOCK_TIME_IS_VALID (ps_data->queued.dts)) {
    ps_data->queued.dts = gst_segment_to_running_time (&c_data->segment,
        GST_FORMAT_TIME, ps_data->queued.dts);
  }

  /* Combined queue timestamp: the earlier of DTS/PTS when both exist,
   * whichever one is valid otherwise, NONE when neither is. */
  if (GST_BUFFER_PTS_IS_VALID (buf) && GST_BUFFER_DTS_IS_VALID (buf)) {
    ps_data->queued.ts = MIN (ps_data->queued.dts, ps_data->queued.pts);
  } else if (GST_BUFFER_PTS_IS_VALID (buf) && !GST_BUFFER_DTS_IS_VALID (buf)) {
    ps_data->queued.ts = ps_data->queued.pts;
  } else if (GST_BUFFER_DTS_IS_VALID (buf) && !GST_BUFFER_PTS_IS_VALID (buf)) {
    GST_WARNING_OBJECT (c_data->pad, "got DTS without PTS");
    ps_data->queued.ts = ps_data->queued.dts;
  } else {
    ps_data->queued.ts = GST_CLOCK_TIME_NONE;
  }

  GST_DEBUG_OBJECT (mux, "Queued buffer with ts %" GST_TIME_FORMAT ": "
      "uncorrected pts %" GST_TIME_FORMAT " dts %" GST_TIME_FORMAT ", "
      "buffer pts %" GST_TIME_FORMAT " dts %" GST_TIME_FORMAT " for PID 0x%04x",
      GST_TIME_ARGS (ps_data->queued.ts),
      GST_TIME_ARGS (GST_BUFFER_PTS (buf)),
      GST_TIME_ARGS (GST_BUFFER_DTS (buf)),
      GST_TIME_ARGS (ps_data->queued.pts),
      GST_TIME_ARGS (ps_data->queued.dts), ps_data->stream_id);

  return buf;
}
开发者ID:PeterXu,项目名称:gst-mobile,代码行数:59,


示例5: handle_buffer_measuring

/* While the part reader is collecting/measuring streams, track the maximum
 * end timestamp seen on @part_pad so the overall part duration can be
 * measured.  Does not consume a reference to @buf; the caller still owns it. */
static void
handle_buffer_measuring (GstSplitMuxPartReader * reader,
    GstSplitMuxPartPad * part_pad, GstBuffer * buf)
{
  GstClockTime ts = GST_CLOCK_TIME_NONE;
  GstClockTimeDiff offset;

  if (reader->prep_state == PART_STATE_PREPARING_COLLECT_STREAMS &&
      !part_pad->seen_buffer) {
    /* If this is the first buffer on the pad in the collect_streams state,
     * then calculate inital offset based on running time of this segment */
    part_pad->initial_ts_offset =
        part_pad->orig_segment.start + part_pad->orig_segment.base -
        part_pad->orig_segment.time;
    GST_DEBUG_OBJECT (reader,
        "Initial TS offset for pad %" GST_PTR_FORMAT " now %" GST_TIME_FORMAT,
        part_pad, GST_TIME_ARGS (part_pad->initial_ts_offset));
  }
  part_pad->seen_buffer = TRUE;

  /* Adjust buffer timestamps */
  offset = reader->start_offset + part_pad->segment.base;
  offset -= part_pad->initial_ts_offset;

  /* Update the stored max duration on the pad,
   * always preferring making DTS contiguous
   * where possible */
  if (GST_BUFFER_DTS_IS_VALID (buf))
    ts = GST_BUFFER_DTS (buf) + offset;
  else if (GST_BUFFER_PTS_IS_VALID (buf))
    ts = GST_BUFFER_PTS (buf) + offset;

  /* BUG FIX: the PTS and DTS arguments were swapped relative to the
   * "PTS ... DTS ..." labels in the format string, producing a
   * misleading debug log. */
  GST_DEBUG_OBJECT (reader, "Pad %" GST_PTR_FORMAT
      " incoming PTS %" GST_TIME_FORMAT
      " DTS %" GST_TIME_FORMAT " offset by %" GST_STIME_FORMAT
      " to %" GST_TIME_FORMAT, part_pad,
      GST_TIME_ARGS (GST_BUFFER_PTS (buf)),
      GST_TIME_ARGS (GST_BUFFER_DTS (buf)),
      GST_STIME_ARGS (offset), GST_TIME_ARGS (ts));

  if (GST_CLOCK_TIME_IS_VALID (ts)) {
    /* Extend to the buffer's end time when a duration is available. */
    if (GST_BUFFER_DURATION_IS_VALID (buf))
      ts += GST_BUFFER_DURATION (buf);

    if (GST_CLOCK_TIME_IS_VALID (ts) && ts > part_pad->max_ts) {
      part_pad->max_ts = ts;
      GST_LOG_OBJECT (reader,
          "pad %" GST_PTR_FORMAT " max TS now %" GST_TIME_FORMAT, part_pad,
          GST_TIME_ARGS (part_pad->max_ts));
    }
  }
  /* Is it time to move to measuring state yet? */
  check_if_pads_collected (reader);
}
开发者ID:ConfusedReality,项目名称:pkg_multimedia_gst-plugins-good,代码行数:54,


示例6: print_buffer_metadata

/* Dump a buffer's timing metadata, offsets and flags to stderr for
 * debugging. */
static void print_buffer_metadata(WebKitVideoSink* sink, GstBuffer* buffer)
{
    gchar dts_str[64], pts_str[64], dur_str[64];
    gchar flag_str[100];

    /* Format DTS/PTS/duration, falling back to "none" when unset. */
    if (GST_BUFFER_DTS (buffer) != GST_CLOCK_TIME_NONE) {
        g_snprintf (dts_str, sizeof (dts_str), "%" GST_TIME_FORMAT,
                    GST_TIME_ARGS (GST_BUFFER_DTS (buffer)));
    } else {
        g_strlcpy (dts_str, "none", sizeof (dts_str));
    }

    if (GST_BUFFER_PTS (buffer) != GST_CLOCK_TIME_NONE) {
        g_snprintf (pts_str, sizeof (pts_str), "%" GST_TIME_FORMAT,
                    GST_TIME_ARGS (GST_BUFFER_PTS (buffer)));
    } else {
        g_strlcpy (pts_str, "none", sizeof (pts_str));
    }

    if (GST_BUFFER_DURATION (buffer) != GST_CLOCK_TIME_NONE) {
        g_snprintf (dur_str, sizeof (dur_str), "%" GST_TIME_FORMAT,
                    GST_TIME_ARGS (GST_BUFFER_DURATION (buffer)));
    } else {
        g_strlcpy (dur_str, "none", sizeof (dur_str));
    }

    {
        /* Names for each buffer-flag bit; empty entries are unused bits. */
        const char *flag_list[15] = {
            "", "", "", "", "live", "decode-only", "discont", "resync", "corrupted",
            "marker", "header", "gap", "droppable", "delta-unit", "in-caps"
        };
        guint i;
        char *end = flag_str;

        /* BUG FIX: the original used the multi-character literal '/0' instead
         * of the NUL terminator '\0', so flag_str was never properly
         * terminated (same for the intermediate terminators below). */
        end[0] = '\0';
        for (i = 0; i < G_N_ELEMENTS (flag_list); i++) {
            if (GST_MINI_OBJECT_CAST (buffer)->flags & (1 << i)) {
                strcpy (end, flag_list[i]);
                end += strlen (end);
                end[0] = ' ';
                end[1] = '\0';
                end++;
            }
        }
    }

    /* BUG FIX: "%p/n" -> "%p\n" so the log line actually ends in a newline. */
    g_printerr ("chain   ******* (%s:%s) (%u bytes, dts: %s, pts: %s"
                ", duration: %s, offset: %" G_GINT64_FORMAT ", offset_end: %"
                G_GINT64_FORMAT ", flags: %08x %s) %p\n",
                GST_DEBUG_PAD_NAME (GST_BASE_SINK_CAST (sink)->sinkpad),
                (guint) gst_buffer_get_size (buffer), dts_str, pts_str,
                dur_str, GST_BUFFER_OFFSET (buffer), GST_BUFFER_OFFSET_END (buffer),
                GST_MINI_OBJECT_CAST (buffer)->flags, flag_str, buffer);
}
开发者ID:ceyusa,项目名称:gst-wk,代码行数:53,


示例7: gst_imx_blitter_video_transform_copy_metadata

/* GstBaseTransform copy_metadata vfunc: copy only timestamps and flags from
 * @input to @outbuf; deliberately not the rest of the buffer metadata. */
static gboolean gst_imx_blitter_video_transform_copy_metadata(G_GNUC_UNUSED GstBaseTransform *trans, GstBuffer *input, GstBuffer *outbuf)
{
	/* Only copy timestamps; the rest of the metadata must not be copied */
	GST_BUFFER_DTS(outbuf) = GST_BUFFER_DTS(input);
	GST_BUFFER_PTS(outbuf) = GST_BUFFER_PTS(input);

	/* For GStreamer 1.3.1 and newer, make sure the GST_BUFFER_FLAG_TAG_MEMORY flag
	 * isn't copied, otherwise the output buffer will be reallocated all the time */
	GST_BUFFER_FLAGS(outbuf) = GST_BUFFER_FLAGS(input);
#if GST_CHECK_VERSION(1, 3, 1)
	GST_BUFFER_FLAG_UNSET(outbuf, GST_BUFFER_FLAG_TAG_MEMORY);
#endif

	return TRUE;
}
开发者ID:merics,项目名称:gstreamer-imx,代码行数:15,


示例8: generate_test_buffer

/* Build a 10-byte RTP test buffer with payload type 0, the given sequence
 * number and SSRC, timestamps derived from @seq_num, and an all-0xff
 * payload. */
static GstBuffer *
generate_test_buffer (guint seq_num, guint ssrc)
{
    GstBuffer *buf;
    guint8 *payload;
    guint i;
    GstRTPBuffer rtp = GST_RTP_BUFFER_INIT;
    gsize size = 10;

    buf = gst_rtp_buffer_new_allocate (size, 0, 0);
    /* 20 ms of clock time per packet; RTP timestamp advances 160 per packet. */
    GST_BUFFER_DTS (buf) = GST_MSECOND * 20 * seq_num;
    GST_BUFFER_PTS (buf) = GST_MSECOND * 20 * seq_num;

    gst_rtp_buffer_map (buf, GST_MAP_READWRITE, &rtp);
    gst_rtp_buffer_set_payload_type (&rtp, 0);
    gst_rtp_buffer_set_seq (&rtp, seq_num);
    gst_rtp_buffer_set_timestamp (&rtp, 160 * seq_num);
    gst_rtp_buffer_set_ssrc (&rtp, ssrc);

    payload = gst_rtp_buffer_get_payload (&rtp);
    for (i = 0; i < size; i++)
        payload[i] = 0xff;

    gst_rtp_buffer_unmap (&rtp);

    return buf;
}
开发者ID:pexip,项目名称:gst-plugins-good,代码行数:27,


示例9: gst_identity_update_last_message_for_buffer

/* Rebuild identity's "last-message" string describing @buf (size, DTS/PTS,
 * duration, offsets, flags) under the object lock, then notify listeners. */
static void
gst_identity_update_last_message_for_buffer (GstIdentity * identity,
    const gchar * action, GstBuffer * buf, gsize size)
{
  gchar dts_str[64], pts_str[64], dur_str[64];
  gchar *flag_str;

  GST_OBJECT_LOCK (identity);

  flag_str = gst_buffer_get_flags_string (buf);

  g_free (identity->last_message);
  identity->last_message = g_strdup_printf ("%s   ******* (%s:%s) "
      "(%" G_GSIZE_FORMAT " bytes, dts: %s, pts:%s, duration: %s, offset: %"
      G_GINT64_FORMAT ", " "offset_end: % " G_GINT64_FORMAT
      ", flags: %08x %s) %p", action,
      GST_DEBUG_PAD_NAME (GST_BASE_TRANSFORM_CAST (identity)->sinkpad), size,
      print_pretty_time (dts_str, sizeof (dts_str), GST_BUFFER_DTS (buf)),
      print_pretty_time (pts_str, sizeof (pts_str), GST_BUFFER_PTS (buf)),
      print_pretty_time (dur_str, sizeof (dur_str), GST_BUFFER_DURATION (buf)),
      GST_BUFFER_OFFSET (buf), GST_BUFFER_OFFSET_END (buf),
      GST_BUFFER_FLAGS (buf), flag_str, buf);

  g_free (flag_str);

  GST_OBJECT_UNLOCK (identity);

  /* Emit the property-change notification outside the lock. */
  gst_identity_notify_last_message (identity);
}
开发者ID:MathieuDuponchelle,项目名称:gstreamer,代码行数:28,


示例10: do_buffer_stats

/* Emit a tracer log entry describing a buffer flowing from @this_pad to
 * @that_pad: thread id, elapsed time, pad/element indices, and the buffer's
 * size, timestamps, duration and flags. */
static void
do_buffer_stats (GstStatsTracer * self, GstPad * this_pad,
    GstPadStats * this_pad_stats, GstPad * that_pad,
    GstPadStats * that_pad_stats, GstBuffer * buf, GstClockTime elapsed)
{
  GstElement *this_elem = get_real_pad_parent (this_pad);
  GstElementStats *this_elem_stats = get_element_stats (self, this_elem);
  GstElement *that_elem = get_real_pad_parent (that_pad);
  GstElementStats *that_elem_stats = get_element_stats (self, that_elem);

  /* TODO(ensonic): need a quark-table (shared with the tracer-front-ends?) */
  gst_tracer_log_trace (gst_structure_new ("buffer",
          "thread-id", G_TYPE_UINT, GPOINTER_TO_UINT (g_thread_self ()),
          "ts", G_TYPE_UINT64, elapsed,
          "pad-ix", G_TYPE_UINT, this_pad_stats->index,
          "elem-ix", G_TYPE_UINT, this_elem_stats->index,
          "peer-pad-ix", G_TYPE_UINT, that_pad_stats->index,
          "peer-elem-ix", G_TYPE_UINT, that_elem_stats->index,
          "buffer-size", G_TYPE_UINT, gst_buffer_get_size (buf),
          "buffer-pts", G_TYPE_UINT64, GST_BUFFER_PTS (buf),
          "buffer-dts", G_TYPE_UINT64, GST_BUFFER_DTS (buf),
          "buffer-duration", G_TYPE_UINT64, GST_BUFFER_DURATION (buf),
          "buffer-flags", GST_TYPE_BUFFER_FLAGS, GST_BUFFER_FLAGS (buf),
          /*
             scheduling-jitter: for this we need the last_ts on the pad
           */
          NULL));
}
开发者ID:carlo0815,项目名称:gstreamer1.7.1,代码行数:28,


示例11: play_push_func

/* Wrap one MIDI event (status byte + optional data) in a buffer stamped with
 * the current segment position and push it on the source pad. */
static GstFlowReturn
play_push_func (GstMidiParse * midiparse, GstMidiTrack * track,
    guint8 event, guint8 * data, guint length, gpointer user_data)
{
  GstBuffer *outbuf;
  GstMapInfo info;
  GstClockTime position;

  /* One extra byte for the event/status byte before the data payload. */
  outbuf = gst_buffer_new_allocate (NULL, length + 1, NULL);

  gst_buffer_map (outbuf, &info, GST_MAP_WRITE);
  info.data[0] = event;
  if (length)
    memcpy (&info.data[1], data, length);
  gst_buffer_unmap (outbuf, &info);

  position = midiparse->segment.position;
  GST_BUFFER_PTS (outbuf) = position;
  GST_BUFFER_DTS (outbuf) = position;

  GST_DEBUG_OBJECT (midiparse, "pushing %" GST_TIME_FORMAT,
      GST_TIME_ARGS (position));

  /* First buffer after a discontinuity carries the DISCONT flag. */
  if (midiparse->discont) {
    GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DISCONT);
    midiparse->discont = FALSE;
  }

  return gst_pad_push (midiparse->srcpad, outbuf);
}
开发者ID:cbetz421,项目名称:gst-plugins-bad,代码行数:30,


示例12: gst_vtdec_enqueue_frame

/* VideoToolbox decode callback: wrap the decoded CVBuffer as a GstBuffer
 * with metadata copied from the source buffer, queue it, and mark the
 * internal queue for flushing once reordering has completed. */
static void
gst_vtdec_enqueue_frame (void *data1, void *data2, VTStatus result,
    VTDecodeInfoFlags info, CVBufferRef cvbuf, CMTime pts, CMTime duration)
{
  GstVTDec *self = GST_VTDEC_CAST (data1);
  GstBuffer *src_buf = GST_BUFFER (data2);
  GstBuffer *buf;

  if (result != kVTSuccess) {
    GST_ERROR_OBJECT (self, "Error decoding frame %d", result);
    goto beach;
  }

  if (kVTDecodeInfo_FrameDropped & info) {
    GST_WARNING_OBJECT (self, "Frame dropped");
    goto beach;
  }

  buf = gst_core_video_buffer_new (cvbuf, &self->vinfo);
  gst_buffer_copy_into (buf, src_buf, GST_BUFFER_COPY_METADATA, 0, -1);
  GST_BUFFER_PTS (buf) = pts.value;
  GST_BUFFER_DURATION (buf) = duration.value;

  g_queue_push_head (self->cur_outbufs, buf);

  /* PTS <= DTS on the source buffer is treated as the signal that frame
   * reordering is complete and queued output can be flushed. */
  if (GST_BUFFER_PTS (src_buf) <= GST_BUFFER_DTS (src_buf)) {
    /* FIX: corrected log-message typo "interal" -> "internal". */
    GST_LOG_OBJECT (self, "Flushing internal queue of buffers");
    self->flush = TRUE;
  } else {
    GST_LOG_OBJECT (self, "Queuing buffer");
  }

beach:
  return;
}
开发者ID:cbetz421,项目名称:gst-plugins-bad,代码行数:34,


示例13: generate_test_buffer_full

/* Build an RTP test buffer of TEST_BUF_SIZE 0xff bytes with explicit DTS,
 * sequence number, RTP timestamp and SSRC; payload type is TEST_BUF_PT. */
static GstBuffer *
generate_test_buffer_full (GstClockTime dts,
    guint seq_num, guint32 rtp_ts, guint ssrc)
{
  GstBuffer *buf;
  guint8 *payload;
  guint i;
  GstRTPBuffer rtp = GST_RTP_BUFFER_INIT;

  buf = gst_rtp_buffer_new_allocate (TEST_BUF_SIZE, 0, 0);
  /* Only DTS is stamped here; PTS is intentionally left unset. */
  GST_BUFFER_DTS (buf) = dts;

  gst_rtp_buffer_map (buf, GST_MAP_READWRITE, &rtp);
  gst_rtp_buffer_set_payload_type (&rtp, TEST_BUF_PT);
  gst_rtp_buffer_set_seq (&rtp, seq_num);
  gst_rtp_buffer_set_timestamp (&rtp, rtp_ts);
  gst_rtp_buffer_set_ssrc (&rtp, ssrc);

  payload = gst_rtp_buffer_get_payload (&rtp);
  for (i = 0; i < TEST_BUF_SIZE; i++)
    payload[i] = 0xff;

  gst_rtp_buffer_unmap (&rtp);

  return buf;
}
开发者ID:nnikos123,项目名称:gst-plugins-good,代码行数:26,


示例14: gst_pipeline_get_clock

// Push one echo-cancelled audio frame into the audio appsrc, optionally
// stamping it with pipeline-clock-derived timestamps when manual
// timestamping is enabled.
void ofxGstRTPServer::sendAudioOut(PooledAudioFrame * pooledFrame){
	// Current pipeline running time = clock time minus base time.
	GstClock * clock = gst_pipeline_get_clock(GST_PIPELINE(gst.getPipeline()));
	gst_object_ref(clock);
	GstClockTime now = gst_clock_get_time (clock) - gst_element_get_base_time(gst.getPipeline());
	gst_object_unref (clock);

	// The very first frame only establishes the timestamp baseline.
	if(firstAudioFrame && !audioAutoTimestamp){
		prevTimestampAudio = now;
		firstAudioFrame = false;
		return;
	}

	// Payload size in bytes: samples * 2 (16-bit) * channels.
	int size = pooledFrame->audioFrame._payloadDataLengthInSamples*2*pooledFrame->audioFrame._audioChannel;

	// Zero-copy wrap of the pooled frame; the destroy notify returns the
	// frame to the pool when GStreamer is done with it.
	GstBuffer * echoCancelledBuffer = gst_buffer_new_wrapped_full(GST_MEMORY_FLAG_READONLY,(void*)pooledFrame->audioFrame._payloadData,size,0,size,pooledFrame,(GDestroyNotify)&ofxWebRTCAudioPool::relaseFrame);

	if(!audioAutoTimestamp){
		GstClockTime duration = (pooledFrame->audioFrame._payloadDataLengthInSamples * GST_SECOND / pooledFrame->audioFrame._frequencyInHz);
		// NOTE(review): this inner 'now' shadows the pipeline-clock 'now'
		// computed above, and is derived from 'prevTimestamp' while the
		// value saved below goes to 'prevTimestampAudio' — looks
		// inconsistent; confirm which member is intended.
		GstClockTime now = prevTimestamp + duration;
		GST_BUFFER_OFFSET(echoCancelledBuffer) = numFrameAudio++;
		GST_BUFFER_OFFSET_END(echoCancelledBuffer) = numFrameAudio;
		GST_BUFFER_DTS (echoCancelledBuffer) = now;
		GST_BUFFER_PTS (echoCancelledBuffer) = now;
		GST_BUFFER_DURATION(echoCancelledBuffer) = duration;
		prevTimestampAudio = now;
	}

	// Hand the buffer to the appsrc; appsrc takes ownership.
	GstFlowReturn flow_return = gst_app_src_push_buffer((GstAppSrc*)appSrcAudio, echoCancelledBuffer);
	if (flow_return != GST_FLOW_OK) {
		ofLogError(LOG_NAME) << "error pushing audio buffer: flow_return was " << flow_return;
	}
}
开发者ID:anchowee,项目名称:ofxGstRTP,代码行数:33,


示例15: generate_test_buffer

/* Build an RTP test buffer of payload_size 0xff bytes with the given
 * PTS/DTS, marker bit, sequence number, RTP timestamp and SSRC. */
static GstBuffer *
generate_test_buffer (GstClockTime gst_ts,
    gboolean marker_bit, guint seq_num, guint32 rtp_ts, guint ssrc)
{
  GstBuffer *buf;
  guint8 *payload;
  guint i;
  GstRTPBuffer rtp = GST_RTP_BUFFER_INIT;

  buf = gst_rtp_buffer_new_allocate (payload_size, 0, 0);
  /* Stamp both DTS and PTS with the same GStreamer timestamp. */
  GST_BUFFER_DTS (buf) = gst_ts;
  GST_BUFFER_PTS (buf) = gst_ts;

  gst_rtp_buffer_map (buf, GST_MAP_READWRITE, &rtp);
  gst_rtp_buffer_set_payload_type (&rtp, payload_type);
  gst_rtp_buffer_set_marker (&rtp, marker_bit);
  gst_rtp_buffer_set_seq (&rtp, seq_num);
  gst_rtp_buffer_set_timestamp (&rtp, rtp_ts);
  gst_rtp_buffer_set_ssrc (&rtp, ssrc);

  payload = gst_rtp_buffer_get_payload (&rtp);
  for (i = 0; i < payload_size; i++)
    payload[i] = 0xff;

  gst_rtp_buffer_unmap (&rtp);

  return buf;
}
开发者ID:Distrotech,项目名称:gst-plugins-good,代码行数:28,


示例16: splitmux_part_pad_chain

/* Chain function for a part pad: during stream collection/measurement the
 * buffer is only measured and dropped; otherwise its timestamps are shifted
 * by the part's offset and it is placed on the pad's data queue.
 * Returns GST_FLOW_FLUSHING when the reader is shutting down. */
static GstFlowReturn
splitmux_part_pad_chain (GstPad * pad, GstObject * parent, GstBuffer * buf)
{
  GstSplitMuxPartPad *part_pad = SPLITMUX_PART_PAD_CAST (pad);
  GstSplitMuxPartReader *reader = part_pad->reader;
  GstDataQueueItem *item;
  GstClockTimeDiff offset;

  GST_LOG_OBJECT (reader, "Pad %" GST_PTR_FORMAT " %" GST_PTR_FORMAT, pad, buf);
  SPLITMUX_PART_LOCK (reader);

  /* In the preparing states buffers are only measured, not forwarded. */
  if (reader->prep_state == PART_STATE_PREPARING_COLLECT_STREAMS ||
      reader->prep_state == PART_STATE_PREPARING_MEASURE_STREAMS) {
    handle_buffer_measuring (reader, part_pad, buf);
    gst_buffer_unref (buf);
    SPLITMUX_PART_UNLOCK (reader);
    return GST_FLOW_OK;
  }

  if (!block_until_can_push (reader)) {
    /* Flushing */
    SPLITMUX_PART_UNLOCK (reader);
    gst_buffer_unref (buf);
    return GST_FLOW_FLUSHING;
  }

  /* Adjust buffer timestamps */
  offset = reader->start_offset + part_pad->segment.base;
  offset -= part_pad->initial_ts_offset;

  if (GST_BUFFER_PTS_IS_VALID (buf))
    GST_BUFFER_PTS (buf) += offset;
  if (GST_BUFFER_DTS_IS_VALID (buf))
    GST_BUFFER_DTS (buf) += offset;

  /* We are active, and one queue is empty, place this buffer in
   * the dataqueue */
  GST_LOG_OBJECT (reader, "Enqueueing buffer %" GST_PTR_FORMAT, buf);
  item = g_slice_new (GstDataQueueItem);
  item->destroy = (GDestroyNotify) splitmux_part_free_queue_item;
  item->object = GST_MINI_OBJECT (buf);
  item->size = gst_buffer_get_size (buf);
  item->duration = GST_BUFFER_DURATION (buf);
  if (item->duration == GST_CLOCK_TIME_NONE)
    item->duration = 0;
  item->visible = TRUE;

  /* Keep the pad alive across the unlocked queue push. */
  gst_object_ref (part_pad);

  SPLITMUX_PART_UNLOCK (reader);

  if (!gst_data_queue_push (part_pad->queue, item)) {
    splitmux_part_free_queue_item (item);
    gst_object_unref (part_pad);
    return GST_FLOW_FLUSHING;
  }

  gst_object_unref (part_pad);
  return GST_FLOW_OK;
}
开发者ID:ConfusedReality,项目名称:pkg_multimedia_gst-plugins-good,代码行数:60,


示例17: gst_fake_src_get_times

/* GstBaseSrc get_times vfunc: when syncing, derive start/end from the
 * buffer's timestamp (preferring DTS over PTS) and duration; otherwise
 * report no timing (-1). */
static void
gst_fake_src_get_times (GstBaseSrc * basesrc, GstBuffer * buffer,
    GstClockTime * start, GstClockTime * end)
{
  GstFakeSrc *src;

  src = GST_FAKE_SRC (basesrc);

  /* sync on the timestamp of the buffer if requested. */
  if (src->sync) {
    GstClockTime timestamp, duration;

    /* first sync on DTS, else use PTS */
    timestamp = GST_BUFFER_DTS (buffer);
    if (!GST_CLOCK_TIME_IS_VALID (timestamp))
      timestamp = GST_BUFFER_PTS (buffer);

    if (GST_CLOCK_TIME_IS_VALID (timestamp)) {
      /* get duration to calculate end time */
      duration = GST_BUFFER_DURATION (buffer);
      if (GST_CLOCK_TIME_IS_VALID (duration)) {
        *end = timestamp + duration;
      }
      *start = timestamp;
    }
  } else {
    *start = -1;
    *end = -1;
  }
}
开发者ID:MathieuDuponchelle,项目名称:gstreamer,代码行数:30,


示例18: create_timestamp_buffer

/* Allocate an empty buffer whose PTS and DTS are both set to @timestamp. */
static GstBuffer *
create_timestamp_buffer (GstClockTime timestamp)
{
  GstBuffer *buffer;

  buffer = gst_buffer_new ();
  GST_BUFFER_DTS (buffer) = timestamp;
  GST_BUFFER_PTS (buffer) = timestamp;

  return buffer;
}
开发者ID:pexip,项目名称:gstreamer,代码行数:8,


示例19: gst_imx_blitter_video_transform_copy_metadata

/* GstBaseTransform copy_metadata vfunc: copy timing and offset metadata
 * (PTS, DTS, duration, offsets) plus flags from @input to @outbuf. */
static gboolean gst_imx_blitter_video_transform_copy_metadata(G_GNUC_UNUSED GstBaseTransform *trans, GstBuffer *input, GstBuffer *outbuf)
{
	/* Copy PTS, DTS, duration, offset, offset-end
	 * These do not change in the videotransform operation */
	GST_BUFFER_DTS(outbuf) = GST_BUFFER_DTS(input);
	GST_BUFFER_PTS(outbuf) = GST_BUFFER_PTS(input);
	GST_BUFFER_DURATION(outbuf) = GST_BUFFER_DURATION(input);
	GST_BUFFER_OFFSET(outbuf) = GST_BUFFER_OFFSET(input);
	GST_BUFFER_OFFSET_END(outbuf) = GST_BUFFER_OFFSET_END(input);

	/* For GStreamer 1.3.1 and newer, make sure the GST_BUFFER_FLAG_TAG_MEMORY flag
	 * isn't copied, otherwise the output buffer will be reallocated all the time */
	GST_BUFFER_FLAGS(outbuf) = GST_BUFFER_FLAGS(input);
#if GST_CHECK_VERSION(1, 3, 1)
	GST_BUFFER_FLAG_UNSET(outbuf, GST_BUFFER_FLAG_TAG_MEMORY);
#endif

	return TRUE;
}
开发者ID:gale320,项目名称:gstreamer-imx,代码行数:19,


示例20: getTimeStamp

// Compress one depth frame and push it into the depth appsrc, optionally
// stamping it with explicit timestamps so GStreamer can sync it with audio.
void ofxGstRTPServer::newFrameDepth(ofShortPixels & pixels, GstClockTime timestamp, float pixel_size, float distance){
	//unsigned long long time = ofGetElapsedTimeMicros();
	// here we push new depth frames in the pipeline, it's important
	// to timestamp them properly so gstreamer can sync them with the
	// audio.
	if(!appSrcDepth) return;

	GstClockTime now = timestamp;
	if(!depthAutoTimestamp){
		// Fall back to the pipeline clock when no timestamp was supplied.
		if(now==GST_CLOCK_TIME_NONE){
			now = getTimeStamp();
		}

		// The very first frame only establishes the timestamp baseline.
		if(firstDepthFrame){
			prevTimestampDepth = now;
			firstDepthFrame = false;
			return;
		}
	}

	// Compress the depth image, then copy the compressed shorts into a
	// newly allocated GstBuffer.
	ofxDepthCompressedFrame frame = depthCompressor.newFrame(pixels,pixel_size,distance);
	GstBuffer * buffer = gst_buffer_new_allocate(NULL,frame.compressedData().size()*sizeof(short),NULL);

	GstMapInfo mapinfo;
	gst_buffer_map(buffer,&mapinfo,GST_MAP_WRITE);
	memcpy(mapinfo.data,&frame.compressedData()[0],frame.compressedData().size()*sizeof(short));
	gst_buffer_unmap(buffer,&mapinfo);

	// timestamp the buffer, right now we are using:
	// timestamp = current pipeline time - base time
	// duration = timestamp - previousTimeStamp
	// the duration is actually the duration of the previous frame
	// but should be accurate enough
	if(!depthAutoTimestamp){
		GST_BUFFER_OFFSET(buffer) = numFrameDepth++;
		GST_BUFFER_OFFSET_END(buffer) = numFrameDepth;
		GST_BUFFER_DTS (buffer) = now;
		GST_BUFFER_PTS (buffer) = now;
		GST_BUFFER_DURATION(buffer) = now-prevTimestampDepth;
		prevTimestampDepth = now;
	}

	if(sendDepthKeyFrame){
		//emitDepthKeyFrame();
	}

	// finally push the buffer into the pipeline through the appsrc element
	GstFlowReturn flow_return = gst_app_src_push_buffer((GstAppSrc*)appSrcDepth, buffer);
	if (flow_return != GST_FLOW_OK) {
		ofLogError() << "error pushing depth buffer: flow_return was " << flow_return;
	}
	//cout << "sending depth buffer with " << pixels.getWidth() << "," << pixels.getHeight() << " csize: " << frame.compressedData().size() << endl;
	//cout << ofGetElapsedTimeMicros() - time << endl;
}
开发者ID:anchowee,项目名称:ofxGstRTP,代码行数:55,


示例21: spice_gst_decoder_queue_frame

/* spice_gst_decoder_queue_frame() queues the SpiceFrame for decoding and
 * displaying. The steps it goes through are as follows:
 *
 * 1) A SpiceGstFrame is created to keep track of SpiceFrame and some additional
 *    metadata. The SpiceGstFrame is then pushed to the decoding_queue.
 * 2) frame->data, which contains the compressed frame data, is reffed and
 *    wrapped in a GstBuffer which is pushed to the GStreamer pipeline for
 *    decoding.
 * 3) As soon as the GStreamer pipeline no longer needs the compressed frame it
 *    will call frame->unref_data() to free it.
 * 4) Once the decompressed frame is available the GStreamer pipeline calls
 *    new_sample() in the GStreamer thread.
 * 5) new_sample() then matches the decompressed frame to a SpiceGstFrame from
 *    the decoding queue using the GStreamer timestamp information to deal with
 *    dropped frames. The SpiceGstFrame is popped from the decoding_queue.
 * 6) new_sample() then attaches the decompressed frame to the SpiceGstFrame,
 *    pushes it to the display_queue and calls schedule_frame().
 * 7) schedule_frame() then uses gstframe->frame->mm_time to arrange for
 *    display_frame() to be called, in the main thread, at the right time for
 *    the next frame.
 * 8) display_frame() pops the first SpiceGstFrame from the display_queue and
 *    calls stream_display_frame().
 * 9) display_frame() then frees the SpiceGstFrame, which frees the SpiceFrame
 *    and decompressed frame with it.
 */
static gboolean spice_gst_decoder_queue_frame(VideoDecoder *video_decoder,
                                              SpiceFrame *frame, int latency)
{
    SpiceGstDecoder *decoder = (SpiceGstDecoder*)video_decoder;

    if (frame->size == 0) {
        SPICE_DEBUG("got an empty frame buffer!");
        frame->free(frame);
        return TRUE;
    }

    if (frame->mm_time < decoder->last_mm_time) {
        SPICE_DEBUG("new-frame-time < last-frame-time (%u < %u):"
                    " resetting stream",
                    frame->mm_time, decoder->last_mm_time);
        /* Let GStreamer deal with the frame anyway */
    }
    decoder->last_mm_time = frame->mm_time;

    if (latency < 0 &&
        decoder->base.codec_type == SPICE_VIDEO_CODEC_TYPE_MJPEG) {
        /* Dropping MJPEG frames has no impact on those that follow and
         * saves CPU so do it.
         */
        SPICE_DEBUG("dropping a late MJPEG frame");
        frame->free(frame);
        return TRUE;
    }

    if (decoder->pipeline == NULL) {
        /* An error occurred, causing the GStreamer pipeline to be freed */
        spice_warning("An error occurred, stopping the video stream");
        return FALSE;
    }

    /* ref() the frame data for the buffer */
    frame->ref_data(frame->data_opaque);
    GstBuffer *buffer = gst_buffer_new_wrapped_full(GST_MEMORY_FLAG_PHYSICALLY_CONTIGUOUS,
                                                    frame->data, frame->size, 0, frame->size,
                                                    frame->data_opaque, frame->unref_data);

    /* PTS = pipeline running time plus the (non-negative) latency;
     * DTS/duration are left unset and handled by the decoder. */
    GST_BUFFER_DURATION(buffer) = GST_CLOCK_TIME_NONE;
    GST_BUFFER_DTS(buffer) = GST_CLOCK_TIME_NONE;
    GST_BUFFER_PTS(buffer) = gst_clock_get_time(decoder->clock) - gst_element_get_base_time(decoder->pipeline) + ((uint64_t)MAX(0, latency)) * 1000 * 1000;

    g_mutex_lock(&decoder->queues_mutex);
    g_queue_push_tail(decoder->decoding_queue, create_gst_frame(buffer, frame));
    g_mutex_unlock(&decoder->queues_mutex);

    if (gst_app_src_push_buffer(decoder->appsrc, buffer) != GST_FLOW_OK) {
        SPICE_DEBUG("GStreamer error: unable to push frame of size %u", frame->size);
        stream_dropped_frame_on_playback(decoder->base.stream);
    }

    return TRUE;
}
开发者ID:fgouget,项目名称:spice-gtk,代码行数:80,


示例22: create_buffer

/* Wrap @data (read-only, not copied, not freed) in a buffer whose timing
 * and offset fields are all explicitly unset. */
static GstBuffer *
create_buffer (guint8 * data, gsize size)
{
  GstBuffer *wrapped;

  wrapped = gst_buffer_new_wrapped_full (GST_MEMORY_FLAG_READONLY,
      data, size, 0, size, NULL, NULL);

  GST_BUFFER_PTS (wrapped) = GST_CLOCK_TIME_NONE;
  GST_BUFFER_DTS (wrapped) = GST_CLOCK_TIME_NONE;
  GST_BUFFER_DURATION (wrapped) = GST_CLOCK_TIME_NONE;
  GST_BUFFER_OFFSET (wrapped) = GST_BUFFER_OFFSET_NONE;
  GST_BUFFER_OFFSET_END (wrapped) = GST_BUFFER_OFFSET_NONE;

  return wrapped;
}
开发者ID:DylanZA,项目名称:gst-plugins-good,代码行数:12,


示例23: create_buffer

/* Wrap @data (read-only, not copied, not freed) in a buffer stamped with
 * @timestamp (both PTS and DTS) and @duration; offsets are zeroed. */
static GstBuffer *
create_buffer (guint8 * data, gsize size,
    GstClockTime timestamp, GstClockTime duration)
{
  GstBuffer *wrapped;

  wrapped = gst_buffer_new_wrapped_full (GST_MEMORY_FLAG_READONLY,
      data, size, 0, size, NULL, NULL);

  GST_BUFFER_DTS (wrapped) = timestamp;
  GST_BUFFER_PTS (wrapped) = timestamp;
  GST_BUFFER_DURATION (wrapped) = duration;
  GST_BUFFER_OFFSET (wrapped) = 0;
  GST_BUFFER_OFFSET_END (wrapped) = 0;

  return wrapped;
}
开发者ID:DylanZA,项目名称:gst-plugins-good,代码行数:13,


示例24: cm_sample_buffer_from_gst_buffer

/* Convert a GstBuffer into a CMSampleBuffer for VideoToolbox, mapping
 * PTS/DTS/duration to CMSampleTimingInfo (invalid where unset).  Returns
 * NULL and posts an element error on failure. */
static CMSampleBufferRef
cm_sample_buffer_from_gst_buffer (GstVtdec * vtdec, GstBuffer * buf)
{
  OSStatus status;
  CMBlockBufferRef bbuf = NULL;
  CMSampleBufferRef sbuf = NULL;
  CMSampleTimingInfo sample_timing;
  CMSampleTimingInfo time_array[1];

  g_return_val_if_fail (vtdec->format_description, NULL);

  /* create a block buffer */
  bbuf = cm_block_buffer_from_gst_buffer (buf, GST_MAP_READ);
  if (bbuf == NULL) {
    GST_ELEMENT_ERROR (vtdec, RESOURCE, FAILED, (NULL),
        ("failed creating CMBlockBuffer"));
    return NULL;
  }

  /* create a sample buffer */
  if (GST_BUFFER_DURATION_IS_VALID (buf))
    sample_timing.duration = CMTimeMake (GST_BUFFER_DURATION (buf), GST_SECOND);
  else
    sample_timing.duration = kCMTimeInvalid;

  if (GST_BUFFER_PTS_IS_VALID (buf))
    sample_timing.presentationTimeStamp =
        CMTimeMake (GST_BUFFER_PTS (buf), GST_SECOND);
  else
    sample_timing.presentationTimeStamp = kCMTimeInvalid;

  if (GST_BUFFER_DTS_IS_VALID (buf))
    sample_timing.decodeTimeStamp =
        CMTimeMake (GST_BUFFER_DTS (buf), GST_SECOND);
  else
    sample_timing.decodeTimeStamp = kCMTimeInvalid;

  time_array[0] = sample_timing;

  status =
      CMSampleBufferCreate (NULL, bbuf, TRUE, 0, 0, vtdec->format_description,
      1, 1, time_array, 0, NULL, &sbuf);
  /* The sample buffer holds its own reference to the block buffer. */
  CFRelease (bbuf);
  if (status != noErr) {
    GST_ELEMENT_ERROR (vtdec, RESOURCE, FAILED, (NULL),
        ("CMSampleBufferCreate returned %d", (int) status));
    return NULL;
  }

  return sbuf;
}
开发者ID:luisbg,项目名称:gst-plugins-bad,代码行数:51,


示例25: set_gstbuf_time_and_offset

/* Set buffer timing and offset data: stamp @p_buf's PTS with the element's
 * current running time (clock time minus base time); DTS, duration and
 * offsets are explicitly left unset. */
static void
set_gstbuf_time_and_offset(GstAndroidVideoSource * p_src, GstBuffer * p_buf)
{
    GstElement *p_element;
    GstClock *p_clock;
    GstClockTime now;
    GstClockTime base_time;

    GA_LOGTRACE("ENTER %s --xx--> thread(%ld)", __FUNCTION__, pthread_self());

    /* Grab the element clock and base time under the object lock. */
    p_element = GST_ELEMENT_CAST(p_src);
    GST_OBJECT_LOCK(p_element);
    p_clock = GST_ELEMENT_CLOCK(p_element);
    if (p_clock) {
        gst_object_ref(p_clock);
        base_time = p_element->base_time;
        GA_LOGTRACE("%s: base_time is: %llu", __FUNCTION__, base_time);
    } else {
        base_time = GST_CLOCK_TIME_NONE;
    }
    GST_OBJECT_UNLOCK(p_element);

    if (p_clock) {
        /* Wrap around is not considered a problem due to the clock being 64 bit (famous last words? :-) ) */
        now = gst_clock_get_time(p_clock) - base_time;
        GA_LOGTRACE("%s: gst_clock_get_time returns: %llu", __FUNCTION__, gst_clock_get_time(p_clock));
    } else {
        now = GST_CLOCK_TIME_NONE;
    }

    if (p_clock) {
        gst_object_unref(p_clock);
        p_clock = NULL;
    }

    GST_BUFFER_PTS(p_buf) = now;
    GST_BUFFER_DTS(p_buf) = GST_CLOCK_TIME_NONE;
    GST_BUFFER_DURATION(p_buf) = GST_CLOCK_TIME_NONE;
    GST_BUFFER_OFFSET(p_buf) = GST_BUFFER_OFFSET_NONE;
    GST_BUFFER_OFFSET_END(p_buf) = GST_BUFFER_OFFSET_NONE;

    GA_LOGTRACE("%s: setting presentation timestamp (GstBuffer) to: %llu (%"GST_TIME_FORMAT")", __FUNCTION__, now, GST_TIME_ARGS(now));
    GA_LOGTRACE("%s: m_prev_timestamp: %llu (%"GST_TIME_FORMAT")", __FUNCTION__, p_src->m_prev_timestamp, GST_TIME_ARGS(p_src->m_prev_timestamp));
    GA_LOGTRACE("%s: timestamp diff: %llu (%"GST_TIME_FORMAT")", __FUNCTION__, now - p_src->m_prev_timestamp, GST_TIME_ARGS(now - p_src->m_prev_timestamp));
    p_src->m_prev_timestamp = now;

    GA_LOGTRACE("EXIT %s", __FUNCTION__);
    return;
}
开发者ID:EricssonResearch,项目名称:openwebrtc-gst-plugins,代码行数:51,


示例26: default_reset_buffer

/* Default GstBufferPool reset_buffer vfunc: restore a recycled buffer to a
 * pristine state before it is handed out to the next user. */
static void
default_reset_buffer (GstBufferPool * pool, GstBuffer * buffer)
{
  /* Clear all flags and invalidate every timing/offset field. */
  GST_BUFFER_FLAGS (buffer) = 0;
  GST_BUFFER_PTS (buffer) = GST_CLOCK_TIME_NONE;
  GST_BUFFER_DTS (buffer) = GST_CLOCK_TIME_NONE;
  GST_BUFFER_DURATION (buffer) = GST_CLOCK_TIME_NONE;
  GST_BUFFER_OFFSET (buffer) = GST_BUFFER_OFFSET_NONE;
  GST_BUFFER_OFFSET_END (buffer) = GST_BUFFER_OFFSET_NONE;

  /* remove all metadata without the POOLED flag */
  gst_buffer_foreach_meta (buffer, remove_meta_unpooled, pool);
}
开发者ID:lubing521,项目名称:gst-embedded-builder,代码行数:14,


示例27: gst_gl_filtershader_filter

static gbooleangst_gl_filtershader_filter (GstGLFilter * filter, GstBuffer * inbuf,    GstBuffer * outbuf){  GstGLFilterShader *filtershader = GST_GL_FILTERSHADER (filter);  if (!_gst_clock_time_to_double (GST_BUFFER_PTS (inbuf), &filtershader->time)) {    if (!_gst_clock_time_to_double (GST_BUFFER_DTS (inbuf),            &filtershader->time))      _gint64_time_val_to_double (g_get_monotonic_time (), &filtershader->time);  }  return gst_gl_filter_filter_texture (filter, inbuf, outbuf);}
开发者ID:thaytan,项目名称:gst-plugins-base,代码行数:14,


示例28: set_headers

static gbooleanset_headers (GstBuffer ** buffer, guint idx, GstRTPBaseDepayload * depayload){  GstRTPBaseDepayloadPrivate *priv = depayload->priv;  GstClockTime pts, dts, duration;  *buffer = gst_buffer_make_writable (*buffer);  pts = GST_BUFFER_PTS (*buffer);  dts = GST_BUFFER_DTS (*buffer);  duration = GST_BUFFER_DURATION (*buffer);  /* apply last incomming timestamp and duration to outgoing buffer if   * not otherwise set. */  if (!GST_CLOCK_TIME_IS_VALID (pts))    GST_BUFFER_PTS (*buffer) = priv->pts;  if (!GST_CLOCK_TIME_IS_VALID (dts))    GST_BUFFER_DTS (*buffer) = priv->dts;  if (!GST_CLOCK_TIME_IS_VALID (duration))    GST_BUFFER_DURATION (*buffer) = priv->duration;  if (G_UNLIKELY (depayload->priv->discont)) {    GST_LOG_OBJECT (depayload, "Marking DISCONT on output buffer");    GST_BUFFER_FLAG_SET (*buffer, GST_BUFFER_FLAG_DISCONT);    depayload->priv->discont = FALSE;  }  /* make sure we only set the timestamp on the first packet */  priv->pts = GST_CLOCK_TIME_NONE;  priv->dts = GST_CLOCK_TIME_NONE;  priv->duration = GST_CLOCK_TIME_NONE;  if (priv->source_info && priv->input_buffer)    add_rtp_source_meta (*buffer, priv->input_buffer);  return TRUE;}
开发者ID:pexip,项目名称:gst-plugins-base,代码行数:37,


示例29: find_timestamp

static gbooleanfind_timestamp (GstBuffer ** buffer, guint idx, gpointer user_data){  HeaderData *data = user_data;  data->dts = GST_BUFFER_DTS (*buffer);  data->pts = GST_BUFFER_PTS (*buffer);  data->offset = GST_BUFFER_OFFSET (*buffer);  /* stop when we find a timestamp. We take whatever offset is associated with   * the timestamp (if any) to do perfect timestamps when we need to. */  if (data->pts != -1)    return FALSE;  else    return TRUE;}
开发者ID:PeterXu,项目名称:gst-mobile,代码行数:15,



注:本文中的GST_BUFFER_DTS函数示例整理自Github/MSDocs等源码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。


C++ GST_BUFFER_DURATION函数代码示例
C++ GST_BUFFER_DATA函数代码示例
万事OK自学网:51自学网_软件自学网_CAD自学网自学excel、自学PS、自学CAD、自学C语言、自学css3实例,是一个通过网络自主学习工作技能的自学平台,网友喜欢的软件自学网站。