
Tutorial: C++ GST_CLOCK_TIME_IS_VALID Code Examples

51zixue.net, 2021-06-01 20:56:10

This tutorial on C++ GST_CLOCK_TIME_IS_VALID code examples is quite practical; we hope it helps you.

This article collects typical usage examples of GST_CLOCK_TIME_IS_VALID in C and C++ code. GST_CLOCK_TIME_IS_VALID is a GStreamer macro that checks whether a GstClockTime holds a valid timestamp. If you have been wondering what exactly GST_CLOCK_TIME_IS_VALID does, how to use it, or where to find real-world examples, the hand-picked code samples below should help.

Shown below are 27 code examples of GST_CLOCK_TIME_IS_VALID, sorted by popularity by default. You can upvote the examples you like or find useful; your ratings help the system recommend better C++ code examples.
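Before diving into the examples, a minimal sketch of the macro's semantics may help. In GStreamer, an unset GstClockTime carries the sentinel value GST_CLOCK_TIME_NONE, and GST_CLOCK_TIME_IS_VALID (time) is defined as ((time) != GST_CLOCK_TIME_NONE). The helper below is our own illustration; print_buffer_pts is a hypothetical name and is not taken from any of the projects quoted later:

#include <gst/gst.h>

/* Hypothetical helper: print a buffer's presentation timestamp only when it
 * is valid; an unset timestamp is the sentinel GST_CLOCK_TIME_NONE. */
static void
print_buffer_pts (GstBuffer * buf)
{
  GstClockTime pts = GST_BUFFER_PTS (buf);

  if (GST_CLOCK_TIME_IS_VALID (pts)) {
    /* GST_TIME_FORMAT / GST_TIME_ARGS pretty-print the time as h:mm:ss.ns */
    g_print ("pts: %" GST_TIME_FORMAT "\n", GST_TIME_ARGS (pts));
  } else {
    g_print ("buffer has no valid pts\n");
  }
}

As the examples below show, this same check guards most timestamp arithmetic in GStreamer elements: sync controller values only when the stream time is valid, accumulate durations only when they are valid, and fall back to a default when a timestamp is missing.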

Example 1: _set_duration

static gboolean
_set_duration (GESTimelineElement * element, GstClockTime duration)
{
  GESTrackElement *object = GES_TRACK_ELEMENT (element);
  GESTrackElementPrivate *priv = object->priv;

  if (GST_CLOCK_TIME_IS_VALID (_MAXDURATION (element)) &&
      duration > _INPOINT (object) + _MAXDURATION (element))
    duration = _MAXDURATION (element) - _INPOINT (object);

  if (priv->gnlobject != NULL) {
    if (G_UNLIKELY (duration == _DURATION (object)))
      return FALSE;

    g_object_set (priv->gnlobject, "duration", duration, NULL);
  } else
    priv->pending_duration = duration;

  _update_control_bindings (element, ges_timeline_element_get_inpoint (element),
      duration);

  return TRUE;
}

Developer: vliaskov, Project: gst-editing-services, Lines of code: 23


Example 2: g_signal_emit_by_name

void AudioTestSource_i::_new_gst_buffer(GstElement *sink, AudioTestSource_i* comp) {
    static GstBuffer *buffer;
    static std::vector<short> packet;

    /* Retrieve the buffer */
    g_signal_emit_by_name (sink, "pull-buffer", &buffer);
    if (buffer) {
        BULKIO::PrecisionUTCTime T;

        /* The only thing we do in this example is print a * to indicate a received buffer */
        if (GST_CLOCK_TIME_IS_VALID(buffer->timestamp)) {
            T = _from_gst_timestamp(buffer->timestamp);
        } else {
            T = _now();
        }

        packet.resize(buffer->size / 2); // TODO the division should come from reading buffer->caps
        memcpy(&packet[0], buffer->data, buffer->size);
        comp->audio_out->pushPacket(packet, T, false, comp->stream_id);

        gst_buffer_unref (buffer);
    }
}

Developer: 54AndyN, Project: audio-components, Lines of code: 23


Example 3: gst_audio_panorama_transform

/* this function does the actual processing */
static GstFlowReturn
gst_audio_panorama_transform (GstBaseTransform * base, GstBuffer * inbuf,
    GstBuffer * outbuf)
{
  GstAudioPanorama *filter = GST_AUDIO_PANORAMA (base);
  GstClockTime timestamp, stream_time;
  GstMapInfo inmap, outmap;

  timestamp = GST_BUFFER_TIMESTAMP (inbuf);
  stream_time =
      gst_segment_to_stream_time (&base->segment, GST_FORMAT_TIME, timestamp);

  GST_DEBUG_OBJECT (filter, "sync to %" GST_TIME_FORMAT,
      GST_TIME_ARGS (timestamp));

  if (GST_CLOCK_TIME_IS_VALID (stream_time))
    gst_object_sync_values (GST_OBJECT (filter), stream_time);

  gst_buffer_map (inbuf, &inmap, GST_MAP_READ);
  gst_buffer_map (outbuf, &outmap, GST_MAP_WRITE);

  if (G_UNLIKELY (GST_BUFFER_FLAG_IS_SET (inbuf, GST_BUFFER_FLAG_GAP))) {
    GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_GAP);
    memset (outmap.data, 0, outmap.size);
  } else {
    /* output always stereo, input mono or stereo,
     * and info describes input format */
    guint num_samples = outmap.size / (2 * GST_AUDIO_INFO_BPS (&filter->info));

    filter->process (filter, inmap.data, outmap.data, num_samples);
  }

  gst_buffer_unmap (inbuf, &inmap);
  gst_buffer_unmap (outbuf, &outmap);

  return GST_FLOW_OK;
}

Developer: lubing521, Project: gst-embedded-builder, Lines of code: 39


Example 4: gst_mpegv_parse_parse_frame

static GstFlowReturn
gst_mpegv_parse_parse_frame (GstBaseParse * parse, GstBaseParseFrame * frame)
{
  GstMpegvParse *mpvparse = GST_MPEGVIDEO_PARSE (parse);
  GstBuffer *buffer = frame->buffer;

  if (G_UNLIKELY (mpvparse->pichdr.pic_type == GST_MPEG_VIDEO_PICTURE_TYPE_I))
    GST_BUFFER_FLAG_UNSET (buffer, GST_BUFFER_FLAG_DELTA_UNIT);
  else
    GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_DELTA_UNIT);

  /* maybe only sequence in this buffer, though not recommended,
   * so mark it as such and force 0 duration */
  if (G_UNLIKELY (mpvparse->pic_offset < 0)) {
    GST_DEBUG_OBJECT (mpvparse, "frame holds no picture data");
    frame->flags |= GST_BASE_PARSE_FRAME_FLAG_NO_FRAME;
    GST_BUFFER_DURATION (buffer) = 0;
  }

  if (mpvparse->pic_offset > 4) {
    gst_base_parse_set_ts_at_offset (parse, mpvparse->pic_offset - 4);
  }

  if (mpvparse->frame_repeat_count
      && GST_CLOCK_TIME_IS_VALID (GST_BUFFER_DURATION (buffer))) {
    GST_BUFFER_DURATION (buffer) =
        (1 + mpvparse->frame_repeat_count) * GST_BUFFER_DURATION (buffer) / 2;
  }

  if (G_UNLIKELY (mpvparse->drop && !mpvparse->config)) {
    GST_DEBUG_OBJECT (mpvparse, "dropping frame as no config yet");
    return GST_BASE_PARSE_FLOW_DROPPED;
  }

  gst_mpegv_parse_update_src_caps (mpvparse);

  return GST_FLOW_OK;
}

Developer: iainlane, Project: gstreamer, Lines of code: 37


Example 5: gst_burn_transform_frame

/* Actual processing. */
static GstFlowReturn
gst_burn_transform_frame (GstVideoFilter * vfilter,
    GstVideoFrame * in_frame, GstVideoFrame * out_frame)
{
  GstBurn *filter = GST_BURN (vfilter);
  gint video_size, adjustment;
  guint32 *src, *dest;
  GstClockTime timestamp;
  gint64 stream_time;

  src = GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0);
  dest = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);

  video_size = GST_VIDEO_FRAME_WIDTH (in_frame) *
      GST_VIDEO_FRAME_HEIGHT (in_frame);

  /* GstController: update the properties */
  timestamp = GST_BUFFER_TIMESTAMP (in_frame->buffer);
  stream_time =
      gst_segment_to_stream_time (&GST_BASE_TRANSFORM (filter)->segment,
      GST_FORMAT_TIME, timestamp);

  GST_DEBUG_OBJECT (filter, "sync to %" GST_TIME_FORMAT,
      GST_TIME_ARGS (timestamp));

  if (GST_CLOCK_TIME_IS_VALID (stream_time))
    gst_object_sync_values (GST_OBJECT (filter), stream_time);

  GST_OBJECT_LOCK (filter);
  adjustment = filter->adjustment;
  GST_OBJECT_UNLOCK (filter);

  /*** Now the image processing work.... ***/
  gaudi_orc_burn (dest, src, adjustment, video_size);

  return GST_FLOW_OK;
}

Developer: Distrotech, Project: gst-plugins-bad, Lines of code: 38


Example 6: gst_rtp_mpv_pay_handle_buffer

static GstFlowReturn
gst_rtp_mpv_pay_handle_buffer (GstBaseRTPPayload * basepayload,
    GstBuffer * buffer)
{
  GstRTPMPVPay *rtpmpvpay;
  guint avail, packet_len;
  GstClockTime timestamp, duration;
  GstFlowReturn ret;

  rtpmpvpay = GST_RTP_MPV_PAY (basepayload);

  timestamp = GST_BUFFER_TIMESTAMP (buffer);
  duration = GST_BUFFER_DURATION (buffer);

  gst_adapter_push (rtpmpvpay->adapter, buffer);
  avail = gst_adapter_available (rtpmpvpay->adapter);

  /* Initialize new RTP payload */
  if (avail == 0) {
    rtpmpvpay->first_ts = timestamp;
    rtpmpvpay->duration = duration;
  }

  /* get packet length of previous data and this new data,
   * payload length includes a 4 byte MPEG video-specific header */
  packet_len = gst_rtp_buffer_calc_packet_len (4 + avail, 0, 0);

  if (gst_basertppayload_is_filled (basepayload,
          packet_len, rtpmpvpay->duration + duration)) {
    ret = gst_rtp_mpv_pay_flush (rtpmpvpay, timestamp, duration);
  } else {
    if (GST_CLOCK_TIME_IS_VALID (duration))
      rtpmpvpay->duration += duration;
    ret = GST_FLOW_OK;
  }

  return ret;
}

Developer: roopar, Project: gst-plugins-good, Lines of code: 37


Example 7: gst_goo_timestamp_gst2omx

/**
 * Utility function to handle transferring Gstreamer timestamp to OMX
 * timestamp.  This function handles discontinuities and timestamp
 * renormalization.
 *
 * @omx_buffer the destination OMX buffer for the timestamp
 * @buffer     the source Gstreamer buffer for the timestamp
 * @normalize  should this buffer be the one that we renormalize on
 *   (iff normalization is required)?  (ie. with TI OMX, you should
 *   only re-normalize on a video buffer)
 */
gboolean
gst_goo_timestamp_gst2omx (
    OMX_BUFFERHEADERTYPE* omx_buffer,
    GstBuffer* buffer,
    gboolean normalize)
{
    GstClockTime timestamp = GST_BUFFER_TIMESTAMP (buffer);

    if (GST_GOO_UTIL_IS_DISCONT (buffer))
    {
        needs_normalization = TRUE;
        GST_DEBUG ("needs_normalization");
    }

    if (needs_normalization && normalize)
    {
        GST_INFO ("Setting OMX_BUFFER_STARTTIME..");
        omx_buffer->nFlags |= OMX_BUFFERFLAG_STARTTIME;
        omx_normalize_timestamp = GST2OMX_TIMESTAMP ((gint64)timestamp);
        needs_normalization = FALSE;
        GST_DEBUG ("omx_normalize_timestamp=%lld", omx_normalize_timestamp);
    }

    /* transfer timestamp to openmax */
    if (GST_CLOCK_TIME_IS_VALID (timestamp))
    {
        omx_buffer->nTimeStamp = GST2OMX_TIMESTAMP ((gint64)timestamp) - omx_normalize_timestamp;
        GST_INFO ("OMX timestamp = %lld (%lld - %lld)", omx_buffer->nTimeStamp,
            GST2OMX_TIMESTAMP ((gint64)timestamp), omx_normalize_timestamp);
        return TRUE;
    }
    else
    {
        GST_WARNING ("Invalid timestamp!");
        return FALSE;
    }
}

Developer: mrchapp, Project: gst-goo, Lines of code: 47


Example 8: gst_timed_value_control_source_unset

/**
 * gst_timed_value_control_source_unset:
 * @self: the #GstTimedValueControlSource object
 * @timestamp: the time the control-change should be removed from
 *
 * Used to remove the value of given controller-handled property at a certain
 * time.
 *
 * Returns: FALSE if the value couldn't be unset (i.e. not found), TRUE otherwise.
 */
gboolean
gst_timed_value_control_source_unset (GstTimedValueControlSource * self,
    GstClockTime timestamp)
{
  GSequenceIter *iter;
  gboolean res = FALSE;
  GstControlPoint *cp = NULL;

  g_return_val_if_fail (GST_IS_TIMED_VALUE_CONTROL_SOURCE (self), FALSE);
  g_return_val_if_fail (GST_CLOCK_TIME_IS_VALID (timestamp), FALSE);

  g_mutex_lock (&self->lock);
  /* check if a control point for the timestamp exists */
  if (G_LIKELY (self->values) && (iter =
          g_sequence_lookup (self->values, &timestamp,
              (GCompareDataFunc) gst_control_point_find, NULL))) {

    /* Iter contains the iter right after timestamp, i.e.
     * we need to get the previous one and check the timestamp
     */
    cp = g_slice_dup (GstControlPoint, g_sequence_get (iter));
    g_sequence_remove (iter);
    self->nvalues--;
    self->valid_cache = FALSE;
    res = TRUE;
  }
  g_mutex_unlock (&self->lock);

  if (cp) {
    g_signal_emit (self,
        gst_timed_value_control_source_signals[VALUE_REMOVED_SIGNAL], 0, cp);
    g_slice_free (GstControlPoint, cp);
  }

  return res;
}

Developer: loganek, Project: gstreamer, Lines of code: 46


Example 9: gst_direct_control_binding_get_value

static GValue *
gst_direct_control_binding_get_value (GstControlBinding * _self,
    GstClockTime timestamp)
{
  GstDirectControlBinding *self = GST_DIRECT_CONTROL_BINDING (_self);
  GValue *dst_val = NULL;
  gdouble src_val;

  g_return_val_if_fail (GST_IS_DIRECT_CONTROL_BINDING (self), NULL);
  g_return_val_if_fail (GST_CLOCK_TIME_IS_VALID (timestamp), NULL);
  g_return_val_if_fail (GST_CONTROL_BINDING_PSPEC (self), FALSE);

  /* get current value via control source */
  if (gst_control_source_get_value (self->cs, timestamp, &src_val)) {
    dst_val = g_new0 (GValue, 1);
    g_value_init (dst_val, G_PARAM_SPEC_VALUE_TYPE (_self->pspec));
    self->convert_g_value (self, src_val, dst_val);
  } else {
    GST_LOG ("no control value for property %s at ts %" GST_TIME_FORMAT,
        _self->name, GST_TIME_ARGS (timestamp));
  }

  return dst_val;
}

Developer: Grobik1, Project: gstreamer, Lines of code: 24


Example 10: gst_frame_positionner_transform_ip

static GstFlowReturn
gst_frame_positionner_transform_ip (GstBaseTransform * trans, GstBuffer * buf)
{
  GstFramePositionnerMeta *meta;
  GstFramePositionner *framepositionner = GST_FRAME_POSITIONNER (trans);
  GstClockTime timestamp = GST_BUFFER_TIMESTAMP (buf);

  if (GST_CLOCK_TIME_IS_VALID (timestamp)) {
    gst_object_sync_values (GST_OBJECT (trans), timestamp);
  }

  meta =
      (GstFramePositionnerMeta *) gst_buffer_add_meta (buf,
      gst_frame_positionner_get_info (), NULL);

  GST_OBJECT_LOCK (framepositionner);
  meta->alpha = framepositionner->alpha;
  meta->posx = framepositionner->posx;
  meta->posy = framepositionner->posy;
  meta->zorder = framepositionner->zorder;
  GST_OBJECT_UNLOCK (framepositionner);

  return GST_FLOW_OK;
}

Developer: dark-al, Project: gst-editing-services-old, Lines of code: 24


Example 11: ges_layer_add_asset

/**
 * ges_layer_add_asset:
 * @layer: a #GESLayer
 * @asset: The asset to add to
 * @start: The start value to set on the new #GESClip
 * @inpoint: The inpoint value to set on the new #GESClip
 * @duration: The duration value to set on the new #GESClip
 * @track_types: The #GESTrackType to set on the new #GESClip
 *
 * Creates Clip from asset, adds it to layer and
 * returns a reference to it.
 *
 * Returns: (transfer none): Created #GESClip
 */
GESClip *
ges_layer_add_asset (GESLayer * layer,
    GESAsset * asset, GstClockTime start, GstClockTime inpoint,
    GstClockTime duration, GESTrackType track_types)
{
  GESClip *clip;

  g_return_val_if_fail (GES_IS_LAYER (layer), NULL);
  g_return_val_if_fail (GES_IS_ASSET (asset), NULL);
  g_return_val_if_fail (g_type_is_a (ges_asset_get_extractable_type
          (asset), GES_TYPE_CLIP), NULL);

  GST_DEBUG_OBJECT (layer, "Adding asset %s with: start: %" GST_TIME_FORMAT
      " inpoint: %" GST_TIME_FORMAT " duration: %" GST_TIME_FORMAT
      " track types: %d (%s)", ges_asset_get_id (asset), GST_TIME_ARGS (start),
      GST_TIME_ARGS (inpoint), GST_TIME_ARGS (duration), track_types,
      ges_track_type_name (track_types));

  clip = GES_CLIP (ges_asset_extract (asset, NULL));
  _set_start0 (GES_TIMELINE_ELEMENT (clip), start);
  _set_inpoint0 (GES_TIMELINE_ELEMENT (clip), inpoint);
  if (track_types != GES_TRACK_TYPE_UNKNOWN)
    ges_clip_set_supported_formats (clip, track_types);

  if (GST_CLOCK_TIME_IS_VALID (duration)) {
    _set_duration0 (GES_TIMELINE_ELEMENT (clip), duration);
  }

  if (!ges_layer_add_clip (layer, clip)) {
    gst_object_unref (clip);

    return NULL;
  }

  return clip;
}

Developer: dark-al, Project: gst-editing-services-old, Lines of code: 50


Example 12: gst_audio_fx_base_iir_filter_transform_ip

/* GstBaseTransform vmethod implementations */
static GstFlowReturn
gst_audio_fx_base_iir_filter_transform_ip (GstBaseTransform * base,
    GstBuffer * buf)
{
  GstAudioFXBaseIIRFilter *filter = GST_AUDIO_FX_BASE_IIR_FILTER (base);
  guint num_samples;
  GstClockTime timestamp, stream_time;
  GstMapInfo map;

  timestamp = GST_BUFFER_TIMESTAMP (buf);
  stream_time =
      gst_segment_to_stream_time (&base->segment, GST_FORMAT_TIME, timestamp);

  GST_DEBUG_OBJECT (filter, "sync to %" GST_TIME_FORMAT,
      GST_TIME_ARGS (timestamp));

  if (GST_CLOCK_TIME_IS_VALID (stream_time))
    gst_object_sync_values (GST_OBJECT (filter), stream_time);

  gst_buffer_map (buf, &map, GST_MAP_READWRITE);
  num_samples = map.size / GST_AUDIO_FILTER_BPS (filter);

  g_mutex_lock (&filter->lock);
  if (filter->a == NULL || filter->b == NULL) {
    g_warn_if_fail (filter->a != NULL && filter->b != NULL);
    gst_buffer_unmap (buf, &map);
    g_mutex_unlock (&filter->lock);
    return GST_FLOW_ERROR;
  }

  filter->process (filter, map.data, num_samples);
  g_mutex_unlock (&filter->lock);

  gst_buffer_unmap (buf, &map);

  return GST_FLOW_OK;
}

Developer: BigBrother-International, Project: gst-plugins-good, Lines of code: 37


Example 13: gst_v4l2_video_dec_decide_allocation

static gboolean
gst_v4l2_video_dec_decide_allocation (GstVideoDecoder * decoder,
    GstQuery * query)
{
  GstV4l2VideoDec *self = GST_V4L2_VIDEO_DEC (decoder);
  GstClockTime latency;
  gboolean ret = FALSE;

  if (gst_v4l2_object_decide_allocation (self->v4l2capture, query))
    ret = GST_VIDEO_DECODER_CLASS (parent_class)->decide_allocation (decoder,
        query);

  if (GST_CLOCK_TIME_IS_VALID (self->v4l2capture->duration)) {
    latency = self->v4l2capture->min_buffers * self->v4l2capture->duration;
    GST_DEBUG_OBJECT (self, "Setting latency: %" GST_TIME_FORMAT " (%"
        G_GUINT32_FORMAT " * %" G_GUINT64_FORMAT, GST_TIME_ARGS (latency),
        self->v4l2capture->min_buffers, self->v4l2capture->duration);
    gst_video_decoder_set_latency (decoder, latency, latency);
  } else {
    GST_WARNING_OBJECT (self, "Duration invalid, not setting latency");
  }

  return ret;
}

Developer: hizukiayaka, Project: gst-plugins-good, Lines of code: 24


Example 14: ges_track_object_set_duration_internal

static inline gboolean
ges_track_object_set_duration_internal (GESTrackObject * object,
    guint64 duration)
{
  GESTrackObjectPrivate *priv = object->priv;

  GST_DEBUG ("object:%p, duration:%" GST_TIME_FORMAT,
      object, GST_TIME_ARGS (duration));

  if (GST_CLOCK_TIME_IS_VALID (priv->maxduration) &&
      duration > object->inpoint + priv->maxduration)
    duration = priv->maxduration - object->inpoint;

  if (priv->gnlobject != NULL) {
    if (G_UNLIKELY (duration == object->duration))
      return FALSE;

    g_object_set (priv->gnlobject, "duration", duration,
        "media-duration", duration, NULL);
  } else
    priv->pending_duration = duration;

  return TRUE;
}

Developer: volodymyrrudyi, Project: gst-editing-services, Lines of code: 24


Example 15: gst_mim_dec_chain

//......... part of the code omitted here .........
    if (gst_adapter_available (mimdec->adapter) < payload_size + 24)
      return GST_FLOW_OK;

    /* We have a whole packet and have read the header, lets flush it out */
    gst_adapter_flush (mimdec->adapter, 24);

    frame_body = gst_adapter_map (mimdec->adapter, payload_size);

    if (mimdec->buffer_size < 0) {
      /* Check if its a keyframe, otherwise skip it */
      if (GUINT32_FROM_LE (*((guint32 *) (frame_body + 12))) != 0) {
        gst_adapter_unmap (mimdec->adapter);
        gst_adapter_flush (mimdec->adapter, payload_size);
        return GST_FLOW_OK;
      }

      if (!mimic_decoder_init (mimdec->dec, frame_body)) {
        gst_adapter_unmap (mimdec->adapter);
        gst_adapter_flush (mimdec->adapter, payload_size);
        GST_ELEMENT_ERROR (mimdec, LIBRARY, INIT, (NULL),
            ("mimic_decoder_init error"));
        return GST_FLOW_ERROR;
      }

      if (!mimic_get_property (mimdec->dec, "buffer_size",
              &mimdec->buffer_size)) {
        gst_adapter_unmap (mimdec->adapter);
        gst_adapter_flush (mimdec->adapter, payload_size);
        GST_ELEMENT_ERROR (mimdec, LIBRARY, INIT, (NULL),
            ("mimic_get_property('buffer_size') error"));
        return GST_FLOW_ERROR;
      }

      mimic_get_property (mimdec->dec, "width", &width);
      mimic_get_property (mimdec->dec, "height", &height);
      GST_DEBUG_OBJECT (mimdec,
          "Initialised decoder with %d x %d payload size %d buffer_size %d",
          width, height, payload_size, mimdec->buffer_size);

      caps = gst_caps_new_simple ("video/x-raw",
          "format", G_TYPE_STRING, "RGB",
          "framerate", GST_TYPE_FRACTION, 0, 1,
          "width", G_TYPE_INT, width, "height", G_TYPE_INT, height, NULL);
      gst_pad_set_caps (mimdec->srcpad, caps);
      gst_caps_unref (caps);
    }

    if (mimdec->need_segment) {
      GstSegment segment;

      gst_segment_init (&segment, GST_FORMAT_TIME);

      if (GST_CLOCK_TIME_IS_VALID (in_time))
        segment.start = in_time;
      else
        segment.start = current_ts * GST_MSECOND;
      event = gst_event_new_segment (&segment);
    }
    mimdec->need_segment = FALSE;

    if (event)
      result = gst_pad_push_event (mimdec->srcpad, event);
    event = NULL;

    if (!result) {
      GST_WARNING_OBJECT (mimdec, "gst_pad_push_event failed");
      return GST_FLOW_ERROR;
    }

    out_buf = gst_buffer_new_allocate (NULL, mimdec->buffer_size, NULL);
    gst_buffer_map (out_buf, &map, GST_MAP_READWRITE);

    if (!mimic_decode_frame (mimdec->dec, frame_body, map.data)) {
      GST_WARNING_OBJECT (mimdec, "mimic_decode_frame error\n");
      gst_adapter_flush (mimdec->adapter, payload_size);
      gst_buffer_unmap (out_buf, &map);
      gst_buffer_unref (out_buf);
      GST_ELEMENT_ERROR (mimdec, STREAM, DECODE, (NULL),
          ("mimic_decode_frame error"));
      return GST_FLOW_ERROR;
    }
    gst_buffer_unmap (out_buf, &map);
    gst_adapter_flush (mimdec->adapter, payload_size);

    if (GST_CLOCK_TIME_IS_VALID (in_time))
      GST_BUFFER_TIMESTAMP (out_buf) = in_time;
    else
      GST_BUFFER_TIMESTAMP (out_buf) = current_ts * GST_MSECOND;
    res = gst_pad_push (mimdec->srcpad, out_buf);

    if (res != GST_FLOW_OK)
      break;
  }

  return res;
}

Developer: PeterXu, Project: gst-mobile, Lines of code: 101


Example 16: gst_tensor_aggregator_chain

/**
 * @brief Chain function, this function does the actual processing.
 */
static GstFlowReturn
gst_tensor_aggregator_chain (GstPad * pad, GstObject * parent, GstBuffer * buf)
{
  GstTensorAggregator *self;
  GstFlowReturn ret = GST_FLOW_OK;
  GstAdapter *adapter;
  gsize avail, buf_size, frame_size, out_size;
  guint frames_in, frames_out, frames_flush;
  GstClockTime duration;

  self = GST_TENSOR_AGGREGATOR (parent);
  g_assert (self->tensor_configured);

  buf_size = gst_buffer_get_size (buf);
  g_return_val_if_fail (buf_size > 0, GST_FLOW_ERROR);

  frames_in = self->frames_in;
  frames_out = self->frames_out;
  frames_flush = self->frames_flush;
  frame_size = buf_size / frames_in;

  if (frames_in == frames_out) {
    /** push the incoming buffer (do concat if needed) */
    return gst_tensor_aggregator_push (self, buf, frame_size);
  }

  adapter = self->adapter;
  g_assert (adapter != NULL);

  duration = GST_BUFFER_DURATION (buf);
  if (GST_CLOCK_TIME_IS_VALID (duration)) {
    /** supposed same duration for incoming buffer */
    duration = gst_util_uint64_scale_int (duration, frames_out, frames_in);
  }

  gst_adapter_push (adapter, buf);

  out_size = frame_size * frames_out;
  g_assert (out_size > 0);

  while ((avail = gst_adapter_available (adapter)) >= out_size &&
      ret == GST_FLOW_OK) {
    GstBuffer *outbuf;
    GstClockTime pts, dts;
    guint64 pts_dist, dts_dist;
    gsize flush;

    pts = gst_adapter_prev_pts (adapter, &pts_dist);
    dts = gst_adapter_prev_dts (adapter, &dts_dist);

    /**
     * Update timestamp.
     * If frames-in is larger then frames-out, the same timestamp (pts and dts) would be returned.
     */
    if (frames_in > 1) {
      gint fn, fd;

      fn = self->in_config.rate_n;
      fd = self->in_config.rate_d;

      if (fn > 0 && fd > 0) {
        if (GST_CLOCK_TIME_IS_VALID (pts)) {
          pts +=
              gst_util_uint64_scale_int (pts_dist * fd, GST_SECOND,
              fn * frame_size);
        }

        if (GST_CLOCK_TIME_IS_VALID (dts)) {
          dts +=
              gst_util_uint64_scale_int (dts_dist * fd, GST_SECOND,
              fn * frame_size);
        }
      }
    }

    outbuf = gst_adapter_get_buffer (adapter, out_size);
    outbuf = gst_buffer_make_writable (outbuf);

    /** set timestamp */
    GST_BUFFER_PTS (outbuf) = pts;
    GST_BUFFER_DTS (outbuf) = dts;
    GST_BUFFER_DURATION (outbuf) = duration;

    ret = gst_tensor_aggregator_push (self, outbuf, frame_size);

    /** flush data */
    if (frames_flush > 0) {
      flush = frame_size * frames_flush;

      if (flush > avail) {
        /**
         * @todo flush data
         * Invalid state, tried to flush large size.
         * We have to determine how to handle this case. (flush the out-size or all available bytes)
         * Now all available bytes in adapter will be flushed.
         */
        flush = avail;
//......... part of the code omitted here .........

Developer: myungjoo, Project: nnstreamer, Lines of code: 101


Example 17: gst_hls_demux_loop

static void
gst_hls_demux_loop (GstHLSDemux * demux)
{
  GstBuffer *buf;
  GstFlowReturn ret;

  /* Loop for the source pad task. The task is started when we have
   * received the main playlist from the source element. It tries first to
   * cache the first fragments and then it waits until it has more data in the
   * queue. This task is woken up when we push a new fragment to the queue or
   * when we reached the end of the playlist  */

  if (G_UNLIKELY (demux->need_cache)) {
    if (!gst_hls_demux_cache_fragments (demux))
      goto cache_error;

    /* we can start now the updates thread */
    gst_hls_demux_start_update (demux);
    GST_INFO_OBJECT (demux, "First fragments cached successfully");
  }

  if (g_queue_is_empty (demux->queue)) {
    if (demux->end_of_playlist)
      goto end_of_playlist;

    goto empty_queue;
  }

  buf = g_queue_pop_head (demux->queue);

  /* Figure out if we need to create/switch pads */
  if (G_UNLIKELY (!demux->srcpad
          || GST_BUFFER_CAPS (buf) != GST_PAD_CAPS (demux->srcpad)
          || demux->need_segment)) {
    switch_pads (demux, GST_BUFFER_CAPS (buf));
    demux->need_segment = TRUE;
  }

  if (demux->need_segment) {
    /* And send a newsegment */
    GST_DEBUG_OBJECT (demux, "Sending new-segment. Segment start:%"
        GST_TIME_FORMAT, GST_TIME_ARGS (demux->position));
    gst_pad_push_event (demux->srcpad,
        gst_event_new_new_segment (FALSE, 1.0, GST_FORMAT_TIME, demux->position,
            GST_CLOCK_TIME_NONE, demux->position));
    demux->need_segment = FALSE;
  }

  if (GST_CLOCK_TIME_IS_VALID (GST_BUFFER_DURATION (buf)))
    demux->position += GST_BUFFER_DURATION (buf);

  ret = gst_pad_push (demux->srcpad, buf);
  if (ret != GST_FLOW_OK)
    goto error;

  return;

end_of_playlist:
  {
    GST_DEBUG_OBJECT (demux, "Reached end of playlist, sending EOS");
    gst_pad_push_event (demux->srcpad, gst_event_new_eos ());
    gst_hls_demux_stop (demux);
    return;
  }

cache_error:
  {
    gst_task_pause (demux->task);
    if (!demux->cancelled) {
      GST_ELEMENT_ERROR (demux, RESOURCE, NOT_FOUND,
          ("Could not cache the first fragments"), (NULL));
      gst_hls_demux_stop (demux);
    }
    return;
  }

error:
  {
    /* FIXME: handle error */
    GST_DEBUG_OBJECT (demux, "error, stopping task");
    gst_hls_demux_stop (demux);
    return;
  }

empty_queue:
  {
    gst_task_pause (demux->task);
    return;
  }
}

Developer: thiagoss, Project: gst-plugins-bad, Lines of code: 89


Example 18: gst_hls_demux_src_query

static gboolean
gst_hls_demux_src_query (GstPad * pad, GstQuery * query)
{
  GstHLSDemux *hlsdemux;
  gboolean ret = FALSE;

  if (query == NULL)
    return FALSE;

  hlsdemux = GST_HLS_DEMUX (gst_pad_get_element_private (pad));

  switch (query->type) {
    case GST_QUERY_DURATION:{
      GstClockTime duration = -1;
      GstFormat fmt;

      gst_query_parse_duration (query, &fmt, NULL);
      if (fmt == GST_FORMAT_TIME) {
        duration = gst_m3u8_client_get_duration (hlsdemux->client);
        if (GST_CLOCK_TIME_IS_VALID (duration) && duration > 0) {
          gst_query_set_duration (query, GST_FORMAT_TIME, duration);
          ret = TRUE;
        }
      }
      GST_INFO_OBJECT (hlsdemux, "GST_QUERY_DURATION returns %s with duration %"
          GST_TIME_FORMAT, ret ? "TRUE" : "FALSE", GST_TIME_ARGS (duration));
      break;
    }
    case GST_QUERY_URI:
      if (hlsdemux->client) {
        /* FIXME: Do we answer with the variant playlist, with the current
         * playlist or the the uri of the least downlowaded fragment? */
        gst_query_set_uri (query, hlsdemux->client->current->uri);
        ret = TRUE;
      }
      break;
    case GST_QUERY_SEEKING:{
      GstFormat fmt;
      gint64 stop = -1;

      gst_query_parse_seeking (query, &fmt, NULL, NULL, NULL);
      GST_INFO_OBJECT (hlsdemux, "Received GST_QUERY_SEEKING with format %d",
          fmt);
      if (fmt == GST_FORMAT_TIME) {
        GstClockTime duration;

        duration = gst_m3u8_client_get_duration (hlsdemux->client);
        if (GST_CLOCK_TIME_IS_VALID (duration) && duration > 0)
          stop = duration;

        gst_query_set_seeking (query, fmt,
            !gst_m3u8_client_is_live (hlsdemux->client), 0, stop);
        ret = TRUE;
        GST_INFO_OBJECT (hlsdemux, "GST_QUERY_SEEKING returning with stop : %"
            GST_TIME_FORMAT, GST_TIME_ARGS (stop));
      }
      break;
    }
    default:
      /* Don't fordward queries upstream because of the special nature of this
       * "demuxer", which relies on the upstream element only to be fed with the
       * first playlist */
      break;
  }

  return ret;
}

Developer: thiagoss, Project: gst-plugins-bad, Lines of code: 67


Example 19: gst_ks_video_device_read_frame

GstFlowReturn
gst_ks_video_device_read_frame (GstKsVideoDevice * self, guint8 * buf,
    gulong buf_size, gulong * bytes_read, GstClockTime * presentation_time,
    gulong * error_code, gchar ** error_str)
{
  GstKsVideoDevicePrivate *priv = GST_KS_VIDEO_DEVICE_GET_PRIVATE (self);
  guint req_idx;
  DWORD wait_ret;
  BOOL success;
  DWORD bytes_returned;

  g_assert (priv->cur_media_type != NULL);

  /* First time we're called, submit the requests. */
  if (G_UNLIKELY (!priv->requests_submitted)) {
    priv->requests_submitted = TRUE;

    for (req_idx = 0; req_idx < priv->num_requests; req_idx++) {
      ReadRequest *req = &g_array_index (priv->requests, ReadRequest, req_idx);

      if (!gst_ks_video_device_request_frame (self, req, error_code, error_str))
        goto error_request_failed;
    }
  }

  do {
    /* Wait for either a request to complete, a cancel or a timeout */
    wait_ret = WaitForMultipleObjects (priv->request_events->len,
        (HANDLE *) priv->request_events->data, FALSE, READ_TIMEOUT);
    if (wait_ret == WAIT_TIMEOUT)
      goto error_timeout;
    else if (wait_ret == WAIT_FAILED)
      goto error_wait;

    /* Stopped? */
    if (WaitForSingleObject (priv->cancel_event, 0) == WAIT_OBJECT_0)
      goto error_cancel;

    *bytes_read = 0;

    /* Find the last ReadRequest that finished and get the result, immediately
     * re-issuing each request that has completed. */
    for (req_idx = wait_ret - WAIT_OBJECT_0;
        req_idx < priv->num_requests; req_idx++) {
      ReadRequest *req = &g_array_index (priv->requests, ReadRequest, req_idx);

      /*
       * Completed? WaitForMultipleObjects() returns the lowest index if
       * multiple objects are in the signaled state, and we know that requests
       * are processed one by one so there's no point in looking further once
       * we've found the first that's non-signaled.
       */
      if (WaitForSingleObject (req->overlapped.hEvent, 0) != WAIT_OBJECT_0)
        break;

      success = GetOverlappedResult (priv->pin_handle, &req->overlapped,
          &bytes_returned, TRUE);

      ResetEvent (req->overlapped.hEvent);

      if (success) {
        KSSTREAM_HEADER *hdr = &req->params.header;
        KS_FRAME_INFO *frame_info = &req->params.frame_info;
        GstClockTime timestamp = GST_CLOCK_TIME_NONE;
        GstClockTime duration = GST_CLOCK_TIME_NONE;

        if (hdr->OptionsFlags & KSSTREAM_HEADER_OPTIONSF_TIMEVALID)
          timestamp = hdr->PresentationTime.Time * 100;

        if (hdr->OptionsFlags & KSSTREAM_HEADER_OPTIONSF_DURATIONVALID)
          duration = hdr->Duration * 100;

        /* Assume it's a good frame */
        *bytes_read = hdr->DataUsed;

        if (G_LIKELY (presentation_time != NULL))
          *presentation_time = timestamp;

        if (G_UNLIKELY (GST_DEBUG_IS_ENABLED ())) {
          gchar *options_flags_str =
              ks_options_flags_to_string (hdr->OptionsFlags);

          GST_DEBUG ("PictureNumber=%" G_GUINT64_FORMAT ", DropCount=%"
              G_GUINT64_FORMAT ", PresentationTime=%" GST_TIME_FORMAT
              ", Duration=%" GST_TIME_FORMAT ", OptionsFlags=%s: %d bytes",
              frame_info->PictureNumber, frame_info->DropCount,
              GST_TIME_ARGS (timestamp), GST_TIME_ARGS (duration),
              options_flags_str, hdr->DataUsed);

          g_free (options_flags_str);
        }

        /* Protect against old frames. This should never happen, see previous
         * comment on last_timestamp. */
        if (G_LIKELY (GST_CLOCK_TIME_IS_VALID (timestamp))) {
          if (G_UNLIKELY (GST_CLOCK_TIME_IS_VALID (priv->last_timestamp) &&
                  timestamp < priv->last_timestamp)) {
            GST_WARNING ("got an old frame (last_timestamp=%" GST_TIME_FORMAT
                ", timestamp=%" GST_TIME_FORMAT ")",
                GST_TIME_ARGS (priv->last_timestamp),
//......... part of the code omitted here .........

Developer: eta-im-dev, Project: media, Lines of code: 101


Example 20: gst_shape_wipe_video_sink_chain

static GstFlowReturn
gst_shape_wipe_video_sink_chain (GstPad * pad, GstObject * parent,
    GstBuffer * buffer)
{
  GstShapeWipe *self = GST_SHAPE_WIPE (parent);
  GstFlowReturn ret = GST_FLOW_OK;
  GstBuffer *mask = NULL, *outbuf = NULL;
  GstClockTime timestamp;
  gboolean new_outbuf = FALSE;
  GstVideoFrame inframe, outframe, maskframe;

  if (G_UNLIKELY (GST_VIDEO_INFO_FORMAT (&self->vinfo) ==
          GST_VIDEO_FORMAT_UNKNOWN))
    goto not_negotiated;

  timestamp = GST_BUFFER_TIMESTAMP (buffer);
  timestamp =
      gst_segment_to_stream_time (&self->segment, GST_FORMAT_TIME, timestamp);

  if (GST_CLOCK_TIME_IS_VALID (timestamp))
    gst_object_sync_values (GST_OBJECT (self), timestamp);

  GST_LOG_OBJECT (self,
      "Blending buffer with timestamp %" GST_TIME_FORMAT " at position %f",
      GST_TIME_ARGS (timestamp), self->mask_position);

  g_mutex_lock (&self->mask_mutex);
  if (self->shutdown)
    goto shutdown;

  if (!self->mask)
    g_cond_wait (&self->mask_cond, &self->mask_mutex);

  if (self->mask == NULL || self->shutdown) {
    goto shutdown;
  } else {
    mask = gst_buffer_ref (self->mask);
  }
  g_mutex_unlock (&self->mask_mutex);

  if (!gst_shape_wipe_do_qos (self, GST_BUFFER_TIMESTAMP (buffer)))
    goto qos;

  /* Try to blend inplace, if it's not possible
   * get a new buffer from downstream. */
  if (!gst_buffer_is_writable (buffer)) {
    outbuf = gst_buffer_new_allocate (NULL, gst_buffer_get_size (buffer), NULL);
    gst_buffer_copy_into (outbuf, buffer, GST_BUFFER_COPY_METADATA, 0, -1);
    new_outbuf = TRUE;
  } else {
    outbuf = buffer;
  }

  gst_video_frame_map (&inframe, &self->vinfo, buffer,
      new_outbuf ? GST_MAP_READ : GST_MAP_READWRITE);
  gst_video_frame_map (&outframe, &self->vinfo, outbuf,
      new_outbuf ? GST_MAP_WRITE : GST_MAP_READWRITE);

  gst_video_frame_map (&maskframe, &self->minfo, mask, GST_MAP_READ);

  switch (GST_VIDEO_INFO_FORMAT (&self->vinfo)) {
    case GST_VIDEO_FORMAT_AYUV:
    case GST_VIDEO_FORMAT_ARGB:
    case GST_VIDEO_FORMAT_ABGR:
      if (self->mask_bpp == 16)
        gst_shape_wipe_blend_argb_16 (self, &inframe, &maskframe, &outframe);
      else
        gst_shape_wipe_blend_argb_8 (self, &inframe, &maskframe, &outframe);
      break;
    case GST_VIDEO_FORMAT_BGRA:
    case GST_VIDEO_FORMAT_RGBA:
      if (self->mask_bpp == 16)
        gst_shape_wipe_blend_bgra_16 (self, &inframe, &maskframe, &outframe);
      else
        gst_shape_wipe_blend_bgra_8 (self, &inframe, &maskframe, &outframe);
      break;
    default:
      g_assert_not_reached ();
      break;
  }

  gst_video_frame_unmap (&outframe);
  gst_video_frame_unmap (&inframe);

  gst_video_frame_unmap (&maskframe);

  gst_buffer_unref (mask);
  if (new_outbuf)
    gst_buffer_unref (buffer);

  ret = gst_pad_push (self->srcpad, outbuf);
  if (G_UNLIKELY (ret != GST_FLOW_OK))
    goto push_failed;

  return ret;

  /* Errors */
not_negotiated:
  {
    GST_ERROR_OBJECT (self, "No valid caps yet");
//......... part of the code omitted here .........

Developer: adesurya, Project: gst-mobile, Lines of code: 101


Example 21: gst_two_lame_chain

//......... part of the code omitted here .........
    if (twolame->float_input)
        num_samples = size / 4;
    else
        num_samples = size / 2;

    /* allocate space for output */
    mp3_buffer_size = 1.25 * num_samples + 16384;
    mp3_data = g_malloc (mp3_buffer_size);

    if (twolame->num_channels == 1) {
        if (twolame->float_input)
            mp3_size = twolame_encode_buffer_float32 (twolame->glopts,
                       (float *) data,
                       (float *) data, num_samples, mp3_data, mp3_buffer_size);
        else
            mp3_size = twolame_encode_buffer (twolame->glopts,
                                              (short int *) data,
                                              (short int *) data, num_samples, mp3_data, mp3_buffer_size);
    } else {
        if (twolame->float_input)
            mp3_size = twolame_encode_buffer_float32_interleaved (twolame->glopts,
                       (float *) data,
                       num_samples / twolame->num_channels, mp3_data, mp3_buffer_size);
        else
            mp3_size = twolame_encode_buffer_interleaved (twolame->glopts,
                       (short int *) data,
                       num_samples / twolame->num_channels, mp3_data, mp3_buffer_size);
    }

    GST_LOG_OBJECT (twolame, "encoded %d bytes of audio to %d bytes of mp3",
                    size, mp3_size);

    if (twolame->float_input)
        duration = gst_util_uint64_scale_int (size, GST_SECOND,
                                              4 * twolame->samplerate * twolame->num_channels);
    else
        duration = gst_util_uint64_scale_int (size, GST_SECOND,
                                              2 * twolame->samplerate * twolame->num_channels);

    if (GST_BUFFER_DURATION (buf) != GST_CLOCK_TIME_NONE &&
            GST_BUFFER_DURATION (buf) != duration) {
        GST_DEBUG_OBJECT (twolame, "incoming buffer had incorrect duration %"
                          GST_TIME_FORMAT ", outgoing buffer will have correct duration %"
                          GST_TIME_FORMAT,
                          GST_TIME_ARGS (GST_BUFFER_DURATION (buf)), GST_TIME_ARGS (duration));
    }

    if (twolame->last_ts == GST_CLOCK_TIME_NONE) {
        twolame->last_ts = GST_BUFFER_TIMESTAMP (buf);
        twolame->last_offs = GST_BUFFER_OFFSET (buf);
        twolame->last_duration = duration;
    } else {
        twolame->last_duration += duration;
    }

    gst_buffer_unref (buf);

    if (mp3_size < 0) {
        g_warning ("error %d", mp3_size);
    }

    if (mp3_size > 0) {
        GstBuffer *outbuf;

        outbuf = gst_buffer_new ();
        GST_BUFFER_DATA (outbuf) = mp3_data;
        GST_BUFFER_MALLOCDATA (outbuf) = mp3_data;
        GST_BUFFER_SIZE (outbuf) = mp3_size;
        GST_BUFFER_TIMESTAMP (outbuf) = twolame->last_ts;
        GST_BUFFER_OFFSET (outbuf) = twolame->last_offs;
        GST_BUFFER_DURATION (outbuf) = twolame->last_duration;
        gst_buffer_set_caps (outbuf, GST_PAD_CAPS (twolame->srcpad));

        result = gst_pad_push (twolame->srcpad, outbuf);
        twolame->last_flow = result;
        if (result != GST_FLOW_OK) {
            GST_DEBUG_OBJECT (twolame, "flow return: %s", gst_flow_get_name (result));
        }

        if (GST_CLOCK_TIME_IS_VALID (twolame->last_ts))
            twolame->eos_ts = twolame->last_ts + twolame->last_duration;
        else
            twolame->eos_ts = GST_CLOCK_TIME_NONE;
        twolame->last_ts = GST_CLOCK_TIME_NONE;
    } else {
        g_free (mp3_data);
        result = GST_FLOW_OK;
    }

    return result;

    /* ERRORS */
not_setup:
    {
        gst_buffer_unref (buf);
        GST_ELEMENT_ERROR (twolame, CORE, NEGOTIATION, (NULL),
                           ("encoder not initialized (input is not audio?)"));
        return GST_FLOW_ERROR;
    }
}

Developer: jwzl, Project: ossbuild, Lines of code: 101


Example 22: gst_ks_video_src_timestamp_buffer

static gboolean
gst_ks_video_src_timestamp_buffer (GstKsVideoSrc * self, GstBuffer * buf,
    GstClockTime presentation_time)
{
  GstKsVideoSrcPrivate *priv = GST_KS_VIDEO_SRC_GET_PRIVATE (self);
  GstClockTime duration;
  GstClock *clock;
  GstClockTime timestamp;

  duration = gst_ks_video_device_get_duration (priv->device);

  GST_OBJECT_LOCK (self);
  clock = GST_ELEMENT_CLOCK (self);
  if (clock != NULL) {
    gst_object_ref (clock);
    timestamp = GST_ELEMENT (self)->base_time;

    if (GST_CLOCK_TIME_IS_VALID (presentation_time)) {
      if (presentation_time > GST_ELEMENT (self)->base_time)
        presentation_time -= GST_ELEMENT (self)->base_time;
      else
        presentation_time = 0;
    }
  } else {
    timestamp = GST_CLOCK_TIME_NONE;
  }
  GST_OBJECT_UNLOCK (self);

  if (clock != NULL) {
    /* The time according to the current clock */
    timestamp = gst_clock_get_time (clock) - timestamp;
    if (timestamp > duration)
      timestamp -= duration;
    else
      timestamp = 0;

    if (GST_CLOCK_TIME_IS_VALID (presentation_time)) {
      /*
       * We don't use this for anything yet, need to ponder how to deal
       * with pins that use an internal clock and timestamp from 0.
       */
      GstClockTimeDiff diff = GST_CLOCK_DIFF (presentation_time, timestamp);
      GST_DEBUG_OBJECT (self, "diff between gst and driver timestamp: %"
          G_GINT64_FORMAT, diff);
    }

    gst_object_unref (clock);
    clock = NULL;

    /* Unless it's the first frame, align the current timestamp on a multiple
     * of duration since the previous */
    if (GST_CLOCK_TIME_IS_VALID (priv->prev_ts)) {
      GstClockTime delta;
      guint delta_remainder, delta_offset;

      /* REVISIT: I've seen this happen with the GstSystemClock on Windows,
       *          scary... */
      if (timestamp < priv->prev_ts) {
        GST_INFO_OBJECT (self, "clock is ticking backwards");
        return FALSE;
      }

      /* Round to a duration boundary */
      delta = timestamp - priv->prev_ts;
      delta_remainder = delta % duration;
      if (delta_remainder < duration / 3)
        timestamp -= delta_remainder;
      else
        timestamp += duration - delta_remainder;

      /* How many frames are we off then? */
      delta = timestamp - priv->prev_ts;
      delta_offset = delta / duration;

      if (delta_offset == 1)    /* perfect */
        GST_BUFFER_FLAG_UNSET (buf, GST_BUFFER_FLAG_DISCONT);
      else if (delta_offset > 1) {
        guint lost = delta_offset - 1;
        GST_INFO_OBJECT (self, "lost %d frame%s, setting discont flag",
            lost, (lost > 1) ? "s" : "");
        GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DISCONT);
      } else if (delta_offset == 0) {   /* overproduction, skip this frame */
        GST_INFO_OBJECT (self, "skipping frame");
        return FALSE;
      }

      priv->offset += delta_offset;
    }

    priv->prev_ts = timestamp;
  }

  GST_BUFFER_OFFSET (buf) = priv->offset;
  GST_BUFFER_OFFSET_END (buf) = GST_BUFFER_OFFSET (buf) + 1;
  GST_BUFFER_TIMESTAMP (buf) = timestamp;
  GST_BUFFER_DURATION (buf) = duration;

  return TRUE;
//......... part of the code omitted here .........

Developer: dylansong77, Project: gstreamer, Lines of code: 101


Example 23: gst_rtp_base_payload_prepare_push

/* Updates the SSRC, payload type, seqnum and timestamp of the RTP buffer
 * before the buffer is pushed. */
static GstFlowReturn
gst_rtp_base_payload_prepare_push (GstRTPBasePayload * payload,
    gpointer obj, gboolean is_list)
{
  GstRTPBasePayloadPrivate *priv;
  HeaderData data;

  if (payload->clock_rate == 0)
    goto no_rate;

  priv = payload->priv;

  /* update first, so that the property is set to the last
   * seqnum pushed */
  payload->seqnum = priv->next_seqnum;

  /* fill in the fields we want to set on all headers */
  data.payload = payload;
  data.seqnum = payload->seqnum;
  data.ssrc = payload->current_ssrc;
  data.pt = payload->pt;

  /* find the first buffer with a timestamp */
  if (is_list) {
    data.dts = -1;
    data.pts = -1;
    data.offset = GST_BUFFER_OFFSET_NONE;
    gst_buffer_list_foreach (GST_BUFFER_LIST_CAST (obj), find_timestamp, &data);
  } else {
    data.dts = GST_BUFFER_DTS (GST_BUFFER_CAST (obj));
    data.pts = GST_BUFFER_PTS (GST_BUFFER_CAST (obj));
    data.offset = GST_BUFFER_OFFSET (GST_BUFFER_CAST (obj));
  }

  /* convert to RTP time */
  if (priv->perfect_rtptime && data.offset != GST_BUFFER_OFFSET_NONE &&
      priv->base_offset != GST_BUFFER_OFFSET_NONE) {
    /* if we have an offset, use that for making an RTP timestamp */
    data.rtptime = payload->ts_base + priv->base_rtime +
        data.offset - priv->base_offset;
    GST_LOG_OBJECT (payload,
        "Using offset %" G_GUINT64_FORMAT " for RTP timestamp", data.offset);
  } else if (GST_CLOCK_TIME_IS_VALID (data.pts)) {
    gint64 rtime;

    /* no offset, use the gstreamer pts */
    rtime = gst_segment_to_running_time (&payload->segment, GST_FORMAT_TIME,
        data.pts);

    if (rtime == -1) {
      GST_LOG_OBJECT (payload, "Clipped pts, using base RTP timestamp");
      rtime = 0;
    } else {
      GST_LOG_OBJECT (payload,
          "Using running_time %" GST_TIME_FORMAT " for RTP timestamp",
          GST_TIME_ARGS (rtime));
      rtime =
          gst_util_uint64_scale_int (rtime, payload->clock_rate, GST_SECOND);
      priv->base_offset = data.offset;
      priv->base_rtime = rtime;
    }
    /* add running_time in clock-rate units to the base timestamp */
    data.rtptime = payload->ts_base + rtime;
  } else {
    GST_LOG_OBJECT (payload,
        "Using previous RTP timestamp %" G_GUINT32_FORMAT, payload->timestamp);
    /* no timestamp to convert, take previous timestamp */
    data.rtptime = payload->timestamp;
  }

  /* set ssrc, payload type, seq number, caps and rtptime */
  if (is_list) {
    gst_buffer_list_foreach (GST_BUFFER_LIST_CAST (obj), set_headers, &data);
  } else {
    GstBuffer *buf = GST_BUFFER_CAST (obj);
    set_headers (&buf, 0, &data);
  }

  priv->next_seqnum = data.seqnum;
  payload->timestamp = data.rtptime;

  GST_LOG_OBJECT (payload, "Preparing to push packet with size %"
      G_GSIZE_FORMAT ", seq=%d, rtptime=%u, pts %" GST_TIME_FORMAT,
      (is_list) ? -1 : gst_buffer_get_size (GST_BUFFER (obj)),
      payload->seqnum, data.rtptime, GST_TIME_ARGS (data.pts));

  if (g_atomic_int_compare_and_exchange (&payload->
          priv->notified_first_timestamp, 1, 0)) {
    g_object_notify (G_OBJECT (payload), "timestamp");
    g_object_notify (G_OBJECT (payload), "seqnum");
  }

  return GST_FLOW_OK;

  /* ERRORS */
no_rate:
  {
    GST_ELEMENT_ERROR (payload, STREAM, NOT_IMPLEMENTED, (NULL),
//......... part of the code omitted here .........

Developer: PeterXu, Project: gst-mobile, Lines of code: 101


Example 24: mpegtsmux_collected

static GstFlowReturn
mpegtsmux_collected (GstCollectPads * pads, MpegTsMux * mux)
{
  GstFlowReturn ret = GST_FLOW_OK;
  MpegTsPadData *best = NULL;

  GST_DEBUG_OBJECT (mux, "Pads collected");

  if (mux->first) {
    ret = mpegtsmux_create_streams (mux);
    if (G_UNLIKELY (ret != GST_FLOW_OK))
      return ret;

    best = mpegtsmux_choose_best_stream (mux);

    if (!mpegtsdemux_prepare_srcpad (mux)) {
      GST_DEBUG_OBJECT (mux, "Failed to send new segment");
      goto new_seg_fail;
    }

    mux->first = FALSE;
  } else {
    best = mpegtsmux_choose_best_stream (mux);
  }

  if (best != NULL) {
    TsMuxProgram *prog = best->prog;
    GstBuffer *buf = best->queued_buf;
    gint64 pts = -1;
    gboolean delta = TRUE;

    if (prog == NULL) {
      GST_ELEMENT_ERROR (mux, STREAM, MUX, ("Stream is not associated with "
              "any program"), (NULL));
      return GST_FLOW_ERROR;
    }

    if (G_UNLIKELY (prog->pcr_stream == NULL)) {
      if (best) {
        /* Take the first data stream for the PCR */
        GST_DEBUG_OBJECT (COLLECT_DATA_PAD (best),
            "Use stream (pid=%d) from pad as PCR for program (prog_id = %d)",
            MPEG_TS_PAD_DATA (best)->pid, MPEG_TS_PAD_DATA (best)->prog_id);

        /* Set the chosen PCR stream */
        tsmux_program_set_pcr_stream (prog, best->stream);
      }
    }

    g_return_val_if_fail (buf != NULL, GST_FLOW_ERROR);

    if (best->stream->is_video_stream)
      delta = GST_BUFFER_FLAG_IS_SET (buf, GST_BUFFER_FLAG_DELTA_UNIT);
    GST_DEBUG_OBJECT (mux, "delta: %d", delta);

    GST_DEBUG_OBJECT (COLLECT_DATA_PAD (best),
        "Chose stream for output (PID: 0x%04x)", best->pid);

    if (GST_CLOCK_TIME_IS_VALID (best->cur_ts)) {
      pts = GSTTIME_TO_MPEGTIME (best->cur_ts);
      GST_DEBUG_OBJECT (mux, "Buffer has TS %" GST_TIME_FORMAT " pts %"
          G_GINT64_FORMAT, GST_TIME_ARGS (best->cur_ts), pts);
    }

    tsmux_stream_add_data (best->stream, GST_BUFFER_DATA (buf),
        GST_BUFFER_SIZE (buf), buf, pts, -1, !delta);
    best->queued_buf = NULL;

    mux->is_delta = delta;
    while (tsmux_stream_bytes_in_buffer (best->stream) > 0) {
      if (!tsmux_write_stream_packet (mux->tsmux, best->stream)) {
        GST_DEBUG_OBJECT (mux, "Failed to write data packet");
        goto write_fail;
      }
    }
    if (prog->pcr_stream == best->stream) {
      mux->last_ts = best->last_ts;
    }
  } else {
    /* FIXME: Drain all remaining streams */
    /* At EOS */
    gst_pad_push_event (mux->srcpad, gst_event_new_eos ());
  }

  return ret;

new_seg_fail:
  return GST_FLOW_ERROR;

write_fail:
  /* FIXME: Failed writing data for some reason. Should set appropriate error */
  return mux->last_flow_ret;
}

Developer: PeterXu, Project: gst-mobile, Lines of code: 90


Example 25: gst_audio_fx_base_fir_filter_push_residue

void
gst_audio_fx_base_fir_filter_push_residue (GstAudioFXBaseFIRFilter * self)
{
  GstBuffer *outbuf;
  GstFlowReturn res;
  gint rate = GST_AUDIO_FILTER_RATE (self);
  gint channels = GST_AUDIO_FILTER_CHANNELS (self);
  gint bps = GST_AUDIO_FILTER_BPS (self);
  gint outsize, outsamples;
  GstMapInfo map;
  guint8 *in, *out;

  if (channels == 0 || rate == 0 || self->nsamples_in == 0) {
    self->buffer_fill = 0;
    g_free (self->buffer);
    self->buffer = NULL;
    return;
  }

  /* Calculate the number of samples and their memory size that
   * should be pushed from the residue */
  outsamples = self->nsamples_in - (self->nsamples_out - self->latency);
  if (outsamples <= 0) {
    self->buffer_fill = 0;
    g_free (self->buffer);
    self->buffer = NULL;
    return;
  }
  outsize = outsamples * channels * bps;

  if (!self->fft || self->low_latency) {
    gint64 diffsize, diffsamples;

    /* Process the difference between latency and residue length samples
     * to start at the actual data instead of starting at the zeros before
     * when we only got one buffer smaller than latency */
    diffsamples =
        ((gint64) self->latency) - ((gint64) self->buffer_fill) / channels;
    if (diffsamples > 0) {
      diffsize = diffsamples * channels * bps;
      in = g_new0 (guint8, diffsize);
      out = g_new0 (guint8, diffsize);
      self->nsamples_out += self->process (self, in, out, diffsamples);
      g_free (in);
      g_free (out);
    }

    outbuf = gst_buffer_new_and_alloc (outsize);

    /* Convolve the residue with zeros to get the actual remaining data */
    in = g_new0 (guint8, outsize);
    gst_buffer_map (outbuf, &map, GST_MAP_READWRITE);
    self->nsamples_out += self->process (self, in, map.data, outsamples);
    gst_buffer_unmap (outbuf, &map);

    g_free (in);
  } else {
    guint gensamples = 0;

    outbuf = gst_buffer_new_and_alloc (outsize);
    gst_buffer_map (outbuf, &map, GST_MAP_READWRITE);

    while (gensamples < outsamples) {
      guint step_insamples = self->block_length - self->buffer_fill;
      guint8 *zeroes = g_new0 (guint8, step_insamples * channels * bps);
      guint8 *out = g_new (guint8, self->block_length * channels * bps);
      guint step_gensamples;

      step_gensamples = self->process (self, zeroes, out, step_insamples);
      g_free (zeroes);

      memcpy (map.data + gensamples * bps, out, MIN (step_gensamples,
              outsamples - gensamples) * bps);
      gensamples += MIN (step_gensamples, outsamples - gensamples);

      g_free (out);
    }
    self->nsamples_out += gensamples;

    gst_buffer_unmap (outbuf, &map);
  }

  /* Set timestamp, offset, etc from the values we
   * saved when processing the regular buffers */
  if (GST_CLOCK_TIME_IS_VALID (self->start_ts))
    GST_BUFFER_TIMESTAMP (outbuf) = self->start_ts;
  else
    GST_BUFFER_TIMESTAMP (outbuf) = 0;
  GST_BUFFER_TIMESTAMP (outbuf) +=
      gst_util_uint64_scale_int (self->nsamples_out - outsamples -
      self->latency, GST_SECOND, rate);
  GST_BUFFER_DURATION (outbuf) =
      gst_util_uint64_scale_int (outsamples, GST_SECOND, rate);

  if (self->start_off != GST_BUFFER_OFFSET_NONE) {
    GST_BUFFER_OFFSET (outbuf) =
        self->start_off + self->nsamples_out - outsamples - self->latency;
    GST_BUFFER_OFFSET_END (outbuf) = GST_BUFFER_OFFSET (outbuf) + outsamples;
  }
//......... part of the code omitted here .........

Developer: felipemogollon, Project: gst-plugins-good, Lines of code: 101


Example 26: gst_audio_fx_base_fir_filter_transform

static GstFlowReturn
gst_audio_fx_base_fir_filter_transform (GstBaseTransform * base,
    GstBuffer * inbuf, GstBuffer * outbuf)
{
  GstAudioFXBaseFIRFilter *self = GST_AUDIO_FX_BASE_FIR_FILTER (base);
  GstClockTime timestamp, expected_timestamp;
  gint channels = GST_AUDIO_FILTER_CHANNELS (self);
  gint rate = GST_AUDIO_FILTER_RATE (self);
  gint bps = GST_AUDIO_FILTER_BPS (self);
  GstMapInfo inmap, outmap;
  guint input_samples;
  guint output_samples;
  guint generated_samples;
  guint64 output_offset;
  gint64 diff = 0;
  GstClockTime stream_time;

  timestamp = GST_BUFFER_TIMESTAMP (outbuf);

  if (!GST_CLOCK_TIME_IS_VALID (timestamp)
      && !GST_CLOCK_TIME_IS_VALID (self->start_ts)) {
    GST_ERROR_OBJECT (self, "Invalid timestamp");
    return GST_FLOW_ERROR;
  }

  g_mutex_lock (&self->lock);
  stream_time =
      gst_segment_to_stream_time (&base->segment, GST_FORMAT_TIME, timestamp);

  GST_DEBUG_OBJECT (self, "sync to %" GST_TIME_FORMAT,
      GST_TIME_ARGS (timestamp));

  if (GST_CLOCK_TIME_IS_VALID (stream_time))
    gst_object_sync_values (GST_OBJECT (self), stream_time);

  g_return_val_if_fail (self->kernel != NULL, GST_FLOW_ERROR);
  g_return_val_if_fail (channels != 0, GST_FLOW_ERROR);

  if (GST_CLOCK_TIME_IS_VALID (self->start_ts))
    expected_timestamp =
        self->start_ts + gst_util_uint64_scale_int (self->nsamples_in,
        GST_SECOND, rate);
  else
    expected_timestamp = GST_CLOCK_TIME_NONE;

  /* Reset the residue if already existing on discont buffers */
  if (GST_BUFFER_IS_DISCONT (inbuf)
      || (GST_CLOCK_TIME_IS_VALID (expected_timestamp)
          && (ABS (GST_CLOCK_DIFF (timestamp,
                      expected_timestamp) > 5 * GST_MSECOND)))) {
    GST_DEBUG_OBJECT (self, "Discontinuity detected - flushing");
    if (GST_CLOCK_TIME_IS_VALID (expected_timestamp))
      gst_audio_fx_base_fir_filter_push_residue (self);
    self->buffer_fill = 0;
    g_free (self->buffer);
    self->buffer = NULL;
    self->start_ts = timestamp;
    self->start_off = GST_BUFFER_OFFSET (inbuf);
    self->nsamples_out = 0;
    self->nsamples_in = 0;
  } else if (!GST_CLOCK_TIME_IS_VALID (self->start_ts)) {
    self->start_ts = timestamp;
    self->start_off = GST_BUFFER_OFFSET (inbuf);
  }

  gst_buffer_map (inbuf, &inmap, GST_MAP_READ);
  gst_buffer_map (outbuf, &outmap, GST_MAP_WRITE);

  input_samples = (inmap.size / bps) / channels;
  output_samples = (outmap.size / bps) / channels;

  self->nsamples_in += input_samples;

  generated_samples =
      self->process (self, inmap.data, outmap.data, input_samples);

  gst_buffer_unmap (inbuf, &inmap);
  gst_buffer_unmap (outbuf, &outmap);

  g_assert (generated_samples <= output_samples);
  self->nsamples_out += generated_samples;
  if (generated_samples == 0)
    goto no_samples;

  /* Calculate the number of samples we can push out now without outputting
   * latency zeros in the beginning */
  diff = ((gint64) self->nsamples_out) - ((gint64) self->latency);
  if (diff < 0)
    goto no_samples;

  if (diff < generated_samples) {
    gint64 tmp = diff;
    diff = generated_samples - diff;
    generated_samples = tmp;
  } else {
    diff = 0;
  }

  gst_buffer_resize (outbuf, diff * bps * channels,
      generated_samples * bps * channels);
//......... part of the code omitted here .........

Developer: felipemogollon, Project: gst-plugins-good, Lines of code: 101


Example 27: gst_base_video_decoder_finish_frame

GstFlowReturn
gst_base_video_decoder_finish_frame (GstBaseVideoDecoder * base_video_decoder,
    GstVideoFrame * frame)
{
  GstBaseVideoDecoderClass *base_video_decoder_class;
  GstBuffer *src_buffer;

  GST_DEBUG ("finish frame");

  base_video_decoder_class =
      GST_BASE_VIDEO_DECODER_GET_CLASS (base_video_decoder);

  GST_DEBUG ("finish frame sync=%d pts=%" G_GINT64_FORMAT, frame->is_sync_point,
      frame->presentation_timestamp);

  if (frame->is_sync_point) {
    if (GST_CLOCK_TIME_IS_VALID (frame->presentation_timestamp)) {
      if (frame->presentation_timestamp != base_video_decoder->timestamp_offset) {
        GST_DEBUG ("sync timestamp %" G_GINT64_FORMAT " diff %" G_GINT64_FORMAT,
            frame->presentation_timestamp,
            frame->presentation_timestamp -
            base_video_decoder->state.segment.start);
        base_video_decoder->timestamp_offset = frame->presentation_timestamp;
        base_video_decoder->field_index = 0;
      } else {
        /* This case is for one initial timestamp and no others, e.g.,
         * filesrc ! decoder ! xvimagesink */
        GST_WARNING ("sync timestamp didn't change, ignoring");
        frame->presentation_timestamp = GST_CLOCK_TIME_NONE;
      }
    } else {
      GST_WARNING ("sync point doesn't have timestamp");
      if (GST_CLOCK_TIME_IS_VALID (base_video_decoder->timestamp_offset)) {
        GST_ERROR ("No base timestamp.  Assuming frames start at 0");
        base_video_decoder->timestamp_offset = 0;
        base_video_decoder->field_index = 0;
      }
    }
  }
  frame->field_index = base_video_decoder->field_index;
  base_video_decoder->field_index += frame->n_fields;

  if (frame->presentation_timestamp == GST_CLOCK_TIME_NONE) {
    frame->presentation_timestamp =
        gst_base_video_decoder_get_field_timestamp (base_video_decoder,
        frame->field_index);
    frame->presentation_duration = GST_CLOCK_TIME_NONE;
    frame->decode_timestamp =
        gst_base_video_decoder_get_timestamp (base_video_decoder,
        frame->decode_frame_number);
  }
  if (frame->presentation_duration == GST_CLOCK_TIME_NONE) {
    frame->presentation_duration =
        gst_base_video_decoder_get_field_timestamp (base_video_decoder,
        frame->field_index + frame->n_fields) - frame->presentation_timestamp;
  }

  if (GST_CLOCK_TIME_IS_VALID (base_video_decoder->last_timestamp)) {
    if (frame->presentation_timestamp < base_video_decoder->last_timestamp) {
      GST_WARNING ("decreasing timestamp (%" G_GINT64_FORMAT " < %"
          G_GINT64_FORMAT ")", frame->presentation_timestamp,
          base_video_decoder->last_timestamp);
    }
  }
  base_video_decoder->last_timestamp = frame->presentation_timestamp;

  GST_BUFFER_FLAG_UNSET (frame->src_buffer, GST_BUFFER_FLAG_DELTA_UNIT);
  if (base_video_decoder->state.interlaced) {
#ifndef GST_VIDEO_BUFFER_TFF
#define GST_VIDEO_BUFFER_TFF (GST_MINI_OBJECT_FLAG_LAST << 5)
#endif
#ifndef GST_VIDEO_BUFFER_RFF
#define GST_VIDEO_BUFFER_RFF (GST_MINI_OBJECT_FLAG_LAST << 6)
#endif
#ifndef GST_VIDEO_BUFFER_ONEFIELD
#define GST_VIDEO_BUFFER_ONEFIELD (GST_MINI_OBJECT_FLAG_LAST << 7)
#endif
    int tff = base_video_decoder->state.top_field_first;

    if (frame->field_index & 1) {
      tff ^= 1;
    }

    if (tff) {
      GST_BUFFER_FLAG_SET (frame->src_buffer, GST_VIDEO_BUFFER_TFF);
    } else {
      GST_BUFFER_FLAG_UNSET (frame->src_buffer, GST_VIDEO_BUFFER_TFF);
    }

    GST_BUFFER_FLAG_UNSET (frame->src_buffer, GST_VIDEO_BUFFER_RFF);
    GST_BUFFER_FLAG_UNSET (frame->src_buffer, GST_VIDEO_BUFFER_ONEFIELD);

    if (frame->n_fields == 3) {
      GST_BUFFER_FLAG_SET (frame->src_buffer, GST_VIDEO_BUFFER_RFF);
    } else if (frame->n_fields == 1) {
      GST_BUFFER_FLAG_UNSET (frame->src_buffer, GST_VIDEO_BUFFER_ONEFIELD);
    }
  }

  GST_BUFFER_TIMESTAMP (frame->src_buffer) = frame->presentation_timestamp;
  GST_BUFFER_DURATION (frame->src_buffer) = frame->presentation_duration;
  GST_BUFFER_OFFSET (frame->src_buffer) = -1;
  GST_BUFFER_OFFSET_END (frame->src_buffer) = -1;
//......... part of the code omitted here .........

Developer: zsx, Project: ossbuild, Lines of code: 101



Note: The GST_CLOCK_TIME_IS_VALID examples in this article were compiled from source code and documentation hosted on GitHub, MSDocs, and similar platforms. The snippets were selected from open-source projects contributed by various developers; copyright of the source code remains with the original authors, and distribution and use should follow the corresponding project's License. Do not reproduce without permission.

