您当前的位置:首页 > IT编程 > C++
| C语言 | Java | VB | VC | python | Android | TensorFlow | C++ | oracle | 学术与代码 | cnn卷积神经网络 | gnn | 图像修复 | Keras | 数据集 | Neo4j | 自然语言处理 | 深度学习 | 医学CAD | 医学影像 | 超参数 | pointnet | pytorch | 异常检测 | Transformers | 情感分类 | 知识图谱 |

自学教程:C++ GST_BUFFER_FLAG_SET函数代码示例

51自学网 2021-06-01 20:55:59
  C++
这篇教程C++ GST_BUFFER_FLAG_SET函数代码示例写得很实用,希望能帮到您。

本文整理汇总了C++中GST_BUFFER_FLAG_SET函数的典型用法代码示例。如果您正苦于以下问题:C++ GST_BUFFER_FLAG_SET函数的具体用法?C++ GST_BUFFER_FLAG_SET怎么用?C++ GST_BUFFER_FLAG_SET使用的例子?那么恭喜您, 这里精选的函数代码示例或许可以为您提供帮助。

在下文中一共展示了GST_BUFFER_FLAG_SET函数的27个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于我们的系统推荐出更棒的C++代码示例。

示例1: gst_rtp_mpa_depay_process

/* Depayload one RTP MPEG-audio packet: strip the fixed 4-byte
 * MBZ/Frag_offset header and return the remaining payload as a new
 * buffer.  Returns NULL (after posting a warning) when the packet has
 * no payload beyond the header. */
static GstBuffer *
gst_rtp_mpa_depay_process (GstRTPBaseDepayload * depayload, GstBuffer * buf)
{
  GstRtpMPADepay *self = GST_RTP_MPA_DEPAY (depayload);
  GstRTPBuffer rtp = { NULL };
  GstBuffer *payload_buf = NULL;
  gint len;

  gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp);

  len = gst_rtp_buffer_get_payload_len (&rtp);
  if (len <= 4) {
    /* nothing beyond the 4 fixed header bytes: warn and drop */
    GST_ELEMENT_WARNING (self, STREAM, DECODE, ("Empty Payload."), (NULL));
  } else {
    /* subbuffer skipping the 4 header bytes
     *
     *  0                   1                   2                   3
     *  0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
     * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
     * |             MBZ               |          Frag_offset          |
     * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
     */
    payload_buf = gst_rtp_buffer_get_payload_subbuffer (&rtp, 4, -1);

    /* mark start of talkspurt with RESYNC */
    if (gst_rtp_buffer_get_marker (&rtp))
      GST_BUFFER_FLAG_SET (payload_buf, GST_BUFFER_FLAG_RESYNC);

    GST_DEBUG_OBJECT (self,
        "gst_rtp_mpa_depay_chain: pushing buffer of size %" G_GSIZE_FORMAT "",
        gst_buffer_get_size (payload_buf));
  }

  gst_rtp_buffer_unmap (&rtp);

  /* FIXME, we can push half mpeg frames when they are split over multiple
   * RTP packets */
  return payload_buf;
}
开发者ID:Lachann,项目名称:gst-plugins-good,代码行数:62,


示例2: gst_vaapidecode_push_decoded_frame

static GstFlowReturngst_vaapidecode_push_decoded_frame (GstVideoDecoder * vdec,    GstVideoCodecFrame * out_frame){  GstVaapiDecode *const decode = GST_VAAPIDECODE (vdec);  GstVaapiSurfaceProxy *proxy;  GstVaapiSurface *surface;  GstFlowReturn ret;  const GstVaapiRectangle *crop_rect;  GstVaapiVideoMeta *meta;  GstBufferPoolAcquireParams *params = NULL;  GstVaapiVideoBufferPoolAcquireParams vaapi_params = { {0,}, };  guint flags, out_flags = 0;  gboolean alloc_renegotiate, caps_renegotiate;  if (!GST_VIDEO_CODEC_FRAME_IS_DECODE_ONLY (out_frame)) {    proxy = gst_video_codec_frame_get_user_data (out_frame);    surface = GST_VAAPI_SURFACE_PROXY_SURFACE (proxy);    crop_rect = gst_vaapi_surface_proxy_get_crop_rect (proxy);    /* in theory, we are not supposed to check the surface resolution     * change here since it should be advertised before from ligstvaapi.     * But there are issues with it especially for some vp9 streams where     * upstream element set un-cropped values in set_format() which make     * everything a mess. So better doing the explicit check here irrespective     * of what notification we get from upstream or libgstvaapi.Also, even if     * we received notification from libgstvaapi, the frame we are going to     * be pushed at this point might not have the notified resolution if there     * are queued frames in decoded picture buffer. 
*/    alloc_renegotiate = is_surface_resolution_changed (decode, surface);    caps_renegotiate = is_display_resolution_changed (decode, crop_rect);    if (gst_pad_needs_reconfigure (GST_VIDEO_DECODER_SRC_PAD (vdec))        || alloc_renegotiate || caps_renegotiate || decode->do_renego) {      g_atomic_int_set (&decode->do_renego, FALSE);      if (!gst_vaapidecode_negotiate (decode))        return GST_FLOW_ERROR;    }    gst_vaapi_surface_proxy_set_destroy_notify (proxy,        (GDestroyNotify) gst_vaapidecode_release, gst_object_ref (decode));    if (is_src_allocator_dmabuf (decode)) {      vaapi_params.proxy = gst_vaapi_surface_proxy_ref (proxy);      params = (GstBufferPoolAcquireParams *) & vaapi_params;    }    ret = gst_video_decoder_allocate_output_frame_with_params (vdec, out_frame,        params);    if (params)      gst_vaapi_surface_proxy_unref (vaapi_params.proxy);    if (ret != GST_FLOW_OK)      goto error_create_buffer;    /* if not dmabuf is negotiated set the vaapi video meta in the     * proxy */    if (!params) {      meta = gst_buffer_get_vaapi_video_meta (out_frame->output_buffer);      if (!meta)        goto error_get_meta;      gst_vaapi_video_meta_set_surface_proxy (meta, proxy);    }    flags = gst_vaapi_surface_proxy_get_flags (proxy);    if (flags & GST_VAAPI_SURFACE_PROXY_FLAG_CORRUPTED)      out_flags |= GST_BUFFER_FLAG_CORRUPTED;    if (flags & GST_VAAPI_SURFACE_PROXY_FLAG_INTERLACED) {      out_flags |= GST_VIDEO_BUFFER_FLAG_INTERLACED;      if (flags & GST_VAAPI_SURFACE_PROXY_FLAG_TFF)        out_flags |= GST_VIDEO_BUFFER_FLAG_TFF;      if (flags & GST_VAAPI_SURFACE_PROXY_FLAG_RFF)        out_flags |= GST_VIDEO_BUFFER_FLAG_RFF;      if (flags & GST_VAAPI_SURFACE_PROXY_FLAG_ONEFIELD)        out_flags |= GST_VIDEO_BUFFER_FLAG_ONEFIELD;    }    GST_BUFFER_FLAG_SET (out_frame->output_buffer, out_flags);    if (flags & GST_VAAPI_SURFACE_PROXY_FLAG_FFB) {      GST_BUFFER_FLAG_SET (out_frame->output_buffer,          
GST_VIDEO_BUFFER_FLAG_FIRST_IN_BUNDLE);    }#if (USE_GLX || USE_EGL)    if (decode->has_texture_upload_meta)      gst_buffer_ensure_texture_upload_meta (out_frame->output_buffer);#endif  }  if (decode->in_segment.rate < 0.0      && !GST_VIDEO_CODEC_FRAME_IS_SYNC_POINT (out_frame)) {    GST_TRACE_OBJECT (decode, "drop frame in reverse playback");    gst_video_decoder_release_frame (GST_VIDEO_DECODER (decode), out_frame);    return GST_FLOW_OK;  }  ret = gst_video_decoder_finish_frame (vdec, out_frame);  if (ret != GST_FLOW_OK)    goto error_commit_buffer;  return GST_FLOW_OK;//.........这里部分代码省略.........
开发者ID:zzoon,项目名称:gstreamer-vaapi,代码行数:101,


示例3: gst_decklink_audio_src_create

//.........这里部分代码省略.........      capture_packet_free (p);    GST_DEBUG_OBJECT (self, "Flushing");    return GST_FLOW_FLUSHING;  }  p->packet->GetBytes ((gpointer *) & data);  sample_count = p->packet->GetSampleFrameCount ();  data_size = self->info.bpf * sample_count;  ap = (AudioPacket *) g_malloc0 (sizeof (AudioPacket));  *buffer =      gst_buffer_new_wrapped_full ((GstMemoryFlags) GST_MEMORY_FLAG_READONLY,      (gpointer) data, data_size, 0, data_size, ap,      (GDestroyNotify) audio_packet_free);  ap->packet = p->packet;  p->packet->AddRef ();  ap->input = self->input->input;  ap->input->AddRef ();  timestamp = p->capture_time;  // Jitter and discontinuity handling, based on audiobasesrc  start_time = timestamp;  // Convert to the sample numbers  start_offset =      gst_util_uint64_scale (start_time, self->info.rate, GST_SECOND);  end_offset = start_offset + sample_count;  end_time = gst_util_uint64_scale_int (end_offset, GST_SECOND,      self->info.rate);  duration = end_time - start_time;  if (self->next_offset == (guint64) - 1) {    discont = TRUE;  } else {    guint64 diff, max_sample_diff;    // Check discont    if (start_offset <= self->next_offset)      diff = self->next_offset - start_offset;    else      diff = start_offset - self->next_offset;    max_sample_diff =        gst_util_uint64_scale_int (self->alignment_threshold, self->info.rate,        GST_SECOND);    // Discont!    if (G_UNLIKELY (diff >= max_sample_diff)) {      if (self->discont_wait > 0) {        if (self->discont_time == GST_CLOCK_TIME_NONE) {          self->discont_time = start_time;        } else if (start_time - self->discont_time >= self->discont_wait) {          discont = TRUE;          self->discont_time = GST_CLOCK_TIME_NONE;        }      } else {        discont = TRUE;      }    } else if (G_UNLIKELY (self->discont_time != GST_CLOCK_TIME_NONE)) {      // we have had a discont, but are now back on track!      
self->discont_time = GST_CLOCK_TIME_NONE;    }  }  if (discont) {    // Have discont, need resync and use the capture timestamps    if (self->next_offset != (guint64) - 1)      GST_INFO_OBJECT (self, "Have discont. Expected %"          G_GUINT64_FORMAT ", got %" G_GUINT64_FORMAT,          self->next_offset, start_offset);    GST_BUFFER_FLAG_SET (*buffer, GST_BUFFER_FLAG_DISCONT);    self->next_offset = end_offset;  } else {    // No discont, just keep counting    self->discont_time = GST_CLOCK_TIME_NONE;    timestamp =        gst_util_uint64_scale (self->next_offset, GST_SECOND, self->info.rate);    self->next_offset += sample_count;    duration =        gst_util_uint64_scale (self->next_offset, GST_SECOND,        self->info.rate) - timestamp;  }  GST_BUFFER_TIMESTAMP (*buffer) = timestamp;  GST_BUFFER_DURATION (*buffer) = duration;  GST_DEBUG_OBJECT (self,      "Outputting buffer %p with timestamp %" GST_TIME_FORMAT " and duration %"      GST_TIME_FORMAT, *buffer, GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (*buffer)),      GST_TIME_ARGS (GST_BUFFER_DURATION (*buffer)));  capture_packet_free (p);  return flow_ret;}
开发者ID:ndufresne,项目名称:gst-plugins-bad,代码行数:101,


示例4: gst_video_rate_flush_prev

/* flush the oldest buffer */static GstFlowReturngst_video_rate_flush_prev (GstVideoRate * videorate, gboolean duplicate){  GstFlowReturn res;  GstBuffer *outbuf;  GstClockTime push_ts;  if (!videorate->prevbuf)    goto eos_before_buffers;  /* make sure we can write to the metadata */  outbuf = gst_buffer_make_writable (gst_buffer_ref (videorate->prevbuf));  GST_BUFFER_OFFSET (outbuf) = videorate->out;  GST_BUFFER_OFFSET_END (outbuf) = videorate->out + 1;  if (videorate->discont) {    GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DISCONT);    videorate->discont = FALSE;  } else    GST_BUFFER_FLAG_UNSET (outbuf, GST_BUFFER_FLAG_DISCONT);  if (duplicate)    GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_GAP);  else    GST_BUFFER_FLAG_UNSET (outbuf, GST_BUFFER_FLAG_GAP);  /* this is the timestamp we put on the buffer */  push_ts = videorate->next_ts;  videorate->out++;  videorate->out_frame_count++;  if (videorate->to_rate_numerator) {    /* interpolate next expected timestamp in the segment */    videorate->next_ts =        videorate->segment.base + videorate->segment.start +        videorate->base_ts + gst_util_uint64_scale (videorate->out_frame_count,        videorate->to_rate_denominator * GST_SECOND,        videorate->to_rate_numerator);    GST_BUFFER_DURATION (outbuf) = videorate->next_ts - push_ts;  }  /* We do not need to update time in VFR (variable frame rate) mode */  if (!videorate->drop_only) {    /* adapt for looping, bring back to time in current segment. */    GST_BUFFER_TIMESTAMP (outbuf) = push_ts - videorate->segment.base;  }  GST_LOG_OBJECT (videorate,      "old is best, dup, pushing buffer outgoing ts %" GST_TIME_FORMAT,      GST_TIME_ARGS (push_ts));  res = gst_pad_push (GST_BASE_TRANSFORM_SRC_PAD (videorate), outbuf);  return res;  /* WARNINGS */eos_before_buffers:  {    GST_INFO_OBJECT (videorate, "got EOS before any buffer was received");    return GST_FLOW_OK;  }}
开发者ID:Lachann,项目名称:gst-plugins-base,代码行数:64,


示例5: gst_timidity_loop

//.........这里部分代码省略.........        timidity->time_per_frame;    gst_segment_set_newsegment (timidity->o_segment, FALSE, 1.0,        GST_FORMAT_DEFAULT, 0, GST_CLOCK_TIME_NONE, 0);    gst_pad_push_event (timidity->srcpad,        gst_timidity_get_new_segment_event (timidity, GST_FORMAT_TIME, FALSE));    /* extract tags */    text = mid_song_get_meta (timidity->song, MID_SONG_TEXT);    if (text) {      tags = gst_tag_list_new ();      gst_tag_list_add (tags, GST_TAG_MERGE_APPEND, GST_TAG_TITLE, text, NULL);      //g_free (text);    }    text = mid_song_get_meta (timidity->song, MID_SONG_COPYRIGHT);    if (text) {      if (tags == NULL)        tags = gst_tag_list_new ();      gst_tag_list_add (tags, GST_TAG_MERGE_APPEND,          GST_TAG_COPYRIGHT, text, NULL);      //g_free (text);    }    if (tags) {      gst_element_found_tags (GST_ELEMENT (timidity), tags);    }    GST_DEBUG_OBJECT (timidity, "Parsing song done");    return;  }  if (timidity->o_segment_changed) {    GstSegment *segment = gst_timidity_get_segment (timidity, GST_FORMAT_TIME,        !timidity->o_new_segment);    GST_LOG_OBJECT (timidity,        "sending newsegment from %" GST_TIME_FORMAT "-%" GST_TIME_FORMAT        ", pos=%" GST_TIME_FORMAT, GST_TIME_ARGS ((guint64) segment->start),        GST_TIME_ARGS ((guint64) segment->stop),        GST_TIME_ARGS ((guint64) segment->time));    if (timidity->o_segment->flags & GST_SEEK_FLAG_SEGMENT) {      gst_element_post_message (GST_ELEMENT (timidity),          gst_message_new_segment_start (GST_OBJECT (timidity),              segment->format, segment->start));    }    gst_segment_free (segment);    timidity->o_segment_changed = FALSE;    return;  }  if (timidity->o_seek) {    /* perform a seek internally */    timidity->o_segment->last_stop = timidity->o_segment->time;    mid_song_seek (timidity->song,        (timidity->o_segment->last_stop * timidity->time_per_frame) /        GST_MSECOND);  }  out = gst_timidity_get_buffer (timidity);  if (!out) {    
GST_LOG_OBJECT (timidity, "Song ended, generating eos");    gst_pad_push_event (timidity->srcpad, gst_event_new_eos ());    timidity->o_seek = FALSE;    goto paused;  }  if (timidity->o_seek) {    GST_BUFFER_FLAG_SET (out, GST_BUFFER_FLAG_DISCONT);    timidity->o_seek = FALSE;  }  gst_buffer_set_caps (out, timidity->out_caps);  ret = gst_pad_push (timidity->srcpad, out);  if (GST_FLOW_IS_FATAL (ret) || ret == GST_FLOW_NOT_LINKED)    goto error;  return;paused:  {    GST_DEBUG_OBJECT (timidity, "pausing task");    gst_pad_pause_task (timidity->sinkpad);    return;  }error:  {    GST_ELEMENT_ERROR (timidity, STREAM, FAILED,        ("Internal data stream error"),        ("Streaming stopped, reason %s", gst_flow_get_name (ret)));    gst_pad_push_event (timidity->srcpad, gst_event_new_eos ());    goto paused;  }}
开发者ID:jonasl,项目名称:gst-svtplayer,代码行数:101,


示例6: gst_vdp_vpp_drain

static GstFlowReturngst_vdp_vpp_drain (GstVdpVideoPostProcess * vpp){  GstVdpPicture current_pic;  guint32 video_surfaces_past_count;  VdpVideoSurface video_surfaces_past[MAX_PICTURES];  guint32 video_surfaces_future_count;  VdpVideoSurface video_surfaces_future[MAX_PICTURES];  GstFlowReturn ret;  while (gst_vdp_vpp_get_next_picture (vpp,          &current_pic,          &video_surfaces_past_count, video_surfaces_past,          &video_surfaces_future_count, video_surfaces_future)) {    GError *err;    GstVdpOutputBuffer *outbuf;    GstStructure *structure;    GstVideoRectangle src_r = { 0, }    , dest_r = {    0,};    VdpRect rect;    GstVdpDevice *device;    VdpStatus status;    err = NULL;    ret =        gst_vdp_output_src_pad_alloc_buffer ((GstVdpOutputSrcPad *) vpp->srcpad,        &outbuf, &err);    if (ret != GST_FLOW_OK)      goto output_pad_error;    src_r.w = vpp->width;    src_r.h = vpp->height;    if (vpp->got_par) {      gint new_width;      new_width = gst_util_uint64_scale_int (src_r.w, vpp->par_n, vpp->par_d);      src_r.x += (src_r.w - new_width) / 2;      src_r.w = new_width;    }    structure = gst_caps_get_structure (GST_BUFFER_CAPS (outbuf), 0);    if (!gst_structure_get_int (structure, "width", &dest_r.w) ||        !gst_structure_get_int (structure, "height", &dest_r.h))      goto invalid_caps;    if (vpp->force_aspect_ratio) {      GstVideoRectangle res_r;      gst_video_sink_center_rect (src_r, dest_r, &res_r, TRUE);      rect.x0 = res_r.x;      rect.x1 = res_r.w + res_r.x;      rect.y0 = res_r.y;      rect.y1 = res_r.h + res_r.y;    } else {      rect.x0 = 0;      rect.x1 = dest_r.w;      rect.y0 = 0;      rect.y1 = dest_r.h;    }    device = vpp->device;    status =        device->vdp_video_mixer_render (vpp->mixer, VDP_INVALID_HANDLE, NULL,        current_pic.structure, video_surfaces_past_count, video_surfaces_past,        current_pic.buf->surface, video_surfaces_future_count,        video_surfaces_future, NULL, outbuf->surface, NULL, 
&rect, 0, NULL);    if (status != VDP_STATUS_OK)      goto render_error;    GST_BUFFER_TIMESTAMP (outbuf) = current_pic.timestamp;    if (gst_vdp_vpp_is_interlaced (vpp))      GST_BUFFER_DURATION (outbuf) = vpp->field_duration;    else      GST_BUFFER_DURATION (outbuf) = GST_BUFFER_DURATION (current_pic.buf);    if (GST_BUFFER_FLAG_IS_SET (current_pic.buf, GST_BUFFER_FLAG_DISCONT))      GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DISCONT);    if (GST_BUFFER_FLAG_IS_SET (current_pic.buf, GST_BUFFER_FLAG_PREROLL))      GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_PREROLL);    if (GST_BUFFER_FLAG_IS_SET (current_pic.buf, GST_BUFFER_FLAG_GAP))      GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_GAP);    err = NULL;    ret =        gst_vdp_output_src_pad_push ((GstVdpOutputSrcPad *) vpp->srcpad,        outbuf, &err);    if (ret != GST_FLOW_OK)      goto output_pad_error;    continue;  invalid_caps://.........这里部分代码省略.........
开发者ID:lubing521,项目名称:gst-embedded-builder,代码行数:101,


示例7: gst_vdp_vpp_chain

static GstFlowReturngst_vdp_vpp_chain (GstPad * pad, GstBuffer * buffer){  GstVdpVideoPostProcess *vpp =      GST_VDP_VIDEO_POST_PROCESS (gst_pad_get_parent (pad));  GstClockTime qostime;  GstFlowReturn ret = GST_FLOW_OK;  GError *err;  GST_DEBUG ("chain");  /* can only do QoS if the segment is in TIME */  if (vpp->segment.format != GST_FORMAT_TIME)    goto no_qos;  /* QOS is done on the running time of the buffer, get it now */  qostime = gst_segment_to_running_time (&vpp->segment, GST_FORMAT_TIME,      GST_BUFFER_TIMESTAMP (buffer));  if (qostime != -1) {    gboolean need_skip;    GstClockTime earliest_time;    /* lock for getting the QoS parameters that are set (in a different thread)     * with the QOS events */    GST_OBJECT_LOCK (vpp);    earliest_time = vpp->earliest_time;    /* check for QoS, don't perform conversion for buffers     * that are known to be late. */    need_skip = GST_CLOCK_TIME_IS_VALID (earliest_time) && qostime != -1 &&        qostime <= earliest_time;    GST_OBJECT_UNLOCK (vpp);    if (need_skip) {      GST_DEBUG_OBJECT (vpp, "skipping transform: qostime %"          GST_TIME_FORMAT " <= %" GST_TIME_FORMAT,          GST_TIME_ARGS (qostime), GST_TIME_ARGS (earliest_time));      /* mark discont for next buffer */      vpp->discont = TRUE;      gst_buffer_unref (buffer);      return GST_FLOW_OK;    }  }no_qos:  if (vpp->discont) {    GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_DISCONT);    vpp->discont = FALSE;  }  if (G_UNLIKELY (GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_DISCONT))) {    GST_DEBUG_OBJECT (vpp, "Received discont buffer");    gst_vdp_vpp_flush (vpp);  }  if (!vpp->native_input) {    GstVdpVideoBuffer *video_buf;    err = NULL;    video_buf =        (GstVdpVideoBuffer *) gst_vdp_buffer_pool_get_buffer (vpp->vpool, &err);    if (G_UNLIKELY (!video_buf))      goto video_buf_error;    if (!gst_vdp_video_buffer_upload (video_buf, buffer, vpp->fourcc,            vpp->width, vpp->height)) {      gst_buffer_unref (GST_BUFFER 
(video_buf));      GST_ELEMENT_ERROR (vpp, RESOURCE, READ,          ("Couldn't upload YUV data to vdpau"), (NULL));      ret = GST_FLOW_ERROR;      goto error;    }    gst_buffer_copy_metadata (GST_BUFFER (video_buf), buffer,        GST_BUFFER_COPY_FLAGS | GST_BUFFER_COPY_TIMESTAMPS);    gst_buffer_unref (buffer);    buffer = GST_BUFFER (video_buf);  }  if (G_UNLIKELY (vpp->mixer == VDP_INVALID_HANDLE)) {    ret = gst_vdp_vpp_create_mixer (vpp);    if (ret != GST_FLOW_OK)      goto error;  }  gst_vdp_vpp_add_buffer (vpp, GST_VDP_VIDEO_BUFFER (buffer));  ret = gst_vdp_vpp_drain (vpp);done:  gst_object_unref (vpp);  return ret;error:  gst_buffer_unref (buffer);//.........这里部分代码省略.........
开发者ID:lubing521,项目名称:gst-embedded-builder,代码行数:101,


示例8: gst_shm_src_create

static GstFlowReturngst_shm_src_create (GstPushSrc * psrc, GstBuffer ** outbuf){  GstShmSrc *self = GST_SHM_SRC (psrc);  gchar *buf = NULL;  int rv = 0;  struct GstShmBuffer *gsb;  do {    if (gst_poll_wait (self->poll, GST_CLOCK_TIME_NONE) < 0) {      if (errno == EBUSY)        return GST_FLOW_WRONG_STATE;      GST_ELEMENT_ERROR (self, RESOURCE, READ, ("Failed to read from shmsrc"),          ("Poll failed on fd: %s", strerror (errno)));      return GST_FLOW_ERROR;    }    if (self->unlocked)      return GST_FLOW_WRONG_STATE;    if (gst_poll_fd_has_closed (self->poll, &self->pollfd)) {      GST_ELEMENT_ERROR (self, RESOURCE, READ, ("Failed to read from shmsrc"),          ("Control socket has closed"));      return GST_FLOW_ERROR;    }    if (gst_poll_fd_has_error (self->poll, &self->pollfd)) {      GST_ELEMENT_ERROR (self, RESOURCE, READ, ("Failed to read from shmsrc"),          ("Control socket has error"));      return GST_FLOW_ERROR;    }    if (gst_poll_fd_can_read (self->poll, &self->pollfd)) {      buf = NULL;      GST_LOG_OBJECT (self, "Reading from pipe");      GST_OBJECT_LOCK (self);      rv = sp_client_recv (self->pipe->pipe, &buf);      GST_OBJECT_UNLOCK (self);      if (rv < 0) {        GST_ELEMENT_ERROR (self, RESOURCE, READ, ("Failed to read from shmsrc"),            ("Error reading control data: %d", rv));        return GST_FLOW_ERROR;      }    }  } while (buf == NULL);  GST_LOG_OBJECT (self, "Got buffer %p of size %d", buf, rv);  gsb = g_slice_new0 (struct GstShmBuffer);  gsb->buf = buf;  gsb->pipe = self->pipe;  gst_shm_pipe_inc (self->pipe);  *outbuf = gst_buffer_new ();  GST_BUFFER_FLAG_SET (*outbuf, GST_BUFFER_FLAG_READONLY);  GST_BUFFER_DATA (*outbuf) = (guint8 *) buf;  GST_BUFFER_SIZE (*outbuf) = rv;  GST_BUFFER_MALLOCDATA (*outbuf) = (guint8 *) gsb;  GST_BUFFER_FREE_FUNC (*outbuf) = free_buffer;  return GST_FLOW_OK;}
开发者ID:kanongil,项目名称:gst-plugins-bad,代码行数:62,


示例9: gst_rtp_amr_depay_process

//.........这里部分代码省略.........    CMR = (payload[0] & 0xf0) >> 4;    /* strip CMR header now, pack FT and the data for the decoder */    payload_len -= 1;    payload += 1;    GST_DEBUG_OBJECT (rtpamrdepay, "payload len %d", payload_len);    if (rtpamrdepay->interleaving) {      ILL = (payload[0] & 0xf0) >> 4;      ILP = (payload[0] & 0x0f);      payload_len -= 1;      payload += 1;      if (ILP > ILL)        goto wrong_interleaving;    }    /*     *  0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6     * +-+-+-+-+-+-+-+-+..     * |F|  FT   |Q|P|P| more FT..     * +-+-+-+-+-+-+-+-+..     */    /* count number of packets by counting the FTs. Also     * count number of amr data bytes and number of non-empty     * packets (this is also the number of CRCs if present). */    amr_len = 0;    num_nonempty_packets = 0;    num_packets = 0;    for (i = 0; i < payload_len; i++) {      gint fr_size;      guint8 FT;      FT = (payload[i] & 0x78) >> 3;      fr_size = frame_size[FT];      GST_DEBUG_OBJECT (rtpamrdepay, "frame size %d", fr_size);      if (fr_size == -1)        goto wrong_framesize;      if (fr_size > 0) {        amr_len += fr_size;        num_nonempty_packets++;      }      num_packets++;      if ((payload[i] & 0x80) == 0)        break;    }    if (rtpamrdepay->crc) {      /* data len + CRC len + header bytes should be smaller than payload_len */      if (num_packets + num_nonempty_packets + amr_len > payload_len)        goto wrong_length_1;    } else {      /* data len + header bytes should be smaller than payload_len */      if (num_packets + amr_len > payload_len)        goto wrong_length_2;    }    outbuf = gst_buffer_new_and_alloc (payload_len);    /* point to destination */    p = GST_BUFFER_DATA (outbuf);    /* point to first data packet */    dp = payload + num_packets;    if (rtpamrdepay->crc) {      /* skip CRC if present */      dp += num_nonempty_packets;    }    for (i = 0; i < num_packets; i++) {      gint fr_size;      /* copy FT, clear F bit */      *p++ = 
payload[i] & 0x7f;      fr_size = frame_size[(payload[i] & 0x78) >> 3];      if (fr_size > 0) {        /* copy data packet, FIXME, calc CRC here. */        memcpy (p, dp, fr_size);        p += fr_size;        dp += fr_size;      }    }    /* we can set the duration because each packet is 20 milliseconds */    GST_BUFFER_DURATION (outbuf) = num_packets * 20 * GST_MSECOND;    if (gst_rtp_buffer_get_marker (buf)) {      /* marker bit marks a discont buffer after a talkspurt. */      GST_DEBUG_OBJECT (depayload, "marker bit was set");      GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DISCONT);    }    GST_DEBUG_OBJECT (depayload, "pushing buffer of size %d",        GST_BUFFER_SIZE (outbuf));  }
开发者ID:spunktsch,项目名称:svtplayer,代码行数:101,


示例10: gst_decklink_src_task

static voidgst_decklink_src_task (void *priv){    GstDecklinkSrc *decklinksrc = GST_DECKLINK_SRC (priv);    GstBuffer *buffer;    GstBuffer *audio_buffer;    IDeckLinkVideoInputFrame *video_frame;    IDeckLinkAudioInputPacket *audio_frame;    void *data;    int n_samples;    GstFlowReturn ret;    const GstDecklinkMode *mode;    GST_DEBUG_OBJECT (decklinksrc, "task");    g_mutex_lock (decklinksrc->mutex);    while (decklinksrc->video_frame == NULL && !decklinksrc->stop) {        g_cond_wait (decklinksrc->cond, decklinksrc->mutex);    }    video_frame = decklinksrc->video_frame;    audio_frame = decklinksrc->audio_frame;    decklinksrc->video_frame = NULL;    decklinksrc->audio_frame = NULL;    g_mutex_unlock (decklinksrc->mutex);    if (decklinksrc->stop) {        GST_DEBUG ("stopping task");        return;    }    /* warning on dropped frames */    if (decklinksrc->dropped_frames - decklinksrc->dropped_frames_old > 0) {        GST_ELEMENT_WARNING (decklinksrc, RESOURCE, READ,                             ("Dropped %d frame(s), for a total of %d frame(s)",                              decklinksrc->dropped_frames - decklinksrc->dropped_frames_old,                              decklinksrc->dropped_frames),                             (NULL));        decklinksrc->dropped_frames_old = decklinksrc->dropped_frames;    }    mode = gst_decklink_get_mode (decklinksrc->mode);    video_frame->GetBytes (&data);    if (decklinksrc->copy_data) {        buffer = gst_buffer_new_and_alloc (mode->width * mode->height * 2);        memcpy (GST_BUFFER_DATA (buffer), data, mode->width * mode->height * 2);        video_frame->Release ();    } else {        buffer = gst_buffer_new ();        GST_BUFFER_SIZE (buffer) = mode->width * mode->height * 2;        GST_BUFFER_DATA (buffer) = (guint8 *) data;        GST_BUFFER_FREE_FUNC (buffer) = video_frame_free;        GST_BUFFER_MALLOCDATA (buffer) = (guint8 *) video_frame;    }    GST_BUFFER_TIMESTAMP (buffer) =        gst_util_uint64_scale_int 
(decklinksrc->frame_num * GST_SECOND,                                   mode->fps_d, mode->fps_n);    GST_BUFFER_DURATION (buffer) =        gst_util_uint64_scale_int ((decklinksrc->frame_num + 1) * GST_SECOND,                                   mode->fps_d, mode->fps_n) - GST_BUFFER_TIMESTAMP (buffer);    GST_BUFFER_OFFSET (buffer) = decklinksrc->frame_num;    GST_BUFFER_OFFSET_END (buffer) = decklinksrc->frame_num;    if (decklinksrc->frame_num == 0) {        GstEvent *event;        gboolean ret;        GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_DISCONT);        event = gst_event_new_new_segment (FALSE, 1.0, GST_FORMAT_TIME, 0,                                           GST_CLOCK_TIME_NONE, 0);        ret = gst_pad_push_event (decklinksrc->videosrcpad, event);        if (!ret) {            GST_ERROR_OBJECT (decklinksrc, "new segment event ret=%d", ret);            return;        }    }    if (decklinksrc->video_caps == NULL) {        decklinksrc->video_caps = gst_decklink_mode_get_caps (decklinksrc->mode);    }    gst_buffer_set_caps (buffer, decklinksrc->video_caps);    ret = gst_pad_push (decklinksrc->videosrcpad, buffer);    if (ret != GST_FLOW_OK) {        GST_ELEMENT_ERROR (decklinksrc, CORE, NEGOTIATION, (NULL), (NULL));    }    if (gst_pad_is_linked (decklinksrc->audiosrcpad)) {        n_samples = audio_frame->GetSampleFrameCount ();        audio_frame->GetBytes (&data);        audio_buffer = gst_buffer_new_and_alloc (n_samples * 2 * 2);        memcpy (GST_BUFFER_DATA (audio_buffer), data, n_samples * 2 * 2);        GST_BUFFER_TIMESTAMP (audio_buffer) =//.........这里部分代码省略.........
开发者ID:nezticle,项目名称:gst-plugins-bad-free,代码行数:101,


示例11: gst_audio_rate_chain

//.........这里部分代码省略.........    guint64 fillsamples;    /* We don't want to allocate a single unreasonably huge buffer - it might       be hundreds of megabytes. So, limit each output buffer to one second of       audio */    fillsamples = in_offset - audiorate->next_offset;    while (fillsamples > 0) {      guint64 cursamples = MIN (fillsamples, audiorate->rate);      fillsamples -= cursamples;      fillsize = cursamples * audiorate->bytes_per_sample;      fill = gst_buffer_new_and_alloc (fillsize);      /* FIXME, 0 might not be the silence byte for the negotiated format. */      memset (GST_BUFFER_DATA (fill), 0, fillsize);      GST_DEBUG_OBJECT (audiorate, "inserting %lld samples", cursamples);      GST_BUFFER_OFFSET (fill) = audiorate->next_offset;      audiorate->next_offset += cursamples;      GST_BUFFER_OFFSET_END (fill) = audiorate->next_offset;      /* Use next timestamp, then calculate following timestamp based on        * offset to get duration. Neccesary complexity to get 'perfect'        * streams */      GST_BUFFER_TIMESTAMP (fill) = audiorate->next_ts;      audiorate->next_ts = gst_util_uint64_scale_int (audiorate->next_offset,          GST_SECOND, audiorate->rate);      GST_BUFFER_DURATION (fill) = audiorate->next_ts -          GST_BUFFER_TIMESTAMP (fill);      /* we created this buffer to fill a gap */      GST_BUFFER_FLAG_SET (fill, GST_BUFFER_FLAG_GAP);      /* set discont if it's pending, this is mostly done for the first buffer        * and after a flushing seek */      if (audiorate->discont) {        GST_BUFFER_FLAG_SET (fill, GST_BUFFER_FLAG_DISCONT);        audiorate->discont = FALSE;      }      gst_buffer_set_caps (fill, GST_PAD_CAPS (audiorate->srcpad));      ret = gst_pad_push (audiorate->srcpad, fill);      if (ret != GST_FLOW_OK)        goto beach;      audiorate->out++;      audiorate->add += cursamples;      if (!audiorate->silent)        g_object_notify (G_OBJECT (audiorate), "add");    }  } else if (in_offset < 
audiorate->next_offset) {    /* need to remove samples */    if (in_offset_end <= audiorate->next_offset) {      guint64 drop = in_size / audiorate->bytes_per_sample;      audiorate->drop += drop;      GST_DEBUG_OBJECT (audiorate, "dropping %lld samples", drop);      /* we can drop the buffer completely */      gst_buffer_unref (buf);      if (!audiorate->silent)        g_object_notify (G_OBJECT (audiorate), "drop");
开发者ID:prajnashi,项目名称:gst-plugins-base,代码行数:67,


示例12: gst_rtp_xqt_depay_process

//.........这里部分代码省略.........         * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+         * |S| Reserved                    | Sample Length                 |         * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+         * | Sample Timestamp                                              |         * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+         * . Sample Data ...                                               .         * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+         * |S| Reserved                    | Sample Length                 |         * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+         * | Sample Timestamp                                              |         * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+         * . Sample Data ...                                               .         * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+         * . ......                                                        .         
* +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+         */        while (payload_len > 8) {          s = (payload[0] & 0x80) != 0; /* contains sync sample */          slen = (payload[2] << 8) | payload[3];          /* timestamp =           *    (payload[4] << 24) | (payload[5] << 16) | (payload[6] << 8) |           *    payload[7];           */          payload += 8;          payload_len -= 8;          if (slen > payload_len)            slen = payload_len;          outbuf = gst_buffer_new_and_alloc (slen);          gst_buffer_fill (outbuf, 0, payload, slen);          if (!s)            GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DELTA_UNIT);          gst_rtp_base_depayload_push (depayload, outbuf);          /* aligned on 32 bit boundary */          slen = GST_ROUND_UP_4 (slen);          payload += slen;          payload_len -= slen;        }        break;      }      case 3:      {        /* one sample per packet, use adapter to combine based on marker bit. */        outbuf = gst_buffer_new_and_alloc (payload_len);        gst_buffer_fill (outbuf, 0, payload, payload_len);        gst_adapter_push (rtpxqtdepay->adapter, outbuf);        outbuf = NULL;        if (!m)          goto done;        avail = gst_adapter_available (rtpxqtdepay->adapter);        outbuf = gst_adapter_take_buffer (rtpxqtdepay->adapter, avail);        GST_DEBUG_OBJECT (rtpxqtdepay,            "gst_rtp_xqt_depay_chain: pushing buffer of size %u", avail);        goto done;      }    }
开发者ID:Acidburn0zzz,项目名称:gstreamer-libde265,代码行数:67,


示例13: videodecoder_chain

//.........这里部分代码省略.........    {        if (av_new_packet(&decoder->packet, GST_BUFFER_SIZE(buf)) == 0)        {            memcpy(decoder->packet.data, GST_BUFFER_DATA(buf), GST_BUFFER_SIZE(buf));            if (GST_BUFFER_TIMESTAMP_IS_VALID(buf))                base->context->reordered_opaque = GST_BUFFER_TIMESTAMP(buf);            else                base->context->reordered_opaque = AV_NOPTS_VALUE;            num_dec = avcodec_decode_video2(base->context, base->frame, &decoder->frame_finished, &decoder->packet);            av_free_packet(&decoder->packet);        }        else        {            result = GST_FLOW_ERROR;            goto _exit;        }    }    else    {        av_init_packet(&decoder->packet);        decoder->packet.data = GST_BUFFER_DATA(buf);        decoder->packet.size = GST_BUFFER_SIZE(buf);        if (GST_BUFFER_TIMESTAMP_IS_VALID(buf))            base->context->reordered_opaque = GST_BUFFER_TIMESTAMP(buf);        else            base->context->reordered_opaque = AV_NOPTS_VALUE;        num_dec = avcodec_decode_video2(base->context, base->frame, &decoder->frame_finished, &decoder->packet);    }    if (num_dec < 0)    {        //        basedecoder_flush(base);#ifdef DEBUG_OUTPUT        g_print ("videodecoder_chain error: %s/n", avelement_error_to_string(AVELEMENT(decoder), num_dec));#endif        goto _exit;    }    if (decoder->frame_finished > 0)    {        if (!videodecoder_configure_sourcepad(decoder))            result = GST_FLOW_ERROR;        else        {            GstBuffer *outbuf = NULL;            result = gst_pad_alloc_buffer_and_set_caps(base->srcpad, base->context->frame_number,                                                       decoder->frame_size, GST_PAD_CAPS(base->srcpad), &outbuf);            if (result != GST_FLOW_OK)            {                if (result != GST_FLOW_WRONG_STATE)                {                    gst_element_message_full(GST_ELEMENT(decoder), GST_MESSAGE_ERROR,                                    
         GST_STREAM_ERROR, GST_STREAM_ERROR_DECODE,                                             ("Decoded video buffer allocation failed"), NULL,                                             ("videodecoder.c"), ("videodecoder_chain"), 0);                }            }            else            {                if (base->frame->reordered_opaque != AV_NOPTS_VALUE)                {                    GST_BUFFER_TIMESTAMP(outbuf) = base->frame->reordered_opaque;                    GST_BUFFER_DURATION(outbuf) = GST_BUFFER_DURATION(buf); // Duration for video usually same                }                GST_BUFFER_SIZE(outbuf) = decoder->frame_size;                // Copy image by parts from different arrays.                memcpy(GST_BUFFER_DATA(outbuf),                     base->frame->data[0], decoder->u_offset);                memcpy(GST_BUFFER_DATA(outbuf) + decoder->u_offset, base->frame->data[1], decoder->uv_blocksize);                memcpy(GST_BUFFER_DATA(outbuf) + decoder->v_offset, base->frame->data[2], decoder->uv_blocksize);                GST_BUFFER_OFFSET_END(outbuf) = GST_BUFFER_OFFSET_NONE;                if (decoder->discont || GST_BUFFER_IS_DISCONT(buf))                {#ifdef DEBUG_OUTPUT                    g_print("Video discont: frame size=%dx%d/n", base->context->width, base->context->height);#endif                    GST_BUFFER_FLAG_SET(outbuf, GST_BUFFER_FLAG_DISCONT);                    decoder->discont = FALSE;                }#ifdef VERBOSE_DEBUG                g_print("videodecoder: pushing buffer ts=%.4f sec", (double)GST_BUFFER_TIMESTAMP(outbuf)/GST_SECOND);#endif                result = gst_pad_push(base->srcpad, outbuf);#ifdef VERBOSE_DEBUG                g_print(" done, res=%s/n", gst_flow_get_name(result));#endif            }        }    }_exit:// INLINE - gst_buffer_unref()    gst_buffer_unref(buf);    return result;}
开发者ID:166MMX,项目名称:openjdk.java.net-openjfx-8u40-rt,代码行数:101,


示例14: gst_jasper_dec_chain

/* Sink-pad chain function for the Jasper JPEG2000 decoder (GStreamer 0.10 API).
 *
 * Takes ownership of @buf. Flow:
 *   1) bail out if caps/format were never negotiated,
 *   2) QoS decision: optionally skip decoding of this frame,
 *   3) optionally strip a fixed prefix and prepend codec_data,
 *   4) decode and push the resulting picture downstream.
 *
 * Returns a GstFlowReturn suitable for propagating upstream. */
static GstFlowReturn
gst_jasper_dec_chain (GstPad * pad, GstBuffer * buf)
{
  GstJasperDec *dec;
  GstFlowReturn ret = GST_FLOW_OK;
  GstClockTime ts;
  GstBuffer *outbuf = NULL;
  guint8 *data;
  guint size;
  gboolean decode;

  dec = GST_JASPER_DEC (GST_PAD_PARENT (pad));

  /* dec->fmt < 0 means no format was negotiated yet; decoding is impossible */
  if (dec->fmt < 0)
    goto not_negotiated;

  ts = GST_BUFFER_TIMESTAMP (buf);

  GST_LOG_OBJECT (dec, "buffer with ts: %" GST_TIME_FORMAT, GST_TIME_ARGS (ts));

  /* remember discont so we can propagate the flag onto the next output */
  if (GST_BUFFER_FLAG_IS_SET (buf, GST_BUFFER_FLAG_DISCONT))
    dec->discont = TRUE;

  /* QoS: helper decides whether this frame is still worth decoding
   * (presumably based on downstream deadlines — defined elsewhere) */
  decode = gst_jasper_dec_do_qos (dec, ts);

  /* FIXME: do clipping */

  if (G_UNLIKELY (!decode)) {
    /* dropping a frame creates a discontinuity for the next one */
    dec->discont = TRUE;
    goto done;
  }

  /* strip possible prefix */
  if (dec->strip) {
    GstBuffer *tmp;

    /* sub-buffer shares data; replace buf while keeping its timestamps */
    tmp = gst_buffer_create_sub (buf, dec->strip,
        GST_BUFFER_SIZE (buf) - dec->strip);
    gst_buffer_copy_metadata (tmp, buf, GST_BUFFER_COPY_TIMESTAMPS);
    gst_buffer_unref (buf);
    buf = tmp;
  }
  /* prepend possible codec_data */
  if (dec->codec_data) {
    GstBuffer *tmp;

    /* both inputs are ref'ed because gst_buffer_append does not consume
     * the caller's references here; old buf is released afterwards */
    tmp =
        gst_buffer_append (gst_buffer_ref (dec->codec_data),
        gst_buffer_ref (buf));
    gst_buffer_copy_metadata (tmp, buf, GST_BUFFER_COPY_TIMESTAMPS);
    gst_buffer_unref (buf);
    buf = tmp;
  }

  /* now really feed the data to decoder */
  data = GST_BUFFER_DATA (buf);
  size = GST_BUFFER_SIZE (buf);
  ret = gst_jasper_dec_get_picture (dec, data, size, &outbuf);

  if (outbuf) {
    /* carry input timestamps over to the decoded picture */
    gst_buffer_copy_metadata (outbuf, buf, GST_BUFFER_COPY_TIMESTAMPS);
    if (dec->discont) {
      GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DISCONT);
      dec->discont = FALSE;
    }

    /* only push on success; otherwise drop the decoded buffer */
    if (ret == GST_FLOW_OK)
      ret = gst_pad_push (dec->srcpad, outbuf);
    else
      gst_buffer_unref (outbuf);
  }

done:
  /* we own buf (chain semantics), release it on every path */
  gst_buffer_unref (buf);

  return ret;

  /* ERRORS */
not_negotiated:
  {
    GST_ELEMENT_ERROR (dec, CORE, NEGOTIATION, (NULL),
        ("format wasn't negotiated before chain function"));
    ret = GST_FLOW_NOT_NEGOTIATED;
    goto done;
  }
}
开发者ID:collects,项目名称:gst-plugins-bad,代码行数:87,


示例15: gst_vaapidecode_push_decoded_frame

static GstFlowReturngst_vaapidecode_push_decoded_frame (GstVideoDecoder * vdec,    GstVideoCodecFrame * out_frame){  GstVaapiDecode *const decode = GST_VAAPIDECODE (vdec);  GstVaapiSurfaceProxy *proxy;  GstFlowReturn ret;  const GstVaapiRectangle *crop_rect;  GstVaapiVideoMeta *meta;  guint flags, out_flags = 0;  if (!GST_VIDEO_CODEC_FRAME_IS_DECODE_ONLY (out_frame)) {    proxy = gst_video_codec_frame_get_user_data (out_frame);    /* reconfigure if un-cropped surface resolution changed */    if (is_surface_resolution_changed (vdec, GST_VAAPI_SURFACE_PROXY_SURFACE (proxy)))      gst_vaapidecode_negotiate (decode);    gst_vaapi_surface_proxy_set_destroy_notify (proxy,        (GDestroyNotify) gst_vaapidecode_release, gst_object_ref (decode));    ret = gst_video_decoder_allocate_output_frame (vdec, out_frame);    if (ret != GST_FLOW_OK)      goto error_create_buffer;    meta = gst_buffer_get_vaapi_video_meta (out_frame->output_buffer);    if (!meta)      goto error_get_meta;    gst_vaapi_video_meta_set_surface_proxy (meta, proxy);    flags = gst_vaapi_surface_proxy_get_flags (proxy);    if (flags & GST_VAAPI_SURFACE_PROXY_FLAG_CORRUPTED)      out_flags |= GST_BUFFER_FLAG_CORRUPTED;    if (flags & GST_VAAPI_SURFACE_PROXY_FLAG_INTERLACED) {      out_flags |= GST_VIDEO_BUFFER_FLAG_INTERLACED;      if (flags & GST_VAAPI_SURFACE_PROXY_FLAG_TFF)        out_flags |= GST_VIDEO_BUFFER_FLAG_TFF;      if (flags & GST_VAAPI_SURFACE_PROXY_FLAG_RFF)        out_flags |= GST_VIDEO_BUFFER_FLAG_RFF;      if (flags & GST_VAAPI_SURFACE_PROXY_FLAG_ONEFIELD)        out_flags |= GST_VIDEO_BUFFER_FLAG_ONEFIELD;    }    GST_BUFFER_FLAG_SET (out_frame->output_buffer, out_flags);#if GST_CHECK_VERSION(1,5,0)    /* First-in-bundle flag only appeared in 1.5 dev */    if (flags & GST_VAAPI_SURFACE_PROXY_FLAG_FFB) {      GST_BUFFER_FLAG_SET (out_frame->output_buffer,          GST_VIDEO_BUFFER_FLAG_FIRST_IN_BUNDLE);    }#endif    crop_rect = gst_vaapi_surface_proxy_get_crop_rect (proxy);    if 
(crop_rect) {      GstVideoCropMeta *const crop_meta =          gst_buffer_add_video_crop_meta (out_frame->output_buffer);      if (crop_meta) {        crop_meta->x = crop_rect->x;        crop_meta->y = crop_rect->y;        crop_meta->width = crop_rect->width;        crop_meta->height = crop_rect->height;      }    }#if (USE_GLX || USE_EGL)    if (decode->has_texture_upload_meta)      gst_buffer_ensure_texture_upload_meta (out_frame->output_buffer);#endif  }  ret = gst_video_decoder_finish_frame (vdec, out_frame);  if (ret != GST_FLOW_OK)    goto error_commit_buffer;  gst_video_codec_frame_unref (out_frame);  return GST_FLOW_OK;  /* ERRORS */error_create_buffer:  {    const GstVaapiID surface_id =        gst_vaapi_surface_get_id (GST_VAAPI_SURFACE_PROXY_SURFACE (proxy));    GST_ELEMENT_ERROR (vdec, STREAM, FAILED,        ("Failed to create sink buffer"),        ("video sink failed to create video buffer for proxy'ed "            "surface %" GST_VAAPI_ID_FORMAT, GST_VAAPI_ID_ARGS (surface_id)));    gst_video_decoder_drop_frame (vdec, out_frame);    gst_video_codec_frame_unref (out_frame);    return GST_FLOW_ERROR;  }error_get_meta:  {    GST_ELEMENT_ERROR (vdec, STREAM, FAILED,        ("Failed to get vaapi video meta attached to video buffer"),        ("Failed to get vaapi video meta attached to video buffer"));    gst_video_decoder_drop_frame (vdec, out_frame);    gst_video_codec_frame_unref (out_frame);    return GST_FLOW_ERROR;  }error_commit_buffer://.........这里部分代码省略.........
开发者ID:DarkLighters,项目名称:gstreamer-vaapi,代码行数:101,


示例16: gst_rtmp_src_create

/*
 * Read a new buffer from src->reqoffset, takes care of events
 * and seeking and such.
 *
 * GstPushSrc create vfunc: allocates a blocksize-sized buffer, fills it
 * from librtmp with repeated RTMP_Read calls (handling short reads),
 * resizes the buffer to the bytes actually read, stamps offset/timestamp
 * and returns it in @buffer.
 *
 * Returns GST_FLOW_OK on success, GST_FLOW_EOS when the very first read
 * yields no data, GST_FLOW_ERROR on allocation or read failure.
 */
static GstFlowReturn
gst_rtmp_src_create (GstPushSrc * pushsrc, GstBuffer ** buffer)
{
  GstRTMPSrc *src;
  GstBuffer *buf;
  GstMapInfo map;
  guint8 *data;
  guint todo;
  gsize bsize;
  int read;
  int size;

  src = GST_RTMP_SRC (pushsrc);

  g_return_val_if_fail (src->rtmp != NULL, GST_FLOW_ERROR);

  size = GST_BASE_SRC_CAST (pushsrc)->blocksize;

  GST_DEBUG ("reading from %" G_GUINT64_FORMAT
      ", size %u", src->cur_offset, size);

  buf = gst_buffer_new_allocate (NULL, size, NULL);
  if (G_UNLIKELY (buf == NULL)) {
    GST_ERROR_OBJECT (src, "Failed to allocate %u bytes", size);
    return GST_FLOW_ERROR;
  }

  bsize = todo = size;
  gst_buffer_map (buf, &map, GST_MAP_WRITE);
  data = map.data;
  /* NOTE(review): bsize is immediately reset to 0 here, so the earlier
   * "bsize = todo = size" only initializes todo — looks intentional but
   * the double assignment is confusing */
  read = bsize = 0;

  /* accumulate up to blocksize bytes; RTMP_Read may return short reads */
  while (todo > 0) {
    read = RTMP_Read (src->rtmp, (char *) data, todo);

    if (G_UNLIKELY (read == 0 && todo == size)) {
      /* zero bytes on the very first iteration: stream ended */
      goto eos;
    } else if (G_UNLIKELY (read == 0)) {
      /* partial buffer collected, then stream ended: push what we have */
      todo = 0;
      break;
    }

    if (G_UNLIKELY (read < 0))
      goto read_failed;

    if (read < todo) {
      data += read;
      todo -= read;
      bsize += read;
    } else {
      bsize += todo;
      todo = 0;
    }
    GST_LOG ("  got size %d", read);
  }
  gst_buffer_unmap (buf, &map);
  /* shrink the buffer to the bytes actually read */
  gst_buffer_resize (buf, 0, bsize);

  if (src->discont) {
    GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DISCONT);
    src->discont = FALSE;
  }

  GST_BUFFER_TIMESTAMP (buf) = src->last_timestamp;
  GST_BUFFER_OFFSET (buf) = src->cur_offset;

  /* NOTE(review): offset advances by the requested blocksize, not by
   * bsize (bytes actually read) — on a short final read the next offset
   * overshoots; confirm against upstream gstrtmpsrc.c before changing */
  src->cur_offset += size;

  /* timestamps from librtmp's media stamp are monotonic via MAX() */
  if (src->last_timestamp == GST_CLOCK_TIME_NONE)
    src->last_timestamp = src->rtmp->m_mediaStamp * GST_MSECOND;
  else
    src->last_timestamp =
        MAX (src->last_timestamp, src->rtmp->m_mediaStamp * GST_MSECOND);

  GST_LOG_OBJECT (src, "Created buffer of size %u at %" G_GINT64_FORMAT
      " with timestamp %" GST_TIME_FORMAT, size, GST_BUFFER_OFFSET (buf),
      GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)));

  /* we're done, return the buffer */
  *buffer = buf;

  return GST_FLOW_OK;

read_failed:
  {
    gst_buffer_unref (buf);
    GST_ELEMENT_ERROR (src, RESOURCE, READ, (NULL), ("Failed to read data"));
    return GST_FLOW_ERROR;
  }
eos:
  {
    gst_buffer_unref (buf);
    GST_DEBUG_OBJECT (src, "Reading data gave EOS");
    return GST_FLOW_EOS;
  }
}
开发者ID:lubing521,项目名称:gst-embedded-builder,代码行数:100,


示例17: gst_rdt_manager_loop

/* push packets from the queue to the downstream demuxer */
/* Streaming-task loop for the RDT manager's source pad.
 *
 * Runs under the jitter-buffer lock (JBUF_LOCK/UNLOCK macros defined
 * elsewhere): waits until a packet is available (or EOS/flushing),
 * pops one buffer, releases the lock, and pushes it downstream.
 * On any non-OK downstream flow return the task pauses itself and
 * stores the result in session->srcresult. */
static void
gst_rdt_manager_loop (GstPad * pad)
{
  GstRDTManager *rdtmanager;
  GstRDTManagerSession *session;
  GstBuffer *buffer;
  GstFlowReturn result;

  rdtmanager = GST_RDT_MANAGER (GST_PAD_PARENT (pad));

  session = gst_pad_get_element_private (pad);

  /* takes the lock; jumps to "flushing" if the session is shutting down */
  JBUF_LOCK_CHECK (session, flushing);
  GST_DEBUG_OBJECT (rdtmanager, "Peeking item");
  while (TRUE) {
    /* always wait if we are blocked */
    if (!session->blocked) {
      /* if we have a packet, we can exit the loop and grab it */
      if (rdt_jitter_buffer_num_packets (session->jbuf) > 0)
        break;

      /* no packets but we are EOS, do eos logic */
      if (session->eos)
        goto do_eos;
    }
    /* underrun, wait for packets or flushing now */
    session->waiting = TRUE;
    JBUF_WAIT_CHECK (session, flushing);
    session->waiting = FALSE;
  }

  buffer = rdt_jitter_buffer_pop (session->jbuf);

  GST_DEBUG_OBJECT (rdtmanager, "Got item %p", buffer);

  /* first buffer after a gap/flush gets the DISCONT flag */
  if (session->discont) {
    GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_DISCONT);
    session->discont = FALSE;
  }

  /* drop the lock before pushing: downstream may block or re-enter */
  JBUF_UNLOCK (session);

  result = gst_pad_push (session->recv_rtp_src, buffer);
  if (result != GST_FLOW_OK)
    goto pause;

  return;

  /* ERRORS */
flushing:
  {
    /* reached with the lock held (via the LOCK/WAIT _CHECK macros) */
    GST_DEBUG_OBJECT (rdtmanager, "we are flushing");
    gst_pad_pause_task (session->recv_rtp_src);
    JBUF_UNLOCK (session);
    return;
  }
do_eos:
  {
    /* store result, we are flushing now */
    GST_DEBUG_OBJECT (rdtmanager, "We are EOS, pushing EOS downstream");
    session->srcresult = GST_FLOW_EOS;
    gst_pad_pause_task (session->recv_rtp_src);
    gst_pad_push_event (session->recv_rtp_src, gst_event_new_eos ());
    JBUF_UNLOCK (session);
    return;
  }
pause:
  {
    GST_DEBUG_OBJECT (rdtmanager, "pausing task, reason %s",
        gst_flow_get_name (result));
    /* re-take the lock to publish the flow result safely */
    JBUF_LOCK (session);
    /* store result */
    session->srcresult = result;
    /* we don't post errors or anything because upstream will do that for us
     * when we pass the return value upstream. */
    gst_pad_pause_task (session->recv_rtp_src);
    JBUF_UNLOCK (session);
    return;
  }
}
开发者ID:GrokImageCompression,项目名称:gst-plugins-ugly,代码行数:81,


示例18: gst_rtp_vp9_depay_process

//.........这里部分代码省略.........        guint r = (ss[sssize] & 0x0c) >> 2;        GST_TRACE_OBJECT (self, "N_G[%u]: 0x%02x -> T=%u, U=%u, R=%u", i,            ss[sssize], t, u, r);        for (j = 0; j < r; j++)          GST_TRACE_OBJECT (self, "  R[%u]: P_DIFF=%u", j, ss[sssize + 1 + j]);        sssize += 1 + r;        if (G_UNLIKELY (size < hdrsize + sssize + 1))          goto too_small;      }    }    hdrsize += sssize;  }  GST_DEBUG_OBJECT (depay, "hdrsize %u, size %u", hdrsize, size);  if (G_UNLIKELY (hdrsize >= size))    goto too_small;  payload = gst_rtp_buffer_get_payload_subbuffer (rtp, hdrsize, -1);  {    GstMapInfo map;    gst_buffer_map (payload, &map, GST_MAP_READ);    GST_MEMDUMP_OBJECT (self, "vp9 payload", map.data, 16);    gst_buffer_unmap (payload, &map);  }  gst_adapter_push (self->adapter, payload);  /* Marker indicates that it was the last rtp packet for this frame */  if (gst_rtp_buffer_get_marker (rtp)) {    GstBuffer *out;    gboolean key_frame_first_layer = !p_bit && spatial_layer == 0;    if (gst_adapter_available (self->adapter) < 10)      goto too_small;    out = gst_adapter_take_buffer (self->adapter,        gst_adapter_available (self->adapter));    self->started = FALSE;    /* mark keyframes */    out = gst_buffer_make_writable (out);    /* Filter away all metas that are not sensible to copy */    gst_rtp_drop_meta (GST_ELEMENT_CAST (self), out,        g_quark_from_static_string (GST_META_TAG_VIDEO_STR));    if (!key_frame_first_layer) {      GST_BUFFER_FLAG_SET (out, GST_BUFFER_FLAG_DELTA_UNIT);      if (!self->caps_sent) {        gst_buffer_unref (out);        out = NULL;        GST_INFO_OBJECT (self, "Dropping inter-frame before intra-frame");        gst_pad_push_event (GST_RTP_BASE_DEPAYLOAD_SINKPAD (depay),            gst_video_event_new_upstream_force_key_unit (GST_CLOCK_TIME_NONE,                TRUE, 0));      }    } else {      GST_BUFFER_FLAG_UNSET (out, GST_BUFFER_FLAG_DELTA_UNIT);      if (self->last_width != self->ss_width 
||          self->last_height != self->ss_height) {        GstCaps *srccaps;        /* Width and height are optional in the RTP header. Consider to parse         * the frame header in addition if missing from RTP header */        if (self->ss_width != 0 && self->ss_height != 0) {          srccaps = gst_caps_new_simple ("video/x-vp9",              "framerate", GST_TYPE_FRACTION, 0, 1,              "width", G_TYPE_INT, self->ss_width,              "height", G_TYPE_INT, self->ss_height, NULL);        } else {          srccaps = gst_caps_new_simple ("video/x-vp9",              "framerate", GST_TYPE_FRACTION, 0, 1, NULL);        }        gst_pad_set_caps (GST_RTP_BASE_DEPAYLOAD_SRCPAD (depay), srccaps);        gst_caps_unref (srccaps);        self->caps_sent = TRUE;        self->last_width = self->ss_width;        self->last_height = self->ss_height;        self->ss_width = 0;        self->ss_height = 0;      }    }    return out;  }done:  return NULL;too_small:  GST_LOG_OBJECT (self, "Invalid rtp packet (too small), ignoring");  gst_adapter_clear (self->adapter);  self->started = FALSE;  goto done;}
开发者ID:fanc999,项目名称:gst-plugins-good,代码行数:101,


示例19: gst_base_video_decoder_finish_frame

GstFlowReturngst_base_video_decoder_finish_frame (GstBaseVideoDecoder * base_video_decoder,    GstVideoFrame * frame){  GstBaseVideoDecoderClass *base_video_decoder_class;  GstClockTime presentation_timestamp;  GstClockTime presentation_duration;  GstBuffer *src_buffer;  GST_DEBUG ("finish frame");  base_video_decoder_class =      GST_BASE_VIDEO_DECODER_GET_CLASS (base_video_decoder);  gst_base_video_decoder_calculate_timestamps (base_video_decoder, frame,      &presentation_timestamp, &presentation_duration);  src_buffer = frame->src_buffer;  GST_BUFFER_FLAG_UNSET (src_buffer, GST_BUFFER_FLAG_DELTA_UNIT);  if (base_video_decoder->state.interlaced) {#ifndef GST_VIDEO_BUFFER_TFF#define GST_VIDEO_BUFFER_TFF (GST_MINI_OBJECT_FLAG_LAST << 5)#endif#ifndef GST_VIDEO_BUFFER_RFF#define GST_VIDEO_BUFFER_RFF (GST_MINI_OBJECT_FLAG_LAST << 6)#endif#ifndef GST_VIDEO_BUFFER_ONEFIELD#define GST_VIDEO_BUFFER_ONEFIELD (GST_MINI_OBJECT_FLAG_LAST << 7)#endif    if (GST_VIDEO_FRAME_FLAG_IS_SET (frame, GST_VIDEO_FRAME_FLAG_TFF)) {      GST_BUFFER_FLAG_SET (src_buffer, GST_VIDEO_BUFFER_TFF);    } else {      GST_BUFFER_FLAG_UNSET (src_buffer, GST_VIDEO_BUFFER_TFF);    }    GST_BUFFER_FLAG_UNSET (src_buffer, GST_VIDEO_BUFFER_RFF);    GST_BUFFER_FLAG_UNSET (src_buffer, GST_VIDEO_BUFFER_ONEFIELD);    if (frame->n_fields == 3) {      GST_BUFFER_FLAG_SET (src_buffer, GST_VIDEO_BUFFER_RFF);    } else if (frame->n_fields == 1) {      GST_BUFFER_FLAG_SET (src_buffer, GST_VIDEO_BUFFER_ONEFIELD);    }  }  if (base_video_decoder->discont) {    GST_BUFFER_FLAG_UNSET (src_buffer, GST_BUFFER_FLAG_DISCONT);    base_video_decoder->discont = FALSE;  }  GST_BUFFER_TIMESTAMP (src_buffer) = presentation_timestamp;  GST_BUFFER_DURATION (src_buffer) = presentation_duration;  GST_BUFFER_OFFSET (src_buffer) = GST_BUFFER_OFFSET_NONE;  GST_BUFFER_OFFSET_END (src_buffer) = GST_BUFFER_OFFSET_NONE;  GST_DEBUG ("pushing frame %" GST_TIME_FORMAT,      GST_TIME_ARGS (presentation_timestamp));  
gst_base_video_decoder_set_src_caps (base_video_decoder);  if (base_video_decoder->sink_clipping) {    gint64 start = GST_BUFFER_TIMESTAMP (src_buffer);    gint64 stop = GST_BUFFER_TIMESTAMP (src_buffer) +        GST_BUFFER_DURATION (src_buffer);    if (gst_segment_clip (&base_video_decoder->segment, GST_FORMAT_TIME,            start, stop, &start, &stop)) {      GST_BUFFER_TIMESTAMP (src_buffer) = start;      GST_BUFFER_DURATION (src_buffer) = stop - start;      GST_DEBUG ("accepting buffer inside segment: %" GST_TIME_FORMAT          " %" GST_TIME_FORMAT          " seg %" GST_TIME_FORMAT " to %" GST_TIME_FORMAT          " time %" GST_TIME_FORMAT,          GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (src_buffer)),          GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (src_buffer) +              GST_BUFFER_DURATION (src_buffer)),          GST_TIME_ARGS (base_video_decoder->segment.start),          GST_TIME_ARGS (base_video_decoder->segment.stop),          GST_TIME_ARGS (base_video_decoder->segment.time));    } else {      GST_DEBUG ("dropping buffer outside segment: %" GST_TIME_FORMAT          " %" GST_TIME_FORMAT          " seg %" GST_TIME_FORMAT " to %" GST_TIME_FORMAT          " time %" GST_TIME_FORMAT,          GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (src_buffer)),          GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (src_buffer) +              GST_BUFFER_DURATION (src_buffer)),          GST_TIME_ARGS (base_video_decoder->segment.start),          GST_TIME_ARGS (base_video_decoder->segment.stop),          GST_TIME_ARGS (base_video_decoder->segment.time));      gst_video_frame_unref (frame);      return GST_FLOW_OK;    }  }  gst_buffer_ref (src_buffer);  gst_video_frame_unref (frame);  if (base_video_decoder_class->shape_output)//.........这里部分代码省略.........
开发者ID:PeterXu,项目名称:gst-mobile,代码行数:101,


示例20: gst_interleave_collected

//.........这里部分代码省略.........    self->func (outdata, input_info.data, self->channels, nsamples);    gst_buffer_unmap (inbuf, &input_info);  next:    if (inbuf)      gst_buffer_unref (inbuf);  }  if (ncollected == 0) {    gst_buffer_unmap (outbuf, &write_info);    goto eos;  }  GST_OBJECT_LOCK (self);  if (self->pending_segment) {    GstEvent *event;    GstSegment segment;    event = self->pending_segment;    self->pending_segment = NULL;    GST_OBJECT_UNLOCK (self);    /* convert the input segment to time now */    gst_event_copy_segment (event, &segment);    if (segment.format != GST_FORMAT_TIME) {      gst_event_unref (event);      /* not time, convert */      switch (segment.format) {        case GST_FORMAT_BYTES:          segment.start *= width;          if (segment.stop != -1)            segment.stop *= width;          if (segment.position != -1)            segment.position *= width;          /* fallthrough for the samples case */        case GST_FORMAT_DEFAULT:          segment.start =              gst_util_uint64_scale_int (segment.start, GST_SECOND, self->rate);          if (segment.stop != -1)            segment.stop =                gst_util_uint64_scale_int (segment.stop, GST_SECOND,                self->rate);          if (segment.position != -1)            segment.position =                gst_util_uint64_scale_int (segment.position, GST_SECOND,                self->rate);          break;        default:          GST_WARNING ("can't convert segment values");          segment.start = 0;          segment.stop = -1;          segment.position = 0;          break;      }      event = gst_event_new_segment (&segment);    }    gst_pad_push_event (self->src, event);    GST_OBJECT_LOCK (self);  }  GST_OBJECT_UNLOCK (self);  if (timestamp != -1) {    self->offset = gst_util_uint64_scale_int (timestamp, self->rate,        GST_SECOND);    self->timestamp = timestamp;  }  GST_BUFFER_TIMESTAMP (outbuf) = self->timestamp;  GST_BUFFER_OFFSET (outbuf) = self->offset;  
self->offset += nsamples;  self->timestamp = gst_util_uint64_scale_int (self->offset,      GST_SECOND, self->rate);  GST_BUFFER_DURATION (outbuf) =      self->timestamp - GST_BUFFER_TIMESTAMP (outbuf);  if (empty)    GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_GAP);  gst_buffer_unmap (outbuf, &write_info);  GST_LOG_OBJECT (self, "pushing outbuf, timestamp %" GST_TIME_FORMAT,      GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (outbuf)));  ret = gst_pad_push (self->src, outbuf);  return ret;eos:  {    GST_DEBUG_OBJECT (self, "no data available, must be EOS");    if (outbuf)      gst_buffer_unref (outbuf);    gst_pad_push_event (self->src, gst_event_new_eos ());    return GST_FLOW_EOS;  }}
开发者ID:lubing521,项目名称:gst-embedded-builder,代码行数:101,


示例21: legacyresample_do_output

/* Drain pending output from the resampler into @outbuf (GStreamer 0.10 API).
 *
 * Queries how much data the resample library has ready, clamps it to the
 * output buffer's capacity, writes the samples, then fills in the buffer's
 * offset, timestamp and duration bookkeeping kept in @legacyresample.
 *
 * Returns GST_FLOW_OK, or GST_FLOW_ERROR if the library wrote more bytes
 * than the buffer can hold (memory corruption). */
static GstFlowReturn
legacyresample_do_output (GstLegacyresample * legacyresample,
    GstBuffer * outbuf)
{
  int outsize;
  int outsamples;
  ResampleState *r;

  r = legacyresample->resample;

  outsize = resample_get_output_size (r);
  GST_LOG_OBJECT (legacyresample, "legacyresample can give me %d bytes",
      outsize);

  /* protect against mem corruption */
  if (outsize > GST_BUFFER_SIZE (outbuf)) {
    GST_WARNING_OBJECT (legacyresample,
        "overriding legacyresample's outsize %d with outbuffer's size %d",
        outsize, GST_BUFFER_SIZE (outbuf));
    outsize = GST_BUFFER_SIZE (outbuf);
  }
  /* catch possibly wrong size differences */
  if (GST_BUFFER_SIZE (outbuf) - outsize > r->sample_size) {
    GST_WARNING_OBJECT (legacyresample,
        "legacyresample's outsize %d too far from outbuffer's size %d",
        outsize, GST_BUFFER_SIZE (outbuf));
  }

  /* actually write the resampled data; returns the bytes produced */
  outsize = resample_get_output_data (r, GST_BUFFER_DATA (outbuf), outsize);
  outsamples = outsize / r->sample_size;
  GST_LOG_OBJECT (legacyresample, "resample gave me %d bytes or %d samples",
      outsize, outsamples);

  GST_BUFFER_OFFSET (outbuf) = legacyresample->offset;
  GST_BUFFER_TIMESTAMP (outbuf) = legacyresample->next_ts;

  /* ts_offset == -1 means we have no valid sample-offset tracking */
  if (legacyresample->ts_offset != -1) {
    legacyresample->offset += outsamples;
    legacyresample->ts_offset += outsamples;
    legacyresample->next_ts =
        gst_util_uint64_scale_int (legacyresample->ts_offset, GST_SECOND,
        legacyresample->o_rate);
    GST_BUFFER_OFFSET_END (outbuf) = legacyresample->offset;

    /* we calculate DURATION as the difference between "next" timestamp
     * and current timestamp so we ensure a contiguous stream, instead of
     * having rounding errors. */
    GST_BUFFER_DURATION (outbuf) = legacyresample->next_ts -
        GST_BUFFER_TIMESTAMP (outbuf);
  } else {
    /* no valid offset known, we can still sortof calculate the duration though */
    GST_BUFFER_DURATION (outbuf) =
        gst_util_uint64_scale_int (outsamples, GST_SECOND,
        legacyresample->o_rate);
  }

  /* check for possible mem corruption */
  if (outsize > GST_BUFFER_SIZE (outbuf)) {
    /* this is an error that when it happens, would need fixing in the
     * resample library; we told it we wanted only GST_BUFFER_SIZE (outbuf),
     * and it gave us more ! */
    GST_WARNING_OBJECT (legacyresample,
        "legacyresample, you memory corrupting bastard. "
        "you gave me outsize %d while my buffer was size %d",
        outsize, GST_BUFFER_SIZE (outbuf));
    return GST_FLOW_ERROR;
  }
  /* catch possibly wrong size differences */
  if (GST_BUFFER_SIZE (outbuf) - outsize > r->sample_size) {
    GST_WARNING_OBJECT (legacyresample,
        "legacyresample's written outsize %d too far from outbuffer's size %d",
        outsize, GST_BUFFER_SIZE (outbuf));
  }

  /* shrink the buffer's reported size to the bytes actually written */
  GST_BUFFER_SIZE (outbuf) = outsize;

  if (G_UNLIKELY (legacyresample->need_discont)) {
    GST_DEBUG_OBJECT (legacyresample,
        "marking this buffer with the DISCONT flag");
    GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DISCONT);
    legacyresample->need_discont = FALSE;
  }

  GST_LOG_OBJECT (legacyresample, "transformed to buffer of %d bytes, ts %"
      GST_TIME_FORMAT ", duration %" GST_TIME_FORMAT ", offset %"
      G_GINT64_FORMAT ", offset_end %" G_GINT64_FORMAT,
      outsize, GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (outbuf)),
      GST_TIME_ARGS (GST_BUFFER_DURATION (outbuf)),
      GST_BUFFER_OFFSET (outbuf), GST_BUFFER_OFFSET_END (outbuf));

  return GST_FLOW_OK;
}
开发者ID:ChinnaSuhas,项目名称:ossbuild,代码行数:92,


示例22: recv_sample

static GstFlowReturnrecv_sample (GstAppSink * appsink, gpointer user_data){  KmsRecorderEndpoint *self =      KMS_RECORDER_ENDPOINT (GST_OBJECT_PARENT (appsink));  GstAppSrc *appsrc = GST_APP_SRC (user_data);  KmsUriEndpointState state;  GstFlowReturn ret;  GstSample *sample;  GstSegment *segment;  GstBuffer *buffer;  BaseTimeType *base_time;  GstClockTime offset;  g_signal_emit_by_name (appsink, "pull-sample", &sample);  if (sample == NULL)    return GST_FLOW_OK;  buffer = gst_sample_get_buffer (sample);  if (buffer == NULL) {    ret = GST_FLOW_OK;    goto end;  }  segment = gst_sample_get_segment (sample);  g_object_get (G_OBJECT (self), "state", &state, NULL);  if (state != KMS_URI_ENDPOINT_STATE_START) {    GST_WARNING ("Dropping buffer received in invalid state %" GST_PTR_FORMAT,        buffer);    // TODO: Add a flag to discard buffers until keyframe    ret = GST_FLOW_OK;    goto end;  }  gst_buffer_ref (buffer);  buffer = gst_buffer_make_writable (buffer);  if (GST_BUFFER_PTS_IS_VALID (buffer))    buffer->pts =        gst_segment_to_running_time (segment, GST_FORMAT_TIME, buffer->pts);  if (GST_BUFFER_DTS_IS_VALID (buffer))    buffer->dts =        gst_segment_to_running_time (segment, GST_FORMAT_TIME, buffer->dts);  BASE_TIME_LOCK (self);  base_time = g_object_get_data (G_OBJECT (self), BASE_TIME_DATA);  if (base_time == NULL) {    base_time = g_slice_new0 (BaseTimeType);    base_time->pts = buffer->pts;    base_time->dts = buffer->dts;    GST_DEBUG_OBJECT (appsrc, "Setting pts base time to: %" G_GUINT64_FORMAT,        base_time->pts);    g_object_set_data_full (G_OBJECT (self), BASE_TIME_DATA, base_time,        release_base_time_type);  }  if (!GST_CLOCK_TIME_IS_VALID (base_time->pts)      && GST_BUFFER_PTS_IS_VALID (buffer)) {    base_time->pts = buffer->pts;    GST_DEBUG_OBJECT (appsrc, "Setting pts base time to: %" G_GUINT64_FORMAT,        base_time->pts);    base_time->dts = buffer->dts;  }  if (GST_CLOCK_TIME_IS_VALID (base_time->pts)) {    if 
(GST_BUFFER_PTS_IS_VALID (buffer)) {      offset = base_time->pts + self->priv->paused_time;      if (buffer->pts > offset) {        buffer->pts -= offset;      } else {        buffer->pts = 0;      }    }  }  if (GST_CLOCK_TIME_IS_VALID (base_time->dts)) {    if (GST_BUFFER_DTS_IS_VALID (buffer)) {      offset = base_time->dts + self->priv->paused_time;      if (buffer->dts > offset) {        buffer->dts -= offset;      } else {        buffer->dts = 0;      }    }  }  BASE_TIME_UNLOCK (GST_OBJECT_PARENT (appsink));  GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_LIVE);  if (GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_HEADER))    GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_DISCONT);  ret = gst_app_src_push_buffer (appsrc, buffer);  if (ret != GST_FLOW_OK) {    /* something wrong *///.........这里部分代码省略.........
开发者ID:s-silva,项目名称:kms-elements,代码行数:101,


示例23: gst_multifdsink_create_streamheader

/* from the given two data buffers, create two streamheader buffers and * some caps that match it, and store them in the given pointers * returns  one ref to each of the buffers and the caps */static voidgst_multifdsink_create_streamheader (const gchar * data1,    const gchar * data2, GstBuffer ** hbuf1, GstBuffer ** hbuf2,    GstCaps ** caps){  GstBuffer *buf;  GValue array = { 0 };  GValue value = { 0 };  GstStructure *structure;  guint size1 = strlen (data1);  guint size2 = strlen (data2);  fail_if (hbuf1 == NULL);  fail_if (hbuf2 == NULL);  fail_if (caps == NULL);  /* create caps with streamheader, set the caps, and push the IN_CAPS   * buffers */  *hbuf1 = gst_buffer_new_and_alloc (size1);  GST_BUFFER_FLAG_SET (*hbuf1, GST_BUFFER_FLAG_IN_CAPS);  memcpy (GST_BUFFER_DATA (*hbuf1), data1, size1);  *hbuf2 = gst_buffer_new_and_alloc (size2);  GST_BUFFER_FLAG_SET (*hbuf2, GST_BUFFER_FLAG_IN_CAPS);  memcpy (GST_BUFFER_DATA (*hbuf2), data2, size2);  /* we want to keep them around for the tests */  gst_buffer_ref (*hbuf1);  gst_buffer_ref (*hbuf2);  g_value_init (&array, GST_TYPE_ARRAY);  g_value_init (&value, GST_TYPE_BUFFER);  /* we take a copy, set it on the array (which refs it), then unref our copy */  buf = gst_buffer_copy (*hbuf1);  gst_value_set_buffer (&value, buf);  ASSERT_BUFFER_REFCOUNT (buf, "copied buffer", 2);  gst_buffer_unref (buf);  gst_value_array_append_value (&array, &value);  g_value_unset (&value);  g_value_init (&value, GST_TYPE_BUFFER);  buf = gst_buffer_copy (*hbuf2);  gst_value_set_buffer (&value, buf);  ASSERT_BUFFER_REFCOUNT (buf, "copied buffer", 2);  gst_buffer_unref (buf);  gst_value_array_append_value (&array, &value);  g_value_unset (&value);  *caps = gst_caps_from_string ("application/x-gst-check");  structure = gst_caps_get_structure (*caps, 0);  gst_structure_set_value (structure, "streamheader", &array);  g_value_unset (&array);  ASSERT_CAPS_REFCOUNT (*caps, "streamheader caps", 1);  /* set our streamheadery caps on the buffers */  
gst_buffer_set_caps (*hbuf1, *caps);  gst_buffer_set_caps (*hbuf2, *caps);  ASSERT_CAPS_REFCOUNT (*caps, "streamheader caps", 3);  GST_DEBUG ("created streamheader caps %p %" GST_PTR_FORMAT, *caps, *caps);}
开发者ID:kuailexs,项目名称:symbiandump-mw1,代码行数:64,


示例24: gst_base_video_encoder_finish_frame

/* Finalize an encoded frame and push it downstream.
 *
 * Assigns frame/decode numbers and sync-point bookkeeping, stamps the
 * output buffer's timestamp/duration/offset, negotiates output caps on
 * first use, emits a GstForceKeyUnit event when a keyframe was forced,
 * then pushes the buffer (via the subclass' shape_output if provided).
 * Takes ownership of @frame and frees it before returning. */
GstFlowReturn
gst_base_video_encoder_finish_frame (GstBaseVideoEncoder * base_video_encoder,
    GstVideoFrame * frame)
{
  GstFlowReturn ret;
  GstBaseVideoEncoderClass *base_video_encoder_class;

  base_video_encoder_class =
      GST_BASE_VIDEO_ENCODER_GET_CLASS (base_video_encoder);

  /* hand out the next system frame number and advance the counter */
  frame->system_frame_number =
      GST_BASE_VIDEO_CODEC (base_video_encoder)->system_frame_number;
  GST_BASE_VIDEO_CODEC (base_video_encoder)->system_frame_number++;

  if (frame->is_sync_point) {
    /* keyframe: reset the sync distance and mark buffer as independently
     * decodable */
    base_video_encoder->distance_from_sync = 0;
    GST_BUFFER_FLAG_UNSET (frame->src_buffer, GST_BUFFER_FLAG_DELTA_UNIT);
  } else {
    GST_BUFFER_FLAG_SET (frame->src_buffer, GST_BUFFER_FLAG_DELTA_UNIT);
  }

  frame->distance_from_sync = base_video_encoder->distance_from_sync;
  base_video_encoder->distance_from_sync++;

  /* decode timestamp is derived from the previous frame's number scaled by
   * the framerate; the very first frame decodes at 0 */
  frame->decode_frame_number = frame->system_frame_number - 1;
  if (frame->decode_frame_number < 0) {
    frame->decode_timestamp = 0;
  } else {
    frame->decode_timestamp = gst_util_uint64_scale (frame->decode_frame_number,
        GST_SECOND * GST_BASE_VIDEO_CODEC (base_video_encoder)->state.fps_d,
        GST_BASE_VIDEO_CODEC (base_video_encoder)->state.fps_n);
  }

  GST_BUFFER_TIMESTAMP (frame->src_buffer) = frame->presentation_timestamp;
  GST_BUFFER_DURATION (frame->src_buffer) = frame->presentation_duration;
  GST_BUFFER_OFFSET (frame->src_buffer) = frame->decode_timestamp;

  /* the frame is finished: remove it from the codec's pending-frames list */
  GST_BASE_VIDEO_CODEC (base_video_encoder)->frames =
      g_list_remove (GST_BASE_VIDEO_CODEC (base_video_encoder)->frames, frame);

  if (!base_video_encoder->set_output_caps) {
    /* first output: ask the subclass for caps, falling back to a generic
     * placeholder, and set them on the source pad once */
    if (base_video_encoder_class->get_caps) {
      GST_BASE_VIDEO_CODEC (base_video_encoder)->caps =
          base_video_encoder_class->get_caps (base_video_encoder);
    } else {
      GST_BASE_VIDEO_CODEC (base_video_encoder)->caps =
          gst_caps_new_simple ("video/unknown", NULL);
    }
    gst_pad_set_caps (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_encoder),
        GST_BASE_VIDEO_CODEC (base_video_encoder)->caps);
    base_video_encoder->set_output_caps = TRUE;
  }

  gst_buffer_set_caps (GST_BUFFER (frame->src_buffer),
      GST_BASE_VIDEO_CODEC (base_video_encoder)->caps);

  if (frame->force_keyframe) {
    GstClockTime stream_time;
    GstClockTime running_time;
    GstStructure *s;

    running_time =
        gst_segment_to_running_time (&GST_BASE_VIDEO_CODEC
        (base_video_encoder)->segment, GST_FORMAT_TIME,
        frame->presentation_timestamp);
    stream_time =
        gst_segment_to_stream_time (&GST_BASE_VIDEO_CODEC
        (base_video_encoder)->segment, GST_FORMAT_TIME,
        frame->presentation_timestamp);

    /* FIXME this should send the event that we got on the sink pad
       instead of creating a new one */
    s = gst_structure_new ("GstForceKeyUnit",
        "timestamp", G_TYPE_UINT64, frame->presentation_timestamp,
        "stream-time", G_TYPE_UINT64, stream_time,
        "running-time", G_TYPE_UINT64, running_time, NULL);

    gst_pad_push_event (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_encoder),
        gst_event_new_custom (GST_EVENT_CUSTOM_DOWNSTREAM, s));
  }

  /* let the subclass shape/push the output if it wants; otherwise push the
   * buffer downstream directly */
  if (base_video_encoder_class->shape_output) {
    ret = base_video_encoder_class->shape_output (base_video_encoder, frame);
  } else {
    ret =
        gst_pad_push (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_encoder),
        frame->src_buffer);
  }

  gst_base_video_codec_free_frame (frame);

  return ret;
}
开发者ID:PeterXu,项目名称:gst-mobile,代码行数:93,


示例25: test_burst_client_bytes_with_keyframe

/* keep 100 bytes and burst 80 bytes to clients */void test_burst_client_bytes_with_keyframe(){  GstElement *sink;  GstBuffer *buffer;  GstCaps *caps;  int pfd1[2];  int pfd2[2];  int pfd3[2];  gchar data[16];  gint i;  guint buffers_queued;    std_log(LOG_FILENAME_LINE, "Test Started test_burst_client_bytes_with_keyframe");  sink = setup_multifdsink ();  /* make sure we keep at least 100 bytes at all times */  g_object_set (sink, "bytes-min", 100, NULL);  g_object_set (sink, "sync-method", 5, NULL);  /* 3 = burst_with_keyframe */  g_object_set (sink, "burst-unit", 3, NULL);   /* 3 = bytes */  g_object_set (sink, "burst-value", (guint64) 80, NULL);  fail_if (pipe (pfd1) == -1);  fail_if (pipe (pfd2) == -1);  fail_if (pipe (pfd3) == -1);  ASSERT_SET_STATE (sink, GST_STATE_PLAYING, GST_STATE_CHANGE_ASYNC);  caps = gst_caps_from_string ("application/x-gst-check");  GST_DEBUG ("Created test caps %p %" GST_PTR_FORMAT, caps, caps);  /* push buffers in, 9 * 16 bytes = 144 bytes */  for (i = 0; i < 9; i++) {    gchar *data;    buffer = gst_buffer_new_and_alloc (16);    gst_buffer_set_caps (buffer, caps);    /* mark most buffers as delta */    if (i != 0 && i != 4 && i != 8)      GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_DELTA_UNIT);    /* copy some id */    data = (gchar *) GST_BUFFER_DATA (buffer);    g_snprintf (data, 16, "deadbee%08x", i);    fail_unless (gst_pad_push (mysrcpad, buffer) == GST_FLOW_OK);  }  /* check that at least 7 buffers (112 bytes) are in the queue */  g_object_get (sink, "buffers-queued", &buffers_queued, NULL);  fail_if (buffers_queued != 7);  /* now add the clients */  g_signal_emit_by_name (sink, "add", pfd1[1]);  g_signal_emit_by_name (sink, "add_full", pfd2[1], 5,      3, (guint64) 50, 3, (guint64) 90);  g_signal_emit_by_name (sink, "add_full", pfd3[1], 5,      3, (guint64) 50, 3, (guint64) 50);  /* push last buffer to make client fds ready for reading */  for (i = 9; i < 10; i++) {    gchar *data;    buffer = gst_buffer_new_and_alloc (16);  
  gst_buffer_set_caps (buffer, caps);    GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_DELTA_UNIT);    /* copy some id */    data = (gchar *) GST_BUFFER_DATA (buffer);    g_snprintf (data, 16, "deadbee%08x", i);    fail_unless (gst_pad_push (mysrcpad, buffer) == GST_FLOW_OK);  }  /* now we should only read the last 6 buffers (min 5 * 16 = 80 bytes),   * keyframe at buffer 4 */  GST_DEBUG ("Reading from client 1");  fail_if (read (pfd1[0], data, 16) < 16);  fail_unless (strncmp (data, "deadbee00000004", 16) == 0);  fail_if (read (pfd1[0], data, 16) < 16);  fail_unless (strncmp (data, "deadbee00000005", 16) == 0);  fail_if (read (pfd1[0], data, 16) < 16);  fail_unless (strncmp (data, "deadbee00000006", 16) == 0);  fail_if (read (pfd1[0], data, 16) < 16);  fail_unless (strncmp (data, "deadbee00000007", 16) == 0);  fail_if (read (pfd1[0], data, 16) < 16);  fail_unless (strncmp (data, "deadbee00000008", 16) == 0);  fail_if (read (pfd1[0], data, 16) < 16);  fail_unless (strncmp (data, "deadbee00000009", 16) == 0);  /* second client only bursts 50 bytes = 4 buffers, there is   * no keyframe above min and below max, so send min */  GST_DEBUG ("Reading from client 2");  fail_if (read (pfd2[0], data, 16) < 16);  fail_unless (strncmp (data, "deadbee00000006", 16) == 0);  fail_if (read (pfd2[0], data, 16) < 16);  fail_unless (strncmp (data, "deadbee00000007", 16) == 0);  fail_if (read (pfd2[0], data, 16) < 16);  fail_unless (strncmp (data, "deadbee00000008", 16) == 0);//.........这里部分代码省略.........
开发者ID:kuailexs,项目名称:symbiandump-mw1,代码行数:101,


示例26: gst_ogg_parse_chain

//.........这里部分代码省略.........              GstBuffer *buf = GST_BUFFER (stream->headers->data);              result = gst_pad_push (ogg->srcpad, buf);              if (result != GST_FLOW_OK)                return result;            }            for (l = ogg->oggstreams; l != NULL; l = l->next) {              GstOggStream *stream = (GstOggStream *) l->data;              GList *j;              /* pushed the first one for each stream already, now do 2-N */              for (j = stream->headers->next; j != NULL; j = j->next) {                GstBuffer *buf = GST_BUFFER (j->data);                result = gst_pad_push (ogg->srcpad, buf);                if (result != GST_FLOW_OK)                  return result;              }            }            ogg->in_headers = 0;            /* And finally the pending data pages */            for (l = ogg->oggstreams; l != NULL; l = l->next) {              GstOggStream *stream = (GstOggStream *) l->data;              GList *k;              if (stream->unknown_pages == NULL)                continue;              if (found_pending_headers) {                GST_WARNING_OBJECT (ogg, "Incorrectly muxed headers found at "                    "approximate offset %" G_GINT64_FORMAT, ogg->offset);              }              found_pending_headers = TRUE;              GST_LOG_OBJECT (ogg, "Pushing %d pending pages after headers",                  g_list_length (stream->unknown_pages) + 1);              for (k = stream->unknown_pages; k != NULL; k = k->next) {                GstBuffer *buf = GST_BUFFER (k->data);                result = gst_pad_push (ogg->srcpad, buf);                if (result != GST_FLOW_OK)                  return result;              }              g_list_foreach (stream->unknown_pages,                  (GFunc) gst_mini_object_unref, NULL);              g_list_free (stream->unknown_pages);              stream->unknown_pages = NULL;            }          }          if (granule == -1) {            stream->stored_buffers = 
g_list_append (stream->stored_buffers,                pagebuffer);          } else {            while (stream->stored_buffers) {              GstBuffer *buf = stream->stored_buffers->data;              buf = gst_buffer_make_writable (buf);              GST_BUFFER_TIMESTAMP (buf) = buffertimestamp;              if (!keyframe) {                GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DELTA_UNIT);              } else {                keyframe = FALSE;              }              result = gst_pad_push (ogg->srcpad, buf);              if (result != GST_FLOW_OK)                return result;              stream->stored_buffers =                  g_list_delete_link (stream->stored_buffers,                  stream->stored_buffers);            }            pagebuffer = gst_buffer_make_writable (pagebuffer);            if (!keyframe) {              GST_BUFFER_FLAG_SET (pagebuffer, GST_BUFFER_FLAG_DELTA_UNIT);            } else {              keyframe = FALSE;            }            result = gst_pad_push (ogg->srcpad, pagebuffer);            if (result != GST_FLOW_OK)              return result;          }        }      }    }  }  return result;failure:  gst_pad_push_event (GST_PAD (ogg->srcpad), gst_event_new_eos ());  return GST_FLOW_ERROR;}
开发者ID:ConfusedReality,项目名称:pkg_multimedia_gst-plugins-base,代码行数:101,


示例27: gst_buffer_create_sub

/**
 * gst_buffer_create_sub:
 * @parent: a #GstBuffer.
 * @offset: the offset into parent #GstBuffer at which the new sub-buffer
 *          begins.
 * @size: the size of the new #GstBuffer sub-buffer, in bytes.
 *
 * Creates a sub-buffer from @parent at @offset and @size.
 * This sub-buffer uses the actual memory space of the parent buffer.
 * This function will copy the offset and timestamp fields when the
 * offset is 0. If not, they will be set to #GST_CLOCK_TIME_NONE and
 * #GST_BUFFER_OFFSET_NONE.
 * If @offset equals 0 and @size equals the total size of @buffer, the
 * duration and offset end fields are also copied. If not they will be set
 * to #GST_CLOCK_TIME_NONE and #GST_BUFFER_OFFSET_NONE.
 *
 * MT safe.
 * Returns: the new #GstBuffer.
 * Returns NULL if the arguments were invalid.
 */
GstBuffer *
gst_buffer_create_sub (GstBuffer * buffer, guint offset, guint size)
{
  GstSubBuffer *subbuffer;
  GstBuffer *parent;
  gboolean complete;

  g_return_val_if_fail (buffer != NULL, NULL);
  g_return_val_if_fail (buffer->mini_object.refcount > 0, NULL);
  g_return_val_if_fail (buffer->size >= offset + size, NULL);

  /* find real parent: if @buffer is itself a sub-buffer, chain to its
   * parent so sub-buffers never nest */
  if (GST_IS_SUBBUFFER (buffer)) {
    parent = GST_SUBBUFFER_CAST (buffer)->parent;
  } else {
    parent = buffer;
  }
  /* the sub-buffer holds a ref on the parent to keep its memory alive */
  gst_buffer_ref (parent);

  /* create the new buffer */
  subbuffer = (GstSubBuffer *) gst_mini_object_new (_gst_subbuffer_type);
  subbuffer->parent = parent;

  GST_CAT_LOG (GST_CAT_BUFFER, "new subbuffer %p (parent %p)", subbuffer,
      parent);

  /* set the right values in the child: data points into the parent's
   * memory, no copy is made */
  GST_BUFFER_DATA (GST_BUFFER_CAST (subbuffer)) = buffer->data + offset;
  GST_BUFFER_SIZE (GST_BUFFER_CAST (subbuffer)) = size;

  if ((offset == 0) && (size == GST_BUFFER_SIZE (buffer))) {
    /* copy all the flags except IN_CAPS */
    GST_BUFFER_FLAG_SET (subbuffer, GST_BUFFER_FLAGS (buffer));
    GST_BUFFER_FLAG_UNSET (subbuffer, GST_BUFFER_FLAG_IN_CAPS);
  } else {
    /* copy only PREROLL & GAP flags */
    GST_BUFFER_FLAG_SET (subbuffer, (GST_BUFFER_FLAGS (buffer) &
            (GST_BUFFER_FLAG_PREROLL | GST_BUFFER_FLAG_GAP)));
  }

  /* we can copy the timestamp and offset if the new buffer starts at
   * offset 0 */
  if (offset == 0) {
    GST_BUFFER_TIMESTAMP (subbuffer) = GST_BUFFER_TIMESTAMP (buffer);
    GST_BUFFER_OFFSET (subbuffer) = GST_BUFFER_OFFSET (buffer);
    complete = (buffer->size == size);
  } else {
    GST_BUFFER_TIMESTAMP (subbuffer) = GST_CLOCK_TIME_NONE;
    GST_BUFFER_OFFSET (subbuffer) = GST_BUFFER_OFFSET_NONE;
    complete = FALSE;
  }

  if (complete) {
    GstCaps *caps;

    /* if we copied the complete buffer we can copy the duration,
     * offset_end and caps as well */
    GST_BUFFER_DURATION (subbuffer) = GST_BUFFER_DURATION (buffer);
    GST_BUFFER_OFFSET_END (subbuffer) = GST_BUFFER_OFFSET_END (buffer);
    if ((caps = GST_BUFFER_CAPS (buffer)))
      gst_caps_ref (caps);
    GST_BUFFER_CAPS (subbuffer) = caps;
  } else {
    GST_BUFFER_DURATION (subbuffer) = GST_CLOCK_TIME_NONE;
    GST_BUFFER_OFFSET_END (subbuffer) = GST_BUFFER_OFFSET_NONE;
    GST_BUFFER_CAPS (subbuffer) = NULL;
  }
  return GST_BUFFER_CAST (subbuffer);
}
开发者ID:wosigh,项目名称:gstreamer,代码行数:89,



注:本文中的GST_BUFFER_FLAG_SET函数示例整理自Github/MSDocs等源码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。


C++ GST_BUFFER_FLAG_UNSET函数代码示例
C++ GST_BUFFER_DURATION函数代码示例
万事OK自学网:51自学网_软件自学网_CAD自学网自学excel、自学PS、自学CAD、自学C语言、自学css3实例,是一个通过网络自主学习工作技能的自学平台,网友喜欢的软件自学网站。