
Self-study tutorial: C++ GST_BUFFER_DURATION code examples

51自学网 2021-06-01 20:55:59

This article collects typical real-world uses of GST_BUFFER_DURATION in C/C++. If you have been wondering what GST_BUFFER_DURATION does, how it is called, or what its use looks like in real code, the hand-picked examples below should help. Strictly speaking, GST_BUFFER_DURATION is a GStreamer macro that reads or writes the duration field (a GstClockTime, in nanoseconds) of a GstBuffer.

The examples below show 26 uses of GST_BUFFER_DURATION, sorted by popularity by default.
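Most of the examples follow the same write-side pattern: allocate or obtain a GstBuffer, set its timestamp, and set GST_BUFFER_DURATION from a sample (or frame) count and a rate. As a quick orientation before the collected examples, here is a minimal sketch of that pattern. It assumes the GStreamer 1.x API; the helper name and its parameters are hypothetical and purely illustrative.

#include <gst/gst.h>

/* Minimal sketch (hypothetical helper): stamp an outgoing buffer that carries
 * n_samples audio samples at rate Hz, then push it downstream. */
static GstFlowReturn
push_stamped_buffer (GstPad * srcpad, gsize size, guint64 first_sample,
    gint n_samples, gint rate)
{
  GstBuffer *buf = gst_buffer_new_allocate (NULL, size, NULL);

  /* Timestamp: position of the first sample, converted to nanoseconds. */
  GST_BUFFER_PTS (buf) =
      gst_util_uint64_scale_int (first_sample, GST_SECOND, rate);
  /* Duration: how long this buffer lasts on the timeline. */
  GST_BUFFER_DURATION (buf) =
      gst_util_uint64_scale_int (n_samples, GST_SECOND, rate);

  return gst_pad_push (srcpad, buf);
}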

Example 1: gst_jpeg_parse_push_buffer
The JPEG parser takes the parsed frame out of its adapter, renegotiates caps when the image size or framerate has changed, and stamps the outgoing buffer with the running timestamp and the per-frame duration.

static GstFlowReturngst_jpeg_parse_push_buffer (GstJpegParse * parse, guint len){  GstBuffer *outbuf;  GstFlowReturn ret = GST_FLOW_OK;  gboolean header_ok;  /* reset the offset (only when we flushed) */  parse->priv->last_offset = 0;  parse->priv->last_entropy_len = 0;  outbuf = gst_adapter_take_buffer (parse->priv->adapter, len);  if (outbuf == NULL) {    GST_ELEMENT_ERROR (parse, STREAM, DECODE,        ("Failed to take buffer of size %u", len),        ("Failed to take buffer of size %u", len));    return GST_FLOW_ERROR;  }  header_ok = gst_jpeg_parse_read_header (parse, outbuf);  if (parse->priv->new_segment == TRUE      || parse->priv->width != parse->priv->caps_width      || parse->priv->height != parse->priv->caps_height      || parse->priv->framerate_numerator !=      parse->priv->caps_framerate_numerator      || parse->priv->framerate_denominator !=      parse->priv->caps_framerate_denominator) {    if (!gst_jpeg_parse_set_new_caps (parse, header_ok)) {      GST_ELEMENT_ERROR (parse, CORE, NEGOTIATION,          ("Can't set caps to the src pad"), ("Can't set caps to the src pad"));      return GST_FLOW_ERROR;    }    if (parse->priv->tags) {      GST_DEBUG_OBJECT (parse, "Pushing tags: %" GST_PTR_FORMAT,          parse->priv->tags);      gst_element_found_tags_for_pad (GST_ELEMENT_CAST (parse),          parse->priv->srcpad, parse->priv->tags);      parse->priv->tags = NULL;    }    parse->priv->new_segment = FALSE;    parse->priv->caps_width = parse->priv->width;    parse->priv->caps_height = parse->priv->height;    parse->priv->caps_framerate_numerator = parse->priv->framerate_numerator;    parse->priv->caps_framerate_denominator =        parse->priv->framerate_denominator;  }  GST_BUFFER_TIMESTAMP (outbuf) = parse->priv->next_ts;  if (parse->priv->has_fps && GST_CLOCK_TIME_IS_VALID (parse->priv->next_ts)      && GST_CLOCK_TIME_IS_VALID (parse->priv->duration)) {    parse->priv->next_ts += parse->priv->duration;  } else {    parse->priv->duration = GST_CLOCK_TIME_NONE;    parse->priv->next_ts = GST_CLOCK_TIME_NONE;  }  GST_BUFFER_DURATION (outbuf) = parse->priv->duration;  gst_buffer_set_caps (outbuf, GST_PAD_CAPS (parse->priv->srcpad));  GST_LOG_OBJECT (parse, "pushing buffer (ts=%" GST_TIME_FORMAT ", len=%u)",      GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (outbuf)), len);  ret = gst_pad_push (parse->priv->srcpad, outbuf);  return ret;}
Developer: pli3, project: gst-plugins-bad, lines of code: 71


Example 2: gst_ivf_parse_chain
The IVF (VP8) parser reads the 32-byte file header and then stamps every frame with a timestamp and a duration derived from the stream's framerate.

/* chain function * this function does the actual processing */static GstFlowReturngst_ivf_parse_chain (GstPad * pad, GstBuffer * buf){  GstIvfParse *ivf = GST_IVF_PARSE (GST_OBJECT_PARENT (pad));  gboolean res;  /* lazy creation of the adapter */  if (G_UNLIKELY (ivf->adapter == NULL)) {    ivf->adapter = gst_adapter_new ();  }  GST_LOG_OBJECT (ivf, "Pushing buffer of size %u to adapter",      GST_BUFFER_SIZE (buf));  gst_adapter_push (ivf->adapter, buf); /* adapter takes ownership of buf */  res = GST_FLOW_OK;  switch (ivf->state) {    case GST_IVF_PARSE_START:      if (gst_adapter_available (ivf->adapter) >= 32) {        GstCaps *caps;        const guint8 *data = gst_adapter_peek (ivf->adapter, 32);        guint32 magic = GST_READ_UINT32_LE (data);        guint16 version = GST_READ_UINT16_LE (data + 4);        guint16 header_size = GST_READ_UINT16_LE (data + 6);        guint32 fourcc = GST_READ_UINT32_LE (data + 8);        guint16 width = GST_READ_UINT16_LE (data + 12);        guint16 height = GST_READ_UINT16_LE (data + 14);        guint32 rate_num = GST_READ_UINT32_LE (data + 16);        guint32 rate_den = GST_READ_UINT32_LE (data + 20);#ifndef GST_DISABLE_GST_DEBUG        guint32 num_frames = GST_READ_UINT32_LE (data + 24);#endif        /* last 4 bytes unused */        gst_adapter_flush (ivf->adapter, 32);        if (magic != GST_MAKE_FOURCC ('D', 'K', 'I', 'F') ||            version != 0 || header_size != 32 ||            fourcc != GST_MAKE_FOURCC ('V', 'P', '8', '0')) {          GST_ELEMENT_ERROR (ivf, STREAM, WRONG_TYPE, (NULL), (NULL));          return GST_FLOW_ERROR;        }        /* create src pad caps */        caps = gst_caps_new_simple ("video/x-vp8",            "width", G_TYPE_INT, width, "height", G_TYPE_INT, height,            "framerate", GST_TYPE_FRACTION, rate_num, rate_den, NULL);        GST_INFO_OBJECT (ivf, "Found stream: %" GST_PTR_FORMAT, caps);        GST_LOG_OBJECT (ivf, "Stream has %d frames", num_frames);        gst_pad_set_caps (ivf->srcpad, caps);        gst_caps_unref (caps);        /* keep framerate in instance for convenience */        ivf->rate_num = rate_num;        ivf->rate_den = rate_den;        gst_pad_push_event (ivf->srcpad, gst_event_new_new_segment (FALSE, 1.0,                GST_FORMAT_TIME, 0, -1, 0));        /* move along */        ivf->state = GST_IVF_PARSE_DATA;      } else {        GST_LOG_OBJECT (ivf, "Header data not yet available.");        break;      }      /* fall through */    case GST_IVF_PARSE_DATA:      while (gst_adapter_available (ivf->adapter) > 12) {        const guint8 *data = gst_adapter_peek (ivf->adapter, 12);        guint32 frame_size = GST_READ_UINT32_LE (data);        guint64 frame_pts = GST_READ_UINT64_LE (data + 4);        GST_LOG_OBJECT (ivf,            "Read frame header: size %u, pts %" G_GUINT64_FORMAT, frame_size,            frame_pts);        if (gst_adapter_available (ivf->adapter) >= 12 + frame_size) {          GstBuffer *frame;          gst_adapter_flush (ivf->adapter, 12);          frame = gst_adapter_take_buffer (ivf->adapter, frame_size);          gst_buffer_set_caps (frame, GST_PAD_CAPS (ivf->srcpad));          GST_BUFFER_TIMESTAMP (frame) =              gst_util_uint64_scale_int (GST_SECOND * frame_pts, ivf->rate_den,              ivf->rate_num);          GST_BUFFER_DURATION (frame) =              gst_util_uint64_scale_int (GST_SECOND, ivf->rate_den,              ivf->rate_num);//.........这里部分代码省略.........
Developer: lubing521, project: gst-embedded-builder, lines of code: 101


Example 3: gst_rtp_h263p_pay_flush
The H.263+ RTP payloader splits a frame into MTU-sized packets and copies the input frame's timestamp and duration onto each outgoing RTP buffer.

static GstFlowReturngst_rtp_h263p_pay_flush (GstRtpH263PPay * rtph263ppay){  guint avail;  GstBufferList *list = NULL;  GstBuffer *outbuf = NULL;  GstFlowReturn ret;  gboolean fragmented = FALSE;  avail = gst_adapter_available (rtph263ppay->adapter);  if (avail == 0)    return GST_FLOW_OK;  fragmented = FALSE;  /* This algorithm assumes the H263/+/++ encoder sends complete frames in each   * buffer */  /* With Fragmentation Mode at GST_FRAGMENTATION_MODE_NORMAL:   *  This algorithm implements the Follow-on packets method for packetization.   *  This assumes low packet loss network.    * With Fragmentation Mode at GST_FRAGMENTATION_MODE_SYNC:   *  This algorithm separates large frames at synchronisation points (Segments)   *  (See RFC 4629 section 6). It would be interesting to have a property such as network   *  quality to select between both packetization methods */  /* TODO Add VRC supprt (See RFC 4629 section 5.2) */  while (avail > 0) {    guint towrite;    guint8 *payload;    gint header_len;    guint next_gop = 0;    gboolean found_gob = FALSE;    GstRTPBuffer rtp = { NULL };    GstBuffer *payload_buf;    if (rtph263ppay->fragmentation_mode == GST_FRAGMENTATION_MODE_SYNC) {      /* start after 1st gop possible */      /* Check if we have a gob or eos , eossbs */      /* FIXME EOS and EOSSBS packets should never contain any gobs and vice-versa */      next_gop =          gst_adapter_masked_scan_uint32 (rtph263ppay->adapter, 0xffff8000,          0x00008000, 0, avail);      if (next_gop == 0) {        GST_DEBUG_OBJECT (rtph263ppay, " Found GOB header");        found_gob = TRUE;      }      /* Find next and cut the packet accordingly */      /* TODO we should get as many gobs as possible until MTU is reached, this       * code seems to just get one GOB per packet */      if (next_gop == 0 && avail > 3)        next_gop =            gst_adapter_masked_scan_uint32 (rtph263ppay->adapter, 0xffff8000,            0x00008000, 3, avail - 3);      GST_DEBUG_OBJECT (rtph263ppay, " Next GOB Detected at :  %d", next_gop);      if (next_gop == -1)        next_gop = 0;    }    /* for picture start frames (non-fragmented), we need to remove the first     * two 0x00 bytes and set P=1 */    if (!fragmented || found_gob) {      gst_adapter_flush (rtph263ppay->adapter, 2);      avail -= 2;    }    header_len = 2;    towrite = MIN (avail, gst_rtp_buffer_calc_payload_len        (GST_RTP_BASE_PAYLOAD_MTU (rtph263ppay) - header_len, 0, 0));    if (next_gop > 0)      towrite = MIN (next_gop, towrite);    outbuf = gst_rtp_buffer_new_allocate (header_len, 0, 0);    gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtp);    /* last fragment gets the marker bit set */    gst_rtp_buffer_set_marker (&rtp, avail > towrite ? 0 : 1);    payload = gst_rtp_buffer_get_payload (&rtp);    /*  0                   1     *  0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5     * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+     * |   RR    |P|V|   PLEN    |PEBIT|     * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+     */    /* if fragmented or gop header , write p bit =1 */    payload[0] = (fragmented && !found_gob) ? 
0x00 : 0x04;    payload[1] = 0;    GST_BUFFER_PTS (outbuf) = rtph263ppay->first_timestamp;    GST_BUFFER_DURATION (outbuf) = rtph263ppay->first_duration;    gst_rtp_buffer_unmap (&rtp);    payload_buf = gst_adapter_take_buffer_fast (rtph263ppay->adapter, towrite);    gst_rtp_copy_meta (GST_ELEMENT_CAST (rtph263ppay), outbuf, payload_buf,        g_quark_from_static_string (GST_META_TAG_VIDEO_STR));    outbuf = gst_buffer_append (outbuf, payload_buf);    avail -= towrite;//.........这里部分代码省略.........
Developer: pexip, project: gst-plugins-good, lines of code: 101


Example 4: speex_dec_chain_parse_data
The Speex decoder pushes one buffer per decoded frame, setting GST_BUFFER_DURATION to the fixed frame duration and advancing the segment position by the same amount.

static GstFlowReturnspeex_dec_chain_parse_data (GstSpeexDec * dec, GstBuffer * buf,    GstClockTime timestamp, GstClockTime duration){  GstFlowReturn res = GST_FLOW_OK;  gint i, fpp;  guint size;  guint8 *data;  SpeexBits *bits;  if (!dec->frame_duration)    goto not_negotiated;  if (timestamp != -1) {    dec->segment.last_stop = timestamp;  } else {    timestamp = dec->segment.last_stop;  }  if (buf) {    data = GST_BUFFER_DATA (buf);    size = GST_BUFFER_SIZE (buf);    /* send data to the bitstream */    speex_bits_read_from (&dec->bits, (char *) data, size);    fpp = 0;    bits = &dec->bits;    GST_DEBUG_OBJECT (dec, "received buffer of size %u, fpp %d", size, fpp);  } else {    /* concealment data, pass NULL as the bits parameters */    GST_DEBUG_OBJECT (dec, "creating concealment data");    fpp = dec->header->frames_per_packet;    bits = NULL;  }  /* now decode each frame, catering for unknown number of them (e.g. rtp) */  for (i = 0; (!fpp || i < fpp) && (!bits || speex_bits_remaining (bits) > 0);      i++) {    GstBuffer *outbuf;    gint16 *out_data;    gint ret;    GST_LOG_OBJECT (dec, "decoding frame %d/%d", i, fpp);    res = gst_pad_alloc_buffer_and_set_caps (dec->srcpad,        GST_BUFFER_OFFSET_NONE, dec->frame_size * dec->header->nb_channels * 2,        GST_PAD_CAPS (dec->srcpad), &outbuf);    if (res != GST_FLOW_OK) {      GST_DEBUG_OBJECT (dec, "buf alloc flow: %s", gst_flow_get_name (res));      return res;    }    out_data = (gint16 *) GST_BUFFER_DATA (outbuf);    ret = speex_decode_int (dec->state, bits, out_data);    if (ret == -1) {      /* uh? end of stream */      GST_WARNING_OBJECT (dec, "Unexpected end of stream found");      gst_buffer_unref (outbuf);      outbuf = NULL;      break;    } else if (ret == -2) {      GST_WARNING_OBJECT (dec, "Decoding error: corrupted stream?");      gst_buffer_unref (outbuf);      outbuf = NULL;      break;    }    if (bits && speex_bits_remaining (bits) < 0) {      GST_WARNING_OBJECT (dec, "Decoding overflow: corrupted stream?");      gst_buffer_unref (outbuf);      outbuf = NULL;      break;    }    if (dec->header->nb_channels == 2)      speex_decode_stereo_int (out_data, dec->frame_size, dec->stereo);    GST_BUFFER_TIMESTAMP (outbuf) = timestamp;    GST_BUFFER_DURATION (outbuf) = dec->frame_duration;    dec->segment.last_stop += dec->frame_duration;    timestamp = dec->segment.last_stop;    GST_LOG_OBJECT (dec, "pushing buffer with ts=%" GST_TIME_FORMAT ", dur=%"        GST_TIME_FORMAT, GST_TIME_ARGS (timestamp),        GST_TIME_ARGS (dec->frame_duration));    res = gst_pad_push (dec->srcpad, outbuf);    if (res != GST_FLOW_OK) {      GST_DEBUG_OBJECT (dec, "flow: %s", gst_flow_get_name (res));      break;    }  }  return res;//.........这里部分代码省略.........
Developer: dgerlach, project: gst-plugins-good, lines of code: 101


Example 5: gst_gdiscreencapsrc_create
The GDI screen-capture source sets the buffer duration either to a full 1/fps frame period or to the time remaining until the next expected capture.

static GstFlowReturngst_gdiscreencapsrc_create (GstPushSrc * push_src, GstBuffer ** buf){  GstGDIScreenCapSrc *src = GST_GDISCREENCAPSRC (push_src);  GstBuffer *new_buf;  gint new_buf_size;  GstClock *clock;  GstClockTime buf_time, buf_dur;  guint64 frame_number;  if (G_UNLIKELY (!src->info.bmiHeader.biWidth ||          !src->info.bmiHeader.biHeight)) {    GST_ELEMENT_ERROR (src, CORE, NEGOTIATION, (NULL),        ("format wasn't negotiated before create function"));    return GST_FLOW_NOT_NEGOTIATED;  }  new_buf_size = GST_ROUND_UP_4 (src->info.bmiHeader.biWidth * 3) *      (-src->info.bmiHeader.biHeight);  GST_LOG_OBJECT (src,      "creating buffer of %d bytes with %dx%d image",      new_buf_size, (gint) src->info.bmiHeader.biWidth,      (gint) (-src->info.bmiHeader.biHeight));  new_buf = gst_buffer_new_and_alloc (new_buf_size);  clock = gst_element_get_clock (GST_ELEMENT (src));  if (clock != NULL) {    GstClockTime time, base_time;    /* Calculate sync time. */    time = gst_clock_get_time (clock);    base_time = gst_element_get_base_time (GST_ELEMENT (src));    buf_time = time - base_time;    if (src->rate_numerator) {      frame_number = gst_util_uint64_scale (buf_time,          src->rate_numerator, GST_SECOND * src->rate_denominator);    } else {      frame_number = -1;    }  } else {    buf_time = GST_CLOCK_TIME_NONE;    frame_number = -1;  }  if (frame_number != -1 && frame_number == src->frame_number) {    GstClockID id;    GstClockReturn ret;    /* Need to wait for the next frame */    frame_number += 1;    /* Figure out what the next frame time is */    buf_time = gst_util_uint64_scale (frame_number,        src->rate_denominator * GST_SECOND, src->rate_numerator);    id = gst_clock_new_single_shot_id (clock,        buf_time + gst_element_get_base_time (GST_ELEMENT (src)));    GST_OBJECT_LOCK (src);    src->clock_id = id;    GST_OBJECT_UNLOCK (src);    GST_DEBUG_OBJECT (src, "Waiting for next frame time %" G_GUINT64_FORMAT,        buf_time);    ret = gst_clock_id_wait (id, NULL);    GST_OBJECT_LOCK (src);    gst_clock_id_unref (id);    src->clock_id = NULL;    if (ret == GST_CLOCK_UNSCHEDULED) {      /* Got woken up by the unlock function */      GST_OBJECT_UNLOCK (src);      return GST_FLOW_FLUSHING;    }    GST_OBJECT_UNLOCK (src);    /* Duration is a complete 1/fps frame duration */    buf_dur =        gst_util_uint64_scale_int (GST_SECOND, src->rate_denominator,        src->rate_numerator);  } else if (frame_number != -1) {    GstClockTime next_buf_time;    GST_DEBUG_OBJECT (src, "No need to wait for next frame time %"        G_GUINT64_FORMAT " next frame = %" G_GINT64_FORMAT " prev = %"        G_GINT64_FORMAT, buf_time, frame_number, src->frame_number);    next_buf_time = gst_util_uint64_scale (frame_number + 1,        src->rate_denominator * GST_SECOND, src->rate_numerator);    /* Frame duration is from now until the next expected capture time */    buf_dur = next_buf_time - buf_time;  } else {    buf_dur = GST_CLOCK_TIME_NONE;  }  src->frame_number = frame_number;  GST_BUFFER_TIMESTAMP (new_buf) = buf_time;  GST_BUFFER_DURATION (new_buf) = buf_dur;//.........这里部分代码省略.........
Developer: GrokImageCompression, project: gst-plugins-bad, lines of code: 101


Example 6: gst_wavpack_enc_chain
The WavPack encoder clips buffers that overlap already-encoded data, reducing GST_BUFFER_DURATION together with the bytes it drops.

static GstFlowReturngst_wavpack_enc_chain (GstPad * pad, GstBuffer * buf){  GstWavpackEnc *enc = GST_WAVPACK_ENC (gst_pad_get_parent (pad));  uint32_t sample_count = GST_BUFFER_SIZE (buf) / 4;  GstFlowReturn ret;  /* reset the last returns to GST_FLOW_OK. This is only set to something else   * while WavpackPackSamples() or more specific gst_wavpack_enc_push_block()   * so not valid anymore */  enc->srcpad_last_return = enc->wvcsrcpad_last_return = GST_FLOW_OK;  GST_DEBUG ("got %u raw samples", sample_count);  /* check if we already have a valid WavpackContext, otherwise make one */  if (!enc->wp_context) {    /* create raw context */    enc->wp_context =        WavpackOpenFileOutput (gst_wavpack_enc_push_block, &enc->wv_id,        (enc->correction_mode > 0) ? &enc->wvc_id : NULL);    if (!enc->wp_context) {      GST_ELEMENT_ERROR (enc, LIBRARY, INIT, (NULL),          ("error creating Wavpack context"));      gst_object_unref (enc);      gst_buffer_unref (buf);      return GST_FLOW_ERROR;    }    /* set the WavpackConfig according to our parameters */    gst_wavpack_enc_set_wp_config (enc);    /* set the configuration to the context now that we know everything     * and initialize the encoder */    if (!WavpackSetConfiguration (enc->wp_context,            enc->wp_config, (uint32_t) (-1))        || !WavpackPackInit (enc->wp_context)) {      GST_ELEMENT_ERROR (enc, LIBRARY, SETTINGS, (NULL),          ("error setting up wavpack encoding context"));      WavpackCloseFile (enc->wp_context);      gst_object_unref (enc);      gst_buffer_unref (buf);      return GST_FLOW_ERROR;    }    GST_DEBUG ("setup of encoding context successfull");  }  /* Save the timestamp of the first buffer. This will be later   * used as offset for all following buffers */  if (enc->timestamp_offset == GST_CLOCK_TIME_NONE) {    if (GST_BUFFER_TIMESTAMP_IS_VALID (buf)) {      enc->timestamp_offset = GST_BUFFER_TIMESTAMP (buf);      enc->next_ts = GST_BUFFER_TIMESTAMP (buf);    } else {      enc->timestamp_offset = 0;      enc->next_ts = 0;    }  }  /* Check if we have a continous stream, if not drop some samples or the buffer or   * insert some silence samples */  if (enc->next_ts != GST_CLOCK_TIME_NONE &&      GST_BUFFER_TIMESTAMP (buf) < enc->next_ts) {    guint64 diff = enc->next_ts - GST_BUFFER_TIMESTAMP (buf);    guint64 diff_bytes;    GST_WARNING_OBJECT (enc, "Buffer is older than previous "        "timestamp + duration (%" GST_TIME_FORMAT "< %" GST_TIME_FORMAT        "), cannot handle. 
Clipping buffer.",        GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)),        GST_TIME_ARGS (enc->next_ts));    diff_bytes =        GST_CLOCK_TIME_TO_FRAMES (diff, enc->samplerate) * enc->channels * 2;    if (diff_bytes >= GST_BUFFER_SIZE (buf)) {      gst_buffer_unref (buf);      return GST_FLOW_OK;    }    buf = gst_buffer_make_metadata_writable (buf);    GST_BUFFER_DATA (buf) += diff_bytes;    GST_BUFFER_SIZE (buf) -= diff_bytes;    GST_BUFFER_TIMESTAMP (buf) += diff;    if (GST_BUFFER_DURATION_IS_VALID (buf))      GST_BUFFER_DURATION (buf) -= diff;  }  /* Allow a diff of at most 5 ms */  if (enc->next_ts != GST_CLOCK_TIME_NONE      && GST_BUFFER_TIMESTAMP_IS_VALID (buf)) {    if (GST_BUFFER_TIMESTAMP (buf) != enc->next_ts &&        GST_BUFFER_TIMESTAMP (buf) - enc->next_ts > 5 * GST_MSECOND) {      GST_WARNING_OBJECT (enc,          "Discontinuity detected: %" G_GUINT64_FORMAT " > %" G_GUINT64_FORMAT,          GST_BUFFER_TIMESTAMP (buf) - enc->next_ts, 5 * GST_MSECOND);      WavpackFlushSamples (enc->wp_context);      enc->timestamp_offset += (GST_BUFFER_TIMESTAMP (buf) - enc->next_ts);    }  }//.........这里部分代码省略.........
Developer: prajnashi, project: gst-plugins-good, lines of code: 101


Example 7: gst_rtp_celt_pay_flush_queued
The CELT RTP payloader flushes its queue into a single packet and sets the accumulated queue duration on the output buffer.

static GstFlowReturn
gst_rtp_celt_pay_flush_queued (GstRtpCELTPay * rtpceltpay)
{
  GstFlowReturn ret;
  GstBuffer *buf, *outbuf;
  guint8 *payload, *spayload;
  guint payload_len;
  GstClockTime duration;
  GstRTPBuffer rtp = { NULL, };

  payload_len = rtpceltpay->bytes + rtpceltpay->sbytes;
  duration = rtpceltpay->qduration;

  GST_DEBUG_OBJECT (rtpceltpay, "flushing out %u, duration %" GST_TIME_FORMAT,
      payload_len, GST_TIME_ARGS (rtpceltpay->qduration));

  /* get a big enough packet for the sizes + payloads */
  outbuf = gst_rtp_buffer_new_allocate (payload_len, 0, 0);

  GST_BUFFER_DURATION (outbuf) = duration;

  gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtp);

  /* point to the payload for size headers and data */
  spayload = gst_rtp_buffer_get_payload (&rtp);
  payload = spayload + rtpceltpay->sbytes;

  while ((buf = g_queue_pop_head (rtpceltpay->queue))) {
    guint size;

    /* copy first timestamp to output */
    if (GST_BUFFER_PTS (outbuf) == -1)
      GST_BUFFER_PTS (outbuf) = GST_BUFFER_PTS (buf);

    /* write the size to the header */
    size = gst_buffer_get_size (buf);
    while (size > 0xff) {
      *spayload++ = 0xff;
      size -= 0xff;
    }
    *spayload++ = size;

    /* copy payload */
    size = gst_buffer_get_size (buf);
    gst_buffer_extract (buf, 0, payload, size);
    payload += size;

    gst_rtp_copy_meta (GST_ELEMENT_CAST (rtpceltpay), outbuf, buf,
        g_quark_from_static_string (GST_META_TAG_AUDIO_STR));
    gst_buffer_unref (buf);
  }
  gst_rtp_buffer_unmap (&rtp);

  /* we consumed it all */
  rtpceltpay->bytes = 0;
  rtpceltpay->sbytes = 0;
  rtpceltpay->qduration = 0;

  ret = gst_rtp_base_payload_push (GST_RTP_BASE_PAYLOAD (rtpceltpay), outbuf);

  return ret;
}
Developer: ConfusedReality, project: pkg_multimedia_gst-plugins-good, lines of code: 63


Example 8: gst_rtp_mux_chain
The RTP muxer records the running end position of the stream as the PTS plus the duration of the last buffer it pushed.

static GstFlowReturngst_rtp_mux_chain (GstPad * pad, GstObject * parent, GstBuffer * buffer){  GstRTPMux *rtp_mux;  GstFlowReturn ret;  GstRTPMuxPadPrivate *padpriv;  gboolean drop;  gboolean changed = FALSE;  GstRTPBuffer rtpbuffer = GST_RTP_BUFFER_INIT;  rtp_mux = GST_RTP_MUX (parent);  if (gst_pad_check_reconfigure (rtp_mux->srcpad)) {    GstCaps *current_caps = gst_pad_get_current_caps (pad);    if (!gst_rtp_mux_setcaps (pad, rtp_mux, current_caps)) {      ret = GST_FLOW_NOT_NEGOTIATED;      gst_buffer_unref (buffer);      goto out;    }    gst_caps_unref (current_caps);  }  GST_OBJECT_LOCK (rtp_mux);  padpriv = gst_pad_get_element_private (pad);  if (!padpriv) {    GST_OBJECT_UNLOCK (rtp_mux);    gst_buffer_unref (buffer);    return GST_FLOW_NOT_LINKED;  }  buffer = gst_buffer_make_writable (buffer);  if (!gst_rtp_buffer_map (buffer, GST_MAP_READWRITE, &rtpbuffer)) {    GST_OBJECT_UNLOCK (rtp_mux);    gst_buffer_unref (buffer);    GST_ERROR_OBJECT (rtp_mux, "Invalid RTP buffer");    return GST_FLOW_ERROR;  }  drop = !process_buffer_locked (rtp_mux, padpriv, &rtpbuffer);  gst_rtp_buffer_unmap (&rtpbuffer);  if (!drop) {    if (pad != rtp_mux->last_pad) {      changed = TRUE;      g_clear_object (&rtp_mux->last_pad);      rtp_mux->last_pad = g_object_ref (pad);    }    if (GST_BUFFER_DURATION_IS_VALID (buffer) &&        GST_BUFFER_PTS_IS_VALID (buffer))      rtp_mux->last_stop = GST_BUFFER_PTS (buffer) +          GST_BUFFER_DURATION (buffer);    else      rtp_mux->last_stop = GST_CLOCK_TIME_NONE;  }  GST_OBJECT_UNLOCK (rtp_mux);  if (changed)    gst_pad_sticky_events_foreach (pad, resend_events, rtp_mux);  if (drop) {    gst_buffer_unref (buffer);    ret = GST_FLOW_OK;  } else {    ret = gst_pad_push (rtp_mux->srcpad, buffer);  }out:  return ret;}
Developer: ConfusedReality, project: pkg_multimedia_gst-plugins-good, lines of code: 75


Example 9: gst_audio_segment_clip_clip_buffer
The audio segment clipper reads the buffer's duration (computing a fallback from the buffer size when it is invalid) to decide whether a clipped-out buffer already lies past the configured segment.

static GstFlowReturn
gst_audio_segment_clip_clip_buffer (GstSegmentClip * base, GstBuffer * buffer,
    GstBuffer ** outbuf)
{
  GstAudioSegmentClip *self = GST_AUDIO_SEGMENT_CLIP (base);
  GstSegment *segment = &base->segment;
  GstClockTime timestamp = GST_BUFFER_TIMESTAMP (buffer);
  GstClockTime duration = GST_BUFFER_DURATION (buffer);
  guint64 offset = GST_BUFFER_OFFSET (buffer);
  guint64 offset_end = GST_BUFFER_OFFSET_END (buffer);
  guint size = gst_buffer_get_size (buffer);

  if (!self->rate || !self->framesize) {
    GST_ERROR_OBJECT (self, "Not negotiated yet");
    gst_buffer_unref (buffer);
    return GST_FLOW_NOT_NEGOTIATED;
  }

  if (segment->format != GST_FORMAT_DEFAULT &&
      segment->format != GST_FORMAT_TIME) {
    GST_DEBUG_OBJECT (self, "Unsupported segment format %s",
        gst_format_get_name (segment->format));
    *outbuf = buffer;
    return GST_FLOW_OK;
  }

  if (!GST_BUFFER_TIMESTAMP_IS_VALID (buffer)) {
    GST_WARNING_OBJECT (self, "Buffer without valid timestamp");
    *outbuf = buffer;
    return GST_FLOW_OK;
  }

  *outbuf =
      gst_audio_buffer_clip (buffer, segment, self->rate, self->framesize);

  if (!*outbuf) {
    GST_DEBUG_OBJECT (self, "Buffer outside the configured segment");

    /* Now return unexpected if we're before/after the end */
    if (segment->format == GST_FORMAT_TIME) {
      if (segment->rate >= 0) {
        if (segment->stop != -1 && timestamp >= segment->stop)
          return GST_FLOW_EOS;
      } else {
        if (!GST_CLOCK_TIME_IS_VALID (duration))
          duration =
              gst_util_uint64_scale_int (size, GST_SECOND,
              self->framesize * self->rate);

        if (segment->start != -1 && timestamp + duration <= segment->start)
          return GST_FLOW_EOS;
      }
    } else {
      if (segment->rate >= 0) {
        if (segment->stop != -1 && offset != -1 && offset >= segment->stop)
          return GST_FLOW_EOS;
      } else if (offset != -1 || offset_end != -1) {
        if (offset_end == -1)
          offset_end = offset + size / self->framesize;

        if (segment->start != -1 && offset_end <= segment->start)
          return GST_FLOW_EOS;
      }
    }
  }

  return GST_FLOW_OK;
}
Developer: 0p1pp1, project: gst-plugins-bad, lines of code: 68


Example 10: gst_musepackdec_loop
The Musepack decoder derives both the timestamp and the duration of each decoded buffer from sample counts and the sample rate.

static voidgst_musepackdec_loop (GstPad * sinkpad){  GstMusepackDec *musepackdec;  GstFlowReturn flow;  GstBuffer *out;#ifdef MPC_IS_OLD_API  guint32 update_acc, update_bits;#else  mpc_frame_info frame;  mpc_status err;#endif  gint num_samples, samplerate, bitspersample;  musepackdec = GST_MUSEPACK_DEC (GST_PAD_PARENT (sinkpad));  samplerate = g_atomic_int_get (&musepackdec->rate);  if (samplerate == 0) {    if (!gst_musepack_stream_init (musepackdec))      goto pause_task;    gst_musepackdec_send_newsegment (musepackdec);    samplerate = g_atomic_int_get (&musepackdec->rate);  }  bitspersample = g_atomic_int_get (&musepackdec->bps);  flow = gst_pad_alloc_buffer_and_set_caps (musepackdec->srcpad, -1,      MPC_DECODER_BUFFER_LENGTH * 4, GST_PAD_CAPS (musepackdec->srcpad), &out);  if (flow != GST_FLOW_OK) {    GST_DEBUG_OBJECT (musepackdec, "Flow: %s", gst_flow_get_name (flow));    goto pause_task;  }#ifdef MPC_IS_OLD_API  num_samples = mpc_decoder_decode (musepackdec->d,      (MPC_SAMPLE_FORMAT *) GST_BUFFER_DATA (out), &update_acc, &update_bits);  if (num_samples < 0) {    GST_ERROR_OBJECT (musepackdec, "Failed to decode sample");    GST_ELEMENT_ERROR (musepackdec, STREAM, DECODE, (NULL), (NULL));    goto pause_task;  } else if (num_samples == 0) {    goto eos_and_pause;  }#else  frame.buffer = (MPC_SAMPLE_FORMAT *) GST_BUFFER_DATA (out);  err = mpc_demux_decode (musepackdec->d, &frame);  if (err != MPC_STATUS_OK) {    GST_ERROR_OBJECT (musepackdec, "Failed to decode sample");    GST_ELEMENT_ERROR (musepackdec, STREAM, DECODE, (NULL), (NULL));    goto pause_task;  } else if (frame.bits == -1) {    goto eos_and_pause;  }  num_samples = frame.samples;#endif  GST_BUFFER_SIZE (out) = num_samples * bitspersample;  GST_BUFFER_OFFSET (out) = musepackdec->segment.last_stop;  GST_BUFFER_TIMESTAMP (out) =      gst_util_uint64_scale_int (musepackdec->segment.last_stop,      GST_SECOND, samplerate);  GST_BUFFER_DURATION (out) =      gst_util_uint64_scale_int (num_samples, GST_SECOND, samplerate);  musepackdec->segment.last_stop += num_samples;  GST_LOG_OBJECT (musepackdec, "Pushing buffer, timestamp %" GST_TIME_FORMAT,      GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (out)));  flow = gst_pad_push (musepackdec->srcpad, out);  if (flow != GST_FLOW_OK) {    GST_DEBUG_OBJECT (musepackdec, "Flow: %s", gst_flow_get_name (flow));    goto pause_task;  }  /* check if we're at the end of a configured segment */  if (musepackdec->segment.stop != -1 &&      musepackdec->segment.last_stop >= musepackdec->segment.stop) {    gint64 stop_time;    GST_DEBUG_OBJECT (musepackdec, "Reached end of configured segment");    if ((musepackdec->segment.flags & GST_SEEK_FLAG_SEGMENT) == 0)      goto eos_and_pause;    GST_DEBUG_OBJECT (musepackdec, "Posting SEGMENT_DONE message");    stop_time = gst_util_uint64_scale_int (musepackdec->segment.stop,        GST_SECOND, samplerate);    gst_element_post_message (GST_ELEMENT (musepackdec),        gst_message_new_segment_done (GST_OBJECT (musepackdec),            GST_FORMAT_TIME, stop_time));//.........这里部分代码省略.........
Developer: drothlis, project: gst-plugins-bad, lines of code: 101


Example 11: gst_ac3_parse_handle_frame
The AC-3 parser shifts a dependent substream frame back by one buffer duration so that it occupies the same time slot as its base frame.

//.........这里部分代码省略.........      /* Loop till we get one frame from each substream */      do {        framesize += frmsiz;        if (!gst_byte_reader_skip (&reader, frmsiz)            || map.size < (framesize + 6)) {          more = TRUE;          break;        }        if (!gst_ac3_parse_frame_header (ac3parse, buf, framesize, &frmsiz,                NULL, NULL, NULL, &sid, &eac)) {          *skipsize = off + 2;          goto cleanup;        }      } while (sid);    }    /* We're now at the next frame, so no need to skip if resyncing */    frmsiz = 0;  }  if (lost_sync && !draining) {    guint16 word = 0;    GST_DEBUG_OBJECT (ac3parse, "resyncing; checking next frame syncword");    if (more || !gst_byte_reader_skip (&reader, frmsiz) ||        !gst_byte_reader_get_uint16_be (&reader, &word)) {      GST_DEBUG_OBJECT (ac3parse, "... but not sufficient data");      gst_base_parse_set_min_frame_size (parse, framesize + 8);      *skipsize = 0;      goto cleanup;    } else {      if (word != 0x0b77) {        GST_DEBUG_OBJECT (ac3parse, "0x%x not OK", word);        *skipsize = off + 2;        goto cleanup;      } else {        /* ok, got sync now, let's assume constant frame size */        gst_base_parse_set_min_frame_size (parse, framesize);      }    }  }  /* expect to have found a frame here */  g_assert (framesize);  ret = TRUE;  /* arrange for metadata setup */  if (G_UNLIKELY (sid)) {    /* dependent frame, no need to (ac)count for or consider further */    GST_LOG_OBJECT (parse, "sid: %d", sid);    frame->flags |= GST_BASE_PARSE_FRAME_FLAG_NO_FRAME;    /* TODO maybe also mark as DELTA_UNIT,     * if that does not surprise baseparse elsewhere */    /* occupies same time space as previous base frame */    if (G_LIKELY (GST_BUFFER_TIMESTAMP (buf) >= GST_BUFFER_DURATION (buf)))      GST_BUFFER_TIMESTAMP (buf) -= GST_BUFFER_DURATION (buf);    /* only shortcut if we already arranged for caps */    if (G_LIKELY (ac3parse->sample_rate > 0))      goto cleanup;  }  if (G_UNLIKELY (ac3parse->sample_rate != rate || ac3parse->channels != chans          || ac3parse->eac != eac)) {    GstCaps *caps = gst_caps_new_simple (eac ? "audio/x-eac3" : "audio/x-ac3",        "framed", G_TYPE_BOOLEAN, TRUE, "rate", G_TYPE_INT, rate,        "channels", G_TYPE_INT, chans, NULL);    gst_caps_set_simple (caps, "alignment", G_TYPE_STRING,        g_atomic_int_get (&ac3parse->align) == GST_AC3_PARSE_ALIGN_IEC61937 ?        "iec61937" : "frame", NULL);    gst_pad_set_caps (GST_BASE_PARSE_SRC_PAD (parse), caps);    gst_caps_unref (caps);    ac3parse->sample_rate = rate;    ac3parse->channels = chans;    ac3parse->eac = eac;    update_rate = TRUE;  }  if (G_UNLIKELY (ac3parse->blocks != blocks)) {    ac3parse->blocks = blocks;    update_rate = TRUE;  }  if (G_UNLIKELY (update_rate))    gst_base_parse_set_frame_rate (parse, rate, 256 * blocks, 2, 2);cleanup:  gst_buffer_unmap (buf, &map);  if (ret && framesize <= map.size) {    res = gst_base_parse_finish_frame (parse, frame, framesize);  }  return res;}
Developer: nnikos123, project: gst-plugins-good, lines of code: 101


Example 12: gst_pad_probe_info_get_buffer (GstEnginePipeline::HandoffCallback)
In Clementine's engine pipeline, a pad probe adds the buffer's duration to its timestamp to obtain the buffer end time and decide whether the configured end of the current song has been reached.

GstPadProbeReturn GstEnginePipeline::HandoffCallback(GstPad*,
                                                     GstPadProbeInfo* info,
                                                     gpointer self) {
  GstEnginePipeline* instance = reinterpret_cast<GstEnginePipeline*>(self);
  GstBuffer* buf = gst_pad_probe_info_get_buffer(info);

  QList<BufferConsumer*> consumers;
  {
    QMutexLocker l(&instance->buffer_consumers_mutex_);
    consumers = instance->buffer_consumers_;
  }

  for (BufferConsumer* consumer : consumers) {
    gst_buffer_ref(buf);
    consumer->ConsumeBuffer(buf, instance->id());
  }

  // Calculate the end time of this buffer so we can stop playback if it's
  // after the end time of this song.
  if (instance->end_offset_nanosec_ > 0) {
    quint64 start_time = GST_BUFFER_TIMESTAMP(buf) - instance->segment_start_;
    quint64 duration = GST_BUFFER_DURATION(buf);
    quint64 end_time = start_time + duration;

    if (end_time > instance->end_offset_nanosec_) {
      if (instance->has_next_valid_url()) {
        if (instance->next_url_ == instance->url_ &&
            instance->next_beginning_offset_nanosec_ ==
                instance->end_offset_nanosec_) {
          // The "next" song is actually the next segment of this file - so
          // cheat and keep on playing, but just tell the Engine we've moved on.
          instance->end_offset_nanosec_ = instance->next_end_offset_nanosec_;
          instance->next_url_ = QUrl();
          instance->next_beginning_offset_nanosec_ = 0;
          instance->next_end_offset_nanosec_ = 0;

          // GstEngine will try to seek to the start of the new section, but
          // we're already there so ignore it.
          instance->ignore_next_seek_ = true;
          emit instance->EndOfStreamReached(instance->id(), true);
        } else {
          // We have a next song but we can't cheat, so move to it normally.
          instance->TransitionToNext();
        }
      } else {
        // There's no next song
        emit instance->EndOfStreamReached(instance->id(), false);
      }
    }
  }

  if (instance->emit_track_ended_on_time_discontinuity_) {
    if (GST_BUFFER_FLAG_IS_SET(buf, GST_BUFFER_FLAG_DISCONT) ||
        GST_BUFFER_OFFSET(buf) < instance->last_buffer_offset_) {
      qLog(Debug) << "Buffer discontinuity - emitting EOS";
      instance->emit_track_ended_on_time_discontinuity_ = false;
      emit instance->EndOfStreamReached(instance->id(), true);
    }
  }

  instance->last_buffer_offset_ = GST_BUFFER_OFFSET(buf);

  return GST_PAD_PROBE_OK;
}
Developer: ivovegter, project: Clementine, lines of code: 64
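Example 12 (and Example 17 further down) show the read-side counterpart of the stamping pattern: add the duration to the timestamp to find where a buffer ends on the timeline, while guarding against unset fields. A minimal sketch of that check, assuming GStreamer 1.x (<gst/gst.h>) and a hypothetical helper name:

/* Sketch (hypothetical helper): where does this buffer end on the timeline? */
static GstClockTime
buffer_end_time (GstBuffer * buf)
{
  GstClockTime pts = GST_BUFFER_PTS (buf);
  GstClockTime dur = GST_BUFFER_DURATION (buf);

  if (!GST_CLOCK_TIME_IS_VALID (pts))
    return GST_CLOCK_TIME_NONE;   /* no position known */
  if (!GST_CLOCK_TIME_IS_VALID (dur))
    return pts;                   /* duration unknown: treat as instantaneous */
  return pts + dur;
}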


Example 13: gst_rtp_amr_pay_handle_buffer
The AMR RTP payloader copies the incoming buffer duration to the packet, falling back to 20 ms when the duration is unknown.

static GstFlowReturngst_rtp_amr_pay_handle_buffer (GstBaseRTPPayload * basepayload,    GstBuffer * buffer){  GstRtpAMRPay *rtpamrpay;  GstFlowReturn ret;  guint size, payload_len;  GstBuffer *outbuf;  guint8 *payload, *data, *payload_amr;  GstClockTime timestamp, duration;  guint packet_len, mtu;  gint i, num_packets, num_nonempty_packets;  gint amr_len;  gint *frame_size;  rtpamrpay = GST_RTP_AMR_PAY (basepayload);  mtu = GST_BASE_RTP_PAYLOAD_MTU (rtpamrpay);  size = GST_BUFFER_SIZE (buffer);  data = GST_BUFFER_DATA (buffer);  timestamp = GST_BUFFER_TIMESTAMP (buffer);  duration = GST_BUFFER_DURATION (buffer);  /* setup frame size pointer */  if (rtpamrpay->mode == GST_RTP_AMR_P_MODE_NB)    frame_size = nb_frame_size;  else    frame_size = wb_frame_size;  GST_DEBUG_OBJECT (basepayload, "got %d bytes", size);  /* FIXME, only    * octet aligned, no interleaving, single channel, no CRC,   * no robust-sorting. To fix this you need to implement the downstream   * negotiation function. */  /* first count number of packets and total amr frame size */  amr_len = num_packets = num_nonempty_packets = 0;  for (i = 0; i < size; i++) {    guint8 FT;    gint fr_size;    FT = (data[i] & 0x78) >> 3;    fr_size = frame_size[FT];    GST_DEBUG_OBJECT (basepayload, "frame size %d", fr_size);    /* FIXME, we don't handle this yet.. */    if (fr_size <= 0)      goto wrong_size;    amr_len += fr_size;    num_nonempty_packets++;    num_packets++;    i += fr_size;  }  if (amr_len > size)    goto incomplete_frame;  /* we need one extra byte for the CMR, the ToC is in the input   * data */  payload_len = size + 1;  /* get packet len to check against MTU */  packet_len = gst_rtp_buffer_calc_packet_len (payload_len, 0, 0);  if (packet_len > mtu)    goto too_big;  /* now alloc output buffer */  outbuf = gst_rtp_buffer_new_allocate (payload_len, 0, 0);  /* copy timestamp */  GST_BUFFER_TIMESTAMP (outbuf) = timestamp;  /* FIXME: when we do more than one AMR frame per packet, fix this */  if (duration != GST_CLOCK_TIME_NONE)    GST_BUFFER_DURATION (outbuf) = duration;  else {    GST_BUFFER_DURATION (outbuf) = 20 * GST_MSECOND;  }  if (GST_BUFFER_IS_DISCONT (buffer)) {    GST_DEBUG_OBJECT (basepayload, "discont, setting marker bit");    GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DISCONT);    gst_rtp_buffer_set_marker (outbuf, TRUE);  }  /* get payload, this is now writable */  payload = gst_rtp_buffer_get_payload (outbuf);  /*   0 1 2 3 4 5 6 7    *  +-+-+-+-+-+-+-+-+   *  |  CMR  |R|R|R|R|   *  +-+-+-+-+-+-+-+-+   */  payload[0] = 0xF0;            /* CMR, no specific mode requested */  /* this is where we copy the AMR data, after num_packets FTs and the   * CMR. */  payload_amr = payload + num_packets + 1;//.........这里部分代码省略.........
Developer: roopar, project: gst-plugins-good, lines of code: 101


Example 14: gst_amc_audio_dec_handle_frame
The Android MediaCodec (amc) audio decoder reads the PTS and duration of the input buffer before feeding its contents to the codec in chunks.

static GstFlowReturngst_amc_audio_dec_handle_frame (GstAudioDecoder * decoder, GstBuffer * inbuf){    GstAmcAudioDec *self;    gint idx;    GstAmcBuffer *buf;    GstAmcBufferInfo buffer_info;    guint offset = 0;    GstClockTime timestamp, duration, timestamp_offset = 0;    GstMapInfo minfo;    memset (&minfo, 0, sizeof (minfo));    self = GST_AMC_AUDIO_DEC (decoder);    GST_DEBUG_OBJECT (self, "Handling frame");    /* Make sure to keep a reference to the input here,     * it can be unreffed from the other thread if     * finish_frame() is called */    if (inbuf)        inbuf = gst_buffer_ref (inbuf);    if (!self->started) {        GST_ERROR_OBJECT (self, "Codec not started yet");        if (inbuf)            gst_buffer_unref (inbuf);        return GST_FLOW_NOT_NEGOTIATED;    }    if (self->eos) {        GST_WARNING_OBJECT (self, "Got frame after EOS");        if (inbuf)            gst_buffer_unref (inbuf);        return GST_FLOW_EOS;    }    if (self->flushing)        goto flushing;    if (self->downstream_flow_ret != GST_FLOW_OK)        goto downstream_error;    if (!inbuf)        return gst_amc_audio_dec_drain (self);    timestamp = GST_BUFFER_PTS (inbuf);    duration = GST_BUFFER_DURATION (inbuf);    gst_buffer_map (inbuf, &minfo, GST_MAP_READ);    while (offset < minfo.size) {        /* Make sure to release the base class stream lock, otherwise         * _loop() can't call _finish_frame() and we might block forever         * because no input buffers are released */        GST_AUDIO_DECODER_STREAM_UNLOCK (self);        /* Wait at most 100ms here, some codecs don't fail dequeueing if         * the codec is flushing, causing deadlocks during shutdown */        idx = gst_amc_codec_dequeue_input_buffer (self->codec, 100000);        GST_AUDIO_DECODER_STREAM_LOCK (self);        if (idx < 0) {            if (self->flushing)                goto flushing;            switch (idx) {            case INFO_TRY_AGAIN_LATER:                GST_DEBUG_OBJECT (self, "Dequeueing input buffer timed out");                continue;             /* next try */                break;            case G_MININT:                GST_ERROR_OBJECT (self, "Failed to dequeue input buffer");                goto dequeue_error;            default:                g_assert_not_reached ();                break;            }            continue;        }        if (idx >= self->n_input_buffers)            goto invalid_buffer_index;        if (self->flushing)            goto flushing;        if (self->downstream_flow_ret != GST_FLOW_OK) {            memset (&buffer_info, 0, sizeof (buffer_info));            gst_amc_codec_queue_input_buffer (self->codec, idx, &buffer_info);            goto downstream_error;        }        /* Now handle the frame */        /* Copy the buffer content in chunks of size as requested         * by the port */        buf = &self->input_buffers[idx];        memset (&buffer_info, 0, sizeof (buffer_info));        buffer_info.offset = 0;//.........这里部分代码省略.........
Developer: PeterXu, project: gst-mobile, lines of code: 101


Example 15: gst_wavpack_enc_push_block
The WavPack encoder stamps every complete WavPack block with a timestamp and duration computed from the block's sample index and sample count.

static intgst_wavpack_enc_push_block (void *id, void *data, int32_t count){  GstWavpackEncWriteID *wid = (GstWavpackEncWriteID *) id;  GstWavpackEnc *enc = GST_WAVPACK_ENC (wid->wavpack_enc);  GstFlowReturn *flow;  GstBuffer *buffer;  GstPad *pad;  guchar *block = (guchar *) data;  pad = (wid->correction) ? enc->wvcsrcpad : enc->srcpad;  flow =      (wid->correction) ? &enc->wvcsrcpad_last_return : &enc->      srcpad_last_return;  *flow = gst_pad_alloc_buffer_and_set_caps (pad, GST_BUFFER_OFFSET_NONE,      count, GST_PAD_CAPS (pad), &buffer);  if (*flow != GST_FLOW_OK) {    GST_WARNING_OBJECT (enc, "flow on %s:%s = %s",        GST_DEBUG_PAD_NAME (pad), gst_flow_get_name (*flow));    return FALSE;  }  g_memmove (GST_BUFFER_DATA (buffer), block, count);  if (count > sizeof (WavpackHeader) && memcmp (block, "wvpk", 4) == 0) {    /* if it's a Wavpack block set buffer timestamp and duration, etc */    WavpackHeader wph;    GST_LOG_OBJECT (enc, "got %d bytes of encoded wavpack %sdata",        count, (wid->correction) ? "correction " : "");    gst_wavpack_read_header (&wph, block);    /* Only set when pushing the first buffer again, in that case     * we don't want to delay the buffer or push newsegment events     */    if (!wid->passthrough) {      /* Only push complete blocks */      if (enc->pending_buffer == NULL) {        enc->pending_buffer = buffer;        enc->pending_offset = wph.block_index;      } else if (enc->pending_offset == wph.block_index) {        enc->pending_buffer = gst_buffer_join (enc->pending_buffer, buffer);      } else {        GST_ERROR ("Got incomplete block, dropping");        gst_buffer_unref (enc->pending_buffer);        enc->pending_buffer = buffer;        enc->pending_offset = wph.block_index;      }      if (!(wph.flags & FINAL_BLOCK))        return TRUE;      buffer = enc->pending_buffer;      enc->pending_buffer = NULL;      enc->pending_offset = 0;      /* if it's the first wavpack block, send a NEW_SEGMENT event */      if (wph.block_index == 0) {        gst_pad_push_event (pad,            gst_event_new_new_segment (FALSE,                1.0, GST_FORMAT_TIME, 0, GST_BUFFER_OFFSET_NONE, 0));        /* save header for later reference, so we can re-send it later on         * EOS with fixed up values for total sample count etc. 
*/        if (enc->first_block == NULL && !wid->correction) {          enc->first_block =              g_memdup (GST_BUFFER_DATA (buffer), GST_BUFFER_SIZE (buffer));          enc->first_block_size = GST_BUFFER_SIZE (buffer);        }      }    }    /* set buffer timestamp, duration, offset, offset_end from     * the wavpack header */    GST_BUFFER_TIMESTAMP (buffer) = enc->timestamp_offset +        gst_util_uint64_scale_int (GST_SECOND, wph.block_index,        enc->samplerate);    GST_BUFFER_DURATION (buffer) =        gst_util_uint64_scale_int (GST_SECOND, wph.block_samples,        enc->samplerate);    GST_BUFFER_OFFSET (buffer) = wph.block_index;    GST_BUFFER_OFFSET_END (buffer) = wph.block_index + wph.block_samples;  } else {    /* if it's something else set no timestamp and duration on the buffer */    GST_DEBUG_OBJECT (enc, "got %d bytes of unknown data", count);    GST_BUFFER_TIMESTAMP (buffer) = GST_CLOCK_TIME_NONE;    GST_BUFFER_DURATION (buffer) = GST_CLOCK_TIME_NONE;  }  /* push the buffer and forward errors */  GST_DEBUG_OBJECT (enc, "pushing buffer with %d bytes",      GST_BUFFER_SIZE (buffer));  *flow = gst_pad_push (pad, buffer);  if (*flow != GST_FLOW_OK) {    GST_WARNING_OBJECT (enc, "flow on %s:%s = %s",//.........这里部分代码省略.........
Developer: prajnashi, project: gst-plugins-good, lines of code: 101


Example 16: pad_chain (gst-openmax)
The OpenMAX chain function scales the input buffer's duration by the consumed byte offset to derive a timestamp offset for each chunk handed to the OMX component.

//.........这里部分代码省略.........                if (omx_buffer->nOffset == 0 &&                    self->share_input_buffer)                {                    {                        GstBuffer *old_buf;                        old_buf = omx_buffer->pAppPrivate;                        if (old_buf)                        {                            gst_buffer_unref (old_buf);                        }                        else if (omx_buffer->pBuffer)                        {                            g_free (omx_buffer->pBuffer);                        }                    }                    omx_buffer->pBuffer = GST_BUFFER_DATA (buf);                    omx_buffer->nAllocLen = GST_BUFFER_SIZE (buf);                    omx_buffer->nFilledLen = GST_BUFFER_SIZE (buf);                    omx_buffer->pAppPrivate = buf;                }                else                {                    omx_buffer->nFilledLen = MIN (GST_BUFFER_SIZE (buf) - buffer_offset,                                                  omx_buffer->nAllocLen - omx_buffer->nOffset);                    memcpy (omx_buffer->pBuffer + omx_buffer->nOffset, GST_BUFFER_DATA (buf) + buffer_offset, omx_buffer->nFilledLen);                }                if (self->use_timestamps)                {                    GstClockTime timestamp_offset = 0;                    if (buffer_offset && GST_BUFFER_DURATION (buf) != GST_CLOCK_TIME_NONE)                    {                        timestamp_offset = gst_util_uint64_scale_int (buffer_offset,                                                                      GST_BUFFER_DURATION (buf),                                                                      GST_BUFFER_SIZE (buf));                    }                    omx_buffer->nTimeStamp = gst_util_uint64_scale_int (GST_BUFFER_TIMESTAMP (buf) + timestamp_offset,                                                                        OMX_TICKS_PER_SECOND,                                                                        GST_SECOND);                }                buffer_offset += omx_buffer->nFilledLen;                GST_LOG_OBJECT (self, "release_buffer");                /** @todo untaint buffer */                g_omx_port_release_buffer (in_port, omx_buffer);            }            else            {                GST_WARNING_OBJECT (self, "null buffer");                ret = GST_FLOW_WRONG_STATE;                goto out_flushing;            }        }    }    else    {        GST_WARNING_OBJECT (self, "done");        ret = GST_FLOW_UNEXPECTED;    }    if (!self->share_input_buffer)
Developer: roopar, project: gst-openmax, lines of code: 67


Example 17: probe_cb (insanity-gst)
A test pad probe adds the duration to the timestamp to get the buffer end time and checks it against the configured segment to detect clipping errors.

/* Pipeline Callbacks */static gbooleanprobe_cb (InsanityGstTest * ptest, GstPad * pad, GstMiniObject * object,    gpointer userdata){  InsanityTest *test = INSANITY_TEST (ptest);  global_last_probe = g_get_monotonic_time ();  DECODER_TEST_LOCK ();  if (GST_IS_BUFFER (object)) {    GstBuffer *buf;    GstClockTime ts;    buf = GST_BUFFER (object);    ts = GST_BUFFER_PTS (buf);    /* First check clipping */    if (glob_testing_parser == FALSE && GST_CLOCK_TIME_IS_VALID (ts) &&        glob_waiting_segment == FALSE) {      GstClockTime ts_end, cstart, cstop;      /* Check if buffer is completely outside the segment */      ts_end = ts;      if (GST_BUFFER_DURATION_IS_VALID (buf))        ts_end += GST_BUFFER_DURATION (buf);      /* Check if buffer is completely outside the segment */      ts_end = ts;      if (!gst_segment_clip (&glob_last_segment,              glob_last_segment.format, ts, ts_end, &cstart, &cstop)) {        char *msg = g_strdup_printf ("Got timestamp %" GST_TIME_FORMAT " -- %"            GST_TIME_FORMAT ", outside configured segment (%" GST_TIME_FORMAT            " -- %" GST_TIME_FORMAT "), method %s",            GST_TIME_ARGS (ts), GST_TIME_ARGS (ts_end),            GST_TIME_ARGS (glob_last_segment.start),            GST_TIME_ARGS (glob_last_segment.stop),            test_get_name (glob_in_progress));        insanity_test_validate_checklist_item (INSANITY_TEST (ptest),            "segment-clipping", FALSE, msg);        g_free (msg);        glob_bad_segment_clipping = TRUE;      }    }    switch (glob_in_progress) {      case TEST_NONE:        if (glob_waiting_first_segment == TRUE)          insanity_test_validate_checklist_item (test, "first-segment",              FALSE, "Got a buffer before the first segment");        /* Got the first buffer, starting testing dance */        next_test (test);        break;      case TEST_POSITION:        test_position (test, buf);        break;      case TEST_FAST_FORWARD:      case TEST_BACKWARD_PLAYBACK:      case TEST_FAST_BACKWARD:      {        gint64 stime_ts;        if (GST_CLOCK_TIME_IS_VALID (ts) == FALSE ||            glob_waiting_segment == TRUE) {          break;        }        stime_ts = gst_segment_to_stream_time (&glob_last_segment,            glob_last_segment.format, ts);        if (GST_CLOCK_TIME_IS_VALID (glob_seek_first_buf_ts) == FALSE) {          GstClockTime expected_ts =              gst_segment_to_stream_time (&glob_last_segment,              glob_last_segment.format,              glob_seek_rate <              0 ? glob_seek_stop_ts : glob_seek_segment_seektime);          GstClockTimeDiff diff = ABS (GST_CLOCK_DIFF (stime_ts, expected_ts));          if (diff > SEEK_THRESHOLD) {            gchar *valmsg =                g_strdup_printf ("Received buffer timestamp %" GST_TIME_FORMAT                " Seeek wanted %" GST_TIME_FORMAT "",                GST_TIME_ARGS (stime_ts),                GST_TIME_ARGS (expected_ts));            validate_current_test (test, FALSE, valmsg);            next_test (test);            g_free (valmsg);          } else            glob_seek_first_buf_ts = stime_ts;        } else {          GstClockTimeDiff diff =              GST_CLOCK_DIFF (stime_ts, glob_seek_first_buf_ts);          if (diff < 0)            diff = -diff;//.........这里部分代码省略.........
Developer: freedesktop-unofficial-mirror, project: gstreamer__attic__insanity-gst, lines of code: 101


Example 18: gst_vaapidecode_step
The VA-API decoder copies the duration stored in the surface proxy onto the output video buffer.

static GstFlowReturngst_vaapidecode_step(GstVaapiDecode *decode){    GstVaapiSurfaceProxy *proxy;    GstVaapiDecoderStatus status;    GstBuffer *buffer;    GstFlowReturn ret;    GstClockTime timestamp;    gint64 end_time;    for (;;) {        end_time = decode->render_time_base;        if (!end_time)            end_time = g_get_monotonic_time();        end_time += GST_TIME_AS_USECONDS(decode->last_buffer_time);        end_time += G_TIME_SPAN_SECOND;        proxy = gst_vaapi_decoder_get_surface(decode->decoder, &status);        if (!proxy) {            if (status == GST_VAAPI_DECODER_STATUS_ERROR_NO_SURFACE) {                gboolean was_signalled;                g_mutex_lock(decode->decoder_mutex);                was_signalled = g_cond_wait_until(                    decode->decoder_ready,                    decode->decoder_mutex,                    end_time                );                g_mutex_unlock(decode->decoder_mutex);                if (was_signalled)                    continue;                goto error_decode_timeout;            }            if (status != GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA)                goto error_decode;            /* More data is needed */            break;        }        g_object_weak_ref(            G_OBJECT(proxy),            (GWeakNotify)gst_vaapidecode_release,            decode        );        buffer = gst_vaapi_video_buffer_new(decode->display);        if (!buffer)            goto error_create_buffer;        timestamp = GST_VAAPI_SURFACE_PROXY_TIMESTAMP(proxy);        if (!decode->render_time_base)            decode->render_time_base = g_get_monotonic_time();        decode->last_buffer_time = timestamp;        GST_BUFFER_TIMESTAMP(buffer) = timestamp;        GST_BUFFER_DURATION(buffer) = GST_VAAPI_SURFACE_PROXY_DURATION(proxy);        gst_buffer_set_caps(buffer, GST_PAD_CAPS(decode->srcpad));        if (GST_VAAPI_SURFACE_PROXY_TFF(proxy))            GST_BUFFER_FLAG_SET(buffer, GST_VIDEO_BUFFER_TFF);        gst_vaapi_video_buffer_set_surface_proxy(            GST_VAAPI_VIDEO_BUFFER(buffer),            proxy        );        ret = gst_pad_push(decode->srcpad, buffer);        if (ret != GST_FLOW_OK)            goto error_commit_buffer;        g_object_unref(proxy);    }    return GST_FLOW_OK;    /* ERRORS */error_decode_timeout:    {        GST_DEBUG("decode timeout. Decoder required a VA surface but none "                  "got available within one second");        return GST_FLOW_UNEXPECTED;    }error_decode:    {        GST_DEBUG("decode error %d", status);        switch (status) {        case GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_CODEC:        case GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_PROFILE:        case GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_CHROMA_FORMAT:            ret = GST_FLOW_NOT_SUPPORTED;            break;        default:            ret = GST_FLOW_UNEXPECTED;            break;        }        return ret;    }error_create_buffer:    {        const GstVaapiID surface_id =            gst_vaapi_surface_get_id(GST_VAAPI_SURFACE_PROXY_SURFACE(proxy));//.........这里部分代码省略.........
Developer: cbetz421, project: gst-vaapi, lines of code: 101


Example 19: gst_rtp_celt_pay_handle_buffer
The CELT RTP payloader accumulates buffer durations in qduration and flushes the queued data once adding another frame would overflow the packet size or duration.

static GstFlowReturngst_rtp_celt_pay_handle_buffer (GstRTPBasePayload * basepayload,    GstBuffer * buffer){  GstFlowReturn ret;  GstRtpCELTPay *rtpceltpay;  gsize payload_len;  GstMapInfo map;  GstClockTime duration, packet_dur;  guint i, ssize, packet_len;  rtpceltpay = GST_RTP_CELT_PAY (basepayload);  ret = GST_FLOW_OK;  gst_buffer_map (buffer, &map, GST_MAP_READ);  switch (rtpceltpay->packet) {    case 0:      /* ident packet. We need to parse the headers to construct the RTP       * properties. */      if (!gst_rtp_celt_pay_parse_ident (rtpceltpay, map.data, map.size))        goto parse_error;      goto cleanup;    case 1:      /* comment packet, we ignore it */      goto cleanup;    default:      /* other packets go in the payload */      break;  }  gst_buffer_unmap (buffer, &map);  duration = GST_BUFFER_DURATION (buffer);  GST_LOG_OBJECT (rtpceltpay,      "got buffer of duration %" GST_TIME_FORMAT ", size %" G_GSIZE_FORMAT,      GST_TIME_ARGS (duration), map.size);  /* calculate the size of the size field and the payload */  ssize = 1;  for (i = map.size; i > 0xff; i -= 0xff)    ssize++;  GST_DEBUG_OBJECT (rtpceltpay, "bytes for size %u", ssize);  /* calculate what the new size and duration would be of the packet */  payload_len = ssize + map.size + rtpceltpay->bytes + rtpceltpay->sbytes;  if (rtpceltpay->qduration != -1 && duration != -1)    packet_dur = rtpceltpay->qduration + duration;  else    packet_dur = 0;  packet_len = gst_rtp_buffer_calc_packet_len (payload_len, 0, 0);  if (gst_rtp_base_payload_is_filled (basepayload, packet_len, packet_dur)) {    /* size or duration would overflow the packet, flush the queued data */    ret = gst_rtp_celt_pay_flush_queued (rtpceltpay);  }  /* queue the packet */  gst_rtp_celt_pay_add_queued (rtpceltpay, buffer, ssize, map.size, duration);done:  rtpceltpay->packet++;  return ret;  /* ERRORS */cleanup:  {    gst_buffer_unmap (buffer, &map);    goto done;  }parse_error:  {    GST_ELEMENT_ERROR (rtpceltpay, STREAM, DECODE, (NULL),        ("Error parsing first identification packet."));    gst_buffer_unmap (buffer, &map);    return GST_FLOW_ERROR;  }}
Developer: ConfusedReality, project: pkg_multimedia_gst-plugins-good, lines of code: 83


Example 20: gst_ladspa_source_type_fill
The LADSPA source sets the duration to the difference between the next and the current buffer time, handling both forward and reverse playback.

static GstFlowReturngst_ladspa_source_type_fill (GstBaseSrc * base, guint64 offset,    guint length, GstBuffer * buffer){  GstLADSPASource *ladspa = GST_LADSPA_SOURCE (base);  GstClockTime next_time;  gint64 next_sample, next_byte;  gint bytes, samples;  GstElementClass *eclass;  GstMapInfo map;  gint samplerate, bpf;  /* example for tagging generated data */  if (!ladspa->tags_pushed) {    GstTagList *taglist;    taglist = gst_tag_list_new (GST_TAG_DESCRIPTION, "ladspa wave", NULL);    eclass = GST_ELEMENT_CLASS (gst_ladspa_source_type_parent_class);    if (eclass->send_event)      eclass->send_event (GST_ELEMENT (base), gst_event_new_tag (taglist));    else      gst_tag_list_unref (taglist);    ladspa->tags_pushed = TRUE;  }  if (ladspa->eos_reached) {    GST_INFO_OBJECT (ladspa, "eos");    return GST_FLOW_EOS;  }  samplerate = GST_AUDIO_INFO_RATE (&ladspa->info);  bpf = GST_AUDIO_INFO_BPF (&ladspa->info);  /* if no length was given, use our default length in samples otherwise convert   * the length in bytes to samples. */  if (length == -1)    samples = ladspa->samples_per_buffer;  else    samples = length / bpf;  /* if no offset was given, use our next logical byte */  if (offset == -1)    offset = ladspa->next_byte;  /* now see if we are at the byteoffset we think we are */  if (offset != ladspa->next_byte) {    GST_DEBUG_OBJECT (ladspa, "seek to new offset %" G_GUINT64_FORMAT, offset);    /* we have a discont in the expected sample offset, do a 'seek' */    ladspa->next_sample = offset / bpf;    ladspa->next_time =        gst_util_uint64_scale_int (ladspa->next_sample, GST_SECOND, samplerate);    ladspa->next_byte = offset;  }  /* check for eos */  if (ladspa->check_seek_stop &&      (ladspa->sample_stop > ladspa->next_sample) &&      (ladspa->sample_stop < ladspa->next_sample + samples)      ) {    /* calculate only partial buffer */    ladspa->generate_samples_per_buffer =        ladspa->sample_stop - ladspa->next_sample;    next_sample = ladspa->sample_stop;    ladspa->eos_reached = TRUE;  } else {    /* calculate full buffer */    ladspa->generate_samples_per_buffer = samples;    next_sample =        ladspa->next_sample + (ladspa->reverse ? (-samples) : samples);  }  bytes = ladspa->generate_samples_per_buffer * bpf;  next_byte = ladspa->next_byte + (ladspa->reverse ? (-bytes) : bytes);  next_time = gst_util_uint64_scale_int (next_sample, GST_SECOND, samplerate);  GST_LOG_OBJECT (ladspa, "samplerate %d", samplerate);  GST_LOG_OBJECT (ladspa,      "next_sample %" G_GINT64_FORMAT ", ts %" GST_TIME_FORMAT, next_sample,      GST_TIME_ARGS (next_time));  gst_buffer_set_size (buffer, bytes);  GST_BUFFER_OFFSET (buffer) = ladspa->next_sample;  GST_BUFFER_OFFSET_END (buffer) = next_sample;  if (!ladspa->reverse) {    GST_BUFFER_TIMESTAMP (buffer) =        ladspa->timestamp_offset + ladspa->next_time;    GST_BUFFER_DURATION (buffer) = next_time - ladspa->next_time;  } else {    GST_BUFFER_TIMESTAMP (buffer) = ladspa->timestamp_offset + next_time;    GST_BUFFER_DURATION (buffer) = ladspa->next_time - next_time;  }  gst_object_sync_values (GST_OBJECT (ladspa), GST_BUFFER_TIMESTAMP (buffer));  ladspa->next_time = next_time;  ladspa->next_sample = next_sample;  ladspa->next_byte = next_byte;//.........这里部分代码省略.........
Developer: 0p1pp1, Project: gst-plugins-bad, Lines of code: 101
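The source above derives both the output timestamp and GST_BUFFER_DURATION from a running sample counter and the sample rate, using gst_util_uint64_scale_int so the conversion does not overflow. A reduced sketch of that arithmetic follows; the sample counts and the S16 mono layout are assumed values, not the plugin's actual state.

#include <gst/gst.h>

int
main (int argc, char *argv[])
{
  GstBuffer *buf;
  const gint samplerate = 44100;
  const guint64 next_sample = 44100;    /* first sample carried by this buffer */
  const guint64 samples = 1024;         /* number of samples in this buffer */
  GstClockTime ts, end_ts;

  gst_init (&argc, &argv);

  /* sample counts -> nanoseconds, start and end converted the same way */
  ts = gst_util_uint64_scale_int (next_sample, GST_SECOND, samplerate);
  end_ts = gst_util_uint64_scale_int (next_sample + samples, GST_SECOND, samplerate);

  buf = gst_buffer_new_allocate (NULL, samples * 2 /* S16 mono */, NULL);
  GST_BUFFER_PTS (buf) = ts;
  GST_BUFFER_DURATION (buf) = end_ts - ts;   /* duration from the sample delta */
  GST_BUFFER_OFFSET (buf) = next_sample;
  GST_BUFFER_OFFSET_END (buf) = next_sample + samples;

  g_print ("pts %" GST_TIME_FORMAT " dur %" GST_TIME_FORMAT "\n",
      GST_TIME_ARGS (GST_BUFFER_PTS (buf)),
      GST_TIME_ARGS (GST_BUFFER_DURATION (buf)));

  gst_buffer_unref (buf);
  return 0;
}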


Example 21: gst_decklink_video_sink_prepare

static GstFlowReturngst_decklink_video_sink_prepare (GstBaseSink * bsink, GstBuffer * buffer){  GstDecklinkVideoSink *self = GST_DECKLINK_VIDEO_SINK_CAST (bsink);  GstVideoFrame vframe;  IDeckLinkMutableVideoFrame *frame;  guint8 *outdata, *indata;  GstFlowReturn flow_ret;  HRESULT ret;  GstClockTime timestamp, duration;  GstClockTime running_time, running_time_duration;  gint i;  GstClock *clock;  GST_DEBUG_OBJECT (self, "Preparing buffer %p", buffer);  // FIXME: Handle no timestamps  if (!GST_BUFFER_TIMESTAMP_IS_VALID (buffer)) {    return GST_FLOW_ERROR;  }  timestamp = GST_BUFFER_TIMESTAMP (buffer);  duration = GST_BUFFER_DURATION (buffer);  if (duration == GST_CLOCK_TIME_NONE) {    duration =        gst_util_uint64_scale_int (GST_SECOND, self->info.fps_d,        self->info.fps_n);  }  running_time =      gst_segment_to_running_time (&GST_BASE_SINK_CAST (self)->segment,      GST_FORMAT_TIME, timestamp);  running_time_duration =      gst_segment_to_running_time (&GST_BASE_SINK_CAST (self)->segment,      GST_FORMAT_TIME, timestamp + duration) - running_time;  // FIXME: https://bugzilla.gnome.org/show_bug.cgi?id=742916  // We need to drop late buffers here immediately instead of  // potentially overflowing the internal queue of the hardware  clock = gst_element_get_clock (GST_ELEMENT_CAST (self));  if (clock) {    GstClockTime clock_running_time, base_time, clock_time, latency,        max_lateness;    base_time = gst_element_get_base_time (GST_ELEMENT_CAST (self));    clock_time = gst_clock_get_time (clock);    if (base_time != GST_CLOCK_TIME_NONE && clock_time != GST_CLOCK_TIME_NONE) {      clock_running_time = clock_time - base_time;      latency = gst_base_sink_get_latency (GST_BASE_SINK_CAST (self));      max_lateness = gst_base_sink_get_max_lateness (GST_BASE_SINK_CAST (self));      if (clock_running_time >          running_time + running_time_duration + latency + max_lateness) {        GST_DEBUG_OBJECT (self,            "Late buffer: %" GST_TIME_FORMAT " > %" GST_TIME_FORMAT,            GST_TIME_ARGS (clock_running_time),            GST_TIME_ARGS (running_time + running_time_duration));        if (self->last_render_time == GST_CLOCK_TIME_NONE            || (self->last_render_time < clock_running_time                && clock_running_time - self->last_render_time >= GST_SECOND)) {          GST_DEBUG_OBJECT (self,              "Rendering frame nonetheless because we had none for more than 1s");          running_time = clock_running_time;          running_time_duration = 0;        } else {          GST_WARNING_OBJECT (self, "Dropping frame");          gst_object_unref (clock);          return GST_FLOW_OK;        }      }    }    gst_object_unref (clock);  }  self->last_render_time = running_time;  ret = self->output->output->CreateVideoFrame (self->info.width,      self->info.height, self->info.stride[0], bmdFormat8BitYUV,      bmdFrameFlagDefault, &frame);  if (ret != S_OK) {    GST_ELEMENT_ERROR (self, STREAM, FAILED,        (NULL), ("Failed to create video frame: 0x%08x", ret));    return GST_FLOW_ERROR;  }  if (!gst_video_frame_map (&vframe, &self->info, buffer, GST_MAP_READ)) {    GST_ERROR_OBJECT (self, "Failed to map video frame");    flow_ret = GST_FLOW_ERROR;    goto out;  }  frame->GetBytes ((void **) &outdata);  indata = (guint8 *) GST_VIDEO_FRAME_PLANE_DATA (&vframe, 0);  for (i = 0; i < self->info.height; i++) {    memcpy (outdata, indata, GST_VIDEO_FRAME_WIDTH (&vframe) * 2);    indata += GST_VIDEO_FRAME_PLANE_STRIDE (&vframe, 0);    outdata += frame->GetRowBytes ();  }  
gst_video_frame_unmap (&vframe);  /* ... remainder of the function omitted ... */
Developer: ego5710, Project: gst-plugins-bad, Lines of code: 101
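Note how the sink does not rely on upstream alone: when GST_BUFFER_DURATION is GST_CLOCK_TIME_NONE it recomputes one frame's duration from the negotiated framerate. A small sketch of that fallback is shown below; the 30000/1001 framerate is an assumption standing in for the sink's negotiated caps.

#include <gst/gst.h>

/* Return the buffer's duration, or derive it from the framerate when
 * upstream did not set one (a common fallback in video sinks). */
static GstClockTime
effective_frame_duration (GstBuffer * buf, gint fps_n, gint fps_d)
{
  GstClockTime dur = GST_BUFFER_DURATION (buf);

  if (!GST_CLOCK_TIME_IS_VALID (dur))
    dur = gst_util_uint64_scale_int (GST_SECOND, fps_d, fps_n);

  return dur;
}

int
main (int argc, char *argv[])
{
  GstBuffer *buf;

  gst_init (&argc, &argv);

  buf = gst_buffer_new_allocate (NULL, 1, NULL);   /* duration deliberately left unset */
  g_print ("one frame at 29.97 fps: %" GST_TIME_FORMAT "\n",
      GST_TIME_ARGS (effective_frame_duration (buf, 30000, 1001)));

  gst_buffer_unref (buf);
  return 0;
}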


Example 22: celt_dec_chain_parse_data

static GstFlowReturncelt_dec_chain_parse_data (GstCeltDec * dec, GstBuffer * buf,    GstClockTime timestamp, GstClockTime duration){  GstFlowReturn res = GST_FLOW_OK;  gint size;  guint8 *data;  GstBuffer *outbuf;  gint16 *out_data;  gint error = CELT_OK;  if (timestamp != -1) {    dec->segment.last_stop = timestamp;    dec->granulepos = -1;  }  if (buf) {    data = GST_BUFFER_DATA (buf);    size = GST_BUFFER_SIZE (buf);    GST_DEBUG_OBJECT (dec, "received buffer of size %u", size);    if (!GST_BUFFER_TIMESTAMP_IS_VALID (buf)        && GST_BUFFER_OFFSET_END_IS_VALID (buf)) {      dec->granulepos = GST_BUFFER_OFFSET_END (buf);      GST_DEBUG_OBJECT (dec,          "Taking granulepos from upstream: %" G_GUINT64_FORMAT,          dec->granulepos);    }    /* copy timestamp */  } else {    /* concealment data, pass NULL as the bits parameters */    GST_DEBUG_OBJECT (dec, "creating concealment data");    data = NULL;    size = 0;  }  res = gst_pad_alloc_buffer_and_set_caps (dec->srcpad,      GST_BUFFER_OFFSET_NONE, dec->frame_size * dec->header.nb_channels * 2,      GST_PAD_CAPS (dec->srcpad), &outbuf);  if (res != GST_FLOW_OK) {    GST_DEBUG_OBJECT (dec, "buf alloc flow: %s", gst_flow_get_name (res));    return res;  }  out_data = (gint16 *) GST_BUFFER_DATA (outbuf);  GST_LOG_OBJECT (dec, "decoding frame");  error = celt_decode (dec->state, data, size, out_data);  if (error != CELT_OK) {    GST_WARNING_OBJECT (dec, "Decoding error: %d", error);    return GST_FLOW_ERROR;  }  if (dec->granulepos == -1) {    if (dec->segment.format != GST_FORMAT_TIME) {      GST_WARNING_OBJECT (dec, "segment not initialized or not TIME format");      dec->granulepos = dec->frame_size;    } else {      dec->granulepos = gst_util_uint64_scale_int (dec->segment.last_stop,          dec->header.sample_rate, GST_SECOND) + dec->frame_size;    }    GST_DEBUG_OBJECT (dec, "granulepos=%" G_GINT64_FORMAT, dec->granulepos);  }  GST_BUFFER_OFFSET (outbuf) = dec->granulepos - dec->frame_size;  GST_BUFFER_OFFSET_END (outbuf) = dec->granulepos;  GST_BUFFER_TIMESTAMP (outbuf) =      gst_util_uint64_scale_int (dec->granulepos - dec->frame_size, GST_SECOND,      dec->header.sample_rate);  GST_BUFFER_DURATION (outbuf) = dec->frame_duration;  dec->granulepos += dec->frame_size;  dec->segment.last_stop += dec->frame_duration;  GST_LOG_OBJECT (dec, "pushing buffer with ts=%" GST_TIME_FORMAT ", dur=%"      GST_TIME_FORMAT, GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (outbuf)),      GST_TIME_ARGS (dec->frame_duration));  res = gst_pad_push (dec->srcpad, outbuf);  if (res != GST_FLOW_OK)    GST_DEBUG_OBJECT (dec, "flow: %s", gst_flow_get_name (res));  return res;}
Developer: prajnashi, Project: gst-plugins-bad, Lines of code: 88
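The decoder maps an Ogg-style granule position (a sample count) to the output buffer's timestamp and keeps a fixed per-frame GST_BUFFER_DURATION. A condensed sketch of that mapping follows, with assumed CELT-like numbers (480 samples per frame at 48 kHz); it is not the decoder's actual code.

#include <gst/gst.h>

int
main (int argc, char *argv[])
{
  GstBuffer *outbuf;
  const gint rate = 48000;
  const guint64 frame_size = 480;   /* samples per decoded frame (assumed) */
  guint64 granulepos = 4800;        /* sample position after this frame */
  GstClockTime frame_duration;

  gst_init (&argc, &argv);

  frame_duration = gst_util_uint64_scale_int (frame_size, GST_SECOND, rate);

  outbuf = gst_buffer_new_allocate (NULL, frame_size * 2, NULL);
  GST_BUFFER_OFFSET (outbuf) = granulepos - frame_size;
  GST_BUFFER_OFFSET_END (outbuf) = granulepos;
  GST_BUFFER_PTS (outbuf) =
      gst_util_uint64_scale_int (granulepos - frame_size, GST_SECOND, rate);
  GST_BUFFER_DURATION (outbuf) = frame_duration;   /* same length for every frame */

  g_print ("pts %" GST_TIME_FORMAT " dur %" GST_TIME_FORMAT "\n",
      GST_TIME_ARGS (GST_BUFFER_PTS (outbuf)),
      GST_TIME_ARGS (GST_BUFFER_DURATION (outbuf)));

  gst_buffer_unref (outbuf);
  return 0;
}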


Example 23: gst_dvbsub_overlay_chain_video

static GstFlowReturngst_dvbsub_overlay_chain_video (GstPad * pad, GstObject * parent,    GstBuffer * buffer){  GstDVBSubOverlay *overlay = GST_DVBSUB_OVERLAY (parent);  GstFlowReturn ret = GST_FLOW_OK;  gint64 start, stop;  guint64 cstart, cstop;  gboolean in_seg;  GstClockTime vid_running_time, vid_running_time_end;  if (GST_VIDEO_INFO_FORMAT (&overlay->info) == GST_VIDEO_FORMAT_UNKNOWN)    return GST_FLOW_NOT_NEGOTIATED;  if (!GST_BUFFER_TIMESTAMP_IS_VALID (buffer))    goto missing_timestamp;  start = GST_BUFFER_TIMESTAMP (buffer);  GST_LOG_OBJECT (overlay,      "Video segment: %" GST_SEGMENT_FORMAT " --- Subtitle position: %"      GST_TIME_FORMAT " --- BUFFER: ts=%" GST_TIME_FORMAT,      &overlay->video_segment,      GST_TIME_ARGS (overlay->subtitle_segment.position),      GST_TIME_ARGS (start));  /* ignore buffers that are outside of the current segment */  if (!GST_BUFFER_DURATION_IS_VALID (buffer)) {    stop = GST_CLOCK_TIME_NONE;  } else {    stop = start + GST_BUFFER_DURATION (buffer);  }  in_seg = gst_segment_clip (&overlay->video_segment, GST_FORMAT_TIME,      start, stop, &cstart, &cstop);  if (!in_seg) {    GST_DEBUG_OBJECT (overlay, "Buffer outside configured segment -- dropping");    gst_buffer_unref (buffer);    return GST_FLOW_OK;  }  buffer = gst_buffer_make_writable (buffer);  GST_BUFFER_TIMESTAMP (buffer) = cstart;  if (GST_BUFFER_DURATION_IS_VALID (buffer))    GST_BUFFER_DURATION (buffer) = cstop - cstart;  vid_running_time =      gst_segment_to_running_time (&overlay->video_segment, GST_FORMAT_TIME,      cstart);  if (GST_BUFFER_DURATION_IS_VALID (buffer))    vid_running_time_end =        gst_segment_to_running_time (&overlay->video_segment, GST_FORMAT_TIME,        cstop);  else    vid_running_time_end = vid_running_time;  GST_DEBUG_OBJECT (overlay, "Video running time: %" GST_TIME_FORMAT,      GST_TIME_ARGS (vid_running_time));  overlay->video_segment.position = GST_BUFFER_TIMESTAMP (buffer);  g_mutex_lock (&overlay->dvbsub_mutex);  if (!g_queue_is_empty (overlay->pending_subtitles)) {    DVBSubtitles *tmp, *candidate = NULL;    while (!g_queue_is_empty (overlay->pending_subtitles)) {      tmp = g_queue_peek_head (overlay->pending_subtitles);      if (tmp->pts > vid_running_time_end) {        /* For a future video frame */        break;      } else if (tmp->num_rects == 0) {        /* Clear screen */        if (overlay->current_subtitle)          dvb_subtitles_free (overlay->current_subtitle);        overlay->current_subtitle = NULL;        if (candidate)          dvb_subtitles_free (candidate);        candidate = NULL;        g_queue_pop_head (overlay->pending_subtitles);        dvb_subtitles_free (tmp);        tmp = NULL;      } else if (tmp->pts + tmp->page_time_out * GST_SECOND *          ABS (overlay->subtitle_segment.rate) >= vid_running_time) {        if (candidate)          dvb_subtitles_free (candidate);        candidate = tmp;        g_queue_pop_head (overlay->pending_subtitles);      } else {        /* Too late */        dvb_subtitles_free (tmp);        tmp = NULL;        g_queue_pop_head (overlay->pending_subtitles);      }    }    if (candidate) {      GST_DEBUG_OBJECT (overlay,          "Time to show the next subtitle page (%" GST_TIME_FORMAT " >= %"          GST_TIME_FORMAT ") - it has %u regions",//.........这里部分代码省略.........
Developer: GrokImageCompression, Project: gst-plugins-bad, Lines of code: 101
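Before blending, the overlay clips each incoming video buffer against the configured segment and rewrites its timestamp and GST_BUFFER_DURATION from the clipped range. The stripped-down sketch below shows that clipping step against a hand-built segment; the 1-second segment start and the buffer times are made-up values.

#include <gst/gst.h>

int
main (int argc, char *argv[])
{
  GstSegment segment;
  GstBuffer *buf;
  guint64 cstart, cstop;

  gst_init (&argc, &argv);

  /* pretend the segment starts at 1 s */
  gst_segment_init (&segment, GST_FORMAT_TIME);
  segment.start = 1 * GST_SECOND;

  buf = gst_buffer_new_allocate (NULL, 1, NULL);
  GST_BUFFER_PTS (buf) = 900 * GST_MSECOND;        /* starts before the segment */
  GST_BUFFER_DURATION (buf) = 200 * GST_MSECOND;

  if (gst_segment_clip (&segment, GST_FORMAT_TIME,
          GST_BUFFER_PTS (buf),
          GST_BUFFER_PTS (buf) + GST_BUFFER_DURATION (buf), &cstart, &cstop)) {
    buf = gst_buffer_make_writable (buf);
    GST_BUFFER_PTS (buf) = cstart;
    GST_BUFFER_DURATION (buf) = cstop - cstart;    /* duration shrinks to the overlap */
    g_print ("clipped: pts %" GST_TIME_FORMAT " dur %" GST_TIME_FORMAT "\n",
        GST_TIME_ARGS (cstart), GST_TIME_ARGS (cstop - cstart));
  } else {
    g_print ("buffer entirely outside the segment, would be dropped\n");
  }

  gst_buffer_unref (buf);
  return 0;
}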


Example 24: gst_decklink_video_sink_prepare

static GstFlowReturngst_decklink_video_sink_prepare (GstBaseSink * bsink, GstBuffer * buffer){  GstDecklinkVideoSink *self = GST_DECKLINK_VIDEO_SINK_CAST (bsink);  GstVideoFrame vframe;  IDeckLinkMutableVideoFrame *frame;  guint8 *outdata, *indata;  GstFlowReturn flow_ret;  HRESULT ret;  GstClockTime timestamp, duration;  GstClockTime running_time, running_time_duration;  GstClockTime latency, render_delay;  GstClockTimeDiff ts_offset;  gint i;  GstDecklinkVideoFormat caps_format;  BMDPixelFormat format;  gint stride;  GstVideoTimeCodeMeta *tc_meta;  GST_DEBUG_OBJECT (self, "Preparing buffer %p", buffer);  // FIXME: Handle no timestamps  if (!GST_BUFFER_TIMESTAMP_IS_VALID (buffer)) {    return GST_FLOW_ERROR;  }  caps_format = gst_decklink_type_from_video_format (self->info.finfo->format);  format = gst_decklink_pixel_format_from_type (caps_format);  timestamp = GST_BUFFER_TIMESTAMP (buffer);  duration = GST_BUFFER_DURATION (buffer);  if (duration == GST_CLOCK_TIME_NONE) {    duration =        gst_util_uint64_scale_int (GST_SECOND, self->info.fps_d,        self->info.fps_n);  }  running_time =      gst_segment_to_running_time (&GST_BASE_SINK_CAST (self)->segment,      GST_FORMAT_TIME, timestamp);  running_time_duration =      gst_segment_to_running_time (&GST_BASE_SINK_CAST (self)->segment,      GST_FORMAT_TIME, timestamp + duration) - running_time;  /* See gst_base_sink_adjust_time() */  latency = gst_base_sink_get_latency (bsink);  render_delay = gst_base_sink_get_render_delay (bsink);  ts_offset = gst_base_sink_get_ts_offset (bsink);  running_time += latency;  if (ts_offset < 0) {    ts_offset = -ts_offset;    if ((GstClockTime) ts_offset < running_time)      running_time -= ts_offset;    else      running_time = 0;  } else {    running_time += ts_offset;  }  if (running_time > render_delay)    running_time -= render_delay;  else    running_time = 0;  ret = self->output->output->CreateVideoFrame (self->info.width,      self->info.height, self->info.stride[0], format, bmdFrameFlagDefault,      &frame);  if (ret != S_OK) {    GST_ELEMENT_ERROR (self, STREAM, FAILED,        (NULL), ("Failed to create video frame: 0x%08x", ret));    return GST_FLOW_ERROR;  }  if (!gst_video_frame_map (&vframe, &self->info, buffer, GST_MAP_READ)) {    GST_ERROR_OBJECT (self, "Failed to map video frame");    flow_ret = GST_FLOW_ERROR;    goto out;  }  frame->GetBytes ((void **) &outdata);  indata = (guint8 *) GST_VIDEO_FRAME_PLANE_DATA (&vframe, 0);  stride = MIN (GST_VIDEO_FRAME_PLANE_STRIDE (&vframe, 0), frame->GetRowBytes());  for (i = 0; i < self->info.height; i++) {    memcpy (outdata, indata, stride);    indata += GST_VIDEO_FRAME_PLANE_STRIDE (&vframe, 0);    outdata += frame->GetRowBytes ();  }  gst_video_frame_unmap (&vframe);  tc_meta = gst_buffer_get_video_time_code_meta (buffer);  if (tc_meta) {    BMDTimecodeFlags bflags = (BMDTimecodeFlags) 0;    gchar *tc_str;    if (((GstVideoTimeCodeFlags) (tc_meta->tc.                config.flags)) & GST_VIDEO_TIME_CODE_FLAGS_DROP_FRAME)      bflags = (BMDTimecodeFlags) (bflags | bmdTimecodeIsDropFrame);    else      bflags = (BMDTimecodeFlags) (bflags | bmdTimecodeFlagDefault);//.........这里部分代码省略.........
Developer: auni53, Project: gst-plugins-bad, Lines of code: 101


Example 25: gst_gdiscreencapsrc_create

static GstFlowReturngst_gdiscreencapsrc_create (GstPushSrc * push_src, GstBuffer ** buf){  GstGDIScreenCapSrc *src = GST_GDISCREENCAPSRC (push_src);  GstBuffer *new_buf;  GstFlowReturn res;  gint new_buf_size;  GstClock *clock;  GstClockTime time;  GstClockTime base_time;  if (G_UNLIKELY (!src->info.bmiHeader.biWidth ||          !src->info.bmiHeader.biHeight)) {    GST_ELEMENT_ERROR (src, CORE, NEGOTIATION, (NULL),        ("format wasn't negotiated before create function"));    return GST_FLOW_NOT_NEGOTIATED;  } else if (G_UNLIKELY (src->rate_numerator == 0 && src->frames == 1)) {    GST_DEBUG_OBJECT (src, "eos: 0 framerate, frame %d", (gint) src->frames);    return GST_FLOW_UNEXPECTED;  }  new_buf_size = GST_ROUND_UP_4 (src->info.bmiHeader.biWidth * 3) *      (-src->info.bmiHeader.biHeight);  GST_LOG_OBJECT (src,      "creating buffer of %lu bytes with %dx%d image for frame %d",      new_buf_size, src->info.bmiHeader.biWidth,      -src->info.bmiHeader.biHeight, (gint) src->frames);  res =      gst_pad_alloc_buffer_and_set_caps (GST_BASE_SRC_PAD (src),      GST_BUFFER_OFFSET_NONE, new_buf_size,      GST_PAD_CAPS (GST_BASE_SRC_PAD (push_src)), &new_buf);  if (res != GST_FLOW_OK) {    GST_DEBUG_OBJECT (src, "could not allocate buffer, reason %s",        gst_flow_get_name (res));    return res;  }  clock = gst_element_get_clock (GST_ELEMENT (src));  if (clock) {    /* Calculate sync time. */    GstClockTime frame_time =        gst_util_uint64_scale_int (src->frames * GST_SECOND,        src->rate_denominator, src->rate_numerator);    time = gst_clock_get_time (clock);    base_time = gst_element_get_base_time (GST_ELEMENT (src));    GST_BUFFER_TIMESTAMP (new_buf) = MAX (time - base_time, frame_time);  } else {    GST_BUFFER_TIMESTAMP (new_buf) = GST_CLOCK_TIME_NONE;  }  /* Do screen capture and put it into buffer... */  gst_gdiscreencapsrc_screen_capture (src, new_buf);  if (src->rate_numerator) {    GST_BUFFER_DURATION (new_buf) =        gst_util_uint64_scale_int (GST_SECOND,        src->rate_denominator, src->rate_numerator);    if (clock) {      GST_BUFFER_DURATION (new_buf) =          MAX (GST_BUFFER_DURATION (new_buf),          gst_clock_get_time (clock) - time);    }  } else {    /* NONE means forever */    GST_BUFFER_DURATION (new_buf) = GST_CLOCK_TIME_NONE;  }  GST_BUFFER_OFFSET (new_buf) = src->frames;  src->frames++;  GST_BUFFER_OFFSET_END (new_buf) = src->frames;  gst_object_unref (clock);  *buf = new_buf;  return GST_FLOW_OK;}
Developer: spunktsch, Project: svtplayer, Lines of code: 79
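A detail worth noting in the capture source: the nominal GST_BUFFER_DURATION computed from the framerate is treated as a lower bound and stretched when the capture itself took longer. The compact sketch below reproduces that pattern with made-up numbers and no real clock; it is illustrative only.

#include <gst/gst.h>

int
main (int argc, char *argv[])
{
  GstBuffer *buf;
  const gint fps_n = 25, fps_d = 1;
  GstClockTime nominal, elapsed;

  gst_init (&argc, &argv);

  buf = gst_buffer_new_allocate (NULL, 1, NULL);

  /* nominal frame duration from the configured framerate */
  nominal = gst_util_uint64_scale_int (GST_SECOND, fps_d, fps_n);
  /* pretend grabbing the screen took 55 ms this time */
  elapsed = 55 * GST_MSECOND;

  /* never advertise a duration shorter than the time the capture really took */
  GST_BUFFER_DURATION (buf) = MAX (nominal, elapsed);

  g_print ("frame duration: %" GST_TIME_FORMAT "\n",
      GST_TIME_ARGS (GST_BUFFER_DURATION (buf)));

  gst_buffer_unref (buf);
  return 0;
}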


Example 26: gst_rsvg_dec_chain

static GstFlowReturngst_rsvg_dec_chain (GstPad * pad, GstBuffer * buffer){  GstRsvgDec *rsvg = GST_RSVG_DEC (GST_PAD_PARENT (pad));  gboolean completed = FALSE;  const guint8 *data;  guint size;  gboolean ret = GST_FLOW_OK;  /* first_timestamp is used slightly differently where a framerate     is given or not.     If there is a frame rate, it will be used as a base.     If there is not, it will be used to keep track of the timestamp     of the first buffer, to be used as the timestamp of the output     buffer. When a buffer is output, first timestamp will resync to     the next buffer's timestamp. */  if (rsvg->first_timestamp == GST_CLOCK_TIME_NONE) {    if (GST_BUFFER_TIMESTAMP_IS_VALID (buffer))      rsvg->first_timestamp = GST_BUFFER_TIMESTAMP (buffer);    else if (rsvg->fps_n != 0)      rsvg->first_timestamp = 0;  }  gst_adapter_push (rsvg->adapter, buffer);  size = gst_adapter_available (rsvg->adapter);  /* "<svg></svg>" */  while (size >= 5 + 6 && ret == GST_FLOW_OK) {    guint i;    data = gst_adapter_peek (rsvg->adapter, size);    for (i = size - 6; i >= 5; i--) {      if (memcmp (data + i, "</svg>", 6) == 0) {        completed = TRUE;        size = i + 6;        break;      }    }    if (completed) {      GstBuffer *outbuf = NULL;      GST_LOG_OBJECT (rsvg, "have complete svg of %u bytes", size);      data = gst_adapter_peek (rsvg->adapter, size);      ret = gst_rsvg_decode_image (rsvg, data, size, &outbuf);      if (ret != GST_FLOW_OK)        break;      if (rsvg->first_timestamp != GST_CLOCK_TIME_NONE) {        GST_BUFFER_TIMESTAMP (outbuf) = rsvg->first_timestamp;        GST_BUFFER_DURATION (outbuf) = GST_CLOCK_TIME_NONE;        if (GST_BUFFER_DURATION_IS_VALID (buffer)) {          GstClockTime end =              GST_BUFFER_TIMESTAMP_IS_VALID (buffer) ?              GST_BUFFER_TIMESTAMP (buffer) : rsvg->first_timestamp;          end += GST_BUFFER_DURATION (buffer);          GST_BUFFER_DURATION (outbuf) = end - GST_BUFFER_TIMESTAMP (outbuf);        }        if (rsvg->fps_n == 0) {          rsvg->first_timestamp = GST_CLOCK_TIME_NONE;        } else {          GST_BUFFER_DURATION (outbuf) =              gst_util_uint64_scale (rsvg->frame_count, rsvg->fps_d,              rsvg->fps_n * GST_SECOND);        }      } else if (rsvg->fps_n != 0) {        GST_BUFFER_TIMESTAMP (outbuf) =            rsvg->first_timestamp + gst_util_uint64_scale (rsvg->frame_count,            rsvg->fps_d, rsvg->fps_n * GST_SECOND);        GST_BUFFER_DURATION (outbuf) =            gst_util_uint64_scale (rsvg->frame_count, rsvg->fps_d,            rsvg->fps_n * GST_SECOND);      } else {        GST_BUFFER_TIMESTAMP (outbuf) = rsvg->first_timestamp;        GST_BUFFER_DURATION (outbuf) = GST_CLOCK_TIME_NONE;      }      rsvg->frame_count++;      if (rsvg->need_newsegment) {        gst_pad_push_event (rsvg->srcpad,            gst_event_new_new_segment (FALSE, 1.0, GST_FORMAT_TIME, 0, -1, 0));        rsvg->need_newsegment = FALSE;      }      if (rsvg->pending_events) {        GList *l;        for (l = rsvg->pending_events; l; l = l->next)          gst_pad_push_event (rsvg->srcpad, l->data);        g_list_free (rsvg->pending_events);        rsvg->pending_events = NULL;      }      if (rsvg->pending_tags) {        gst_element_found_tags (GST_ELEMENT_CAST (rsvg), rsvg->pending_tags);        rsvg->pending_tags = NULL;//.........这里部分代码省略.........
Developer: thiagoss, Project: gst-plugins-bad, Lines of code: 101
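For readers who only need the basic pattern that all of these examples build on, here is a minimal, self-contained GStreamer 1.x program that stamps a buffer with a timestamp and a duration and reads them back. It is a sketch for illustration and is unrelated to the projects quoted above; the 25 fps figure is an arbitrary assumption.

#include <gst/gst.h>

int
main (int argc, char *argv[])
{
  GstBuffer *buf;

  gst_init (&argc, &argv);

  buf = gst_buffer_new_allocate (NULL, 4096, NULL);

  /* GST_BUFFER_PTS and GST_BUFFER_DURATION are plain lvalue macros */
  GST_BUFFER_PTS (buf) = 0;
  GST_BUFFER_DURATION (buf) = gst_util_uint64_scale_int (GST_SECOND, 1, 25);

  if (GST_BUFFER_DURATION_IS_VALID (buf))
    g_print ("duration: %" GST_TIME_FORMAT "\n",
        GST_TIME_ARGS (GST_BUFFER_DURATION (buf)));

  gst_buffer_unref (buf);
  return 0;
}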



Note: The GST_BUFFER_DURATION examples in this article were compiled from source code and documentation platforms such as GitHub and MSDocs. The snippets are selected from open-source projects contributed by their respective developers, and copyright in the source code remains with the original authors. Please consult the corresponding project's license before redistributing or reusing the code, and do not repost without permission.

