This tutorial of C++ GST_BUFFER_DATA function code examples is quite practical; we hope it helps you.
This article collects typical usage examples of the GST_BUFFER_DATA function in C++. If you are wondering exactly how GST_BUFFER_DATA is used, how to call it, or simply want to see it in real code, the hand-picked examples here should help. Below, 30 code examples of the GST_BUFFER_DATA function are shown, sorted by popularity by default. (A minimal standalone sketch of the basic pattern is included right after Example 1.)

Example 1: play_loop

static void
play_loop (GstPad * pad)
{
  GstFlowReturn ret;
  GstNsfDec *nsfdec;
  GstBuffer *out;
  gint64 value, offset, time;
  GstFormat format;

  nsfdec = GST_NSFDEC (gst_pad_get_parent (pad));

  out = gst_buffer_new_and_alloc (nsfdec->blocksize);
  gst_buffer_set_caps (out, GST_PAD_CAPS (pad));

  nsf_frame (nsfdec->nsf);
  apu_process (GST_BUFFER_DATA (out), nsfdec->blocksize / nsfdec->bps);

  /* get offset in samples */
  format = GST_FORMAT_DEFAULT;
  gst_nsfdec_src_convert (nsfdec->srcpad,
      GST_FORMAT_BYTES, nsfdec->total_bytes, &format, &offset);
  GST_BUFFER_OFFSET (out) = offset;

  /* get current timestamp */
  format = GST_FORMAT_TIME;
  gst_nsfdec_src_convert (nsfdec->srcpad,
      GST_FORMAT_BYTES, nsfdec->total_bytes, &format, &time);
  GST_BUFFER_TIMESTAMP (out) = time;

  /* update position and get new timestamp to calculate duration */
  nsfdec->total_bytes += nsfdec->blocksize;

  /* get offset in samples */
  format = GST_FORMAT_DEFAULT;
  gst_nsfdec_src_convert (nsfdec->srcpad,
      GST_FORMAT_BYTES, nsfdec->total_bytes, &format, &value);
  GST_BUFFER_OFFSET_END (out) = value;

  format = GST_FORMAT_TIME;
  gst_nsfdec_src_convert (nsfdec->srcpad,
      GST_FORMAT_BYTES, nsfdec->total_bytes, &format, &value);
  GST_BUFFER_DURATION (out) = value - time;

  if ((ret = gst_pad_push (nsfdec->srcpad, out)) != GST_FLOW_OK)
    goto pause;

done:
  gst_object_unref (nsfdec);
  return;

  /* ERRORS */
pause:
  {
    const gchar *reason = gst_flow_get_name (ret);

    GST_DEBUG_OBJECT (nsfdec, "pausing task, reason %s", reason);
    gst_pad_pause_task (pad);

    if (ret == GST_FLOW_UNEXPECTED) {
      /* perform EOS logic, FIXME, segment seek? */
      gst_pad_push_event (pad, gst_event_new_eos ());
    } else if (ret == GST_FLOW_NOT_LINKED || ret < GST_FLOW_UNEXPECTED) {
      /* for fatal errors we post an error message */
      GST_ELEMENT_ERROR (nsfdec, STREAM, FAILED,
          (NULL), ("streaming task paused, reason %s", reason));
      gst_pad_push_event (pad, gst_event_new_eos ());
    }
    goto done;
  }
}
Developer ID: lubing521, Project: gst-embedded-builder, Lines of code: 71
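Before moving on to the longer examples, here is the basic pattern most of them share, reduced to a minimal sketch. This snippet is not taken from any project above; it assumes GStreamer 0.10 (where GST_BUFFER_DATA and GST_BUFFER_SIZE exist) and the function name is purely illustrative: allocate a buffer, then write its payload through the pointer GST_BUFFER_DATA returns.

#include <gst/gst.h>
#include <string.h>

/* Minimal sketch (illustrative, GStreamer 0.10 assumed): fill a freshly
 * allocated buffer with zeros through GST_BUFFER_DATA. */
static GstBuffer *
make_silence_buffer (guint size)
{
  GstBuffer *buf = gst_buffer_new_and_alloc (size);

  /* GST_BUFFER_DATA yields a guint8 * to the payload,
   * GST_BUFFER_SIZE its length in bytes (both 0.10-only macros). */
  memset (GST_BUFFER_DATA (buf), 0, GST_BUFFER_SIZE (buf));
  return buf;
}

In GStreamer 1.0 the GST_BUFFER_DATA macro was removed; Example 3 below shows the gst_buffer_map()/gst_buffer_unmap() replacement used when a project has to support both versions.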
Example 2: gst_video_mark_yuv

static GstFlowReturn
gst_video_mark_yuv (GstVideoMark * videomark, GstBuffer * buffer)
{
  GstVideoFormat format;
  gint i, pw, ph, row_stride, pixel_stride, offset;
  gint width, height, req_width, req_height;
  guint8 *d, *data;
  guint64 pattern_shift;
  guint8 color;

  data = GST_BUFFER_DATA (buffer);

  format = videomark->format;
  width = videomark->width;
  height = videomark->height;

  pw = videomark->pattern_width;
  ph = videomark->pattern_height;
  row_stride = gst_video_format_get_row_stride (format, 0, width);
  pixel_stride = gst_video_format_get_pixel_stride (format, 0);
  offset = gst_video_format_get_component_offset (format, 0, width, height);

  req_width =
      (videomark->pattern_count + videomark->pattern_data_count) * pw +
      videomark->left_offset;
  req_height = videomark->bottom_offset + ph;
  if (req_width > width || req_height > height) {
    GST_ELEMENT_ERROR (videomark, STREAM, WRONG_TYPE, (NULL),
        ("videomark pattern doesn't fit video, need at least %ix%i (stream has %ix%i)",
            req_width, req_height, width, height));
    return GST_FLOW_ERROR;
  }

  /* draw the bottom left pixels */
  for (i = 0; i < videomark->pattern_count; i++) {
    d = data + offset;
    /* move to start of bottom left */
    d += row_stride * (height - ph - videomark->bottom_offset) +
        pixel_stride * videomark->left_offset;
    /* move to i-th pattern */
    d += pixel_stride * pw * i;

    if (i & 1)
      /* odd pixels must be white */
      color = 255;
    else
      color = 0;

    /* draw box of width * height */
    gst_video_mark_draw_box (videomark, d, pw, ph, row_stride, pixel_stride,
        color);
  }

  pattern_shift =
      G_GUINT64_CONSTANT (1) << (videomark->pattern_data_count - 1);

  /* get the data of the pattern */
  for (i = 0; i < videomark->pattern_data_count; i++) {
    d = data + offset;
    /* move to start of bottom left, adjust for offsets */
    d += row_stride * (height - ph - videomark->bottom_offset) +
        pixel_stride * videomark->left_offset;
    /* move after the fixed pattern */
    d += pixel_stride * videomark->pattern_count * pw;
    /* move to i-th pattern data */
    d += pixel_stride * pw * i;

    if (videomark->pattern_data & pattern_shift)
      color = 255;
    else
      color = 0;

    gst_video_mark_draw_box (videomark, d, pw, ph, row_stride, pixel_stride,
        color);

    pattern_shift >>= 1;
  }

  return GST_FLOW_OK;
}
Developer ID: ChinnaSuhas, Project: ossbuild, Lines of code: 79
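Example 2 relies on the usual video addressing arithmetic: for component 0, the pixel at column x and row y lives at data + component_offset + y * row_stride + x * pixel_stride, which is exactly how the d pointer is advanced above. A tiny helper spelling that out (illustrative only, not part of the example; GStreamer 0.10 assumed):

/* Sketch: address one pixel of component 0 in a frame whose payload
 * starts at GST_BUFFER_DATA (buffer). Illustrative helper only. */
static guint8 *
pixel_ptr (guint8 * data, gint comp_offset, gint row_stride,
    gint pixel_stride, gint x, gint y)
{
  return data + comp_offset + y * row_stride + x * pixel_stride;
}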
Example 3: glfwMakeContextCurrent

bool nvxio::GStreamerBaseRenderImpl::flush()
{
    if (!pipeline)
        return false;

    glfwMakeContextCurrent(window_);

    if (glfwWindowShouldClose(window_))
        return false;

    gl_->PixelStorei(GL_PACK_ALIGNMENT, 1);
    gl_->PixelStorei(GL_PACK_ROW_LENGTH, wndWidth_);

    {
        GstClockTime duration = GST_SECOND / (double)GSTREAMER_DEFAULT_FPS;
        GstClockTime timestamp = num_frames * duration;

#if GST_VERSION_MAJOR == 0
        GstBuffer * buffer = gst_buffer_try_new_and_alloc(wndHeight_ * wndWidth_ * 4);
        if (!buffer)
        {
            NVXIO_PRINT("Cannot create GStreamer buffer");
            FinalizeGStreamerPipeline();
            return false;
        }

        gl_->ReadPixels(0, 0, wndWidth_, wndHeight_, GL_RGBA, GL_UNSIGNED_BYTE,
                        GST_BUFFER_DATA (buffer));

        GST_BUFFER_TIMESTAMP(buffer) = timestamp;
        if (!GST_BUFFER_TIMESTAMP_IS_VALID(buffer))
            NVXIO_PRINT("Failed to setup timestamp");
#else
        GstBuffer * buffer = gst_buffer_new_allocate(NULL, wndHeight_ * wndWidth_ * 4, NULL);

        GstMapInfo info;
        gst_buffer_map(buffer, &info, GST_MAP_READ);
        gl_->ReadPixels(0, 0, wndWidth_, wndHeight_, GL_RGBA, GL_UNSIGNED_BYTE, info.data);
        gst_buffer_unmap(buffer, &info);

        GST_BUFFER_PTS(buffer) = timestamp;
        if (!GST_BUFFER_PTS_IS_VALID(buffer))
            NVXIO_PRINT("Failed to setup PTS");

        GST_BUFFER_DTS(buffer) = timestamp;
        if (!GST_BUFFER_DTS_IS_VALID(buffer))
            NVXIO_PRINT("Failed to setup DTS");
#endif

        GST_BUFFER_DURATION(buffer) = duration;
        if (!GST_BUFFER_DURATION_IS_VALID(buffer))
            NVXIO_PRINT("Failed to setup duration");

        GST_BUFFER_OFFSET(buffer) = num_frames++;
        if (!GST_BUFFER_OFFSET_IS_VALID(buffer))
            NVXIO_PRINT("Failed to setup offset");

        if (gst_app_src_push_buffer(appsrc, buffer) != GST_FLOW_OK)
        {
            NVXIO_PRINT("Error pushing buffer to GStreamer pipeline");
            FinalizeGStreamerPipeline();
            return false;
        }
    }

    // reset state
    gl_->PixelStorei(GL_PACK_ALIGNMENT, 4);
    gl_->PixelStorei(GL_PACK_ROW_LENGTH, 0);

    glfwSwapBuffers(window_);
    clearGlBuffer();

    return true;
}
Developer ID: neariot, Project: sfm, Lines of code: 73
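Example 3 doubles as a porting reference: under GStreamer 1.0 the GST_BUFFER_DATA macro no longer exists and buffer memory has to be mapped explicitly. A minimal sketch of that write path (GStreamer 1.0 assumed; the function name is illustrative and this is not the example's exact code):

#include <gst/gst.h>
#include <string.h>

/* Sketch: writing into a GstBuffer under GStreamer 1.0, where
 * gst_buffer_map()/gst_buffer_unmap() replace GST_BUFFER_DATA. */
static GstBuffer *
make_buffer_1_0 (gsize size)
{
  GstBuffer *buf = gst_buffer_new_allocate (NULL, size, NULL);
  GstMapInfo info;

  if (gst_buffer_map (buf, &info, GST_MAP_WRITE)) {
    memset (info.data, 0, info.size);   /* info.data plays the role of GST_BUFFER_DATA */
    gst_buffer_unmap (buf, &info);
  }
  return buf;
}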
Example 4: gst_segment_clip

HRESULT AudioFakeSink::DoRenderSample(IMediaSample *pMediaSample)
{
  GstBuffer *out_buf = NULL;
  gboolean in_seg = FALSE;
  GstClockTime buf_start, buf_stop;
  gint64 clip_start = 0, clip_stop = 0;
  guint start_offset = 0, stop_offset;
  GstClockTime duration;

  if(pMediaSample)
  {
    BYTE *pBuffer = NULL;
    LONGLONG lStart = 0, lStop = 0;
    long size = pMediaSample->GetActualDataLength();

    pMediaSample->GetPointer(&pBuffer);
    pMediaSample->GetTime(&lStart, &lStop);

    if (!GST_CLOCK_TIME_IS_VALID (mDec->timestamp)) {
      // Convert REFERENCE_TIME to GST_CLOCK_TIME
      mDec->timestamp = (GstClockTime)lStart * 100;
    }
    duration = (lStop - lStart) * 100;

    buf_start = mDec->timestamp;
    buf_stop = mDec->timestamp + duration;

    /* save stop position to start next buffer with it */
    mDec->timestamp = buf_stop;

    /* check if this buffer is in our current segment */
    in_seg = gst_segment_clip (mDec->segment, GST_FORMAT_TIME,
        buf_start, buf_stop, &clip_start, &clip_stop);

    /* if the buffer is out of segment do not push it downstream */
    if (!in_seg) {
      GST_DEBUG_OBJECT (mDec,
          "buffer is out of segment, start %" GST_TIME_FORMAT " stop %"
          GST_TIME_FORMAT, GST_TIME_ARGS (buf_start), GST_TIME_ARGS (buf_stop));
      goto done;
    }

    /* buffer is entirely or partially in-segment, so allocate a
     * GstBuffer for output, and clip if required */

    /* allocate a new buffer for raw audio */
    mDec->last_ret = gst_pad_alloc_buffer (mDec->srcpad,
        GST_BUFFER_OFFSET_NONE,
        size,
        GST_PAD_CAPS (mDec->srcpad), &out_buf);
    if (!out_buf) {
      GST_WARNING_OBJECT (mDec, "cannot allocate a new GstBuffer");
      goto done;
    }

    /* set buffer properties */
    GST_BUFFER_TIMESTAMP (out_buf) = buf_start;
    GST_BUFFER_DURATION (out_buf) = duration;
    memcpy (GST_BUFFER_DATA (out_buf), pBuffer,
        MIN ((unsigned int)size, GST_BUFFER_SIZE (out_buf)));

    /* we have to remove some heading samples */
    if ((GstClockTime) clip_start > buf_start) {
      start_offset = (guint)gst_util_uint64_scale_int (clip_start - buf_start,
          mDec->rate, GST_SECOND) * mDec->depth / 8 * mDec->channels;
    }
    else
      start_offset = 0;
    /* we have to remove some trailing samples */
    if ((GstClockTime) clip_stop < buf_stop) {
      stop_offset = (guint)gst_util_uint64_scale_int (buf_stop - clip_stop,
          mDec->rate, GST_SECOND) * mDec->depth / 8 * mDec->channels;
    }
    else
      stop_offset = size;

    /* truncating */
    if ((start_offset != 0) || (stop_offset != (size_t) size)) {
      GstBuffer *subbuf = gst_buffer_create_sub (out_buf, start_offset,
          stop_offset - start_offset);

      if (subbuf) {
        gst_buffer_set_caps (subbuf, GST_PAD_CAPS (mDec->srcpad));
        gst_buffer_unref (out_buf);
        out_buf = subbuf;
      }
    }

    GST_BUFFER_TIMESTAMP (out_buf) = clip_start;
    GST_BUFFER_DURATION (out_buf) = clip_stop - clip_start;

    /* replace the saved stop position by the clipped one */
    mDec->timestamp = clip_stop;

    GST_DEBUG_OBJECT (mDec,
        "push_buffer (size %d)=> pts %" GST_TIME_FORMAT " stop %" GST_TIME_FORMAT
        " duration %" GST_TIME_FORMAT, size,
        GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (out_buf)),
        GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (out_buf) +
            GST_BUFFER_DURATION (out_buf)),
//......... some code omitted here .........
Developer ID: spunktsch, Project: svtplayer, Lines of code: 101
Example 5: data_proc

static OSErr
data_proc (SGChannel c, Ptr p, long len, long * offset, long chRefCon,
    TimeValue time, short writeType, long refCon)
{
  GstOSXVideoSrc * self;
  gint fps_n, fps_d;
  GstClockTime duration, timestamp, latency;
  CodecFlags flags;
  ComponentResult err;
  PixMapHandle hPixMap;
  Rect portRect;
  int pix_rowBytes;
  void *pix_ptr;
  int pix_height;
  int pix_size;

  self = GST_OSX_VIDEO_SRC (refCon);

  if (self->buffer != NULL) {
    gst_buffer_unref (self->buffer);
    self->buffer = NULL;
  }

  err = DecompressSequenceFrameS (self->dec_seq, p, len, 0, &flags, NULL);
  if (err != noErr) {
    GST_ERROR_OBJECT (self, "DecompressSequenceFrameS returned %d", (int) err);
    return err;
  }

  hPixMap = GetGWorldPixMap (self->world);
  LockPixels (hPixMap);
  GetPortBounds (self->world, &portRect);
  pix_rowBytes = (int) GetPixRowBytes (hPixMap);
  pix_ptr = GetPixBaseAddr (hPixMap);
  pix_height = (portRect.bottom - portRect.top);
  pix_size = pix_rowBytes * pix_height;

  GST_DEBUG_OBJECT (self, "num=%5d, height=%d, rowBytes=%d, size=%d",
      self->seq_num, pix_height, pix_rowBytes, pix_size);

  fps_n = FRAMERATE;
  fps_d = 1;

  duration = gst_util_uint64_scale_int (GST_SECOND, fps_d, fps_n);
  latency = duration;

  timestamp = gst_clock_get_time (GST_ELEMENT_CAST (self)->clock);
  timestamp -= gst_element_get_base_time (GST_ELEMENT_CAST (self));
  if (timestamp > latency)
    timestamp -= latency;
  else
    timestamp = 0;

  self->buffer = gst_buffer_new_and_alloc (pix_size);
  GST_BUFFER_OFFSET (self->buffer) = self->seq_num;
  GST_BUFFER_TIMESTAMP (self->buffer) = timestamp;
  memcpy (GST_BUFFER_DATA (self->buffer), pix_ptr, pix_size);

  self->seq_num++;

  UnlockPixels (hPixMap);

  return noErr;
}
Developer ID: eta-im-dev, Project: media, Lines of code: 64
Example 6: gst_scene_change_filter_ip_I420

static GstFlowReturn
gst_scene_change_filter_ip_I420 (GstVideoFilter2 * videofilter2,
    GstBuffer * buf, int start, int end)
{
  GstSceneChange *scenechange;
  double score_min;
  double score_max;
  double threshold;
  double score;
  gboolean change;
  int i;
  int width;
  int height;

  g_return_val_if_fail (GST_IS_SCENE_CHANGE (videofilter2), GST_FLOW_ERROR);
  scenechange = GST_SCENE_CHANGE (videofilter2);

  width = GST_VIDEO_FILTER2_WIDTH (videofilter2);
  height = GST_VIDEO_FILTER2_HEIGHT (videofilter2);

  if (!scenechange->oldbuf) {
    scenechange->n_diffs = 0;
    memset (scenechange->diffs, 0, sizeof (double) * SC_N_DIFFS);
    scenechange->oldbuf = gst_buffer_ref (buf);
    return GST_FLOW_OK;
  }

  score = get_frame_score (GST_BUFFER_DATA (scenechange->oldbuf),
      GST_BUFFER_DATA (buf), width, height);

  memmove (scenechange->diffs, scenechange->diffs + 1,
      sizeof (double) * (SC_N_DIFFS - 1));
  scenechange->diffs[SC_N_DIFFS - 1] = score;
  scenechange->n_diffs++;

  gst_buffer_unref (scenechange->oldbuf);
  scenechange->oldbuf = gst_buffer_ref (buf);

  score_min = scenechange->diffs[0];
  score_max = scenechange->diffs[0];
  for (i = 1; i < SC_N_DIFFS - 1; i++) {
    score_min = MIN (score_min, scenechange->diffs[i]);
    score_max = MAX (score_max, scenechange->diffs[i]);
  }

  threshold = 1.8 * score_max - 0.8 * score_min;

  if (scenechange->n_diffs > 2) {
    if (score < 5) {
      change = FALSE;
    } else if (score / threshold < 1.0) {
      change = FALSE;
    } else if (score / threshold > 2.5) {
      change = TRUE;
    } else if (score > 50) {
      change = TRUE;
    } else {
      change = FALSE;
    }
  } else {
    change = FALSE;
  }

#ifdef TESTING
  if (change != is_shot_change (scenechange->n_diffs)) {
    g_print ("%d %g %g %g %d\n", scenechange->n_diffs, score / threshold,
        score, threshold, change);
  }
#endif

  if (change) {
    GstEvent *event;

    GST_DEBUG_OBJECT (scenechange, "%d %g %g %g %d",
        scenechange->n_diffs, score / threshold, score, threshold, change);

    event =
        gst_event_new_custom (GST_EVENT_CUSTOM_DOWNSTREAM,
        gst_structure_new ("GstForceKeyUnit", NULL));

    gst_pad_push_event (GST_BASE_TRANSFORM_SRC_PAD (scenechange), event);
  }

  return GST_FLOW_OK;
}
Developer ID: collects, Project: gst-plugins-bad, Lines of code: 84
Example 7: gst_devsound_src_create

static GstFlowReturn gst_devsound_src_create(GstBaseSrc *src, guint64 offset,
        guint size, GstBuffer **buf)
{
    GstDevsoundSrc *dsrc= GST_DEVSOUND_SRC(src);
    int bufferpos=0;
    int ret = KErrNone;

    if(!g_queue_get_length(dataqueue) && (dsrc->eosreceived == TRUE))
    {
        pthread_mutex_lock(&(create_mutex1));
        pthread_cond_signal(&(create_condition1));
        pthread_mutex_unlock(&(create_mutex1));
        post_symbian_error( src,KErrCancel );
        return GST_FLOW_UNEXPECTED;
    }

    //gst_debug_log(devsound_debug, GST_LEVEL_LOG, "", "", 0, (GObject *) dsrc, "gst_devsound_src_create ENTER ");
    //gst_debug_log(devsound_debug, GST_LEVEL_LOG, "", "", 0, (GObject *) dsrc, "Before Buffer Alloc in CREATE ",NULL);
    *buf = gst_buffer_try_new_and_alloc(size);
    //gst_debug_log(devsound_debug, GST_LEVEL_LOG, "", "", 0, (GObject *) dsrc, "AFter Buffer Alloc in CREATE ",NULL);

    if(*buf == NULL)
    {
        post_symbian_error( src,KErrNoMemory );
        return GST_FLOW_UNEXPECTED;
    }

    while (size > 0)
    {
        if (dataleft >= size)
        {
            // if there is some data left in the popped buffer previously whose size
            // is more then the buffer which is incoming fresh to get filled, fill it
            //here. and if the data left in the popped buffer is 0, then unref it
            //gst_debug_log(devsound_debug, GST_LEVEL_LOG, "", "", 0, (GObject *) dsrc, "dataleft >=size in CREATE ", NULL);
            memcpy(GST_BUFFER_DATA(*buf)+bufferpos,GST_BUFFER_DATA(popBuffer)+dataCopied,size);
            bufferpos+=size;
            dataCopied += size;
            dataleft = GST_BUFFER_SIZE(popBuffer) - dataCopied;
            size = 0;
            if (dataleft == 0)
            {
                dataCopied = 0;
                gst_buffer_unref(popBuffer);
                popBuffer = NULL;
            }
        }
        else
        {
            // if the dataleft in the popped buffer is greater then 0 and less then
            // the size of data needed for the fresh buffer. copy the remaining data
            // from the popped buffer and then unref it.
            if (dataleft > 0)
            {
                //gst_debug_log(devsound_debug, GST_LEVEL_LOG, "", "", 0, (GObject *) dsrc, "dataleft >0 in CREATE ",NULL);
                memcpy(GST_BUFFER_DATA(*buf)+bufferpos,GST_BUFFER_DATA(popBuffer)+dataCopied,dataleft);
                size -= dataleft;
                bufferpos += dataleft;
                dataCopied = 0;
                dataleft = 0;
                gst_buffer_unref(popBuffer);
                popBuffer = NULL;
            }

            // we wait here if the dataqueue length is 0 and we need data
            // to be filled in the queue from the DevSound Thread
            if (!g_queue_get_length(dataqueue))
            {
                //gst_debug_log(devsound_debug, GST_LEVEL_LOG, "", "", 0, (GObject *) dsrc, "Before WAIT in CREATE ",NULL);
                if(dsrc->eosreceived == TRUE)
                {
                    post_symbian_error( src,KErrCancel );
                    return GST_FLOW_UNEXPECTED;
                }
                else
                {
                    cmd = RECORDING;
                    return_error = KErrNone;
                    pthread_mutex_lock(&(create_mutex1));
                    pthread_cond_signal(&(create_condition1));
                    pthread_mutex_unlock(&(create_mutex1));

                    pthread_mutex_lock(&(create_mutex1));
                    pthread_cond_wait(&(create_condition1), &(create_mutex1));
                    ret = return_error;
                    pthread_mutex_unlock(&(create_mutex1));
                }
                //gst_debug_log(devsound_debug, GST_LEVEL_LOG, "", "", 0, (GObject *) dsrc, "AFTER WAIT in CREATE ",NULL);
            }

            if( ret )
            {
                post_symbian_error( src,ret );
                return GST_FLOW_UNEXPECTED;
            }

            //gst_debug_log(devsound_debug, GST_LEVEL_LOG, "", "", 0, (GObject *) dsrc, "Before POP in CREATE ",NULL);
            GST_OBJECT_LOCK(dsrc);
            popBuffer = (GstBuffer*)g_queue_pop_tail(dataqueue);
            GST_OBJECT_UNLOCK(dsrc);

            if(!popBuffer )
//......... some code omitted here .........
Developer ID: kuailexs, Project: symbiandump-mw1, Lines of code: 101
Example 8: gst_identity_transform_ip

static GstFlowReturn
gst_identity_transform_ip (GstBaseTransform * trans, GstBuffer * buf)
{
  GstFlowReturn ret = GST_FLOW_OK;
  GstIdentity *identity = GST_IDENTITY (trans);
  GstClockTime runtimestamp = G_GINT64_CONSTANT (0);

  if (identity->check_perfect)
    gst_identity_check_perfect (identity, buf);
  if (identity->check_imperfect_timestamp)
    gst_identity_check_imperfect_timestamp (identity, buf);
  if (identity->check_imperfect_offset)
    gst_identity_check_imperfect_offset (identity, buf);

  /* update prev values */
  identity->prev_timestamp = GST_BUFFER_TIMESTAMP (buf);
  identity->prev_duration = GST_BUFFER_DURATION (buf);
  identity->prev_offset_end = GST_BUFFER_OFFSET_END (buf);
  identity->prev_offset = GST_BUFFER_OFFSET (buf);

  if (identity->error_after >= 0) {
    identity->error_after--;
    if (identity->error_after == 0) {
      GST_ELEMENT_ERROR (identity, CORE, FAILED,
          (_("Failed after iterations as requested.")), (NULL));
      return GST_FLOW_ERROR;
    }
  }

  if (identity->drop_probability > 0.0) {
    if ((gfloat) (1.0 * rand () / (RAND_MAX)) < identity->drop_probability) {
      if (!identity->silent) {
        GST_OBJECT_LOCK (identity);
        g_free (identity->last_message);
        identity->last_message =
            g_strdup_printf ("dropping ******* (%s:%s)i (%d bytes, timestamp: %"
            GST_TIME_FORMAT ", duration: %" GST_TIME_FORMAT ", offset: %"
            G_GINT64_FORMAT ", offset_end: % " G_GINT64_FORMAT
            ", flags: %d) %p", GST_DEBUG_PAD_NAME (trans->sinkpad),
            GST_BUFFER_SIZE (buf), GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)),
            GST_TIME_ARGS (GST_BUFFER_DURATION (buf)),
            GST_BUFFER_OFFSET (buf), GST_BUFFER_OFFSET_END (buf),
            GST_BUFFER_FLAGS (buf), buf);
        GST_OBJECT_UNLOCK (identity);
        gst_identity_notify_last_message (identity);
      }
      /* return DROPPED to basetransform. */
      return GST_BASE_TRANSFORM_FLOW_DROPPED;
    }
  }

  if (identity->dump) {
    gst_util_dump_mem (GST_BUFFER_DATA (buf), GST_BUFFER_SIZE (buf));
  }

  if (!identity->silent) {
    GST_OBJECT_LOCK (identity);
    g_free (identity->last_message);
    identity->last_message =
        g_strdup_printf ("chain ******* (%s:%s)i (%d bytes, timestamp: %"
        GST_TIME_FORMAT ", duration: %" GST_TIME_FORMAT ", offset: %"
        G_GINT64_FORMAT ", offset_end: % " G_GINT64_FORMAT
        ", flags: %d) %p", GST_DEBUG_PAD_NAME (trans->sinkpad),
        GST_BUFFER_SIZE (buf), GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)),
        GST_TIME_ARGS (GST_BUFFER_DURATION (buf)),
        GST_BUFFER_OFFSET (buf), GST_BUFFER_OFFSET_END (buf),
        GST_BUFFER_FLAGS (buf), buf);
    GST_OBJECT_UNLOCK (identity);
    gst_identity_notify_last_message (identity);
  }

  if (identity->datarate > 0) {
    GstClockTime time = gst_util_uint64_scale_int (identity->offset,
        GST_SECOND, identity->datarate);

    GST_BUFFER_TIMESTAMP (buf) = time;
    GST_BUFFER_DURATION (buf) =
        GST_BUFFER_SIZE (buf) * GST_SECOND / identity->datarate;
  }

  if (identity->signal_handoffs)
    g_signal_emit (identity, gst_identity_signals[SIGNAL_HANDOFF], 0, buf);

  if (trans->segment.format == GST_FORMAT_TIME)
    runtimestamp = gst_segment_to_running_time (&trans->segment,
        GST_FORMAT_TIME, GST_BUFFER_TIMESTAMP (buf));

  if ((identity->sync) && (trans->segment.format == GST_FORMAT_TIME)) {
    GstClock *clock;

    GST_OBJECT_LOCK (identity);
    if ((clock = GST_ELEMENT (identity)->clock)) {
      GstClockReturn cret;
      GstClockTime timestamp;

      timestamp = runtimestamp + GST_ELEMENT (identity)->base_time;

      /* save id if we need to unlock */
      /* FIXME: actually unlock this somewhere in the state changes */
      identity->clock_id = gst_clock_new_single_shot_id (clock, timestamp);
//......... some code omitted here .........
Developer ID: spunktsch, Project: svtplayer, Lines of code: 101
Example 9: gst_slvideo_buffer_alloc

static GstFlowReturn
gst_slvideo_buffer_alloc (GstBaseSink * bsink, guint64 offset, guint size,
    GstCaps * caps, GstBuffer ** buf)
{
  gint width, height;
  GstStructure *structure = NULL;
  GstSLVideo *slvideo;
  slvideo = GST_SLVIDEO(bsink);

  // caps == requested caps
  // we can ignore these and reverse-negotiate our preferred dimensions with
  // the peer if we like - we need to do this to obey dynamic resize requests
  // flowing in from the app.
  structure = llgst_caps_get_structure (caps, 0);
  if (!llgst_structure_get_int(structure, "width", &width) ||
      !llgst_structure_get_int(structure, "height", &height))
  {
    GST_WARNING_OBJECT (slvideo, "no width/height in caps %" GST_PTR_FORMAT, caps);
    return GST_FLOW_NOT_NEGOTIATED;
  }

  GstBuffer *newbuf = llgst_buffer_new();
  bool made_bufferdata_ptr = false;
#define MAXDEPTHHACK 4

  GST_OBJECT_LOCK(slvideo);
  if (slvideo->resize_forced_always) // app is giving us a fixed size to work with
  {
    gint slwantwidth, slwantheight;
    slwantwidth = slvideo->resize_try_width;
    slwantheight = slvideo->resize_try_height;

    if (slwantwidth != width || slwantheight != height)
    {
      // don't like requested caps, we will issue our own suggestion - copy
      // the requested caps but substitute our own width and height and see
      // if our peer is happy with that.
      GstCaps *desired_caps;
      GstStructure *desired_struct;
      desired_caps = llgst_caps_copy (caps);
      desired_struct = llgst_caps_get_structure (desired_caps, 0);

      GValue value = {0};
      g_value_init(&value, G_TYPE_INT);
      g_value_set_int(&value, slwantwidth);
      llgst_structure_set_value (desired_struct, "width", &value);
      g_value_unset(&value);
      g_value_init(&value, G_TYPE_INT);
      g_value_set_int(&value, slwantheight);
      llgst_structure_set_value (desired_struct, "height", &value);

      if (llgst_pad_peer_accept_caps (GST_VIDEO_SINK_PAD (slvideo),
                                      desired_caps))
      {
        // todo: re-use buffers from a pool?
        // todo: set MALLOCDATA to null, set DATA to point straight to shm?

        // peer likes our cap suggestion
        DEBUGMSG("peer loves us :)");
        GST_BUFFER_SIZE(newbuf) = slwantwidth * slwantheight * MAXDEPTHHACK;
        GST_BUFFER_MALLOCDATA(newbuf) = (guint8*)g_malloc(GST_BUFFER_SIZE(newbuf));
        GST_BUFFER_DATA(newbuf) = GST_BUFFER_MALLOCDATA(newbuf);
        llgst_buffer_set_caps (GST_BUFFER_CAST(newbuf), desired_caps);

        made_bufferdata_ptr = true;
      } else {
        // peer hates our cap suggestion
        INFOMSG("peer hates us :(");
        llgst_caps_unref(desired_caps);
      }
    }
  }
  GST_OBJECT_UNLOCK(slvideo);

  if (!made_bufferdata_ptr) // need to fallback to malloc at original size
  {
    GST_BUFFER_SIZE(newbuf) = width * height * MAXDEPTHHACK;
    GST_BUFFER_MALLOCDATA(newbuf) = (guint8*)g_malloc(GST_BUFFER_SIZE(newbuf));
    GST_BUFFER_DATA(newbuf) = GST_BUFFER_MALLOCDATA(newbuf);
    llgst_buffer_set_caps (GST_BUFFER_CAST(newbuf), caps);
  }

  *buf = GST_BUFFER_CAST(newbuf);

  return GST_FLOW_OK;
}
Developer ID: Belxjander, Project: Kirito, Lines of code: 89
Example 10: gst_amrwbenc_chain

static GstFlowReturn
gst_amrwbenc_chain (GstPad * pad, GstBuffer * buffer)
{
  GstAmrwbEnc *amrwbenc;
  GstFlowReturn ret = GST_FLOW_OK;
  const int buffer_size = sizeof (Word16) * L_FRAME16k;

  amrwbenc = GST_AMRWBENC (gst_pad_get_parent (pad));

  g_return_val_if_fail (amrwbenc->handle, GST_FLOW_WRONG_STATE);

  if (amrwbenc->rate == 0 || amrwbenc->channels == 0) {
    ret = GST_FLOW_NOT_NEGOTIATED;
    goto done;
  }

  /* discontinuity clears adapter, FIXME, maybe we can set some
   * encoder flag to mask the discont. */
  if (GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_DISCONT)) {
    gst_adapter_clear (amrwbenc->adapter);
    amrwbenc->ts = 0;
    amrwbenc->discont = TRUE;
  }

  if (GST_BUFFER_TIMESTAMP_IS_VALID (buffer))
    amrwbenc->ts = GST_BUFFER_TIMESTAMP (buffer);

  ret = GST_FLOW_OK;

  gst_adapter_push (amrwbenc->adapter, buffer);

  /* Collect samples until we have enough for an output frame */
  while (gst_adapter_available (amrwbenc->adapter) >= buffer_size) {
    GstBuffer *out;
    guint8 *data;
    gint outsize;

    out = gst_buffer_new_and_alloc (buffer_size);
    GST_BUFFER_DURATION (out) = GST_SECOND * L_FRAME16k /
        (amrwbenc->rate * amrwbenc->channels);
    GST_BUFFER_TIMESTAMP (out) = amrwbenc->ts;
    if (amrwbenc->ts != -1) {
      amrwbenc->ts += GST_BUFFER_DURATION (out);
    }
    if (amrwbenc->discont) {
      GST_BUFFER_FLAG_SET (out, GST_BUFFER_FLAG_DISCONT);
      amrwbenc->discont = FALSE;
    }
    gst_buffer_set_caps (out, gst_pad_get_caps (amrwbenc->srcpad));

    data = (guint8 *) gst_adapter_peek (amrwbenc->adapter, buffer_size);

    /* encode */
    outsize = E_IF_encode (amrwbenc->handle, amrwbenc->bandmode,
        (Word16 *) data, (UWord8 *) GST_BUFFER_DATA (out), 0);

    gst_adapter_flush (amrwbenc->adapter, buffer_size);
    GST_BUFFER_SIZE (out) = outsize;

    /* play */
    if ((ret = gst_pad_push (amrwbenc->srcpad, out)) != GST_FLOW_OK)
      break;
  }

done:

  gst_object_unref (amrwbenc);

  return ret;
}
Developer ID: JJCG, Project: gst-plugins-bad, Lines of code: 70
Example 11: gst_ape_demux_parse_tag

static GstTagDemuxResult
gst_ape_demux_parse_tag (GstTagDemux * demux, GstBuffer * buffer,
    gboolean start_tag, guint * tag_size, GstTagList ** tags)
{
  const guint8 *data;
  const guint8 *footer;
  gboolean have_header;
  gboolean end_tag = !start_tag;
  GstCaps *sink_caps;
  guint version, footer_size;

  GST_LOG_OBJECT (demux, "Parsing buffer of size %u", GST_BUFFER_SIZE (buffer));

  data = GST_BUFFER_DATA (buffer);
  footer = GST_BUFFER_DATA (buffer) + GST_BUFFER_SIZE (buffer) - 32;

  GST_LOG_OBJECT (demux, "Checking for footer at offset 0x%04x",
      (guint) (footer - data));
  if (footer > data && memcmp (footer, "APETAGEX", 8) == 0) {
    GST_DEBUG_OBJECT (demux, "Found footer");
    footer_size = 32;
  } else {
    GST_DEBUG_OBJECT (demux, "No footer");
    footer_size = 0;
  }

  /* APE tags at the end must have a footer */
  if (end_tag && footer_size == 0) {
    GST_WARNING_OBJECT (demux, "Tag at end of file without footer!");
    return GST_TAG_DEMUX_RESULT_BROKEN_TAG;
  }

  /* don't trust the header/footer flags, better detect them ourselves */
  have_header = (memcmp (data, "APETAGEX", 8) == 0);

  if (start_tag && !have_header) {
    GST_DEBUG_OBJECT (demux, "Tag at beginning of file without header!");
    return GST_TAG_DEMUX_RESULT_BROKEN_TAG;
  }

  if (end_tag && !have_header) {
    GST_DEBUG_OBJECT (demux, "Tag at end of file has no header (APEv1)");
    *tag_size -= 32;            /* adjust tag size */
  }

  if (have_header) {
    version = GST_READ_UINT32_LE (data + 8);
  } else {
    version = GST_READ_UINT32_LE (footer + 8);
  }

  /* skip header */
  if (have_header) {
    data += 32;
  }

  GST_DEBUG_OBJECT (demux, "APE tag with version %u, size %u at offset 0x%08"
      G_GINT64_MODIFIER "x", version, *tag_size,
      GST_BUFFER_OFFSET (buffer) + ((have_header) ? 0 : 32));

  if (APE_VERSION_MAJOR (version) != 1 && APE_VERSION_MAJOR (version) != 2) {
    GST_WARNING ("APE tag is version %u.%03u, but decoder only supports "
        "v1 or v2. Ignoring.", APE_VERSION_MAJOR (version), version % 1000);
    return GST_TAG_DEMUX_RESULT_OK;
  }

  *tags = ape_demux_parse_tags (data, *tag_size - footer_size);

  sink_caps = gst_static_pad_template_get_caps (&sink_factory);
  gst_pb_utils_add_codec_description_to_tag_list (*tags,
      GST_TAG_CONTAINER_FORMAT, sink_caps);
  gst_caps_unref (sink_caps);

  return GST_TAG_DEMUX_RESULT_OK;
}
Developer ID: TheBigW, Project: gst-plugins-good, Lines of code: 75
Example 12: gst_base_rtp_audio_payload_push_buffer

static GstFlowReturn
gst_base_rtp_audio_payload_push_buffer (GstBaseRTPAudioPayload *
    baseaudiopayload, GstBuffer * buffer)
{
  GstBaseRTPPayload *basepayload;
  GstBaseRTPAudioPayloadPrivate *priv;
  GstBuffer *outbuf;
  GstClockTime timestamp;
  guint8 *payload;
  guint payload_len;
  GstFlowReturn ret;

  priv = baseaudiopayload->priv;
  basepayload = GST_BASE_RTP_PAYLOAD (baseaudiopayload);

  payload_len = GST_BUFFER_SIZE (buffer);
  timestamp = GST_BUFFER_TIMESTAMP (buffer);

  GST_DEBUG_OBJECT (baseaudiopayload, "Pushing %d bytes ts %" GST_TIME_FORMAT,
      payload_len, GST_TIME_ARGS (timestamp));

  if (priv->buffer_list) {
    /* create just the RTP header buffer */
    outbuf = gst_rtp_buffer_new_allocate (0, 0, 0);
  } else {
    /* create buffer to hold the payload */
    outbuf = gst_rtp_buffer_new_allocate (payload_len, 0, 0);
  }

  /* set metadata */
  gst_base_rtp_audio_payload_set_meta (baseaudiopayload, outbuf, payload_len,
      timestamp);

  if (priv->buffer_list) {
    GstBufferList *list;
    GstBufferListIterator *it;

    list = gst_buffer_list_new ();
    it = gst_buffer_list_iterate (list);

    /* add both buffers to the buffer list */
    gst_buffer_list_iterator_add_group (it);
    gst_buffer_list_iterator_add (it, outbuf);
    gst_buffer_list_iterator_add (it, buffer);

    gst_buffer_list_iterator_free (it);

    GST_DEBUG_OBJECT (baseaudiopayload, "Pushing list %p", list);
    ret = gst_basertppayload_push_list (basepayload, list);
  } else {
    /* copy payload */
    payload = gst_rtp_buffer_get_payload (outbuf);
    memcpy (payload, GST_BUFFER_DATA (buffer), payload_len);
    gst_buffer_unref (buffer);

    GST_DEBUG_OBJECT (baseaudiopayload, "Pushing buffer %p", outbuf);
    ret = gst_basertppayload_push (basepayload, outbuf);
  }

  return ret;
}
Developer ID: genesi, Project: gst-base-plugins, Lines of code: 61
Example 13: gst_pgmdec_chain

static GstFlowReturn
gst_pgmdec_chain (GstPad * pad, GstBuffer * in)
{
  GstFlowReturn ret = GST_FLOW_OK;
  GstBuffer *out;
  Gstpgmdec *filter = GST_PGMDEC (GST_OBJECT_PARENT (pad));
  //GstCaps *caps = GST_PAD_CAPS (filter->srcpad);
  GstCaps *caps;
  guint byts=0;
  gchar line[4][20];
  //const gchar *l;
  gint i;
  //GstStructure *structure = gst_caps_get_structure (caps, 0);
  guint8 *outbuf;
  guint8 *inbuf;

  //Get pgm header
  if(!GST_BUFFER_OFFSET(in)){
    //filter->buff = in;
    inbuf = (guint8 *) GST_BUFFER_DATA (in);
    byts = sscanf(inbuf, "%s%s%s%s", line[0], line[1], line[2], line[3]);
    if (strcmp(line[0], "P5") != 0) {
      GST_WARNING ("It's not PGM file");
      return FALSE;
    }
    filter->width = atoi(line[1]);
    filter->height = atoi(line[2]);
    filter->bpp = (atoi(line[3]) > 256) ? 16 : 8;
    for(i=0; i<4; i++) byts += strlen(line[i]);
    filter->size = (filter->bpp == 8) ? filter->width*filter->height : filter->width*filter->height*2;
    //gst_buffer_set_data(filter->buff, &inbuf[byts], GST_BUFFER_SIZE(in)-byts);
    //filter->buff = &inbuf[byts];
    GST_DEBUG_OBJECT (filter, "The file type is : %s width = %d height = %d bpp = %d",
        line[0], filter->width, filter->height, filter->bpp);
    GST_DEBUG_OBJECT (filter, "DATA = %p SIZE = %d OFFSET = %d",
        GST_BUFFER_DATA(in), GST_BUFFER_SIZE(in), GST_BUFFER_OFFSET(in));
    filter->buff = gst_buffer_new_and_alloc(filter->size);
    for(i=0;i < GST_BUFFER_SIZE(in)-byts; i++)
      GST_BUFFER_DATA(filter->buff)[i] = GST_BUFFER_DATA(in)[byts+i];
    GST_BUFFER_OFFSET(filter->buff) = GST_BUFFER_SIZE(in)-byts;
    gst_buffer_unref(in);
    return GST_FLOW_OK;
    //gst_event_new_seek (1.0,
    //    GST_FORMAT_BYTES,
    //    GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_SEGMENT |GST_SEEK_FLAG_ACCURATE,
    //    GST_SEEK_TYPE_SET, byts,
    //    GST_SEEK_TYPE_SET, filter->size + byts);
  }

  //Check for the buffer size
  if(GST_BUFFER_OFFSET(filter->buff) < GST_BUFFER_SIZE(filter->buff)) {
    for(i=0; i < GST_BUFFER_SIZE(in); i++)
      GST_BUFFER_DATA(filter->buff)[GST_BUFFER_OFFSET(filter->buff) + i] = GST_BUFFER_DATA(in)[i];
    GST_BUFFER_OFFSET(filter->buff) += GST_BUFFER_SIZE(in);
    //GST_DEBUG_OBJECT (filter, "DATA = %p SIZE = %d OFFSET = %d",GST_BUFFER_DATA(filter->buff), GST_BUFFER_SIZE(filter->buff),GST_BUFFER_OFFSET(filter->buff));
    gst_buffer_unref(in);
    if(GST_BUFFER_OFFSET(filter->buff) != GST_BUFFER_SIZE(filter->buff))
      return GST_FLOW_OK;
  }

  GST_DEBUG_OBJECT (filter, "DATA = %p SIZE = %d OFFSET = %d",
      GST_BUFFER_DATA(filter->buff), GST_BUFFER_SIZE(filter->buff), GST_BUFFER_OFFSET(filter->buff));

  caps = gst_caps_new_simple ("video/x-raw-bayer",
      "width", G_TYPE_INT, filter->width,
      "height", G_TYPE_INT, filter->height,
      "bpp", G_TYPE_INT, filter->bpp,
      "framerate", GST_TYPE_FRACTION, 0, 1, NULL);
  gst_buffer_set_caps(filter->buff, caps);
  gst_pad_set_caps (filter->srcpad, caps);
  gst_pad_use_fixed_caps (filter->srcpad);
  gst_caps_unref (caps);

  /* just push out the incoming buffer without touching it */
  ret = gst_pad_push(filter->srcpad, filter->buff);
  return ret;
}
Developer ID: ladiko, Project: walet, Lines of code: 81
Example 14: start_play_tune

static gboolean
start_play_tune (GstNsfDec * nsfdec)
{
  gboolean res;

  nsfdec->nsf = nsf_load (NULL, GST_BUFFER_DATA (nsfdec->tune_buffer),
      GST_BUFFER_SIZE (nsfdec->tune_buffer));
  if (!nsfdec->nsf)
    goto could_not_load;

  if (!nsfdec_negotiate (nsfdec))
    goto could_not_negotiate;

  nsfdec->taglist = gst_tag_list_new ();
  gst_tag_list_add (nsfdec->taglist, GST_TAG_MERGE_REPLACE,
      GST_TAG_AUDIO_CODEC, "NES Sound Format", NULL);

  if (nsfdec->nsf->artist_name)
    gst_tag_list_add (nsfdec->taglist, GST_TAG_MERGE_REPLACE,
        GST_TAG_ARTIST, nsfdec->nsf->artist_name, NULL);

  if (nsfdec->nsf->song_name)
    gst_tag_list_add (nsfdec->taglist, GST_TAG_MERGE_REPLACE,
        GST_TAG_TITLE, nsfdec->nsf->song_name, NULL);

  gst_element_post_message (GST_ELEMENT_CAST (nsfdec),
      gst_message_new_tag (GST_OBJECT (nsfdec),
          gst_tag_list_copy (nsfdec->taglist)));

  nsf_playtrack (nsfdec->nsf,
      nsfdec->tune_number, nsfdec->frequency, nsfdec->bits, nsfdec->stereo);
  nsf_setfilter (nsfdec->nsf, nsfdec->filter);

  nsfdec->bps = (nsfdec->bits >> 3) * nsfdec->channels;
  /* calculate the number of bytes we need to output after each call to
   * nsf_frame(). */
  nsfdec->blocksize =
      nsfdec->bps * nsfdec->frequency / nsfdec->nsf->playback_rate;

  gst_pad_push_event (nsfdec->srcpad,
      gst_event_new_new_segment (FALSE, 1.0, GST_FORMAT_TIME, 0, -1, 0));

  res = gst_pad_start_task (nsfdec->srcpad,
      (GstTaskFunction) play_loop, nsfdec->srcpad, NULL);

  return res;

  /* ERRORS */
could_not_load:
  {
    GST_ELEMENT_ERROR (nsfdec, LIBRARY, INIT,
        ("Could not load tune"), ("Could not load tune"));
    return FALSE;
  }
could_not_negotiate:
  {
    GST_ELEMENT_ERROR (nsfdec, CORE, NEGOTIATION,
        ("Could not negotiate format"), ("Could not negotiate format"));
    return FALSE;
  }
}
Developer ID: lubing521, Project: gst-embedded-builder, Lines of code: 62
Example 15: gst_v4l2_buffer_new

static GstV4l2Buffer *
gst_v4l2_buffer_new (GstV4l2BufferPool * pool, guint index, GstCaps * caps)
{
  GstV4l2Buffer *ret;
  guint8 *data;

  ret = (GstV4l2Buffer *) gst_mini_object_new (GST_TYPE_V4L2_BUFFER);

  GST_LOG_OBJECT (pool->v4l2elem, "creating buffer %u, %p in pool %p", index,
      ret, pool);

  ret->pool =
      (GstV4l2BufferPool *) gst_mini_object_ref (GST_MINI_OBJECT (pool));

  ret->vbuffer.index = index;
  ret->vbuffer.type = pool->type;
  ret->vbuffer.memory = V4L2_MEMORY_MMAP;

  if (v4l2_ioctl (pool->video_fd, VIDIOC_QUERYBUF, &ret->vbuffer) < 0)
    goto querybuf_failed;

  GST_LOG_OBJECT (pool->v4l2elem, " index: %u", ret->vbuffer.index);
  GST_LOG_OBJECT (pool->v4l2elem, " type: %d", ret->vbuffer.type);
  GST_LOG_OBJECT (pool->v4l2elem, " bytesused: %u", ret->vbuffer.bytesused);
  GST_LOG_OBJECT (pool->v4l2elem, " flags: %08x", ret->vbuffer.flags);
  GST_LOG_OBJECT (pool->v4l2elem, " field: %d", ret->vbuffer.field);
  GST_LOG_OBJECT (pool->v4l2elem, " memory: %d", ret->vbuffer.memory);
  if (ret->vbuffer.memory == V4L2_MEMORY_MMAP)
    GST_LOG_OBJECT (pool->v4l2elem, " MMAP offset: %u",
        ret->vbuffer.m.offset);
  GST_LOG_OBJECT (pool->v4l2elem, " length: %u", ret->vbuffer.length);
  //GST_LOG_OBJECT (pool->v4l2elem, " input: %u", ret->vbuffer.input);

  data = (guint8 *) v4l2_mmap (0, ret->vbuffer.length,
      PROT_READ | PROT_WRITE, MAP_SHARED, pool->video_fd,
      ret->vbuffer.m.offset);

  if (data == MAP_FAILED)
    goto mmap_failed;

  GST_BUFFER_DATA (ret) = data;
  GST_BUFFER_SIZE (ret) = ret->vbuffer.length;

  GST_BUFFER_FLAG_SET (ret, GST_BUFFER_FLAG_READONLY);

  gst_buffer_set_caps (GST_BUFFER (ret), caps);

  return ret;

  /* ERRORS */
querybuf_failed:
  {
    gint errnosave = errno;

    GST_WARNING ("Failed QUERYBUF: %s", g_strerror (errnosave));
    gst_buffer_unref (GST_BUFFER (ret));
    errno = errnosave;
    return NULL;
  }
mmap_failed:
  {
    gint errnosave = errno;

    GST_WARNING ("Failed to mmap: %s", g_strerror (errnosave));
    gst_buffer_unref (GST_BUFFER (ret));
    errno = errnosave;
    return NULL;
  }
}
Developer ID: kpykc, Project: ardrone2_gstreamer, Lines of code: 69
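Examples 9 and 15 assign to GST_BUFFER_DATA instead of reading it; in GStreamer 0.10 the macro expands to a plain struct field, so it is a valid lvalue. A minimal sketch of wrapping externally managed memory this way (GStreamer 0.10 assumed, illustrative only; because GST_BUFFER_MALLOCDATA stays NULL the buffer will not free the memory, so the caller must keep it alive for the buffer's lifetime):

/* Sketch: point a 0.10 GstBuffer at caller-owned memory without copying.
 * The buffer does not take ownership (MALLOCDATA is left NULL). */
static GstBuffer *
wrap_external_memory (guint8 * mem, guint size)
{
  GstBuffer *buf = gst_buffer_new ();

  GST_BUFFER_DATA (buf) = mem;   /* lvalue assignment, GStreamer 0.10 only */
  GST_BUFFER_SIZE (buf) = size;
  return buf;
}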
Example 16: gst_kate_util_decoder_base_chain_kate_packet

GstFlowReturn
gst_kate_util_decoder_base_chain_kate_packet (GstKateDecoderBase * decoder,
    GstElement * element, GstPad * pad, GstBuffer * buf, GstPad * srcpad,
    GstPad * tagpad, GstCaps ** src_caps, const kate_event ** ev)
{
  kate_packet kp;
  int ret;
  GstFlowReturn rflow = GST_FLOW_OK;
  gboolean is_header;

  GST_DEBUG_OBJECT (element, "got kate packet, %u bytes, type %02x",
      GST_BUFFER_SIZE (buf),
      GST_BUFFER_SIZE (buf) == 0 ? -1 : GST_BUFFER_DATA (buf)[0]);

  is_header = GST_BUFFER_SIZE (buf) > 0 && (GST_BUFFER_DATA (buf)[0] & 0x80);

  if (!is_header && decoder->tags) {
    /* after we've processed headers, send any tags before processing the data packet */
    GST_DEBUG_OBJECT (element, "Not a header, sending tags for pad %s:%s",
        GST_DEBUG_PAD_NAME (tagpad));
    gst_element_found_tags_for_pad (element, tagpad, decoder->tags);
    decoder->tags = NULL;
  }

  kate_packet_wrap (&kp, GST_BUFFER_SIZE (buf), GST_BUFFER_DATA (buf));
  ret = kate_high_decode_packetin (&decoder->k, &kp, ev);
  if (G_UNLIKELY (ret < 0)) {
    GST_ELEMENT_ERROR (element, STREAM, DECODE, (NULL),
        ("Failed to decode Kate packet: %s",
            gst_kate_util_get_error_message (ret)));
    return GST_FLOW_ERROR;
  }

  if (G_UNLIKELY (ret > 0)) {
    GST_DEBUG_OBJECT (element,
        "kate_high_decode_packetin has received EOS packet");
  }

  /* headers may be interesting to retrieve information from */
  if (G_UNLIKELY (is_header)) {
    switch (GST_BUFFER_DATA (buf)[0]) {
      case 0x80:               /* ID header */
        GST_INFO_OBJECT (element, "Parsed ID header: language %s, category %s",
            decoder->k.ki->language, decoder->k.ki->category);
        if (src_caps) {
          if (*src_caps) {
            gst_caps_unref (*src_caps);
            *src_caps = NULL;
          }
          if (strcmp (decoder->k.ki->category, "K-SPU") == 0 ||
              strcmp (decoder->k.ki->category, "spu-subtitles") == 0) {
            *src_caps = gst_caps_new_simple ("video/x-dvd-subpicture", NULL);
          } else if (decoder->k.ki->text_markup_type == kate_markup_none) {
            *src_caps = gst_caps_new_simple ("text/plain", NULL);
          } else {
            *src_caps = gst_caps_new_simple ("text/x-pango-markup", NULL);
          }
          GST_INFO_OBJECT (srcpad, "Setting caps: %" GST_PTR_FORMAT, *src_caps);
          if (!gst_pad_set_caps (srcpad, *src_caps)) {
            GST_ERROR_OBJECT (srcpad, "Failed to set caps %" GST_PTR_FORMAT,
                *src_caps);
          }
        }
        if (decoder->k.ki->language && *decoder->k.ki->language) {
          GstTagList *old = decoder->tags, *tags = gst_tag_list_new ();
          if (tags) {
            gchar *lang_code;

            /* en_GB -> en */
            lang_code = g_ascii_strdown (decoder->k.ki->language, -1);
            g_strdelimit (lang_code, NULL, '\0');
            gst_tag_list_add (tags, GST_TAG_MERGE_APPEND,
                GST_TAG_LANGUAGE_CODE, lang_code, NULL);
            g_free (lang_code);
            /* TODO: category - where should it go ? */
            decoder->tags =
                gst_tag_list_merge (decoder->tags, tags, GST_TAG_MERGE_REPLACE);
            gst_tag_list_free (tags);
            if (old)
              gst_tag_list_free (old);
          }
        }

        /* update properties */
        if (decoder->language)
          g_free (decoder->language);
        decoder->language = g_strdup (decoder->k.ki->language);
        if (decoder->category)
          g_free (decoder->category);
        decoder->category = g_strdup (decoder->k.ki->category);
        decoder->original_canvas_width = decoder->k.ki->original_canvas_width;
        decoder->original_canvas_height = decoder->k.ki->original_canvas_height;

        /* we can now send away any event we've delayed, as the src pad now has caps */
        gst_kate_util_decoder_base_drain_event_queue (decoder);
        break;

      case 0x81:               /* Vorbis comments header */
        GST_INFO_OBJECT (element, "Parsed comments header");
//......... some code omitted here .........
Developer ID: dylansong77, Project: gstreamer, Lines of code: 101
Example 17: setup_jitterbuffer

static GstElement *
setup_jitterbuffer (gint num_buffers)
{
  GstElement *jitterbuffer;
  GstClock *clock;
  GstBuffer *buffer;
  GstCaps *caps;
  /* generated with
   * gst-launch audiotestsrc wave=silence blocksize=40 num-buffers=3 !
   *    "audio/x-raw-int,channels=1,rate=8000" ! mulawenc ! rtppcmupay !
   *     fakesink dump=1
   */
  guint8 in[] = {               /* first 4 bytes are rtp-header, next 4 bytes are timestamp */
    0x80, 0x80, 0x1c, 0x24, 0x46, 0xcd, 0xb7, 0x11, 0x3c, 0x3a, 0x7c, 0x5b,
    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff
  };
  GstClockTime ts = G_GUINT64_CONSTANT (0);
  GstClockTime tso = gst_util_uint64_scale (RTP_FRAME_SIZE, GST_SECOND, 8000);
  /*guint latency = GST_TIME_AS_MSECONDS (num_buffers*tso); */
  gint i;

  GST_DEBUG ("setup_jitterbuffer");
  jitterbuffer = gst_check_setup_element ("gstrtpjitterbuffer");
  /* we need a clock here */
  clock = gst_system_clock_obtain ();
  gst_element_set_clock (jitterbuffer, clock);
  gst_object_unref (clock);
  /* setup latency */
  /* latency would be 7 for 3 buffers here, default is 200
     g_object_set (G_OBJECT (jitterbuffer), "latency", latency, NULL);
     GST_INFO_OBJECT (jitterbuffer, "set latency to %u ms", latency);
   */
  mysrcpad = gst_check_setup_src_pad (jitterbuffer, &srctemplate, NULL);
  mysinkpad = gst_check_setup_sink_pad (jitterbuffer, &sinktemplate, NULL);
  gst_pad_set_active (mysrcpad, TRUE);
  gst_pad_set_active (mysinkpad, TRUE);

  /* create n buffers */
  caps = gst_caps_from_string (RTP_CAPS_STRING);
  for (i = 0; i < num_buffers; i++) {
    buffer = gst_buffer_new_and_alloc (sizeof (in));
    memcpy (GST_BUFFER_DATA (buffer), in, sizeof (in));
    gst_buffer_set_caps (buffer, caps);
    GST_BUFFER_TIMESTAMP (buffer) = ts;
    GST_BUFFER_DURATION (buffer) = tso;
    if (!i)
      GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_DISCONT);
    inbuffers = g_list_append (inbuffers, buffer);

    /* hackish way to update the rtp header */
    in[1] = 0x00;
    in[3]++;                    /* seqnumber */
    in[7] += RTP_FRAME_SIZE;    /* inc. timestamp with framesize */
    ts += tso;
  }
  gst_caps_unref (caps);

  return jitterbuffer;
}
Developer ID: zsx, Project: ossbuild, Lines of code: 63
Example 18: gst_mve_video_create_buffer

static GstFlowReturn
gst_mve_video_create_buffer (GstMveDemux * mve, guint8 version,
    const guint8 * data, guint16 len)
{
  GstBuffer *buf;
  guint16 w, h, n, true_color, bpp;
  guint required, size;

  GST_DEBUG_OBJECT (mve, "create video buffer");

  if (mve->video_stream == NULL) {
    GST_ELEMENT_ERROR (mve, STREAM, DECODE, (NULL),
        ("trying to create video buffer for uninitialized stream"));
    return GST_FLOW_ERROR;
  }

  /* need 4 to 8 more bytes */
  required = (version > 1) ? 8 : (version * 2);
  if (len < required)
    return gst_mve_stream_error (mve, required, len);

  w = GST_READ_UINT16_LE (data) << 3;
  h = GST_READ_UINT16_LE (data + 2) << 3;

  if (version > 0)
    n = GST_READ_UINT16_LE (data + 4);
  else
    n = 1;

  if (version > 1)
    true_color = GST_READ_UINT16_LE (data + 6);
  else
    true_color = 0;

  bpp = (true_color ? 2 : 1);
  size = w * h * bpp;

  if (mve->video_stream->buffer != NULL) {
    GST_DEBUG_OBJECT (mve, "video buffer already created");

    if (GST_BUFFER_SIZE (mve->video_stream->buffer) == size * 2)
      return GST_FLOW_OK;

    GST_DEBUG_OBJECT (mve, "video buffer size has changed");
    gst_buffer_unref (mve->video_stream->buffer);
  }

  GST_DEBUG_OBJECT (mve,
      "allocating video buffer, w:%u, h:%u, n:%u, true_color:%u", w, h, n,
      true_color);

  /* we need a buffer to keep the last 2 frames, since those may be
     needed for decoding the next one */
  buf = gst_buffer_new_and_alloc (size * 2);

  mve->video_stream->bpp = bpp;
  mve->video_stream->width = w;
  mve->video_stream->height = h;
  mve->video_stream->buffer = buf;
  mve->video_stream->back_buf1 = GST_BUFFER_DATA (buf);
  mve->video_stream->back_buf2 = mve->video_stream->back_buf1 + size;
  mve->video_stream->max_block_offset = (h - 7) * w - 8;
  memset (mve->video_stream->back_buf1, 0, size * 2);

  return GST_FLOW_OK;
}
Developer ID: drothlis, Project: gst-plugins-bad, Lines of code: 66
Example 19: GST_DEVSOUND_SRC

//......... some code omitted here .........
    if( KErrNone == ret )
        {
        ret = recordinit(devsoundsrc->handle);
        if( KErrNone == ret )
            initproperties(devsoundsrc);
        }
    /// if initialization of devsound failed, return error,
    /// on return other thread will send CLOSE cmd to exit from thread.
    if( ret )
        {
        pthread_mutex_lock(&(create_mutex1));
        return_error = ret;
        pthread_cond_signal(&(create_condition1));
        pthread_mutex_unlock(&(create_mutex1));

        pthread_mutex_lock(&create_mutex1);
        pthread_cond_wait(&create_condition1, &create_mutex1);
        pthread_mutex_unlock(&create_mutex1);
        }
    }

    while (TRUE)
        {
        switch (cmd)
            {
            case PAUSE:
                pause_devsound(devsoundsrc);
                break;
            case RESUME:
                resume_devsound(devsoundsrc);
                break;
            case STOP:
                stop_devsound(devsoundsrc);
                break;
            case RECORDING:
                {
                pre_init_setconf(devsoundsrc);
                // gst_Apply_SpeechEncoder_Update(devsoundsrc);
                gst_Apply_G711Encoder_Update(devsoundsrc);
                gst_Apply_G729Encoder_Update(devsoundsrc );
                gst_Apply_IlbcEncoder_Update(devsoundsrc );

                populateproperties(devsoundsrc);

                supportedbitrates = devsoundsrc->supportedbitrates;
                //numofbitrates = devsoundsrc->numofbitrates;
                speechbitrate = devsoundsrc->speechbitrate;
                speechvadmode = devsoundsrc->speechvadmode;
                g711vadmode = devsoundsrc->g711vadmode;
                g729vadmode = devsoundsrc->g729vadmode;
                ilbcvadmode = devsoundsrc->ilbcvadmode;

                buffersize = get_databuffer_size(devsoundsrc->handle);
                get_databuffer(devsoundsrc->handle, &gBuffer);
                pushBuffer = gst_buffer_new_and_alloc(buffersize);
                memcpy(GST_BUFFER_DATA(pushBuffer),gBuffer,buffersize);

                GST_OBJECT_LOCK(devsoundsrc);
                g_queue_push_head (dataqueue,pushBuffer);
                GST_OBJECT_UNLOCK(devsoundsrc);
                record_data(devsoundsrc->handle);
                }
                break;
            case CLOSE:
                {
                //gst_debug_log(devsound_debug, GST_LEVEL_LOG, "", "", 0, (GObject *) devsoundsrc, "Before Close DevSound ",NULL);
                close_devsound(devsoundsrc);
                //gst_debug_log(devsound_debug, GST_LEVEL_LOG, "", "", 0, (GObject *) devsoundsrc, "After Close DevSound ",NULL);
                devsoundsrc->handle= NULL;
                //gst_debug_log(devsound_debug, GST_LEVEL_LOG, "", "", 0, (GObject *) devsoundsrc, "After handle NULL ",NULL);
                pthread_mutex_lock(&(create_mutex1));
                pthread_cond_signal(&(create_condition1));
                pthread_mutex_unlock(&(create_mutex1));
                // TODO obtain mutex here
                consumer_thread_state = CONSUMER_THREAD_UNINITIALIZED;
                pthread_exit(NULL);
                }
                break;
            default:
                // TODO obtain mutex here
                consumer_thread_state = CONSUMER_THREAD_UNINITIALIZED;
                pthread_exit(NULL);
                break;
            }

        pthread_mutex_lock(&(create_mutex1));
        return_error = call_back_error(devsoundsrc->handle);
        pthread_cond_signal(&(create_condition1));
        pthread_mutex_unlock(&(create_mutex1));

        pthread_mutex_lock(&create_mutex1);
        pthread_cond_wait(&create_condition1, &create_mutex1);
        pthread_mutex_unlock(&create_mutex1);
        }
    }
Developer ID: kuailexs, Project: symbiandump-mw1, Lines of code: 101
Example 20: gst_mve_video_palette

static GstFlowReturn
gst_mve_video_palette (GstMveDemux * mve, const guint8 * data, guint16 len)
{
  GstBuffer *buf;
  guint16 start, count;
  const guint8 *pal;
  guint32 *pal_ptr;
  gint i;

  GST_DEBUG_OBJECT (mve, "video palette");

  if (mve->video_stream == NULL) {
    GST_ELEMENT_ERROR (mve, STREAM, DECODE, (NULL),
        ("found palette before video stream was initialized"));
    return GST_FLOW_ERROR;
  }

  /* need 4 more bytes now, more later */
  if (len < 4)
    return gst_mve_stream_error (mve, 4, len);

  len -= 4;

  start = GST_READ_UINT16_LE (data);
  count = GST_READ_UINT16_LE (data + 2);
  GST_DEBUG_OBJECT (mve, "found palette start:%u, count:%u", start, count);

  /* need more bytes */
  if (len < count * 3)
    return gst_mve_stream_error (mve, count * 3, len);

  /* make sure we don't exceed the buffer */
  if (start + count > MVE_PALETTE_COUNT) {
    GST_ELEMENT_ERROR (mve, STREAM, DECODE, (NULL),
        ("palette too large for buffer"));
    return GST_FLOW_ERROR;
  }

  if (mve->video_stream->palette != NULL) {
    /* older buffers floating around might still use the old
       palette, so make sure we can update it */
    buf = gst_buffer_make_writable (mve->video_stream->palette);
  } else {
    buf = gst_buffer_new_and_alloc (MVE_PALETTE_COUNT * 4);
    memset (GST_BUFFER_DATA (buf), 0, GST_BUFFER_SIZE (buf));
  }

  mve->video_stream->palette = buf;

  pal = data + 4;
  pal_ptr = ((guint32 *) GST_BUFFER_DATA (buf)) + start;
  for (i = 0; i < count; ++i) {
    /* convert from 6-bit VGA to 8-bit palette */
    guint8 r, g, b;

    r = (*pal) << 2;
    ++pal;
    g = (*pal) << 2;
    ++pal;
    b = (*pal) << 2;
    ++pal;
    *pal_ptr = (r << 16) | (g << 8) | (b);
    ++pal_ptr;
  }
  return GST_FLOW_OK;
}
Developer ID: drothlis, Project: gst-plugins-bad, Lines of code: 66
Example 21: gst_snapshot_chain

static void
gst_snapshot_chain (GstPad * pad, GstData * _data)
{
  GstBuffer *buf = GST_BUFFER (_data);
  GstSnapshot *snapshot;
  guchar *data;
  gulong size;
  gint i;
  png_byte *row_pointers[MAX_HEIGHT];
  FILE *fp;

  g_return_if_fail (pad != NULL);
  g_return_if_fail (GST_IS_PAD (pad));
  g_return_if_fail (buf != NULL);

  snapshot = GST_SNAPSHOT (GST_OBJECT_PARENT (pad));

  data = GST_BUFFER_DATA (buf);
  size = GST_BUFFER_SIZE (buf);

  GST_DEBUG ("snapshot: have buffer of %d\n", GST_BUFFER_SIZE (buf));

  snapshot->cur_frame++;
  if (snapshot->cur_frame == snapshot->frame ||
      snapshot->snapshot_asked == TRUE) {
    snapshot->snapshot_asked = FALSE;
    GST_INFO ("dumpfile : %s\n", snapshot->location);
    fp = fopen (snapshot->location, "wb");
    if (fp == NULL)
      g_warning (" Can not open %s\n", snapshot->location);
    else {
      snapshot->png_struct_ptr =
          png_create_write_struct (PNG_LIBPNG_VER_STRING, (png_voidp) NULL,
          user_error_fn, user_warning_fn);
      if (snapshot->png_struct_ptr == NULL)
        g_warning ("Failed to initialize png structure");

      snapshot->png_info_ptr =
          png_create_info_struct (snapshot->png_struct_ptr);

      if (setjmp (snapshot->png_struct_ptr->jmpbuf))
        png_destroy_write_struct (&snapshot->png_struct_ptr,
            &snapshot->png_info_ptr);

      png_set_filter (snapshot->png_struct_ptr, 0,
          PNG_FILTER_NONE | PNG_FILTER_VALUE_NONE);
      png_init_io (snapshot->png_struct_ptr, fp);
      png_set_compression_level (snapshot->png_struct_ptr, 9);
      png_set_IHDR (snapshot->png_struct_ptr, snapshot->png_info_ptr,
          snapshot->width, snapshot->height,
          snapshot->to_bpp / 3, PNG_COLOR_TYPE_RGB,
          PNG_INTERLACE_NONE, PNG_COMPRESSION_TYPE_DEFAULT,
          PNG_FILTER_TYPE_DEFAULT);

      for (i = 0; i < snapshot->height; i++)
        row_pointers[i] = data + (snapshot->width * i * snapshot->to_bpp / 8);

      png_write_info (snapshot->png_struct_ptr, snapshot->png_info_ptr);
      png_write_image (snapshot->png_struct_ptr, row_pointers);
      png_write_end (snapshot->png_struct_ptr, NULL);
      png_destroy_info_struct (snapshot->png_struct_ptr,
          &snapshot->png_info_ptr);
      png_destroy_write_struct (&snapshot->png_struct_ptr, (png_infopp) NULL);
      fclose (fp);
    }
  }

  gst_pad_push (snapshot->srcpad, GST_DATA (buf));
}
Developer ID: kanongil, Project: gst-plugins-bad, Lines of code: 73
Example 22: gst_mve_video_data

static GstFlowReturn
gst_mve_video_data (GstMveDemux * mve, const guint8 * data, guint16 len,
    GstBuffer ** output)
{
  GstFlowReturn ret = GST_FLOW_OK;
  gint16 cur_frame, last_frame;
  gint16 x_offset, y_offset;
  gint16 x_size, y_size;
  guint16 flags;
  gint dec;
  GstBuffer *buf = NULL;
  GstMveDemuxStream *s = mve->video_stream;

  GST_LOG_OBJECT (mve, "video data");

  if (s == NULL) {
    GST_ELEMENT_ERROR (mve, STREAM, DECODE, (NULL),
        ("trying to decode video data before stream was initialized"));
    return GST_FLOW_ERROR;
  }

  if (GST_CLOCK_TIME_IS_VALID (mve->frame_duration)) {
    if (GST_CLOCK_TIME_IS_VALID (s->last_ts))
      s->last_ts += mve->frame_duration;
    else
      s->last_ts = 0;
  }

  if (!s->code_map_avail) {
    GST_ELEMENT_ERROR (mve, STREAM, DECODE, (NULL),
        ("no code map available for decoding"));
    return GST_FLOW_ERROR;
  }

  /* need at least 14 more bytes */
  if (len < 14)
    return gst_mve_stream_error (mve, 14, len);

  len -= 14;

  cur_frame = GST_READ_UINT16_LE (data);
  last_frame = GST_READ_UINT16_LE (data + 2);
  x_offset = GST_READ_UINT16_LE (data + 4);
  y_offset = GST_READ_UINT16_LE (data + 6);
  x_size = GST_READ_UINT16_LE (data + 8);
  y_size = GST_READ_UINT16_LE (data + 10);
  flags = GST_READ_UINT16_LE (data + 12);
  data += 14;

  GST_DEBUG_OBJECT (mve,
      "video data hot:%d, cold:%d, xoff:%d, yoff:%d, w:%d, h:%d, flags:%x",
      cur_frame, last_frame, x_offset, y_offset, x_size, y_size, flags);

  if (flags & MVE_VIDEO_DELTA_FRAME) {
    guint8 *temp = s->back_buf1;

    s->back_buf1 = s->back_buf2;
    s->back_buf2 = temp;
  }

  ret = gst_mve_buffer_alloc_for_pad (s, s->width * s->height * s->bpp, &buf);
  if (ret != GST_FLOW_OK)
    return ret;

  if (s->bpp == 2) {
    dec = ipvideo_decode_frame16 (s, data, len);
  } else {
    if (s->palette == NULL) {
      GST_ELEMENT_ERROR (mve, STREAM, DECODE, (NULL), ("no palette available"));
      goto error;
    }

    dec = ipvideo_decode_frame8 (s, data, len);
  }
  if (dec != 0)
    goto error;

  memcpy (GST_BUFFER_DATA (buf), s->back_buf1, GST_BUFFER_SIZE (buf));
  GST_BUFFER_DURATION (buf) = mve->frame_duration;
  GST_BUFFER_OFFSET_END (buf) = ++s->offset;

  if (s->bpp == 1) {
    GstCaps *caps;

    /* set the palette on the outgoing buffer */
    caps = gst_caps_copy (s->caps);
    gst_caps_set_simple (caps,
        "palette_data", GST_TYPE_BUFFER, s->palette, NULL);
    gst_buffer_set_caps (buf, caps);
    gst_caps_unref (caps);
  }

  *output = buf;
  return GST_FLOW_OK;

error:
  gst_buffer_unref (buf);
  return GST_FLOW_ERROR;
}
Developer ID: drothlis, Project: gst-plugins-bad, Lines of code: 99
Example 23: dshowaudiodec_set_input_format

//......... some code omitted here .........
   * decoder which doesn't need this */
  if (adec->layer == 1 || adec->layer == 2) {
    MPEG1WAVEFORMAT *mpeg1_format;
    int samples, version;
    GstStructure *structure = gst_caps_get_structure (caps, 0);

    size = sizeof (MPEG1WAVEFORMAT);
    format = (WAVEFORMATEX *)g_malloc0 (size);
    format->cbSize = sizeof (MPEG1WAVEFORMAT) - sizeof (WAVEFORMATEX);
    format->wFormatTag = WAVE_FORMAT_MPEG;

    mpeg1_format = (MPEG1WAVEFORMAT *) format;

    mpeg1_format->wfx.nChannels = adec->channels;
    if (adec->channels == 2)
      mpeg1_format->fwHeadMode = ACM_MPEG_STEREO;
    else
      mpeg1_format->fwHeadMode = ACM_MPEG_SINGLECHANNEL;

    mpeg1_format->fwHeadModeExt = 0;
    mpeg1_format->wHeadEmphasis = 0;
    mpeg1_format->fwHeadFlags = 0;

    switch (adec->layer) {
      case 1:
        mpeg1_format->fwHeadLayer = ACM_MPEG_LAYER3;
        break;
      case 2:
        mpeg1_format->fwHeadLayer = ACM_MPEG_LAYER2;
        break;
      case 3:
        mpeg1_format->fwHeadLayer = ACM_MPEG_LAYER1;
        break;
    };

    gst_structure_get_int (structure, "mpegaudioversion", &version);
    if (adec->layer == 1) {
      samples = 384;
    } else {
      if (version == 1) {
        samples = 576;
      } else {
        samples = 1152;
      }
    }
    mpeg1_format->wfx.nBlockAlign = (WORD) samples;
    mpeg1_format->wfx.nSamplesPerSec = adec->rate;
    mpeg1_format->dwHeadBitrate = 128000; /* This doesn't seem to matter */
    mpeg1_format->wfx.nAvgBytesPerSec = mpeg1_format->dwHeadBitrate / 8;
  }
  else {
    size = sizeof (WAVEFORMATEX) +
        (adec->codec_data ? GST_BUFFER_SIZE (adec->codec_data) : 0);

    if (adec->layer == 3) {
      MPEGLAYER3WAVEFORMAT *mp3format;

      /* The WinXP mp3 decoder doesn't actually check the size of this structure,
       * but requires that this be allocated and filled out (or we get obscure
       * random crashes)
       */
      size = sizeof (MPEGLAYER3WAVEFORMAT);
      mp3format = (MPEGLAYER3WAVEFORMAT *)g_malloc0 (size);
      format = (WAVEFORMATEX *)mp3format;
      format->cbSize = MPEGLAYER3_WFX_EXTRA_BYTES;

      mp3format->wID = MPEGLAYER3_ID_MPEG;
      mp3format->fdwFlags = MPEGLAYER3_FLAG_PADDING_ISO; /* No idea what this means for a decoder */

      /* The XP decoder divides by nBlockSize, so we must set this to a non-zero
         value, but it doesn't matter what - this is meaningless for VBR mp3 anyway */
      mp3format->nBlockSize = 1;
      mp3format->nFramesPerBlock = 1;
      mp3format->nCodecDelay = 0;
    }
    else {
      format = (WAVEFORMATEX *)g_malloc0 (size);
      if (adec->codec_data) {
        /* Codec data is appended after our header */
        memcpy (((guchar *) format) + sizeof (WAVEFORMATEX),
            GST_BUFFER_DATA (adec->codec_data),
            GST_BUFFER_SIZE (adec->codec_data));
        format->cbSize = GST_BUFFER_SIZE (adec->codec_data);
      }
    }

    format->wFormatTag = codec_entry->format;
    format->nChannels = adec->channels;
    format->nSamplesPerSec = adec->rate;
    format->nAvgBytesPerSec = adec->bitrate / 8;
    format->nBlockAlign = adec->block_align;
    format->wBitsPerSample = adec->depth;
  }

  mediatype->cbFormat = size;
  mediatype->pbFormat = (BYTE *) format;

  return mediatype;
}
Developer ID: spunktsch, Project: svtplayer, Lines of code: 101
Example 24: gst_mve_audio_data

static GstFlowReturn
gst_mve_audio_data (GstMveDemux * mve, guint8 type, const guint8 * data,
    guint16 len, GstBuffer ** output)
{
  GstFlowReturn ret;
  GstMveDemuxStream *s = mve->audio_stream;
  GstBuffer *buf = NULL;
  guint16 stream_mask;
  guint16 size;

  GST_LOG_OBJECT (mve, "audio data");

  if (s == NULL) {
    GST_ELEMENT_ERROR (mve, STREAM, DECODE, (NULL),
        ("trying to queue samples with no audio stream"));
    return GST_FLOW_ERROR;
  }

  /* need at least 6 more bytes */
  if (len < 6)
    return gst_mve_stream_error (mve, 6, len);

  len -= 6;

  stream_mask = GST_READ_UINT16_LE (data + 2);
  size = GST_READ_UINT16_LE (data + 4);
  data += 6;

  if (stream_mask & MVE_DEFAULT_AUDIO_STREAM) {
    guint16 n_samples = size / s->n_channels / (s->sample_size / 8);
    GstClockTime duration = (GST_SECOND / s->sample_rate) * n_samples;

    if (type == MVE_OC_AUDIO_DATA) {
      guint16 required = (s->compression ? size / 2 + s->n_channels : size);

      if (len < required)
        return gst_mve_stream_error (mve, required, len);

      ret = gst_mve_buffer_alloc_for_pad (s, size, &buf);
      if (ret != GST_FLOW_OK)
        return ret;

      if (s->compression)
        ipaudio_uncompress ((gint16 *) GST_BUFFER_DATA (buf), size,
            data, s->n_channels);
      else
        memcpy (GST_BUFFER_DATA (buf), data, size);

      GST_DEBUG_OBJECT (mve, "created audio buffer, size:%u, stream_mask:%x",
          size, stream_mask);
    } else {
      /* silence - create a minimal buffer with no sound */
      size = s->n_channels * (s->sample_size / 8);
      ret = gst_mve_buffer_alloc_for_pad (s, size, &buf);
      memset (GST_BUFFER_DATA (buf), 0, size);
    }

    GST_BUFFER_DURATION (buf) = duration;
    GST_BUFFER_OFFSET_END (buf) = s->offset + n_samples;
    *output = buf;

    s->offset += n_samples;
    s->last_ts += duration;
  } else {
    /* alternate audio streams not supported.
       are there any movies which use them? */
    if (type == MVE_OC_AUDIO_DATA)
      GST_WARNING_OBJECT (mve, "found non-empty alternate audio stream");
  }

  return GST_FLOW_OK;
}
Developer: drothlis, Project: gst-plugins-bad, Lines of code: 73
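Example 24 shows the common producer pattern: allocate an output buffer, write the decoded samples (or silence) through GST_BUFFER_DATA, and derive a duration from the sample count. The following is a compilable sketch of only that pattern, assuming the GStreamer 0.10 API; make_silence_buffer and all parameter values are invented for the demonstration.

/* Sketch: allocate a buffer, fill its data area, stamp a duration (0.10). */
#include <gst/gst.h>
#include <string.h>

static GstBuffer *
make_silence_buffer (gint channels, gint sample_size, gint rate,
    guint16 n_samples)
{
  guint size = n_samples * channels * (sample_size / 8);
  GstBuffer *buf = gst_buffer_new_and_alloc (size);

  /* silence: zero every byte of the freshly allocated data area */
  memset (GST_BUFFER_DATA (buf), 0, size);

  GST_BUFFER_DURATION (buf) =
      gst_util_uint64_scale_int (n_samples, GST_SECOND, rate);
  return buf;
}

int
main (int argc, char **argv)
{
  GstBuffer *buf;

  gst_init (&argc, &argv);
  buf = make_silence_buffer (2, 16, 44100, 1152);
  g_print ("buffer of %u bytes, duration %" GST_TIME_FORMAT "\n",
      GST_BUFFER_SIZE (buf), GST_TIME_ARGS (GST_BUFFER_DURATION (buf)));
  gst_buffer_unref (buf);
  return 0;
}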
Example 25: gst_rtp_amr_pay_handle_buffer
static GstFlowReturn
gst_rtp_amr_pay_handle_buffer (GstBaseRTPPayload * basepayload,
    GstBuffer * buffer)
{
  GstRtpAMRPay *rtpamrpay;
  const gint *frame_size;
  GstFlowReturn ret;
  guint size, payload_len;
  GstBuffer *outbuf;
  guint8 *payload, *data, *payload_amr;
  GstClockTime timestamp, duration;
  guint packet_len, mtu;
  gint i, num_packets, num_nonempty_packets;
  gint amr_len;
  gboolean sid = FALSE;

  rtpamrpay = GST_RTP_AMR_PAY (basepayload);
  mtu = GST_BASE_RTP_PAYLOAD_MTU (rtpamrpay);

  size = GST_BUFFER_SIZE (buffer);
  data = GST_BUFFER_DATA (buffer);
  timestamp = GST_BUFFER_TIMESTAMP (buffer);
  duration = GST_BUFFER_DURATION (buffer);

  /* setup frame size pointer */
  if (rtpamrpay->mode == GST_RTP_AMR_P_MODE_NB)
    frame_size = nb_frame_size;
  else
    frame_size = wb_frame_size;

  GST_DEBUG_OBJECT (basepayload, "got %d bytes", size);

  /* FIXME, only
   * octet aligned, no interleaving, single channel, no CRC,
   * no robust-sorting. To fix this you need to implement the downstream
   * negotiation function. */

  /* first count number of packets and total amr frame size */
  amr_len = num_packets = num_nonempty_packets = 0;
  for (i = 0; i < size; i++) {
    guint8 FT;
    gint fr_size;

    FT = (data[i] & 0x78) >> 3;

    fr_size = frame_size[FT];
    GST_DEBUG_OBJECT (basepayload, "frame type %d, frame size %d", FT,
        fr_size);
    /* FIXME, we don't handle this yet.. */
    if (fr_size <= 0)
      goto wrong_size;

    if (fr_size == 5)
      sid = TRUE;

    amr_len += fr_size;
    num_nonempty_packets++;
    num_packets++;
    i += fr_size;
  }
  if (amr_len > size)
    goto incomplete_frame;

  /* we need one extra byte for the CMR, the ToC is in the input
   * data */
  payload_len = size + 1;

  /* get packet len to check against MTU */
  packet_len = gst_rtp_buffer_calc_packet_len (payload_len, 0, 0);
  if (packet_len > mtu)
    goto too_big;

  /* now alloc output buffer */
  outbuf = gst_rtp_buffer_new_allocate (payload_len, 0, 0);

  /* copy timestamp */
  GST_BUFFER_TIMESTAMP (outbuf) = timestamp;

  if (duration != GST_CLOCK_TIME_NONE)
    GST_BUFFER_DURATION (outbuf) = duration;
  else {
    GST_BUFFER_DURATION (outbuf) = num_packets * 20 * GST_MSECOND;
  }

  if (GST_BUFFER_IS_DISCONT (buffer)) {
    GST_DEBUG_OBJECT (basepayload, "discont, setting marker bit");
    GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DISCONT);
    gst_rtp_buffer_set_marker (outbuf, TRUE);
    gst_rtp_amr_pay_recalc_rtp_time (rtpamrpay, timestamp);
  }

  if (G_UNLIKELY (sid)) {
    gst_rtp_amr_pay_recalc_rtp_time (rtpamrpay, timestamp);
  }

  /* perfect rtptime */
  if (G_UNLIKELY (!GST_CLOCK_TIME_IS_VALID (rtpamrpay->first_ts))) {
    rtpamrpay->first_ts = timestamp;
    rtpamrpay->first_rtp_time = rtpamrpay->next_rtp_time;
  }
  GST_BUFFER_OFFSET (outbuf) = rtpamrpay->next_rtp_time;
//......... part of the code omitted here .........
Developer: spunktsch, Project: svtplayer, Lines of code: 101
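On the reading side, Example 25 grabs the payload pointer once with GST_BUFFER_DATA and then walks it byte by byte to parse the AMR table of contents. A reduced sketch of that access pattern follows (GStreamer 0.10 assumed); it only extracts the 4-bit frame-type field and deliberately leaves out the frame-size table and the RTP packaging of the original.

/* Sketch: walk raw payload bytes obtained via GST_BUFFER_DATA (0.10). */
#include <gst/gst.h>

static void
dump_frame_types (GstBuffer * buffer)
{
  guint8 *data = GST_BUFFER_DATA (buffer);
  guint size = GST_BUFFER_SIZE (buffer);
  guint i;

  for (i = 0; i < size; i++) {
    guint8 ft = (data[i] & 0x78) >> 3;  /* bits 3..6 carry the frame type */
    g_print ("byte %u: frame type %u\n", i, ft);
  }
}

int
main (int argc, char **argv)
{
  GstBuffer *buf;

  gst_init (&argc, &argv);
  buf = gst_buffer_new_and_alloc (3);
  GST_BUFFER_DATA (buf)[0] = 0x3c;      /* arbitrary test bytes */
  GST_BUFFER_DATA (buf)[1] = 0x04;
  GST_BUFFER_DATA (buf)[2] = 0x74;
  dump_frame_types (buf);
  gst_buffer_unref (buf);
  return 0;
}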
Example 26: deinterlace_frame_di_greedyh_packed
static void
deinterlace_frame_di_greedyh_packed (GstDeinterlaceMethod * method,
    const GstDeinterlaceField * history, guint history_count,
    GstBuffer * outbuf)
{
  GstDeinterlaceMethodGreedyH *self = GST_DEINTERLACE_METHOD_GREEDY_H (method);
  GstDeinterlaceMethodGreedyHClass *klass =
      GST_DEINTERLACE_METHOD_GREEDY_H_GET_CLASS (self);
  gint InfoIsOdd = 0;
  gint Line;
  gint RowStride = method->row_stride[0];
  gint FieldHeight = method->frame_height / 2;
  gint Pitch = method->row_stride[0] * 2;
  const guint8 *L1;             // ptr to Line1, of 3
  const guint8 *L2;             // ptr to Line2, the weave line
  const guint8 *L3;             // ptr to Line3
  const guint8 *L2P;            // ptr to prev Line2
  guint8 *Dest = GST_BUFFER_DATA (outbuf);
  ScanlineFunction scanline;

  switch (method->format) {
    case GST_VIDEO_FORMAT_YUY2:
    case GST_VIDEO_FORMAT_YVYU:
      scanline = klass->scanline_yuy2;
      break;
    case GST_VIDEO_FORMAT_UYVY:
      scanline = klass->scanline_uyvy;
      break;
    case GST_VIDEO_FORMAT_AYUV:
      scanline = klass->scanline_ayuv;
      break;
    default:
      g_assert_not_reached ();
      break;
  }

  // copy first even line no matter what, and the first odd line if we're
  // processing an EVEN field. (note diff from other deint rtns.)

  if (history[history_count - 1].flags == PICTURE_INTERLACED_BOTTOM) {
    InfoIsOdd = 1;

    L1 = GST_BUFFER_DATA (history[history_count - 2].buf);
    if (history[history_count - 2].flags & PICTURE_INTERLACED_BOTTOM)
      L1 += RowStride;

    L2 = GST_BUFFER_DATA (history[history_count - 1].buf);
    if (history[history_count - 1].flags & PICTURE_INTERLACED_BOTTOM)
      L2 += RowStride;

    L3 = L1 + Pitch;
    L2P = GST_BUFFER_DATA (history[history_count - 3].buf);
    if (history[history_count - 3].flags & PICTURE_INTERLACED_BOTTOM)
      L2P += RowStride;

    // copy first even line
    memcpy (Dest, L1, RowStride);
    Dest += RowStride;
  } else {
    InfoIsOdd = 0;

    L1 = GST_BUFFER_DATA (history[history_count - 2].buf);
    if (history[history_count - 2].flags & PICTURE_INTERLACED_BOTTOM)
      L1 += RowStride;

    L2 = GST_BUFFER_DATA (history[history_count - 1].buf) + Pitch;
    if (history[history_count - 1].flags & PICTURE_INTERLACED_BOTTOM)
      L2 += RowStride;

    L3 = L1 + Pitch;
    L2P = GST_BUFFER_DATA (history[history_count - 3].buf) + Pitch;
    if (history[history_count - 3].flags & PICTURE_INTERLACED_BOTTOM)
      L2P += RowStride;

    // copy first even line
    memcpy (Dest, L1, RowStride);
    Dest += RowStride;
    // then first odd line
    memcpy (Dest, L1, RowStride);
    Dest += RowStride;
  }

  for (Line = 0; Line < (FieldHeight - 1); ++Line) {
    scanline (self, L1, L2, L3, L2P, Dest, RowStride);
    Dest += RowStride;
    memcpy (Dest, L3, RowStride);
    Dest += RowStride;

    L1 += Pitch;
    L2 += Pitch;
    L3 += Pitch;
    L2P += Pitch;
  }

  if (InfoIsOdd) {
    memcpy (Dest, L2, RowStride);
  }
}
Developer: dgerlach, Project: gst-plugins-good, Lines of code: 97
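Example 26 treats GST_BUFFER_DATA as the top-left corner of a packed video frame and steps through it with a row stride. The sketch below shows the same stride arithmetic on a synthetic frame (GStreamer 0.10 assumed); copy_top_field and the YUY2-style stride are illustrative stand-ins for the deinterlacer's real scanline logic.

/* Sketch: row-stride access into GST_BUFFER_DATA of a packed frame (0.10). */
#include <gst/gst.h>
#include <string.h>

static void
copy_top_field (GstBuffer * src, GstBuffer * dest, gint row_stride,
    gint height)
{
  const guint8 *s = GST_BUFFER_DATA (src);
  guint8 *d = GST_BUFFER_DATA (dest);
  gint line;

  for (line = 0; line < height; line += 2) {
    /* duplicate each even source line into the two output lines */
    memcpy (d + line * row_stride, s + line * row_stride, row_stride);
    memcpy (d + (line + 1) * row_stride, s + line * row_stride, row_stride);
  }
}

int
main (int argc, char **argv)
{
  const gint width = 64, height = 8;
  const gint row_stride = width * 2;    /* YUY2: 2 bytes per pixel */
  GstBuffer *src, *dest;

  gst_init (&argc, &argv);
  src = gst_buffer_new_and_alloc (row_stride * height);
  dest = gst_buffer_new_and_alloc (row_stride * height);
  memset (GST_BUFFER_DATA (src), 0x80, row_stride * height);
  copy_top_field (src, dest, row_stride, height);
  gst_buffer_unref (src);
  gst_buffer_unref (dest);
  return 0;
}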
Example 27: gst_decklink_src_task
static void
gst_decklink_src_task (void *priv)
{
  GstDecklinkSrc *decklinksrc = GST_DECKLINK_SRC (priv);
  GstBuffer *buffer;
  GstBuffer *audio_buffer;
  IDeckLinkVideoInputFrame *video_frame;
  IDeckLinkAudioInputPacket *audio_frame;
  void *data;
  int n_samples;
  GstFlowReturn ret;
  const GstDecklinkMode *mode;

  GST_DEBUG_OBJECT (decklinksrc, "task");

  g_mutex_lock (decklinksrc->mutex);
  while (decklinksrc->video_frame == NULL && !decklinksrc->stop) {
    g_cond_wait (decklinksrc->cond, decklinksrc->mutex);
  }
  video_frame = decklinksrc->video_frame;
  audio_frame = decklinksrc->audio_frame;
  decklinksrc->video_frame = NULL;
  decklinksrc->audio_frame = NULL;
  g_mutex_unlock (decklinksrc->mutex);

  if (decklinksrc->stop) {
    if (video_frame)
      video_frame->Release ();
    if (audio_frame)
      audio_frame->Release ();
    GST_DEBUG ("stopping task");
    return;
  }

  /* warning on dropped frames */
  if (decklinksrc->dropped_frames - decklinksrc->dropped_frames_old > 0) {
    GST_ELEMENT_WARNING (decklinksrc, RESOURCE, READ,
        ("Dropped %d frame(s), for a total of %d frame(s)",
            decklinksrc->dropped_frames - decklinksrc->dropped_frames_old,
            decklinksrc->dropped_frames), (NULL));
    decklinksrc->dropped_frames_old = decklinksrc->dropped_frames;
  }

  if (!decklinksrc->video_new_segment) {
    GstEvent *event;
    gboolean ret;

    event = gst_event_new_new_segment (FALSE, 1.0, GST_FORMAT_TIME,
        0, GST_CLOCK_TIME_NONE, 0);

    ret = gst_pad_push_event (decklinksrc->videosrcpad, event);
    if (!ret) {
      GST_WARNING ("new segment event not handled");
    }

    decklinksrc->video_new_segment = TRUE;
  }

  mode = gst_decklink_get_mode (decklinksrc->mode);

  video_frame->GetBytes (&data);
  if (decklinksrc->copy_data) {
    buffer = gst_buffer_new_and_alloc (mode->width * mode->height * 2);

    memcpy (GST_BUFFER_DATA (buffer), data, mode->width * mode->height * 2);

    video_frame->Release ();
  } else {
    VideoFrame *vf;

    vf = (VideoFrame *) g_malloc0 (sizeof (VideoFrame));

    buffer = gst_buffer_new ();
    GST_BUFFER_SIZE (buffer) = mode->width * mode->height * 2;

    GST_BUFFER_DATA (buffer) = (guint8 *) data;

    GST_BUFFER_FREE_FUNC (buffer) = video_frame_free;
    GST_BUFFER_MALLOCDATA (buffer) = (guint8 *) vf;

    vf->frame = video_frame;
    vf->input = decklinksrc->input;
    vf->input->AddRef ();
  }

  GST_BUFFER_TIMESTAMP (buffer) =
      gst_util_uint64_scale_int (decklinksrc->frame_num * GST_SECOND,
      mode->fps_d, mode->fps_n);
  GST_BUFFER_DURATION (buffer) =
      gst_util_uint64_scale_int ((decklinksrc->frame_num + 1) * GST_SECOND,
      mode->fps_d, mode->fps_n) - GST_BUFFER_TIMESTAMP (buffer);
  GST_BUFFER_OFFSET (buffer) = decklinksrc->frame_num;
  GST_BUFFER_OFFSET_END (buffer) = decklinksrc->frame_num;
  if (decklinksrc->frame_num == 0) {
    GstEvent *event;
    gboolean ret;

    GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_DISCONT);

    event = gst_event_new_new_segment (FALSE, 1.0, GST_FORMAT_TIME, 0,
        GST_CLOCK_TIME_NONE, 0);
//......... part of the code omitted here .........
Developer: ylatuya, Project: gst-plugins-bad, Lines of code: 101
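The else branch of Example 27 is the zero-copy case: GST_BUFFER_DATA is pointed at memory owned by the capture driver and a free function is installed so ownership is released when the buffer dies. A minimal sketch of that wrapping follows; it assumes GStreamer 0.10 (GST_BUFFER_FREE_FUNC appeared around 0.10.22) and replaces the DeckLink frame object with a plain g_malloc'd block.

/* Sketch: wrap externally owned memory in a GstBuffer (0.10). */
#include <gst/gst.h>

static void
my_frame_free (gpointer data)
{
  g_print ("wrapped memory released\n");
  g_free (data);                /* here the wrapped block is just g_malloc'd */
}

static GstBuffer *
wrap_foreign_memory (guint8 * mem, guint size)
{
  GstBuffer *buffer = gst_buffer_new ();

  GST_BUFFER_DATA (buffer) = mem;       /* no copy, just point at it */
  GST_BUFFER_SIZE (buffer) = size;
  GST_BUFFER_MALLOCDATA (buffer) = mem; /* what the free function receives */
  GST_BUFFER_FREE_FUNC (buffer) = my_frame_free;
  return buffer;
}

int
main (int argc, char **argv)
{
  guint8 *mem;
  GstBuffer *buf;

  gst_init (&argc, &argv);
  mem = g_malloc (720 * 576 * 2);       /* pretend this came from a capture card */
  buf = wrap_foreign_memory (mem, 720 * 576 * 2);
  gst_buffer_unref (buf);       /* triggers my_frame_free() */
  return 0;
}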
Example 28: mfw_gst_vpuenc_chain
static GstFlowReturn mfw_gst_vpuenc_chain(GstPad * pad, GstBuffer * buffer)
{
  GstVPU_Enc *vpu_enc = NULL;
  GstFlowReturn retval = GST_FLOW_OK;
  GstCaps *src_caps;
  GstBuffer *outbuffer;
  gint i = 0;
  int ret;
  struct pollfd pollfd;
  unsigned long type = V4L2_BUF_TYPE_VIDEO_OUTPUT;

  GST_DEBUG(__func__);

  vpu_enc = MFW_GST_VPU_ENC(GST_PAD_PARENT(pad));

  if (vpu_enc->init == FALSE) {
    retval = mfw_gst_vpuenc_init_encoder(pad, vpu_enc->memory);
    if (retval != GST_FLOW_OK)
      return retval;
    printf("VPU ENC initialised\n");
  }

  i = 0;

  if (vpu_enc->memory == V4L2_MEMORY_USERPTR) {
    for (i = 0; i < NUM_BUFFERS; i++) {
      if (vpu_enc->buf_v4l2[i].m.userptr == (long int)GST_BUFFER_DATA (buffer))
        break;
    }
    if (i == NUM_BUFFERS) {
      for (i = 0; i < NUM_BUFFERS; i++) {
        if (!vpu_enc->buf_v4l2[i].m.userptr)
          break;
      }
    }
    i = i % NUM_BUFFERS;
  }

  if (i == NUM_BUFFERS) {
    printf("NO BUFFER AVAILABLE\n");
    return GST_FLOW_ERROR;
  }

  if (!buffer)
    return GST_FLOW_OK;

  if (vpu_enc->memory == V4L2_MEMORY_MMAP) {
    /* copy the input Frame into the allocated buffer */
    memcpy(vpu_enc->buf_data[i], GST_BUFFER_DATA(buffer),
           GST_BUFFER_SIZE(buffer));
    gst_buffer_unref(buffer);
  } else {
    vpu_enc->buf_v4l2[i].m.userptr = (long int)GST_BUFFER_DATA (buffer);
    vpu_enc->buf_v4l2[i].length = GST_BUFFER_SIZE (buffer);
  }

  pollfd.fd = vpu_enc->vpu_fd;
  pollfd.events = POLLIN | POLLOUT;

  ret = ioctl(vpu_enc->vpu_fd, VIDIOC_QBUF, &vpu_enc->buf_v4l2[i]);
  if (ret) {
    if (vpu_enc->memory == V4L2_MEMORY_USERPTR) {
      /* fallback to mmap */
      vpu_enc->init = FALSE;
      vpu_enc->memory = V4L2_MEMORY_MMAP;
      GST_WARNING("mfw_gst_vpuenc_chain: fallback to mmap");
      return mfw_gst_vpuenc_chain(pad, buffer);
    }
    GST_ERROR("VIDIOC_QBUF failed: %s\n", strerror(errno));
    return GST_FLOW_ERROR;
  }

  if (!vpu_enc->once) {
    retval = ioctl(vpu_enc->vpu_fd, VIDIOC_STREAMON, &type);
    if (retval) {
      printf("streamon failed with %d", retval);
      return GST_FLOW_ERROR;
    }
    vpu_enc->once = 1;
  }

  ret = ioctl(vpu_enc->vpu_fd, VIDIOC_DQBUF, &vpu_enc->buf_v4l2[0]);
  if (ret) {
    GST_ERROR("VIDIOC_DQBUF failed: %s\n", strerror(errno));
    return GST_FLOW_ERROR;
  }

  if (vpu_enc->memory == V4L2_MEMORY_USERPTR) {
    gst_buffer_unref(buffer);
  }

  src_caps = GST_PAD_CAPS(vpu_enc->srcpad);

  retval = gst_pad_alloc_buffer_and_set_caps(vpu_enc->srcpad, 0,
                                             1024 * 1024, src_caps,
                                             &outbuffer);
  if (retval != GST_FLOW_OK) {
    GST_ERROR("Allocating buffer failed with %d", ret);
    return retval;
  }

  ret = read(vpu_enc->vpu_fd, GST_BUFFER_DATA(outbuffer), 1024 * 1024);
  if (ret < 0) {
//......... part of the code omitted here .........
Developer: jmartinc, Project: gst-plugins-fsl-vpu, Lines of code: 101
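Example 28 chooses between copying the frame out of GST_BUFFER_DATA into a driver-owned mmap buffer and handing the pointer itself to the driver (V4L2 userptr). The sketch below contrasts the two hand-offs without any V4L2 device (GStreamer 0.10 assumed); the driver_area array and both queue_* helpers are invented purely for illustration.

/* Sketch: copy vs. zero-copy hand-off of GST_BUFFER_DATA (0.10). */
#include <gst/gst.h>
#include <string.h>

#define FRAME_SIZE (64 * 64 * 3 / 2)    /* a tiny fake I420 frame */

static guint8 driver_area[FRAME_SIZE];  /* stands in for the mmap'd buffer */

static void
queue_frame_copy (GstBuffer * buffer)
{
  /* copy path: the input buffer can be released immediately afterwards */
  memcpy (driver_area, GST_BUFFER_DATA (buffer),
      MIN (GST_BUFFER_SIZE (buffer), FRAME_SIZE));
  gst_buffer_unref (buffer);
}

static guint8 *
queue_frame_zero_copy (GstBuffer * buffer)
{
  /* zero-copy path: only the pointer is handed over, so the buffer must be
   * kept referenced until the hardware has consumed it */
  return GST_BUFFER_DATA (buffer);
}

int
main (int argc, char **argv)
{
  GstBuffer *in, *in2;
  guint8 *ptr;

  gst_init (&argc, &argv);

  in = gst_buffer_new_and_alloc (FRAME_SIZE);
  memset (GST_BUFFER_DATA (in), 0x10, FRAME_SIZE);
  queue_frame_copy (in);                /* copy path consumes the buffer */

  in2 = gst_buffer_new_and_alloc (FRAME_SIZE);
  ptr = queue_frame_zero_copy (in2);
  g_print ("driver would read from %p\n", (void *) ptr);
  gst_buffer_unref (in2);               /* only safe once the hardware is done */

  return 0;
}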
Example 29: gst_icydemux_chain
static GstFlowReturn
gst_icydemux_chain (GstPad * pad, GstBuffer * buf)
{
  GstICYDemux *icydemux;
  guint size, chunk, offset;
  GstBuffer *sub;
  GstFlowReturn ret = GST_FLOW_OK;

  icydemux = GST_ICYDEMUX (GST_PAD_PARENT (pad));

  if (G_UNLIKELY (icydemux->meta_interval < 0))
    goto not_negotiated;

  if (icydemux->meta_interval == 0) {
    ret = gst_icydemux_typefind_or_forward (icydemux, buf);
    goto done;
  }

  /* Go through the buffer, chopping it into appropriate chunks. Forward as
   * tags or buffers, as appropriate
   */
  size = GST_BUFFER_SIZE (buf);
  offset = 0;
  while (size) {
    if (icydemux->remaining) {
      chunk = (size <= icydemux->remaining) ? size : icydemux->remaining;
      sub = gst_buffer_create_sub (buf, offset, chunk);
      offset += chunk;
      icydemux->remaining -= chunk;
      size -= chunk;

      /* This buffer goes onto typefinding, and/or directly pushed out */
      ret = gst_icydemux_typefind_or_forward (icydemux, sub);
      if (ret != GST_FLOW_OK)
        goto done;
    } else if (icydemux->meta_remaining) {
      chunk = (size <= icydemux->meta_remaining) ?
          size : icydemux->meta_remaining;
      sub = gst_buffer_create_sub (buf, offset, chunk);
      gst_icydemux_add_meta (icydemux, sub);

      offset += chunk;
      icydemux->meta_remaining -= chunk;
      size -= chunk;

      if (icydemux->meta_remaining == 0) {
        /* Parse tags from meta_adapter, send off as tag messages */
        GST_DEBUG_OBJECT (icydemux, "No remaining metadata, parsing for tags");
        gst_icydemux_parse_and_send_tags (icydemux);

        icydemux->remaining = icydemux->meta_interval;
      }
    } else {
      /* We need to read a single byte (always safe at this point in the loop)
       * to figure out how many bytes of metadata exist.
       * The 'spec' tells us to read 16 * (byte_value) bytes of metadata after
       * this (zero is common, and means the metadata hasn't changed).
       */
      icydemux->meta_remaining = 16 * GST_BUFFER_DATA (buf)[offset];
      if (icydemux->meta_remaining == 0)
        icydemux->remaining = icydemux->meta_interval;

      offset += 1;
      size -= 1;
    }
  }

done:
  gst_buffer_unref (buf);

  return ret;

  /* ERRORS */
not_negotiated:
  {
    GST_WARNING_OBJECT (icydemux, "meta_interval not set, buffer probably had "
        "no caps set. Try enabling iradio-mode on the http source element");
    gst_buffer_unref (buf);
    return GST_FLOW_NOT_NEGOTIATED;
  }
}
Developer: dgerlach, Project: gst-plugins-good, Lines of code: 81
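Example 29 mixes direct indexing, GST_BUFFER_DATA (buf)[offset], with gst_buffer_create_sub to slice the stream without copying. The short program below exercises both calls in isolation (GStreamer 0.10 assumed); the buffer contents are dummy values standing in for ICY stream data.

/* Sketch: peek a byte and slice a sub-buffer without copying (0.10). */
#include <gst/gst.h>
#include <string.h>

int
main (int argc, char **argv)
{
  GstBuffer *buf, *sub;
  guint8 meta_len_byte;

  gst_init (&argc, &argv);

  buf = gst_buffer_new_and_alloc (32);
  memset (GST_BUFFER_DATA (buf), 0xaa, 32);
  GST_BUFFER_DATA (buf)[16] = 2;        /* pretend this is the ICY length byte */

  /* peek one byte, as the icydemux chain function does */
  meta_len_byte = GST_BUFFER_DATA (buf)[16];
  g_print ("metadata block would be %u bytes\n", 16 * meta_len_byte);

  /* chop off the first 16 bytes as a zero-copy sub-buffer */
  sub = gst_buffer_create_sub (buf, 0, 16);
  g_print ("sub-buffer: %u bytes at %p\n", GST_BUFFER_SIZE (sub),
      (void *) GST_BUFFER_DATA (sub));

  gst_buffer_unref (sub);
  gst_buffer_unref (buf);
  return 0;
}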
Example 30: gst_file_src_create_read
static GstFlowReturn
gst_file_src_create_read (GstFileSrc * src, guint64 offset, guint length,
    GstBuffer ** buffer)
{
  int ret;
  GstBuffer *buf;

  if (G_UNLIKELY (src->read_position != offset)) {
    off_t res;

    res = lseek (src->fd, offset, SEEK_SET);
    if (G_UNLIKELY (res < 0 || res != offset))
      goto seek_failed;

    src->read_position = offset;
  }

  buf = gst_buffer_try_new_and_alloc (length);
  if (G_UNLIKELY (buf == NULL && length > 0)) {
    GST_ERROR_OBJECT (src, "Failed to allocate %u bytes", length);
    return GST_FLOW_ERROR;
  }

  /* No need to read anything if length is 0 */
  if (length > 0) {
    GST_LOG_OBJECT (src, "Reading %d bytes at offset 0x%" G_GINT64_MODIFIER "x",
        length, offset);
    ret = read (src->fd, GST_BUFFER_DATA (buf), length);
    if (G_UNLIKELY (ret < 0))
      goto could_not_read;

    /* seekable regular files should have given us what we expected */
    if (G_UNLIKELY ((guint) ret < length && src->seekable))
      goto unexpected_eos;

    /* other files should eos if they read 0 and more was requested */
    if (G_UNLIKELY (ret == 0 && length > 0))
      goto eos;

    length = ret;

    GST_BUFFER_SIZE (buf) = length;
    GST_BUFFER_OFFSET (buf) = offset;
    GST_BUFFER_OFFSET_END (buf) = offset + length;

    src->read_position += length;
  }

  *buffer = buf;

  return GST_FLOW_OK;

  /* ERROR */
seek_failed:
  {
    GST_ELEMENT_ERROR (src, RESOURCE, READ, (NULL), GST_ERROR_SYSTEM);
    return GST_FLOW_ERROR;
  }
could_not_read:
  {
    GST_ELEMENT_ERROR (src, RESOURCE, READ, (NULL), GST_ERROR_SYSTEM);
    gst_buffer_unref (buf);
    return GST_FLOW_ERROR;
  }
unexpected_eos:
  {
    GST_ELEMENT_ERROR (src, RESOURCE, READ, (NULL),
        ("unexpected end of file."));
    gst_buffer_unref (buf);
    return GST_FLOW_ERROR;
  }
eos:
  {
    GST_DEBUG ("non-regular file hits EOS");
    gst_buffer_unref (buf);
    return GST_FLOW_UNEXPECTED;
  }
}
Developer: spunktsch, Project: svtplayer, Lines of code: 77
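Example 30 lets read() write straight into GST_BUFFER_DATA of a freshly allocated buffer and then trims GST_BUFFER_SIZE to the number of bytes actually read. The following sketch reproduces that sequence against an ordinary file descriptor (GStreamer 0.10 assumed); reading the program's own binary is just a convenient test input.

/* Sketch: fill GST_BUFFER_DATA directly from a file descriptor (0.10). */
#include <gst/gst.h>
#include <fcntl.h>
#include <unistd.h>

int
main (int argc, char **argv)
{
  GstBuffer *buf;
  int fd;
  ssize_t got;

  gst_init (&argc, &argv);

  fd = open (argv[0], O_RDONLY);        /* any readable file works */
  if (fd < 0)
    return 1;

  buf = gst_buffer_new_and_alloc (4096);
  got = read (fd, GST_BUFFER_DATA (buf), 4096);
  if (got < 0) {
    gst_buffer_unref (buf);
    close (fd);
    return 1;
  }

  /* a short read is fine; just report the smaller size downstream */
  GST_BUFFER_SIZE (buf) = (guint) got;
  GST_BUFFER_OFFSET (buf) = 0;
  GST_BUFFER_OFFSET_END (buf) = got;
  g_print ("read %u bytes into the buffer\n", GST_BUFFER_SIZE (buf));

  gst_buffer_unref (buf);
  close (fd);
  return 0;
}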
Note: The GST_BUFFER_DATA examples in this article were collected from GitHub, MSDocs and other source-code and documentation hosting platforms; the snippets were selected from open-source projects contributed by many developers. Copyright of the source code remains with the original authors. Please refer to the corresponding project's License before redistributing or reusing it, and do not republish without permission.