您当前的位置:首页 > IT编程 > C++
| C语言 | Java | VB | VC | python | Android | TensorFlow | C++ | oracle | 学术与代码 | cnn卷积神经网络 | gnn | 图像修复 | Keras | 数据集 | Neo4j | 自然语言处理 | 深度学习 | 医学CAD | 医学影像 | 超参数 | pointnet | pytorch | 异常检测 | Transformers | 情感分类 | 知识图谱 |

自学教程:C++ GST_ELEMENT_ERROR函数代码示例

51自学网 2021-06-01 20:56:21
  C++
这篇教程C++ GST_ELEMENT_ERROR函数代码示例写得很实用,希望能帮到您。

本文整理汇总了C++中GST_ELEMENT_ERROR函数的典型用法代码示例。如果您正苦于以下问题:C++ GST_ELEMENT_ERROR函数的具体用法?C++ GST_ELEMENT_ERROR怎么用?C++ GST_ELEMENT_ERROR使用的例子?那么恭喜您, 这里精选的函数代码示例或许可以为您提供帮助。

在下文中一共展示了GST_ELEMENT_ERROR函数的26个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于我们的系统推荐出更棒的C++代码示例。

示例1: gst_amc_audio_dec_loop

//.........这里部分代码省略.........      goto flushing;    }    goto failed_release;  }  if (is_eos || flow_ret == GST_FLOW_EOS) {    GST_AUDIO_DECODER_STREAM_UNLOCK (self);    g_mutex_lock (&self->drain_lock);    if (self->draining) {      GST_DEBUG_OBJECT (self, "Drained");      self->draining = FALSE;      g_cond_broadcast (&self->drain_cond);    } else if (flow_ret == GST_FLOW_OK) {      GST_DEBUG_OBJECT (self, "Component signalled EOS");      flow_ret = GST_FLOW_EOS;    }    g_mutex_unlock (&self->drain_lock);    GST_AUDIO_DECODER_STREAM_LOCK (self);  } else {    GST_DEBUG_OBJECT (self, "Finished frame: %s", gst_flow_get_name (flow_ret));  }  self->downstream_flow_ret = flow_ret;  if (flow_ret != GST_FLOW_OK)    goto flow_error;  GST_AUDIO_DECODER_STREAM_UNLOCK (self);  return;dequeue_error:  {    GST_ELEMENT_ERROR_FROM_ERROR (self, err);    gst_pad_push_event (GST_AUDIO_DECODER_SRC_PAD (self), gst_event_new_eos ());    gst_pad_pause_task (GST_AUDIO_DECODER_SRC_PAD (self));    self->downstream_flow_ret = GST_FLOW_ERROR;    GST_AUDIO_DECODER_STREAM_UNLOCK (self);    g_mutex_lock (&self->drain_lock);    self->draining = FALSE;    g_cond_broadcast (&self->drain_cond);    g_mutex_unlock (&self->drain_lock);    return;  }format_error:  {    if (err)      GST_ELEMENT_ERROR_FROM_ERROR (self, err);    else      GST_ELEMENT_ERROR (self, LIBRARY, FAILED, (NULL),          ("Failed to handle format"));    gst_pad_push_event (GST_AUDIO_DECODER_SRC_PAD (self), gst_event_new_eos ());    gst_pad_pause_task (GST_AUDIO_DECODER_SRC_PAD (self));    self->downstream_flow_ret = GST_FLOW_ERROR;    GST_AUDIO_DECODER_STREAM_UNLOCK (self);    g_mutex_lock (&self->drain_lock);    self->draining = FALSE;    g_cond_broadcast (&self->drain_cond);    g_mutex_unlock (&self->drain_lock);    return;  }failed_release:  {    GST_AUDIO_DECODER_ERROR_FROM_ERROR (self, err);    gst_pad_push_event (GST_AUDIO_DECODER_SRC_PAD (self), gst_event_new_eos ());
开发者ID:ndufresne,项目名称:gst-plugins-bad,代码行数:67,


示例2: gst_pipeline_change_state

//.........这里部分代码省略.........            if (clock)                gst_object_unref (clock);            if (start_time != GST_CLOCK_TIME_NONE && now != GST_CLOCK_TIME_NONE) {                GstClockTime new_base_time = now - start_time + delay;                GST_DEBUG_OBJECT (element,                                  "start_time=%" GST_TIME_FORMAT ", now=%" GST_TIME_FORMAT                                  ", base_time %" GST_TIME_FORMAT,                                  GST_TIME_ARGS (start_time), GST_TIME_ARGS (now),                                  GST_TIME_ARGS (new_base_time));                gst_element_set_base_time (element, new_base_time);            } else {                GST_DEBUG_OBJECT (pipeline,                                  "NOT adjusting base_time because start_time is NONE");            }        } else {            GST_DEBUG_OBJECT (pipeline,                              "NOT adjusting base_time because we selected one before");        }        if (cur_clock)            gst_object_unref (cur_clock);        break;    }    case GST_STATE_CHANGE_PLAYING_TO_PAUSED:    {        /* we take a start_time snapshot before calling the children state changes         * so that they know about when the pipeline PAUSED. */        pipeline_update_start_time (element);        break;    }    case GST_STATE_CHANGE_PAUSED_TO_READY:    case GST_STATE_CHANGE_READY_TO_NULL:        break;    }    result = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);    switch (transition) {    case GST_STATE_CHANGE_NULL_TO_READY:        break;    case GST_STATE_CHANGE_READY_TO_PAUSED:    {        /* READY to PAUSED starts running_time from 0 */        reset_start_time (pipeline, 0);        break;    }    case GST_STATE_CHANGE_PAUSED_TO_PLAYING:        break;    case GST_STATE_CHANGE_PLAYING_TO_PAUSED:    {        /* Take a new snapshot of the start_time after calling the state change on         * all children. This will be the running_time of the pipeline when we go         * back to PLAYING */        pipeline_update_start_time (element);        break;    }    case GST_STATE_CHANGE_PAUSED_TO_READY:        break;    case GST_STATE_CHANGE_READY_TO_NULL:    {        GstBus *bus;        gboolean auto_flush;        /* grab some stuff before we release the lock to flush out the bus */        GST_OBJECT_LOCK (element);        if ((bus = element->bus))            gst_object_ref (bus);        auto_flush = pipeline->priv->auto_flush_bus;        GST_OBJECT_UNLOCK (element);        if (bus) {            if (auto_flush) {                gst_bus_set_flushing (bus, TRUE);            } else {                GST_INFO_OBJECT (element, "not flushing bus, auto-flushing disabled");            }            gst_object_unref (bus);        }        break;    }    }    return result;    /* ERRORS */invalid_clock:    {        /* we generate this error when the selected clock was not         * accepted by some element */        GST_ELEMENT_ERROR (pipeline, CORE, CLOCK,                           (_("Selected clock cannot be used in pipeline.")),                           ("Pipeline cannot operate with selected clock"));        GST_DEBUG_OBJECT (pipeline,                          "Pipeline cannot operate with selected clock %p", clock);        if (clock)            gst_object_unref (clock);        return GST_STATE_CHANGE_FAILURE;    }}
开发者ID:miettal,项目名称:armadillo420_standard_linux314,代码行数:101,


示例3: gst_cmml_enc_parse_tag_head

/* encode the CMML head tag and push the CMML headers */static voidgst_cmml_enc_parse_tag_head (GstCmmlEnc * enc, GstCmmlTagHead * head){    GList *headers = NULL;    GList *walk;    guchar *head_string;    GstCaps *caps;    GstBuffer *ident_buf, *preamble_buf, *head_buf;    GstBuffer *buffer;    if (enc->preamble == NULL)        goto flow_unexpected;    GST_INFO_OBJECT (enc, "parsing head tag");    enc->flow_return = gst_cmml_enc_new_ident_header (enc, &ident_buf);    if (enc->flow_return != GST_FLOW_OK)        goto alloc_error;    headers = g_list_append (headers, ident_buf);    enc->flow_return = gst_cmml_enc_new_buffer (enc,                       enc->preamble, strlen ((gchar *) enc->preamble), &preamble_buf);    if (enc->flow_return != GST_FLOW_OK)        goto alloc_error;    headers = g_list_append (headers, preamble_buf);    head_string = gst_cmml_parser_tag_head_to_string (enc->parser, head);    enc->flow_return = gst_cmml_enc_new_buffer (enc,                       head_string, strlen ((gchar *) head_string), &head_buf);    g_free (head_string);    if (enc->flow_return != GST_FLOW_OK)        goto alloc_error;    headers = g_list_append (headers, head_buf);    caps = gst_pad_get_caps (enc->srcpad);    caps = gst_cmml_enc_set_header_on_caps (enc, caps,                                            ident_buf, preamble_buf, head_buf);    while (headers) {        buffer = GST_BUFFER (headers->data);        /* set granulepos 0 on headers */        GST_BUFFER_OFFSET_END (buffer) = 0;        gst_buffer_set_caps (buffer, caps);        enc->flow_return = gst_cmml_enc_push (enc, buffer);        headers = g_list_delete_link (headers, headers);        if (enc->flow_return != GST_FLOW_OK)            goto push_error;    }    gst_caps_unref (caps);    enc->sent_headers = TRUE;    return;flow_unexpected:    GST_ELEMENT_ERROR (enc, STREAM, ENCODE,                       (NULL), ("got head tag before preamble"));    enc->flow_return = GST_FLOW_ERROR;    return;push_error:    gst_caps_unref (caps);    /* fallthrough */alloc_error:    for (walk = headers; walk; walk = walk->next)        gst_buffer_unref (GST_BUFFER (walk->data));    g_list_free (headers);    return;}
开发者ID:jwzl,项目名称:ossbuild,代码行数:72,


示例4: gst_visual_gl_change_state

static GstStateChangeReturngst_visual_gl_change_state (GstElement * element, GstStateChange transition){  GstVisualGL *visual = GST_VISUAL_GL (element);  GstStateChangeReturn ret;  switch (transition) {    case GST_STATE_CHANGE_NULL_TO_READY:      break;    case GST_STATE_CHANGE_READY_TO_PAUSED:    {      GstElement *parent = GST_ELEMENT (gst_element_get_parent (visual));      GstStructure *structure = NULL;      GstQuery *query = NULL;      gboolean isPerformed = FALSE;      gchar *name;      if (!parent) {        GST_ELEMENT_ERROR (visual, CORE, STATE_CHANGE, (NULL),            ("A parent bin is required"));        return FALSE;      }      name = gst_element_get_name (visual);      structure = gst_structure_new (name, NULL);      query = gst_query_new_application (GST_QUERY_CUSTOM, structure);      g_free (name);      isPerformed = gst_element_query (parent, query);      if (isPerformed) {        const GValue *id_value =            gst_structure_get_value (structure, "gstgldisplay");        if (G_VALUE_HOLDS_POINTER (id_value))          /* at least one gl element is after in our gl chain */          visual->display =              gst_object_ref (GST_GL_DISPLAY (g_value_get_pointer (id_value)));        else {          /* this gl filter is a sink in terms of the gl chain */          visual->display = gst_gl_display_new ();          gst_gl_display_create_context (visual->display, 0);          //TODO visual->external_gl_context);        }        gst_visual_gl_reset (visual);        visual->actor =            visual_actor_new (GST_VISUAL_GL_GET_CLASS (visual)->plugin->info->            plugname);        visual->video = visual_video_new ();        visual->audio = visual_audio_new ();        if (!visual->actor || !visual->video)          goto actor_setup_failed;        gst_gl_display_thread_add (visual->display,            (GstGLDisplayThreadFunc) actor_setup, visual);        if (visual->actor_setup_result != 0)          goto actor_setup_failed;        else          visual_actor_set_video (visual->actor, visual->video);      }      gst_query_unref (query);      gst_object_unref (GST_OBJECT (parent));      if (!isPerformed)        return GST_STATE_CHANGE_FAILURE;    }      break;    case GST_STATE_CHANGE_PAUSED_TO_PLAYING:      break;    default:      break;  }  ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);  switch (transition) {    case GST_STATE_CHANGE_PLAYING_TO_PAUSED:      break;    case GST_STATE_CHANGE_PAUSED_TO_READY:    {      if (visual->fbo) {        gst_gl_display_del_fbo (visual->display, visual->fbo,            visual->depthbuffer);        visual->fbo = 0;        visual->depthbuffer = 0;      }      if (visual->midtexture) {        gst_gl_display_del_texture (visual->display, visual->midtexture,            visual->width, visual->height);        visual->midtexture = 0;      }      if (visual->display) {        gst_object_unref (visual->display);        visual->display = NULL;      }//.........这里部分代码省略.........
开发者ID:asrashley,项目名称:gst-plugins-bad,代码行数:101,


示例5: gst_rtp_ssrc_demux_rtcp_chain

static GstFlowReturngst_rtp_ssrc_demux_rtcp_chain (GstPad * pad, GstObject * parent,    GstBuffer * buf){  GstFlowReturn ret;  GstRtpSsrcDemux *demux;  guint32 ssrc;  GstRTCPPacket packet;  GstRTCPBuffer rtcp = { NULL, };  GstPad *srcpad;  GstRtpSsrcDemuxPad *dpad;  demux = GST_RTP_SSRC_DEMUX (parent);  if (!gst_rtcp_buffer_validate (buf))    goto invalid_rtcp;  gst_rtcp_buffer_map (buf, GST_MAP_READ, &rtcp);  if (!gst_rtcp_buffer_get_first_packet (&rtcp, &packet)) {    gst_rtcp_buffer_unmap (&rtcp);    goto invalid_rtcp;  }  /* first packet must be SR or RR or else the validate would have failed */  switch (gst_rtcp_packet_get_type (&packet)) {    case GST_RTCP_TYPE_SR:      /* get the ssrc so that we can route it to the right source pad */      gst_rtcp_packet_sr_get_sender_info (&packet, &ssrc, NULL, NULL, NULL,          NULL);      break;    default:      goto unexpected_rtcp;  }  gst_rtcp_buffer_unmap (&rtcp);  GST_DEBUG_OBJECT (demux, "received RTCP of SSRC %08x", ssrc);  srcpad = find_or_create_demux_pad_for_ssrc (demux, ssrc, RTCP_PAD);  if (srcpad == NULL)    goto create_failed;  /* push to srcpad */  ret = gst_pad_push (srcpad, buf);  if (ret != GST_FLOW_OK) {    /* check if the ssrc still there, may have been removed */    GST_PAD_LOCK (demux);    dpad = find_demux_pad_for_ssrc (demux, ssrc);    if (dpad == NULL || dpad->rtcp_pad != srcpad) {      /* SSRC was removed during the push ... ignore the error */      ret = GST_FLOW_OK;    }    GST_PAD_UNLOCK (demux);  }  gst_object_unref (srcpad);  return ret;  /* ERRORS */invalid_rtcp:  {    /* this is fatal and should be filtered earlier */    GST_ELEMENT_ERROR (demux, STREAM, DECODE, (NULL),        ("Dropping invalid RTCP packet"));    gst_buffer_unref (buf);    return GST_FLOW_ERROR;  }unexpected_rtcp:  {    GST_DEBUG_OBJECT (demux, "dropping unexpected RTCP packet");    gst_buffer_unref (buf);    return GST_FLOW_OK;  }create_failed:  {    GST_ELEMENT_ERROR (demux, STREAM, DECODE, (NULL),        ("Could not create new pad"));    gst_buffer_unref (buf);    return GST_FLOW_ERROR;  }}
开发者ID:collects,项目名称:gst-plugins-good,代码行数:82,


示例6: gst_ffmpegvidenc_handle_frame

static GstFlowReturngst_ffmpegvidenc_handle_frame (GstVideoEncoder * encoder,    GstVideoCodecFrame * frame){  GstFFMpegVidEnc *ffmpegenc = (GstFFMpegVidEnc *) encoder;  GstBuffer *outbuf;  gint ret_size = 0, c;  GstVideoInfo *info = &ffmpegenc->input_state->info;  GstVideoFrame vframe;  if (GST_VIDEO_CODEC_FRAME_IS_FORCE_KEYFRAME (frame))    ffmpegenc->picture->pict_type = AV_PICTURE_TYPE_I;  if (!gst_video_frame_map (&vframe, info, frame->input_buffer, GST_MAP_READ)) {    GST_ERROR_OBJECT (encoder, "Failed to map input buffer");    return GST_FLOW_ERROR;  }  /* Fill avpicture */  for (c = 0; c < AV_NUM_DATA_POINTERS; c++) {    if (c < GST_VIDEO_INFO_N_COMPONENTS (info)) {      ffmpegenc->picture->data[c] = GST_VIDEO_FRAME_PLANE_DATA (&vframe, c);      ffmpegenc->picture->linesize[c] =          GST_VIDEO_FRAME_COMP_STRIDE (&vframe, c);    } else {      ffmpegenc->picture->data[c] = NULL;      ffmpegenc->picture->linesize[c] = 0;    }  }  ffmpegenc->picture->pts =      gst_ffmpeg_time_gst_to_ff (frame->pts /      ffmpegenc->context->ticks_per_frame, ffmpegenc->context->time_base);  ffmpegenc_setup_working_buf (ffmpegenc);  ret_size = avcodec_encode_video (ffmpegenc->context,      ffmpegenc->working_buf, ffmpegenc->working_buf_size, ffmpegenc->picture);  gst_video_frame_unmap (&vframe);  if (ret_size < 0)    goto encode_fail;  /* Encoder needs more data */  if (!ret_size)    return GST_FLOW_OK;  /* save stats info if there is some as well as a stats file */  if (ffmpegenc->file && ffmpegenc->context->stats_out)    if (fprintf (ffmpegenc->file, "%s", ffmpegenc->context->stats_out) < 0)      GST_ELEMENT_ERROR (ffmpegenc, RESOURCE, WRITE,          (("Could not write to file /"%s/"."), ffmpegenc->filename),          GST_ERROR_SYSTEM);  gst_video_codec_frame_unref (frame);  /* Get oldest frame */  frame = gst_video_encoder_get_oldest_frame (encoder);  /* Allocate output buffer */  if (gst_video_encoder_allocate_output_frame (encoder, frame,          ret_size) != GST_FLOW_OK) {    gst_video_codec_frame_unref (frame);    goto alloc_fail;  }  outbuf = frame->output_buffer;  gst_buffer_fill (outbuf, 0, ffmpegenc->working_buf, ret_size);  /* buggy codec may not set coded_frame */  if (ffmpegenc->context->coded_frame) {    if (ffmpegenc->context->coded_frame->key_frame)      GST_VIDEO_CODEC_FRAME_SET_SYNC_POINT (frame);  } else    GST_WARNING_OBJECT (ffmpegenc, "codec did not provide keyframe info");  /* Reset frame type */  if (ffmpegenc->picture->pict_type)    ffmpegenc->picture->pict_type = 0;  return gst_video_encoder_finish_frame (encoder, frame);  /* ERRORS */encode_fail:  {#ifndef GST_DISABLE_GST_DEBUG    GstFFMpegVidEncClass *oclass =        (GstFFMpegVidEncClass *) (G_OBJECT_GET_CLASS (ffmpegenc));    GST_ERROR_OBJECT (ffmpegenc,        "avenc_%s: failed to encode buffer", oclass->in_plugin->name);#endif /* GST_DISABLE_GST_DEBUG */    return GST_FLOW_OK;  }alloc_fail:  {#ifndef GST_DISABLE_GST_DEBUG    GstFFMpegVidEncClass *oclass =        (GstFFMpegVidEncClass *) (G_OBJECT_GET_CLASS (ffmpegenc));    GST_ERROR_OBJECT (ffmpegenc,//.........这里部分代码省略.........
开发者ID:cablelabs,项目名称:gst-libav,代码行数:101,


示例7: gst_shout2send_start

static gbooleangst_shout2send_start (GstBaseSink * basesink){  GstShout2send *sink = GST_SHOUT2SEND (basesink);  const gchar *cur_prop;  gshort proto = 3;  gchar *version_string;  GST_DEBUG_OBJECT (sink, "starting");  sink->conn = shout_new ();  switch (sink->protocol) {    case SHOUT2SEND_PROTOCOL_XAUDIOCAST:      proto = SHOUT_PROTOCOL_XAUDIOCAST;      break;    case SHOUT2SEND_PROTOCOL_ICY:      proto = SHOUT_PROTOCOL_ICY;      break;    case SHOUT2SEND_PROTOCOL_HTTP:      proto = SHOUT_PROTOCOL_HTTP;      break;  }  cur_prop = "protocol";  GST_DEBUG_OBJECT (sink, "setting protocol: %d", sink->protocol);  if (shout_set_protocol (sink->conn, proto) != SHOUTERR_SUCCESS)    goto set_failed;  /* --- FIXME: shout requires an ip, and fails if it is given a host. */  /* may want to put convert_to_ip(shout2send->ip) here */  cur_prop = "ip";  GST_DEBUG_OBJECT (sink, "setting ip: %s", sink->ip);  if (shout_set_host (sink->conn, sink->ip) != SHOUTERR_SUCCESS)    goto set_failed;  cur_prop = "port";  GST_DEBUG_OBJECT (sink, "setting port: %u", sink->port);  if (shout_set_port (sink->conn, sink->port) != SHOUTERR_SUCCESS)    goto set_failed;  cur_prop = "password";  GST_DEBUG_OBJECT (sink, "setting password: %s", sink->password);  if (shout_set_password (sink->conn, sink->password) != SHOUTERR_SUCCESS)    goto set_failed;  cur_prop = "streamname";  GST_DEBUG_OBJECT (sink, "setting %s: %s", cur_prop, sink->streamname);  if (shout_set_name (sink->conn, sink->streamname) != SHOUTERR_SUCCESS)    goto set_failed;  cur_prop = "description";  GST_DEBUG_OBJECT (sink, "setting %s: %s", cur_prop, sink->description);  if (shout_set_description (sink->conn, sink->description) != SHOUTERR_SUCCESS)    goto set_failed;  cur_prop = "genre";  GST_DEBUG_OBJECT (sink, "setting %s: %s", cur_prop, sink->genre);  if (shout_set_genre (sink->conn, sink->genre) != SHOUTERR_SUCCESS)    goto set_failed;  cur_prop = "mount";  GST_DEBUG_OBJECT (sink, "setting %s: %s", cur_prop, sink->mount);  if (shout_set_mount (sink->conn, sink->mount) != SHOUTERR_SUCCESS)    goto set_failed;  cur_prop = "username";  GST_DEBUG_OBJECT (sink, "setting %s: %s", cur_prop, "source");  if (shout_set_user (sink->conn, sink->username) != SHOUTERR_SUCCESS)    goto set_failed;  version_string = gst_version_string ();  cur_prop = "agent";  GST_DEBUG_OBJECT (sink, "setting %s: %s", cur_prop, version_string);  if (shout_set_agent (sink->conn, version_string) != SHOUTERR_SUCCESS) {    g_free (version_string);    goto set_failed;  }  g_free (version_string);  return TRUE;/* ERROR */set_failed:  {    GST_ELEMENT_ERROR (sink, LIBRARY, SETTINGS, (NULL),        ("Error setting %s: %s", cur_prop, shout_get_error (sink->conn)));    return FALSE;  }}
开发者ID:JJCG,项目名称:gst-plugins-good,代码行数:90,


示例8: gst_ffmpegvidenc_handle_frame

static GstFlowReturngst_ffmpegvidenc_handle_frame (GstVideoEncoder * encoder,                               GstVideoCodecFrame * frame){    GstFFMpegVidEnc *ffmpegenc = (GstFFMpegVidEnc *) encoder;    GstBuffer *outbuf;    gint ret = 0, c;    GstVideoInfo *info = &ffmpegenc->input_state->info;    AVPacket *pkt;    int have_data = 0;    BufferInfo *buffer_info;    if (ffmpegenc->interlaced) {        ffmpegenc->picture->interlaced_frame = TRUE;        /* if this is not the case, a filter element should be used to swap fields */        ffmpegenc->picture->top_field_first =            GST_BUFFER_FLAG_IS_SET (frame->input_buffer, GST_VIDEO_BUFFER_FLAG_TFF);    }    if (GST_VIDEO_CODEC_FRAME_IS_FORCE_KEYFRAME (frame))        ffmpegenc->picture->pict_type = AV_PICTURE_TYPE_I;    buffer_info = g_slice_new0 (BufferInfo);    buffer_info->buffer = gst_buffer_ref (frame->input_buffer);    if (!gst_video_frame_map (&buffer_info->vframe, info, frame->input_buffer,                              GST_MAP_READ)) {        GST_ERROR_OBJECT (encoder, "Failed to map input buffer");        gst_buffer_unref (buffer_info->buffer);        g_slice_free (BufferInfo, buffer_info);        gst_video_codec_frame_unref (frame);        return GST_FLOW_ERROR;    }    /* Fill avpicture */    ffmpegenc->picture->buf[0] =        av_buffer_create (NULL, 0, buffer_info_free, buffer_info, 0);    for (c = 0; c < AV_NUM_DATA_POINTERS; c++) {        if (c < GST_VIDEO_INFO_N_COMPONENTS (info)) {            ffmpegenc->picture->data[c] =                GST_VIDEO_FRAME_PLANE_DATA (&buffer_info->vframe, c);            ffmpegenc->picture->linesize[c] =                GST_VIDEO_FRAME_COMP_STRIDE (&buffer_info->vframe, c);        } else {            ffmpegenc->picture->data[c] = NULL;            ffmpegenc->picture->linesize[c] = 0;        }    }    ffmpegenc->picture->format = ffmpegenc->context->pix_fmt;    ffmpegenc->picture->width = GST_VIDEO_FRAME_WIDTH (&buffer_info->vframe);    ffmpegenc->picture->height = GST_VIDEO_FRAME_HEIGHT (&buffer_info->vframe);    ffmpegenc->picture->pts =        gst_ffmpeg_time_gst_to_ff (frame->pts /                                   ffmpegenc->context->ticks_per_frame, ffmpegenc->context->time_base);    have_data = 0;    pkt = g_slice_new0 (AVPacket);    ret =        avcodec_encode_video2 (ffmpegenc->context, pkt, ffmpegenc->picture,                               &have_data);    av_frame_unref (ffmpegenc->picture);    if (ret < 0 || !have_data)        g_slice_free (AVPacket, pkt);    if (ret < 0)        goto encode_fail;    /* Encoder needs more data */    if (!have_data) {        gst_video_codec_frame_unref (frame);        return GST_FLOW_OK;    }    /* save stats info if there is some as well as a stats file */    if (ffmpegenc->file && ffmpegenc->context->stats_out)        if (fprintf (ffmpegenc->file, "%s", ffmpegenc->context->stats_out) < 0)            GST_ELEMENT_ERROR (ffmpegenc, RESOURCE, WRITE,                               (("Could not write to file /"%s/"."), ffmpegenc->filename),                               GST_ERROR_SYSTEM);    gst_video_codec_frame_unref (frame);    /* Get oldest frame */    frame = gst_video_encoder_get_oldest_frame (encoder);    outbuf =        gst_buffer_new_wrapped_full (GST_MEMORY_FLAG_READONLY, pkt->data,                                     pkt->size, 0, pkt->size, pkt, gst_ffmpegvidenc_free_avpacket);    frame->output_buffer = outbuf;    if (pkt->flags & AV_PKT_FLAG_KEY)        GST_VIDEO_CODEC_FRAME_SET_SYNC_POINT (frame);    else        
GST_VIDEO_CODEC_FRAME_UNSET_SYNC_POINT (frame);//.........这里部分代码省略.........
开发者ID:GStreamer,项目名称:gst-libav,代码行数:101,


示例9: gst_dvbsrc_open_frontend

static gbooleangst_dvbsrc_open_frontend (GstDvbSrc * object, gboolean writable){  struct dvb_frontend_info fe_info;  const char *adapter_desc = NULL;  gchar *frontend_dev;  GstStructure *adapter_structure;  char *adapter_name = NULL;  frontend_dev = g_strdup_printf ("/dev/dvb/adapter%d/frontend%d",      object->adapter_number, object->frontend_number);  GST_INFO_OBJECT (object, "Using frontend device: %s", frontend_dev);  /* open frontend */  if ((object->fd_frontend =          open (frontend_dev, writable ? O_RDWR : O_RDONLY)) < 0) {    switch (errno) {      case ENOENT:        GST_ELEMENT_ERROR (object, RESOURCE, NOT_FOUND,            (_("Device /"%s/" does not exist."), frontend_dev), (NULL));        break;      default:        GST_ELEMENT_ERROR (object, RESOURCE, OPEN_READ_WRITE,            (_("Could not open frontend device /"%s/"."), frontend_dev),            GST_ERROR_SYSTEM);        break;    }    close (object->fd_frontend);    g_free (frontend_dev);    return FALSE;  }  GST_DEBUG_OBJECT (object, "Device opened, querying information");  if (ioctl (object->fd_frontend, FE_GET_INFO, &fe_info) < 0) {    GST_ELEMENT_ERROR (object, RESOURCE, SETTINGS,        (_("Could not get settings from frontend device /"%s/"."),            frontend_dev), GST_ERROR_SYSTEM);    close (object->fd_frontend);    g_free (frontend_dev);    return FALSE;  }  GST_DEBUG_OBJECT (object, "Got information about adapter : %s", fe_info.name);  adapter_name = g_strdup (fe_info.name);  object->adapter_type = fe_info.type;  switch (object->adapter_type) {    case FE_QPSK:      adapter_desc = "DVB-S";      adapter_structure = gst_structure_new ("dvb-adapter",          "type", G_TYPE_STRING, adapter_desc,          "name", G_TYPE_STRING, adapter_name,          "auto-fec", G_TYPE_BOOLEAN, fe_info.caps & FE_CAN_FEC_AUTO, NULL);      break;    case FE_QAM:      adapter_desc = "DVB-C";      adapter_structure = gst_structure_new ("dvb-adapter",          "type", G_TYPE_STRING, adapter_desc,          "name", G_TYPE_STRING, adapter_name,          "auto-inversion", G_TYPE_BOOLEAN,          fe_info.caps & FE_CAN_INVERSION_AUTO, "auto-qam", G_TYPE_BOOLEAN,          fe_info.caps & FE_CAN_QAM_AUTO, "auto-fec", G_TYPE_BOOLEAN,          fe_info.caps & FE_CAN_FEC_AUTO, NULL);      break;    case FE_OFDM:      adapter_desc = "DVB-T";      adapter_structure = gst_structure_new ("dvb-adapter",          "type", G_TYPE_STRING, adapter_desc,          "name", G_TYPE_STRING, adapter_name,          "auto-inversion", G_TYPE_BOOLEAN,          fe_info.caps & FE_CAN_INVERSION_AUTO, "auto-qam", G_TYPE_BOOLEAN,          fe_info.caps & FE_CAN_QAM_AUTO, "auto-transmission-mode",          G_TYPE_BOOLEAN, fe_info.caps & FE_CAN_TRANSMISSION_MODE_AUTO,          "auto-guard-interval", G_TYPE_BOOLEAN,          fe_info.caps & FE_CAN_GUARD_INTERVAL_AUTO, "auto-hierarchy",          G_TYPE_BOOLEAN, fe_info.caps % FE_CAN_HIERARCHY_AUTO, "auto-fec",          G_TYPE_BOOLEAN, fe_info.caps & FE_CAN_FEC_AUTO, NULL);      break;    case FE_ATSC:      adapter_desc = "ATSC";      adapter_structure = gst_structure_new ("dvb-adapter",          "type", G_TYPE_STRING, adapter_desc,          "name", G_TYPE_STRING, adapter_name, NULL);      break;    default:      g_error ("Unknown frontend type: %d", object->adapter_type);      adapter_structure = gst_structure_new ("dvb-adapter",          "type", G_TYPE_STRING, "unknown", NULL);  }  GST_INFO_OBJECT (object, "DVB card: %s ", adapter_name);  gst_element_post_message (GST_ELEMENT_CAST (object), gst_message_new_element      
(GST_OBJECT (object), adapter_structure));  g_free (frontend_dev);  g_free (adapter_name);  return TRUE;//.........这里部分代码省略.........
开发者ID:cbetz421,项目名称:gst-plugins-bad,代码行数:101,


示例10: gst_dvbsrc_read_device

static GstFlowReturngst_dvbsrc_read_device (GstDvbSrc * object, int size, GstBuffer ** buffer){  gint count = 0;  gint ret_val = 0;  GstBuffer *buf = gst_buffer_new_and_alloc (size);  GstClockTime timeout = object->timeout * GST_USECOND;  GstMapInfo map;  g_return_val_if_fail (GST_IS_BUFFER (buf), GST_FLOW_ERROR);  if (object->fd_dvr < 0)    return GST_FLOW_ERROR;  gst_buffer_map (buf, &map, GST_MAP_WRITE);  while (count < size) {    ret_val = gst_poll_wait (object->poll, timeout);    GST_LOG_OBJECT (object, "select returned %d", ret_val);    if (G_UNLIKELY (ret_val < 0)) {      if (errno == EBUSY)        goto stopped;      else if (errno == EINTR)        continue;      else        goto select_error;    } else if (G_UNLIKELY (ret_val == 0)) {      /* timeout, post element message */      gst_element_post_message (GST_ELEMENT_CAST (object),          gst_message_new_element (GST_OBJECT (object),              gst_structure_new_empty ("dvb-read-failure")));    } else {      int nread = read (object->fd_dvr, map.data + count, size - count);      if (G_UNLIKELY (nread < 0)) {        GST_WARNING_OBJECT            (object,            "Unable to read from device: /dev/dvb/adapter%d/dvr%d (%d)",            object->adapter_number, object->frontend_number, errno);        gst_element_post_message (GST_ELEMENT_CAST (object),            gst_message_new_element (GST_OBJECT (object),                gst_structure_new_empty ("dvb-read-failure")));      } else        count = count + nread;    }  }  gst_buffer_unmap (buf, &map);  gst_buffer_resize (buf, 0, count);  *buffer = buf;  return GST_FLOW_OK;stopped:  {    GST_DEBUG_OBJECT (object, "stop called");    gst_buffer_unmap (buf, &map);    gst_buffer_unref (buf);    return GST_FLOW_FLUSHING;  }select_error:  {    GST_ELEMENT_ERROR (object, RESOURCE, READ, (NULL),        ("select error %d: %s (%d)", ret_val, g_strerror (errno), errno));    gst_buffer_unmap (buf, &map);    gst_buffer_unref (buf);    return GST_FLOW_ERROR;  }}
开发者ID:cbetz421,项目名称:gst-plugins-bad,代码行数:68,


示例11: gst_gdiscreencapsrc_create

static GstFlowReturngst_gdiscreencapsrc_create (GstPushSrc * push_src, GstBuffer ** buf){  GstGDIScreenCapSrc *src = GST_GDISCREENCAPSRC (push_src);  GstBuffer *new_buf;  GstFlowReturn res;  gint new_buf_size;  GstClock *clock;  GstClockTime time = GST_CLOCK_TIME_NONE;  GstClockTime base_time;  if (G_UNLIKELY (!src->info.bmiHeader.biWidth ||          !src->info.bmiHeader.biHeight)) {    GST_ELEMENT_ERROR (src, CORE, NEGOTIATION, (NULL),        ("format wasn't negotiated before create function"));    return GST_FLOW_NOT_NEGOTIATED;  } else if (G_UNLIKELY (src->rate_numerator == 0 && src->frames == 1)) {    GST_DEBUG_OBJECT (src, "eos: 0 framerate, frame %d", (gint) src->frames);    return GST_FLOW_UNEXPECTED;  }  new_buf_size = GST_ROUND_UP_4 (src->info.bmiHeader.biWidth * 3) *      (-src->info.bmiHeader.biHeight);  GST_LOG_OBJECT (src,      "creating buffer of %d bytes with %dx%d image for frame %d",      new_buf_size, (gint) src->info.bmiHeader.biWidth,      (gint) (-src->info.bmiHeader.biHeight), (gint) src->frames);  res =      gst_pad_alloc_buffer_and_set_caps (GST_BASE_SRC_PAD (src),      GST_BUFFER_OFFSET_NONE, new_buf_size,      GST_PAD_CAPS (GST_BASE_SRC_PAD (push_src)), &new_buf);  if (res != GST_FLOW_OK) {    GST_DEBUG_OBJECT (src, "could not allocate buffer, reason %s",        gst_flow_get_name (res));    return res;  }  clock = gst_element_get_clock (GST_ELEMENT (src));  if (clock) {    /* Calculate sync time. */    GstClockTime frame_time =        gst_util_uint64_scale_int (src->frames * GST_SECOND,        src->rate_denominator, src->rate_numerator);    time = gst_clock_get_time (clock);    base_time = gst_element_get_base_time (GST_ELEMENT (src));    GST_BUFFER_TIMESTAMP (new_buf) = MAX (time - base_time, frame_time);  } else {    GST_BUFFER_TIMESTAMP (new_buf) = GST_CLOCK_TIME_NONE;  }  /* Do screen capture and put it into buffer... */  gst_gdiscreencapsrc_screen_capture (src, new_buf);  if (src->rate_numerator) {    GST_BUFFER_DURATION (new_buf) =        gst_util_uint64_scale_int (GST_SECOND,        src->rate_denominator, src->rate_numerator);    if (clock) {      GST_BUFFER_DURATION (new_buf) =          MAX (GST_BUFFER_DURATION (new_buf),          gst_clock_get_time (clock) - time);    }  } else {    /* NONE means forever */    GST_BUFFER_DURATION (new_buf) = GST_CLOCK_TIME_NONE;  }  GST_BUFFER_OFFSET (new_buf) = src->frames;  src->frames++;  GST_BUFFER_OFFSET_END (new_buf) = src->frames;  gst_object_unref (clock);  *buf = new_buf;  return GST_FLOW_OK;}
开发者ID:ylatuya,项目名称:gst-plugins-bad,代码行数:79,


示例12: gst_multi_file_src_create

static GstFlowReturngst_multi_file_src_create (GstPushSrc * src, GstBuffer ** buffer){  GstMultiFileSrc *multifilesrc;  gsize size;  gchar *data;  gchar *filename;  GstBuffer *buf;  gboolean ret;  GError *error = NULL;  multifilesrc = GST_MULTI_FILE_SRC (src);  if (multifilesrc->index < multifilesrc->start_index) {    multifilesrc->index = multifilesrc->start_index;  }  filename = gst_multi_file_src_get_filename (multifilesrc);  GST_DEBUG_OBJECT (multifilesrc, "reading from file /"%s/".", filename);  ret = g_file_get_contents (filename, &data, &size, &error);  if (!ret) {    if (multifilesrc->successful_read) {      /* If we've read at least one buffer successfully, not finding the       * next file is EOS. */      g_free (filename);      if (error != NULL)        g_error_free (error);      if (multifilesrc->loop) {        error = NULL;        multifilesrc->index = multifilesrc->start_index;        filename = gst_multi_file_src_get_filename (multifilesrc);        ret = g_file_get_contents (filename, &data, &size, &error);        if (!ret) {          g_free (filename);          if (error != NULL)            g_error_free (error);          return GST_FLOW_UNEXPECTED;        }      } else {        return GST_FLOW_UNEXPECTED;      }    } else {      goto handle_error;    }  }  multifilesrc->successful_read = TRUE;  multifilesrc->index++;  if (multifilesrc->stop_index != -1 &&      multifilesrc->index >= multifilesrc->stop_index) {    multifilesrc->index = multifilesrc->start_index;  }  buf = gst_buffer_new ();  GST_BUFFER_DATA (buf) = (unsigned char *) data;  GST_BUFFER_MALLOCDATA (buf) = GST_BUFFER_DATA (buf);  GST_BUFFER_SIZE (buf) = size;  GST_BUFFER_OFFSET (buf) = multifilesrc->offset;  GST_BUFFER_OFFSET_END (buf) = multifilesrc->offset + size;  multifilesrc->offset += size;  gst_buffer_set_caps (buf, multifilesrc->caps);  GST_DEBUG_OBJECT (multifilesrc, "read file /"%s/".", filename);  g_free (filename);  *buffer = buf;  return GST_FLOW_OK;handle_error:  {    if (error != NULL) {      GST_ELEMENT_ERROR (multifilesrc, RESOURCE, READ,          ("Error while reading from file /"%s/".", filename),          ("%s", error->message));      g_error_free (error);    } else {      GST_ELEMENT_ERROR (multifilesrc, RESOURCE, READ,          ("Error while reading from file /"%s/".", filename),          ("%s", g_strerror (errno)));    }    g_free (filename);    return GST_FLOW_ERROR;  }}
开发者ID:alessandrod,项目名称:gst-plugins-good,代码行数:88,


示例13: gst_pngdec_caps_create_and_set

static GstFlowReturngst_pngdec_caps_create_and_set (GstPngDec * pngdec){  GstFlowReturn ret = GST_FLOW_OK;  GstCaps *caps = NULL, *res = NULL;  GstPadTemplate *templ = NULL;  gint bpc = 0, color_type;  png_uint_32 width, height;  g_return_val_if_fail (GST_IS_PNGDEC (pngdec), GST_FLOW_ERROR);  /* Get bits per channel */  bpc = png_get_bit_depth (pngdec->png, pngdec->info);  /* We don't handle 16 bits per color, strip down to 8 */  if (bpc == 16) {    GST_LOG_OBJECT (pngdec,        "this is a 16 bits per channel PNG image, strip down to 8 bits");    png_set_strip_16 (pngdec->png);  }  /* Get Color type */  color_type = png_get_color_type (pngdec->png, pngdec->info);#if 0  /* We used to have this HACK to reverse the outgoing bytes, but the problem   * that originally required the hack seems to have been in ffmpegcolorspace's   * RGBA descriptions. It doesn't seem needed now that's fixed, but might   * still be needed on big-endian systems, I'm not sure. J.S. 6/7/2007 */  if (color_type == PNG_COLOR_TYPE_RGB_ALPHA)    png_set_bgr (pngdec->png);#endif  /* Gray scale converted to RGB and upscaled to 8 bits */  if ((color_type == PNG_COLOR_TYPE_GRAY_ALPHA) ||      (color_type == PNG_COLOR_TYPE_GRAY)) {    GST_LOG_OBJECT (pngdec, "converting grayscale png to RGB");    png_set_gray_to_rgb (pngdec->png);    if (bpc < 8) {              /* Convert to 8 bits */      GST_LOG_OBJECT (pngdec, "converting grayscale image to 8 bits");#if PNG_LIBPNG_VER < 10400      png_set_gray_1_2_4_to_8 (pngdec->png);#else      png_set_expand_gray_1_2_4_to_8 (pngdec->png);#endif    }  }  /* Palette converted to RGB */  if (color_type == PNG_COLOR_TYPE_PALETTE) {    GST_LOG_OBJECT (pngdec, "converting palette png to RGB");    png_set_palette_to_rgb (pngdec->png);  }  /* Update the info structure */  png_read_update_info (pngdec->png, pngdec->info);  /* Get IHDR header again after transformation settings */  png_get_IHDR (pngdec->png, pngdec->info, &width, &height,      &bpc, &pngdec->color_type, NULL, NULL, NULL);  pngdec->width = width;  pngdec->height = height;  GST_LOG_OBJECT (pngdec, "this is a %dx%d PNG image", pngdec->width,      pngdec->height);  switch (pngdec->color_type) {    case PNG_COLOR_TYPE_RGB:      GST_LOG_OBJECT (pngdec, "we have no alpha channel, depth is 24 bits");      pngdec->bpp = 24;      break;    case PNG_COLOR_TYPE_RGB_ALPHA:      GST_LOG_OBJECT (pngdec, "we have an alpha channel, depth is 32 bits");      pngdec->bpp = 32;      break;    default:      GST_ELEMENT_ERROR (pngdec, STREAM, NOT_IMPLEMENTED, (NULL),          ("pngdec does not support this color type"));      ret = GST_FLOW_NOT_SUPPORTED;      goto beach;  }  caps = gst_caps_new_simple ("video/x-raw-rgb",      "width", G_TYPE_INT, pngdec->width,      "height", G_TYPE_INT, pngdec->height,      "bpp", G_TYPE_INT, pngdec->bpp,      "framerate", GST_TYPE_FRACTION, pngdec->fps_n, pngdec->fps_d, NULL);  templ = gst_static_pad_template_get (&gst_pngdec_src_pad_template);  res = gst_caps_intersect (caps, gst_pad_template_get_caps (templ));  gst_caps_unref (caps);  gst_object_unref (templ);  if (!gst_pad_set_caps (pngdec->srcpad, res))    ret = GST_FLOW_NOT_NEGOTIATED;//.........这里部分代码省略.........
开发者ID:spunktsch,项目名称:svtplayer,代码行数:101,


示例14: gst_ffmpegvidenc_flush_buffers

static GstFlowReturngst_ffmpegvidenc_flush_buffers (GstFFMpegVidEnc * ffmpegenc, gboolean send){    GstVideoCodecFrame *frame;    GstFlowReturn flow_ret = GST_FLOW_OK;    GstBuffer *outbuf;    gint ret;    AVPacket *pkt;    int have_data = 0;    GST_DEBUG_OBJECT (ffmpegenc, "flushing buffers with sending %d", send);    /* no need to empty codec if there is none */    if (!ffmpegenc->opened)        goto done;    while ((frame =                gst_video_encoder_get_oldest_frame (GST_VIDEO_ENCODER (ffmpegenc)))) {        pkt = g_slice_new0 (AVPacket);        have_data = 0;        ret = avcodec_encode_video2 (ffmpegenc->context, pkt, NULL, &have_data);        if (ret < 0) {              /* there should be something, notify and give up */#ifndef GST_DISABLE_GST_DEBUG            GstFFMpegVidEncClass *oclass =                (GstFFMpegVidEncClass *) (G_OBJECT_GET_CLASS (ffmpegenc));            GST_WARNING_OBJECT (ffmpegenc,                                "avenc_%s: failed to flush buffer", oclass->in_plugin->name);#endif /* GST_DISABLE_GST_DEBUG */            g_slice_free (AVPacket, pkt);            gst_video_codec_frame_unref (frame);            break;        }        /* save stats info if there is some as well as a stats file */        if (ffmpegenc->file && ffmpegenc->context->stats_out)            if (fprintf (ffmpegenc->file, "%s", ffmpegenc->context->stats_out) < 0)                GST_ELEMENT_ERROR (ffmpegenc, RESOURCE, WRITE,                                   (("Could not write to file /"%s/"."), ffmpegenc->filename),                                   GST_ERROR_SYSTEM);        if (send && have_data) {            outbuf =                gst_buffer_new_wrapped_full (GST_MEMORY_FLAG_READONLY, pkt->data,                                             pkt->size, 0, pkt->size, pkt, gst_ffmpegvidenc_free_avpacket);            frame->output_buffer = outbuf;            if (pkt->flags & AV_PKT_FLAG_KEY)                GST_VIDEO_CODEC_FRAME_SET_SYNC_POINT (frame);            else                GST_VIDEO_CODEC_FRAME_UNSET_SYNC_POINT (frame);            flow_ret =                gst_video_encoder_finish_frame (GST_VIDEO_ENCODER (ffmpegenc), frame);        } else {            /* no frame attached, so will be skipped and removed from frame list */            gst_video_encoder_finish_frame (GST_VIDEO_ENCODER (ffmpegenc), frame);        }    }done:    return flow_ret;}
开发者ID:GStreamer,项目名称:gst-libav,代码行数:65,


示例15: gst_pngdec_task

static voidgst_pngdec_task (GstPad * pad){  GstPngDec *pngdec;  GstBuffer *buffer = NULL;  size_t buffer_size = 0;  gint i = 0;  png_bytep *rows, inp;  png_uint_32 rowbytes;  GstFlowReturn ret = GST_FLOW_OK;  pngdec = GST_PNGDEC (GST_OBJECT_PARENT (pad));  GST_LOG_OBJECT (pngdec, "read frame");  /* Let libpng come back here on error */  if (setjmp (png_jmpbuf (pngdec->png))) {    ret = GST_FLOW_ERROR;    goto pause;  }  /* Set reading callback */  png_set_read_fn (pngdec->png, pngdec, user_read_data);  /* Read info */  png_read_info (pngdec->png, pngdec->info);  /* Generate the caps and configure */  ret = gst_pngdec_caps_create_and_set (pngdec);  if (ret != GST_FLOW_OK) {    goto pause;  }  /* Allocate output buffer */  rowbytes = png_get_rowbytes (pngdec->png, pngdec->info);  if (rowbytes > (G_MAXUINT32 - 3) || pngdec->height > G_MAXUINT32 / rowbytes) {    ret = GST_FLOW_ERROR;    goto pause;  }  rowbytes = GST_ROUND_UP_4 (rowbytes);  buffer_size = pngdec->height * rowbytes;  ret =      gst_pad_alloc_buffer_and_set_caps (pngdec->srcpad, GST_BUFFER_OFFSET_NONE,      buffer_size, GST_PAD_CAPS (pngdec->srcpad), &buffer);  if (ret != GST_FLOW_OK)    goto pause;  rows = (png_bytep *) g_malloc (sizeof (png_bytep) * pngdec->height);  inp = GST_BUFFER_DATA (buffer);  for (i = 0; i < pngdec->height; i++) {    rows[i] = inp;    inp += rowbytes;  }  /* Read the actual picture */  png_read_image (pngdec->png, rows);  g_free (rows);  /* Push the raw RGB frame */  ret = gst_pad_push (pngdec->srcpad, buffer);  if (ret != GST_FLOW_OK)    goto pause;  /* And we are done */  gst_pad_pause_task (pngdec->sinkpad);  gst_pad_push_event (pngdec->srcpad, gst_event_new_eos ());  return;pause:  {    GST_INFO_OBJECT (pngdec, "pausing task, reason %s",        gst_flow_get_name (ret));    gst_pad_pause_task (pngdec->sinkpad);    if (ret == GST_FLOW_UNEXPECTED) {      gst_pad_push_event (pngdec->srcpad, gst_event_new_eos ());    } else if (ret == GST_FLOW_NOT_LINKED || ret < GST_FLOW_UNEXPECTED) {      GST_ELEMENT_ERROR (pngdec, STREAM, FAILED,          (_("Internal data stream error.")),          ("stream stopped, reason %s", gst_flow_get_name (ret)));      gst_pad_push_event (pngdec->srcpad, gst_event_new_eos ());    }  }}
开发者ID:spunktsch,项目名称:svtplayer,代码行数:85,


示例16: gst_sdlv_process_events

/* Process pending events. Call with ->lock held */static voidgst_sdlv_process_events (GstSDLVideoSink * sdlvideosink){  SDL_Event event;  int numevents;  char *keysym = NULL;  do {    SDL_PumpEvents ();    numevents = SDL_PeepEvents (&event, 1, SDL_GETEVENT,        SDL_KEYDOWNMASK | SDL_KEYUPMASK |        SDL_MOUSEMOTIONMASK | SDL_MOUSEBUTTONDOWNMASK |        SDL_MOUSEBUTTONUPMASK | SDL_QUITMASK | SDL_VIDEORESIZEMASK);    if (numevents > 0 && (event.type == SDL_KEYUP || event.type == SDL_KEYDOWN)) {      keysym = SDL_GetKeyName (event.key.keysym.sym);    }    if (numevents > 0) {      g_mutex_unlock (sdlvideosink->lock);      switch (event.type) {        case SDL_MOUSEMOTION:          gst_navigation_send_mouse_event (GST_NAVIGATION (sdlvideosink),              "mouse-move", 0, event.motion.x, event.motion.y);          break;        case SDL_MOUSEBUTTONDOWN:          gst_navigation_send_mouse_event (GST_NAVIGATION (sdlvideosink),              "mouse-button-press",              event.button.button, event.button.x, event.button.y);          break;        case SDL_MOUSEBUTTONUP:          gst_navigation_send_mouse_event (GST_NAVIGATION (sdlvideosink),              "mouse-button-release",              event.button.button, event.button.x, event.button.y);          break;        case SDL_KEYUP:          GST_DEBUG ("key press event %s !",              SDL_GetKeyName (event.key.keysym.sym));          gst_navigation_send_key_event (GST_NAVIGATION (sdlvideosink),              "key-release", keysym);          break;        case SDL_KEYDOWN:          if (SDLK_ESCAPE != event.key.keysym.sym) {            GST_DEBUG ("key press event %s !",                SDL_GetKeyName (event.key.keysym.sym));            gst_navigation_send_key_event (GST_NAVIGATION (sdlvideosink),                "key-press", keysym);            break;          } else {            /* fall through */          }        case SDL_QUIT:          sdlvideosink->running = FALSE;          GST_ELEMENT_ERROR (sdlvideosink, RESOURCE, OPEN_WRITE,              ("Video output device is gone."),              ("We were running fullscreen and user "                  "pressed the ESC key, stopping playback."));          break;        case SDL_VIDEORESIZE:          /* create a SDL window of the size requested by the user */          g_mutex_lock (sdlvideosink->lock);          GST_VIDEO_SINK_WIDTH (sdlvideosink) = event.resize.w;          GST_VIDEO_SINK_HEIGHT (sdlvideosink) = event.resize.h;          gst_sdlvideosink_create (sdlvideosink);          g_mutex_unlock (sdlvideosink->lock);          break;      }      g_mutex_lock (sdlvideosink->lock);    }  } while (numevents > 0);}
开发者ID:wang-zhao,项目名称:gstreamer-win,代码行数:72,


示例17: gst_ffmpegvidenc_set_format

//.........这里部分代码省略.........   * and quite some codecs do not make up their own mind about that   * in any case, _NONE can never work out later on */  if (pix_fmt == PIX_FMT_NONE)    goto bad_input_fmt;  /* some codecs support more than one format, first auto-choose one */  GST_DEBUG_OBJECT (ffmpegenc, "picking an output format ...");  allowed_caps = gst_pad_get_allowed_caps (GST_VIDEO_ENCODER_SRC_PAD (encoder));  if (!allowed_caps) {    GST_DEBUG_OBJECT (ffmpegenc, "... but no peer, using template caps");    /* we need to copy because get_allowed_caps returns a ref, and     * get_pad_template_caps doesn't */    allowed_caps =        gst_pad_get_pad_template_caps (GST_VIDEO_ENCODER_SRC_PAD (encoder));  }  GST_DEBUG_OBJECT (ffmpegenc, "chose caps %" GST_PTR_FORMAT, allowed_caps);  gst_ffmpeg_caps_with_codecid (oclass->in_plugin->id,      oclass->in_plugin->type, allowed_caps, ffmpegenc->context);  /* try to set this caps on the other side */  other_caps = gst_ffmpeg_codecid_to_caps (oclass->in_plugin->id,      ffmpegenc->context, TRUE);  if (!other_caps) {    gst_caps_unref (allowed_caps);    goto unsupported_codec;  }  icaps = gst_caps_intersect (allowed_caps, other_caps);  gst_caps_unref (allowed_caps);  gst_caps_unref (other_caps);  if (gst_caps_is_empty (icaps)) {    gst_caps_unref (icaps);    return FALSE;  }  icaps = gst_caps_truncate (icaps);  /* Store input state and set output state */  if (ffmpegenc->input_state)    gst_video_codec_state_unref (ffmpegenc->input_state);  ffmpegenc->input_state = gst_video_codec_state_ref (state);  output_format = gst_video_encoder_set_output_state (encoder, icaps, state);  gst_video_codec_state_unref (output_format);  /* success! */  ffmpegenc->opened = TRUE;  return TRUE;  /* ERRORS */open_file_err:  {    GST_ELEMENT_ERROR (ffmpegenc, RESOURCE, OPEN_WRITE,        (("Could not open file /"%s/" for writing."), ffmpegenc->filename),        GST_ERROR_SYSTEM);    return FALSE;  }file_read_err:  {    GST_ELEMENT_ERROR (ffmpegenc, RESOURCE, READ,        (("Could not get contents of file /"%s/"."), ffmpegenc->filename),        GST_ERROR_SYSTEM);    return FALSE;  }open_codec_fail:  {    if (ffmpegenc->context->priv_data)      gst_ffmpeg_avcodec_close (ffmpegenc->context);    if (ffmpegenc->context->stats_in)      g_free (ffmpegenc->context->stats_in);    GST_DEBUG_OBJECT (ffmpegenc, "avenc_%s: Failed to open libav codec",        oclass->in_plugin->name);    return FALSE;  }pix_fmt_err:  {    gst_ffmpeg_avcodec_close (ffmpegenc->context);    GST_DEBUG_OBJECT (ffmpegenc,        "avenc_%s: AV wants different colourspace (%d given, %d wanted)",        oclass->in_plugin->name, pix_fmt, ffmpegenc->context->pix_fmt);    return FALSE;  }bad_input_fmt:  {    GST_DEBUG_OBJECT (ffmpegenc, "avenc_%s: Failed to determine input format",        oclass->in_plugin->name);    return FALSE;  }unsupported_codec:  {    gst_ffmpeg_avcodec_close (ffmpegenc->context);    GST_DEBUG ("Unsupported codec - no caps found");    return FALSE;  }}
开发者ID:cablelabs,项目名称:gst-libav,代码行数:101,


示例18: gst_sdlvideosink_create

/* Must be called with the sdl lock held */static gbooleangst_sdlvideosink_create (GstSDLVideoSink * sdlvideosink){  if (GST_VIDEO_SINK_HEIGHT (sdlvideosink) <= 0)    GST_VIDEO_SINK_HEIGHT (sdlvideosink) = sdlvideosink->height;  if (GST_VIDEO_SINK_WIDTH (sdlvideosink) <= 0)    GST_VIDEO_SINK_WIDTH (sdlvideosink) = sdlvideosink->width;  gst_sdlvideosink_destroy (sdlvideosink);  if (sdlvideosink->is_xwindows && !sdlvideosink->xwindow_id) {    g_mutex_unlock (sdlvideosink->lock);    gst_x_overlay_prepare_xwindow_id (GST_X_OVERLAY (sdlvideosink));    g_mutex_lock (sdlvideosink->lock);  }  /* create a SDL window of the size requested by the user */  if (sdlvideosink->full_screen) {    sdlvideosink->screen =        SDL_SetVideoMode (GST_VIDEO_SINK_WIDTH (sdlvideosink),        GST_VIDEO_SINK_HEIGHT (sdlvideosink), 0,        SDL_SWSURFACE | SDL_FULLSCREEN);  } else {    sdlvideosink->screen =        SDL_SetVideoMode (GST_VIDEO_SINK_WIDTH (sdlvideosink),        GST_VIDEO_SINK_HEIGHT (sdlvideosink), 0, SDL_HWSURFACE | SDL_RESIZABLE);  }  if (sdlvideosink->screen == NULL)    goto no_screen;  /* create a new YUV overlay */  sdlvideosink->overlay = SDL_CreateYUVOverlay (sdlvideosink->width,      sdlvideosink->height, sdlvideosink->format, sdlvideosink->screen);  if (sdlvideosink->overlay == NULL)    goto no_overlay;  GST_DEBUG ("Using a %dx%d %dbpp SDL screen with a %dx%d /'%"      GST_FOURCC_FORMAT "/' YUV overlay", GST_VIDEO_SINK_WIDTH (sdlvideosink),      GST_VIDEO_SINK_HEIGHT (sdlvideosink),      sdlvideosink->screen->format->BitsPerPixel, sdlvideosink->width,      sdlvideosink->height, GST_FOURCC_ARGS (sdlvideosink->format));  sdlvideosink->rect.x = 0;  sdlvideosink->rect.y = 0;  sdlvideosink->rect.w = GST_VIDEO_SINK_WIDTH (sdlvideosink);  sdlvideosink->rect.h = GST_VIDEO_SINK_HEIGHT (sdlvideosink);  /*SDL_DisplayYUVOverlay (sdlvideosink->overlay, &(sdlvideosink->rect)); */  GST_DEBUG ("sdlvideosink: setting %08x (%" GST_FOURCC_FORMAT ")",      sdlvideosink->format, GST_FOURCC_ARGS (sdlvideosink->format));  return TRUE;  /* ERRORS */no_screen:  {    GST_ELEMENT_ERROR (sdlvideosink, LIBRARY, TOO_LAZY, (NULL),        ("SDL: Couldn't set %dx%d: %s", GST_VIDEO_SINK_WIDTH (sdlvideosink),            GST_VIDEO_SINK_HEIGHT (sdlvideosink), SDL_GetError ()));    return FALSE;  }no_overlay:  {    GST_ELEMENT_ERROR (sdlvideosink, LIBRARY, TOO_LAZY, (NULL),        ("SDL: Couldn't create SDL YUV overlay (%dx%d /'%" GST_FOURCC_FORMAT            "/'): %s", sdlvideosink->width, sdlvideosink->height,            GST_FOURCC_ARGS (sdlvideosink->format), SDL_GetError ()));    return FALSE;  }}
开发者ID:wang-zhao,项目名称:gstreamer-win,代码行数:73,


示例19: gst_ffmpegvidenc_flush_buffers

static GstFlowReturngst_ffmpegvidenc_flush_buffers (GstFFMpegVidEnc * ffmpegenc, gboolean send){  GstVideoCodecFrame *frame;  GstFlowReturn flow_ret = GST_FLOW_OK;  GstBuffer *outbuf;  gint ret_size;  GST_DEBUG_OBJECT (ffmpegenc, "flushing buffers with sending %d", send);  /* no need to empty codec if there is none */  if (!ffmpegenc->opened)    goto done;  while ((frame =          gst_video_encoder_get_oldest_frame (GST_VIDEO_ENCODER (ffmpegenc)))) {    ffmpegenc_setup_working_buf (ffmpegenc);    ret_size = avcodec_encode_video (ffmpegenc->context,        ffmpegenc->working_buf, ffmpegenc->working_buf_size, NULL);    if (ret_size < 0) {         /* there should be something, notify and give up */#ifndef GST_DISABLE_GST_DEBUG      GstFFMpegVidEncClass *oclass =          (GstFFMpegVidEncClass *) (G_OBJECT_GET_CLASS (ffmpegenc));      GST_WARNING_OBJECT (ffmpegenc,          "avenc_%s: failed to flush buffer", oclass->in_plugin->name);#endif /* GST_DISABLE_GST_DEBUG */      gst_video_codec_frame_unref (frame);      break;    }    /* save stats info if there is some as well as a stats file */    if (ffmpegenc->file && ffmpegenc->context->stats_out)      if (fprintf (ffmpegenc->file, "%s", ffmpegenc->context->stats_out) < 0)        GST_ELEMENT_ERROR (ffmpegenc, RESOURCE, WRITE,            (("Could not write to file /"%s/"."), ffmpegenc->filename),            GST_ERROR_SYSTEM);    if (send) {      if (gst_video_encoder_allocate_output_frame (GST_VIDEO_ENCODER              (ffmpegenc), frame, ret_size) != GST_FLOW_OK) {#ifndef GST_DISABLE_GST_DEBUG        GstFFMpegVidEncClass *oclass =            (GstFFMpegVidEncClass *) (G_OBJECT_GET_CLASS (ffmpegenc));        GST_WARNING_OBJECT (ffmpegenc,            "avenc_%s: failed to allocate buffer", oclass->in_plugin->name);#endif /* GST_DISABLE_GST_DEBUG */        gst_video_codec_frame_unref (frame);        break;      }      outbuf = frame->output_buffer;      gst_buffer_fill (outbuf, 0, ffmpegenc->working_buf, ret_size);      if (ffmpegenc->context->coded_frame->key_frame)        GST_VIDEO_CODEC_FRAME_SET_SYNC_POINT (frame);      flow_ret =          gst_video_encoder_finish_frame (GST_VIDEO_ENCODER (ffmpegenc), frame);    } else {      gst_video_codec_frame_unref (frame);    }  }done:  return flow_ret;}
开发者ID:cablelabs,项目名称:gst-libav,代码行数:69,


示例20: gst_sdlvideosink_show_frame

static GstFlowReturngst_sdlvideosink_show_frame (GstBaseSink * bsink, GstBuffer * buf){  GstSDLVideoSink *sdlvideosink;  sdlvideosink = GST_SDLVIDEOSINK (bsink);  g_mutex_lock (sdlvideosink->lock);  if (!sdlvideosink->init ||      !sdlvideosink->overlay || !sdlvideosink->overlay->pixels)    goto not_init;  /* if (GST_BUFFER_DATA (buf) != sdlvideosink->overlay->pixels[0]) */  if (TRUE) {    guint8 *out;    gint l;    if (!gst_sdlvideosink_lock (sdlvideosink))      goto cannot_lock;    /* buf->yuv - FIXME: bufferpool! */    if (sdlvideosink->format == SDL_YV12_OVERLAY) {      guint8 *y, *u, *v;      switch (sdlvideosink->fourcc) {        case GST_MAKE_FOURCC ('I', '4', '2', '0'):          y = GST_BUFFER_DATA (buf);          /* I420 is YV12 with switched colour planes and different offsets */          v = y + I420_U_OFFSET (sdlvideosink->width, sdlvideosink->height);          u = y + I420_V_OFFSET (sdlvideosink->width, sdlvideosink->height);          break;        case GST_MAKE_FOURCC ('Y', 'V', '1', '2'):          y = GST_BUFFER_DATA (buf);          u = y + I420_U_OFFSET (sdlvideosink->width, sdlvideosink->height);          v = y + I420_V_OFFSET (sdlvideosink->width, sdlvideosink->height);          break;        default:          gst_sdlvideosink_unlock (sdlvideosink);          g_mutex_unlock (sdlvideosink->lock);          g_return_val_if_reached (GST_FLOW_ERROR);      }      /* Y Plane */      out = sdlvideosink->overlay->pixels[0];      for (l = 0; l < sdlvideosink->height; l++) {        memcpy (out, y, I420_Y_ROWSTRIDE (sdlvideosink->width));        out += sdlvideosink->overlay->pitches[0];        y += I420_Y_ROWSTRIDE (sdlvideosink->width);      }      /* U plane */      out = sdlvideosink->overlay->pixels[1];      for (l = 0; l < (sdlvideosink->height / 2); l++) {        memcpy (out, u, I420_U_ROWSTRIDE (sdlvideosink->width));        out += sdlvideosink->overlay->pitches[1];        u += I420_U_ROWSTRIDE (sdlvideosink->width);      }      /* V plane */      out = sdlvideosink->overlay->pixels[2];      for (l = 0; l < (sdlvideosink->height / 2); l++) {        memcpy (out, v, I420_V_ROWSTRIDE (sdlvideosink->width));        out += sdlvideosink->overlay->pitches[2];        v += I420_V_ROWSTRIDE (sdlvideosink->width);      }    } else {      guint8 *in = GST_BUFFER_DATA (buf);      gint in_stride = sdlvideosink->width * 2;      out = sdlvideosink->overlay->pixels[0];      for (l = 0; l < sdlvideosink->height; l++) {        memcpy (out, in, in_stride);        out += sdlvideosink->overlay->pitches[0];        in += in_stride;      }    }    gst_sdlvideosink_unlock (sdlvideosink);  }  /* Show, baby, show! */  SDL_DisplayYUVOverlay (sdlvideosink->overlay, &(sdlvideosink->rect));  /* Handle any resize */  gst_sdlv_process_events (sdlvideosink);  g_mutex_unlock (sdlvideosink->lock);  return GST_FLOW_OK;  /* ERRORS */not_init:  {    GST_ELEMENT_ERROR (sdlvideosink, CORE, NEGOTIATION, (NULL),        ("not negotiated."));    g_mutex_unlock (sdlvideosink->lock);    return GST_FLOW_NOT_NEGOTIATED;  }cannot_lock://.........这里部分代码省略.........
开发者ID:wang-zhao,项目名称:gstreamer-win,代码行数:101,


示例21: gst_rtp_celt_pay_handle_buffer

static GstFlowReturngst_rtp_celt_pay_handle_buffer (GstRTPBasePayload * basepayload,    GstBuffer * buffer){  GstFlowReturn ret;  GstRtpCELTPay *rtpceltpay;  gsize payload_len;  GstMapInfo map;  GstClockTime duration, packet_dur;  guint i, ssize, packet_len;  rtpceltpay = GST_RTP_CELT_PAY (basepayload);  ret = GST_FLOW_OK;  gst_buffer_map (buffer, &map, GST_MAP_READ);  switch (rtpceltpay->packet) {    case 0:      /* ident packet. We need to parse the headers to construct the RTP       * properties. */      if (!gst_rtp_celt_pay_parse_ident (rtpceltpay, map.data, map.size))        goto parse_error;      goto cleanup;    case 1:      /* comment packet, we ignore it */      goto cleanup;    default:      /* other packets go in the payload */      break;  }  gst_buffer_unmap (buffer, &map);  duration = GST_BUFFER_DURATION (buffer);  GST_LOG_OBJECT (rtpceltpay,      "got buffer of duration %" GST_TIME_FORMAT ", size %" G_GSIZE_FORMAT,      GST_TIME_ARGS (duration), map.size);  /* calculate the size of the size field and the payload */  ssize = 1;  for (i = map.size; i > 0xff; i -= 0xff)    ssize++;  GST_DEBUG_OBJECT (rtpceltpay, "bytes for size %u", ssize);  /* calculate what the new size and duration would be of the packet */  payload_len = ssize + map.size + rtpceltpay->bytes + rtpceltpay->sbytes;  if (rtpceltpay->qduration != -1 && duration != -1)    packet_dur = rtpceltpay->qduration + duration;  else    packet_dur = 0;  packet_len = gst_rtp_buffer_calc_packet_len (payload_len, 0, 0);  if (gst_rtp_base_payload_is_filled (basepayload, packet_len, packet_dur)) {    /* size or duration would overflow the packet, flush the queued data */    ret = gst_rtp_celt_pay_flush_queued (rtpceltpay);  }  /* queue the packet */  gst_rtp_celt_pay_add_queued (rtpceltpay, buffer, ssize, map.size, duration);done:  rtpceltpay->packet++;  return ret;  /* ERRORS */cleanup:  {    gst_buffer_unmap (buffer, &map);    goto done;  }parse_error:  {    GST_ELEMENT_ERROR (rtpceltpay, STREAM, DECODE, (NULL),        ("Error parsing first identification packet."));    gst_buffer_unmap (buffer, &map);    return GST_FLOW_ERROR;  }}
Developer ID: Distrotech, Project: gst-plugins-good, Lines of code: 83
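The parse_error branch in example 21 pairs GST_ELEMENT_ERROR with gst_buffer_unmap() so the mapped buffer is always released before bailing out. A self-contained sketch of that cleanup-on-error shape; the 8-byte minimum stands in for the real ident parser and is purely hypothetical:

#include <gst/gst.h>

/* Sketch only: report STREAM/DECODE when the identification packet cannot be
 * parsed, unmapping the buffer on every exit path. */
static GstFlowReturn
parse_ident_sketch (GstElement * element, GstBuffer * buffer)
{
  GstMapInfo map;

  if (!gst_buffer_map (buffer, &map, GST_MAP_READ))
    return GST_FLOW_ERROR;

  if (map.size < 8) {           /* hypothetical minimum ident size */
    GST_ELEMENT_ERROR (element, STREAM, DECODE, (NULL),
        ("Error parsing first identification packet."));
    gst_buffer_unmap (buffer, &map);
    return GST_FLOW_ERROR;
  }

  gst_buffer_unmap (buffer, &map);
  return GST_FLOW_OK;
}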


Example 22: gst_rnd_buffer_size_loop

static voidgst_rnd_buffer_size_loop (GstRndBufferSize * self){  GstBuffer *buf = NULL;  GstFlowReturn ret;  guint num_bytes;  if (G_UNLIKELY (self->min > self->max))    goto bogus_minmax;  if (G_UNLIKELY (self->min != self->max)) {    num_bytes = g_rand_int_range (self->rand, self->min, self->max);  } else {    num_bytes = self->min;  }  GST_LOG_OBJECT (self, "pulling %u bytes at offset %" G_GUINT64_FORMAT,      num_bytes, self->offset);  ret = gst_pad_pull_range (self->sinkpad, self->offset, num_bytes, &buf);  if (ret != GST_FLOW_OK)    goto pull_failed;  if (GST_BUFFER_SIZE (buf) < num_bytes) {    GST_WARNING_OBJECT (self, "short buffer: %u bytes", GST_BUFFER_SIZE (buf));  }  self->offset += GST_BUFFER_SIZE (buf);  ret = gst_pad_push (self->srcpad, buf);  if (ret != GST_FLOW_OK)    goto push_failed;  return;pause_task:  {    GST_DEBUG_OBJECT (self, "pausing task");    gst_pad_pause_task (self->sinkpad);    return;  }pull_failed:  {    if (ret == GST_FLOW_UNEXPECTED) {      GST_DEBUG_OBJECT (self, "eos");      gst_pad_push_event (self->srcpad, gst_event_new_eos ());    } else {      GST_WARNING_OBJECT (self, "pull_range flow: %s", gst_flow_get_name (ret));    }    goto pause_task;  }push_failed:  {    GST_DEBUG_OBJECT (self, "push flow: %s", gst_flow_get_name (ret));    if (ret == GST_FLOW_UNEXPECTED) {      GST_DEBUG_OBJECT (self, "eos");      gst_pad_push_event (self->srcpad, gst_event_new_eos ());    } else if (GST_FLOW_IS_FATAL (ret) || ret == GST_FLOW_NOT_LINKED) {      GST_ELEMENT_ERROR (self, STREAM, FAILED,          ("Internal data stream error."),          ("streaming stopped, reason: %s", gst_flow_get_name (ret)));    }    goto pause_task;  }bogus_minmax:  {    GST_ELEMENT_ERROR (self, LIBRARY, SETTINGS,        ("The minimum buffer size is smaller than the maximum buffer size."),        ("buffer sizes: max=%ld, min=%ld", self->min, self->max));    goto pause_task;  }}
Developer ID: dgerlach, Project: gst-plugins-good, Lines of code: 77
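Example 22 is written against the GStreamer 0.10 API (GST_FLOW_UNEXPECTED, GST_FLOW_IS_FATAL, GST_BUFFER_SIZE). The sketch below shows the same pause-the-task convention with the 1.x names instead; it is an approximation, not the element's actual code. Fatal flow returns are reported with GST_ELEMENT_ERROR so the application sees them on the bus, and the task is paused in every case so the loop stops being scheduled:

#include <gst/gst.h>

/* Sketch, 1.x API: EOS pushes an EOS event; a fatal flow return additionally
 * posts a STREAM/FAILED element error; the streaming task is always paused. */
static void
handle_loop_flow_sketch (GstElement * element, GstPad * sinkpad,
    GstPad * srcpad, GstFlowReturn ret)
{
  if (ret == GST_FLOW_EOS) {
    gst_pad_push_event (srcpad, gst_event_new_eos ());
  } else if (ret < GST_FLOW_EOS || ret == GST_FLOW_NOT_LINKED) {
    GST_ELEMENT_ERROR (element, STREAM, FAILED,
        ("Internal data stream error."),
        ("streaming stopped, reason: %s", gst_flow_get_name (ret)));
    gst_pad_push_event (srcpad, gst_event_new_eos ());
  }
  gst_pad_pause_task (sinkpad);
}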


Example 23: gst_divxenc_setup

static gbooleangst_divxenc_setup (GstDivxEnc * divxenc){  void *handle = NULL;  SETTINGS output;  DivXBitmapInfoHeader input;  int ret;  /* set it up */  memset (&input, 0, sizeof (DivXBitmapInfoHeader));  input.biSize = sizeof (DivXBitmapInfoHeader);  input.biWidth = divxenc->width;  input.biHeight = divxenc->height;  input.biBitCount = divxenc->bitcnt;  input.biCompression = divxenc->csp;  memset (&output, 0, sizeof (SETTINGS));  output.vbr_mode = RCMODE_VBV_1PASS;  output.bitrate = divxenc->bitrate;  output.quantizer = 0;  output.use_bidirect = 1;  output.input_clock = 0;  output.input_frame_period = 1000000;  output.internal_timescale = (divxenc->fps_n / divxenc->fps_d) * 1000000;      /* FIX? */  output.max_key_interval = (divxenc->max_key_interval == -1) ?      150 : divxenc->max_key_interval;  output.key_frame_threshold = 50;  output.vbv_bitrate = 0;  output.vbv_size = 0;  output.vbv_occupancy = 0;  output.complexity_modulation = 0;  output.deinterlace = 0;  output.quality = divxenc->quality;  output.data_partitioning = 0;  output.quarter_pel = 1;  output.use_gmc = 1;  output.psychovisual = 0;  output.pv_strength_frame = 0;  output.pv_strength_MB = 0;  output.interlace_mode = 0;  output.enable_crop = 0;  output.enable_resize = 0;  output.temporal_enable = 1;  output.spatial_passes = 3;  output.spatial_level = 1.0;  output.temporal_level = 1.0;  if ((ret = encore (&handle, ENC_OPT_INIT, &input, &output))) {    GST_ELEMENT_ERROR (divxenc, LIBRARY, SETTINGS, (NULL),        ("Error setting up divx encoder: %s (%d)",            gst_divxenc_error (ret), ret));    return FALSE;  }  divxenc->handle = handle;  /* set buffer size to theoretical limit (see docs on divx.com) */  divxenc->buffer_size = 6 * divxenc->width * divxenc->height;  return TRUE;}
Developer ID: ChinnaSuhas, Project: ossbuild, Lines of code: 61
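Example 23 wraps a third-party encoder's return code in a LIBRARY/SETTINGS error, putting both the library's error string and the numeric code into the debug message. A sketch of that wrapping; my_codec_init() and my_codec_strerror() are hypothetical stand-ins for the encore()/gst_divxenc_error() calls:

#include <gst/gst.h>

/* Hypothetical stand-ins for the third-party encoder API. */
static int
my_codec_init (void)
{
  return -1;                    /* pretend initialisation failed */
}

static const char *
my_codec_strerror (int code)
{
  return (code == -1) ? "invalid settings" : "unknown error";
}

/* Sketch: a failed library call becomes a LIBRARY/SETTINGS element error.
 * (NULL) leaves the user-visible message empty; the second argument pair
 * carries the debug detail. */
static gboolean
setup_sketch (GstElement * element)
{
  int ret;

  if ((ret = my_codec_init ()) != 0) {
    GST_ELEMENT_ERROR (element, LIBRARY, SETTINGS, (NULL),
        ("Error setting up encoder: %s (%d)", my_codec_strerror (ret), ret));
    return FALSE;
  }
  return TRUE;
}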


Example 24: daala_handle_data_packet

static GstFlowReturndaala_handle_data_packet (GstDaalaDec * dec, ogg_packet * packet,    GstVideoCodecFrame * frame){  /* normal data packet */  od_img img;  gboolean keyframe;  GstFlowReturn result;  if (G_UNLIKELY (!dec->have_header))    goto not_initialized;  /* the second most significant bit of the first data byte is cleared    * for keyframes. We can only check it if it's not a zero-length packet. */  keyframe = packet->bytes && ((packet->packet[0] & 0x40));  if (G_UNLIKELY (keyframe)) {    GST_DEBUG_OBJECT (dec, "we have a keyframe");    dec->need_keyframe = FALSE;  } else if (G_UNLIKELY (dec->need_keyframe)) {    goto dropping;  }  GST_DEBUG_OBJECT (dec, "parsing data packet");  /* this does the decoding */  if (G_UNLIKELY (daala_decode_packet_in (dec->decoder, &img, packet) < 0))    goto decode_error;  if (frame &&      (gst_video_decoder_get_max_decode_time (GST_VIDEO_DECODER (dec),              frame) < 0))    goto dropping_qos;  if (G_UNLIKELY ((img.width != dec->info.pic_width              || img.height != dec->info.pic_height)))    goto wrong_dimensions;  result = daala_handle_image (dec, &img, frame);  return result;  /* ERRORS */not_initialized:  {    GST_ELEMENT_ERROR (GST_ELEMENT (dec), STREAM, DECODE,        (NULL), ("no header sent yet"));    return GST_FLOW_ERROR;  }dropping:  {    GST_WARNING_OBJECT (dec, "dropping frame because we need a keyframe");    return GST_CUSTOM_FLOW_DROP;  }dropping_qos:  {    GST_WARNING_OBJECT (dec, "dropping frame because of QoS");    return GST_CUSTOM_FLOW_DROP;  }decode_error:  {    GST_ELEMENT_ERROR (GST_ELEMENT (dec), STREAM, DECODE,        (NULL), ("daala decoder did not decode data packet"));    return GST_FLOW_ERROR;  }wrong_dimensions:  {    GST_ELEMENT_ERROR (GST_ELEMENT (dec), STREAM, FORMAT,        (NULL), ("dimensions of image do not match header"));    return GST_FLOW_ERROR;  }}
Developer ID: PeterXu, Project: gst-mobile, Lines of code: 71
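Example 24 illustrates how to pick the severity: a missing keyframe or a QoS drop only logs a warning and drops the frame, while a decoder failure or a header mismatch is posted as an element error and aborts the flow. A reduced sketch of that decision; the boolean flags are hypothetical stand-ins for the real checks, and the real element returns its own custom drop value instead of GST_FLOW_OK:

#include <gst/gst.h>

/* Sketch: warnings for recoverable drops, GST_ELEMENT_ERROR for real failures. */
static GstFlowReturn
classify_frame_sketch (GstElement * element, gboolean have_keyframe,
    gboolean decode_ok, gboolean size_ok)
{
  if (!have_keyframe) {
    GST_WARNING_OBJECT (element, "dropping frame because we need a keyframe");
    return GST_FLOW_OK;         /* drop silently, nothing posted on the bus */
  }
  if (!decode_ok) {
    GST_ELEMENT_ERROR (element, STREAM, DECODE, (NULL),
        ("decoder did not decode data packet"));
    return GST_FLOW_ERROR;
  }
  if (!size_ok) {
    GST_ELEMENT_ERROR (element, STREAM, FORMAT, (NULL),
        ("dimensions of image do not match header"));
    return GST_FLOW_ERROR;
  }
  return GST_FLOW_OK;
}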


Example 25: gst_udpsrc_create

//.........这里部分代码省略.........      NULL, NULL, &flags, udpsrc->cancellable, &err);  if (G_UNLIKELY (res < 0)) {    /* EHOSTUNREACH for a UDP socket means that a packet sent with udpsink     * generated a "port unreachable" ICMP response. We ignore that and try     * again. */    if (g_error_matches (err, G_IO_ERROR, G_IO_ERROR_HOST_UNREACHABLE)) {      g_clear_error (&err);      goto retry;    }    goto receive_error;  }  /* remember maximum packet size */  if (res > udpsrc->max_size)    udpsrc->max_size = res;  outbuf = gst_buffer_new ();  /* append first memory chunk to buffer */  gst_buffer_append_memory (outbuf, udpsrc->mem);  /* if the packet didn't fit into the first chunk, add second one as well */  if (res > udpsrc->map.size) {    gst_buffer_append_memory (outbuf, udpsrc->mem_max);    gst_memory_unmap (udpsrc->mem_max, &udpsrc->map_max);    udpsrc->vec[1].buffer = NULL;    udpsrc->vec[1].size = 0;    udpsrc->mem_max = NULL;  }  /* make sure we allocate a new chunk next time (we do this only here because   * we look at map.size to see if the second memory chunk is needed above) */  gst_memory_unmap (udpsrc->mem, &udpsrc->map);  udpsrc->vec[0].buffer = NULL;  udpsrc->vec[0].size = 0;  udpsrc->mem = NULL;  offset = udpsrc->skip_first_bytes;  if (G_UNLIKELY (offset > 0 && res < offset))    goto skip_error;  gst_buffer_resize (outbuf, offset, res - offset);  /* use buffer metadata so receivers can also track the address */  if (saddr) {    gst_buffer_add_net_address_meta (outbuf, saddr);    g_object_unref (saddr);    saddr = NULL;  }  GST_LOG_OBJECT (udpsrc, "read packet of %d bytes", (int) res);  *buf = GST_BUFFER_CAST (outbuf);  return GST_FLOW_OK;  /* ERRORS */memory_alloc_error:  {    GST_ELEMENT_ERROR (udpsrc, RESOURCE, READ, (NULL),        ("Failed to allocate or map memory"));    return GST_FLOW_ERROR;  }select_error:  {    GST_ELEMENT_ERROR (udpsrc, RESOURCE, READ, (NULL),        ("select error: %s", err->message));    g_clear_error (&err);    return GST_FLOW_ERROR;  }stopped:  {    GST_DEBUG ("stop called");    g_clear_error (&err);    return GST_FLOW_FLUSHING;  }receive_error:  {    if (g_error_matches (err, G_IO_ERROR, G_IO_ERROR_BUSY) ||        g_error_matches (err, G_IO_ERROR, G_IO_ERROR_CANCELLED)) {      g_clear_error (&err);      return GST_FLOW_FLUSHING;    } else {      GST_ELEMENT_ERROR (udpsrc, RESOURCE, READ, (NULL),          ("receive error %" G_GSSIZE_FORMAT ": %s", res, err->message));      g_clear_error (&err);      return GST_FLOW_ERROR;    }  }skip_error:  {    gst_buffer_unref (outbuf);    GST_ELEMENT_ERROR (udpsrc, STREAM, DECODE, (NULL),        ("UDP buffer to small to skip header"));    return GST_FLOW_ERROR;  }}
Developer ID: BigBrother-International, Project: gst-plugins-good, Lines of code: 101
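Example 25 deliberately does not post an error for a busy or cancelled socket read: those simply mean the element is flushing (shutting down or seeking), so it returns GST_FLOW_FLUSHING without touching the bus, and only genuine I/O failures become a RESOURCE/READ error. A sketch of that branch, assuming the same GIO error domain as the example:

#include <gio/gio.h>
#include <gst/gst.h>

/* Sketch: map a GIO receive error either to FLUSHING (no bus message) or to a
 * RESOURCE/READ element error, clearing the GError in both cases. */
static GstFlowReturn
map_read_error_sketch (GstElement * element, GError ** err, gssize res)
{
  if (g_error_matches (*err, G_IO_ERROR, G_IO_ERROR_BUSY) ||
      g_error_matches (*err, G_IO_ERROR, G_IO_ERROR_CANCELLED)) {
    g_clear_error (err);
    return GST_FLOW_FLUSHING;
  }

  GST_ELEMENT_ERROR (element, RESOURCE, READ, (NULL),
      ("receive error %" G_GSSIZE_FORMAT ": %s", res, (*err)->message));
  g_clear_error (err);
  return GST_FLOW_ERROR;
}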


Example 26: gst_mpg123_audio_dec_handle_frame

static GstFlowReturngst_mpg123_audio_dec_handle_frame (GstAudioDecoder * dec,    GstBuffer * input_buffer){  GstMpg123AudioDec *mpg123_decoder;  int decode_error;  unsigned char *decoded_bytes;  size_t num_decoded_bytes;  GstFlowReturn retval;  mpg123_decoder = GST_MPG123_AUDIO_DEC (dec);  g_assert (mpg123_decoder->handle != NULL);  /* The actual decoding */  {    /* feed input data (if there is any) */    if (G_LIKELY (input_buffer != NULL)) {      GstMapInfo info;      if (gst_buffer_map (input_buffer, &info, GST_MAP_READ)) {        mpg123_feed (mpg123_decoder->handle, info.data, info.size);        gst_buffer_unmap (input_buffer, &info);      } else {        GST_ERROR_OBJECT (mpg123_decoder, "gst_memory_map() failed");        return GST_FLOW_ERROR;      }    }    /* Try to decode a frame */    decoded_bytes = NULL;    num_decoded_bytes = 0;    decode_error = mpg123_decode_frame (mpg123_decoder->handle,        &mpg123_decoder->frame_offset, &decoded_bytes, &num_decoded_bytes);  }  retval = GST_FLOW_OK;  switch (decode_error) {    case MPG123_NEW_FORMAT:      /* As mentioned in gst_mpg123_audio_dec_set_format(), the next audioinfo       * is not set immediately; instead, the code waits for mpg123 to take       * note of the new format, and then sets the audioinfo. This fixes glitches       * with mp3s containing several format headers (for example, first half       * using 44.1kHz, second half 32 kHz) */      GST_LOG_OBJECT (dec,          "mpg123 reported a new format -> setting next srccaps");      gst_mpg123_audio_dec_push_decoded_bytes (mpg123_decoder, decoded_bytes,          num_decoded_bytes);      /* If there is a next audioinfo, use it, then set has_next_audioinfo to       * FALSE, to make sure gst_audio_decoder_set_output_format() isn't called       * again until set_format is called by the base class */      if (mpg123_decoder->has_next_audioinfo) {        if (!gst_audio_decoder_set_output_format (dec,                &(mpg123_decoder->next_audioinfo))) {          GST_WARNING_OBJECT (dec, "Unable to set output format");          retval = GST_FLOW_NOT_NEGOTIATED;        }        mpg123_decoder->has_next_audioinfo = FALSE;      }      break;    case MPG123_NEED_MORE:    case MPG123_OK:      retval = gst_mpg123_audio_dec_push_decoded_bytes (mpg123_decoder,          decoded_bytes, num_decoded_bytes);      break;    case MPG123_DONE:      /* If this happens, then the upstream parser somehow missed the ending       * of the bitstream */      GST_LOG_OBJECT (dec, "mpg123 is done decoding");      gst_mpg123_audio_dec_push_decoded_bytes (mpg123_decoder, decoded_bytes,          num_decoded_bytes);      retval = GST_FLOW_EOS;      break;    default:    {      /* Anything else is considered an error */      int errcode;      switch (decode_error) {        case MPG123_ERR:          errcode = mpg123_errcode (mpg123_decoder->handle);          break;        default:          errcode = decode_error;      }      switch (errcode) {        case MPG123_BAD_OUTFORMAT:{          GstCaps *input_caps =              gst_pad_get_current_caps (GST_AUDIO_DECODER_SINK_PAD (dec));          GST_ELEMENT_ERROR (dec, STREAM, FORMAT, (NULL),              ("Output sample format could not be used when trying to decode frame. "                  "This is typically caused when the input caps (often the sample "                  "rate) do not match the actual format of the audio data. "//.........这里部分代码省略.........
Developer ID: lubing521, Project: gst-embedded-builder, Lines of code: 101
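All of the examples above reduce to the same macro shape: GST_ELEMENT_ERROR (element, DOMAIN, CODE, (user message), (debug message)), where DOMAIN is one of CORE, LIBRARY, RESOURCE or STREAM and CODE comes from the matching GstCoreError / GstLibraryError / GstResourceError / GstStreamError enum. A minimal self-contained sketch; the function and the failing open() scenario are invented for illustration:

#include <errno.h>
#include <gst/gst.h>

/* GST_ELEMENT_ERROR posts a GST_MESSAGE_ERROR on the element's bus: the first
 * parenthesised argument is the message shown to the user, the second is the
 * developer-oriented debug string. */
static GstFlowReturn
report_open_failure (GstElement * element, const gchar * location)
{
  GST_ELEMENT_ERROR (element, RESOURCE, OPEN_READ,
      ("Could not open \"%s\" for reading.", location),
      ("open() failed: %s", g_strerror (errno)));
  return GST_FLOW_ERROR;
}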



Note: The GST_ELEMENT_ERROR examples in this article were collected from GitHub, MSDocs and other source-code and documentation platforms, and the snippets were selected from open-source projects contributed by their respective developers. Copyright of the source code remains with the original authors; consult the corresponding project's license before redistributing or reusing it, and do not republish this article without permission.

