
Self-study tutorial: C++ GST_ERROR_OBJECT function code examples

51自学网 2021-06-01 20:56:27
  C++

This article collects typical usage examples of the GST_ERROR_OBJECT function from C/C++ GStreamer code. GST_ERROR_OBJECT is GStreamer's error-level logging macro: it takes a GstObject (usually the element instance itself) followed by a printf-style format string, so the debug log records which object reported the error. If you are wondering what GST_ERROR_OBJECT does, how to call it, or what real-world uses look like, the examples selected below should help.

The following presents 30 code examples of the GST_ERROR_OBJECT function, drawn from real projects and sorted by popularity by default.
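Before the collected examples, here is a minimal sketch of how GST_ERROR_OBJECT is typically called inside an element. The my_element_open_file helper and its failure condition are hypothetical, made up only to illustrate the macro's printf-style signature:

#include <errno.h>
#include <stdio.h>

#include <gst/gst.h>

/* Hypothetical helper: on failure, log an error tied to the element instance.
 * GST_ERROR_OBJECT (object, format, ...) formats like printf and records which
 * GstObject reported the problem; GST_WARNING_OBJECT, GST_INFO_OBJECT,
 * GST_DEBUG_OBJECT and GST_LOG_OBJECT follow the same pattern at lower levels. */
static gboolean
my_element_open_file (GstElement * self, const gchar * path)
{
  FILE *f = fopen (path, "rb");

  if (f == NULL) {
    GST_ERROR_OBJECT (self, "failed to open %s: %s", path, g_strerror (errno));
    return FALSE;
  }

  fclose (f);
  return TRUE;
}

In a real plugin the messages are usually routed to the plugin's own debug category by declaring one with GST_DEBUG_CATEGORY_STATIC and redefining GST_CAT_DEFAULT to it; otherwise, as in this sketch, they go to the default category.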

Example 1: output_loop

//.........这里部分代码省略.........                    memcpy (GST_BUFFER_DATA (buf), omx_buffer->pBuffer + omx_buffer->nOffset, omx_buffer->nFilledLen);                    if (self->use_timestamps)                    {                        GST_BUFFER_TIMESTAMP (buf) = gst_util_uint64_scale_int (omx_buffer->nTimeStamp,                                                                                GST_SECOND,                                                                                OMX_TICKS_PER_SECOND);                    }                    if (self->share_output_buffer)                    {                        GST_WARNING_OBJECT (self, "couldn't zero-copy");                        /* If pAppPrivate is NULL, it means it was a dummy                         * allocation, free it. */                        if (!omx_buffer->pAppPrivate)                        {                            g_free (omx_buffer->pBuffer);                            omx_buffer->pBuffer = NULL;                        }                    }                    ret = push_buffer (self, buf);                }                else                {                    GST_WARNING_OBJECT (self, "couldn't allocate buffer of size %" G_GUINT32_FORMAT,                                        omx_buffer->nFilledLen);                }            }        }        else        {            GST_WARNING_OBJECT (self, "empty buffer");        }        if (G_UNLIKELY (omx_buffer->nFlags & OMX_BUFFERFLAG_EOS))        {            GST_DEBUG_OBJECT (self, "got eos");            gst_pad_push_event (self->srcpad, gst_event_new_eos ());            ret = GST_FLOW_UNEXPECTED;            goto leave;        }        if (self->share_output_buffer &&            !omx_buffer->pBuffer &&            omx_buffer->nOffset == 0)        {            GstBuffer *buf;            GstFlowReturn result;            GST_LOG_OBJECT (self, "allocate buffer");            result = gst_pad_alloc_buffer_and_set_caps (self->srcpad,                                                        GST_BUFFER_OFFSET_NONE,                                                        omx_buffer->nAllocLen,                                                        GST_PAD_CAPS (self->srcpad),                                                        &buf);            if (G_LIKELY (result == GST_FLOW_OK))            {                gst_buffer_ref (buf);                omx_buffer->pAppPrivate = buf;                omx_buffer->pBuffer = GST_BUFFER_DATA (buf);                omx_buffer->nAllocLen = GST_BUFFER_SIZE (buf);            }            else            {                GST_WARNING_OBJECT (self, "could not pad allocate buffer, using malloc");                omx_buffer->pBuffer = g_malloc (omx_buffer->nAllocLen);            }        }        if (self->share_output_buffer &&            !omx_buffer->pBuffer)        {            GST_ERROR_OBJECT (self, "no input buffer to share");        }        omx_buffer->nFilledLen = 0;        GST_LOG_OBJECT (self, "release_buffer");        g_omx_port_release_buffer (out_port, omx_buffer);    }leave:    self->last_pad_push_return = ret;    if (gomx->omx_error != OMX_ErrorNone)        ret = GST_FLOW_ERROR;    if (ret != GST_FLOW_OK)    {        GST_INFO_OBJECT (self, "pause task, reason:  %s",                         gst_flow_get_name (ret));        gst_pad_pause_task (self->srcpad);    }    GST_LOG_OBJECT (self, "end");    gst_object_unref (self);}
Developer: lhzhang, Project: gst-openmax-devel, Lines: 101


Example 2: gst_omx_audio_dec_loop

static voidgst_omx_audio_dec_loop (GstOMXAudioDec * self){  GstOMXPort *port = self->dec_out_port;  GstOMXBuffer *buf = NULL;  GstFlowReturn flow_ret = GST_FLOW_OK;  GstOMXAcquireBufferReturn acq_return;  OMX_ERRORTYPE err;  acq_return = gst_omx_port_acquire_buffer (port, &buf);  if (acq_return == GST_OMX_ACQUIRE_BUFFER_ERROR) {    goto component_error;  } else if (acq_return == GST_OMX_ACQUIRE_BUFFER_FLUSHING) {    goto flushing;  } else if (acq_return == GST_OMX_ACQUIRE_BUFFER_EOS) {    goto eos;  }  if (!gst_pad_has_current_caps (GST_AUDIO_DECODER_SRC_PAD (self)) ||      acq_return == GST_OMX_ACQUIRE_BUFFER_RECONFIGURE) {    OMX_PARAM_PORTDEFINITIONTYPE port_def;    OMX_AUDIO_PARAM_PCMMODETYPE pcm_param;    GstAudioChannelPosition omx_position[OMX_AUDIO_MAXCHANNELS];    GstOMXAudioDecClass *klass = GST_OMX_AUDIO_DEC_GET_CLASS (self);    gint i;    GST_DEBUG_OBJECT (self, "Port settings have changed, updating caps");    /* Reallocate all buffers */    if (acq_return == GST_OMX_ACQUIRE_BUFFER_RECONFIGURE        && gst_omx_port_is_enabled (port)) {      err = gst_omx_port_set_enabled (port, FALSE);      if (err != OMX_ErrorNone)        goto reconfigure_error;      err = gst_omx_port_wait_buffers_released (port, 5 * GST_SECOND);      if (err != OMX_ErrorNone)        goto reconfigure_error;      err = gst_omx_port_deallocate_buffers (port);      if (err != OMX_ErrorNone)        goto reconfigure_error;      err = gst_omx_port_wait_enabled (port, 1 * GST_SECOND);      if (err != OMX_ErrorNone)        goto reconfigure_error;    }    /* Just update caps */    GST_AUDIO_DECODER_STREAM_LOCK (self);    gst_omx_port_get_port_definition (port, &port_def);    g_assert (port_def.format.audio.eEncoding == OMX_AUDIO_CodingPCM);    GST_OMX_INIT_STRUCT (&pcm_param);    pcm_param.nPortIndex = self->dec_out_port->index;    err =        gst_omx_component_get_parameter (self->dec, OMX_IndexParamAudioPcm,        &pcm_param);    if (err != OMX_ErrorNone) {      GST_ERROR_OBJECT (self, "Failed to get PCM parameters: %s (0x%08x)",          gst_omx_error_to_string (err), err);      goto caps_failed;    }    g_assert (pcm_param.ePCMMode == OMX_AUDIO_PCMModeLinear);    g_assert (pcm_param.bInterleaved == OMX_TRUE);    gst_audio_info_init (&self->info);    for (i = 0; i < pcm_param.nChannels; i++) {      switch (pcm_param.eChannelMapping[i]) {        case OMX_AUDIO_ChannelLF:          omx_position[i] = GST_AUDIO_CHANNEL_POSITION_FRONT_LEFT;          break;        case OMX_AUDIO_ChannelRF:          omx_position[i] = GST_AUDIO_CHANNEL_POSITION_FRONT_RIGHT;          break;        case OMX_AUDIO_ChannelCF:          omx_position[i] = GST_AUDIO_CHANNEL_POSITION_FRONT_CENTER;          break;        case OMX_AUDIO_ChannelLS:          omx_position[i] = GST_AUDIO_CHANNEL_POSITION_SIDE_LEFT;          break;        case OMX_AUDIO_ChannelRS:          omx_position[i] = GST_AUDIO_CHANNEL_POSITION_SIDE_RIGHT;          break;        case OMX_AUDIO_ChannelLFE:          omx_position[i] = GST_AUDIO_CHANNEL_POSITION_LFE1;          break;        case OMX_AUDIO_ChannelCS:          omx_position[i] = GST_AUDIO_CHANNEL_POSITION_REAR_CENTER;          break;        case OMX_AUDIO_ChannelLR:          omx_position[i] = GST_AUDIO_CHANNEL_POSITION_REAR_LEFT;          break;        case OMX_AUDIO_ChannelRR:          omx_position[i] = GST_AUDIO_CHANNEL_POSITION_REAR_RIGHT;          break;        case OMX_AUDIO_ChannelNone://.........这里部分代码省略.........
Developer: freedesktop-unofficial-mirror, Project: gstreamer__gst-omx, Lines: 101


Example 3: gst_decklink_video_src_set_caps

static gbooleangst_decklink_video_src_set_caps (GstBaseSrc * bsrc, GstCaps * caps){  GstDecklinkVideoSrc *self = GST_DECKLINK_VIDEO_SRC_CAST (bsrc);  GstCaps *current_caps;  const GstDecklinkMode *mode;  BMDVideoInputFlags flags;  HRESULT ret;  GST_DEBUG_OBJECT (self, "Setting caps %" GST_PTR_FORMAT, caps);  if ((current_caps = gst_pad_get_current_caps (GST_BASE_SRC_PAD (bsrc)))) {    GST_DEBUG_OBJECT (self, "Pad already has caps %" GST_PTR_FORMAT, caps);    if (!gst_caps_is_equal (caps, current_caps)) {      GST_DEBUG_OBJECT (self, "New caps, reconfiguring");      gst_caps_unref (current_caps);      if (self->mode == GST_DECKLINK_MODE_AUTO) {        return TRUE;      } else {        return FALSE;      }    } else {      gst_caps_unref (current_caps);      return TRUE;    }  }  if (!gst_video_info_from_caps (&self->info, caps))    return FALSE;  if (self->input->config && self->connection != GST_DECKLINK_CONNECTION_AUTO) {    ret = self->input->config->SetInt (bmdDeckLinkConfigVideoInputConnection,        gst_decklink_get_connection (self->connection));    if (ret != S_OK) {      GST_ERROR_OBJECT (self, "Failed to set configuration (input source)");      return FALSE;    }    if (self->connection == GST_DECKLINK_CONNECTION_COMPOSITE) {      ret = self->input->config->SetInt (bmdDeckLinkConfigAnalogVideoInputFlags,          bmdAnalogVideoFlagCompositeSetup75);      if (ret != S_OK) {        GST_ERROR_OBJECT (self,            "Failed to set configuration (composite setup)");        return FALSE;      }    }  }  flags = bmdVideoInputFlagDefault;  if (self->mode == GST_DECKLINK_MODE_AUTO) {    bool autoDetection = false;    if (self->input->attributes) {      ret =          self->input->          attributes->GetFlag (BMDDeckLinkSupportsInputFormatDetection,          &autoDetection);      if (ret != S_OK) {        GST_ERROR_OBJECT (self, "Failed to get attribute (autodetection)");        return FALSE;      }      if (autoDetection)        flags |= bmdVideoInputEnableFormatDetection;    }    if (!autoDetection) {      GST_ERROR_OBJECT (self, "Failed to activate auto-detection");      return FALSE;    }  }  mode = gst_decklink_get_mode (self->mode);  g_assert (mode != NULL);  ret = self->input->input->EnableVideoInput (mode->mode,      bmdFormat8BitYUV, flags);  if (ret != S_OK) {    GST_WARNING_OBJECT (self, "Failed to enable video input");    return FALSE;  }  g_mutex_lock (&self->input->lock);  self->input->mode = mode;  self->input->video_enabled = TRUE;  if (self->input->start_streams)    self->input->start_streams (self->input->videosrc);  g_mutex_unlock (&self->input->lock);  return TRUE;}
Developer: Distrotech, Project: gst-plugins-bad, Lines: 91


Example 4: gst_net_client_clock_start

static gboolean
gst_net_client_clock_start (GstNetClientClock * self)
{
  GSocketAddress *servaddr;
  GSocketAddress *myaddr;
  GInetAddress *inetaddr;
  GSocket *socket;
  GError *error = NULL;

  g_return_val_if_fail (self->priv->address != NULL, FALSE);
  g_return_val_if_fail (self->priv->servaddr == NULL, FALSE);

  socket = g_socket_new (G_SOCKET_FAMILY_IPV4, G_SOCKET_TYPE_DATAGRAM,
      G_SOCKET_PROTOCOL_UDP, &error);

  if (socket == NULL)
    goto no_socket;

  /* check address we're bound to, mostly for debugging purposes */
  myaddr = g_socket_get_local_address (socket, &error);
  if (myaddr == NULL)
    goto getsockname_error;

  GST_DEBUG_OBJECT (self, "socket opened on UDP port %hd",
      g_inet_socket_address_get_port (G_INET_SOCKET_ADDRESS (myaddr)));
  g_object_unref (myaddr);

  /* create target address */
  inetaddr = g_inet_address_new_from_string (self->priv->address);
  if (inetaddr == NULL)
    goto bad_address;

  servaddr = g_inet_socket_address_new (inetaddr, self->priv->port);
  g_object_unref (inetaddr);

  g_assert (servaddr != NULL);

  GST_DEBUG_OBJECT (self, "will communicate with %s:%d", self->priv->address,
      self->priv->port);

  self->priv->cancel = g_cancellable_new ();
  self->priv->socket = socket;
  self->priv->servaddr = G_SOCKET_ADDRESS (servaddr);

  self->priv->thread = g_thread_try_new ("GstNetClientClock",
      gst_net_client_clock_thread, self, &error);
  if (error != NULL)
    goto no_thread;

  return TRUE;

  /* ERRORS */
no_socket:
  {
    GST_ERROR_OBJECT (self, "socket_new() failed: %s", error->message);
    g_error_free (error);
    return FALSE;
  }
getsockname_error:
  {
    GST_ERROR_OBJECT (self, "get_local_address() failed: %s", error->message);
    g_error_free (error);
    g_object_unref (socket);
    return FALSE;
  }
bad_address:
  {
    GST_ERROR_OBJECT (self, "inet_address_new_from_string('%s') failed",
        self->priv->address);
    g_object_unref (socket);
    return FALSE;
  }
no_thread:
  {
    GST_ERROR_OBJECT (self, "could not create thread: %s", error->message);
    g_object_unref (self->priv->servaddr);
    self->priv->servaddr = NULL;
    g_object_unref (self->priv->socket);
    self->priv->socket = NULL;
    g_error_free (error);
    return FALSE;
  }
}

Developer: lubing521, Project: gst-embedded-builder, Lines: 87


Example 5: gst_vp8_enc_finish

static gbooleangst_vp8_enc_finish (GstBaseVideoEncoder * base_video_encoder){  GstVP8Enc *encoder;  GstVideoFrame *frame;  int flags = 0;  vpx_codec_err_t status;  vpx_codec_iter_t iter = NULL;  const vpx_codec_cx_pkt_t *pkt;  GST_DEBUG_OBJECT (base_video_encoder, "finish");  encoder = GST_VP8_ENC (base_video_encoder);  status =      vpx_codec_encode (&encoder->encoder, NULL, encoder->n_frames, 1, flags,      0);  if (status != 0) {    GST_ERROR_OBJECT (encoder, "encode returned %d %s", status,        gst_vpx_error_name (status));    return FALSE;  }  pkt = vpx_codec_get_cx_data (&encoder->encoder, &iter);  while (pkt != NULL) {    GstBuffer *buffer;    GstVP8EncCoderHook *hook;    gboolean invisible, keyframe;    GST_DEBUG_OBJECT (encoder, "packet %u type %d", (guint) pkt->data.frame.sz,        pkt->kind);    if (pkt->kind == VPX_CODEC_STATS_PKT        && encoder->multipass_mode == VPX_RC_FIRST_PASS) {      GST_LOG_OBJECT (encoder, "handling STATS packet");      g_byte_array_append (encoder->first_pass_cache_content,          pkt->data.twopass_stats.buf, pkt->data.twopass_stats.sz);      frame = gst_base_video_encoder_get_oldest_frame (base_video_encoder);      if (frame != NULL) {        buffer = gst_buffer_new ();        GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_PREROLL);        frame->src_buffer = buffer;        gst_base_video_encoder_finish_frame (base_video_encoder, frame);      }      pkt = vpx_codec_get_cx_data (&encoder->encoder, &iter);      continue;    } else if (pkt->kind != VPX_CODEC_CX_FRAME_PKT) {      GST_LOG_OBJECT (encoder, "non frame pkt: %d", pkt->kind);      pkt = vpx_codec_get_cx_data (&encoder->encoder, &iter);      continue;    }    invisible = (pkt->data.frame.flags & VPX_FRAME_IS_INVISIBLE) != 0;    keyframe = (pkt->data.frame.flags & VPX_FRAME_IS_KEY) != 0;    frame = gst_base_video_encoder_get_oldest_frame (base_video_encoder);    g_assert (frame != NULL);    hook = frame->coder_hook;    buffer = gst_buffer_new_and_alloc (pkt->data.frame.sz);    memcpy (GST_BUFFER_DATA (buffer), pkt->data.frame.buf, pkt->data.frame.sz);    frame->is_sync_point = frame->is_sync_point || keyframe;    if (hook->image)      g_slice_free (vpx_image_t, hook->image);    hook->image = NULL;    if (invisible) {      hook->invisible = g_list_append (hook->invisible, buffer);    } else {      frame->src_buffer = buffer;      gst_base_video_encoder_finish_frame (base_video_encoder, frame);      frame = NULL;    }    pkt = vpx_codec_get_cx_data (&encoder->encoder, &iter);  }  if (encoder->multipass_mode == VPX_RC_FIRST_PASS      && encoder->multipass_cache_file) {    GError *err = NULL;    if (!g_file_set_contents (encoder->multipass_cache_file,            (const gchar *) encoder->first_pass_cache_content->data,            encoder->first_pass_cache_content->len, &err)) {      GST_ELEMENT_ERROR (encoder, RESOURCE, WRITE, (NULL),          ("Failed to write multipass cache file: %s", err->message));      g_error_free (err);    }  }  return TRUE;}
Developer: ChinnaSuhas, Project: ossbuild, Lines: 96


Example 6: gst_rsvg_decode_image

static GstFlowReturngst_rsvg_decode_image (GstRsvgDec * rsvg, GstBuffer * buffer,    GstVideoCodecFrame * frame){  GstVideoDecoder *decoder = GST_VIDEO_DECODER (rsvg);  GstFlowReturn ret = GST_FLOW_OK;  cairo_t *cr;  cairo_surface_t *surface;  RsvgHandle *handle;  GError *error = NULL;  RsvgDimensionData dimension;  gdouble scalex, scaley;  GstMapInfo minfo;  GstVideoFrame vframe;  GstVideoCodecState *output_state;  GST_LOG_OBJECT (rsvg, "parsing svg");  if (!gst_buffer_map (buffer, &minfo, GST_MAP_READ)) {    GST_ERROR_OBJECT (rsvg, "Failed to get SVG image");    return GST_FLOW_ERROR;  }  handle = rsvg_handle_new_from_data (minfo.data, minfo.size, &error);  if (!handle) {    GST_ERROR_OBJECT (rsvg, "Failed to parse SVG image: %s", error->message);    g_error_free (error);    return GST_FLOW_ERROR;  }  rsvg_handle_get_dimensions (handle, &dimension);  output_state = gst_video_decoder_get_output_state (decoder);  if ((output_state == NULL)      || GST_VIDEO_INFO_WIDTH (&output_state->info) != dimension.width      || GST_VIDEO_INFO_HEIGHT (&output_state->info) != dimension.height) {    /* Create the output state */    gst_video_decoder_set_output_state (decoder, GST_RSVG_VIDEO_FORMAT,        dimension.width, dimension.height, rsvg->input_state);    if (output_state)      gst_video_codec_state_unref (output_state);    output_state = gst_video_decoder_get_output_state (decoder);  }  ret = gst_video_decoder_allocate_output_frame (decoder, frame);  if (ret != GST_FLOW_OK) {    g_object_unref (handle);    GST_ERROR_OBJECT (rsvg, "Buffer allocation failed %s",        gst_flow_get_name (ret));    return ret;  }  GST_LOG_OBJECT (rsvg, "render image at %d x %d",      GST_VIDEO_INFO_HEIGHT (&output_state->info),      GST_VIDEO_INFO_WIDTH (&output_state->info));  if (!gst_video_frame_map (&vframe,          &gst_video_decoder_get_output_state (decoder)->info,          frame->output_buffer, GST_MAP_READWRITE)) {    GST_ERROR_OBJECT (rsvg, "Failed to get SVG image");    return GST_FLOW_ERROR;  }  surface =      cairo_image_surface_create_for_data (GST_VIDEO_FRAME_PLANE_DATA (&vframe,          0), CAIRO_FORMAT_ARGB32, GST_VIDEO_FRAME_WIDTH (&vframe),      GST_VIDEO_FRAME_HEIGHT (&vframe), GST_VIDEO_FRAME_PLANE_STRIDE (&vframe,          0));  cr = cairo_create (surface);  cairo_set_operator (cr, CAIRO_OPERATOR_CLEAR);  cairo_set_source_rgba (cr, 1.0, 1.0, 1.0, 0.0);  cairo_paint (cr);  cairo_set_operator (cr, CAIRO_OPERATOR_OVER);  cairo_set_source_rgba (cr, 0.0, 0.0, 0.0, 1.0);  scalex = scaley = 1.0;  if (GST_VIDEO_INFO_WIDTH (&output_state->info) != dimension.width) {    scalex =        ((gdouble) GST_VIDEO_INFO_WIDTH (&output_state->info)) /        ((gdouble) dimension.width);  }  if (GST_VIDEO_INFO_HEIGHT (&output_state->info) != dimension.height) {    scaley =        ((gdouble) GST_VIDEO_INFO_HEIGHT (&output_state->info)) /        ((gdouble) dimension.height);  }  cairo_scale (cr, scalex, scaley);  rsvg_handle_render_cairo (handle, cr);  g_object_unref (handle);  cairo_destroy (cr);  cairo_surface_destroy (surface);  /* Now unpremultiply Cairo's ARGB to match GStreamer's */  gst_rsvg_decode_unpremultiply (GST_VIDEO_FRAME_PLANE_DATA (&vframe, 0),      GST_VIDEO_FRAME_WIDTH (&vframe), GST_VIDEO_FRAME_HEIGHT (&vframe));  gst_video_codec_state_unref (output_state);//.........这里部分代码省略.........
Developer: cbetz421, Project: gst-plugins-bad, Lines: 101


Example 7: gst_egl_adaptation_init_egl_display

gbooleangst_egl_adaptation_init_egl_display (GstEglAdaptationContext * ctx){  GstMessage *msg;  EGLDisplay display;  GST_DEBUG_OBJECT (ctx->element, "Enter EGL initial configuration");  if (!platform_wrapper_init ()) {    GST_ERROR_OBJECT (ctx->element, "Couldn't init EGL platform wrapper");    goto HANDLE_ERROR;  }  msg =      gst_message_new_need_context (GST_OBJECT_CAST (ctx->element),      GST_EGL_DISPLAY_CONTEXT_TYPE);  gst_element_post_message (GST_ELEMENT_CAST (ctx->element), msg);  GST_OBJECT_LOCK (ctx->element);  if (!ctx->set_display) {    GstContext *context;    GST_OBJECT_UNLOCK (ctx->element);    display = eglGetDisplay (EGL_DEFAULT_DISPLAY);    if (display == EGL_NO_DISPLAY) {      GST_ERROR_OBJECT (ctx->element, "Could not get EGL display connection");      goto HANDLE_ERROR;        /* No EGL error is set by eglGetDisplay() */    }    ctx->display = gst_egl_display_new (display, (GDestroyNotify) eglTerminate);    context = gst_context_new_egl_display (ctx->display, FALSE);    msg = gst_message_new_have_context (GST_OBJECT (ctx->element), context);    gst_element_post_message (GST_ELEMENT_CAST (ctx->element), msg);  }  if (!eglInitialize (gst_egl_display_get (ctx->display),          &ctx->eglglesctx->egl_major, &ctx->eglglesctx->egl_minor)) {    got_egl_error ("eglInitialize");    GST_ERROR_OBJECT (ctx->element, "Could not init EGL display connection");    goto HANDLE_EGL_ERROR;  }  /* Check against required EGL version   * XXX: Need to review the version requirement in terms of the needed API   */  if (ctx->eglglesctx->egl_major < GST_EGLGLESSINK_EGL_MIN_VERSION) {    GST_ERROR_OBJECT (ctx->element, "EGL v%d needed, but you only have v%d.%d",        GST_EGLGLESSINK_EGL_MIN_VERSION, ctx->eglglesctx->egl_major,        ctx->eglglesctx->egl_minor);    goto HANDLE_ERROR;  }  GST_INFO_OBJECT (ctx->element, "System reports supported EGL version v%d.%d",      ctx->eglglesctx->egl_major, ctx->eglglesctx->egl_minor);  eglBindAPI (EGL_OPENGL_ES_API);  return TRUE;  /* Errors */HANDLE_EGL_ERROR:  GST_ERROR_OBJECT (ctx->element, "EGL call returned error %x", eglGetError ());HANDLE_ERROR:  GST_ERROR_OBJECT (ctx->element, "Couldn't setup window/surface from handle");  return FALSE;}
Developer: darrengarvey, Project: gst_plugins_bad_patches, Lines: 66


Example 8: gst_structure_empty_new

//.........这里部分代码省略.........	if (sbc->block_length & BT_A2DP_BLOCK_LENGTH_8) {		g_value_set_int(value, 8);		gst_value_list_prepend_value(list, value);	}	if (sbc->block_length & BT_A2DP_BLOCK_LENGTH_4) {		g_value_set_int(value, 4);		gst_value_list_prepend_value(list, value);	}	g_value_unset(value);	if (list) {		gst_structure_set_value(structure, "blocks", list);		g_free(list);		list = NULL;	}	/* allocation */	g_value_init(value, G_TYPE_STRING);	list = g_value_init(g_new0(GValue,1), GST_TYPE_LIST);	if (sbc->allocation_method & BT_A2DP_ALLOCATION_LOUDNESS) {		g_value_set_static_string(value, "loudness");		gst_value_list_prepend_value(list, value);	}	if (sbc->allocation_method & BT_A2DP_ALLOCATION_SNR) {		g_value_set_static_string(value, "snr");		gst_value_list_prepend_value(list, value);	}	g_value_unset(value);	if (list) {		gst_structure_set_value(structure, "allocation", list);		g_free(list);		list = NULL;	}	/* rate */	g_value_init(value, G_TYPE_INT);	list = g_value_init(g_new0(GValue, 1), GST_TYPE_LIST);	if (sbc->frequency & BT_SBC_SAMPLING_FREQ_48000) {		g_value_set_int(value, 48000);		gst_value_list_prepend_value(list, value);	}	if (sbc->frequency & BT_SBC_SAMPLING_FREQ_44100) {		g_value_set_int(value, 44100);		gst_value_list_prepend_value(list, value);	}	if (sbc->frequency & BT_SBC_SAMPLING_FREQ_32000) {		g_value_set_int(value, 32000);		gst_value_list_prepend_value(list, value);	}	if (sbc->frequency & BT_SBC_SAMPLING_FREQ_16000) {		g_value_set_int(value, 16000);		gst_value_list_prepend_value(list, value);	}	g_value_unset(value);	if (list) {		gst_structure_set_value(structure, "rate", list);		g_free(list);		list = NULL;	}	/* bitpool */	value = g_value_init(value, GST_TYPE_INT_RANGE);	gst_value_set_int_range(value,			MIN(sbc->min_bitpool, TEMPLATE_MAX_BITPOOL),			MIN(sbc->max_bitpool, TEMPLATE_MAX_BITPOOL));	gst_structure_set_value(structure, "bitpool", value);	g_value_unset(value);	/* channels */	mono = FALSE;	stereo = FALSE;	if (sbc->channel_mode & BT_A2DP_CHANNEL_MODE_MONO)		mono = TRUE;	if ((sbc->channel_mode & BT_A2DP_CHANNEL_MODE_STEREO) ||			(sbc->channel_mode &			BT_A2DP_CHANNEL_MODE_DUAL_CHANNEL) ||			(sbc->channel_mode &			BT_A2DP_CHANNEL_MODE_JOINT_STEREO))		stereo = TRUE;	if (mono && stereo) {		g_value_init(value, GST_TYPE_INT_RANGE);		gst_value_set_int_range(value, 1, 2);	} else {		g_value_init(value, G_TYPE_INT);		if (mono)			g_value_set_int(value, 1);		else if (stereo)			g_value_set_int(value, 2);		else {			GST_ERROR_OBJECT(self,				"Unexpected number of channels");			g_value_set_int(value, 0);		}	}	gst_structure_set_value(structure, "channels", value);	g_free(value);	return structure;}
Developer: Mcjesus15, Project: Zio_Other, Lines: 101


Example 9: GST_LOG_OBJECT

//.........这里部分代码省略.........	gst_structure_set_value(structure, "mpegversion", list);	g_free(list);	/* layer */	GST_LOG_OBJECT(self, "setting mpeg layer");	list = g_value_init(g_new0(GValue, 1), GST_TYPE_LIST);	if (mpeg->layer & BT_MPEG_LAYER_1) {		g_value_set_int(value, 1);		gst_value_list_prepend_value(list, value);		valid_layer = TRUE;	}	if (mpeg->layer & BT_MPEG_LAYER_2) {		g_value_set_int(value, 2);		gst_value_list_prepend_value(list, value);		valid_layer = TRUE;	}	if (mpeg->layer & BT_MPEG_LAYER_3) {		g_value_set_int(value, 3);		gst_value_list_prepend_value(list, value);		valid_layer = TRUE;	}	if (list) {		gst_structure_set_value(structure, "layer", list);		g_free(list);		list = NULL;	}	if (!valid_layer) {		gst_structure_free(structure);		g_free(value);		return NULL;	}	/* rate */	GST_LOG_OBJECT(self, "setting mpeg rate");	list = g_value_init(g_new0(GValue, 1), GST_TYPE_LIST);	if (mpeg->frequency & BT_MPEG_SAMPLING_FREQ_48000) {		g_value_set_int(value, 48000);		gst_value_list_prepend_value(list, value);	}	if (mpeg->frequency & BT_MPEG_SAMPLING_FREQ_44100) {		g_value_set_int(value, 44100);		gst_value_list_prepend_value(list, value);	}	if (mpeg->frequency & BT_MPEG_SAMPLING_FREQ_32000) {		g_value_set_int(value, 32000);		gst_value_list_prepend_value(list, value);	}	if (mpeg->frequency & BT_MPEG_SAMPLING_FREQ_24000) {		g_value_set_int(value, 24000);		gst_value_list_prepend_value(list, value);	}	if (mpeg->frequency & BT_MPEG_SAMPLING_FREQ_22050) {		g_value_set_int(value, 22050);		gst_value_list_prepend_value(list, value);	}	if (mpeg->frequency & BT_MPEG_SAMPLING_FREQ_16000) {		g_value_set_int(value, 16000);		gst_value_list_prepend_value(list, value);	}	g_value_unset(value);	if (list) {		gst_structure_set_value(structure, "rate", list);		g_free(list);		list = NULL;	}	/* channels */	GST_LOG_OBJECT(self, "setting mpeg channels");	mono = FALSE;	stereo = FALSE;	if (mpeg->channel_mode & BT_A2DP_CHANNEL_MODE_MONO)		mono = TRUE;	if ((mpeg->channel_mode & BT_A2DP_CHANNEL_MODE_STEREO) ||			(mpeg->channel_mode &			BT_A2DP_CHANNEL_MODE_DUAL_CHANNEL) ||			(mpeg->channel_mode &			BT_A2DP_CHANNEL_MODE_JOINT_STEREO))		stereo = TRUE;	if (mono && stereo) {		g_value_init(value, GST_TYPE_INT_RANGE);		gst_value_set_int_range(value, 1, 2);	} else {		g_value_init(value, G_TYPE_INT);		if (mono)			g_value_set_int(value, 1);		else if (stereo)			g_value_set_int(value, 2);		else {			GST_ERROR_OBJECT(self,				"Unexpected number of channels");			g_value_set_int(value, 0);		}	}	gst_structure_set_value(structure, "channels", value);	g_free(value);	return structure;}
Developer: Mcjesus15, Project: Zio_Other, Lines: 101


Example 10: gst_avdtp_sink_init_sbc_pkt_conf

static gboolean gst_avdtp_sink_init_sbc_pkt_conf(GstAvdtpSink *sink,					GstCaps *caps,					sbc_capabilities_t *pkt){	sbc_capabilities_t *cfg;	const GValue *value = NULL;	const char *pref, *name;	gint rate, subbands, blocks;	GstStructure *structure = gst_caps_get_structure(caps, 0);	cfg = (void *) gst_avdtp_find_caps(sink, BT_A2DP_SBC_SINK);	name = gst_structure_get_name(structure);	if (!(IS_SBC(name))) {		GST_ERROR_OBJECT(sink, "Unexpected format %s, "				"was expecting sbc", name);		return FALSE;	}	value = gst_structure_get_value(structure, "rate");	rate = g_value_get_int(value);	if (rate == 44100)		cfg->frequency = BT_SBC_SAMPLING_FREQ_44100;	else if (rate == 48000)		cfg->frequency = BT_SBC_SAMPLING_FREQ_48000;	else if (rate == 32000)		cfg->frequency = BT_SBC_SAMPLING_FREQ_32000;	else if (rate == 16000)		cfg->frequency = BT_SBC_SAMPLING_FREQ_16000;	else {		GST_ERROR_OBJECT(sink, "Invalid rate while setting caps");		return FALSE;	}	value = gst_structure_get_value(structure, "mode");	pref = g_value_get_string(value);	if (strcmp(pref, "mono") == 0)		cfg->channel_mode = BT_A2DP_CHANNEL_MODE_MONO;	else if (strcmp(pref, "dual") == 0)		cfg->channel_mode = BT_A2DP_CHANNEL_MODE_DUAL_CHANNEL;	else if (strcmp(pref, "stereo") == 0)		cfg->channel_mode = BT_A2DP_CHANNEL_MODE_STEREO;	else if (strcmp(pref, "joint") == 0)		cfg->channel_mode = BT_A2DP_CHANNEL_MODE_JOINT_STEREO;	else {		GST_ERROR_OBJECT(sink, "Invalid mode %s", pref);		return FALSE;	}	value = gst_structure_get_value(structure, "allocation");	pref = g_value_get_string(value);	if (strcmp(pref, "loudness") == 0)		cfg->allocation_method = BT_A2DP_ALLOCATION_LOUDNESS;	else if (strcmp(pref, "snr") == 0)		cfg->allocation_method = BT_A2DP_ALLOCATION_SNR;	else {		GST_ERROR_OBJECT(sink, "Invalid allocation: %s", pref);		return FALSE;	}	value = gst_structure_get_value(structure, "subbands");	subbands = g_value_get_int(value);	if (subbands == 8)		cfg->subbands = BT_A2DP_SUBBANDS_8;	else if (subbands == 4)		cfg->subbands = BT_A2DP_SUBBANDS_4;	else {		GST_ERROR_OBJECT(sink, "Invalid subbands %d", subbands);		return FALSE;	}	value = gst_structure_get_value(structure, "blocks");	blocks = g_value_get_int(value);	if (blocks == 16)		cfg->block_length = BT_A2DP_BLOCK_LENGTH_16;	else if (blocks == 12)		cfg->block_length = BT_A2DP_BLOCK_LENGTH_12;	else if (blocks == 8)		cfg->block_length = BT_A2DP_BLOCK_LENGTH_8;	else if (blocks == 4)		cfg->block_length = BT_A2DP_BLOCK_LENGTH_4;	else {		GST_ERROR_OBJECT(sink, "Invalid blocks %d", blocks);		return FALSE;	}	value = gst_structure_get_value(structure, "bitpool");	cfg->max_bitpool = cfg->min_bitpool = g_value_get_int(value);	memcpy(pkt, cfg, sizeof(*pkt));	return TRUE;}
Developer: Mcjesus15, Project: Zio_Other, Lines: 93


Example 11: gst_avdtp_sink_conf_recv_stream_fd

static gboolean gst_avdtp_sink_conf_recv_stream_fd(					GstAvdtpSink *self){	struct bluetooth_data *data = self->data;	gint ret;	GIOError err;	GError *gerr = NULL;	GIOStatus status;	GIOFlags flags;	gsize read;	ret = gst_avdtp_sink_bluetooth_recvmsg_fd(self);	if (ret < 0)		return FALSE;	if (!self->stream) {		GST_ERROR_OBJECT(self, "Error while configuring device: "				"could not acquire audio socket");		return FALSE;	}	/* set stream socket to nonblock */	GST_LOG_OBJECT(self, "setting stream socket to nonblock");	flags = g_io_channel_get_flags(self->stream);	flags |= G_IO_FLAG_NONBLOCK;	status = g_io_channel_set_flags(self->stream, flags, &gerr);	if (status != G_IO_STATUS_NORMAL) {		if (gerr)			GST_WARNING_OBJECT(self, "Error while "				"setting server socket to nonblock: "				"%s", gerr->message);		else			GST_WARNING_OBJECT(self, "Error while "					"setting server "					"socket to nonblock");	}	/* It is possible there is some outstanding	data in the pipe - we have to empty it */	GST_LOG_OBJECT(self, "emptying stream pipe");	while (1) {		err = g_io_channel_read(self->stream, data->buffer,					(gsize) data->link_mtu,					&read);		if (err != G_IO_ERROR_NONE || read <= 0)			break;	}	/* set stream socket to block */	GST_LOG_OBJECT(self, "setting stream socket to block");	flags = g_io_channel_get_flags(self->stream);	flags &= ~G_IO_FLAG_NONBLOCK;	status = g_io_channel_set_flags(self->stream, flags, &gerr);	if (status != G_IO_STATUS_NORMAL) {		if (gerr)			GST_WARNING_OBJECT(self, "Error while "				"setting server socket to block:"				"%s", gerr->message);		else			GST_WARNING_OBJECT(self, "Error while "				"setting server "				"socket to block");	}	memset(data->buffer, 0, sizeof(data->buffer));	return TRUE;}
Developer: Mcjesus15, Project: Zio_Other, Lines: 68


Example 12: gst_avdtp_sink_configure

static gboolean gst_avdtp_sink_configure(GstAvdtpSink *self,			GstCaps *caps){	gchar buf[BT_SUGGESTED_BUFFER_SIZE];	struct bt_open_req *open_req = (void *) buf;	struct bt_open_rsp *open_rsp = (void *) buf;	struct bt_set_configuration_req *req = (void *) buf;	struct bt_set_configuration_rsp *rsp = (void *) buf;	gboolean ret;	GIOError io_error;	gchar *temp;	GstStructure *structure;	codec_capabilities_t *codec = NULL;	temp = gst_caps_to_string(caps);	GST_DEBUG_OBJECT(self, "configuring device with caps: %s", temp);	g_free(temp);	structure = gst_caps_get_structure(caps, 0);	if (gst_structure_has_name(structure, "audio/x-sbc"))		codec = (void *) gst_avdtp_find_caps(self, BT_A2DP_SBC_SINK);	else if (gst_structure_has_name(structure, "audio/mpeg"))		codec = (void *) gst_avdtp_find_caps(self, BT_A2DP_MPEG12_SINK);	if (codec == NULL) {		GST_ERROR_OBJECT(self, "Couldn't parse caps "				"to packet configuration");		return FALSE;	}	memset(req, 0, BT_SUGGESTED_BUFFER_SIZE);	open_req->h.type = BT_REQUEST;	open_req->h.name = BT_OPEN;	open_req->h.length = sizeof(*open_req);	strncpy(open_req->destination, self->device, 18);	open_req->seid = codec->seid;	open_req->lock = BT_WRITE_LOCK;	io_error = gst_avdtp_sink_audioservice_send(self, &open_req->h);	if (io_error != G_IO_ERROR_NONE) {		GST_ERROR_OBJECT(self, "Error ocurred while sending "					"open packet");		return FALSE;	}	open_rsp->h.length = sizeof(*open_rsp);	io_error = gst_avdtp_sink_audioservice_expect(self,			&open_rsp->h, BT_OPEN);	if (io_error != G_IO_ERROR_NONE) {		GST_ERROR_OBJECT(self, "Error while receiving device "					"confirmation");		return FALSE;	}	memset(req, 0, sizeof(buf));	req->h.type = BT_REQUEST;	req->h.name = BT_SET_CONFIGURATION;	req->h.length = sizeof(*req);	if (codec->type == BT_A2DP_SBC_SINK)		ret = gst_avdtp_sink_init_sbc_pkt_conf(self, caps,				(void *) &req->codec);	else		ret = gst_avdtp_sink_init_mp3_pkt_conf(self, caps,				(void *) &req->codec);	if (!ret) {		GST_ERROR_OBJECT(self, "Couldn't parse caps "				"to packet configuration");		return FALSE;	}	req->h.length += req->codec.length - sizeof(req->codec);	io_error = gst_avdtp_sink_audioservice_send(self, &req->h);	if (io_error != G_IO_ERROR_NONE) {		GST_ERROR_OBJECT(self, "Error ocurred while sending "					"configurarion packet");		return FALSE;	}	rsp->h.length = sizeof(*rsp);	io_error = gst_avdtp_sink_audioservice_expect(self,			&rsp->h, BT_SET_CONFIGURATION);	if (io_error != G_IO_ERROR_NONE) {		GST_ERROR_OBJECT(self, "Error while receiving device "					"confirmation");		return FALSE;	}	self->data->link_mtu = rsp->link_mtu;	return TRUE;}
Developer: Mcjesus15, Project: Zio_Other, Lines: 95


Example 13: gst_v4l2_video_dec_handle_frame

//.........这里部分代码省略.........    gst_video_codec_state_unref (output_state);    if (!gst_video_decoder_negotiate (decoder)) {      if (GST_PAD_IS_FLUSHING (decoder->srcpad))        goto flushing;      else        goto not_negotiated;    }    /* Ensure our internal pool is activated */    if (!gst_buffer_pool_set_active (GST_BUFFER_POOL (self->v4l2capture->pool),            TRUE))      goto activate_failed;  }  if (g_atomic_int_get (&self->processing) == FALSE) {    /* It's possible that the processing thread stopped due to an error */    if (self->output_flow != GST_FLOW_OK &&        self->output_flow != GST_FLOW_FLUSHING) {      GST_DEBUG_OBJECT (self, "Processing loop stopped with error, leaving");      ret = self->output_flow;      goto drop;    }    GST_DEBUG_OBJECT (self, "Starting decoding thread");    /* Start the processing task, when it quits, the task will disable input     * processing to unlock input if draining, or prevent potential block */    g_atomic_int_set (&self->processing, TRUE);    if (!gst_pad_start_task (decoder->srcpad,            (GstTaskFunction) gst_v4l2_video_dec_loop, self,            (GDestroyNotify) gst_v4l2_video_dec_loop_stopped))      goto start_task_failed;  }  if (frame->input_buffer) {    GST_VIDEO_DECODER_STREAM_UNLOCK (decoder);    ret =        gst_v4l2_buffer_pool_process (GST_V4L2_BUFFER_POOL (self->v4l2output->            pool), &frame->input_buffer);    GST_VIDEO_DECODER_STREAM_LOCK (decoder);    if (ret == GST_FLOW_FLUSHING) {      if (g_atomic_int_get (&self->processing) == FALSE)        ret = self->output_flow;      goto drop;    } else if (ret != GST_FLOW_OK) {      goto process_failed;    }    /* No need to keep input arround */    gst_buffer_replace (&frame->input_buffer, NULL);  }  gst_video_codec_frame_unref (frame);  return ret;  /* ERRORS */not_negotiated:  {    GST_ERROR_OBJECT (self, "not negotiated");    ret = GST_FLOW_NOT_NEGOTIATED;    goto drop;  }activate_failed:  {    GST_ELEMENT_ERROR (self, RESOURCE, SETTINGS,        (_("Failed to allocate required memory.")),        ("Buffer pool activation failed"));    ret = GST_FLOW_ERROR;    goto drop;  }flushing:  {    ret = GST_FLOW_FLUSHING;    goto drop;  }start_task_failed:  {    GST_ELEMENT_ERROR (self, RESOURCE, FAILED,        (_("Failed to start decoding thread.")), (NULL));    g_atomic_int_set (&self->processing, FALSE);    ret = GST_FLOW_ERROR;    goto drop;  }process_failed:  {    GST_ELEMENT_ERROR (self, RESOURCE, FAILED,        (_("Failed to process frame.")),        ("Maybe be due to not enough memory or failing driver"));    ret = GST_FLOW_ERROR;    goto drop;  }drop:  {    gst_video_decoder_drop_frame (decoder, frame);    return ret;  }}
Developer: BigBrother-International, Project: gst-plugins-good, Lines: 101


Example 14: pad_chain

static GstFlowReturnpad_chain (GstPad *pad,           GstBuffer *buf){    GOmxCore *gomx;    GOmxPort *in_port;    GstOmxBaseFilter *self;    GstFlowReturn ret = GST_FLOW_OK;    self = GST_OMX_BASE_FILTER (GST_OBJECT_PARENT (pad));    gomx = self->gomx;    GST_LOG_OBJECT (self, "begin");    GST_LOG_OBJECT (self, "gst_buffer: size=%u", GST_BUFFER_SIZE (buf));    GST_LOG_OBJECT (self, "state: %d", gomx->omx_state);    if (G_UNLIKELY (gomx->omx_state == OMX_StateLoaded))    {        g_mutex_lock (self->ready_lock);        GST_INFO_OBJECT (self, "omx: prepare");        /** @todo this should probably go after doing preparations. */        if (self->omx_setup)        {            self->omx_setup (self);        }        setup_ports (self);        g_omx_core_prepare (self->gomx);        if (gomx->omx_state == OMX_StateIdle)        {            self->ready = TRUE;            gst_pad_start_task (self->srcpad, output_loop, self->srcpad);        }        g_mutex_unlock (self->ready_lock);        if (gomx->omx_state != OMX_StateIdle)            goto out_flushing;    }    in_port = self->in_port;    if (G_LIKELY (in_port->enabled))    {        guint buffer_offset = 0;        if (G_UNLIKELY (gomx->omx_state == OMX_StateIdle))        {            GST_INFO_OBJECT (self, "omx: play");            g_omx_core_start (gomx);            if (gomx->omx_state != OMX_StateExecuting)                goto out_flushing;            /* send buffer with codec data flag */            /** @todo move to util */            if (self->codec_data)            {                OMX_BUFFERHEADERTYPE *omx_buffer;                GST_LOG_OBJECT (self, "request buffer");                omx_buffer = g_omx_port_request_buffer (in_port);                if (G_LIKELY (omx_buffer))                {                    omx_buffer->nFlags |= 0x00000080; /* codec data flag */                    omx_buffer->nFilledLen = GST_BUFFER_SIZE (self->codec_data);                    memcpy (omx_buffer->pBuffer + omx_buffer->nOffset, GST_BUFFER_DATA (self->codec_data), omx_buffer->nFilledLen);                    GST_LOG_OBJECT (self, "release_buffer");                    g_omx_port_release_buffer (in_port, omx_buffer);                }            }        }        if (G_UNLIKELY (gomx->omx_state != OMX_StateExecuting))        {            GST_ERROR_OBJECT (self, "Whoa! very wrong");        }        while (G_LIKELY (buffer_offset < GST_BUFFER_SIZE (buf)))        {            OMX_BUFFERHEADERTYPE *omx_buffer;            if (self->last_pad_push_return != GST_FLOW_OK ||                !(gomx->omx_state == OMX_StateExecuting ||                  gomx->omx_state == OMX_StatePause))            {                goto out_flushing;            }            GST_LOG_OBJECT (self, "request buffer");            omx_buffer = g_omx_port_request_buffer (in_port);//.........这里部分代码省略.........
Developer: lhzhang, Project: gst-openmax-devel, Lines: 101


Example 15: gst_inter_audio_src_create

static GstFlowReturngst_inter_audio_src_create (GstBaseSrc * src, guint64 offset, guint size,    GstBuffer ** buf){  GstInterAudioSrc *interaudiosrc = GST_INTER_AUDIO_SRC (src);  GstCaps *caps;  GstBuffer *buffer;  guint n, bpf;  guint64 period_time;  guint64 period_samples;  GST_DEBUG_OBJECT (interaudiosrc, "create");  buffer = NULL;  caps = NULL;  g_mutex_lock (&interaudiosrc->surface->mutex);  if (interaudiosrc->surface->audio_info.finfo) {    if (!gst_audio_info_is_equal (&interaudiosrc->surface->audio_info,            &interaudiosrc->info)) {      caps = gst_audio_info_to_caps (&interaudiosrc->surface->audio_info);      interaudiosrc->timestamp_offset +=          gst_util_uint64_scale (interaudiosrc->n_samples, GST_SECOND,          interaudiosrc->info.rate);      interaudiosrc->n_samples = 0;    }  }  bpf = interaudiosrc->surface->audio_info.bpf;  period_time = interaudiosrc->surface->audio_period_time;  period_samples =      gst_util_uint64_scale (period_time, interaudiosrc->info.rate, GST_SECOND);  if (bpf > 0)    n = gst_adapter_available (interaudiosrc->surface->audio_adapter) / bpf;  else    n = 0;  if (n > period_samples)    n = period_samples;  if (n > 0) {    buffer = gst_adapter_take_buffer (interaudiosrc->surface->audio_adapter,        n * bpf);  } else {    buffer = gst_buffer_new ();    GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_GAP);  }  g_mutex_unlock (&interaudiosrc->surface->mutex);  if (caps) {    gboolean ret = gst_base_src_set_caps (src, caps);    gst_caps_unref (caps);    if (!ret) {      GST_ERROR_OBJECT (src, "Failed to set caps %" GST_PTR_FORMAT, caps);      if (buffer)        gst_buffer_unref (buffer);      return GST_FLOW_NOT_NEGOTIATED;    }  }  buffer = gst_buffer_make_writable (buffer);  bpf = interaudiosrc->info.bpf;  if (n < period_samples) {    GstMapInfo map;    GstMemory *mem;    GST_DEBUG_OBJECT (interaudiosrc,        "creating %" G_GUINT64_FORMAT " samples of silence",        period_samples - n);    mem = gst_allocator_alloc (NULL, (period_samples - n) * bpf, NULL);    if (gst_memory_map (mem, &map, GST_MAP_WRITE)) {      gst_audio_format_fill_silence (interaudiosrc->info.finfo, map.data,          map.size);      gst_memory_unmap (mem, &map);    }    gst_buffer_prepend_memory (buffer, mem);  }  n = period_samples;  GST_BUFFER_OFFSET (buffer) = interaudiosrc->n_samples;  GST_BUFFER_OFFSET_END (buffer) = interaudiosrc->n_samples + n;  GST_BUFFER_TIMESTAMP (buffer) = interaudiosrc->timestamp_offset +      gst_util_uint64_scale (interaudiosrc->n_samples, GST_SECOND,      interaudiosrc->info.rate);  GST_DEBUG_OBJECT (interaudiosrc, "create ts %" GST_TIME_FORMAT,      GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buffer)));  GST_BUFFER_DURATION (buffer) = interaudiosrc->timestamp_offset +      gst_util_uint64_scale (interaudiosrc->n_samples + n, GST_SECOND,      interaudiosrc->info.rate) - GST_BUFFER_TIMESTAMP (buffer);  GST_BUFFER_FLAG_UNSET (buffer, GST_BUFFER_FLAG_DISCONT);  if (interaudiosrc->n_samples == 0) {    GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_DISCONT);  }  interaudiosrc->n_samples += n;  *buf = buffer;  return GST_FLOW_OK;}
Developer: Distrotech, Project: gst-plugins-bad, Lines: 100


Example 16: gst_avdtp_sink_init_mp3_pkt_conf

static gboolean gst_avdtp_sink_init_mp3_pkt_conf(
		GstAvdtpSink *self, GstCaps *caps,
		mpeg_capabilities_t *pkt)
{
	const GValue *value = NULL;
	gint rate, layer;
	const gchar *name;
	GstStructure *structure = gst_caps_get_structure(caps, 0);

	name = gst_structure_get_name(structure);

	if (!(IS_MPEG_AUDIO(name))) {
		GST_ERROR_OBJECT(self, "Unexpected format %s, "
				"was expecting mp3", name);
		return FALSE;
	}

	/* layer */
	value = gst_structure_get_value(structure, "layer");
	layer = g_value_get_int(value);
	if (layer == 1)
		pkt->layer = BT_MPEG_LAYER_1;
	else if (layer == 2)
		pkt->layer = BT_MPEG_LAYER_2;
	else if (layer == 3)
		pkt->layer = BT_MPEG_LAYER_3;
	else {
		GST_ERROR_OBJECT(self, "Unexpected layer: %d", layer);
		return FALSE;
	}

	/* crc */
	if (self->mp3_using_crc != -1)
		pkt->crc = self->mp3_using_crc;
	else {
		GST_ERROR_OBJECT(self, "No info about crc was received, "
				" can't proceed");
		return FALSE;
	}

	/* channel mode */
	if (self->channel_mode != -1)
		pkt->channel_mode = self->channel_mode;
	else {
		GST_ERROR_OBJECT(self, "No info about channel mode "
				"received, can't proceed");
		return FALSE;
	}

	/* mpf - we will only use the mandatory one */
	pkt->mpf = 0;

	value = gst_structure_get_value(structure, "rate");
	rate = g_value_get_int(value);
	if (rate == 44100)
		pkt->frequency = BT_MPEG_SAMPLING_FREQ_44100;
	else if (rate == 48000)
		pkt->frequency = BT_MPEG_SAMPLING_FREQ_48000;
	else if (rate == 32000)
		pkt->frequency = BT_MPEG_SAMPLING_FREQ_32000;
	else if (rate == 24000)
		pkt->frequency = BT_MPEG_SAMPLING_FREQ_24000;
	else if (rate == 22050)
		pkt->frequency = BT_MPEG_SAMPLING_FREQ_22050;
	else if (rate == 16000)
		pkt->frequency = BT_MPEG_SAMPLING_FREQ_16000;
	else {
		GST_ERROR_OBJECT(self, "Invalid rate while setting caps");
		return FALSE;
	}

	/* vbr - we always say its vbr, we don't have how to know it */
	pkt->bitrate = 0x8000;

	return TRUE;
}

Developer: Mcjesus15, Project: Zio_Other, Lines: 76


Example 17: gst_openjpeg_dec_negotiate

static GstFlowReturngst_openjpeg_dec_negotiate (GstOpenJPEGDec * self, opj_image_t * image){  GstVideoFormat format;  gint width, height;  if (image->color_space == OPJ_CLRSPC_UNKNOWN || image->color_space == 0)    image->color_space = self->color_space;  switch (image->color_space) {    case OPJ_CLRSPC_SRGB:      if (image->numcomps == 4) {        if (image->comps[0].dx != 1 || image->comps[0].dy != 1 ||            image->comps[1].dx != 1 || image->comps[1].dy != 1 ||            image->comps[2].dx != 1 || image->comps[2].dy != 1 ||            image->comps[3].dx != 1 || image->comps[3].dy != 1) {          GST_ERROR_OBJECT (self, "Sub-sampling for RGB not supported");          return GST_FLOW_NOT_NEGOTIATED;        }        if (get_highest_prec (image) == 8) {          self->fill_frame = fill_frame_packed8_4;          format = GST_VIDEO_FORMAT_ARGB;        } else if (get_highest_prec (image) <= 16) {          self->fill_frame = fill_frame_packed16_4;          format = GST_VIDEO_FORMAT_ARGB64;        } else {          GST_ERROR_OBJECT (self, "Unsupported depth %d", image->comps[3].prec);          return GST_FLOW_NOT_NEGOTIATED;        }      } else if (image->numcomps == 3) {        if (image->comps[0].dx != 1 || image->comps[0].dy != 1 ||            image->comps[1].dx != 1 || image->comps[1].dy != 1 ||            image->comps[2].dx != 1 || image->comps[2].dy != 1) {          GST_ERROR_OBJECT (self, "Sub-sampling for RGB not supported");          return GST_FLOW_NOT_NEGOTIATED;        }        if (get_highest_prec (image) == 8) {          self->fill_frame = fill_frame_packed8_3;          format = GST_VIDEO_FORMAT_ARGB;        } else if (get_highest_prec (image) <= 16) {          self->fill_frame = fill_frame_packed16_3;          format = GST_VIDEO_FORMAT_ARGB64;        } else {          GST_ERROR_OBJECT (self, "Unsupported depth %d",              get_highest_prec (image));          return GST_FLOW_NOT_NEGOTIATED;        }      } else {        GST_ERROR_OBJECT (self, "Unsupported number of RGB components: %d",            image->numcomps);        return GST_FLOW_NOT_NEGOTIATED;      }      break;    case OPJ_CLRSPC_GRAY:      if (image->numcomps == 1) {        if (image->comps[0].dx != 1 && image->comps[0].dy != 1) {          GST_ERROR_OBJECT (self, "Sub-sampling for GRAY not supported");          return GST_FLOW_NOT_NEGOTIATED;        }        if (get_highest_prec (image) == 8) {          self->fill_frame = fill_frame_planar8_1;          format = GST_VIDEO_FORMAT_GRAY8;        } else if (get_highest_prec (image) <= 16) {          self->fill_frame = fill_frame_planar16_1;#if G_BYTE_ORDER == G_LITTLE_ENDIAN          format = GST_VIDEO_FORMAT_GRAY16_LE;#else          format = GST_VIDEO_FORMAT_GRAY16_BE;#endif        } else {          GST_ERROR_OBJECT (self, "Unsupported depth %d",              get_highest_prec (image));          return GST_FLOW_NOT_NEGOTIATED;        }      } else {        GST_ERROR_OBJECT (self, "Unsupported number of GRAY components: %d",            image->numcomps);        return GST_FLOW_NOT_NEGOTIATED;      }      break;    case OPJ_CLRSPC_SYCC:      if (image->numcomps != 3 && image->numcomps != 4) {        GST_ERROR_OBJECT (self, "Unsupported number of YUV components: %d",            image->numcomps);        return GST_FLOW_NOT_NEGOTIATED;      }      if (image->comps[0].dx != 1 || image->comps[0].dy != 1) {        GST_ERROR_OBJECT (self, "Sub-sampling of luma plane not supported");        return GST_FLOW_NOT_NEGOTIATED;      }      if (image->comps[1].dx != 
image->comps[2].dx ||          image->comps[1].dy != image->comps[2].dy) {        GST_ERROR_OBJECT (self,            "Different sub-sampling of chroma planes not supported");        return GST_FLOW_ERROR;//.........这里部分代码省略.........
Developer: ndufresne, Project: gst-plugins-bad, Lines: 101


Example 18: gst_video_mark_yuv

static GstFlowReturngst_video_mark_yuv (GstSimpleVideoMark * simplevideomark, GstVideoFrame * frame){  gint i, pw, ph, row_stride, pixel_stride;  gint width, height, offset_calc, x, y;  guint8 *d;  guint64 pattern_shift;  guint8 color;  gint total_pattern;  width = frame->info.width;  height = frame->info.height;  pw = simplevideomark->pattern_width;  ph = simplevideomark->pattern_height;  row_stride = GST_VIDEO_FRAME_COMP_STRIDE (frame, 0);  pixel_stride = GST_VIDEO_FRAME_COMP_PSTRIDE (frame, 0);  d = GST_VIDEO_FRAME_COMP_DATA (frame, 0);  offset_calc =      row_stride * (height - ph - simplevideomark->bottom_offset) +      pixel_stride * simplevideomark->left_offset;  x = simplevideomark->left_offset;  y = height - ph - simplevideomark->bottom_offset;  total_pattern =      simplevideomark->pattern_count + simplevideomark->pattern_data_count;  /* If x and y offset values are outside the video, no need to draw */  if ((x + (pw * total_pattern)) < 0 || x > width || (y + height) < 0      || y > height) {    GST_ERROR_OBJECT (simplevideomark,        "simplevideomark pattern is outside the video. Not drawing.");    return GST_FLOW_OK;  }  /* Offset calculation less than 0, then reset to 0 */  if (offset_calc < 0)    offset_calc = 0;  /* Y position of mark is negative or pattern exceeds the video height,     then recalculate pattern height for partial display */  if (y < 0)    ph += y;  else if ((y + ph) > height)    ph = height - y;  /* If pattern height is less than 0, need not draw anything */  if (ph < 0)    return GST_FLOW_OK;  /* move to start of bottom left */  d += offset_calc;  /* draw the bottom left pixels */  for (i = 0; i < simplevideomark->pattern_count; i++) {    gint draw_pw;    if (i & 1)      /* odd pixels must be white */      color = 255;    else      color = 0;    /* X position of mark is negative or pattern exceeds the video width,       then recalculate pattern width for partial display */    draw_pw = calculate_pw (pw, x, width);    /* If pattern width is less than 0, continue with the next pattern */    if (draw_pw < 0)      continue;    /* draw box of width * height */    gst_video_mark_draw_box (simplevideomark, d, draw_pw, ph, row_stride,        pixel_stride, color);    /* move to i-th pattern */    d += pixel_stride * draw_pw;    x += draw_pw;    if ((x + (pw * (total_pattern - i - 1))) < 0 || x >= width)      return GST_FLOW_OK;  }  pattern_shift =      G_GUINT64_CONSTANT (1) << (simplevideomark->pattern_data_count - 1);  /* get the data of the pattern */  for (i = 0; i < simplevideomark->pattern_data_count; i++) {    gint draw_pw;    if (simplevideomark->pattern_data & pattern_shift)      color = 255;    else      color = 0;    /* X position of mark is negative or pattern exceeds the video width,       then recalculate pattern width for partial display */    draw_pw = calculate_pw (pw, x, width);    /* If pattern width is less than 0, continue with the next pattern */    if (draw_pw < 0)      continue;    gst_video_mark_draw_box (simplevideomark, d, draw_pw, ph, row_stride,        pixel_stride, color);//.........这里部分代码省略.........
Developer: 0p1pp1, Project: gst-plugins-bad, Lines: 101


Example 19: gst_insert_bin_do_change

//.........这里部分代码省略.........        other_pad = get_single_pad (data->element, GST_PAD_SRC);      if (!other_pad) {        GST_WARNING_OBJECT (self, "Can not get element's other pad");        goto error;      }      other_peer = gst_pad_get_peer (other_pad);      gst_object_unref (other_pad);      if (!other_peer) {        GST_WARNING_OBJECT (self, "Can not get element's other peer");        goto error;      }      /* Get the negotiated caps for the source pad peer,       * because renegotiation while the pipeline is playing doesn't work       * that fast.       */      if (gst_pad_get_direction (pad) == GST_PAD_SRC)        caps = gst_pad_get_current_caps (pad);      else        peercaps = gst_pad_get_current_caps (other_peer);      if (!caps)        caps = gst_pad_query_caps (pad, NULL);      if (!peercaps)        peercaps = gst_pad_query_caps (other_peer, NULL);      can_intersect = gst_caps_can_intersect (caps, peercaps);      gst_caps_unref (caps);      gst_caps_unref (peercaps);      if (!can_intersect) {        GST_WARNING_OBJECT (self, "Pads are incompatible without the element");        goto error;      }      if (gst_pad_get_direction (other_peer) == GST_PAD_SRC &&          gst_pad_is_active (other_peer)) {        gulong probe_id;        probe_id = gst_pad_add_probe (other_peer,            GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM,            wait_and_drop_eos_cb, NULL, NULL);        gst_pad_send_event (peer, gst_event_new_eos ());        gst_pad_remove_probe (other_peer, probe_id);      }      gst_element_set_locked_state (data->element, TRUE);      gst_element_set_state (data->element, GST_STATE_NULL);      if (!gst_bin_remove (GST_BIN (self), data->element)) {        GST_WARNING_OBJECT (self, "Element removal rejected");        goto error;      }      gst_element_set_locked_state (data->element, FALSE);      if (gst_pad_get_direction (pad) == GST_PAD_SRC)        success = GST_PAD_LINK_SUCCESSFUL (gst_pad_link_full (pad, other_peer,                GST_PAD_LINK_CHECK_HIERARCHY |                GST_PAD_LINK_CHECK_TEMPLATE_CAPS));      else        success = GST_PAD_LINK_SUCCESSFUL (gst_pad_link_full (other_peer, pad,                GST_PAD_LINK_CHECK_HIERARCHY |                GST_PAD_LINK_CHECK_TEMPLATE_CAPS));      gst_object_unref (other_peer);      other_peer = NULL;      if (!success) {        GST_ERROR_OBJECT (self, "Could not re-link after the element's"            " removal");        goto error;      }    }    gst_insert_bin_change_data_complete (self, data, TRUE);    gst_object_unref (peer);    GST_OBJECT_LOCK (self);    continue;  done:    if (other_peer != NULL)      gst_object_unref (other_peer);    if (peer != NULL)      gst_object_unref (peer);    break;  retry:    GST_OBJECT_LOCK (self);    g_queue_push_head (&self->priv->change_queue, data);    goto done;  error:    /* Handle error */    gst_insert_bin_change_data_complete (self, data, FALSE);    GST_OBJECT_LOCK (self);    goto done;  }next:  gst_insert_bin_block_pad_unlock (self);}
Developer: Lachann, Project: gst-plugins-bad, Lines: 101


示例20: gst_video_convert_set_info

static gbooleangst_video_convert_set_info (GstVideoFilter * filter,    GstCaps * incaps, GstVideoInfo * in_info, GstCaps * outcaps,    GstVideoInfo * out_info){  GstVideoConvert *space;  space = GST_VIDEO_CONVERT_CAST (filter);  if (space->convert) {    gst_video_converter_free (space->convert);    space->convert = NULL;  }  /* these must match */  if (in_info->width != out_info->width || in_info->height != out_info->height      || in_info->fps_n != out_info->fps_n || in_info->fps_d != out_info->fps_d)    goto format_mismatch;  /* if present, these must match too */  if (in_info->par_n != out_info->par_n || in_info->par_d != out_info->par_d)    goto format_mismatch;  /* if present, these must match too */  if (in_info->interlace_mode != out_info->interlace_mode)    goto format_mismatch;  space->convert = gst_video_converter_new (in_info, out_info,      gst_structure_new ("GstVideoConvertConfig",          GST_VIDEO_CONVERTER_OPT_DITHER_METHOD, GST_TYPE_VIDEO_DITHER_METHOD,          space->dither,          GST_VIDEO_CONVERTER_OPT_DITHER_QUANTIZATION, G_TYPE_UINT,          space->dither_quantization,          GST_VIDEO_CONVERTER_OPT_CHROMA_RESAMPLER_METHOD,          GST_TYPE_VIDEO_RESAMPLER_METHOD, space->chroma_resampler,          GST_VIDEO_CONVERTER_OPT_ALPHA_MODE,          GST_TYPE_VIDEO_ALPHA_MODE, space->alpha_mode,          GST_VIDEO_CONVERTER_OPT_ALPHA_VALUE,          G_TYPE_DOUBLE, space->alpha_value,          GST_VIDEO_CONVERTER_OPT_CHROMA_MODE,          GST_TYPE_VIDEO_CHROMA_MODE, space->chroma_mode,          GST_VIDEO_CONVERTER_OPT_MATRIX_MODE,          GST_TYPE_VIDEO_MATRIX_MODE, space->matrix_mode,          GST_VIDEO_CONVERTER_OPT_GAMMA_MODE,          GST_TYPE_VIDEO_GAMMA_MODE, space->gamma_mode,          GST_VIDEO_CONVERTER_OPT_PRIMARIES_MODE,          GST_TYPE_VIDEO_PRIMARIES_MODE, space->primaries_mode,          GST_VIDEO_CONVERTER_OPT_THREADS, G_TYPE_UINT,          space->n_threads, NULL));  if (space->convert == NULL)    goto no_convert;  GST_DEBUG ("reconfigured %d %d", GST_VIDEO_INFO_FORMAT (in_info),      GST_VIDEO_INFO_FORMAT (out_info));  return TRUE;  /* ERRORS */format_mismatch:  {    GST_ERROR_OBJECT (space, "input and output formats do not match");    return FALSE;  }no_convert:  {    GST_ERROR_OBJECT (space, "could not create converter");    return FALSE;  }}
Author: thaytan, Project: gst-plugins-base, Lines: 70


Example 21: gst_egl_image_allocator_alloc_eglimage

//......... part of the code omitted here .........
    case GST_VIDEO_FORMAT_RGBA:
    case GST_VIDEO_FORMAT_BGRA:
    case GST_VIDEO_FORMAT_ARGB:
    case GST_VIDEO_FORMAT_ABGR:
    case GST_VIDEO_FORMAT_RGBx:
    case GST_VIDEO_FORMAT_BGRx:
    case GST_VIDEO_FORMAT_xRGB:
    case GST_VIDEO_FORMAT_xBGR:
    case GST_VIDEO_FORMAT_AYUV:{
      gsize size;
      EGLImageKHR image;

      mem[0] =
          gst_egl_image_allocator_alloc (allocator, display,
          GST_VIDEO_GL_TEXTURE_TYPE_RGBA, GST_VIDEO_INFO_WIDTH (&info),
          GST_VIDEO_INFO_HEIGHT (&info), &size);
      if (mem[0]) {
        stride[0] = size / GST_VIDEO_INFO_HEIGHT (&info);
        n_mem = 1;
        GST_MINI_OBJECT_FLAG_SET (mem[0], GST_MEMORY_FLAG_NO_SHARE);
      } else {
        data = g_slice_new0 (GstEGLGLESImageData);

        stride[0] = GST_ROUND_UP_4 (GST_VIDEO_INFO_WIDTH (&info) * 4);
        size = stride[0] * GST_VIDEO_INFO_HEIGHT (&info);

        glGenTextures (1, &data->texture);
        if (got_gl_error ("glGenTextures"))
          goto mem_error;

        glBindTexture (GL_TEXTURE_2D, data->texture);
        if (got_gl_error ("glBindTexture"))
          goto mem_error;

        /* Set 2D resizing params */
        glTexParameteri (GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
        glTexParameteri (GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);

        /* If these are not set the texture image unit will return
         * (R, G, B, A) = black on glTexImage2D for non-POT width/height
         * frames. For a deeper explanation take a look at the OpenGL ES
         * documentation for glTexParameter */
        glTexParameteri (GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
        glTexParameteri (GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
        if (got_gl_error ("glTexParameteri"))
          goto mem_error;

        glTexImage2D (GL_TEXTURE_2D, 0, GL_RGBA,
            GST_VIDEO_INFO_WIDTH (&info),
            GST_VIDEO_INFO_HEIGHT (&info), 0, GL_RGBA, GL_UNSIGNED_BYTE, NULL);
        if (got_gl_error ("glTexImage2D"))
          goto mem_error;

        image =
            eglCreateImageKHR (gst_egl_display_get (display),
            eglcontext, EGL_GL_TEXTURE_2D_KHR,
            (EGLClientBuffer) (guintptr) data->texture, NULL);
        if (got_egl_error ("eglCreateImageKHR"))
          goto mem_error;

        mem[0] =
            gst_egl_image_allocator_wrap (allocator, display,
            image, GST_VIDEO_GL_TEXTURE_TYPE_RGBA,
            flags, size, data, (GDestroyNotify) gst_egl_gles_image_data_free);

        n_mem = 1;
      }
      break;
    }
    default:
      g_assert_not_reached ();
      break;
  }

  buffer = gst_buffer_new ();
  gst_buffer_add_video_meta_full (buffer, 0, format, width, height,
      GST_VIDEO_INFO_N_PLANES (&info), offset, stride);

  for (i = 0; i < n_mem; i++)
    gst_buffer_append_memory (buffer, mem[i]);

  return buffer;

mem_error:
  {
    GST_ERROR_OBJECT (GST_CAT_DEFAULT, "Failed to create EGLImage");

    if (data)
      gst_egl_gles_image_data_free (data);

    if (mem[0])
      gst_memory_unref (mem[0]);
    if (mem[1])
      gst_memory_unref (mem[1]);
    if (mem[2])
      gst_memory_unref (mem[2]);

    return NULL;
  }
}
Author: darrengarvey, Project: gst_plugins_bad_patches, Lines: 101


Example 22: gst_gio_base_src_get_size

static gboolean
gst_gio_base_src_get_size (GstBaseSrc * base_src, guint64 * size)
{
  GstGioBaseSrc *src = GST_GIO_BASE_SRC (base_src);

  if (G_IS_FILE_INPUT_STREAM (src->stream)) {
    GFileInfo *info;
    GError *err = NULL;

    info = g_file_input_stream_query_info (G_FILE_INPUT_STREAM (src->stream),
        G_FILE_ATTRIBUTE_STANDARD_SIZE, src->cancel, &err);

    if (info != NULL) {
      *size = g_file_info_get_size (info);
      g_object_unref (info);
      GST_DEBUG_OBJECT (src, "found size: %" G_GUINT64_FORMAT, *size);
      return TRUE;
    }

    if (!gst_gio_error (src, "g_file_input_stream_query_info", &err, NULL)) {
      if (GST_GIO_ERROR_MATCHES (err, NOT_SUPPORTED))
        GST_DEBUG_OBJECT (src, "size information not available");
      else
        GST_WARNING_OBJECT (src, "size information retrieval failed: %s",
            err->message);

      g_clear_error (&err);
    }
  }

  if (GST_GIO_STREAM_IS_SEEKABLE (src->stream)) {
    goffset old;
    goffset stream_size;
    gboolean ret;
    GSeekable *seekable = G_SEEKABLE (src->stream);
    GError *err = NULL;

    old = g_seekable_tell (seekable);

    ret = g_seekable_seek (seekable, 0, G_SEEK_END, src->cancel, &err);
    if (!ret) {
      if (!gst_gio_error (src, "g_seekable_seek", &err, NULL)) {
        if (GST_GIO_ERROR_MATCHES (err, NOT_SUPPORTED))
          GST_DEBUG_OBJECT (src,
              "Seeking to the end of stream is not supported");
        else
          GST_WARNING_OBJECT (src, "Seeking to end of stream failed: %s",
              err->message);
        g_clear_error (&err);
      } else {
        GST_WARNING_OBJECT (src, "Seeking to end of stream failed");
      }
      return FALSE;
    }

    stream_size = g_seekable_tell (seekable);

    ret = g_seekable_seek (seekable, old, G_SEEK_SET, src->cancel, &err);
    if (!ret) {
      if (!gst_gio_error (src, "g_seekable_seek", &err, NULL)) {
        if (GST_GIO_ERROR_MATCHES (err, NOT_SUPPORTED))
          GST_ERROR_OBJECT (src, "Seeking to the old position not supported");
        else
          GST_ERROR_OBJECT (src, "Seeking to the old position failed: %s",
              err->message);
        g_clear_error (&err);
      } else {
        GST_ERROR_OBJECT (src, "Seeking to the old position failed");
      }
      return FALSE;
    }

    if (stream_size >= 0) {
      *size = stream_size;
      return TRUE;
    }
  }

  return FALSE;
}
Author: adenexter, Project: gst-plugins-base, Lines: 81


Example 23: gst_video_filter_propose_allocation

/* Answer the allocation query downstream. */
static gboolean
gst_video_filter_propose_allocation (GstBaseTransform * trans,
    GstQuery * decide_query, GstQuery * query)
{
  GstVideoFilter *filter = GST_VIDEO_FILTER_CAST (trans);
  GstVideoInfo info;
  GstBufferPool *pool;
  GstCaps *caps;
  guint size;

  if (!GST_BASE_TRANSFORM_CLASS (parent_class)->propose_allocation (trans,
          decide_query, query))
    return FALSE;

  /* passthrough, we're done */
  if (decide_query == NULL)
    return TRUE;

  gst_query_parse_allocation (query, &caps, NULL);

  if (caps == NULL)
    return FALSE;

  if (!gst_video_info_from_caps (&info, caps))
    return FALSE;

  size = GST_VIDEO_INFO_SIZE (&info);

  if (gst_query_get_n_allocation_pools (query) == 0) {
    GstStructure *structure;
    GstAllocator *allocator = NULL;
    GstAllocationParams params = { 0, 0, 0, 15, };

    if (gst_query_get_n_allocation_params (query) > 0)
      gst_query_parse_nth_allocation_param (query, 0, &allocator, &params);
    else
      gst_query_add_allocation_param (query, allocator, &params);

    pool = gst_video_buffer_pool_new ();

    structure = gst_buffer_pool_get_config (pool);
    gst_buffer_pool_config_set_params (structure, caps, size, 0, 0);
    gst_buffer_pool_config_set_allocator (structure, allocator, &params);

    if (allocator)
      gst_object_unref (allocator);

    if (!gst_buffer_pool_set_config (pool, structure))
      goto config_failed;

    gst_query_add_allocation_pool (query, pool, size, 0, 0);
    gst_object_unref (pool);
    gst_query_add_allocation_meta (query, GST_VIDEO_META_API_TYPE, NULL);
  }

  return TRUE;

  /* ERRORS */
config_failed:
  {
    GST_ERROR_OBJECT (filter, "failed to set config");
    gst_object_unref (pool);
    return FALSE;
  }
}
Author: PeterXu, Project: gst-mobile, Lines: 66


Example 24: gst_gio_base_src_create

static GstFlowReturn
gst_gio_base_src_create (GstBaseSrc * base_src, guint64 offset, guint size,
    GstBuffer ** buf_return)
{
  GstGioBaseSrc *src = GST_GIO_BASE_SRC (base_src);
  GstBuffer *buf;
  GstFlowReturn ret = GST_FLOW_OK;

  g_return_val_if_fail (G_IS_INPUT_STREAM (src->stream), GST_FLOW_ERROR);

  /* If we have the requested part in our cache take a subbuffer of that,
   * otherwise fill the cache again with at least 4096 bytes from the
   * requested offset and return a subbuffer of that.
   *
   * We need caching because every read/seek operation will need to go
   * over DBus if our backend is GVfs and this is painfully slow. */
  if (src->cache && offset >= GST_BUFFER_OFFSET (src->cache) &&
      offset + size <= GST_BUFFER_OFFSET_END (src->cache)) {
    GST_DEBUG_OBJECT (src, "Creating subbuffer from cached buffer: offset %"
        G_GUINT64_FORMAT " length %u", offset, size);

    buf = gst_buffer_create_sub (src->cache,
        offset - GST_BUFFER_OFFSET (src->cache), size);

    GST_BUFFER_OFFSET (buf) = offset;
    GST_BUFFER_OFFSET_END (buf) = offset + size;
    GST_BUFFER_SIZE (buf) = size;
  } else {
    guint cachesize = MAX (4096, size);
    gssize read, res;
    gboolean success, eos;
    GError *err = NULL;

    if (src->cache) {
      gst_buffer_unref (src->cache);
      src->cache = NULL;
    }

    if (G_UNLIKELY (offset != src->position)) {
      if (!GST_GIO_STREAM_IS_SEEKABLE (src->stream))
        return GST_FLOW_NOT_SUPPORTED;

      GST_DEBUG_OBJECT (src, "Seeking to position %" G_GUINT64_FORMAT, offset);
      ret = gst_gio_seek (src, G_SEEKABLE (src->stream), offset, src->cancel);

      if (ret == GST_FLOW_OK)
        src->position = offset;
      else
        return ret;
    }

    src->cache = gst_buffer_try_new_and_alloc (cachesize);
    if (G_UNLIKELY (src->cache == NULL)) {
      GST_ERROR_OBJECT (src, "Failed to allocate %u bytes", cachesize);
      return GST_FLOW_ERROR;
    }

    GST_LOG_OBJECT (src, "Reading %u bytes from offset %" G_GUINT64_FORMAT,
        cachesize, offset);

    /* GIO sometimes gives less bytes than requested although
     * it's not at the end of file. SMB for example only
     * supports reads up to 64k. So we loop here until we get at
     * at least the requested amount of bytes or a read returns
     * nothing. */
    read = 0;
    while (size - read > 0 && (res =
            g_input_stream_read (G_INPUT_STREAM (src->stream),
                GST_BUFFER_DATA (src->cache) + read, cachesize - read,
                src->cancel, &err)) > 0) {
      read += res;
    }

    success = (read >= 0);
    eos = (cachesize > 0 && read == 0);

    if (!success && !gst_gio_error (src, "g_input_stream_read", &err, &ret)) {
      GST_ELEMENT_ERROR (src, RESOURCE, READ, (NULL),
          ("Could not read from stream: %s", err->message));
      g_clear_error (&err);
    }

    if (success && !eos) {
      src->position += read;
      GST_BUFFER_SIZE (src->cache) = read;
      GST_BUFFER_OFFSET (src->cache) = offset;
      GST_BUFFER_OFFSET_END (src->cache) = offset + read;

      GST_DEBUG_OBJECT (src, "Read successful");
      GST_DEBUG_OBJECT (src, "Creating subbuffer from new "
          "cached buffer: offset %" G_GUINT64_FORMAT " length %u", offset,
          size);

      buf = gst_buffer_create_sub (src->cache, 0, MIN (size, read));
      GST_BUFFER_OFFSET (buf) = offset;
      GST_BUFFER_OFFSET_END (buf) = offset + MIN (size, read);
      GST_BUFFER_SIZE (buf) = MIN (size, read);
    } else {
//......... part of the code omitted here .........
Author: adenexter, Project: gst-plugins-base, Lines: 101


Example 25: gst_omx_audio_dec_drain

static GstFlowReturn
gst_omx_audio_dec_drain (GstOMXAudioDec * self)
{
  GstOMXAudioDecClass *klass;
  GstOMXBuffer *buf;
  GstOMXAcquireBufferReturn acq_ret;
  OMX_ERRORTYPE err;

  GST_DEBUG_OBJECT (self, "Draining component");

  klass = GST_OMX_AUDIO_DEC_GET_CLASS (self);

  if (!self->started) {
    GST_DEBUG_OBJECT (self, "Component not started yet");
    return GST_FLOW_OK;
  }
  self->started = FALSE;

  if ((klass->cdata.hacks & GST_OMX_HACK_NO_EMPTY_EOS_BUFFER)) {
    GST_WARNING_OBJECT (self, "Component does not support empty EOS buffers");
    return GST_FLOW_OK;
  }

  /* Make sure to release the base class stream lock, otherwise
   * _loop() can't call _finish_frame() and we might block forever
   * because no input buffers are released */
  GST_AUDIO_DECODER_STREAM_UNLOCK (self);

  /* Send an EOS buffer to the component and let the base
   * class drop the EOS event. We will send it later when
   * the EOS buffer arrives on the output port. */
  acq_ret = gst_omx_port_acquire_buffer (self->dec_in_port, &buf);
  if (acq_ret != GST_OMX_ACQUIRE_BUFFER_OK) {
    GST_AUDIO_DECODER_STREAM_LOCK (self);
    GST_ERROR_OBJECT (self, "Failed to acquire buffer for draining: %d",
        acq_ret);
    return GST_FLOW_ERROR;
  }

  g_mutex_lock (&self->drain_lock);
  self->draining = TRUE;
  buf->omx_buf->nFilledLen = 0;
  buf->omx_buf->nTimeStamp =
      gst_util_uint64_scale (self->last_upstream_ts, OMX_TICKS_PER_SECOND,
      GST_SECOND);
  buf->omx_buf->nTickCount = 0;
  buf->omx_buf->nFlags |= OMX_BUFFERFLAG_EOS;
  err = gst_omx_port_release_buffer (self->dec_in_port, buf);
  if (err != OMX_ErrorNone) {
    GST_ERROR_OBJECT (self, "Failed to drain component: %s (0x%08x)",
        gst_omx_error_to_string (err), err);
    g_mutex_unlock (&self->drain_lock);
    GST_AUDIO_DECODER_STREAM_LOCK (self);
    return GST_FLOW_ERROR;
  }

  GST_DEBUG_OBJECT (self, "Waiting until component is drained");

  if (G_UNLIKELY (self->dec->hacks & GST_OMX_HACK_DRAIN_MAY_NOT_RETURN)) {
    gint64 wait_until = g_get_monotonic_time () + G_TIME_SPAN_SECOND / 2;

    if (!g_cond_wait_until (&self->drain_cond, &self->drain_lock, wait_until))
      GST_WARNING_OBJECT (self, "Drain timed out");
    else
      GST_DEBUG_OBJECT (self, "Drained component");

  } else {
    g_cond_wait (&self->drain_cond, &self->drain_lock);
    GST_DEBUG_OBJECT (self, "Drained component");
  }

  g_mutex_unlock (&self->drain_lock);
  GST_AUDIO_DECODER_STREAM_LOCK (self);

  self->started = FALSE;

  return GST_FLOW_OK;
}
Author: freedesktop-unofficial-mirror, Project: gstreamer__gst-omx, Lines: 78


Example 26: gst_flac_dec_write

static FLAC__StreamDecoderWriteStatus
gst_flac_dec_write (GstFlacDec * flacdec, const FLAC__Frame * frame,
    const FLAC__int32 * const buffer[])
{
  GstFlowReturn ret = GST_FLOW_OK;
  GstBuffer *outbuf;
  guint depth = frame->header.bits_per_sample;
  guint width, gdepth;
  guint sample_rate = frame->header.sample_rate;
  guint channels = frame->header.channels;
  guint samples = frame->header.blocksize;
  guint j, i;
  GstMapInfo map;
  gboolean caps_changed;
  GstAudioChannelPosition chanpos[8];

  GST_LOG_OBJECT (flacdec, "samples in frame header: %d", samples);

  if (depth == 0) {
    if (flacdec->depth < 4 || flacdec->depth > 32) {
      GST_ERROR_OBJECT (flacdec, "unsupported depth %d from STREAMINFO",
          flacdec->depth);
      ret = GST_FLOW_ERROR;
      goto done;
    }

    depth = flacdec->depth;
  }

  switch (depth) {
    case 8:
      gdepth = width = 8;
      break;
    case 12:
    case 16:
      gdepth = width = 16;
      break;
    case 20:
    case 24:
      gdepth = 24;
      width = 32;
      break;
    case 32:
      gdepth = width = 32;
      break;
    default:
      GST_ERROR_OBJECT (flacdec, "unsupported depth %d", depth);
      ret = GST_FLOW_ERROR;
      goto done;
  }

  if (sample_rate == 0) {
    if (flacdec->info.rate != 0) {
      sample_rate = flacdec->info.rate;
    } else {
      GST_ERROR_OBJECT (flacdec, "unknown sample rate");
      ret = GST_FLOW_ERROR;
      goto done;
    }
  }

  caps_changed = (sample_rate != GST_AUDIO_INFO_RATE (&flacdec->info))
      || (width != GST_AUDIO_INFO_WIDTH (&flacdec->info))
      || (gdepth != GST_AUDIO_INFO_DEPTH (&flacdec->info))
      || (channels != GST_AUDIO_INFO_CHANNELS (&flacdec->info));

  if (caps_changed
      || !gst_pad_has_current_caps (GST_AUDIO_DECODER_SRC_PAD (flacdec))) {
    GST_DEBUG_OBJECT (flacdec, "Negotiating %d Hz @ %d channels", sample_rate,
        channels);

    memcpy (chanpos, channel_positions[flacdec->info.channels - 1],
        sizeof (chanpos));
    gst_audio_channel_positions_to_valid_order (chanpos,
        flacdec->info.channels);
    gst_audio_info_set_format (&flacdec->info,
        gst_audio_format_build_integer (TRUE, G_BYTE_ORDER, width, gdepth),
        sample_rate, channels, chanpos);

    /* Note: we create the inverse reordering map here */
    gst_audio_get_channel_reorder_map (flacdec->info.channels,
        flacdec->info.position, channel_positions[flacdec->info.channels - 1],
        flacdec->channel_reorder_map);

    flacdec->depth = depth;

    gst_audio_decoder_set_output_format (GST_AUDIO_DECODER (flacdec),
        &flacdec->info);
  }

  outbuf =
      gst_buffer_new_allocate (NULL, samples * channels * (width / 8), NULL);

  gst_buffer_map (outbuf, &map, GST_MAP_WRITE);
  if (width == 8) {
    gint8 *outbuffer = (gint8 *) map.data;
    gint *reorder_map = flacdec->channel_reorder_map;

    g_assert (gdepth == 8 && depth == 8);
    for (i = 0; i < samples; i++) {
//......... part of the code omitted here .........
Author: Distrotech, Project: gst-plugins-good, Lines: 101


Example 27: gst_omx_audio_dec_set_format

static gboolean
gst_omx_audio_dec_set_format (GstAudioDecoder * decoder, GstCaps * caps)
{
  GstOMXAudioDec *self;
  GstOMXAudioDecClass *klass;
  GstStructure *s;
  const GValue *codec_data;
  gboolean is_format_change = FALSE;
  gboolean needs_disable = FALSE;

  self = GST_OMX_AUDIO_DEC (decoder);
  klass = GST_OMX_AUDIO_DEC_GET_CLASS (decoder);

  GST_DEBUG_OBJECT (self, "Setting new caps %" GST_PTR_FORMAT, caps);

  /* Check if the caps change is a real format change or if only irrelevant
   * parts of the caps have changed or nothing at all.
   */
  if (klass->is_format_change)
    is_format_change = klass->is_format_change (self, self->dec_in_port, caps);

  needs_disable =
      gst_omx_component_get_state (self->dec,
      GST_CLOCK_TIME_NONE) != OMX_StateLoaded;
  /* If the component is not in Loaded state and a real format change happens
   * we have to disable the port and re-allocate all buffers. If no real
   * format change happened we can just exit here.
   */
  if (needs_disable && !is_format_change) {
    GST_DEBUG_OBJECT (self,
        "Already running and caps did not change the format");
    return TRUE;
  }

  if (needs_disable && is_format_change) {
    GstOMXPort *out_port = self->dec_out_port;

    GST_DEBUG_OBJECT (self, "Need to disable and drain decoder");

    gst_omx_audio_dec_drain (self);
    gst_omx_audio_dec_flush (decoder, FALSE);
    gst_omx_port_set_flushing (out_port, 5 * GST_SECOND, TRUE);

    if (klass->cdata.hacks & GST_OMX_HACK_NO_COMPONENT_RECONFIGURE) {
      GST_AUDIO_DECODER_STREAM_UNLOCK (self);
      gst_omx_audio_dec_stop (GST_AUDIO_DECODER (self));
      gst_omx_audio_dec_close (GST_AUDIO_DECODER (self));
      GST_AUDIO_DECODER_STREAM_LOCK (self);

      if (!gst_omx_audio_dec_open (GST_AUDIO_DECODER (self)))
        return FALSE;
      needs_disable = FALSE;
    } else {
      if (gst_omx_port_set_enabled (self->dec_in_port, FALSE) != OMX_ErrorNone)
        return FALSE;
      if (gst_omx_port_set_enabled (out_port, FALSE) != OMX_ErrorNone)
        return FALSE;
      if (gst_omx_port_wait_buffers_released (self->dec_in_port,
              5 * GST_SECOND) != OMX_ErrorNone)
        return FALSE;
      if (gst_omx_port_wait_buffers_released (out_port,
              1 * GST_SECOND) != OMX_ErrorNone)
        return FALSE;
      if (gst_omx_port_deallocate_buffers (self->dec_in_port) != OMX_ErrorNone)
        return FALSE;
      if (gst_omx_port_deallocate_buffers (self->dec_out_port) != OMX_ErrorNone)
        return FALSE;
      if (gst_omx_port_wait_enabled (self->dec_in_port,
              1 * GST_SECOND) != OMX_ErrorNone)
        return FALSE;
      if (gst_omx_port_wait_enabled (out_port, 1 * GST_SECOND) != OMX_ErrorNone)
        return FALSE;
    }

    GST_DEBUG_OBJECT (self, "Decoder drained and disabled");
  }

  if (klass->set_format) {
    if (!klass->set_format (self, self->dec_in_port, caps)) {
      GST_ERROR_OBJECT (self, "Subclass failed to set the new format");
      return FALSE;
    }
  }

  GST_DEBUG_OBJECT (self, "Updating outport port definition");
  if (gst_omx_port_update_port_definition (self->dec_out_port,
          NULL) != OMX_ErrorNone)
    return FALSE;

  /* Get codec data from caps */
  gst_buffer_replace (&self->codec_data, NULL);
  s = gst_caps_get_structure (caps, 0);
  codec_data = gst_structure_get_value (s, "codec_data");
  if (codec_data) {
    /* Vorbis and some other codecs have multiple buffers in
     * the stream-header field */
    self->codec_data = gst_value_get_buffer (codec_data);
    if (self->codec_data)
      gst_buffer_ref (self->codec_data);
  }
//......... part of the code omitted here .........
Author: freedesktop-unofficial-mirror, Project: gstreamer__gst-omx, Lines: 101


Example 28: gst_rtsp_server_get_io_channel

//......... part of the code omitted here .........
      GST_WARNING_OBJECT (server, "failed to reuse socket (%s)",
          g_strerror (errno));
    }

    if (bind (sockfd, rp->ai_addr, rp->ai_addrlen) == 0) {
      GST_DEBUG_OBJECT (server, "bind on %s", rp->ai_canonname);
      break;
    }

    GST_DEBUG_OBJECT (server, "failed to bind socket (%s), try next",
        g_strerror (errno));
    close (sockfd);
    sockfd = -1;
  }
  freeaddrinfo (result);

  if (sockfd == -1)
    goto no_socket;

  GST_DEBUG_OBJECT (server, "opened sending server socket with fd %d", sockfd);

  /* keep connection alive; avoids SIGPIPE during write */
  ret = 1;
  if (setsockopt (sockfd, SOL_SOCKET, SO_KEEPALIVE,
          (void *) &ret, sizeof (ret)) < 0)
    goto keepalive_failed;

#ifdef USE_SOLINGER
  /* make sure socket is reset 5 seconds after close. This ensure that we can
   * reuse the socket quickly while still having a chance to send data to the
   * client. */
  linger.l_onoff = 1;
  linger.l_linger = 5;
  if (setsockopt (sockfd, SOL_SOCKET, SO_LINGER,
          (void *) &linger, sizeof (linger)) < 0)
    goto linger_failed;
#endif

  /* set the server socket to nonblocking */
  fcntl (sockfd, F_SETFL, O_NONBLOCK);

  GST_DEBUG_OBJECT (server, "listening on server socket %d with queue of %d",
      sockfd, server->backlog);
  if (listen (sockfd, server->backlog) == -1)
    goto listen_failed;

  GST_DEBUG_OBJECT (server,
      "listened on server socket %d, returning from connection setup", sockfd);

  /* create IO channel for the socket */
  channel = g_io_channel_unix_new (sockfd);
  g_io_channel_set_close_on_unref (channel, TRUE);

  GST_INFO_OBJECT (server, "listening on service %s", server->service);

  GST_RTSP_SERVER_UNLOCK (server);

  return channel;

  /* ERRORS */
no_address:
  {
    GST_ERROR_OBJECT (server, "failed to resolve address: %s",
        gai_strerror (ret));
    goto close_error;
  }
no_socket:
  {
    GST_ERROR_OBJECT (server, "failed to create socket: %s",
        g_strerror (errno));
    goto close_error;
  }
keepalive_failed:
  {
    GST_ERROR_OBJECT (server, "failed to configure keepalive socket: %s",
        g_strerror (errno));
    goto close_error;
  }
#ifdef USE_SOLINGER
linger_failed:
  {
    GST_ERROR_OBJECT (server, "failed to no linger socket: %s",
        g_strerror (errno));
    goto close_error;
  }
#endif
listen_failed:
  {
    GST_ERROR_OBJECT (server, "failed to listen on socket: %s",
        g_strerror (errno));
    goto close_error;
  }
close_error:
  {
    if (sockfd >= 0) {
      close (sockfd);
    }
    GST_RTSP_SERVER_UNLOCK (server);
    return NULL;
  }
}
Author: PeterXu, Project: gst-mobile, Lines: 101


Example 29: gst_timecodestamper_sink_event

static gboolean
gst_timecodestamper_sink_event (GstBaseTransform * trans, GstEvent * event)
{
  gboolean ret = FALSE;
  GstTimeCodeStamper *timecodestamper = GST_TIME_CODE_STAMPER (trans);

  GST_DEBUG_OBJECT (trans, "received event %" GST_PTR_FORMAT, event);
  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_SEGMENT:
    {
      GstSegment segment;
      guint64 frames;
      gchar *tc_str;
      gboolean notify = FALSE;

      GST_OBJECT_LOCK (timecodestamper);

      gst_event_copy_segment (event, &segment);
      if (segment.format != GST_FORMAT_TIME) {
        GST_OBJECT_UNLOCK (timecodestamper);
        GST_ERROR_OBJECT (timecodestamper, "Invalid segment format");
        return FALSE;
      }

      if (GST_VIDEO_INFO_FORMAT (&timecodestamper->vinfo) ==
          GST_VIDEO_FORMAT_UNKNOWN) {
        GST_ERROR_OBJECT (timecodestamper,
            "Received segment event without caps");
        GST_OBJECT_UNLOCK (timecodestamper);
        return FALSE;
      }

      if (timecodestamper->first_tc_now && !timecodestamper->first_tc) {
        GDateTime *dt = g_date_time_new_now_local ();
        GstVideoTimeCode *tc;

        gst_timecodestamper_set_drop_frame (timecodestamper);

        tc = gst_video_time_code_new_from_date_time (timecodestamper->
            vinfo.fps_n, timecodestamper->vinfo.fps_d, dt,
            timecodestamper->current_tc->config.flags, 0);

        g_date_time_unref (dt);

        timecodestamper->first_tc = tc;
        notify = TRUE;
      }

      frames =
          gst_util_uint64_scale (segment.time, timecodestamper->vinfo.fps_n,
          timecodestamper->vinfo.fps_d * GST_SECOND);
      gst_timecodestamper_reset_timecode (timecodestamper);
      gst_video_time_code_add_frames (timecodestamper->current_tc, frames);
      GST_DEBUG_OBJECT (timecodestamper,
          "Got %" G_GUINT64_FORMAT " frames when segment time is %"
          GST_TIME_FORMAT, frames, GST_TIME_ARGS (segment.time));
      tc_str = gst_video_time_code_to_string (timecodestamper->current_tc);
      GST_DEBUG_OBJECT (timecodestamper, "New timecode is %s", tc_str);
      g_free (tc_str);

      GST_OBJECT_UNLOCK (timecodestamper);

      if (notify)
        g_object_notify (G_OBJECT (timecodestamper), "first-timecode");
      break;
    }
    case GST_EVENT_CAPS:
    {
      GstCaps *caps;

      GST_OBJECT_LOCK (timecodestamper);

      gst_event_parse_caps (event, &caps);
      if (!gst_video_info_from_caps (&timecodestamper->vinfo, caps)) {
        GST_OBJECT_UNLOCK (timecodestamper);
        return FALSE;
      }
      gst_timecodestamper_reset_timecode (timecodestamper);

      GST_OBJECT_UNLOCK (timecodestamper);
      break;
    }
    default:
      break;
  }

  ret =
      GST_BASE_TRANSFORM_CLASS (gst_timecodestamper_parent_class)->sink_event
      (trans, event);

  return ret;
}
Author: 0p1pp1, Project: gst-plugins-bad, Lines: 85


Example 30: daala_handle_type_packet

static GstFlowReturn
daala_handle_type_packet (GstDaalaDec * dec)
{
  gint par_num, par_den;
  GstFlowReturn ret = GST_FLOW_OK;
  GstVideoCodecState *state;
  GstVideoFormat fmt;
  GstVideoInfo *info;

  if (!dec->input_state)
    return GST_FLOW_NOT_NEGOTIATED;

  info = &dec->input_state->info;

  GST_DEBUG_OBJECT (dec, "fps %d/%d, PAR %d/%d",
      dec->info.timebase_numerator, dec->info.timebase_denominator,
      dec->info.pixel_aspect_numerator, dec->info.pixel_aspect_denominator);

  /* calculate par
   * the info.aspect_* values reflect PAR;
   * 0:x and x:0 are allowed and can be interpreted as 1:1.
   */
  par_num = GST_VIDEO_INFO_PAR_N (info);
  par_den = GST_VIDEO_INFO_PAR_D (info);

  /* If we have a default PAR, see if the decoder specified a different one */
  if (par_num == 1 && par_den == 1 &&
      (dec->info.pixel_aspect_numerator != 0
          && dec->info.pixel_aspect_denominator != 0)) {
    par_num = dec->info.pixel_aspect_numerator;
    par_den = dec->info.pixel_aspect_denominator;
  }
  /* daala has:
   *
   *  width/height : dimension of the encoded frame
   *  pic_width/pic_height : dimension of the visible part
   *  pic_x/pic_y : offset in encoded frame where visible part starts
   */
  GST_DEBUG_OBJECT (dec, "dimension %dx%d, PAR %d/%d", dec->info.pic_width,
      dec->info.pic_height, par_num, par_den);

  if (dec->info.nplanes == 3 && dec->info.plane_info[0].xdec == 0 &&
      dec->info.plane_info[0].ydec == 0 &&
      dec->info.plane_info[1].xdec == 1 &&
      dec->info.plane_info[1].ydec == 1 &&
      dec->info.plane_info[2].xdec == 1 && dec->info.plane_info[2].ydec == 1) {
    fmt = GST_VIDEO_FORMAT_I420;
  } else if (dec->info.nplanes == 3 && dec->info.plane_info[0].xdec == 0 &&
      dec->info.plane_info[0].ydec == 0 &&
      dec->info.plane_info[1].xdec == 0 &&
      dec->info.plane_info[1].ydec == 0 &&
      dec->info.plane_info[2].xdec == 0 && dec->info.plane_info[2].ydec == 0) {
    fmt = GST_VIDEO_FORMAT_Y444;
  } else {
    goto unsupported_format;
  }

  GST_VIDEO_INFO_WIDTH (info) = dec->info.pic_width;
  GST_VIDEO_INFO_HEIGHT (info) = dec->info.pic_height;

  /* done */
  dec->decoder = daala_decode_alloc (&dec->info, dec->setup);

  /* Create the output state */
  dec->output_state = state =
      gst_video_decoder_set_output_state (GST_VIDEO_DECODER (dec), fmt,
      info->width, info->height, dec->input_state);

  /* FIXME : Do we still need to set fps/par now that we pass the reference input stream ? */
  state->info.fps_n = dec->info.timebase_numerator;
  state->info.fps_d = dec->info.timebase_denominator;
  state->info.par_n = par_num;
  state->info.par_d = par_den;

  gst_video_decoder_negotiate (GST_VIDEO_DECODER (dec));

  dec->have_header = TRUE;

  return ret;

  /* ERRORS */
unsupported_format:
  {
    GST_ERROR_OBJECT (dec, "Invalid pixel format");
    return GST_FLOW_ERROR;
  }
}
Author: asrashley, Project: gst-plugins-bad, Lines: 87
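All of the examples above share the same call shape: GST_ERROR_OBJECT (object, format, ...) emits an ERROR-level line in the GStreamer debug log, tagged with the name of the GObject passed as the first argument (usually a GstElement or GstPad) and formatted printf-style. The following minimal, self-contained sketch shows that pattern outside of any real element; the file name, the "sketch" debug category, and the choice of the stock identity element are illustrative assumptions, not taken from any of the projects above.

/* gst-error-object-sketch.c — a minimal sketch of the GST_ERROR_OBJECT pattern.
 * Build (assuming pkg-config knows gstreamer-1.0):
 *   gcc gst-error-object-sketch.c -o sketch $(pkg-config --cflags --libs gstreamer-1.0)
 * Run with e.g. GST_DEBUG=3 ./sketch to see the error line on stderr. */
#include <gst/gst.h>

/* Element code usually defines its own debug category and shadows
 * GST_CAT_DEFAULT with it, so GST_ERROR_OBJECT logs under that category. */
GST_DEBUG_CATEGORY_STATIC (sketch_debug);
#define GST_CAT_DEFAULT sketch_debug

int
main (int argc, char *argv[])
{
  GstElement *element;

  gst_init (&argc, &argv);
  GST_DEBUG_CATEGORY_INIT (sketch_debug, "sketch", 0,
      "GST_ERROR_OBJECT usage sketch");

  /* Any GObject works as the first argument; an element is the common case. */
  element = gst_element_factory_make ("identity", "my-identity");
  if (element == NULL) {
    g_printerr ("identity element not available\n");
    return 1;
  }

  /* Logs an ERROR-level message tagged with the object's name
   * ("my-identity"), using printf-style formatting, just like the
   * error paths in the examples above. */
  GST_ERROR_OBJECT (element, "simulated failure: %s (code %d)",
      "no device", 42);

  gst_object_unref (element);
  return 0;
}

Unlike GST_ELEMENT_ERROR, this macro only writes to the debug log; it does not post an error message on the bus, which is why the examples above typically pair it with returning FALSE or a GstFlowReturn error code.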



Note: The GST_ERROR_OBJECT examples in this article were collected from source-code and documentation hosting platforms such as GitHub and MSDocs. The snippets come from open-source projects contributed by their respective developers; copyright remains with the original authors, and redistribution or use should follow each project's license. Do not republish without permission.

