这篇教程C++ GST_ERROR函数代码示例写得很实用,希望能帮到您。
本文整理汇总了C++中GST_ERROR函数的典型用法代码示例。如果您正苦于以下问题:C++ GST_ERROR函数的具体用法?C++ GST_ERROR怎么用?C++ GST_ERROR使用的例子?那么恭喜您, 这里精选的函数代码示例或许可以为您提供帮助。 在下文中一共展示了GST_ERROR函数的29个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于我们的系统推荐出更棒的C++代码示例。 示例1: gst_vaapi_surface_create_from_buffer_proxystatic gbooleangst_vaapi_surface_create_from_buffer_proxy (GstVaapiSurface * surface, GstVaapiBufferProxy * proxy, const GstVideoInfo * vip){#if VA_CHECK_VERSION (0,36,0) GstVaapiDisplay *const display = GST_VAAPI_OBJECT_DISPLAY (surface); GstVideoFormat format; VASurfaceID surface_id; VAStatus status; guint chroma_type, va_chroma_format; const VAImageFormat *va_format; VASurfaceAttrib attribs[2], *attrib; VASurfaceAttribExternalBuffers extbuf; unsigned long extbuf_handle; guint i, width, height; format = GST_VIDEO_INFO_FORMAT (vip); width = GST_VIDEO_INFO_WIDTH (vip); height = GST_VIDEO_INFO_HEIGHT (vip); gst_vaapi_buffer_proxy_replace (&surface->extbuf_proxy, proxy); va_format = gst_vaapi_video_format_to_va_format (format); if (!va_format) goto error_unsupported_format; chroma_type = gst_vaapi_video_format_get_chroma_type (format); if (!chroma_type) goto error_unsupported_format; va_chroma_format = from_GstVaapiChromaType (chroma_type); if (!va_chroma_format) goto error_unsupported_format; extbuf_handle = GST_VAAPI_BUFFER_PROXY_HANDLE (proxy); extbuf.pixel_format = va_format->fourcc; extbuf.width = width; extbuf.height = height; extbuf.data_size = GST_VAAPI_BUFFER_PROXY_SIZE (proxy); extbuf.num_planes = GST_VIDEO_INFO_N_PLANES (vip); for (i = 0; i < extbuf.num_planes; i++) { extbuf.pitches[i] = GST_VIDEO_INFO_PLANE_STRIDE (vip, i); extbuf.offsets[i] = GST_VIDEO_INFO_PLANE_OFFSET (vip, i); } extbuf.buffers = &extbuf_handle; extbuf.num_buffers = 1; extbuf.flags = 0; extbuf.private_data = NULL; attrib = attribs; attrib->type = VASurfaceAttribExternalBufferDescriptor; attrib->flags = VA_SURFACE_ATTRIB_SETTABLE; attrib->value.type = VAGenericValueTypePointer; attrib->value.value.p = &extbuf; attrib++; 
attrib->type = VASurfaceAttribMemoryType; attrib->flags = VA_SURFACE_ATTRIB_SETTABLE; attrib->value.type = VAGenericValueTypeInteger; attrib->value.value.i = from_GstVaapiBufferMemoryType (GST_VAAPI_BUFFER_PROXY_TYPE (proxy)); attrib++; GST_VAAPI_DISPLAY_LOCK (display); status = vaCreateSurfaces (GST_VAAPI_DISPLAY_VADISPLAY (display), va_chroma_format, width, height, &surface_id, 1, attribs, attrib - attribs); GST_VAAPI_DISPLAY_UNLOCK (display); if (!vaapi_check_status (status, "vaCreateSurfaces()")) return FALSE; surface->format = format; surface->chroma_type = chroma_type; surface->width = width; surface->height = height; GST_DEBUG ("surface %" GST_VAAPI_ID_FORMAT, GST_VAAPI_ID_ARGS (surface_id)); GST_VAAPI_OBJECT_ID (surface) = surface_id; return TRUE; /* ERRORS */error_unsupported_format: GST_ERROR ("unsupported format %s", gst_vaapi_video_format_to_string (format)); return FALSE;#else return FALSE;#endif}
开发者ID:01org,项目名称:iotg-lin-gfx-gstreamer-vaapi,代码行数:88,
示例2: default_can_save_uristatic gbooleandefault_can_save_uri (const gchar * uri){ GST_ERROR ("No 'can_save_uri' vmethod implementation"); return FALSE;}
开发者ID:matasbbb,项目名称:GES,代码行数:6,
/* Example 3: decode_scan
 *
 * Decodes one JPEG scan segment (SOS) for the VA-API JPEG decoder:
 * parses the scan header, wraps the entropy-coded data in a GstVaapiSlice
 * attached to the current picture, refreshes the Huffman tables when they
 * changed, and fills the VA baseline slice parameters (component/table
 * selectors, restart interval, MCU count).
 *
 * Returns: a GstVaapiDecoderStatus; SUCCESS also when the decoder has not
 * yet seen a frame header (the scan is silently skipped in that case).
 */
static GstVaapiDecoderStatus
decode_scan (GstVaapiDecoderJpeg * decoder, GstJpegSegment * seg)
{
  GstVaapiDecoderJpegPrivate *const priv = &decoder->priv;
  GstVaapiPicture *const picture = priv->current_picture;
  GstVaapiSlice *slice;
  VASliceParameterBufferJPEGBaseline *slice_param;
  GstJpegScanHdr scan_hdr;
  guint scan_hdr_size, scan_data_size;
  guint i, h_max, v_max, mcu_width, mcu_height;

  /* A scan only makes sense after a frame header (SOF) was decoded. */
  if (!VALID_STATE (decoder, GOT_SOF))
    return GST_VAAPI_DECODER_STATUS_SUCCESS;

  /* First two bytes of the segment payload are the big-endian scan
   * header length; everything after the header is entropy-coded data. */
  scan_hdr_size = (seg->data[seg->offset] << 8) | seg->data[seg->offset + 1];
  scan_data_size = seg->size - scan_hdr_size;

  memset (&scan_hdr, 0, sizeof (scan_hdr));
  if (!gst_jpeg_segment_parse_scan_header (seg, &scan_hdr)) {
    GST_ERROR ("failed to parse scan header");
    return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
  }

  /* Wrap the entropy-coded bytes in a slice and hand it to the picture,
   * which takes ownership. */
  slice = GST_VAAPI_SLICE_NEW (JPEGBaseline, decoder,
      seg->data + seg->offset + scan_hdr_size, scan_data_size);
  if (!slice) {
    GST_ERROR ("failed to allocate slice");
    return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
  }
  gst_vaapi_picture_add_slice (picture, slice);

  /* No DHT segment seen yet: fall back to the standard default tables. */
  if (!VALID_STATE (decoder, GOT_HUF_TABLE))
    gst_jpeg_get_default_huffman_tables (&priv->huf_tables);

  // Update VA Huffman table if it changed for this scan
  if (huffman_tables_updated (&priv->huf_tables)) {
    slice->huf_table = GST_VAAPI_HUFFMAN_TABLE_NEW (JPEGBaseline, decoder);
    if (!slice->huf_table) {
      GST_ERROR ("failed to allocate Huffman tables");
      huffman_tables_reset (&priv->huf_tables);
      return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
    }
    fill_huffman_table (slice->huf_table, &priv->huf_tables);
    huffman_tables_reset (&priv->huf_tables);
  }

  /* Copy per-component selectors from the parsed scan header into the
   * VA slice parameter buffer. */
  slice_param = slice->param;
  slice_param->num_components = scan_hdr.num_components;
  for (i = 0; i < scan_hdr.num_components; i++) {
    slice_param->components[i].component_selector =
        scan_hdr.components[i].component_selector;
    slice_param->components[i].dc_table_selector =
        scan_hdr.components[i].dc_selector;
    slice_param->components[i].ac_table_selector =
        scan_hdr.components[i].ac_selector;
  }
  slice_param->restart_interval = priv->mcu_restart;
  slice_param->slice_horizontal_position = 0;
  slice_param->slice_vertical_position = 0;

  /* MCU size is 8x8 blocks scaled by the maximum sampling factors. */
  get_max_sampling_factors (&priv->frame_hdr, &h_max, &v_max);
  mcu_width = 8 * h_max;
  mcu_height = 8 * v_max;

  if (scan_hdr.num_components == 1) {   // Non-interleaved
    /* Single-component scans use that component's own sampling factors,
     * so divide the interleaved MCU size back down; reject zero factors
     * to avoid a division by zero. */
    const guint Csj = slice_param->components[0].component_selector;
    const GstJpegFrameComponent *const fcp =
        get_component (&priv->frame_hdr, Csj);

    if (!fcp || fcp->horizontal_factor == 0 || fcp->vertical_factor == 0) {
      GST_ERROR ("failed to validate image component %u", Csj);
      return GST_VAAPI_DECODER_STATUS_ERROR_INVALID_PARAMETER;
    }
    mcu_width /= fcp->horizontal_factor;
    mcu_height /= fcp->vertical_factor;
  }
  /* Total MCUs = ceil(width/mcu_width) * ceil(height/mcu_height). */
  slice_param->num_mcus =
      ((priv->frame_hdr.width + mcu_width - 1) / mcu_width) *
      ((priv->frame_hdr.height + mcu_height - 1) / mcu_height);

  priv->decoder_state |= GST_JPEG_VIDEO_STATE_GOT_SOS;
  return GST_VAAPI_DECODER_STATUS_SUCCESS;
}
开发者ID:ceyusa,项目名称:gstreamer-vaapi,代码行数:83,
示例4: mxf_partition_pack_parse//.........这里部分代码省略......... size -= 2; GST_DEBUG (" MXF version = %u.%u", pack->major_version, pack->minor_version); pack->kag_size = GST_READ_UINT32_BE (data); data += 4; size -= 4; GST_DEBUG (" KAG size = %u", pack->kag_size); pack->this_partition = GST_READ_UINT64_BE (data); data += 8; size -= 8; GST_DEBUG (" this partition offset = %" G_GUINT64_FORMAT, pack->this_partition); pack->prev_partition = GST_READ_UINT64_BE (data); data += 8; size -= 8; GST_DEBUG (" previous partition offset = %" G_GUINT64_FORMAT, pack->prev_partition); pack->footer_partition = GST_READ_UINT64_BE (data); data += 8; size -= 8; GST_DEBUG (" footer partition offset = %" G_GUINT64_FORMAT, pack->footer_partition); pack->header_byte_count = GST_READ_UINT64_BE (data); data += 8; size -= 8; GST_DEBUG (" header byte count = %" G_GUINT64_FORMAT, pack->header_byte_count); pack->index_byte_count = GST_READ_UINT64_BE (data); data += 8; size -= 8; pack->index_sid = GST_READ_UINT32_BE (data); data += 4; size -= 4; GST_DEBUG (" index sid = %u, size = %" G_GUINT64_FORMAT, pack->index_sid, pack->index_byte_count); pack->body_offset = GST_READ_UINT64_BE (data); data += 8; size -= 8; pack->body_sid = GST_READ_UINT32_BE (data); data += 4; size -= 4; GST_DEBUG (" body sid = %u, offset = %" G_GUINT64_FORMAT, pack->body_sid, pack->body_offset); memcpy (&pack->operational_pattern, data, 16); data += 16; size -= 16; GST_DEBUG (" operational pattern = %s", mxf_ul_to_string (&pack->operational_pattern, str)); pack->n_essence_containers = GST_READ_UINT32_BE (data); data += 4; size -= 4; GST_DEBUG (" number of essence containers = %u", pack->n_essence_containers); if (GST_READ_UINT32_BE (data) != 16) goto error; data += 4; size -= 4; if (size < 16 * pack->n_essence_containers) goto error; if (pack->n_essence_containers) { pack->essence_containers = g_new (MXFUL, pack->n_essence_containers); for (i = 0; i < pack->n_essence_containers; i++) { memcpy (&pack->essence_containers[i], data + 
i * 16, 16); GST_DEBUG (" essence container %u = %s", i, mxf_ul_to_string (&pack->essence_containers[i], str)); } } pack->valid = TRUE; return TRUE;error: GST_ERROR ("Invalid partition pack"); mxf_partition_pack_reset (pack); return FALSE;}
开发者ID:prajnashi,项目名称:gst-plugins-bad,代码行数:101,
示例5: gst_v4l2_memory_group_newstatic GstV4l2MemoryGroup *gst_v4l2_memory_group_new (GstV4l2Allocator * allocator, guint32 index){ GstV4l2Object *obj = allocator->obj; guint32 memory = allocator->memory; struct v4l2_format *format = &obj->format; GstV4l2MemoryGroup *group; gsize img_size, buf_size; group = g_slice_new0 (GstV4l2MemoryGroup); group->buffer.type = format->type; group->buffer.index = index; group->buffer.memory = memory; if (V4L2_TYPE_IS_MULTIPLANAR (format->type)) { group->n_mem = group->buffer.length = format->fmt.pix_mp.num_planes; group->buffer.m.planes = group->planes; } else { group->n_mem = 1; } if (obj->ioctl (obj->video_fd, VIDIOC_QUERYBUF, &group->buffer) < 0) goto querybuf_failed; if (group->buffer.index != index) { GST_ERROR_OBJECT (allocator, "Buffer index returned by VIDIOC_QUERYBUF " "didn't match, this indicate the presence of a bug in your driver or " "libv4l2"); g_slice_free (GstV4l2MemoryGroup, group); return NULL; } /* Check that provided size matches the format we have negotiation. Failing * there usually means a driver of libv4l bug. 
*/ if (V4L2_TYPE_IS_MULTIPLANAR (obj->type)) { gint i; for (i = 0; i < group->n_mem; i++) { img_size = obj->format.fmt.pix_mp.plane_fmt[i].sizeimage; buf_size = group->planes[i].length; if (buf_size < img_size) goto buffer_too_short; } } else { img_size = obj->format.fmt.pix.sizeimage; buf_size = group->buffer.length; if (buf_size < img_size) goto buffer_too_short; } /* We save non planar buffer information into the multi-planar plane array * to avoid duplicating the code later */ if (!V4L2_TYPE_IS_MULTIPLANAR (format->type)) { group->planes[0].bytesused = group->buffer.bytesused; group->planes[0].length = group->buffer.length; group->planes[0].data_offset = 0; g_assert (sizeof (group->planes[0].m) == sizeof (group->buffer.m)); memcpy (&group->planes[0].m, &group->buffer.m, sizeof (group->buffer.m)); } GST_LOG_OBJECT (allocator, "Got %s buffer", memory_type_to_str (memory)); GST_LOG_OBJECT (allocator, " index: %u", group->buffer.index); GST_LOG_OBJECT (allocator, " type: %d", group->buffer.type); GST_LOG_OBJECT (allocator, " flags: %08x", group->buffer.flags); GST_LOG_OBJECT (allocator, " field: %d", group->buffer.field); GST_LOG_OBJECT (allocator, " memory: %d", group->buffer.memory); GST_LOG_OBJECT (allocator, " planes: %d", group->n_mem);#ifndef GST_DISABLE_GST_DEBUG if (memory == V4L2_MEMORY_MMAP) { gint i; for (i = 0; i < group->n_mem; i++) { GST_LOG_OBJECT (allocator, " [%u] bytesused: %u, length: %u, offset: %u", i, group->planes[i].bytesused, group->planes[i].length, group->planes[i].data_offset); GST_LOG_OBJECT (allocator, " [%u] MMAP offset: %u", i, group->planes[i].m.mem_offset); } }#endif return group;querybuf_failed: { GST_ERROR ("error querying buffer %d: %s", index, g_strerror (errno)); goto failed; }buffer_too_short: { GST_ERROR ("buffer size %" G_GSIZE_FORMAT " is smaller then negotiated size %" G_GSIZE_FORMAT ", this is usually the result of a bug in the v4l2 driver or libv4l.", buf_size, img_size); goto failed; }failed: gst_v4l2_memory_group_free 
(group);//.........这里部分代码省略.........
开发者ID:nnikos123,项目名称:gst-plugins-good,代码行数:101,
/* Example 6: gst_base_video_decoder_drain
 *
 * Drains the decoder's input adapter: (re)acquires sync via the
 * subclass's scan_for_sync vmethod when needed, then repeatedly asks
 * scan_for_packet_end for complete packets and feeds each one to
 * parse_data.  @at_eos is forwarded to the subclass so it can flush
 * partial packets at end of stream.
 *
 * Returns: GST_FLOW_OK when more data is needed or everything was
 * consumed, the subclass's parse_data result on error, or
 * GST_FLOW_ERROR on an invalid scan result.
 */
static GstFlowReturn
gst_base_video_decoder_drain (GstBaseVideoDecoder * dec, gboolean at_eos)
{
  GstBaseVideoDecoderClass *klass;
  GstBaseVideoDecoderScanResult res;
  guint size;

  klass = GST_BASE_VIDEO_DECODER_GET_CLASS (dec);

  /* Nothing buffered, nothing to do. */
  if (gst_adapter_available (dec->input_adapter) == 0)
    return GST_FLOW_OK;

lost_sync:
  if (!dec->have_sync) {
    gint n, m;

    GST_DEBUG ("no sync, scanning");

    /* Ask the subclass how many bytes to skip before a possible sync
     * point; -1 means no sync found in the available data. */
    n = gst_adapter_available (dec->input_adapter);
    m = klass->scan_for_sync (dec, dec->input_adapter);
    if (m == -1) {
      /* NOTE(review): this unrefs @dec although no matching ref is
       * taken in this function — looks like a refcount bug; confirm
       * against the callers before relying on it. */
      gst_object_unref (dec);
      return GST_FLOW_OK;
    }

    /* Subclass contract violations: complain but keep going. */
    if (m < 0) {
      g_warning ("subclass returned negative scan %d", m);
    }

    if (m >= n) {
      GST_ERROR ("subclass scanned past end %d >= %d", m, n);
    }

    /* Discard the bytes before the candidate sync point. */
    gst_adapter_flush (dec->input_adapter, m);

    if (m < n) {
      GST_DEBUG ("found possible sync after %d bytes (of %d)", m, n);

      /* this is only "maybe" sync */
      dec->have_sync = TRUE;
    }

    if (!dec->have_sync) {
      return GST_FLOW_OK;
    }
  }

  /* Extract and parse complete packets while the subclass keeps
   * reporting packet boundaries. */
  res = klass->scan_for_packet_end (dec, dec->input_adapter, &size, at_eos);
  while (res == GST_BASE_VIDEO_DECODER_SCAN_RESULT_OK) {
    GstBuffer *buf;
    GstFlowReturn ret;

    GST_DEBUG ("Packet size: %u", size);
    /* Packet end detected but not all its bytes buffered yet. */
    if (size > gst_adapter_available (dec->input_adapter))
      return GST_FLOW_OK;

    buf = gst_adapter_take_buffer (dec->input_adapter, size);

    /* Track byte offsets of the previous/current packet in the input
     * stream (input_offset minus what is still buffered). */
    dec->prev_buf_offset = dec->current_buf_offset;
    dec->current_buf_offset = dec->input_offset -
        gst_adapter_available (dec->input_adapter);

    ret = klass->parse_data (dec, buf, at_eos);
    if (ret != GST_FLOW_OK)
      return ret;

    res = klass->scan_for_packet_end (dec, dec->input_adapter, &size, at_eos);
  }

  switch (res) {
    case GST_BASE_VIDEO_DECODER_SCAN_RESULT_LOST_SYNC:
      /* Sync lost mid-stream: restart the sync search on the remaining
       * buffered data. */
      dec->have_sync = FALSE;
      goto lost_sync;
    case GST_BASE_VIDEO_DECODER_SCAN_RESULT_NEED_DATA:
      return GST_FLOW_OK;
    default:
      GST_ERROR_OBJECT (dec, "Subclass returned invalid scan result");
      return GST_FLOW_ERROR;
  }
}
示例7: test_one_after_other_full//.........这里部分代码省略......... GST_DEBUG ("Setting pipeline to PLAYING"); ASSERT_OBJECT_REFCOUNT (source1, "source1", 1); fail_if (gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_PLAYING) == GST_STATE_CHANGE_FAILURE); GST_DEBUG ("Let's poll the bus"); while (carry_on) { message = gst_bus_poll (bus, GST_MESSAGE_ANY, GST_SECOND / 2); if (message) { switch (GST_MESSAGE_TYPE (message)) { case GST_MESSAGE_EOS: /* we should check if we really finished here */ GST_WARNING ("Got an EOS"); carry_on = FALSE; break; case GST_MESSAGE_SEGMENT_START: case GST_MESSAGE_SEGMENT_DONE: /* We shouldn't see any segement messages, since we didn't do a segment seek */ GST_WARNING ("Saw a Segment start/stop"); fail_if (TRUE); break; case GST_MESSAGE_ERROR: GST_WARNING ("Saw an ERROR"); fail_if (TRUE); default: break; } gst_mini_object_unref (GST_MINI_OBJECT (message)); } } GST_DEBUG ("Setting pipeline to NULL"); fail_if (gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_READY) == GST_STATE_CHANGE_FAILURE); fail_if (collect->expected_segments != NULL); GST_DEBUG ("Resetted pipeline to READY"); /* Expected segments */ collect->expected_segments = g_list_append (collect->expected_segments, segment_new (1.0, GST_FORMAT_TIME, 5 * GST_SECOND, 6 * GST_SECOND, 0)); collect->expected_segments = g_list_append (collect->expected_segments, segment_new (1.0, GST_FORMAT_TIME, 2 * GST_SECOND, 3 * GST_SECOND, 1 * GST_SECOND)); collect->gotsegment = FALSE; GST_DEBUG ("Setting pipeline to PLAYING again"); fail_if (gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_PLAYING) == GST_STATE_CHANGE_FAILURE); carry_on = TRUE; GST_DEBUG ("Let's poll the bus AGAIN"); while (carry_on) { message = gst_bus_poll (bus, GST_MESSAGE_ANY, GST_SECOND / 2); if (message) { switch (GST_MESSAGE_TYPE (message)) { case GST_MESSAGE_EOS: /* we should check if we really finished here */ carry_on = FALSE; break; case GST_MESSAGE_SEGMENT_START: case GST_MESSAGE_SEGMENT_DONE: /* We 
shouldn't see any segement messages, since we didn't do a segment seek */ GST_WARNING ("Saw a Segment start/stop"); fail_if (TRUE); break; case GST_MESSAGE_ERROR: GST_ERROR ("Saw an ERROR"); fail_if (TRUE); default: break; } gst_mini_object_unref (GST_MINI_OBJECT (message)); } else { GST_DEBUG ("bus_poll responded, but there wasn't any message..."); } } fail_if (collect->expected_segments != NULL); gst_object_unref (GST_OBJECT (sinkpad)); fail_if (gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_NULL) == GST_STATE_CHANGE_FAILURE); ASSERT_OBJECT_REFCOUNT_BETWEEN (pipeline, "main pipeline", 1, 2); gst_object_unref (pipeline); ASSERT_OBJECT_REFCOUNT_BETWEEN (bus, "main bus", 1, 2); gst_object_unref (bus); g_free (collect);}
开发者ID:alessandrod,项目名称:gnonlin,代码行数:101,
/* Example 8: read_one
 *
 * Reads one framed packet from the plugin-scanner subprocess pipe:
 * first a fixed-size header (type byte, 24-bit tag, 32-bit payload
 * length, 32-bit magic, all big-endian), then the payload, growing the
 * receive buffer as needed.  The assembled packet is handed to
 * handle_rx_packet().
 *
 * Returns: FALSE on a read error, bad magic, or oversized packet;
 * otherwise the result of handle_rx_packet().
 */
static gboolean
read_one (GstPluginLoader * l)
{
  guint64 magic;
  guint32 to_read, packet_len, tag;
  guint8 *in;
  gint res;

  /* Read exactly HEADER_SIZE bytes, retrying on EAGAIN/EINTR.
   * NOTE(review): a 0-byte read (EOF on the pipe) would loop forever
   * here since to_read never shrinks — presumably the fd is known to
   * be readable when this is called; confirm with the caller. */
  to_read = HEADER_SIZE;
  in = l->rx_buf;
  do {
    res = read (l->fd_r.fd, in, to_read);
    if (G_UNLIKELY (res < 0)) {
      if (errno == EAGAIN || errno == EINTR)
        continue;
      GST_LOG ("Failed reading packet header");
      return FALSE;
    }
    to_read -= res;
    in += res;
  } while (to_read > 0);

  /* Validate the magic number at header offset 8. */
  magic = GST_READ_UINT32_BE (l->rx_buf + 8);
  if (magic != HEADER_MAGIC) {
    GST_WARNING
        ("Invalid packet (bad magic number) received from plugin scanner subprocess");
    return FALSE;
  }

  /* Payload length at offset 4; reject packets beyond the hard cap. */
  packet_len = GST_READ_UINT32_BE (l->rx_buf + 4);
  if (packet_len + HEADER_SIZE > BUF_MAX_SIZE) {
    GST_WARNING
        ("Received excessively large packet for plugin scanner subprocess");
    return FALSE;
  }
  /* 24-bit tag at offset 1 (offset 0 is the packet type byte). */
  tag = GST_READ_UINT24_BE (l->rx_buf + 1);

  if (packet_len > 0) {
    /* Grow the receive buffer with some slack if the payload does not
     * fit in the current allocation. */
    if (packet_len + HEADER_SIZE >= l->rx_buf_size) {
      GST_LOG ("Expanding rx buf from %d to %d",
          l->rx_buf_size, packet_len + HEADER_SIZE + BUF_GROW_EXTRA);
      l->rx_buf_size = packet_len + HEADER_SIZE + BUF_GROW_EXTRA;
      l->rx_buf = g_realloc (l->rx_buf, l->rx_buf_size);
    }

    /* Read exactly packet_len payload bytes after the header, with the
     * same EAGAIN/EINTR retry loop as above. */
    in = l->rx_buf + HEADER_SIZE;
    to_read = packet_len;
    do {
      res = read (l->fd_r.fd, in, to_read);
      if (G_UNLIKELY (res < 0)) {
        if (errno == EAGAIN || errno == EINTR)
          continue;
        GST_ERROR ("Packet payload read failed");
        return FALSE;
      }
      to_read -= res;
      in += res;
    } while (to_read > 0);
  } else {
    GST_LOG ("No payload to read for 0 length packet type %d tag %u",
        l->rx_buf[0], tag);
  }

  /* Dispatch: type byte, tag, payload pointer and length. */
  return handle_rx_packet (l, l->rx_buf[0], tag,
      l->rx_buf + HEADER_SIZE, packet_len);
}
示例9: fill_planes//.........这里部分代码省略......... info->offset[0] = 0; info->offset[1] = info->stride[0] * height; info->offset[2] = info->offset[1] + info->stride[1] * height; /* simplification of ROUNDUP4(w)*h + 2*((ROUNDUP16(w)/4)*h */ break; case GST_VIDEO_FORMAT_Y42B: info->stride[0] = GST_ROUND_UP_4 (width); info->stride[1] = GST_ROUND_UP_8 (width) / 2; info->stride[2] = info->stride[1]; info->offset[0] = 0; info->offset[1] = info->stride[0] * height; info->offset[2] = info->offset[1] + info->stride[1] * height; /* simplification of ROUNDUP4(w)*h + 2*(ROUNDUP8(w)/2)*h */ break; case GST_VIDEO_FORMAT_Y444: info->stride[0] = GST_ROUND_UP_4 (width); info->stride[1] = info->stride[0]; info->stride[2] = info->stride[0]; info->offset[0] = 0; info->offset[1] = info->stride[0] * height; info->offset[2] = info->offset[1] * 2; break; case GST_VIDEO_FORMAT_NV12: case GST_VIDEO_FORMAT_NV21: info->stride[0] = GST_ROUND_UP_4 (width); info->stride[1] = info->stride[0]; info->offset[0] = 0; info->offset[1] = info->stride[0] * GST_ROUND_UP_2 (height); break; case GST_VIDEO_FORMAT_A420: info->stride[0] = GST_ROUND_UP_4 (width); info->stride[1] = GST_ROUND_UP_4 (GST_ROUND_UP_2 (width) / 2); info->stride[2] = info->stride[1]; info->stride[3] = info->stride[0]; info->offset[0] = 0; info->offset[1] = info->stride[0] * GST_ROUND_UP_2 (height); info->offset[2] = info->offset[1] + info->stride[1] * (GST_ROUND_UP_2 (height) / 2); info->offset[3] = info->offset[2] + info->stride[2] * (GST_ROUND_UP_2 (height) / 2); break; case GST_VIDEO_FORMAT_YUV9: info->stride[0] = GST_ROUND_UP_4 (width); info->stride[1] = GST_ROUND_UP_4 (GST_ROUND_UP_4 (width) / 4); info->stride[2] = info->stride[1]; info->offset[0] = 0; info->offset[1] = info->stride[0] * height; info->offset[2] = info->offset[1] + info->stride[1] * (GST_ROUND_UP_4 (height) / 4); break; case GST_VIDEO_FORMAT_YVU9: info->stride[0] = GST_ROUND_UP_4 (width); info->stride[1] = GST_ROUND_UP_4 (GST_ROUND_UP_4 (width) / 4); info->stride[2] = 
info->stride[1]; info->offset[0] = 0; info->offset[2] = info->stride[0] * height; info->offset[1] = info->offset[2] + info->stride[1] * (GST_ROUND_UP_4 (height) / 4); break; case GST_VIDEO_FORMAT_I420_10LE: case GST_VIDEO_FORMAT_I420_10BE: info->stride[0] = GST_ROUND_UP_4 (width * 2); info->stride[1] = GST_ROUND_UP_4 (width); info->stride[2] = info->stride[1]; info->offset[0] = 0; info->offset[1] = info->stride[0] * GST_ROUND_UP_2 (height); info->offset[2] = info->offset[1] + info->stride[1] * (GST_ROUND_UP_2 (height) / 2); info->size = info->offset[2] + info->stride[2] * (GST_ROUND_UP_2 (height) / 2); break; case GST_VIDEO_FORMAT_I422_10LE: case GST_VIDEO_FORMAT_I422_10BE: info->stride[0] = GST_ROUND_UP_4 (width * 2); info->stride[1] = GST_ROUND_UP_4 (width); info->stride[2] = info->stride[1]; info->offset[0] = 0; info->offset[1] = info->stride[0] * GST_ROUND_UP_2 (height); info->offset[2] = info->offset[1] + info->stride[1] * GST_ROUND_UP_2 (height); info->size = info->offset[2] + info->stride[2] * GST_ROUND_UP_2 (height); break; case GST_VIDEO_FORMAT_Y444_10LE: case GST_VIDEO_FORMAT_Y444_10BE: info->stride[0] = GST_ROUND_UP_4 (width * 2); info->stride[1] = info->stride[0]; info->stride[2] = info->stride[0]; info->offset[0] = 0; info->offset[1] = info->stride[0] * height; info->offset[2] = info->offset[1] * 2; info->size = info->stride[0] * height * 3; break; case GST_VIDEO_FORMAT_UNKNOWN: default: GST_ERROR ("invalid format"); g_warning ("invalid format"); break; } return 0;}
开发者ID:pli3,项目名称:gst-plugins-base,代码行数:101,
示例10: recv_sample//.........这里部分代码省略......... BaseTimeType *base_time; GstClockTime offset; g_signal_emit_by_name (appsink, "pull-sample", &sample); if (sample == NULL) return GST_FLOW_OK; buffer = gst_sample_get_buffer (sample); if (buffer == NULL) { ret = GST_FLOW_OK; goto end; } segment = gst_sample_get_segment (sample); g_object_get (G_OBJECT (self), "state", &state, NULL); if (state != KMS_URI_ENDPOINT_STATE_START) { GST_WARNING ("Dropping buffer received in invalid state %" GST_PTR_FORMAT, buffer); // TODO: Add a flag to discard buffers until keyframe ret = GST_FLOW_OK; goto end; } gst_buffer_ref (buffer); buffer = gst_buffer_make_writable (buffer); if (GST_BUFFER_PTS_IS_VALID (buffer)) buffer->pts = gst_segment_to_running_time (segment, GST_FORMAT_TIME, buffer->pts); if (GST_BUFFER_DTS_IS_VALID (buffer)) buffer->dts = gst_segment_to_running_time (segment, GST_FORMAT_TIME, buffer->dts); BASE_TIME_LOCK (self); base_time = g_object_get_data (G_OBJECT (self), BASE_TIME_DATA); if (base_time == NULL) { base_time = g_slice_new0 (BaseTimeType); base_time->pts = buffer->pts; base_time->dts = buffer->dts; GST_DEBUG_OBJECT (appsrc, "Setting pts base time to: %" G_GUINT64_FORMAT, base_time->pts); g_object_set_data_full (G_OBJECT (self), BASE_TIME_DATA, base_time, release_base_time_type); } if (!GST_CLOCK_TIME_IS_VALID (base_time->pts) && GST_BUFFER_PTS_IS_VALID (buffer)) { base_time->pts = buffer->pts; GST_DEBUG_OBJECT (appsrc, "Setting pts base time to: %" G_GUINT64_FORMAT, base_time->pts); base_time->dts = buffer->dts; } if (GST_CLOCK_TIME_IS_VALID (base_time->pts)) { if (GST_BUFFER_PTS_IS_VALID (buffer)) { offset = base_time->pts + self->priv->paused_time; if (buffer->pts > offset) { buffer->pts -= offset; } else { buffer->pts = 0; } } } if (GST_CLOCK_TIME_IS_VALID (base_time->dts)) { if (GST_BUFFER_DTS_IS_VALID (buffer)) { offset = base_time->dts + self->priv->paused_time; if (buffer->dts > offset) { buffer->dts -= offset; } else { buffer->dts = 0; } } } 
BASE_TIME_UNLOCK (GST_OBJECT_PARENT (appsink)); GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_LIVE); if (GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_HEADER)) GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_DISCONT); ret = gst_app_src_push_buffer (appsrc, buffer); if (ret != GST_FLOW_OK) { /* something wrong */ GST_ERROR ("Could not send buffer to appsrc %s. Cause: %s", GST_ELEMENT_NAME (appsrc), gst_flow_get_name (ret)); }end: if (sample != NULL) { gst_sample_unref (sample); } return ret;}
开发者ID:s-silva,项目名称:kms-elements,代码行数:101,
/* Example 11: gst_vaapidecode_handle_frame
 *
 * GstVideoDecoder::handle_frame implementation for the VA-API decoder.
 * Feeds @frame to the hardware decoder; when the decoder reports that no
 * surface is available, pushes all already-decoded frames downstream to
 * free surfaces and blocks on the surface_ready condition until one
 * becomes available, then retries.  On success all decoded frames are
 * pushed; on failure the frame is dropped and an appropriate
 * GstFlowReturn is produced.
 */
static GstFlowReturn
gst_vaapidecode_handle_frame (GstVideoDecoder * vdec,
    GstVideoCodecFrame * frame)
{
  GstVaapiDecode *const decode = GST_VAAPIDECODE (vdec);
  GstVaapiDecoderStatus status;
  GstFlowReturn ret;

  /* Caps must have been negotiated before any frame can be decoded. */
  if (!decode->input_state)
    goto not_negotiated;

  /* Decode current frame */
  for (;;) {
    status = gst_vaapi_decoder_decode (decode->decoder, frame);
    if (status == GST_VAAPI_DECODER_STATUS_ERROR_NO_SURFACE) {
      /* Make sure that there are no decoded frames waiting in the
         output queue. */
      ret = gst_vaapidecode_push_all_decoded_frames (decode);
      if (ret != GST_FLOW_OK)
        goto error_push_all_decoded_frames;

      /* Wait for a surface to be released, but only if the decoder
       * still reports NO_SURFACE once we hold the mutex (the state may
       * have changed between the decode call and taking the lock). */
      g_mutex_lock (&decode->surface_ready_mutex);
      if (gst_vaapi_decoder_check_status (decode->decoder) ==
          GST_VAAPI_DECODER_STATUS_ERROR_NO_SURFACE)
        g_cond_wait (&decode->surface_ready, &decode->surface_ready_mutex);
      g_mutex_unlock (&decode->surface_ready_mutex);
      continue;
    }
    if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
      goto error_decode;
    break;
  }

  /* Note that gst_vaapi_decoder_decode cannot return success without
     completing the decode and pushing all decoded frames into the output
     queue */
  return gst_vaapidecode_push_all_decoded_frames (decode);

  /* ERRORS */
error_push_all_decoded_frames:
  {
    GST_ERROR ("push loop error while decoding %d", ret);
    gst_video_decoder_drop_frame (vdec, frame);
    return ret;
  }
error_decode:
  {
    GST_ERROR ("decode error %d", status);
    /* Map unsupported-codec/profile/chroma errors to NOT_SUPPORTED;
     * everything else is reported as a stream decode error. */
    switch (status) {
      case GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_CODEC:
      case GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_PROFILE:
      case GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_CHROMA_FORMAT:
        ret = GST_FLOW_NOT_SUPPORTED;
        break;
      default:
        GST_VIDEO_DECODER_ERROR (vdec, 1, STREAM, DECODE,
            ("Decoding error"), ("Decode error %d", status), ret);
        break;
    }
    gst_video_decoder_drop_frame (vdec, frame);
    return ret;
  }
not_negotiated:
  {
    GST_ERROR_OBJECT (decode, "not negotiated");
    ret = GST_FLOW_NOT_NEGOTIATED;
    gst_video_decoder_drop_frame (vdec, frame);
    return ret;
  }
}
示例12: gst_gcs_set_capsstatic gboolean gst_gcs_set_caps(GstBaseTransform * btrans, GstCaps * incaps, GstCaps * outcaps) { GstGcs *gcs = GST_GCS (btrans); gint in_width, in_height; gint out_width, out_height; GST_GCS_LOCK (gcs); gst_video_format_parse_caps(incaps, &gcs->in_format, &in_width, &in_height); gst_video_format_parse_caps(outcaps, &gcs->out_format, &out_width, &out_height); if (!(gcs->in_format == gcs->out_format) || !(in_width == out_width && in_height == out_height)) { GST_WARNING("Failed to parse caps %" GST_PTR_FORMAT " -> %" GST_PTR_FORMAT, incaps, outcaps); GST_GCS_UNLOCK (gcs); return FALSE; } gcs->width = in_width; gcs->height = in_height; GST_INFO("Initialising Gcs..."); gst_pad_set_event_function(GST_BASE_TRANSFORM_SINK_PAD(gcs), gst_gcs_sink_event); const CvSize size = cvSize(gcs->width, gcs->height); GST_WARNING (" width %d, height %d", gcs->width, gcs->height); ////////////////////////////////////////////////////////////////////////////// // allocate image structs in all spaces/////////////////////////////////////// gcs->pImageRGBA = cvCreateImageHeader(size, IPL_DEPTH_8U, 4); gcs->pImgRGB = cvCreateImage(size, IPL_DEPTH_8U, 3); gcs->pImgScratch = cvCreateImage(size, IPL_DEPTH_8U, 3); gcs->pImgGRAY = cvCreateImage(size, IPL_DEPTH_8U, 1); gcs->pImgGRAY_copy = cvCreateImage(size, IPL_DEPTH_8U, 1); gcs->pImgGRAY_diff = cvCreateImage(size, IPL_DEPTH_8U, 1); gcs->pImgGRAY_1 = cvCreateImage(size, IPL_DEPTH_8U, 1); gcs->pImgGRAY_1copy= cvCreateImage(size, IPL_DEPTH_8U, 1); cvZero( gcs->pImgGRAY_1 ); cvZero( gcs->pImgGRAY_1copy ); gcs->pImgChA = cvCreateImageHeader(size, IPL_DEPTH_8U, 1); gcs->pImgCh1 = cvCreateImage(size, IPL_DEPTH_8U, 1); gcs->pImgCh2 = cvCreateImage(size, IPL_DEPTH_8U, 1); gcs->pImgCh3 = cvCreateImage(size, IPL_DEPTH_8U, 1); gcs->pImgChX = cvCreateImage(size, IPL_DEPTH_8U, 1); gcs->pImg_skin = cvCreateImage(size, IPL_DEPTH_8U, 1); gcs->grabcut_mask = cvCreateMat( size.height, size.width, CV_8UC1); cvZero(gcs->grabcut_mask); 
initialise_grabcut( &(gcs->GC), gcs->pImgRGB, gcs->grabcut_mask ); gcs->bbox_prev = cvRect( 60,70, 210, 170 ); //////////////////////////////////////////////////////////////////////////////#ifdef KMEANS // k-means allocation //////////////////////////////////////////////////////// gcs->pImgRGB_kmeans = cvCreateImage(size, IPL_DEPTH_8U, 3); gcs->num_samples = size.height * size.width; gcs->kmeans_points = cvCreateMat( gcs->num_samples, 5, CV_32FC1); gcs->kmeans_clusters = cvCreateMat( gcs->num_samples, 1, CV_32SC1);#endif //KMEANS ////////////////////////////////////////////////////////////////////////////// // Init ghost file /////////////////////////////////////////////////////////// curlMemoryStructGCS chunk; //gchar url[]="file:///home/mcasassa/imco2/mods/gstreamer/cyclops/shaders/mask8.png"; //gchar url[]="file:///apps/devnfs/mcasassa/mask_320x240.png"; char curlErrBuf[255]; if( gcs->ghostfilename){ if(FALSE == curl_download(gcs->ghostfilename, "", &chunk, curlErrBuf)) { GST_ERROR("download failed, err: %s", curlErrBuf); } char errBuf[255]; if( FALSE == read_png(&chunk, &(gcs->raw_image), &(gcs->info), errBuf)){ GST_ERROR("png load failed, err: %s", errBuf); } const CvSize sizegh = cvSize(gcs->info.width, gcs->info.height); gcs->cvGhost = cvCreateImageHeader(sizegh, IPL_DEPTH_8U, gcs->info.channels); gcs->cvGhost->imageData = (char*)gcs->raw_image; gcs->cvGhostBw = cvCreateImage(sizegh, IPL_DEPTH_8U, 1); if( gcs->info.channels > 1){ cvCvtColor( gcs->cvGhost, gcs->cvGhostBw, CV_RGB2GRAY ); } else{ cvCopy(gcs->cvGhost, gcs->cvGhostBw, NULL); } gcs->cvGhostBwResized = cvCreateImage(size, IPL_DEPTH_8U, 1); cvResize( gcs->cvGhostBw, gcs->cvGhostBwResized, CV_INTER_LINEAR); gcs->cvGhostBwAffined = cvCreateImage(size, IPL_DEPTH_8U, 1); } GST_INFO(" Collected caps, image in size (%dx%d), ghost size (%dx%d) %dch",gcs->width, gcs->height, gcs->info.width, gcs->info.height, gcs->info.channels );//.........这里部分代码省略.........
开发者ID:miguelao,项目名称:gst_plugins_tsunami,代码行数:101,
示例13: gst_rtp_qdm2_depay_process//.........这里部分代码省略......... /* HEADERS */ GST_DEBUG ("Headers"); /* Store the incoming timestamp */ rtpqdm2depay->ptimestamp = rtpqdm2depay->timestamp; rtpqdm2depay->timestamp = GST_BUFFER_TIMESTAMP (buf); /* flush the internal data if needed */ flush_data (rtpqdm2depay); if (G_UNLIKELY (!rtpqdm2depay->configured)) { guint8 *ourdata; GstBuffer *codecdata; GstCaps *caps; /* First bytes are unknown */ GST_MEMDUMP ("Header", payload + pos, 32); ourdata = payload + pos + 10; pos += 10; rtpqdm2depay->channs = GST_READ_UINT32_BE (payload + pos + 4); rtpqdm2depay->samplerate = GST_READ_UINT32_BE (payload + pos + 8); rtpqdm2depay->bitrate = GST_READ_UINT32_BE (payload + pos + 12); rtpqdm2depay->blocksize = GST_READ_UINT32_BE (payload + pos + 16); rtpqdm2depay->framesize = GST_READ_UINT32_BE (payload + pos + 20); rtpqdm2depay->packetsize = GST_READ_UINT32_BE (payload + pos + 24); /* 16 bit empty block (0x02 0x00) */ pos += 30; GST_DEBUG ("channs:%d, samplerate:%d, bitrate:%d, blocksize:%d, framesize:%d, packetsize:%d", rtpqdm2depay->channs, rtpqdm2depay->samplerate, rtpqdm2depay->bitrate, rtpqdm2depay->blocksize, rtpqdm2depay->framesize, rtpqdm2depay->packetsize); /* Caps */ codecdata = gst_buffer_new_and_alloc (48); memcpy (GST_BUFFER_DATA (codecdata), headheader, 20); memcpy (GST_BUFFER_DATA (codecdata) + 20, ourdata, 28); caps = gst_caps_new_simple ("audio/x-qdm2", "samplesize", G_TYPE_INT, 16, "rate", G_TYPE_INT, rtpqdm2depay->samplerate, "channels", G_TYPE_INT, rtpqdm2depay->channs, "codec_data", GST_TYPE_BUFFER, codecdata, NULL); gst_pad_set_caps (GST_BASE_RTP_DEPAYLOAD_SRCPAD (depayload), caps); gst_caps_unref (caps); rtpqdm2depay->configured = TRUE; } else { GST_DEBUG ("Already configured, skipping headers"); pos += 40; } break; default:{ /* Shuffled packet contents */ guint packetid = payload[pos++]; guint packettype = payload[pos++]; guint packlen = payload[pos++]; guint hsize = 2; GST_DEBUG ("Packet id:%d, type:0x%x, len:%d", 
packetid, packettype, packlen); /* Packets bigger than 0xff bytes have a type with the high bit set */ if (G_UNLIKELY (packettype & 0x80)) { packettype &= 0x7f; packlen <<= 8; packlen |= payload[pos++]; hsize = 3; GST_DEBUG ("Packet id:%d, type:0x%x, len:%d", packetid, packettype, packlen); } if (packettype > 0x7f) { GST_ERROR ("HOUSTON WE HAVE A PROBLEM !!!!"); } add_packet (rtpqdm2depay, packetid, packlen + hsize, payload + pos - hsize); pos += packlen; } } } GST_DEBUG ("final pos %d", pos); avail = gst_adapter_available (rtpqdm2depay->adapter); if (G_UNLIKELY (avail)) { GST_DEBUG ("Pushing out %d bytes of collected data", avail); outbuf = gst_adapter_take_buffer (rtpqdm2depay->adapter, avail); GST_BUFFER_TIMESTAMP (outbuf) = rtpqdm2depay->ptimestamp; GST_DEBUG ("Outgoing buffer timestamp %" GST_TIME_FORMAT, GST_TIME_ARGS (rtpqdm2depay->ptimestamp)); return outbuf; } } return NULL; /* ERRORS */bad_packet: { GST_ELEMENT_WARNING (rtpqdm2depay, STREAM, DECODE, (NULL), ("Packet was too short")); return NULL; }}
开发者ID:pli3,项目名称:gst-plugins-good,代码行数:101,
示例14: on_error_cbstatic voidon_error_cb (GMarkupParseContext * context, GError * error, gpointer user_data){ GST_ERROR ("Error parsing file: %s", error->message);}
开发者ID:MathieuDuponchelle,项目名称:gst-devtools,代码行数:5,
示例15: gst_egl_allocate_eglimage//.........这里部分代码省略......... if (!gst_egl_image_memory_is_mappable ()) flags |= GST_MEMORY_FLAG_NOT_MAPPABLE; /* See https://bugzilla.gnome.org/show_bug.cgi?id=695203 */ flags |= GST_MEMORY_FLAG_NO_SHARE; gst_video_info_set_format (&info, format, width, height); GST_DEBUG ("Allocating EGL Image format %s width %d height %d", gst_video_format_to_string (format), width, height); switch (format) { case GST_VIDEO_FORMAT_RGBA:{ gsize size; EGLImageKHR image; mem[0] = gst_egl_image_allocator_alloc (allocator, ctx->gst_display, GST_VIDEO_GL_TEXTURE_TYPE_RGBA, GST_VIDEO_INFO_WIDTH (&info), GST_VIDEO_INFO_HEIGHT (&info), &size); if (mem[0]) { stride[0] = size / GST_VIDEO_INFO_HEIGHT (&info); n_mem = 1; GST_MINI_OBJECT_FLAG_SET (mem[0], GST_MEMORY_FLAG_NO_SHARE); } else { data = g_slice_new0 (GstEGLGLESImageData); stride[0] = GST_ROUND_UP_4 (GST_VIDEO_INFO_WIDTH (&info) * 4); size = stride[0] * GST_VIDEO_INFO_HEIGHT (&info); glGenTextures (1, &data->texture); if (got_gl_error ("glGenTextures")) goto mem_error; glBindTexture (GL_TEXTURE_2D, data->texture); if (got_gl_error ("glBindTexture")) goto mem_error; /* Set 2D resizing params */ glTexParameteri (GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); glTexParameteri (GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR); /* If these are not set the texture image unit will return * * (R, G, B, A) = black on glTexImage2D for non-POT width/height * * frames. 
For a deeper explanation take a look at the OpenGL ES * * documentation for glTexParameter */ glTexParameteri (GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); glTexParameteri (GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); if (got_gl_error ("glTexParameteri")) goto mem_error; glTexImage2D (GL_TEXTURE_2D, 0, GL_RGBA, GST_VIDEO_INFO_WIDTH (&info), GST_VIDEO_INFO_HEIGHT (&info), 0, GL_RGBA, GL_UNSIGNED_BYTE, NULL); if (got_gl_error ("glTexImage2D")) goto mem_error; image = eglCreateImageKHR (gst_egl_display_get (ctx->gst_display), ctx->context, EGL_GL_TEXTURE_2D_KHR, (EGLClientBuffer) (guintptr) data->texture, NULL); if (got_egl_error ("eglCreateImageKHR")) goto mem_error; mem[0] = gst_egl_image_allocator_wrap (allocator, ctx->gst_display, image, GST_VIDEO_GL_TEXTURE_TYPE_RGBA, flags, size, data, NULL); n_mem = 1; } } break; default: goto mem_error; break; } buffer = gst_buffer_new (); gst_buffer_add_video_meta_full (buffer, 0, format, width, height, GST_VIDEO_INFO_N_PLANES (&info), offset, stride); /* n_mem could be reused for planar colorspaces, for now its == 1 for RGBA */ for (i = 0; i < n_mem; i++) gst_buffer_append_memory (buffer, mem[i]); return buffer;mem_error: { GST_ERROR ("Failed to create EGLImage"); if (data) gst_egl_gles_image_data_free (data); if (mem[0]) gst_memory_unref (mem[0]); return NULL; }}
开发者ID:01org,项目名称:gst-omx,代码行数:101,
示例16: create//.........这里部分代码省略......... ("Failed to allocate buffer/n"), (NULL)); return GST_FLOW_ERROR; } /* Create a DMAI transport buffer object to carry a DMAI buffer to * the source pad. The transport buffer knows how to release the * buffer for re-use in this element when the source pad calls * gst_buffer_unref(). */ outBuf = gst_tidmaibuffertransport_new(hDstBuf, src->hBufTab, capture_buffer_finalize, (void*)src); gst_buffer_set_data(outBuf, GST_BUFFER_DATA(outBuf), Buffer_getSize(hDstBuf)); *buf = outBuf; /* set buffer metadata */ if (G_LIKELY (ret == GST_FLOW_OK && *buf)) { GstClock *clock; GstClockTime timestamp; GST_BUFFER_OFFSET (*buf) = src->offset++; GST_BUFFER_OFFSET_END (*buf) = src->offset; /* timestamps, LOCK to get clock and base time. */ GST_OBJECT_LOCK (src); if ((clock = GST_ELEMENT_CLOCK (src))) { /* we have a clock, get base time and ref clock */ timestamp = GST_ELEMENT (src)->base_time; gst_object_ref (clock); } else { /* no clock, can't set timestamps */ timestamp = GST_CLOCK_TIME_NONE; } GST_OBJECT_UNLOCK (src); if (G_LIKELY (clock)) { /* the time now is the time of the clock minus the base time */ timestamp = gst_clock_get_time (clock) - timestamp; gst_object_unref (clock); /* if we have a framerate adjust timestamp for frame latency */ if (GST_CLOCK_TIME_IS_VALID (src->duration)) { if (timestamp > src->duration) timestamp -= src->duration; else timestamp = 0; } } /* FIXME: use the timestamp from the buffer itself! 
*/ GST_BUFFER_TIMESTAMP (*buf) = timestamp; GST_BUFFER_DURATION (*buf) = src->duration; } /* Create caps for buffer */ GstCaps *mycaps; GstStructure *structure; mycaps = gst_caps_new_empty(); if (src->cAttrs.colorSpace == ColorSpace_UYVY) { structure = gst_structure_new( "video/x-raw-yuv", "format", GST_TYPE_FOURCC, GST_MAKE_FOURCC('U', 'Y', 'V', 'Y'), "framerate", GST_TYPE_FRACTION, gst_value_get_fraction_numerator(&src->framerate), gst_value_get_fraction_denominator(&src->framerate), "width", G_TYPE_INT, src->width, "height", G_TYPE_INT, src->height, (gchar*) NULL); } else if(src->cAttrs.colorSpace == ColorSpace_YUV420PSEMI) { structure = gst_structure_new( "video/x-raw-yuv", "format", GST_TYPE_FOURCC, GST_MAKE_FOURCC('N', 'V', '1', '2'), "framerate", GST_TYPE_FRACTION, gst_value_get_fraction_numerator(&src->framerate), gst_value_get_fraction_denominator(&src->framerate), "width", G_TYPE_INT, src->width, "height", G_TYPE_INT, src->height, (gchar*) NULL); } else { GST_ERROR("unsupported fourcc/n"); return FALSE; } gst_caps_append_structure(mycaps, gst_structure_copy (structure)); gst_structure_free(structure); gst_buffer_set_caps(*buf, mycaps); gst_caps_unref(mycaps); { static int fn; fn++; GST_INFO("capture frame %d", fn); } GST_LOG("create end"); return GST_FLOW_OK;}
开发者ID:xieran1988,项目名称:parents,代码行数:101,
/* "pad-added" handler (test helper): for every new source pad, build and
 * attach a small recording branch (wavenc ! filesink or wavenc ! fakesink)
 * inside @data (the pipeline), install a buffer probe on the final sink pad,
 * and remember the wavenc element in the global `hash` keyed by pad name.
 *
 * Relies on file-scope globals not shown in this chunk: `mutex`, `id`,
 * `hash_mutex`, `hash`, `buffer_probe_cb`, `fail`, `quit_main_loop`
 * -- presumably test fixtures; confirm against the rest of the file.
 */
static void
pad_added_cb (GstElement * element, GstPad * pad, gpointer data)
{
  GstElement *pipeline = GST_ELEMENT (data);
  GstElement *wavenc, *sink;
  GstPadLinkReturn ret;
  GstPad *sinkpad;
  gchar *msg;

  /* only source pads get a recording branch */
  if (gst_pad_get_direction (pad) != GST_PAD_SRC)
    return;

  wavenc = gst_element_factory_make ("wavenc", NULL);

#ifdef MANUAL_CHECK
  {
    gchar *filename;

    /* `id` is a shared counter; lock while generating a unique filename */
    G_LOCK (mutex);
    filename = g_strdup_printf ("file_%u.wv", id++);
    GST_DEBUG ("Creating file %s", filename);
    G_UNLOCK (mutex);

    /* write output to disk so it can be inspected by hand */
    sink = gst_element_factory_make ("filesink", NULL);
    g_object_set (G_OBJECT (sink), "location", filename, NULL);
    g_free (filename);
  }
#else
  {
    /* automated run: discard the data */
    sink = gst_element_factory_make ("fakesink", NULL);
  }
#endif

  /* no clock sync / async state changes on the sink */
  g_object_set (G_OBJECT (sink), "sync", FALSE, "async", FALSE, NULL);

  gst_bin_add_many (GST_BIN (pipeline), wavenc, sink, NULL);

  /* link the new source pad into the branch: pad -> wavenc -> sink */
  sinkpad = gst_element_get_static_pad (wavenc, "sink");
  if ((ret = gst_pad_link (pad, sinkpad)) != GST_PAD_LINK_OK) {
    msg = g_strdup_printf ("Can not link pads (%d)", ret);
    gst_object_unref (sinkpad);
    goto failed;
  }
  gst_object_unref (sinkpad);

  if (!gst_element_link (wavenc, sink)) {
    msg = g_strdup_printf ("Can not link elements");
    goto failed;
  }

  /* count/inspect buffers reaching the sink */
  sinkpad = gst_element_get_static_pad (sink, "sink");
  gst_pad_add_probe (sinkpad, GST_PAD_PROBE_TYPE_BUFFER, buffer_probe_cb,
      NULL, NULL);
  gst_object_unref (sinkpad);

  /* bring the new elements up to the pipeline's current state */
  gst_element_sync_state_with_parent (wavenc);
  gst_element_sync_state_with_parent (sink);

  /* remember the branch by pad name for later lookup/teardown */
  G_LOCK (hash_mutex);
  g_hash_table_insert (hash, GST_OBJECT_NAME (pad), wavenc);
  G_UNLOCK (hash_mutex);

  return;

failed:
  /* tear the half-built branch down, report, and stop the main loop */
  gst_element_set_state (wavenc, GST_STATE_NULL);
  gst_element_set_state (sink, GST_STATE_NULL);
  gst_bin_remove_many (GST_BIN (pipeline), wavenc, sink, NULL);
  GST_ERROR ("Error %s", msg);
  fail (msg);
  g_free (msg);
  g_idle_add ((GSourceFunc) quit_main_loop, NULL);
}
开发者ID:rveejay,项目名称:kms-core,代码行数:77,
示例18: gst_base_video_decoder_chainstatic GstFlowReturngst_base_video_decoder_chain (GstPad * pad, GstBuffer * buf){ GstBaseVideoDecoder *base_video_decoder; GstBaseVideoDecoderClass *base_video_decoder_class; GstFlowReturn ret; GST_DEBUG ("chain %" GST_TIME_FORMAT " duration %" GST_TIME_FORMAT, GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)), GST_TIME_ARGS (GST_BUFFER_DURATION (buf)));#if 0 /* requiring the pad to be negotiated makes it impossible to use * oggdemux or filesrc ! decoder */ if (!gst_pad_is_negotiated (pad)) { GST_DEBUG ("not negotiated"); return GST_FLOW_NOT_NEGOTIATED; }#endif base_video_decoder = GST_BASE_VIDEO_DECODER (gst_pad_get_parent (pad)); base_video_decoder_class = GST_BASE_VIDEO_DECODER_GET_CLASS (base_video_decoder); GST_DEBUG_OBJECT (base_video_decoder, "chain"); if (!base_video_decoder->have_segment) { GstEvent *event; GstFlowReturn ret; GST_WARNING ("Received buffer without a new-segment. Assuming timestamps start from 0."); gst_segment_set_newsegment_full (&base_video_decoder->segment, FALSE, 1.0, 1.0, GST_FORMAT_TIME, 0, GST_CLOCK_TIME_NONE, 0); base_video_decoder->have_segment = TRUE; event = gst_event_new_new_segment (FALSE, 1.0, GST_FORMAT_TIME, 0, GST_CLOCK_TIME_NONE, 0); ret = gst_pad_push_event (GST_BASE_VIDEO_DECODER_SRC_PAD (base_video_decoder), event); if (!ret) { GST_ERROR ("new segment event ret=%d", ret); return GST_FLOW_ERROR; } } if (G_UNLIKELY (GST_BUFFER_FLAG_IS_SET (buf, GST_BUFFER_FLAG_DISCONT))) { GST_DEBUG_OBJECT (base_video_decoder, "received DISCONT buffer"); gst_base_video_decoder_flush (base_video_decoder); } if (base_video_decoder->current_frame == NULL) { base_video_decoder->current_frame = gst_base_video_decoder_new_frame (base_video_decoder); } base_video_decoder->input_offset += GST_BUFFER_SIZE (buf); if (GST_BUFFER_TIMESTAMP_IS_VALID (buf)) { gst_base_video_decoder_add_timestamp (base_video_decoder, buf); } if (base_video_decoder->packetized) { base_video_decoder->current_frame->sink_buffer = buf; ret = 
gst_base_video_decoder_have_frame (base_video_decoder, TRUE, NULL); } else { gst_adapter_push (base_video_decoder->input_adapter, buf); ret = gst_base_video_decoder_drain (base_video_decoder, FALSE); } gst_object_unref (base_video_decoder); return ret;}
开发者ID:spunktsch,项目名称:svtplayer,代码行数:79,
示例19: gst_decklink_audio_src_set_capsstatic gbooleangst_decklink_audio_src_set_caps (GstBaseSrc * bsrc, GstCaps * caps){ GstDecklinkAudioSrc *self = GST_DECKLINK_AUDIO_SRC_CAST (bsrc); BMDAudioSampleType sample_depth; GstCaps *current_caps; HRESULT ret; BMDAudioConnection conn = (BMDAudioConnection) - 1; GST_DEBUG_OBJECT (self, "Setting caps %" GST_PTR_FORMAT, caps); if ((current_caps = gst_pad_get_current_caps (GST_BASE_SRC_PAD (bsrc)))) { GST_DEBUG_OBJECT (self, "Pad already has caps %" GST_PTR_FORMAT, caps); if (!gst_caps_is_equal (caps, current_caps)) { GST_ERROR_OBJECT (self, "New caps are not equal to old caps"); gst_caps_unref (current_caps); return FALSE; } else { gst_caps_unref (current_caps); return TRUE; } } if (!gst_audio_info_from_caps (&self->info, caps)) return FALSE; if (self->info.finfo->format == GST_AUDIO_FORMAT_S16LE) { sample_depth = bmdAudioSampleType16bitInteger; } else { sample_depth = bmdAudioSampleType32bitInteger; } switch (self->connection) { case GST_DECKLINK_AUDIO_CONNECTION_AUTO:{ GstElement *videosrc = NULL; GstDecklinkConnectionEnum vconn; // Try to get the connection from the videosrc and try // to select a sensible audio connection based on that g_mutex_lock (&self->input->lock); if (self->input->videosrc) videosrc = GST_ELEMENT_CAST (gst_object_ref (self->input->videosrc)); g_mutex_unlock (&self->input->lock); if (videosrc) { g_object_get (videosrc, "connection", &vconn, NULL); gst_object_unref (videosrc); switch (vconn) { case GST_DECKLINK_CONNECTION_SDI: conn = bmdAudioConnectionEmbedded; break; case GST_DECKLINK_CONNECTION_HDMI: conn = bmdAudioConnectionEmbedded; break; case GST_DECKLINK_CONNECTION_OPTICAL_SDI: conn = bmdAudioConnectionEmbedded; break; case GST_DECKLINK_CONNECTION_COMPONENT: conn = bmdAudioConnectionAnalog; break; case GST_DECKLINK_CONNECTION_COMPOSITE: conn = bmdAudioConnectionAnalog; break; case GST_DECKLINK_CONNECTION_SVIDEO: conn = bmdAudioConnectionAnalog; break; default: // Use default break; } } break; 
} case GST_DECKLINK_AUDIO_CONNECTION_EMBEDDED: conn = bmdAudioConnectionEmbedded; break; case GST_DECKLINK_AUDIO_CONNECTION_AES_EBU: conn = bmdAudioConnectionAESEBU; break; case GST_DECKLINK_AUDIO_CONNECTION_ANALOG: conn = bmdAudioConnectionAnalog; break; case GST_DECKLINK_AUDIO_CONNECTION_ANALOG_XLR: conn = bmdAudioConnectionAnalogXLR; break; case GST_DECKLINK_AUDIO_CONNECTION_ANALOG_RCA: conn = bmdAudioConnectionAnalogRCA; break; default: g_assert_not_reached (); break; } if (conn != (BMDAudioConnection) - 1) { ret = self->input->config->SetInt (bmdDeckLinkConfigAudioInputConnection, conn);//.........这里部分代码省略.........
开发者ID:ndufresne,项目名称:gst-plugins-bad,代码行数:101,
示例20: connection_cbstatic voidconnection_cb (FsMsnConnection *self, FsMsnPollFD *pollfd){ gboolean success = FALSE; GST_DEBUG ("handler called on fd:%d server: %d status:%d r:%d w:%d", pollfd->pollfd.fd, pollfd->server, pollfd->status, gst_poll_fd_can_read (self->poll, &pollfd->pollfd), gst_poll_fd_can_write (self->poll, &pollfd->pollfd)); if (gst_poll_fd_has_error (self->poll, &pollfd->pollfd) || gst_poll_fd_has_closed (self->poll, &pollfd->pollfd)) { GST_WARNING ("connecton closed or error (error: %d closed: %d)", gst_poll_fd_has_error (self->poll, &pollfd->pollfd), gst_poll_fd_has_closed (self->poll, &pollfd->pollfd)); goto error; } if (gst_poll_fd_can_read (self->poll, &pollfd->pollfd)) { switch (pollfd->status) { case FS_MSN_STATUS_AUTH: if (pollfd->server) { gchar str[35] = {0}; gchar check[35] = {0}; if (recv (pollfd->pollfd.fd, str, 34, 0) == 34) { GST_DEBUG ("Got %s, checking if it's auth", str); FS_MSN_CONNECTION_LOCK(self); snprintf(check, 35, "recipientid=%s&sessionid=%d/r/n/r/n", self->local_recipient_id, self->session_id); FS_MSN_CONNECTION_UNLOCK(self); if (strncmp (str, check, 35) == 0) { GST_DEBUG ("Authentication successful"); pollfd->status = FS_MSN_STATUS_CONNECTED; pollfd->want_write = TRUE; gst_poll_fd_ctl_write (self->poll, &pollfd->pollfd, TRUE); } else { GST_WARNING ("Authentication failed check=%s", check); goto error; } } else { gchar error_str[256]; strerror_r (errno, error_str, 256); GST_WARNING ("auth: %s", error_str); goto error; } } else { GST_ERROR ("shouldn't receive data when client on AUTH state"); goto error; } break; case FS_MSN_STATUS_CONNECTED: if (!pollfd->server) { gchar str[14] = {0}; ssize_t size; size = recv (pollfd->pollfd.fd, str, 13, MSG_PEEK); if (size > 0) { GST_DEBUG ("Got %s, checking if it's connected", str); if (size == 13 && strcmp (str, "connected/r/n/r/n") == 0) { GST_DEBUG ("connection successful"); recv (pollfd->pollfd.fd, str, 13, 0); pollfd->status = FS_MSN_STATUS_CONNECTED2; pollfd->want_write = TRUE; 
gst_poll_fd_ctl_write (self->poll, &pollfd->pollfd, TRUE); } else if (!self->producer) { GST_DEBUG ("connection successful"); pollfd->status = FS_MSN_STATUS_SEND_RECEIVE; success = TRUE; } else { GST_WARNING ("connected failed"); goto error; } } else { gchar error_str[256]; strerror_r (errno, error_str, 256); GST_WARNING ("recv: %s", error_str); goto error; }//.........这里部分代码省略.........
开发者ID:mssurajkaiga,项目名称:farstream,代码行数:101,
示例21: defined/* * owr_local_media_source_get_pad * * The beginning of a media source chain in the pipeline looks like this: * +------------+ * /---+ fakesink | * +--------+ +------------+ +-----+ / +------------+ * | source +---+ capsfilter +---+ tee +---/ * +--------+ +------------+ +-----+ / * / +------------+ * /---+ inter*sink | * +------------+ * * For each newly requested pad a new inter*sink is added to the tee. * Note that this is a completely independent pipeline, and the complete * pipeline is only created once for a specific media source. * * Then for each newly requested pad another bin with a inter*src is * created, which is then going to be part of the transport agent * pipeline. The ghostpad of it is what we return here. * * +-----------+ +-------------------------------+ +----------+ * | inter*src +---+ converters/queues/capsfilters +---+ ghostpad | * +-----------+ +-------------------------------+ +----------+ * */static GstElement *owr_local_media_source_request_source(OwrMediaSource *media_source, GstCaps *caps){ OwrLocalMediaSource *local_source; OwrLocalMediaSourcePrivate *priv; GstElement *source_element = NULL; GstElement *source_pipeline;#if defined(__linux__) && !defined(__ANDROID__) gchar *tmp;#endif g_assert(media_source); local_source = OWR_LOCAL_MEDIA_SOURCE(media_source); priv = local_source->priv; /* only create the source bin for this media source once */ if ((source_pipeline = _owr_media_source_get_source_bin(media_source))) { GST_DEBUG_OBJECT(media_source, "Re-using existing source element/bin"); } else { OwrMediaType media_type = OWR_MEDIA_TYPE_UNKNOWN; OwrSourceType source_type = OWR_SOURCE_TYPE_UNKNOWN; GstElement *source, *capsfilter = NULL, *tee; GstElement *queue, *fakesink; GstPad *sinkpad; GEnumClass *media_enum_class, *source_enum_class; GEnumValue *media_enum_value, *source_enum_value; gchar *bin_name; GstCaps *source_caps; GstStructure *source_structure; GstBus *bus; g_object_get(media_source, "media-type", &media_type, 
"type", &source_type, NULL); media_enum_class = G_ENUM_CLASS(g_type_class_ref(OWR_TYPE_MEDIA_TYPE)); source_enum_class = G_ENUM_CLASS(g_type_class_ref(OWR_TYPE_SOURCE_TYPE)); media_enum_value = g_enum_get_value(media_enum_class, media_type); source_enum_value = g_enum_get_value(source_enum_class, source_type); bin_name = g_strdup_printf("local-%s-%s-source-bin-%u", media_enum_value ? media_enum_value->value_nick : "unknown", source_enum_value ? source_enum_value->value_nick : "unknown", g_atomic_int_add(&unique_bin_id, 1)); g_type_class_unref(media_enum_class); g_type_class_unref(source_enum_class); source_pipeline = gst_pipeline_new(bin_name); g_free(bin_name); bin_name = NULL;#ifdef OWR_DEBUG g_signal_connect(source_pipeline, "deep-notify", G_CALLBACK(gst_object_default_deep_notify), NULL);#endif bus = gst_pipeline_get_bus(GST_PIPELINE(source_pipeline)); g_main_context_push_thread_default(_owr_get_main_context()); gst_bus_add_watch(bus, (GstBusFunc)bus_call, source_pipeline); g_main_context_pop_thread_default(_owr_get_main_context()); gst_object_unref(bus); GST_DEBUG_OBJECT(local_source, "media_type: %d, type: %d", media_type, source_type); if (media_type == OWR_MEDIA_TYPE_UNKNOWN || source_type == OWR_SOURCE_TYPE_UNKNOWN) { GST_ERROR_OBJECT(local_source, "Cannot connect source with unknown type or media type to other component"); goto done; } switch (media_type) { case OWR_MEDIA_TYPE_AUDIO: { switch (source_type) { case OWR_SOURCE_TYPE_CAPTURE: CREATE_ELEMENT(source, AUDIO_SRC, "audio-source");#if !defined(__APPLE__) || !TARGET_IPHONE_SIMULATOR//.........这里部分代码省略.........
开发者ID:diorahman,项目名称:openwebrtc,代码行数:101,
示例22: mxf_index_table_segment_parse//.........这里部分代码省略......... tag_size -= 1; GST_DEBUG (" slice = %u", segment->delta_entries[i].slice); segment->delta_entries[i].element_delta = GST_READ_UINT32_BE (tag_data); tag_data += 4; tag_size -= 4; GST_DEBUG (" element delta = %u", segment->delta_entries[i].element_delta); } break; } case 0x3f0a:{ guint len, i, j; if (tag_size < 8) goto error; len = GST_READ_UINT32_BE (tag_data); segment->n_index_entries = len; GST_DEBUG (" number of index entries = %u", segment->n_index_entries); if (len == 0) goto next; tag_data += 4; tag_size -= 4; if (GST_READ_UINT32_BE (tag_data) != (11 + 4 * segment->slice_count + 8 * segment->pos_table_count)) goto error; tag_data += 4; tag_size -= 4; if (tag_size < len * (11 + 4 * segment->slice_count + 8 * segment->pos_table_count)) goto error; segment->index_entries = g_new (MXFIndexEntry, len); for (i = 0; i < len; i++) { MXFIndexEntry *entry = &segment->index_entries[i]; GST_DEBUG (" index entry %u:", i); entry->temporal_offset = GST_READ_UINT8 (tag_data); tag_data += 1; tag_size -= 1; GST_DEBUG (" temporal offset = %d", entry->temporal_offset); entry->key_frame_offset = GST_READ_UINT8 (tag_data); tag_data += 1; tag_size -= 1; GST_DEBUG (" keyframe offset = %d", entry->key_frame_offset); entry->flags = GST_READ_UINT8 (tag_data); tag_data += 1; tag_size -= 1; GST_DEBUG (" flags = 0x%02x", entry->flags); entry->stream_offset = GST_READ_UINT64_BE (tag_data); tag_data += 8; tag_size -= 8; GST_DEBUG (" stream offset = %" G_GUINT64_FORMAT, entry->stream_offset); for (j = 0; j < segment->slice_count; j++) { entry->slice_offset[j] = GST_READ_UINT32_BE (tag_data); tag_data += 4; tag_size -= 4; GST_DEBUG (" slice %u offset = %u", j, entry->slice_offset[j]); } for (j = 0; j < segment->pos_table_count; j++) { mxf_fraction_parse (&entry->pos_table[j], tag_data, tag_size); tag_data += 8; tag_size -= 8; GST_DEBUG (" pos table %u = %d/%d", j, entry->pos_table[j].n, entry->pos_table[j].d); } } break; } 
default: if (!mxf_local_tag_add_to_hash_table (primer, tag, tag_data, tag_size, &segment->other_tags)) goto error; break; } next: data += 4 + tag_size; size -= 4 + tag_size; } return TRUE;error: GST_ERROR ("Invalid index table segment"); return FALSE;}
开发者ID:prajnashi,项目名称:gst-plugins-bad,代码行数:101,
示例23: gst_jack_audio_make_connection/* make a connection with @id and @server. Returns NULL on failure with the * status set. */static GstJackAudioConnection *gst_jack_audio_make_connection (const gchar * id, const gchar * server, jack_client_t * jclient, jack_status_t * status){ GstJackAudioConnection *conn; jack_options_t options; gint res; *status = 0; GST_DEBUG ("new client %s, connecting to server %s", id, GST_STR_NULL (server)); /* never start a server */ options = JackNoStartServer; /* if we have a servername, use it */ if (server != NULL) options |= JackServerName; /* open the client */ if (jclient == NULL) jclient = jack_client_open (id, options, status, server); if (jclient == NULL) goto could_not_open; /* now create object */ conn = g_new (GstJackAudioConnection, 1); conn->refcount = 1; g_mutex_init (&conn->lock); g_cond_init (&conn->flush_cond); conn->id = g_strdup (id); conn->server = g_strdup (server); conn->client = jclient; conn->n_clients = 0; conn->src_clients = NULL; conn->sink_clients = NULL; conn->cur_ts = -1; conn->transport_state = GST_STATE_VOID_PENDING; /* set our callbacks */ jack_set_process_callback (jclient, jack_process_cb, conn); /* these callbacks cause us to error */ jack_set_buffer_size_callback (jclient, jack_buffer_size_cb, conn); jack_set_sample_rate_callback (jclient, jack_sample_rate_cb, conn); jack_on_shutdown (jclient, jack_shutdown_cb, conn); /* all callbacks are set, activate the client */ GST_INFO ("activate jack_client %p", jclient); if ((res = jack_activate (jclient))) goto could_not_activate; GST_DEBUG ("opened connection %p", conn); return conn; /* ERRORS */could_not_open: { GST_DEBUG ("failed to open jack client, %d", *status); return NULL; }could_not_activate: { GST_ERROR ("Could not activate client (%d)", res); *status = JackFailure; g_mutex_clear (&conn->lock); g_free (conn->id); g_free (conn->server); g_free (conn); return NULL; }}
开发者ID:GrokImageCompression,项目名称:gst-plugins-good,代码行数:73,
示例24: user_error_fnstatic voiduser_error_fn (png_structp png_ptr, png_const_charp error_msg){ GST_ERROR ("%s", error_msg);}
开发者ID:JJCG,项目名称:gst-plugins-good,代码行数:5,
示例25: jack_log_errorstatic voidjack_log_error (const gchar * msg){ GST_ERROR ("%s", msg);}
开发者ID:GrokImageCompression,项目名称:gst-plugins-good,代码行数:5,
示例26: gst_base_video_decoder_finish_frameGstFlowReturngst_base_video_decoder_finish_frame (GstBaseVideoDecoder * base_video_decoder, GstVideoFrame * frame){ GstBaseVideoDecoderClass *base_video_decoder_class; GstBuffer *src_buffer; GST_DEBUG ("finish frame"); base_video_decoder_class = GST_BASE_VIDEO_DECODER_GET_CLASS (base_video_decoder); GST_DEBUG ("finish frame sync=%d pts=%" G_GINT64_FORMAT, frame->is_sync_point, frame->presentation_timestamp); if (frame->is_sync_point) { if (GST_CLOCK_TIME_IS_VALID (frame->presentation_timestamp)) { if (frame->presentation_timestamp != base_video_decoder->timestamp_offset) { GST_DEBUG ("sync timestamp %" G_GINT64_FORMAT " diff %" G_GINT64_FORMAT, frame->presentation_timestamp, frame->presentation_timestamp - base_video_decoder->state.segment.start); base_video_decoder->timestamp_offset = frame->presentation_timestamp; base_video_decoder->field_index = 0; } else { /* This case is for one initial timestamp and no others, e.g., * filesrc ! decoder ! xvimagesink */ GST_WARNING ("sync timestamp didn't change, ignoring"); frame->presentation_timestamp = GST_CLOCK_TIME_NONE; } } else { GST_WARNING ("sync point doesn't have timestamp"); if (GST_CLOCK_TIME_IS_VALID (base_video_decoder->timestamp_offset)) { GST_ERROR ("No base timestamp. 
Assuming frames start at 0"); base_video_decoder->timestamp_offset = 0; base_video_decoder->field_index = 0; } } } frame->field_index = base_video_decoder->field_index; base_video_decoder->field_index += frame->n_fields; if (frame->presentation_timestamp == GST_CLOCK_TIME_NONE) { frame->presentation_timestamp = gst_base_video_decoder_get_field_timestamp (base_video_decoder, frame->field_index); frame->presentation_duration = GST_CLOCK_TIME_NONE; frame->decode_timestamp = gst_base_video_decoder_get_timestamp (base_video_decoder, frame->decode_frame_number); } if (frame->presentation_duration == GST_CLOCK_TIME_NONE) { frame->presentation_duration = gst_base_video_decoder_get_field_timestamp (base_video_decoder, frame->field_index + frame->n_fields) - frame->presentation_timestamp; } if (GST_CLOCK_TIME_IS_VALID (base_video_decoder->last_timestamp)) { if (frame->presentation_timestamp < base_video_decoder->last_timestamp) { GST_WARNING ("decreasing timestamp (%" G_GINT64_FORMAT " < %" G_GINT64_FORMAT ")", frame->presentation_timestamp, base_video_decoder->last_timestamp); } } base_video_decoder->last_timestamp = frame->presentation_timestamp; GST_BUFFER_FLAG_UNSET (frame->src_buffer, GST_BUFFER_FLAG_DELTA_UNIT); if (base_video_decoder->state.interlaced) {#ifndef GST_VIDEO_BUFFER_TFF#define GST_VIDEO_BUFFER_TFF (GST_MINI_OBJECT_FLAG_LAST << 5)#endif#ifndef GST_VIDEO_BUFFER_RFF#define GST_VIDEO_BUFFER_RFF (GST_MINI_OBJECT_FLAG_LAST << 6)#endif#ifndef GST_VIDEO_BUFFER_ONEFIELD#define GST_VIDEO_BUFFER_ONEFIELD (GST_MINI_OBJECT_FLAG_LAST << 7)#endif int tff = base_video_decoder->state.top_field_first; if (frame->field_index & 1) { tff ^= 1; } if (tff) { GST_BUFFER_FLAG_SET (frame->src_buffer, GST_VIDEO_BUFFER_TFF); } else { GST_BUFFER_FLAG_UNSET (frame->src_buffer, GST_VIDEO_BUFFER_TFF); } GST_BUFFER_FLAG_UNSET (frame->src_buffer, GST_VIDEO_BUFFER_RFF); GST_BUFFER_FLAG_UNSET (frame->src_buffer, GST_VIDEO_BUFFER_ONEFIELD); if (frame->n_fields == 3) { GST_BUFFER_FLAG_SET 
(frame->src_buffer, GST_VIDEO_BUFFER_RFF); } else if (frame->n_fields == 1) { GST_BUFFER_FLAG_UNSET (frame->src_buffer, GST_VIDEO_BUFFER_ONEFIELD); } } GST_BUFFER_TIMESTAMP (frame->src_buffer) = frame->presentation_timestamp; GST_BUFFER_DURATION (frame->src_buffer) = frame->presentation_duration; GST_BUFFER_OFFSET (frame->src_buffer) = -1; GST_BUFFER_OFFSET_END (frame->src_buffer) = -1;//.........这里部分代码省略.........
开发者ID:zsx,项目名称:ossbuild,代码行数:101,
示例27: query_cbstatic GstPadProbeReturnquery_cb (GstPad * pad, GstPadProbeInfo * info, gpointer user_data){ APP_STATE_T *state = (APP_STATE_T *) user_data; GstQuery *query = GST_PAD_PROBE_INFO_QUERY (info); switch (GST_QUERY_TYPE (query)) { case GST_QUERY_ALLOCATION:{ GstBufferPool *pool; GstStructure *config; GstCaps *caps; GstVideoInfo info; gboolean need_pool; guint size; GstAllocator *allocator; GstAllocationParams params; gst_allocation_params_init (¶ms); gst_query_parse_allocation (query, &caps, &need_pool); if (!caps) { GST_ERROR ("allocation query without caps"); return GST_PAD_PROBE_OK; } if (!gst_video_info_from_caps (&info, caps)) { GST_ERROR ("allocation query with invalid caps"); return GST_PAD_PROBE_OK; } g_mutex_lock (state->queue_lock); pool = state->pool ? gst_object_ref (state->pool) : NULL; g_mutex_unlock (state->queue_lock); if (pool) { GstCaps *pcaps; /* we had a pool, check caps */ config = gst_buffer_pool_get_config (pool); gst_buffer_pool_config_get_params (config, &pcaps, &size, NULL, NULL); GST_DEBUG ("check existing pool caps %" GST_PTR_FORMAT " with new caps %" GST_PTR_FORMAT, pcaps, caps); if (!gst_caps_is_equal (caps, pcaps)) { GST_DEBUG ("pool has different caps"); /* different caps, we can't use this pool */ gst_object_unref (pool); pool = NULL; } gst_structure_free (config); } GST_DEBUG ("pool %p", pool); if (pool == NULL && need_pool) { GstVideoInfo info; if (!gst_video_info_from_caps (&info, caps)) { GST_ERROR ("allocation query has invalid caps %" GST_PTR_FORMAT, caps); return GST_PAD_PROBE_OK; } GST_DEBUG ("create new pool"); state->pool = pool = gst_egl_image_buffer_pool_new (state, state->display); GST_DEBUG ("done create new pool %p", pool); /* the normal size of a frame */ size = info.size; config = gst_buffer_pool_get_config (pool); /* we need at least 2 buffer because we hold on to the last one */ gst_buffer_pool_config_set_params (config, caps, size, 2, 0); gst_buffer_pool_config_set_allocator (config, NULL, ¶ms); if 
(!gst_buffer_pool_set_config (pool, config)) { gst_object_unref (pool); GST_ERROR ("failed to set pool configuration"); return GST_PAD_PROBE_OK; } } if (pool) { /* we need at least 2 buffer because we hold on to the last one */ gst_query_add_allocation_pool (query, pool, size, 2, 0); gst_object_unref (pool); } /* First the default allocator */ if (!gst_egl_image_memory_is_mappable ()) { allocator = gst_allocator_find (NULL); gst_query_add_allocation_param (query, allocator, ¶ms); gst_object_unref (allocator); } allocator = gst_egl_image_allocator_obtain (); GST_WARNING ("Allocator obtained %p", allocator); if (!gst_egl_image_memory_is_mappable ()) params.flags |= GST_MEMORY_FLAG_NOT_MAPPABLE;//.........这里部分代码省略.........
开发者ID:01org,项目名称:gst-omx,代码行数:101,
示例28: gst_video_info_from_caps//.........这里部分代码省略......... } info->fps_n = fps_n; info->fps_d = fps_d; } else { /* unspecified is variable framerate */ info->fps_n = 0; info->fps_d = 1; } if (gst_structure_get_fraction (structure, "pixel-aspect-ratio", &par_n, &par_d)) { info->par_n = par_n; info->par_d = par_d; } else { info->par_n = 1; info->par_d = 1; } if ((s = gst_structure_get_string (structure, "interlace-mode"))) info->interlace_mode = gst_video_interlace_mode_from_string (s); else info->interlace_mode = GST_VIDEO_INTERLACE_MODE_PROGRESSIVE; { if ((s = gst_structure_get_string (structure, "multiview-mode"))) GST_VIDEO_INFO_MULTIVIEW_MODE (info) = gst_video_multiview_mode_from_caps_string (s); else GST_VIDEO_INFO_MULTIVIEW_MODE (info) = GST_VIDEO_MULTIVIEW_MODE_NONE; gst_structure_get_flagset (structure, "multiview-flags", &GST_VIDEO_INFO_MULTIVIEW_FLAGS (info), NULL); if (!gst_structure_get_int (structure, "views", &info->views)) info->views = 1; /* At one point, I tried normalising the half-aspect flag here, * but it behaves weird for GstVideoInfo operations other than * directly converting to/from caps - sometimes causing the * PAR to be doubled/halved too many times */ } if ((s = gst_structure_get_string (structure, "chroma-site"))) info->chroma_site = gst_video_chroma_from_string (s); else info->chroma_site = GST_VIDEO_CHROMA_SITE_UNKNOWN; if ((s = gst_structure_get_string (structure, "colorimetry"))) { if (!gst_video_colorimetry_from_string (&info->colorimetry, s)) { GST_WARNING ("unparsable colorimetry, using default"); set_default_colorimetry (info); } else if (!validate_colorimetry (info)) { GST_WARNING ("invalid colorimetry, using default"); set_default_colorimetry (info); } else { /* force RGB matrix for RGB formats */ if (GST_VIDEO_FORMAT_INFO_IS_RGB (info->finfo) && info->colorimetry.matrix != GST_VIDEO_COLOR_MATRIX_RGB) { GST_WARNING ("invalid matrix %d for RGB format, using RGB", info->colorimetry.matrix); info->colorimetry.matrix = 
GST_VIDEO_COLOR_MATRIX_RGB; } } } else { GST_DEBUG ("no colorimetry, using default"); set_default_colorimetry (info); } fill_planes (info); return TRUE; /* ERROR */wrong_name: { GST_ERROR ("wrong name '%s', expected video/ or image/", gst_structure_get_name (structure)); return FALSE; }no_format: { GST_ERROR ("no format given"); return FALSE; }unknown_format: { GST_ERROR ("unknown format '%s' given", s); return FALSE; }no_width: { GST_ERROR ("no width property given"); return FALSE; }no_height: { GST_ERROR ("no height property given"); return FALSE; }}
开发者ID:Kurento,项目名称:gst-plugins-base,代码行数:101
示例29: gst_vaapi_surface_create_full
/* Creates the underlying VA surface for @surface from @vip, honouring the
 * GST_VAAPI_SURFACE_ALLOC_FLAG_* bits in @flags.  When any of the
 * LINEAR_STORAGE / FIXED_STRIDES / FIXED_OFFSETS flags is set, the plane
 * layout is pinned through a VASurfaceAttribExternalBuffers descriptor.
 *
 * Returns: TRUE on success, FALSE on error or when libva < 0.34 (the whole
 * body is compiled out in that case and the function always fails).
 *
 * NOTE(review): assumes vaCreateSurfaces() copies the external-buffer
 * descriptor before returning, since `extbuf` lives on this stack frame —
 * TODO confirm against the libva contract. */
static gboolean
gst_vaapi_surface_create_full (GstVaapiSurface * surface,
    const GstVideoInfo * vip, guint flags)
{
#if VA_CHECK_VERSION(0,34,0)
  GstVaapiDisplay *const display = GST_VAAPI_OBJECT_DISPLAY (surface);
  const GstVideoFormat format = GST_VIDEO_INFO_FORMAT (vip);
  VASurfaceID surface_id;
  VAStatus status;
  guint chroma_type, va_chroma_format, i;
  const VAImageFormat *va_format;
  /* At most 3 attributes are emitted below: pixel format, and (optionally)
   * memory type + external-buffer descriptor. */
  VASurfaceAttrib attribs[3], *attrib;
  VASurfaceAttribExternalBuffers extbuf;
  gboolean extbuf_needed = FALSE;

  /* Map the GStreamer video format to its VA image format and chroma type;
   * any format VA-API cannot express bails out to the shared error label. */
  va_format = gst_vaapi_video_format_to_va_format (format);
  if (!va_format)
    goto error_unsupported_format;

  chroma_type = gst_vaapi_video_format_get_chroma_type (format);
  if (!chroma_type)
    goto error_unsupported_format;

  va_chroma_format = from_GstVaapiChromaType (chroma_type);
  if (!va_chroma_format)
    goto error_unsupported_format;

  /* Zero the external-buffer descriptor so unset planes/fields stay 0. */
  memset (&extbuf, 0, sizeof (extbuf));
  extbuf.pixel_format = va_format->fourcc;
  extbuf.width = GST_VIDEO_INFO_WIDTH (vip);
  extbuf.height = GST_VIDEO_INFO_HEIGHT (vip);
  /* "! !" normalizes the flag test to a canonical TRUE/FALSE gboolean. */
  extbuf_needed = ! !(flags & GST_VAAPI_SURFACE_ALLOC_FLAG_LINEAR_STORAGE);

  extbuf.num_planes = GST_VIDEO_INFO_N_PLANES (vip);
  /* Pinning strides or offsets also forces the external-buffer path. */
  if (flags & GST_VAAPI_SURFACE_ALLOC_FLAG_FIXED_STRIDES) {
    for (i = 0; i < extbuf.num_planes; i++)
      extbuf.pitches[i] = GST_VIDEO_INFO_PLANE_STRIDE (vip, i);
    extbuf_needed = TRUE;
  }
  if (flags & GST_VAAPI_SURFACE_ALLOC_FLAG_FIXED_OFFSETS) {
    for (i = 0; i < extbuf.num_planes; i++)
      extbuf.offsets[i] = GST_VIDEO_INFO_PLANE_OFFSET (vip, i);
    extbuf_needed = TRUE;
  }

  /* Build the attribute array in place; `attrib` walks forward and the
   * final count is recovered as (attrib - attribs) below. */
  attrib = attribs;
  attrib->flags = VA_SURFACE_ATTRIB_SETTABLE;
  attrib->type = VASurfaceAttribPixelFormat;
  attrib->value.type = VAGenericValueTypeInteger;
  attrib->value.value.i = va_format->fourcc;
  attrib++;

  if (extbuf_needed) {
    /* Storage still comes from VA itself; only the layout is constrained. */
    attrib->flags = VA_SURFACE_ATTRIB_SETTABLE;
    attrib->type = VASurfaceAttribMemoryType;
    attrib->value.type = VAGenericValueTypeInteger;
    attrib->value.value.i = VA_SURFACE_ATTRIB_MEM_TYPE_VA;
    attrib++;

    attrib->flags = VA_SURFACE_ATTRIB_SETTABLE;
    attrib->type = VASurfaceAttribExternalBufferDescriptor;
    attrib->value.type = VAGenericValueTypePointer;
    attrib->value.value.p = &extbuf;
    attrib++;
  }

  /* The VA display is not thread-safe; serialize the vaCreateSurfaces call. */
  GST_VAAPI_DISPLAY_LOCK (display);
  status = vaCreateSurfaces (GST_VAAPI_DISPLAY_VADISPLAY (display),
      va_chroma_format, extbuf.width, extbuf.height, &surface_id, 1,
      attribs, attrib - attribs);
  GST_VAAPI_DISPLAY_UNLOCK (display);
  if (!vaapi_check_status (status, "vaCreateSurfaces()"))
    return FALSE;

  /* Record the negotiated properties on the GstVaapiSurface wrapper. */
  surface->format = format;
  surface->chroma_type = chroma_type;
  surface->width = extbuf.width;
  surface->height = extbuf.height;

  GST_DEBUG ("surface %" GST_VAAPI_ID_FORMAT, GST_VAAPI_ID_ARGS (surface_id));
  GST_VAAPI_OBJECT_ID (surface) = surface_id;
  return TRUE;

  /* ERRORS */
error_unsupported_format:
  GST_ERROR ("unsupported format %s",
      gst_vaapi_video_format_to_string (format));
  return FALSE;
#else
  /* libva < 0.34: surface attributes are unavailable, so this path cannot
   * be supported at all. */
  return FALSE;
#endif
}
开发者ID:01org,项目名称:iotg-lin-gfx-gstreamer-vaapi,代码行数:92
注:本文中的GST_ERROR函数示例整理自Github/MSDocs等源码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。 C++ GST_ERROR_OBJECT函数代码示例 C++ GST_ELEMENT_WARNING函数代码示例 |