This article collects typical usage examples of the GST_ROUND_UP_4 function (in practice a macro) in C/C++ GStreamer code. If you are wondering what GST_ROUND_UP_4 does, how to call it, or what real-world uses look like, the 20 hand-picked examples below should help. They are drawn from open-source projects and ordered roughly by popularity.
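Before the examples, a short orientation. GST_ROUND_UP_4 rounds an integer up to the next multiple of 4; GStreamer defines it in gst/gstutils.h essentially as reproduced in the sketch below, and most of the examples that follow use it to pad a per-row byte count (rowstride) so each raw video row starts on a 4-byte boundary. The small main() driver is an illustration added for this article, not code from any of the quoted projects.

#include <stdio.h>

/* Round an integer up to the next multiple of 4; this matches the
 * definition shipped in GStreamer's gst/gstutils.h. */
#define GST_ROUND_UP_4(num) (((num) + 3) & ~3)

int
main (void)
{
  /* Typical use: compute a 4-byte-aligned rowstride for a raw video row. */
  printf ("GST_ROUND_UP_4 (16)      = %d\n", GST_ROUND_UP_4 (16));      /* 16   */
  printf ("GST_ROUND_UP_4 (17)      = %d\n", GST_ROUND_UP_4 (17));      /* 20   */
  printf ("GST_ROUND_UP_4 (639 * 3) = %d\n", GST_ROUND_UP_4 (639 * 3)); /* 1920 */
  return 0;
}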
Example 1: gst_pnmdec_push

static GstFlowReturn
gst_pnmdec_push (GstPnmdec * s, GstPad * src, GstBuffer * buf)
{
  /* Need to convert from PNM rowstride to GStreamer rowstride */
  if (s->mngr.info.width % 4 != 0) {
    guint i_rowstride;
    guint o_rowstride;
    GstBuffer *obuf;
    guint i;

    if (s->mngr.info.type == GST_PNM_TYPE_PIXMAP_RAW ||
        s->mngr.info.type == GST_PNM_TYPE_PIXMAP_ASCII) {
      i_rowstride = 3 * s->mngr.info.width;
      o_rowstride = GST_ROUND_UP_4 (i_rowstride);
    } else {
      i_rowstride = s->mngr.info.width;
      o_rowstride = GST_ROUND_UP_4 (i_rowstride);
    }

    obuf = gst_buffer_new_and_alloc (o_rowstride * s->mngr.info.height);
    gst_buffer_copy_metadata (obuf, buf, GST_BUFFER_COPY_ALL);
    for (i = 0; i < s->mngr.info.height; i++)
      memcpy (GST_BUFFER_DATA (obuf) + i * o_rowstride,
          GST_BUFFER_DATA (buf) + i * i_rowstride, i_rowstride);
    gst_buffer_unref (buf);
    return gst_pad_push (src, obuf);
  } else {
    return gst_pad_push (src, buf);
  }
}

Developer ID: bilboed, Project: gst-plugins-bad, Lines of code: 33
Example 2: mxf_up_handle_essence_element

static GstFlowReturn
mxf_up_handle_essence_element (const MXFUL * key, GstBuffer * buffer,
    GstCaps * caps, MXFMetadataTimelineTrack * track, gpointer mapping_data,
    GstBuffer ** outbuf)
{
  MXFUPMappingData *data = mapping_data;

  /* SMPTE 384M 7.1 */
  if (key->u[12] != 0x15 || (key->u[14] != 0x01 && key->u[14] != 0x02
          && key->u[14] != 0x03 && key->u[14] != 0x04)) {
    GST_ERROR ("Invalid uncompressed picture essence element");
    return GST_FLOW_ERROR;
  }

  if (!data || (data->image_start_offset == 0
          && data->image_end_offset == 0)) {
  } else {
    if (data->image_start_offset + data->image_end_offset
        > GST_BUFFER_SIZE (buffer)) {
      gst_buffer_unref (buffer);
      GST_ERROR ("Invalid buffer size");
      return GST_FLOW_ERROR;
    } else {
      GST_BUFFER_DATA (buffer) += data->image_start_offset;
      GST_BUFFER_SIZE (buffer) -= data->image_start_offset;
      GST_BUFFER_SIZE (buffer) -= data->image_end_offset;
    }
  }

  if (GST_BUFFER_SIZE (buffer) != data->bpp * data->width * data->height) {
    GST_ERROR ("Invalid buffer size");
    return GST_FLOW_ERROR;
  }

  if (data->bpp != 4
      || GST_ROUND_UP_4 (data->width * data->bpp) != data->width * data->bpp) {
    guint y;
    GstBuffer *ret;
    guint8 *indata, *outdata;

    ret = gst_buffer_new_and_alloc (GST_ROUND_UP_4 (data->width * data->bpp) *
        data->height);
    indata = GST_BUFFER_DATA (buffer);
    outdata = GST_BUFFER_DATA (ret);

    for (y = 0; y < data->height; y++) {
      memcpy (outdata, indata, data->width * data->bpp);
      outdata += GST_ROUND_UP_4 (data->width * data->bpp);
      indata += data->width * data->bpp;
    }

    gst_buffer_unref (buffer);

    *outbuf = ret;
  } else {
    *outbuf = buffer;
  }

  return GST_FLOW_OK;
}

Developer ID: pli3, Project: gst-plugins-bad, Lines of code: 60
Example 3: gst_phoenixsrc_set_caps

static gboolean
gst_phoenixsrc_set_caps (GstBaseSrc * bsrc, GstCaps * caps)
{
  GstPhoenixSrc *src = GST_PHOENIX_SRC (bsrc);
  GstVideoInfo vinfo;
  GstStructure *s = gst_caps_get_structure (caps, 0);

  GST_DEBUG_OBJECT (src, "The caps being set are %" GST_PTR_FORMAT, caps);

  gst_video_info_from_caps (&vinfo, caps);

  if (g_str_equal ("video/x-bayer", gst_structure_get_name (s))) {
    gint width;
    const gchar *format;
    gst_structure_get_int (s, "width", &width);
    gst_structure_get_int (s, "height", &src->height);
    format = gst_structure_get_string (s, "format");
    if (g_str_has_suffix (format, "16"))
      src->gst_stride = GST_ROUND_UP_4 (width * 2);
    else
      src->gst_stride = GST_ROUND_UP_4 (width);
  } else if (GST_VIDEO_INFO_FORMAT (&vinfo) != GST_VIDEO_FORMAT_UNKNOWN) {
    src->gst_stride = GST_VIDEO_INFO_COMP_STRIDE (&vinfo, 0);
    src->height = vinfo.height;
  } else {
    goto unsupported_caps;
  }

  return TRUE;

unsupported_caps:
  GST_ERROR_OBJECT (src, "Unsupported caps: %" GST_PTR_FORMAT, caps);
  return FALSE;
}

Developer ID: foolab, Project: gst-plugins-vision, Lines of code: 34
Example 4: generate_sampling_factors

/* based on upstream gst-plugins-good jpegencoder */
static void
generate_sampling_factors (GstVaapiEncoderJpeg * encoder)
{
  GstVideoInfo *vinfo;
  gint i;

  vinfo = GST_VAAPI_ENCODER_VIDEO_INFO (encoder);

  if (GST_VIDEO_INFO_FORMAT (vinfo) == GST_VIDEO_FORMAT_ENCODED) {
    /* Use native I420 format */
    encoder->n_components = 3;
    for (i = 0; i < encoder->n_components; ++i) {
      if (i == 0)
        encoder->h_samp[i] = encoder->v_samp[i] = 2;
      else
        encoder->h_samp[i] = encoder->v_samp[i] = 1;
      GST_DEBUG ("sampling factors: %d %d", encoder->h_samp[i],
          encoder->v_samp[i]);
    }
    return;
  }

  encoder->n_components = GST_VIDEO_INFO_N_COMPONENTS (vinfo);

  encoder->h_max_samp = 0;
  encoder->v_max_samp = 0;
  for (i = 0; i < encoder->n_components; ++i) {
    encoder->cwidth[i] = GST_VIDEO_INFO_COMP_WIDTH (vinfo, i);
    encoder->cheight[i] = GST_VIDEO_INFO_COMP_HEIGHT (vinfo, i);
    encoder->h_samp[i] =
        GST_ROUND_UP_4 (GST_VIDEO_INFO_WIDTH (vinfo)) / encoder->cwidth[i];
    encoder->h_max_samp = MAX (encoder->h_max_samp, encoder->h_samp[i]);
    encoder->v_samp[i] =
        GST_ROUND_UP_4 (GST_VIDEO_INFO_HEIGHT (vinfo)) / encoder->cheight[i];
    encoder->v_max_samp = MAX (encoder->v_max_samp, encoder->v_samp[i]);
  }

  /* samp should only be 1, 2 or 4 */
  g_assert (encoder->h_max_samp <= 4);
  g_assert (encoder->v_max_samp <= 4);

  /* now invert */
  /* maximum is invariant, as one of the components should have samp 1 */
  for (i = 0; i < encoder->n_components; ++i) {
    encoder->h_samp[i] = encoder->h_max_samp / encoder->h_samp[i];
    encoder->v_samp[i] = encoder->v_max_samp / encoder->v_samp[i];
    GST_DEBUG ("sampling factors: %d %d", encoder->h_samp[i],
        encoder->v_samp[i]);
  }
}

Developer ID: GStreamer, Project: gstreamer-vaapi, Lines of code: 50
Example 5: user_endrow_callback

static void
user_endrow_callback (png_structp png_ptr, png_bytep new_row,
    png_uint_32 row_num, int pass)
{
  GstPngDec *pngdec = NULL;

  pngdec = GST_PNGDEC (png_get_io_ptr (png_ptr));

  /* FIXME: implement interlaced pictures */

  /* If buffer_out doesn't exist, it means buffer_alloc failed, which
   * will already have set the return code */
  if (GST_IS_BUFFER (pngdec->current_frame->output_buffer)) {
    GstVideoFrame frame;
    GstBuffer *buffer = pngdec->current_frame->output_buffer;
    size_t offset;
    gint width;
    guint8 *data;

    if (!gst_video_frame_map (&frame, &pngdec->output_state->info, buffer,
            GST_MAP_WRITE)) {
      pngdec->ret = GST_FLOW_ERROR;
      return;
    }

    data = GST_VIDEO_FRAME_COMP_DATA (&frame, 0);
    offset = row_num * GST_VIDEO_FRAME_COMP_STRIDE (&frame, 0);

    GST_LOG ("got row %u, copying in buffer %p at offset %" G_GSIZE_FORMAT,
        (guint) row_num, pngdec->current_frame->output_buffer, offset);
    width = GST_ROUND_UP_4 (png_get_rowbytes (pngdec->png, pngdec->info));
    memcpy (data + offset, new_row, width);
    gst_video_frame_unmap (&frame);
    pngdec->ret = GST_FLOW_OK;
  }
}

Developer ID: lubing521, Project: gst-embedded-builder, Lines of code: 35
Example 6: gst_jpegenc_resync

static void
gst_jpegenc_resync (GstJpegEnc * jpegenc)
{
  gint width, height;
  gint i, j;

  GST_DEBUG_OBJECT (jpegenc, "resync");

  jpegenc->cinfo.image_width = width = jpegenc->width;
  jpegenc->cinfo.image_height = height = jpegenc->height;
  jpegenc->cinfo.input_components = jpegenc->channels;

  GST_DEBUG_OBJECT (jpegenc, "width %d, height %d", width, height);
  GST_DEBUG_OBJECT (jpegenc, "format %d", jpegenc->format);

  if (gst_video_format_is_rgb (jpegenc->format)) {
    GST_DEBUG_OBJECT (jpegenc, "RGB");
    jpegenc->cinfo.in_color_space = JCS_RGB;
  } else if (gst_video_format_is_gray (jpegenc->format)) {
    GST_DEBUG_OBJECT (jpegenc, "gray");
    jpegenc->cinfo.in_color_space = JCS_GRAYSCALE;
  } else {
    GST_DEBUG_OBJECT (jpegenc, "YUV");
    jpegenc->cinfo.in_color_space = JCS_YCbCr;
  }

  /* input buffer size as max output */
  jpegenc->bufsize =
      gst_video_format_get_size (jpegenc->format, width, height);
  jpeg_set_defaults (&jpegenc->cinfo);
  jpegenc->cinfo.raw_data_in = TRUE;
  /* duh, libjpeg maps RGB to YUV ... and don't expect some conversion */
  if (jpegenc->cinfo.in_color_space == JCS_RGB)
    jpeg_set_colorspace (&jpegenc->cinfo, JCS_RGB);

  GST_DEBUG_OBJECT (jpegenc, "h_max_samp=%d, v_max_samp=%d",
      jpegenc->h_max_samp, jpegenc->v_max_samp);
  /* image dimension info */
  for (i = 0; i < jpegenc->channels; i++) {
    GST_DEBUG_OBJECT (jpegenc, "comp %i: h_samp=%d, v_samp=%d", i,
        jpegenc->h_samp[i], jpegenc->v_samp[i]);
    jpegenc->cinfo.comp_info[i].h_samp_factor = jpegenc->h_samp[i];
    jpegenc->cinfo.comp_info[i].v_samp_factor = jpegenc->v_samp[i];
    jpegenc->line[i] = g_realloc (jpegenc->line[i],
        jpegenc->v_max_samp * DCTSIZE * sizeof (char *));
    if (!jpegenc->planar) {
      for (j = 0; j < jpegenc->v_max_samp * DCTSIZE; j++) {
        jpegenc->row[i][j] = g_realloc (jpegenc->row[i][j], width);
        jpegenc->line[i][j] = jpegenc->row[i][j];
      }
    }
  }

  /* guard against a potential error in gst_jpegenc_term_destination
     which occurs iff bufsize % 4 < free_space_remaining */
  jpegenc->bufsize = GST_ROUND_UP_4 (jpegenc->bufsize);

  jpeg_suppress_tables (&jpegenc->cinfo, TRUE);

  GST_DEBUG_OBJECT (jpegenc, "resync done");
}

Developer ID: dgerlach, Project: gst-plugins-good, Lines of code: 60
Example 7: _evas_video_i420

static void
_evas_video_i420(unsigned char *evas_data, const unsigned char *gst_data,
                 unsigned int w, unsigned int h, unsigned int output_height)
{
   const unsigned char **rows;
   unsigned int i, j;
   unsigned int rh;
   unsigned int stride_y, stride_uv;

   rh = output_height;

   rows = (const unsigned char **)evas_data;

   stride_y = GST_ROUND_UP_4(w);
   stride_uv = GST_ROUND_UP_8(w) / 2;

   for (i = 0; i < rh; i++)
     rows[i] = &gst_data[i * stride_y];

   for (j = 0; j < (rh / 2); j++, i++)
     rows[i] = &gst_data[h * stride_y + j * stride_uv];

   for (j = 0; j < (rh / 2); j++, i++)
     rows[i] = &gst_data[h * stride_y + (rh / 2) * stride_uv + j * stride_uv];
}

Developer ID: Limsik, Project: e17, Lines of code: 26
Example 8: paint_setup_Y800

static void
paint_setup_Y800 (paintinfo * p, unsigned char *dest)
{
  /* untested */
  p->yp = dest;
  p->ystride = GST_ROUND_UP_4 (p->width);
  p->endptr = dest + p->ystride * p->height;
}

Developer ID: genesi, Project: gst-base-plugins, Lines of code: 8
Example 9: paint_setup_IYU2

static void
paint_setup_IYU2 (paintinfo * p, unsigned char *dest)
{
  /* untested */
  p->yp = dest + 1;
  p->up = dest + 0;
  p->vp = dest + 2;
  p->ystride = GST_ROUND_UP_4 (p->width * 3);
  p->endptr = dest + p->ystride * p->height;
}

Developer ID: genesi, Project: gst-base-plugins, Lines of code: 10
Example 10: paint_setup_Y42B

static void
paint_setup_Y42B (paintinfo * p, unsigned char *dest)
{
  p->yp = dest;
  p->ystride = GST_ROUND_UP_4 (p->width);
  p->up = p->yp + p->ystride * p->height;
  p->ustride = GST_ROUND_UP_8 (p->width) / 2;
  p->vp = p->up + p->ustride * p->height;
  p->vstride = GST_ROUND_UP_8 (p->width) / 2;
  p->endptr = p->vp + p->vstride * p->height;
}

Developer ID: genesi, Project: gst-base-plugins, Lines of code: 11
Example 11: mxf_up_write_func

static GstFlowReturn
mxf_up_write_func (GstBuffer * buffer, GstCaps * caps, gpointer mapping_data,
    GstAdapter * adapter, GstBuffer ** outbuf, gboolean flush)
{
  MXFUPMappingData *data = mapping_data;

  if (!buffer)
    return GST_FLOW_OK;

  if (GST_BUFFER_SIZE (buffer) !=
      GST_ROUND_UP_4 (data->bpp * data->width) * data->height) {
    GST_ERROR ("Invalid buffer size");
    return GST_FLOW_ERROR;
  }

  if (data->bpp != 4
      || GST_ROUND_UP_4 (data->width * data->bpp) != data->width * data->bpp) {
    guint y;
    GstBuffer *ret;
    guint8 *indata, *outdata;

    ret = gst_buffer_new_and_alloc (data->width * data->bpp * data->height);
    indata = GST_BUFFER_DATA (buffer);
    outdata = GST_BUFFER_DATA (ret);

    for (y = 0; y < data->height; y++) {
      memcpy (outdata, indata, data->width * data->bpp);
      indata += GST_ROUND_UP_4 (data->width * data->bpp);
      outdata += data->width * data->bpp;
    }

    gst_buffer_unref (buffer);

    *outbuf = ret;
  } else {
    *outbuf = buffer;
  }

  return GST_FLOW_OK;
}

Developer ID: pli3, Project: gst-plugins-bad, Lines of code: 40
Example 12: _evas_video_yuy2

static void
_evas_video_yuy2(unsigned char *evas_data, const unsigned char *gst_data,
                 unsigned int w, unsigned int h __UNUSED__,
                 unsigned int output_height)
{
   const unsigned char **rows;
   unsigned int i;
   unsigned int stride;

   rows = (const unsigned char **)evas_data;

   stride = GST_ROUND_UP_4(w * 2);

   for (i = 0; i < output_height; i++)
     rows[i] = &gst_data[i * stride];
}

Developer ID: Limsik, Project: e17, Lines of code: 14
Example 13: gst_pngenc_setcaps

static gboolean
gst_pngenc_setcaps (GstPad * pad, GstCaps * caps)
{
  GstPngEnc *pngenc;
  const GValue *fps;
  GstStructure *structure;
  GstCaps *pcaps;
  gboolean ret = TRUE;

  pngenc = GST_PNGENC (gst_pad_get_parent (pad));

  structure = gst_caps_get_structure (caps, 0);
  gst_structure_get_int (structure, "width", &pngenc->width);
  gst_structure_get_int (structure, "height", &pngenc->height);
  fps = gst_structure_get_value (structure, "framerate");
  gst_structure_get_int (structure, "bpp", &pngenc->bpp);

  if (pngenc->bpp == 32)
    pngenc->stride = pngenc->width * 4;
  else if (pngenc->bpp == 8)
    pngenc->stride = GST_ROUND_UP_4 (pngenc->width);
  else
    pngenc->stride = GST_ROUND_UP_4 (pngenc->width * 3);

  pcaps = gst_caps_new_simple ("image/png",
      "width", G_TYPE_INT, pngenc->width,
      "height", G_TYPE_INT, pngenc->height, NULL);
  structure = gst_caps_get_structure (pcaps, 0);
  gst_structure_set_value (structure, "framerate", fps);

  ret = gst_pad_set_caps (pngenc->srcpad, pcaps);
  gst_caps_unref (pcaps);

  gst_object_unref (pngenc);

  return ret;
}

Developer ID: spunktsch, Project: svtplayer, Lines of code: 37
Example 14: paint_setup_YVU9

static void
paint_setup_YVU9 (paintinfo * p, unsigned char *dest)
{
  int h = GST_ROUND_UP_4 (p->height);

  p->yp = dest;
  p->ystride = GST_ROUND_UP_4 (p->width);
  p->vp = p->yp + p->ystride * GST_ROUND_UP_4 (p->height);
  p->vstride = GST_ROUND_UP_4 (p->ystride / 4);
  p->up = p->vp + p->vstride * GST_ROUND_UP_4 (h / 4);
  p->ustride = GST_ROUND_UP_4 (p->ystride / 4);
  p->endptr = p->up + p->ustride * GST_ROUND_UP_4 (h / 4);
}

Developer ID: genesi, Project: gst-base-plugins, Lines of code: 13
Example 15: MediaPlayer::drawVideoFrame

void MediaPlayer::drawVideoFrame(QPainter &p, const QRect &rect)
{
    QMutexLocker m( &m_lastVideoSampleMutex );

    if ( !m_lastVideoSample )
        return;

    // get the snapshot buffer format now. We set the caps on the appsink so
    // that it can only be an rgb buffer.
    GstCaps *caps = gst_sample_get_caps( m_lastVideoSample );

    if ( !caps )
    {
        reportError( "could not get caps for the new video sample" );
        return;
    }

    GstStructure * structure = gst_caps_get_structure( caps, 0 );

    // We need to get the final caps on the buffer to get the size
    int width = 0;
    int height = 0;

    gst_structure_get_int( structure, "width", &width );
    gst_structure_get_int( structure, "height", &height );

    if ( !width || !height )
    {
        reportError( "could not get video height and width" );
        return;
    }

    // Create pixmap from buffer and save, gstreamer video buffers have a stride that
    // is rounded up to the nearest multiple of 4
    GstBuffer *buffer = gst_sample_get_buffer( m_lastVideoSample );
    GstMapInfo map;

    if ( !gst_buffer_map( buffer, &map, GST_MAP_READ ) )
    {
        reportError( "could not map video buffer" );
        return;
    }

    p.drawImage( rect,
                 QImage( map.data, width, height, GST_ROUND_UP_4 (width * 4),
                         QImage::Format_RGB32 ),
                 QRect( 0, 0, width, height ) );

    // And clean up
    gst_buffer_unmap( buffer, &map );
}

Developer ID: renielcanlas, Project: spivak, Lines of code: 48
Example 16: gst_audio_cheb_band_set_property

static void
gst_audio_cheb_band_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec)
{
  GstAudioChebBand *filter = GST_AUDIO_CHEB_BAND (object);

  switch (prop_id) {
    case PROP_MODE:
      g_mutex_lock (filter->lock);
      filter->mode = g_value_get_enum (value);
      generate_coefficients (filter);
      g_mutex_unlock (filter->lock);
      break;
    case PROP_TYPE:
      g_mutex_lock (filter->lock);
      filter->type = g_value_get_int (value);
      generate_coefficients (filter);
      g_mutex_unlock (filter->lock);
      break;
    case PROP_LOWER_FREQUENCY:
      g_mutex_lock (filter->lock);
      filter->lower_frequency = g_value_get_float (value);
      generate_coefficients (filter);
      g_mutex_unlock (filter->lock);
      break;
    case PROP_UPPER_FREQUENCY:
      g_mutex_lock (filter->lock);
      filter->upper_frequency = g_value_get_float (value);
      generate_coefficients (filter);
      g_mutex_unlock (filter->lock);
      break;
    case PROP_RIPPLE:
      g_mutex_lock (filter->lock);
      filter->ripple = g_value_get_float (value);
      generate_coefficients (filter);
      g_mutex_unlock (filter->lock);
      break;
    case PROP_POLES:
      g_mutex_lock (filter->lock);
      filter->poles = GST_ROUND_UP_4 (g_value_get_int (value));
      generate_coefficients (filter);
      g_mutex_unlock (filter->lock);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}

Developer ID: Sandec, Project: openjfx-8u-dev-rt, Lines of code: 48
Example 17: clutter_gst_ayuv_upload

static void
clutter_gst_ayuv_upload (ClutterGstVideoSink *sink,
                         GstBuffer *buffer)
{
  ClutterGstVideoSinkPrivate *priv = sink->priv;

  clutter_texture_set_from_rgb_data (priv->texture,
                                     GST_BUFFER_DATA (buffer),
                                     TRUE,
                                     priv->width,
                                     priv->height,
                                     GST_ROUND_UP_4 (4 * priv->width),
                                     4,
                                     0,
                                     NULL);
}

Developer ID: 3dfxmadscientist, Project: gnome-apps, Lines of code: 16
Example 18: gst_jpegenc_term_destination

static void
gst_jpegenc_term_destination (j_compress_ptr cinfo)
{
  GstJpegEnc *jpegenc = (GstJpegEnc *) (cinfo->client_data);
  GST_DEBUG_OBJECT (jpegenc, "gst_jpegenc_chain: term_source");

  /* Trim the buffer size and push it. */
  GST_BUFFER_SIZE (jpegenc->output_buffer) =
      GST_ROUND_UP_4 (GST_BUFFER_SIZE (jpegenc->output_buffer) -
      jpegenc->jdest.free_in_buffer);

  g_signal_emit (G_OBJECT (jpegenc), gst_jpegenc_signals[FRAME_ENCODED], 0);

  jpegenc->last_ret = gst_pad_push (jpegenc->srcpad, jpegenc->output_buffer);
  jpegenc->output_buffer = NULL;
}

Developer ID: dgerlach, Project: gst-plugins-good, Lines of code: 16
Example 19: clutter_gst_rgb32_upload

static void
clutter_gst_rgb32_upload (ClutterGstVideoSink *sink,
                          GstBuffer *buffer)
{
  ClutterGstVideoSinkPrivate *priv = sink->priv;

  clutter_texture_set_from_rgb_data (priv->texture,
                                     GST_BUFFER_DATA (buffer),
                                     TRUE,
                                     priv->width,
                                     priv->height,
                                     GST_ROUND_UP_4 (4 * priv->width),
                                     4,
                                     priv->bgr ?
                                     CLUTTER_TEXTURE_RGB_FLAG_BGR : 0,
                                     NULL);
}

Developer ID: 3dfxmadscientist, Project: gnome-apps, Lines of code: 17
Example 20: my_vidmem_alloc

GstMemory *
my_vidmem_alloc (guint format, guint width, guint height)
{
  MyVidmem *mem;
  gsize maxsize;

  GST_DEBUG ("alloc frame format %u %ux%u", format, width, height);

  maxsize = (GST_ROUND_UP_4 (width) * height);

  mem = g_slice_new (MyVidmem);

  gst_memory_init (GST_MEMORY_CAST (mem), 0, _my_allocator, NULL,
      maxsize, 31, 0, maxsize);

  mem->format = format;
  mem->width = width;
  mem->height = height;
  mem->data = NULL;

  return (GstMemory *) mem;
}

Developer ID: lubing521, Project: gst-embedded-builder, Lines of code: 22
Note: the GST_ROUND_UP_4 examples in this article were collected from source-code and documentation hosting platforms such as GitHub and MSDocs. The snippets are taken from open-source projects contributed by their authors; copyright remains with the original authors, and redistribution or use should follow the corresponding project's license. Please do not republish without permission.