这篇教程整理的 GPAC(C 语言)GF_LOG 函数代码示例写得很实用,希望能帮到您。
本文整理汇总了C++中GF_LOG函数的典型用法代码示例。如果您正苦于以下问题:C++ GF_LOG函数的具体用法?C++ GF_LOG怎么用?C++ GF_LOG使用的例子?那么恭喜您, 这里精选的函数代码示例或许可以为您提供帮助。 在下文中一共展示了GF_LOG函数的27个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于我们的系统推荐出更棒的C++代码示例。 示例1: ISOR_ServiceCommandGF_Err ISOR_ServiceCommand(GF_InputService *plug, GF_NetworkCommand *com){ Double track_dur, media_dur; ISOMChannel *ch; ISOMReader *read; if (!plug || !plug->priv || !com) return GF_SERVICE_ERROR; read = (ISOMReader *) plug->priv; if (com->command_type==GF_NET_SERVICE_INFO) { u32 tag_len; const char *tag; if (gf_isom_apple_get_tag(read->mov, GF_ISOM_ITUNE_NAME, &tag, &tag_len)==GF_OK) com->info.name = tag; if (gf_isom_apple_get_tag(read->mov, GF_ISOM_ITUNE_ARTIST, &tag, &tag_len)==GF_OK) com->info.artist = tag; if (gf_isom_apple_get_tag(read->mov, GF_ISOM_ITUNE_ALBUM, &tag, &tag_len)==GF_OK) com->info.album = tag; if (gf_isom_apple_get_tag(read->mov, GF_ISOM_ITUNE_COMMENT, &tag, &tag_len)==GF_OK) com->info.comment = tag; if (gf_isom_apple_get_tag(read->mov, GF_ISOM_ITUNE_TRACK, &tag, &tag_len)==GF_OK) { com->info.track_info = (((tag[2]<<8)|tag[3]) << 16) | ((tag[4]<<8)|tag[5]); } if (gf_isom_apple_get_tag(read->mov, GF_ISOM_ITUNE_COMPOSER, &tag, &tag_len)==GF_OK) com->info.composer = tag; if (gf_isom_apple_get_tag(read->mov, GF_ISOM_ITUNE_WRITER, &tag, &tag_len)==GF_OK) com->info.writer = tag; if (gf_isom_apple_get_tag(read->mov, GF_ISOM_ITUNE_GENRE, &tag, &tag_len)==GF_OK) { if (tag[0]) { com->info.genre = 0; } else { com->info.genre = (tag[0]<<8) | tag[1]; } } return GF_OK; } if (com->command_type==GF_NET_SERVICE_HAS_AUDIO) { u32 i, count; count = gf_isom_get_track_count(read->mov); for (i=0; i<count; i++) { if (gf_isom_get_media_type(read->mov, i+1) == GF_ISOM_MEDIA_AUDIO) return GF_OK; } return GF_NOT_SUPPORTED; } if (!com->base.on_channel) return GF_NOT_SUPPORTED; ch = isor_get_channel(read, com->base.on_channel); if (!ch) return GF_STREAM_NOT_FOUND; switch (com->command_type) { case GF_NET_CHAN_SET_PADDING: if (!ch->track) return 
GF_OK; gf_isom_set_sample_padding(read->mov, ch->track, com->pad.padding_bytes); return GF_OK; case GF_NET_CHAN_SET_PULL: ch->is_pulling = 1; return GF_OK; case GF_NET_CHAN_INTERACTIVE: return GF_OK; case GF_NET_CHAN_BUFFER: com->buffer.max = com->buffer.min = 0; return GF_OK; case GF_NET_CHAN_DURATION: if (!ch->track) { com->duration.duration = 0; return GF_OK; } ch->duration = gf_isom_get_track_duration(read->mov, ch->track); track_dur = (Double) (s64) ch->duration; track_dur /= read->time_scale; if (gf_isom_get_edit_segment_count(read->mov, ch->track)) { com->duration.duration = (Double) track_dur; ch->duration = (u32) (track_dur * ch->time_scale); } else { /*some file indicate a wrong TrackDuration, get the longest*/ ch->duration = gf_isom_get_media_duration(read->mov, ch->track); media_dur = (Double) (s64) ch->duration; media_dur /= ch->time_scale; com->duration.duration = MAX(track_dur, media_dur); } return GF_OK; case GF_NET_CHAN_PLAY: if (!ch->is_pulling) return GF_NOT_SUPPORTED; assert(!ch->is_playing); isor_reset_reader(ch); ch->speed = com->play.speed; ch->start = ch->end = 0; if (com->play.speed>0) { if (com->play.start_range>=0) { ch->start = (u64) (s64) (com->play.start_range * ch->time_scale); ch->start = check_round(ch, ch->start, com->play.start_range, 1); } if (com->play.end_range >= com->play.start_range) { ch->end = (u64) (s64) (com->play.end_range*ch->time_scale); ch->end = check_round(ch, ch->end, com->play.end_range, 0); } } else if (com->play.speed<0) { if (com->play.end_range>=com->play.start_range) ch->start = (u64) (s64) (com->play.start_range * ch->time_scale); if (com->play.end_range >= 0) ch->end = (u64) (s64) (com->play.end_range*ch->time_scale); } ch->is_playing = 1; if (com->play.dash_segment_switch) ch->wait_for_segment_switch = 1; GF_LOG(GF_LOG_DEBUG, GF_LOG_NETWORK, ("[IsoMedia] Starting channel playback "LLD" to "LLD" (%g to %g)/n", ch->start, ch->end, com->play.start_range, com->play.end_range)); return GF_OK; case 
GF_NET_CHAN_STOP: isor_reset_reader(ch); return GF_OK;//.........这里部分代码省略.........
开发者ID:supperlitt,项目名称:gpac,代码行数:101,
示例2: gf_sm_load_init_isomGF_Err gf_sm_load_init_isom(GF_SceneLoader *load){ u32 i; GF_BIFSConfig *bc; GF_ESD *esd; GF_Err e; char *scene_msg = "MPEG-4 BIFS Scene Parsing"; if (!load->isom) return GF_BAD_PARAM; /*load IOD*/ load->ctx->root_od = (GF_ObjectDescriptor *) gf_isom_get_root_od(load->isom); if (!load->ctx->root_od) { e = gf_isom_last_error(load->isom); if (e) return e; } else if ((load->ctx->root_od->tag != GF_ODF_OD_TAG) && (load->ctx->root_od->tag != GF_ODF_IOD_TAG)) { gf_odf_desc_del((GF_Descriptor *) load->ctx->root_od); load->ctx->root_od = NULL; } esd = NULL; /*get root scene stream*/ for (i=0; i<gf_isom_get_track_count(load->isom); i++) { u32 type = gf_isom_get_media_type(load->isom, i+1); if (type != GF_ISOM_MEDIA_SCENE) continue; if (! gf_isom_is_track_in_root_od(load->isom, i+1) ) continue; esd = gf_isom_get_esd(load->isom, i+1, 1); if (esd && esd->URLString) { gf_odf_desc_del((GF_Descriptor *)esd); esd = NULL; continue; } /*make sure we load the root BIFS stream first*/ if (esd && esd->dependsOnESID && (esd->dependsOnESID!=esd->ESID) ) { u32 track = gf_isom_get_track_by_id(load->isom, esd->dependsOnESID); if (gf_isom_get_media_type(load->isom, track) != GF_ISOM_MEDIA_OD) { gf_odf_desc_del((GF_Descriptor *)esd); esd = NULL; continue; } } if (esd->decoderConfig->objectTypeIndication==0x09) scene_msg = "MPEG-4 LASeR Scene Parsing"; break; } if (!esd) return GF_OK; e = GF_OK; GF_LOG(GF_LOG_INFO, GF_LOG_PARSER, ("%s/n", scene_msg)); /*BIFS: update size & pixel metrics info*/ if (esd->decoderConfig->objectTypeIndication<=2) { bc = gf_odf_get_bifs_config(esd->decoderConfig->decoderSpecificInfo, esd->decoderConfig->objectTypeIndication); if (!bc->elementaryMasks && bc->pixelWidth && bc->pixelHeight) { load->ctx->scene_width = bc->pixelWidth; load->ctx->scene_height = bc->pixelHeight; load->ctx->is_pixel_metrics = bc->pixelMetrics; } gf_odf_desc_del((GF_Descriptor *) bc); /*note we don't load the first BIFS AU to avoid storing the BIFS decoder, needed to 
properly handle quantization*/ } /*LASeR*/ else if (esd->decoderConfig->objectTypeIndication==0x09) { load->ctx->is_pixel_metrics = 1; } gf_odf_desc_del((GF_Descriptor *) esd); esd = NULL; load->process = gf_sm_load_run_isom; load->done = gf_sm_load_done_isom; load->suspend = gf_sm_isom_suspend; return GF_OK;}
开发者ID:Brilon314,项目名称:gpac,代码行数:74,
示例3: OSVC_AttachStreamstatic GF_Err OSVC_AttachStream(GF_BaseDecoder *ifcg, GF_ESD *esd){ u32 i, count; s32 res; OPENSVCFRAME Picture; int Layer[4]; OSVCDec *ctx = (OSVCDec*) ifcg->privateStack; /*todo: we should check base layer of this stream is indeed our base layer*/ if (!ctx->ES_ID) { ctx->ES_ID = esd->ESID; ctx->width = ctx->height = ctx->out_size = 0; if (!esd->dependsOnESID) ctx->baseES_ID = esd->ESID; } if (esd->decoderConfig->decoderSpecificInfo && esd->decoderConfig->decoderSpecificInfo->data) { GF_AVCConfig *cfg = gf_odf_avc_cfg_read(esd->decoderConfig->decoderSpecificInfo->data, esd->decoderConfig->decoderSpecificInfo->dataLength); if (!cfg) return GF_NON_COMPLIANT_BITSTREAM; if (!esd->dependsOnESID) { ctx->nalu_size_length = cfg->nal_unit_size; if (SVCDecoder_init(&ctx->codec) == SVC_STATUS_ERROR) return GF_IO_ERR; } /*decode all NALUs*/ count = gf_list_count(cfg->sequenceParameterSets); SetCommandLayer(Layer, 255, 0, &res, 0);//bufindex can be reset without pb for (i=0; i<count; i++) { u32 w=0, h=0, sid; s32 par_n=0, par_d=0; GF_AVCConfigSlot *slc = (GF_AVCConfigSlot*)gf_list_get(cfg->sequenceParameterSets, i);#ifndef GPAC_DISABLE_AV_PARSERS gf_avc_get_sps_info(slc->data, slc->size, &sid, &w, &h, &par_n, &par_d);#endif /*by default use the base layer*/ if (!i) { if ((ctx->width<w) || (ctx->height<h)) { ctx->width = w; ctx->height = h; if ( ((s32)par_n>0) && ((s32)par_d>0) ) ctx->pixel_ar = (par_n<<16) || par_d; } } res = decodeNAL(ctx->codec, (unsigned char *) slc->data, slc->size, &Picture, Layer); if (res<0) { GF_LOG(GF_LOG_ERROR, GF_LOG_CODEC, ("[SVC Decoder] Error decoding SPS %d/n", res)); } GF_LOG(GF_LOG_DEBUG, GF_LOG_CODEC, ("[SVC Decoder] Attach: SPS id=/"%d/" code=/"%d/" size=/"%d/"/n", slc->id, slc->data[0] & 0x1F, slc->size)); } count = gf_list_count(cfg->pictureParameterSets); for (i=0; i<count; i++) { u32 sps_id, pps_id; GF_AVCConfigSlot *slc = (GF_AVCConfigSlot*)gf_list_get(cfg->pictureParameterSets, i); gf_avc_get_pps_info(slc->data, 
slc->size, &pps_id, &sps_id); res = decodeNAL(ctx->codec, (unsigned char *) slc->data, slc->size, &Picture, Layer); if (res<0) { GF_LOG(GF_LOG_ERROR, GF_LOG_CODEC, ("[SVC Decoder] Error decoding PPS %d/n", res)); } GF_LOG(GF_LOG_DEBUG, GF_LOG_CODEC, ("[SVC Decoder] Attach: PPS id=/"%d/" code=/"%d/" size=/"%d/" sps_id=/"%d/"/n", pps_id, slc->data[0] & 0x1F, slc->size, sps_id)); } ctx->state_found = GF_TRUE; gf_odf_avc_cfg_del(cfg); } else { if (ctx->nalu_size_length) { return GF_NOT_SUPPORTED; } ctx->nalu_size_length = 0; if (!esd->dependsOnESID) { if (SVCDecoder_init(&ctx->codec) == SVC_STATUS_ERROR) return GF_IO_ERR; } ctx->pixel_ar = (1<<16) || 1; } ctx->stride = ctx->width + 32; ctx->CurrentDqId = ctx->MaxDqId = 0; ctx->out_size = ctx->stride * ctx->height * 3 / 2; return GF_OK;}
开发者ID:ARSekkat,项目名称:gpac,代码行数:78,
示例4: gf_enum_directory//.........这里部分代码省略......... case '/': case '//': swprintf(path, MAX_PATH, L"%s*", w_dir); break; default: swprintf(path, MAX_PATH, L"%s%c*", w_dir, GF_PATH_SEPARATOR); break; } { const char* tmpfilter = filter; gf_utf8_mbstowcs(w_filter, sizeof(w_filter), &tmpfilter); }#else strcpy(path, dir); if (path[strlen(path)-1] != '/') strcat(path, "/");#endif#ifdef WIN32 SearchH= FindFirstFileW(path, &FindData); if (SearchH == INVALID_HANDLE_VALUE) return GF_IO_ERR;#if defined (_WIN32_WCE) _path[strlen(_path)-1] = 0;#else path[wcslen(path)-1] = 0;#endif while (SearchH != INVALID_HANDLE_VALUE) {#else the_dir = opendir(path); if (the_dir == NULL) { GF_LOG(GF_LOG_ERROR, GF_LOG_CORE, ("[Core] Cannot open directory %s for enumeration: %d/n", path, errno)); return GF_IO_ERR; } the_file = readdir(the_dir); while (the_file) {#endif memset(&file_info, 0, sizeof(GF_FileEnumInfo) );#if defined (_WIN32_WCE) if (!wcscmp(FindData.cFileName, _T(".") )) goto next; if (!wcscmp(FindData.cFileName, _T("..") )) goto next;#elif defined(WIN32) if (!wcscmp(FindData.cFileName, L".")) goto next; if (!wcscmp(FindData.cFileName, L"..")) goto next;#else if (!strcmp(the_file->d_name, "..")) goto next; if (the_file->d_name[0] == '.') goto next;#endif#ifdef WIN32 file_info.directory = (FindData.dwFileAttributes & FILE_ATTRIBUTE_DIRECTORY) ? GF_TRUE : GF_FALSE; if (!enum_directory && file_info.directory) goto next; if (enum_directory && !file_info.directory) goto next;#endif if (filter) {#if defined (_WIN32_WCE) short ext[30]; short *sep = wcsrchr(FindData.cFileName, (wchar_t) '.'); if (!sep) goto next;
开发者ID:CCExtractor,项目名称:ccextractor,代码行数:67,
示例5: svg_drawable_pickvoid svg_drawable_pick(GF_Node *node, Drawable *drawable, GF_TraverseState *tr_state){ DrawAspect2D asp; GF_Matrix2D inv_2d; Fixed x, y; Bool picked = 0; GF_Compositor *compositor = tr_state->visual->compositor; SVGPropertiesPointers backup_props; GF_Matrix2D backup_matrix; GF_Matrix mx_3d; SVGAllAttributes all_atts; if (!drawable->path) return; gf_svg_flatten_attributes((SVG_Element *)node, &all_atts); memcpy(&backup_props, tr_state->svg_props, sizeof(SVGPropertiesPointers)); gf_svg_apply_inheritance(&all_atts, tr_state->svg_props); if (compositor_svg_is_display_off(tr_state->svg_props)) return; compositor_svg_apply_local_transformation(tr_state, &all_atts, &backup_matrix, &mx_3d); memset(&asp, 0, sizeof(DrawAspect2D)); drawable_get_aspect_2d_svg(node, &asp, tr_state);#ifndef GPAC_DISABLE_3D if (tr_state->visual->type_3d) { svg_drawable_3d_pick(drawable, tr_state, &asp); compositor_svg_restore_parent_transformation(tr_state, &backup_matrix, &mx_3d); memcpy(tr_state->svg_props, &backup_props, sizeof(SVGPropertiesPointers)); return; } #endif gf_mx2d_copy(inv_2d, tr_state->transform); gf_mx2d_inverse(&inv_2d); x = tr_state->ray.orig.x; y = tr_state->ray.orig.y; gf_mx2d_apply_coords(&inv_2d, &x, &y); picked = svg_drawable_is_over(drawable, x, y, &asp, tr_state, NULL); if (picked) { u32 count, i; compositor->hit_local_point.x = x; compositor->hit_local_point.y = y; compositor->hit_local_point.z = 0; gf_mx_from_mx2d(&compositor->hit_world_to_local, &tr_state->transform); gf_mx_from_mx2d(&compositor->hit_local_to_world, &inv_2d); compositor->hit_node = drawable->node; compositor->hit_use_dom_events = 1; compositor->hit_normal.x = compositor->hit_normal.y = 0; compositor->hit_normal.z = FIX_ONE; compositor->hit_texcoords.x = gf_divfix(x, drawable->path->bbox.width) + FIX_ONE/2; compositor->hit_texcoords.y = gf_divfix(y, drawable->path->bbox.height) + FIX_ONE/2; svg_clone_use_stack(compositor, tr_state); /*not use in SVG patterns*/ 
compositor->hit_appear = NULL; /*also stack any VRML sensors present at the current level. If the event is not catched by a listener in the SVG tree, the event will be forwarded to the VRML tree*/ gf_list_reset(tr_state->visual->compositor->sensors); count = gf_list_count(tr_state->vrml_sensors); for (i=0; i<count; i++) { gf_list_add(tr_state->visual->compositor->sensors, gf_list_get(tr_state->vrml_sensors, i)); } GF_LOG(GF_LOG_DEBUG, GF_LOG_COMPOSE, ("[SVG Picking] node %s is under mouse - hit %g %g 0/n", gf_node_get_log_name(drawable->node), FIX2FLT(x), FIX2FLT(y))); } compositor_svg_restore_parent_transformation(tr_state, &backup_matrix, &mx_3d); memcpy(tr_state->svg_props, &backup_props, sizeof(SVGPropertiesPointers));}
开发者ID:bigbensk,项目名称:gpac,代码行数:73,
示例6: AC3_ProcessDatastatic GF_Err AC3_ProcessData(GF_MediaDecoder *ifcg, char *inBuffer, u32 inBufferLength, u16 ES_ID, u32 *CTS, char *outBuffer, u32 *outBufferLength, u8 PaddingBits, u32 mmlevel){ short *out_samples; int i, len, bit_rate; sample_t level; A52CTX(); /*check not using scalabilty*/ if (ctx->ES_ID != ES_ID) return GF_BAD_PARAM; /*if late or seeking don't decode*/ switch (mmlevel) { case GF_CODEC_LEVEL_SEEK: case GF_CODEC_LEVEL_DROP: *outBufferLength = 0; return GF_OK; default: break; } if (ctx->out_size > *outBufferLength) { *outBufferLength = ctx->out_size; return GF_BUFFER_TOO_SMALL; } GF_LOG(GF_LOG_DEBUG, GF_LOG_CODEC, ("[A52] Decoding AU/n")); len = a52_syncinfo(inBuffer, &ctx->flags, &ctx->sample_rate, &bit_rate); if (!len) return GF_NON_COMPLIANT_BITSTREAM; /*init decoder*/ if (!ctx->out_size) { ctx->num_channels = ac3_channels[ctx->flags & 7]; if (ctx->flags & A52_LFE) ctx->num_channels++; ctx->flags |= A52_ADJUST_LEVEL; ctx->out_size = ctx->num_channels * sizeof(short) * 1536; *outBufferLength = ctx->out_size; return GF_BUFFER_TOO_SMALL; } level = 1; if ( a52_frame(ctx->codec, inBuffer, &ctx->flags, &level, 384)) { GF_LOG(GF_LOG_DEBUG, GF_LOG_CODEC, ("[A52] Error decoding AU/n" )); *outBufferLength = 0; return GF_NON_COMPLIANT_BITSTREAM; } out_samples = (short*)outBuffer; for (i=0; i<6; i++) { if (a52_block(ctx->codec)) return GF_NON_COMPLIANT_BITSTREAM; float_to_int(ctx->samples, out_samples + i * 256 * ctx->num_channels, ctx->num_channels); } *outBufferLength = 6 * ctx->num_channels * 256 * sizeof(short); return GF_OK;}
开发者ID:ARSekkat,项目名称:gpac,代码行数:64,
示例7: ts_amux_newGF_AbstractTSMuxer * ts_amux_new(GF_AVRedirect * avr, u32 videoBitrateInBitsPerSec, u32 width, u32 height, u32 audioBitRateInBitsPerSec) { GF_AbstractTSMuxer * ts = gf_malloc( sizeof(GF_AbstractTSMuxer)); memset( ts, 0, sizeof( GF_AbstractTSMuxer)); ts->oc = avformat_alloc_context(); ts->destination = avr->destination; av_register_all(); ts->oc->oformat = GUESS_FORMAT(NULL, avr->destination, NULL); if (!ts->oc->oformat) ts->oc->oformat = GUESS_FORMAT("mpegts", NULL, NULL); assert( ts->oc->oformat);#if REDIRECT_AV_AUDIO_ENABLED ts->audio_st = av_new_stream(ts->oc, avr->audioCodec->id); { AVCodecContext * c = ts->audio_st->codec; c->codec_id = avr->audioCodec->id; c->codec_type = AVMEDIA_TYPE_AUDIO; /* put sample parameters */ c->sample_fmt = SAMPLE_FMT_S16; c->bit_rate = audioBitRateInBitsPerSec; c->sample_rate = avr->audioSampleRate; c->channels = 2; c->time_base.num = 1; c->time_base.den = 1000; // some formats want stream headers to be separate if (ts->oc->oformat->flags & AVFMT_GLOBALHEADER) c->flags |= CODEC_FLAG_GLOBAL_HEADER; }#endif ts->video_st = av_new_stream(ts->oc, avr->videoCodec->id); { AVCodecContext * c = ts->video_st->codec; c->codec_id = avr->videoCodec->id; c->codec_type = AVMEDIA_TYPE_VIDEO; /* put sample parameters */ c->bit_rate = videoBitrateInBitsPerSec; /* resolution must be a multiple of two */ c->width = width; c->height = height; /* time base: this is the fundamental unit of time (in seconds) in terms of which frame timestamps are represented. for fixed-fps content, timebase should be 1/framerate and timestamp increments should be identically 1. */ c->time_base.den = STREAM_FRAME_RATE; c->time_base.num = 1; c->gop_size = 12; /* emit one intra frame every twelve frames at most */ c->pix_fmt = STREAM_PIX_FMT; if (c->codec_id == CODEC_ID_MPEG2VIDEO) { /* just for testing, we also add B frames */ c->max_b_frames = 2; } if (c->codec_id == CODEC_ID_MPEG1VIDEO) { /* Needed to avoid using macroblocks in which some coeffs overflow. 
This does not happen with normal video, it just happens here as the motion of the chroma plane does not match the luma plane. */ c->mb_decision=2; } // some formats want stream headers to be separate if (ts->oc->oformat->flags & AVFMT_GLOBALHEADER) c->flags |= CODEC_FLAG_GLOBAL_HEADER; } //av_set_pts_info(ts->audio_st, 33, 1, audioBitRateInBitsPerSec);#ifndef AVIO_FLAG_WRITE /* set the output parameters (must be done even if no parameters). */ if (av_set_parameters(ts->oc, NULL) < 0) { fprintf(stderr, "Invalid output format parameters/n"); return NULL; }#endif dump_format(ts->oc, 0, avr->destination, 1); GF_LOG(GF_LOG_INFO, GF_LOG_MODULE, ("[AVRedirect] DUMPING to %s.../n", ts->destination));#if (LIBAVCODEC_VERSION_MAJOR<55) if (avcodec_open(ts->video_st->codec, avr->videoCodec) < 0) {#else if (avcodec_open2(ts->video_st->codec, avr->videoCodec, NULL) < 0) {#endif GF_LOG(GF_LOG_ERROR, GF_LOG_MODULE, ("[AVRedirect] failed to open video codec/n")); return NULL; }#if REDIRECT_AV_AUDIO_ENABLED#if (LIBAVCODEC_VERSION_MAJOR<55) if (avcodec_open(ts->audio_st->codec, avr->audioCodec) < 0) {#else if (avcodec_open2(ts->audio_st->codec, avr->audioCodec, NULL) < 0) {#endif GF_LOG(GF_LOG_ERROR, GF_LOG_MODULE, ("[AVRedirect] failed to open audio codec/n")); return NULL; } ts->audioMx = gf_mx_new("TS_AudioMx");#endif ts->videoMx = gf_mx_new("TS_VideoMx"); ts->tsEncodingThread = gf_th_new("ts_interleave_thread_run"); ts->encode = 1; ts->audioPackets = NULL;//.........这里部分代码省略.........
开发者ID:fcsteagu,项目名称:gpac-1,代码行数:101,
示例8: dc_video_decoder_openint dc_video_decoder_open(VideoInputFile *video_input_file, VideoDataConf *video_data_conf, int mode, int no_loop, int nb_consumers){ s32 ret; u32 i; s32 open_res; AVInputFormat *in_fmt = NULL; AVDictionary *options = NULL; AVCodecContext *codec_ctx; AVCodec *codec; memset(video_input_file, 0, sizeof(VideoInputFile)); if (video_data_conf->width > 0 && video_data_conf->height > 0) { char vres[16]; snprintf(vres, sizeof(vres), "%dx%d", video_data_conf->width, video_data_conf->height); ret = av_dict_set(&options, "video_size", vres, 0); if (ret < 0) { GF_LOG(GF_LOG_ERROR, GF_LOG_DASH, ("Could not set video size %s./n", vres)); return -1; } } if (video_data_conf->framerate > 0) { char vfr[16]; snprintf(vfr, sizeof(vfr), "%d", video_data_conf->framerate); ret = av_dict_set(&options, "framerate", vfr, 0); if (ret < 0) { GF_LOG(GF_LOG_ERROR, GF_LOG_DASH, ("Could not set video framerate %s./n", vfr)); return -1; } } if (strlen(video_data_conf->pixel_format)) { ret = av_dict_set(&options, "pixel_format", video_data_conf->pixel_format, 0); if (ret < 0) { GF_LOG(GF_LOG_ERROR, GF_LOG_DASH, ("Could not set pixel format %s./n", video_data_conf->pixel_format)); return -1; } }#ifndef WIN32 if (strcmp(video_data_conf->v4l2f, "") != 0) { ret = av_dict_set(&options, "input_format", video_data_conf->v4l2f, 0); if (ret < 0) { GF_LOG(GF_LOG_ERROR, GF_LOG_DASH, ("Could not set input format %s./n", video_data_conf->v4l2f)); return -1; } }#endif if (video_data_conf->format && strcmp(video_data_conf->format, "") != 0) { in_fmt = av_find_input_format(video_data_conf->format); if (in_fmt == NULL) { GF_LOG(GF_LOG_ERROR, GF_LOG_DASH, ("Cannot find the format %s./n", video_data_conf->format)); return -1; } } video_input_file->av_fmt_ctx = NULL; if (video_data_conf->demux_buffer_size) { char szBufSize[100]; sprintf(szBufSize, "%d", video_data_conf->demux_buffer_size); ret = av_dict_set(&options, "buffer_size", szBufSize, 0); if (ret < 0) { GF_LOG(GF_LOG_ERROR, 
GF_LOG_DASH, ("Could not set demuxer's input buffer size./n")); return -1; } } /* Open video */ open_res = avformat_open_input(&video_input_file->av_fmt_ctx, video_data_conf->filename, in_fmt, options ? &options : NULL); if ( (open_res < 0) && !stricmp(video_data_conf->filename, "screen-capture-recorder") ) { GF_LOG(GF_LOG_ERROR, GF_LOG_DASH, ("Buggy screen capture input (open failed with code %d), retrying without specifying resolution/n", open_res)); av_dict_set(&options, "video_size", NULL, 0); open_res = avformat_open_input(&video_input_file->av_fmt_ctx, video_data_conf->filename, in_fmt, options ? &options : NULL); } if ( (open_res < 0) && options) { GF_LOG(GF_LOG_ERROR, GF_LOG_DASH, ("Error %d opening input - retrying without options/n", open_res)); av_dict_free(&options); open_res = avformat_open_input(&video_input_file->av_fmt_ctx, video_data_conf->filename, in_fmt, NULL); } if (open_res < 0) { GF_LOG(GF_LOG_ERROR, GF_LOG_DASH, ("Cannot open file %s/n", video_data_conf->filename)); return -1; } /* Retrieve stream information */ if (avformat_find_stream_info(video_input_file->av_fmt_ctx, NULL) < 0) { GF_LOG(GF_LOG_ERROR, GF_LOG_DASH, ("Cannot find stream information/n")); return -1; } av_dump_format(video_input_file->av_fmt_ctx, 0, video_data_conf->filename, 0); /* Find the first video stream */ video_input_file->vstream_idx = -1; for (i = 0; i < video_input_file->av_fmt_ctx->nb_streams; i++) {//.........这里部分代码省略.........
开发者ID:HungMingWu,项目名称:gpac,代码行数:101,
示例9: gf_isom_add_meta_item_extendedGF_Err gf_isom_add_meta_item_extended(GF_ISOFile *file, Bool root_meta, u32 track_num, Bool self_reference, char *resource_path, const char *item_name, u32 item_id, u32 item_type, const char *mime_type, const char *content_encoding, GF_ImageItemProperties *image_props, const char *URL, const char *URN, char *data, u32 data_len, GF_List *item_extent_refs){ u32 i; GF_Err e; GF_ItemLocationEntry *location_entry; GF_ItemInfoEntryBox *infe; GF_MetaBox *meta; u32 lastItemID = 0; if (!self_reference && !resource_path && !data) return GF_BAD_PARAM; e = CanAccessMovie(file, GF_ISOM_OPEN_WRITE); if (e) return e; meta = gf_isom_get_meta(file, root_meta, track_num); if (!meta) { GF_LOG(GF_LOG_ERROR, GF_LOG_CONTAINER, ("Trying to add item, but missing meta box")); return GF_BAD_PARAM; } e = FlushCaptureMode(file); if (e) return e; /*check file exists */ if (!URN && !URL && !self_reference && !data) { FILE *src = gf_fopen(resource_path, "rb"); if (!src) return GF_URL_ERROR; gf_fclose(src); } if (meta->item_infos) { u32 item_count = gf_list_count(meta->item_infos->item_infos); for (i = 0; i < item_count; i++) { GF_ItemInfoEntryBox *e = (GF_ItemInfoEntryBox *)gf_list_get(meta->item_infos->item_infos, i); if (e->item_ID > lastItemID) lastItemID = e->item_ID; if (item_id == e->item_ID) { GF_LOG(GF_LOG_INFO, GF_LOG_CONTAINER, ("[IsoMedia] Item with id %d already exists, ignoring id/n", item_id)); item_id = 0; } } } infe = (GF_ItemInfoEntryBox *)infe_New(); if (item_id) { infe->item_ID = item_id; } else { infe->item_ID = ++lastItemID; } /*get relative name*/ if (item_name) { infe->item_name = gf_strdup(item_name); } else if (resource_path) { if (strrchr(resource_path, GF_PATH_SEPARATOR)) { infe->item_name = gf_strdup(strrchr(resource_path, GF_PATH_SEPARATOR) + 1); } else { infe->item_name = gf_strdup(resource_path); } } infe->item_type = item_type; if (mime_type) { infe->content_type = gf_strdup(mime_type); } else { infe->content_type = 
gf_strdup("application/octet-stream"); } if (content_encoding) infe->content_encoding = gf_strdup(content_encoding); /*Creation of the ItemLocation */ location_entry = (GF_ItemLocationEntry*)gf_malloc(sizeof(GF_ItemLocationEntry)); if (!location_entry) { gf_isom_box_del((GF_Box *)infe); return GF_OUT_OF_MEM; } memset(location_entry, 0, sizeof(GF_ItemLocationEntry)); location_entry->extent_entries = gf_list_new(); /*Creates an mdat if it does not exist*/ if (!file->mdat) { file->mdat = (GF_MediaDataBox *)mdat_New(); gf_list_add(file->TopBoxes, file->mdat); } /*Creation an ItemLocation Box if it does not exist*/ if (!meta->item_locations) meta->item_locations = (GF_ItemLocationBox *)iloc_New(); gf_list_add(meta->item_locations->location_entries, location_entry); location_entry->item_ID = infe->item_ID; if (!meta->item_infos) meta->item_infos = (GF_ItemInfoBox *)iinf_New(); e = gf_list_add(meta->item_infos->item_infos, infe); if (e) return e; if (image_props) {//.........这里部分代码省略.........
开发者ID:Abhinav95,项目名称:ccextractor,代码行数:101,
示例10: dc_video_decoder_readint dc_video_decoder_read(VideoInputFile *video_input_file, VideoInputData *video_input_data, int source_number, int use_source_timing, int is_live_capture, const int *exit_signal_addr){#ifdef DASHCAST_DEBUG_TIME_ struct timeval start, end; long elapsed_time;#endif AVPacket packet; int ret, got_frame, already_locked = 0; AVCodecContext *codec_ctx; VideoDataNode *video_data_node; /* Get a pointer to the codec context for the video stream */ codec_ctx = video_input_file->av_fmt_ctx->streams[video_input_file->vstream_idx]->codec; /* Read frames */ while (1) {#ifdef DASHCAST_DEBUG_TIME_ gf_gettimeofday(&start, NULL);#endif memset(&packet, 0, sizeof(AVPacket)); ret = av_read_frame(video_input_file->av_fmt_ctx, &packet);#ifdef DASHCAST_DEBUG_TIME_ gf_gettimeofday(&end, NULL); elapsed_time = (end.tv_sec * 1000000 + end.tv_usec) - (start.tv_sec * 1000000 + start.tv_usec); fprintf(stdout, "fps: %f/n", 1000000.0/elapsed_time);#endif /* If we demux for the audio thread, send the packet to the audio */ if (video_input_file->av_fmt_ctx_ref_cnt && ((packet.stream_index != video_input_file->vstream_idx) || (ret == AVERROR_EOF))) { AVPacket *packet_copy = NULL; if (ret != AVERROR_EOF) { GF_SAFEALLOC(packet_copy, AVPacket); memcpy(packet_copy, &packet, sizeof(AVPacket)); } assert(video_input_file->av_pkt_list); gf_mx_p(video_input_file->av_pkt_list_mutex); gf_list_add(video_input_file->av_pkt_list, packet_copy); gf_mx_v(video_input_file->av_pkt_list_mutex); if (ret != AVERROR_EOF) { continue; } } if (ret == AVERROR_EOF) { if (video_input_file->mode == LIVE_MEDIA && video_input_file->no_loop == 0) { av_seek_frame(video_input_file->av_fmt_ctx, video_input_file->vstream_idx, 0, 0); av_free_packet(&packet); continue; } dc_producer_lock(&video_input_data->producer, &video_input_data->circular_buf); dc_producer_unlock_previous(&video_input_data->producer, &video_input_data->circular_buf); video_data_node = (VideoDataNode *) 
dc_producer_produce(&video_input_data->producer, &video_input_data->circular_buf); video_data_node->source_number = source_number; /* Flush decoder */ memset(&packet, 0, sizeof(AVPacket));#ifndef FF_API_AVFRAME_LAVC avcodec_get_frame_defaults(video_data_node->vframe);#else av_frame_unref(video_data_node->vframe);#endif avcodec_decode_video2(codec_ctx, video_data_node->vframe, &got_frame, &packet); if (got_frame) { dc_producer_advance(&video_input_data->producer, &video_input_data->circular_buf); return 0; } dc_producer_end_signal(&video_input_data->producer, &video_input_data->circular_buf); dc_producer_unlock(&video_input_data->producer, &video_input_data->circular_buf); return -2; } else if (ret < 0) { GF_LOG(GF_LOG_ERROR, GF_LOG_DASH, ("Cannot read video frame./n")); continue; } /* Is this a packet from the video stream? */ if (packet.stream_index == video_input_file->vstream_idx) { u32 nb_retry = 10; while (!already_locked) { if (dc_producer_lock(&video_input_data->producer, &video_input_data->circular_buf) < 0) { if (!nb_retry) break; gf_sleep(10); nb_retry--; continue; } dc_producer_unlock_previous(&video_input_data->producer, &video_input_data->circular_buf); already_locked = 1; } if (!already_locked) { GF_LOG(GF_LOG_ERROR, GF_LOG_DASH, ("[dashcast] Live system dropped a video frame/n")); continue; } video_data_node = (VideoDataNode *) dc_producer_produce(&video_input_data->producer, &video_input_data->circular_buf); video_data_node->source_number = source_number;//.........这里部分代码省略.........
开发者ID:HungMingWu,项目名称:gpac,代码行数:101,
示例11: gf_isom_extract_meta_item_extendedGF_EXPORTGF_Err gf_isom_extract_meta_item_extended(GF_ISOFile *file, Bool root_meta, u32 track_num, u32 item_id, const char *dump_file_name, char **out_data, u32 *out_size, const char **out_mime){ GF_BitStream *item_bs; char szPath[1024]; GF_ItemExtentEntry *extent_entry; FILE *resource = NULL; u32 i, count; GF_ItemLocationEntry *location_entry; u32 item_num; char *item_name = NULL; GF_MetaBox *meta = gf_isom_get_meta(file, root_meta, track_num); if (!meta || !meta->item_infos || !meta->item_locations) return GF_BAD_PARAM; if (out_mime) *out_mime = NULL; item_num = gf_isom_get_meta_item_by_id(file, root_meta, track_num, item_id); if (item_num) { GF_ItemInfoEntryBox *item_entry = (GF_ItemInfoEntryBox *)gf_list_get(meta->item_infos->item_infos, item_num - 1); item_name = item_entry->item_name; if (out_mime) *out_mime = item_entry->content_type; } location_entry = NULL; count = gf_list_count(meta->item_locations->location_entries); for (i = 0; i<count; i++) { location_entry = (GF_ItemLocationEntry *)gf_list_get(meta->item_locations->location_entries, i); if (location_entry->item_ID == item_id) break; location_entry = NULL; } if (!location_entry) return GF_BAD_PARAM; /*FIXME*/ if (location_entry->data_reference_index) { char *item_url = NULL, *item_urn = NULL; GF_Box *a = (GF_Box *)gf_list_get(meta->file_locations->dref->other_boxes, location_entry->data_reference_index - 1); if (a->type == GF_ISOM_BOX_TYPE_URL) { item_url = ((GF_DataEntryURLBox*)a)->location; } else if (a->type == GF_ISOM_BOX_TYPE_URN) { item_url = ((GF_DataEntryURNBox*)a)->location; item_urn = ((GF_DataEntryURNBox*)a)->nameURN; } GF_LOG(GF_LOG_INFO, GF_LOG_CONTAINER, ("[IsoMedia] Item already outside the ISO file at URL: %s, URN: %s/n", (item_url ? item_url : "N/A"), (item_urn ? 
item_urn : "N/A"))); return GF_OK; } /*don't extract self-reference item*/ count = gf_list_count(location_entry->extent_entries); if (!location_entry->base_offset && (count == 1)) { extent_entry = (GF_ItemExtentEntry *)gf_list_get(location_entry->extent_entries, 0); if (!extent_entry->extent_length#ifndef GPAC_DISABLE_ISOM_WRITE && !extent_entry->original_extent_offset#endif ) return GF_BAD_PARAM; } item_bs = NULL; if (out_data) { item_bs = gf_bs_new(NULL, 0, GF_BITSTREAM_WRITE); } else if (dump_file_name) { strcpy(szPath, dump_file_name); resource = gf_fopen(szPath, "wb"); item_bs = gf_bs_from_file(resource, GF_BITSTREAM_WRITE); } else { if (item_name) strcpy(szPath, item_name); else sprintf(szPath, "item_id%02d", item_id); resource = gf_fopen(szPath, "wb"); item_bs = gf_bs_from_file(resource, GF_BITSTREAM_WRITE); } for (i = 0; i<count; i++) { char buf_cache[4096]; u64 remain; GF_ItemExtentEntry *extent_entry = (GF_ItemExtentEntry *)gf_list_get(location_entry->extent_entries, i); gf_bs_seek(file->movieFileMap->bs, location_entry->base_offset + extent_entry->extent_offset); remain = extent_entry->extent_length; while (remain) { u32 cache_size = (remain>4096) ? 4096 : (u32)remain; gf_bs_read_data(file->movieFileMap->bs, buf_cache, cache_size); gf_bs_write_data(item_bs, buf_cache, cache_size); remain -= cache_size; } } if (out_data) { gf_bs_get_content(item_bs, out_data, out_size); } if (resource) { gf_fclose(resource); } gf_bs_del(item_bs); return GF_OK;}
开发者ID:Abhinav95,项目名称:ccextractor,代码行数:99,
示例12: gf_bifs_enc_sf_fieldGF_Err gf_bifs_enc_sf_field(GF_BifsEncoder *codec, GF_BitStream *bs, GF_Node *node, GF_FieldInfo *field){ GF_Err e; if (node) { e = gf_bifs_enc_quant_field(codec, bs, node, field); if (e != GF_EOS) return e; } switch (field->fieldType) { case GF_SG_VRML_SFBOOL: GF_BIFS_WRITE_INT(codec, bs, * ((SFBool *)field->far_ptr), 1, "SFBool", NULL); break; case GF_SG_VRML_SFCOLOR: BE_WriteSFFloat(codec, ((SFColor *)field->far_ptr)->red, bs, "color.red"); BE_WriteSFFloat(codec, ((SFColor *)field->far_ptr)->green, bs, "color.green"); BE_WriteSFFloat(codec, ((SFColor *)field->far_ptr)->blue, bs, "color.blue"); break; case GF_SG_VRML_SFFLOAT: BE_WriteSFFloat(codec, * ((SFFloat *)field->far_ptr), bs, NULL); break; case GF_SG_VRML_SFINT32: GF_BIFS_WRITE_INT(codec, bs, * ((SFInt32 *)field->far_ptr), 32, "SFInt32", NULL); break; case GF_SG_VRML_SFROTATION: BE_WriteSFFloat(codec, ((SFRotation *)field->far_ptr)->x, bs, "rot.x"); BE_WriteSFFloat(codec, ((SFRotation *)field->far_ptr)->y, bs, "rot.y"); BE_WriteSFFloat(codec, ((SFRotation *)field->far_ptr)->z, bs, "rot.z"); BE_WriteSFFloat(codec, ((SFRotation *)field->far_ptr)->q, bs, "rot.theta"); break; case GF_SG_VRML_SFSTRING: if (node && (node->sgprivate->tag==TAG_MPEG4_CacheTexture) && (field->fieldIndex<=2)) { u32 size, val; char buf[4096]; FILE *f = gf_f64_open(((SFString*)field->far_ptr)->buffer, "rb"); if (!f) return GF_URL_ERROR; gf_f64_seek(f, 0, SEEK_END); size = (u32) gf_f64_tell(f); val = gf_get_bit_size(size); GF_BIFS_WRITE_INT(codec, bs, val, 5, "nbBits", NULL); GF_BIFS_WRITE_INT(codec, bs, size, val, "length", NULL); gf_f64_seek(f, 0, SEEK_SET); while (size) { u32 read = fread(buf, 1, 4096, f); gf_bs_write_data(bs, buf, read); size -= read; } } else { u32 i; char *str = (char *) ((SFString*)field->far_ptr)->buffer; u32 len = str ? 
strlen(str) : 0; u32 val = gf_get_bit_size(len); GF_BIFS_WRITE_INT(codec, bs, val, 5, "nbBits", NULL); GF_BIFS_WRITE_INT(codec, bs, len, val, "length", NULL); for (i=0; i<len; i++) gf_bs_write_int(bs, str[i], 8); GF_LOG(GF_LOG_DEBUG, GF_LOG_CODING, ("[BIFS] string/t/t%d/t/t%s/n", 8*len, str) ); } break; case GF_SG_VRML_SFTIME: gf_bs_write_double(bs, *((SFTime *)field->far_ptr)); GF_LOG(GF_LOG_DEBUG, GF_LOG_CODING, ("[BIFS] SFTime/t/t%d/t/t%g/n", 64, *((SFTime *)field->far_ptr))); break; case GF_SG_VRML_SFVEC2F: BE_WriteSFFloat(codec, ((SFVec2f *)field->far_ptr)->x, bs, "vec2f.x"); BE_WriteSFFloat(codec, ((SFVec2f *)field->far_ptr)->y, bs, "vec2f.y"); break; case GF_SG_VRML_SFVEC3F: BE_WriteSFFloat(codec, ((SFVec3f *)field->far_ptr)->x, bs, "vec3f.x"); BE_WriteSFFloat(codec, ((SFVec3f *)field->far_ptr)->y, bs, "vec3f.y"); BE_WriteSFFloat(codec, ((SFVec3f *)field->far_ptr)->z, bs, "vec3f.z"); break; case GF_SG_VRML_SFURL: { SFURL *url = (SFURL *) field->far_ptr; GF_BIFS_WRITE_INT(codec, bs, (url->OD_ID>0) ? 1 : 0, 1, "hasODID", "SFURL"); if (url->OD_ID>0) { GF_BIFS_WRITE_INT(codec, bs, url->OD_ID, 10, "ODID", "SFURL"); } else { u32 i; u32 len = url->url ? strlen(url->url) : 0; u32 val = gf_get_bit_size(len); GF_BIFS_WRITE_INT(codec, bs, val, 5, "nbBits", NULL); GF_BIFS_WRITE_INT(codec, bs, len, val, "length", NULL); for (i=0; i<len; i++) gf_bs_write_int(bs, url->url[i], 8); GF_LOG(GF_LOG_DEBUG, GF_LOG_CODING, ("[BIFS] string/t/t%d/t/t%s/t/t//SFURL/n", 8*len, url->url)); } } break; case GF_SG_VRML_SFIMAGE: { u32 size, i; SFImage *img = (SFImage *)field->far_ptr; GF_BIFS_WRITE_INT(codec, bs, img->width, 12, "width", "SFImage"); GF_BIFS_WRITE_INT(codec, bs, img->height, 12, "height", "SFImage"); GF_BIFS_WRITE_INT(codec, bs, img->numComponents - 1, 2, "nbComp", "SFImage"); size = img->width * img->height * img->numComponents;//.........这里部分代码省略.........
开发者ID:golgol7777,项目名称:gpac,代码行数:101,
示例13: gf_sc_texture_update_frameGF_EXPORTvoid gf_sc_texture_update_frame(GF_TextureHandler *txh, Bool disable_resync){ Bool needs_reload = 0; u32 size, ts; s32 ms_until_pres, ms_until_next; /*already refreshed*/ if ((txh->stream_finished && txh->tx_io) || txh->needs_refresh) return; if (!txh->stream) { txh->data = NULL; return; } /*should never happen!!*/ if (txh->needs_release) gf_mo_release_data(txh->stream, 0xFFFFFFFF, 0); /*check init flag*/ if (!(gf_mo_get_flags(txh->stream) & GF_MO_IS_INIT)) { needs_reload = 1; txh->data = NULL; if (txh->tx_io) { gf_sc_texture_release(txh); } } txh->data = gf_mo_fetch_data(txh->stream, disable_resync ? GF_MO_FETCH : GF_MO_FETCH_RESYNC, &txh->stream_finished, &ts, &size, &ms_until_pres, &ms_until_next); if (!(gf_mo_get_flags(txh->stream) & GF_MO_IS_INIT)) { needs_reload = 1; } else if (size && txh->size && (size != txh->size)) { needs_reload = 1; } if (needs_reload) { /*if we had a texture this means the object has changed - delete texture and resetup. 
Do not skip texture update as this may lead to an empty rendering pass (blank frame for this object), especially in DASH*/ if (txh->tx_io) { gf_sc_texture_release(txh); txh->needs_refresh = 1; } if (gf_mo_is_private_media(txh->stream)) { setup_texture_object(txh, 1); gf_node_dirty_set(txh->owner, 0, 0); } } /*if no frame or muted don't draw*/ if (!txh->data || !size) { GF_LOG(GF_LOG_INFO, GF_LOG_COMPOSE, ("[Visual Texture] No output frame available /n")); /*TODO - check if this is needed */ if (txh->flags & GF_SR_TEXTURE_PRIVATE_MEDIA) { //txh->needs_refresh = 1; gf_sc_invalidate(txh->compositor, NULL); } return; } if (txh->compositor->frame_delay > ms_until_pres) txh->compositor->frame_delay = ms_until_pres; /*if setup and same frame return*/ if (txh->tx_io && (txh->stream_finished || (txh->last_frame_time==ts)) ) { gf_mo_release_data(txh->stream, 0xFFFFFFFF, 0); txh->needs_release = 0; if (!txh->stream_finished) { GF_LOG(GF_LOG_DEBUG, GF_LOG_COMPOSE, ("[Visual Texture] Same frame fetched (TS %d)/n", ts)); if (txh->compositor->ms_until_next_frame > ms_until_next) txh->compositor->ms_until_next_frame = ms_until_next; } return; } txh->stream_finished = 0; txh->needs_release = 1; txh->last_frame_time = ts; txh->size = size; if (txh->raw_memory) { gf_mo_get_raw_image_planes(txh->stream, (u8 **) &txh->data, (u8 **) &txh->pU, (u8 **) &txh->pV); } if (gf_mo_is_muted(txh->stream)) return; if (txh->nb_frames) { s32 push_delay = txh->upload_time / txh->nb_frames; if (push_delay > ms_until_pres) ms_until_pres = 0; else ms_until_pres -= push_delay; } if (txh->compositor->ms_until_next_frame > ms_until_next) txh->compositor->ms_until_next_frame = ms_until_next; if (!txh->tx_io) { setup_texture_object(txh, 0); } /*try to push texture on graphics but don't complain if failure*/ gf_sc_texture_set_data(txh); txh->needs_refresh = 1; gf_sc_invalidate(txh->compositor, NULL);//.........这里部分代码省略.........
开发者ID:JamesLinus,项目名称:gpac,代码行数:101,
示例14: text_Read/*this is a quicktime specific box - see apple documentation*/GF_Err text_Read(GF_Box *s, GF_BitStream *bs){ u16 pSize; GF_TextSampleEntryBox *ptr = (GF_TextSampleEntryBox*)s; gf_bs_read_data(bs, ptr->reserved, 6); ptr->dataReferenceIndex = gf_bs_read_u16(bs); ptr->displayFlags = gf_bs_read_u32(bs); /*Display flags*/ ptr->textJustification = gf_bs_read_u32(bs); /*Text justification*/ gf_bs_read_data(bs, ptr->background_color, 6); /*Background color*/ gpp_read_box(bs, &ptr->default_box); /*Default text box*/ gf_bs_read_data(bs, ptr->reserved1, 8); /*Reserved*/ ptr->fontNumber = gf_bs_read_u16(bs); /*Font number*/ ptr->fontFace = gf_bs_read_u16(bs); /*Font face*/ ptr->reserved2 = gf_bs_read_u8(bs); /*Reserved*/ ptr->reserved3 = gf_bs_read_u16(bs); /*Reserved*/ gf_bs_read_data(bs, ptr->foreground_color, 6); /*Foreground color*/ if (ptr->size < 51) return GF_ISOM_INVALID_FILE; ptr->size -= 51; if (!ptr->size) return GF_OK; /*ffmpeg compatibility with iPod streams: no pascal string*/ pSize = gf_bs_read_u8(bs); /*a Pascal string begins with its size: get textName size*/ ptr->size -= 1; if (ptr->size < pSize) { u32 s = pSize; size_t i = 0; GF_LOG(GF_LOG_DEBUG, GF_LOG_CONTAINER, ("[iso file] text box doesn't use a Pascal string: trying to decode anyway./n")); ptr->textName = (char*)gf_malloc((u32) ptr->size + 1 + 1); do { char c = (char)s; if (c == '/0') { break; } else if ((c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z')) { ptr->textName[i] = c; } else { gf_free(ptr->textName); ptr->textName = NULL; GF_LOG(GF_LOG_WARNING, GF_LOG_CONTAINER, ("[iso file] text box doesn't use a Pascal string and contains non-chars. 
Abort./n")); return GF_ISOM_INVALID_FILE; } i++; if (!ptr->size) break; ptr->size--; s = gf_bs_read_u8(bs); } while (s); ptr->textName[i] = '/0'; /*Font name*/ GF_LOG(GF_LOG_WARNING, GF_LOG_CONTAINER, ("[iso file] text box doesn't use a Pascal string: /"%s/" detected./n", ptr->textName)); return GF_OK; } if (pSize) { ptr->textName = (char*) gf_malloc(pSize+1 * sizeof(char)); if (gf_bs_read_data(bs, ptr->textName, pSize) != pSize) { gf_free(ptr->textName); ptr->textName = NULL; return GF_ISOM_INVALID_FILE; } ptr->textName[pSize] = '/0'; /*Font name*/ } ptr->size -= pSize; return GF_OK;}
开发者ID:Brilon314,项目名称:gpac,代码行数:67,
示例15: term_on_media_addstatic void term_on_media_add(GF_ClientService *service, GF_Descriptor *media_desc, Bool no_scene_check){ u32 i, min_od_id; GF_MediaObject *the_mo; GF_Scene *scene; GF_ObjectManager *odm, *root; GF_ObjectDescriptor *od; GF_Terminal *term = service->term; root = service->owner; if (!root) { GF_LOG(GF_LOG_ERROR, GF_LOG_MEDIA, ("[Service %s] has not root, aborting !/n", service->url)); return; } if (root->flags & GF_ODM_DESTROYED) { GF_LOG(GF_LOG_ERROR, GF_LOG_MEDIA, ("[Service %s] root has been scheduled for destruction - aborting !/n", service->url)); return; } scene = root->subscene ? root->subscene : root->parentscene; if (scene->root_od->addon && (scene->root_od->addon->addon_type == GF_ADDON_TYPE_MAIN)) { no_scene_check = 1; scene->root_od->flags |= GF_ODM_REGENERATE_SCENE; } GF_LOG(GF_LOG_DEBUG, GF_LOG_MEDIA, ("[Service %s] %s/n", service->url, media_desc ? "Adding new media object" : "Regenerating scene graph")); if (!media_desc) { if (!no_scene_check) gf_scene_regenerate(scene); return; } switch (media_desc->tag) { case GF_ODF_OD_TAG: case GF_ODF_IOD_TAG: if (root && (root->net_service == service)) { od = (GF_ObjectDescriptor *) media_desc; break; } default: gf_odf_desc_del(media_desc); return; } gf_term_lock_net(term, 1); /*object declared this way are not part of an OD stream and are considered as dynamic*/ /* od->objectDescriptorID = GF_MEDIA_EXTERNAL_ID; */ /*check if we have a mediaObject in the scene not attached and matching this object*/ the_mo = NULL; odm = NULL; min_od_id = 0; for (i=0; i<gf_list_count(scene->scene_objects); i++) { char *frag, *ext; GF_ESD *esd; char *url; u32 match_esid = 0; GF_MediaObject *mo = gf_list_get(scene->scene_objects, i); if ((mo->OD_ID != GF_MEDIA_EXTERNAL_ID) && (min_od_id<mo->OD_ID)) min_od_id = mo->OD_ID; if (!mo->odm) continue; /*if object is attached to a service, don't bother looking in a different one*/ if (mo->odm->net_service && (mo->odm->net_service != service)) continue; /*already 
assigned object - this may happen since the compositor has no control on when objects are declared by the service, therefore opening file#video and file#audio may result in the objects being declared twice if the service doesn't keep track of declared objects*/ if (mo->odm->OD) { if (od->objectDescriptorID && is_same_od(mo->odm->OD, od)) { /*reassign OD ID*/ if (mo->OD_ID != GF_MEDIA_EXTERNAL_ID) { od->objectDescriptorID = mo->OD_ID; } else { mo->OD_ID = od->objectDescriptorID; } gf_odf_desc_del(media_desc); gf_term_lock_net(term, 0); return; } continue; } if (mo->OD_ID != GF_MEDIA_EXTERNAL_ID) { if (mo->OD_ID == od->objectDescriptorID) { the_mo = mo; odm = mo->odm; break; } continue; } if (!mo->URLs.count || !mo->URLs.vals[0].url) continue; frag = NULL; ext = strrchr(mo->URLs.vals[0].url, '#'); if (ext) { frag = strchr(ext, '='); ext[0] = 0; } url = mo->URLs.vals[0].url; if (!strnicmp(url, "file://localhost", 16)) url += 16;//.........这里部分代码省略.........
开发者ID:ezdev128,项目名称:gpac,代码行数:101,
示例16: FFD_ConnectServicestatic GF_Err FFD_ConnectService(GF_InputService *plug, GF_ClientService *serv, const char *url){ GF_Err e; s64 last_aud_pts; u32 i; s32 res; Bool is_local; const char *sOpt; char *ext, szName[1024]; FFDemux *ffd = plug->priv; AVInputFormat *av_in = NULL; char szExt[20]; if (ffd->ctx) return GF_SERVICE_ERROR; assert( url && strlen(url) < 1024); strcpy(szName, url); ext = strrchr(szName, '#'); ffd->service_type = 0; e = GF_NOT_SUPPORTED; ffd->service = serv; if (ext) { if (!stricmp(&ext[1], "video")) ffd->service_type = 1; else if (!stricmp(&ext[1], "audio")) ffd->service_type = 2; ext[0] = 0; } /*some extensions not supported by ffmpeg, overload input format*/ ext = strrchr(szName, '.'); strcpy(szExt, ext ? ext+1 : ""); strlwr(szExt); if (!strcmp(szExt, "cmp")) av_in = av_find_input_format("m4v"); is_local = (strnicmp(url, "file://", 7) && strstr(url, "://")) ? 0 : 1; GF_LOG(GF_LOG_DEBUG, GF_LOG_CONTAINER, ("[FFMPEG] opening file %s - local %d - av_in %08x/n", url, is_local, av_in)); if (!is_local) { AVProbeData pd; /*setup wraper for FFMPEG I/O*/ ffd->buffer_size = 8192; sOpt = gf_modules_get_option((GF_BaseInterface *)plug, "FFMPEG", "IOBufferSize"); if (sOpt) ffd->buffer_size = atoi(sOpt); ffd->buffer = gf_malloc(sizeof(char)*ffd->buffer_size);#ifdef FFMPEG_DUMP_REMOTE ffd->outdbg = gf_f64_open("ffdeb.raw", "wb");#endif#ifdef USE_PRE_0_7 init_put_byte(&ffd->io, ffd->buffer, ffd->buffer_size, 0, ffd, ff_url_read, NULL, NULL); ffd->io.is_streamed = 1;#else ffd->io.seekable = 1;#endif ffd->dnload = gf_term_download_new(ffd->service, url, GF_NETIO_SESSION_NOT_THREADED | GF_NETIO_SESSION_NOT_CACHED, NULL, ffd); if (!ffd->dnload) return GF_URL_ERROR; while (1) { u32 read; e = gf_dm_sess_fetch_data(ffd->dnload, ffd->buffer + ffd->buffer_used, ffd->buffer_size - ffd->buffer_used, &read); if (e==GF_EOS) break; /*we're sync!!*/ if (e==GF_IP_NETWORK_EMPTY) continue; if (e) goto err_exit; ffd->buffer_used += read; if (ffd->buffer_used == 
ffd->buffer_size) break; } if (e==GF_EOS) { const char *cache_file = gf_dm_sess_get_cache_name(ffd->dnload); res = open_file(&ffd->ctx, cache_file, av_in); } else { pd.filename = szName; pd.buf_size = ffd->buffer_used; pd.buf = ffd->buffer; av_in = av_probe_input_format(&pd, 1); if (!av_in) { GF_LOG(GF_LOG_ERROR, GF_LOG_CONTAINER, ("[FFMPEG] error probing file %s - probe start with %c %c %c %c/n", url, ffd->buffer[0], ffd->buffer[1], ffd->buffer[2], ffd->buffer[3])); return GF_NOT_SUPPORTED; } /*setup downloader*/ av_in->flags |= AVFMT_NOFILE;#if FF_API_FORMAT_PARAMETERS /*commit ffmpeg 603b8bc2a109978c8499b06d2556f1433306eca7*/ res = avformat_open_input(&ffd->ctx, szName, av_in, NULL);#else res = av_open_input_stream(&ffd->ctx, &ffd->io, szName, av_in, NULL);#endif } } else { res = open_file(&ffd->ctx, szName, av_in); } switch (res) {#ifndef _WIN32_WCE case 0: e = GF_OK; break; case AVERROR_IO: e = GF_URL_ERROR; goto err_exit; case AVERROR_INVALIDDATA: e = GF_NON_COMPLIANT_BITSTREAM; goto err_exit; case AVERROR_NOMEM: e = GF_OUT_OF_MEM; goto err_exit; case AVERROR_NOFMT: e = GF_NOT_SUPPORTED; goto err_exit;#endif//.........这里部分代码省略.........
开发者ID:bigbensk,项目名称:gpac,代码行数:101,
示例17: term_on_connectstatic void term_on_connect(GF_ClientService *service, LPNETCHANNEL netch, GF_Err err){ GF_Channel *ch; GF_ObjectManager *root; GF_Terminal *term = service->term; GF_LOG(GF_LOG_DEBUG, GF_LOG_MEDIA, ("[Terminal] %s connection ACK received from %s - %s/n", netch ? "Channel" : "Service", service->url, gf_error_to_string(err) )); root = service->owner; if (root && (root->net_service != service)) { gf_term_message(term, service->url, "Incompatible module type", GF_SERVICE_ERROR); return; } /*this is service connection*/ if (!netch) { gf_term_service_media_event(service->owner, GF_EVENT_MEDIA_SETUP_DONE); if (err) { char msg[5000]; snprintf(msg, sizeof(msg), "Cannot open %s", service->url); gf_term_message(term, service->url, msg, err); gf_term_service_media_event(service->owner, GF_EVENT_ERROR); /*destroy service only if attached*/ if (root) { gf_term_lock_media_queue(term, 1); //notify before disconnecting if (root->subscene) gf_scene_notify_event(root->subscene, GF_EVENT_SCENE_ATTACHED, NULL, NULL, err, GF_FALSE); service->ifce->CloseService(service->ifce); root->net_service = NULL; if (service->owner && service->nb_odm_users) service->nb_odm_users--; service->owner = NULL; /*depends on module: some module could forget to call gf_service_disconnect_ack */ if ( gf_list_del_item(term->net_services, service) >= 0) { /*and queue for destroy*/ gf_list_add(term->net_services_to_remove, service); } gf_term_lock_media_queue(term, 0); if (!root->parentscene) { GF_Event evt; evt.type = GF_EVENT_CONNECT; evt.connect.is_connected = 0; gf_term_send_event(term, &evt); } else { /*try to reinsert OD for VRML/X3D with multiple URLs: 1- first remove from parent scene without destroying object, this will trigger a re-setup if other URLs are present 2- then destroy object*/ gf_scene_remove_object(root->parentscene, root, 0); gf_odm_disconnect(root, 1); } return; } } if (!root) { /*channel service connect*/ u32 i; GF_ChannelSetup *cs; GF_List *ODs; if 
(!gf_list_count(term->channels_pending)) { return; } ODs = gf_list_new(); gf_term_lock_net(term, 1); i=0; while ((cs = (GF_ChannelSetup*)gf_list_enum(term->channels_pending, &i))) { if (cs->ch->service != service) continue; gf_list_rem(term->channels_pending, i-1); i--; /*even if error do setup (channel needs to be deleted)*/ if (gf_odm_post_es_setup(cs->ch, cs->dec, err) == GF_OK) { if (cs->ch->odm && (gf_list_find(ODs, cs->ch->odm)==-1) ) gf_list_add(ODs, cs->ch->odm); } gf_free(cs); } gf_term_lock_net(term, 0); /*finally setup all ODs concerned (we do this later in case of scalability)*/ while (gf_list_count(ODs)) { GF_ObjectManager *odm = (GF_ObjectManager*)gf_list_get(ODs, 0); gf_list_rem(ODs, 0); /*force re-setup*/ gf_scene_setup_object(odm->parentscene, odm); } gf_list_del(ODs); } else { /*setup od*/ gf_odm_setup_entry_point(root, service->url); } /*load cache if requested*/ if (!err && term->enable_cache) { err = gf_term_service_cache_load(service); /*not a fatal error*/ if (err) gf_term_message(term, "GPAC Cache", "Cannot load cache", err); } return;//.........这里部分代码省略.........
开发者ID:ezdev128,项目名称:gpac,代码行数:101,
示例18: ts_interleave_thread_runstatic u32 ts_interleave_thread_run(void *param) { GF_AbstractTSMuxer * mux = (GF_AbstractTSMuxer *) param; AVStream * video_st = mux->video_st; AVStream * audio_st = mux->audio_st; u64 audio_pts, video_pts; u64 audioSize, videoSize, videoKbps, audioKbps; u32 pass; u32 now, start; /* open the output file, if needed */ if (!(mux->oc->oformat->flags & AVFMT_NOFILE)) { if (url_fopen(&mux->oc->pb, mux->destination, URL_WRONLY) < 0) { fprintf(stderr, "Could not open '%s'/n", mux->destination); return 0; } } /* write the stream header, if any */ av_write_header(mux->oc); audio_pts = video_pts = 0; // Buffering... gf_sleep(1000); now = start = gf_sys_clock(); audioSize = videoSize = 0; audioKbps = videoKbps = 0; pass = 0; while ( mux->encode) { pass++; if (0== (pass%16)) { now = gf_sys_clock(); if (now - start > 1000) { videoKbps = videoSize * 8000 / (now-start) / 1024; audioKbps = audioSize * 8000 / (now-start) / 1024; audioSize = videoSize = 0; start = now; GF_LOG(GF_LOG_DEBUG, GF_LOG_MODULE, ("/rPTS audio="LLU" ("LLU"kbps), video="LLU" ("LLU"kbps)", audio_pts, audioKbps, video_pts, videoKbps)); } } /* write interleaved audio and video frames */ if (!video_st || (audio_pts == AV_NOPTS_VALUE && has_packet_ready(mux, mux->audioMx, &mux->audioPackets)) || ((audio_st && audio_pts < video_pts && audio_pts!= AV_NOPTS_VALUE))) { AVPacketList * pl = wait_for_packet(mux, mux->audioMx, &mux->audioPackets); if (!pl) goto exit; audio_pts = pl->pkt.pts ; audioSize+=pl->pkt.size; if (pl->pkt.pts == AV_NOPTS_VALUE) { pl->pkt.pts = 0; } if (av_interleaved_write_frame(mux->oc, &(pl->pkt)) < 0) { GF_LOG(GF_LOG_ERROR, GF_LOG_MODULE, ("[AVRedirect] : failed to write audio interleaved frame audio_pts="LLU", video_pts="LLU"/n", audio_pts, video_pts)); } gf_free(pl); } else { AVPacketList * pl = wait_for_packet(mux, mux->videoMx, &mux->videoPackets); if (!pl) goto exit; video_pts = pl->pkt.pts; /* write the compressed frame in the media file */ if (0 && audio_pts 
!= AV_NOPTS_VALUE && audio_pts > video_pts && pl->next) { u32 skipped = 0; u64 first = video_pts; /* We may be too slow... */ gf_mx_p(mux->videoMx); while (video_pts < audio_pts && pl->next) { AVPacketList * old = pl; // We skip frames... pl = pl->next; video_pts = pl->pkt.pts; skipped++; gf_free(old); } mux->videoPackets = pl->next; gf_mx_v(mux->videoMx); if (skipped > 0) GF_LOG(GF_LOG_INFO, GF_LOG_MODULE, ("Skipped %u video frames, frame was "LLU", but is now "LLU"/n", skipped, first, video_pts)); } videoSize+=pl->pkt.size; video_pts = pl->pkt.pts; // * video_st->time_base.num / video_st->time_base.den; assert( video_pts); if (av_interleaved_write_frame(mux->oc, &(pl->pkt)) < 0) { GF_LOG(GF_LOG_ERROR, GF_LOG_MODULE, ("[AVRedirect] : failed to write video interleaved frame audio_pts="LLU", video_pts="LLU"/n", audio_pts, video_pts)); } gf_free(pl); } gf_sleep(1); }exit: GF_LOG(GF_LOG_INFO, GF_LOG_MODULE, ("[AVRedirect] Ending TS thread.../n")); av_write_trailer(mux->oc); if (!(mux->oc->oformat->flags & AVFMT_NOFILE)) { /* close the output file */ url_fclose(mux->oc->pb); } return 0;}
开发者ID:fcsteagu,项目名称:gpac-1,代码行数:95,
示例19: term_on_commandstatic void term_on_command(GF_ClientService *service, GF_NetworkCommand *com, GF_Err response){ GF_Channel *ch; GF_Terminal *term = service->term; if (com->command_type==GF_NET_BUFFER_QUERY) { GF_Scene *scene; u32 i, max_buffer_time; GF_ObjectManager *odm; com->buffer.max = 0; com->buffer.min = com->buffer.occupancy = (u32) -1; com->buffer.buffering = GF_FALSE; if (!service->owner) { com->buffer.occupancy = 0; return; } /*browse all channels in the scene, running on this service, and get buffer info*/ scene = NULL; if (service->owner->subscene) { scene = service->owner->subscene; } else if (service->owner->parentscene) { scene = service->owner->parentscene; } if (!scene) { com->buffer.occupancy = 0; return; } /*get exclusive access to scene resources , to make sure ODs are not being inserted/remove*/ gf_mx_p(scene->mx_resources); max_buffer_time=0; if (!gf_list_count(scene->resources)) GF_LOG(GF_LOG_WARNING, GF_LOG_MEDIA, ("[ODM] No object manager found for the scene (URL: %s), buffer occupancy will remain unchanged/n", service->url)); i=0; while ((odm = (GF_ObjectManager*)gf_list_enum(scene->resources, &i))) { gather_buffer_level(odm, service, com, &max_buffer_time); } gf_mx_v(scene->mx_resources); if (com->buffer.occupancy==(u32) -1) com->buffer.occupancy = 0; //in bench mode return the 1 if one of the buffer is full (eg sleep until all buffers are not full), 0 otherwise if (term->bench_mode) { com->buffer.occupancy = (max_buffer_time>com->buffer.max) ? 
2 : 0; com->buffer.max = 1; com->buffer.min = 0; } return; } if (com->command_type==GF_NET_SERVICE_INFO) { GF_Event evt; evt.type = GF_EVENT_METADATA; gf_term_send_event(term, &evt); return; } if (com->command_type==GF_NET_SERVICE_MEDIA_CAP_QUERY) { gf_sc_get_av_caps(term->compositor, &com->mcaps.width, &com->mcaps.height, &com->mcaps.display_bit_depth, &com->mcaps.audio_bpp, &com->mcaps.channels, &com->mcaps.sample_rate); return; } if (com->command_type==GF_NET_SERVICE_EVENT) { /*check for UDP timeout*/ if (com->send_event.evt.message.error == GF_IP_UDP_TIMEOUT) { const char *sOpt = gf_cfg_get_key(term->user->config, "Network", "AutoReconfigUDP"); if (sOpt && !stricmp(sOpt, "yes")) { char szMsg[1024]; sprintf(szMsg, "!! UDP down (%s) - Retrying with TCP !!/n", com->send_event.evt.message.message); gf_term_message(term, service->url, szMsg, GF_IP_NETWORK_FAILURE); /*reload scene - FIXME this shall work on inline nodes, not on the root !*/ if (term->reload_url) gf_free(term->reload_url); term->reload_state = 1; term->reload_url = gf_strdup(term->root_scene->root_od->net_service->url); gf_cfg_set_key(term->user->config, "Network", "UDPNotAvailable", "yes"); return; } } com->send_event.res = 0; gf_term_send_event(term, &com->send_event.evt); return; } if (com->command_type==GF_NET_ASSOCIATED_CONTENT_LOCATION) { GF_Scene *scene = NULL; if (service->owner->subscene) { scene = service->owner->subscene; } else if (service->owner->parentscene) { scene = service->owner->parentscene; } if (scene) gf_scene_register_associated_media(scene, &com->addon_info); return; } if (com->command_type==GF_NET_ASSOCIATED_CONTENT_TIMING) { GF_Scene *scene = NULL; if (service->owner->subscene) { scene = service->owner->subscene; } else if (service->owner->parentscene) { scene = service->owner->parentscene;//.........这里部分代码省略.........
开发者ID:ezdev128,项目名称:gpac,代码行数:101,
示例20: svg_drawable_3d_pickvoid svg_drawable_3d_pick(Drawable *drawable, GF_TraverseState *tr_state, DrawAspect2D *asp) { SFVec3f local_pt, world_pt, vdiff; SFVec3f hit_normal; SFVec2f text_coords; u32 i, count; Fixed sqdist; Bool node_is_over; GF_Compositor *compositor; GF_Matrix mx; GF_Ray r; compositor = tr_state->visual->compositor; node_is_over = 0; r = tr_state->ray; gf_mx_copy(mx, tr_state->model_matrix); gf_mx_inverse(&mx); gf_mx_apply_ray(&mx, &r); /*if we already have a hit point don't check anything below...*/ if (compositor->hit_square_dist && !compositor->grabbed_sensor && !tr_state->layer3d) { GF_Plane p; GF_BBox box; SFVec3f hit = compositor->hit_world_point; gf_mx_apply_vec(&mx, &hit); p.normal = r.dir; p.d = -1 * gf_vec_dot(p.normal, hit); gf_bbox_from_rect(&box, &drawable->path->bbox); if (gf_bbox_plane_relation(&box, &p) == GF_BBOX_FRONT) { GF_LOG(GF_LOG_DEBUG, GF_LOG_COMPOSE, ("[SVG Picking] bounding box of node %s (DEF %s) below current hit point - skipping/n", gf_node_get_class_name(drawable->node), gf_node_get_name(drawable->node))); return; } } node_is_over = 0; if (compositor_get_2d_plane_intersection(&r, &local_pt)) { node_is_over = svg_drawable_is_over(drawable, local_pt.x, local_pt.y, asp, tr_state, NULL); } if (!node_is_over) return; hit_normal.x = hit_normal.y = 0; hit_normal.z = FIX_ONE; text_coords.x = gf_divfix(local_pt.x, drawable->path->bbox.width) + FIX_ONE/2; text_coords.y = gf_divfix(local_pt.y, drawable->path->bbox.height) + FIX_ONE/2; /*check distance from user and keep the closest hitpoint*/ world_pt = local_pt; gf_mx_apply_vec(&tr_state->model_matrix, &world_pt); for (i=0; i<tr_state->num_clip_planes; i++) { if (gf_plane_get_distance(&tr_state->clip_planes[i], &world_pt) < 0) { GF_LOG(GF_LOG_DEBUG, GF_LOG_COMPOSE, ("[SVG Picking] node %s (def %s) is not in clipper half space/n", gf_node_get_class_name(drawable->node), gf_node_get_name(drawable->node))); return; } } gf_vec_diff(vdiff, world_pt, tr_state->ray.orig); sqdist = 
gf_vec_lensq(vdiff); if (compositor->hit_square_dist && (compositor->hit_square_dist+FIX_EPSILON<sqdist)) { GF_LOG(GF_LOG_DEBUG, GF_LOG_COMPOSE, ("[SVG Picking] node %s (def %s) is farther (%g) than current pick (%g)/n", gf_node_get_class_name(drawable->node), gf_node_get_name(drawable->node), FIX2FLT(sqdist), FIX2FLT(compositor->hit_square_dist))); return; } compositor->hit_square_dist = sqdist; /*also stack any VRML sensors present at the current level. If the event is not catched by a listener in the SVG tree, the event will be forwarded to the VRML tree*/ gf_list_reset(compositor->sensors); count = gf_list_count(tr_state->vrml_sensors); for (i=0; i<count; i++) { gf_list_add(compositor->sensors, gf_list_get(tr_state->vrml_sensors, i)); } gf_mx_copy(compositor->hit_world_to_local, tr_state->model_matrix); gf_mx_copy(compositor->hit_local_to_world, mx); compositor->hit_local_point = local_pt; compositor->hit_world_point = world_pt; compositor->hit_world_ray = tr_state->ray; compositor->hit_normal = hit_normal; compositor->hit_texcoords = text_coords; svg_clone_use_stack(compositor, tr_state); /*not use in SVG patterns*/ compositor->hit_appear = NULL; compositor->hit_node = drawable->node; compositor->hit_text = NULL; compositor->hit_use_dom_events = 1; GF_LOG(GF_LOG_DEBUG, GF_LOG_COMPOSE, ("[SVG Picking] node %s (def %s) is under mouse - hit %g %g %g/n", gf_node_get_class_name(drawable->node), gf_node_get_name(drawable->node), FIX2FLT(world_pt.x), FIX2FLT(world_pt.y), FIX2FLT(world_pt.z)));}
开发者ID:bigbensk,项目名称:gpac,代码行数:92,
示例21: memsetstatic GF_InputService *gf_term_can_handle_service(GF_Terminal *term, const char *url, const char *parent_url, Bool no_mime_check, char **out_url, GF_Err *ret_code, GF_DownloadSession **the_session, char **out_mime_type){ u32 i; GF_Err e; char *sURL, *qm, *frag, *ext, *mime_type, *url_res; char szExt[50]; const char *force_module = NULL; GF_InputService *ifce; Bool skip_mime = 0; memset(szExt, 0, sizeof(szExt)); (*ret_code) = GF_OK; ifce = NULL; mime_type = NULL; GF_LOG(GF_LOG_DEBUG, GF_LOG_MEDIA, ("[Terminal] Looking for plugin for URL %s/n", url)); *out_url = NULL; *out_mime_type = NULL; sURL = NULL; if (!url || !strncmp(url, "////", 2) ) { (*ret_code) = GF_URL_ERROR; goto exit; } if (!strnicmp(url, "libplayer://", 12)) { force_module = "LibPlayer"; } /*used by GUIs scripts to skip URL concatenation*/ if (!strncmp(url, "gpac://", 7)) sURL = gf_strdup(url+7); /*opera-style localhost URLs*/ else if (!strncmp(url, "file://localhost", 16)) sURL = gf_strdup(url+16); else if (parent_url) sURL = gf_url_concatenate(parent_url, url); /*path absolute*/ if (!sURL) sURL = gf_strdup(url); if (gf_url_is_local(sURL)) gf_url_to_fs_path(sURL); if (the_session) *the_session = NULL; if (no_mime_check) { mime_type = NULL; } else { /*fetch a mime type if any. 
If error don't even attempt to open the service */ mime_type = get_mime_type(term, sURL, &e, the_session); if (e) { (*ret_code) = e; goto exit; } } if (mime_type && (!stricmp(mime_type, "text/plain") || !stricmp(mime_type, "video/quicktime") || !stricmp(mime_type, "application/octet-stream") ) ) { skip_mime = 1; } ifce = NULL; /*load from mime type*/ if (mime_type && !skip_mime) { const char *sPlug = gf_cfg_get_key(term->user->config, "MimeTypes", mime_type); GF_LOG(GF_LOG_DEBUG, GF_LOG_MEDIA, ("[Terminal] Mime type found: %s/n", mime_type)); if (!sPlug) { *out_mime_type = mime_type; mime_type=NULL; } if (sPlug) sPlug = strrchr(sPlug, '"'); if (sPlug) { sPlug += 2; GF_LOG(GF_LOG_DEBUG, GF_LOG_DASH, ("%s:%d FOUND matching module %s/n", __FILE__, __LINE__, sPlug)); ifce = (GF_InputService *) gf_modules_load_interface_by_name(term->user->modules, sPlug, GF_NET_CLIENT_INTERFACE); if (force_module && ifce && !strstr(ifce->module_name, force_module)) { gf_modules_close_interface((GF_BaseInterface *) ifce); ifce = NULL; } if (ifce && !net_check_interface(ifce) ) { gf_modules_close_interface((GF_BaseInterface *) ifce); ifce = NULL; } } } /* The file extension, if any, is before '?' if any or before '#' if any.*/ url_res = strrchr(sURL, '/'); if (!url_res) url_res = strrchr(sURL, '//'); if (!url_res) url_res = sURL; qm = strchr(url_res, '?'); if (qm) { qm[0] = 0; ext = strrchr(url_res, '.'); qm[0] = '?'; } else { frag = strchr(url_res, '#'); if (frag) { frag[0] = 0;//.........这里部分代码省略.........
开发者ID:ezdev128,项目名称:gpac,代码行数:101,
示例22: gf_text_import_srt_bifsstatic GF_Err gf_text_import_srt_bifs(GF_SceneManager *ctx, GF_ESD *src, GF_MuxInfo *mux){ GF_Err e; GF_Node *text, *font; GF_StreamContext *srt; FILE *srt_in; GF_FieldInfo string, style; u32 sh, sm, ss, sms, eh, em, es, ems, start, end; GF_AUContext *au; GF_Command *com; SFString *sfstr; GF_CommandField *inf; Bool italic, underlined, bold; u32 state, curLine, line, i, len; char szLine[2048], szText[2048], *ptr; GF_StreamContext *sc = NULL; if (!ctx->scene_graph) { GF_LOG(GF_LOG_ERROR, GF_LOG_PARSER, ("[srt->bifs] base scene not assigned/n")); return GF_BAD_PARAM; } i=0; while ((sc = (GF_StreamContext*)gf_list_enum(ctx->streams, &i))) { if (sc->streamType==GF_STREAM_SCENE) break; sc = NULL; } if (!sc) { GF_LOG(GF_LOG_ERROR, GF_LOG_PARSER, ("[srt->bifs] cannot locate base scene/n")); return GF_BAD_PARAM; } if (!mux->textNode) { GF_LOG(GF_LOG_ERROR, GF_LOG_PARSER, ("[srt->bifs] Target text node unspecified/n")); return GF_BAD_PARAM; } text = gf_sg_find_node_by_name(ctx->scene_graph, mux->textNode); if (!text) { GF_LOG(GF_LOG_ERROR, GF_LOG_PARSER, ("[srt->bifs] cannot find target text node %s/n", mux->textNode)); return GF_BAD_PARAM; } if (gf_node_get_field_by_name(text, "string", &string) != GF_OK) { GF_LOG(GF_LOG_ERROR, GF_LOG_PARSER, ("[srt->bifs] Target text node %s doesn't look like text/n", mux->textNode)); return GF_BAD_PARAM; } font = NULL; if (mux->fontNode) { font = gf_sg_find_node_by_name(ctx->scene_graph, mux->fontNode); if (!font) { GF_LOG(GF_LOG_ERROR, GF_LOG_PARSER, ("[srt->bifs] cannot find target font node %s/n", mux->fontNode)); return GF_BAD_PARAM; } if (gf_node_get_field_by_name(font, "style", &style) != GF_OK) { GF_LOG(GF_LOG_ERROR, GF_LOG_PARSER, ("[srt->bifs] Target font node %s doesn't look like font/n", mux->fontNode)); return GF_BAD_PARAM; } } srt_in = gf_f64_open(mux->file_name, "rt"); if (!srt_in) { GF_LOG(GF_LOG_ERROR, GF_LOG_PARSER, ("[srt->bifs] cannot open input file %s/n", mux->file_name)); return 
GF_URL_ERROR; } srt = gf_sm_stream_new(ctx, src->ESID, GF_STREAM_SCENE, 1); if (!srt) return GF_OUT_OF_MEM; if (!src->slConfig) src->slConfig = (GF_SLConfig *) gf_odf_desc_new(GF_ODF_SLC_TAG); src->slConfig->timestampResolution = 1000; if (!src->decoderConfig) src->decoderConfig = (GF_DecoderConfig *) gf_odf_desc_new(GF_ODF_DCD_TAG); src->decoderConfig->streamType = GF_STREAM_SCENE; src->decoderConfig->objectTypeIndication = 1; e = GF_OK; state = end = 0; curLine = 0; au = NULL; com = NULL; italic = underlined = bold = 0; inf = NULL; while (1) { char *sOK = fgets(szLine, 2048, srt_in); if (sOK) REM_TRAIL_MARKS(szLine, "/r/n/t ") if (!sOK || !strlen(szLine)) { state = 0; if (au) { /*if italic or underscore do it*/ if (font && (italic || underlined || bold)) { com = gf_sg_command_new(ctx->scene_graph, GF_SG_FIELD_REPLACE); com->node = font; gf_node_register(font, NULL); inf = gf_sg_command_field_new(com); inf->fieldIndex = style.fieldIndex; inf->fieldType = style.fieldType; inf->field_ptr = gf_sg_vrml_field_pointer_new(style.fieldType); sfstr = (SFString *)inf->field_ptr; if (bold && italic && underlined) sfstr->buffer = gf_strdup("BOLDITALIC UNDERLINED");//.........这里部分代码省略.........
开发者ID:olegloa,项目名称:mp4box,代码行数:101,
示例23: imagetexture_updatestatic void imagetexture_update(GF_TextureHandler *txh){ if (gf_node_get_tag(txh->owner)!=TAG_MPEG4_CacheTexture) { MFURL url = ((M_ImageTexture *) txh->owner)->url; /*setup texture if needed*/ if (!txh->is_open && url.count) { gf_sc_texture_play(txh, &url); } gf_sc_texture_update_frame(txh, 0); if ( /*URL is present but not opened - redraw till fetch*/ /* (txh->stream && !txh->tx_io) && */ /*image has been updated*/ txh->needs_refresh) { /*mark all subtrees using this image as dirty*/ gf_node_dirty_parents(txh->owner); gf_sc_invalidate(txh->compositor, NULL); } return; } /*cache texture case*/ else { M_CacheTexture *ct = (M_CacheTexture *) txh->owner; /*decode cacheTexture data */ if ((ct->data || ct->image.buffer) && !txh->data) {#ifndef GPAC_DISABLE_AV_PARSERS u32 out_size; GF_Err e; /*BT/XMT playback: load to memory*/ if (ct->image.buffer) { char *par = (char *) gf_scene_get_service_url( gf_node_get_graph(txh->owner ) ); char *src_url = gf_url_concatenate(par, ct->image.buffer); FILE *test = gf_fopen( src_url ? src_url : ct->image.buffer, "rb"); if (test) { fseek(test, 0, SEEK_END); ct->data_len = (u32) gf_ftell(test); ct->data = gf_malloc(sizeof(char)*ct->data_len); fseek(test, 0, SEEK_SET); if (ct->data_len != fread(ct->data, 1, ct->data_len, test)) { GF_LOG(GF_LOG_ERROR, GF_LOG_COMPOSE, ("[Compositor] Failed to load CacheTexture data from file %s: IO err/n", src_url ? src_url : ct->image.buffer ) ); gf_free(ct->data); ct->data = NULL; ct->data_len = 0; } gf_fclose(test); } else { GF_LOG(GF_LOG_ERROR, GF_LOG_COMPOSE, ("[Compositor] Failed to load CacheTexture data from file %s: not found/n", src_url ? 
src_url : ct->image.buffer ) ); } ct->image.buffer = NULL; if (src_url) gf_free(src_url); } /*BIFS decoded playback*/ switch (ct->objectTypeIndication) { case GPAC_OTI_IMAGE_JPEG: out_size = 0; e = gf_img_jpeg_dec((char *) ct->data, ct->data_len, &txh->width, &txh->height, &txh->pixelformat, NULL, &out_size, 3); if (e==GF_BUFFER_TOO_SMALL) { u32 BPP; txh->data = gf_malloc(sizeof(char) * out_size); if (txh->pixelformat==GF_PIXEL_GREYSCALE) BPP = 1; else BPP = 3; e = gf_img_jpeg_dec((char *) ct->data, ct->data_len, &txh->width, &txh->height, &txh->pixelformat, txh->data, &out_size, BPP); if (e==GF_OK) { gf_sc_texture_allocate(txh); gf_sc_texture_set_data(txh); txh->needs_refresh = 1; txh->stride = out_size / txh->height; } } break; case GPAC_OTI_IMAGE_PNG: out_size = 0; e = gf_img_png_dec((char *) ct->data, ct->data_len, &txh->width, &txh->height, &txh->pixelformat, NULL, &out_size); if (e==GF_BUFFER_TOO_SMALL) { txh->data = gf_malloc(sizeof(char) * out_size); e = gf_img_png_dec((char *) ct->data, ct->data_len, &txh->width, &txh->height, &txh->pixelformat, txh->data, &out_size); if (e==GF_OK) { gf_sc_texture_allocate(txh); gf_sc_texture_set_data(txh); txh->needs_refresh = 1; txh->stride = out_size / txh->height; } } break; }#endif // GPAC_DISABLE_AV_PARSERS /*cacheURL is specified, store the image*/ if (ct->cacheURL.buffer) { u32 i; u8 hash[20]; FILE *cached_texture; char szExtractName[GF_MAX_PATH], section[64], *opt, *src_url;//.........这里部分代码省略.........
开发者ID:Brilon314,项目名称:gpac,代码行数:101,
示例24: OSVC_ProcessDatastatic GF_Err OSVC_ProcessData(GF_MediaDecoder *ifcg, char *inBuffer, u32 inBufferLength, u16 ES_ID, u32 *CTS, char *outBuffer, u32 *outBufferLength, u8 PaddingBits, u32 mmlevel){ s32 got_pic; OPENSVCFRAME pic; int Layer[4]; u32 i, nalu_size, sc_size; u8 *ptr; OSVCDec *ctx = (OSVCDec*) ifcg->privateStack; u32 curMaxDqId = ctx->MaxDqId; if (!ES_ID || !ctx->codec) { *outBufferLength = 0; return GF_OK; } if (*outBufferLength < ctx->out_size) { *outBufferLength = ctx->out_size; return GF_BUFFER_TOO_SMALL; } ctx->MaxDqId = GetDqIdMax((unsigned char *) inBuffer, inBufferLength, ctx->nalu_size_length, ctx->DqIdTable, ctx->nalu_size_length ? 1 : 0); if (!ctx->init_layer_set) { //AVC stream in a h264 file if (ctx->MaxDqId == -1) ctx->MaxDqId = 0; ctx->CurrentDqId = ctx->MaxDqId; ctx->init_layer_set = GF_TRUE; } if (curMaxDqId != ctx->MaxDqId) ctx->CurrentDqId = ctx->MaxDqId; /*decode only current layer*/ SetCommandLayer(Layer, ctx->MaxDqId, ctx->CurrentDqId, &ctx->TemporalCom, ctx->TemporalId); got_pic = 0; nalu_size = 0; ptr = (u8 *) inBuffer; sc_size = 0; if (!ctx->nalu_size_length) { u32 size; size = gf_media_nalu_next_start_code((u8 *) inBuffer, inBufferLength, &sc_size); if (sc_size) { ptr += size+sc_size; assert(inBufferLength >= size+sc_size); inBufferLength -= size+sc_size; } else { /*no annex-B start-code found, discard */ *outBufferLength = 0; return GF_OK; } } while (inBufferLength) { if (ctx->nalu_size_length) { for (i=0; i<ctx->nalu_size_length; i++) { nalu_size = (nalu_size<<8) + ptr[i]; } ptr += ctx->nalu_size_length; } else { nalu_size = gf_media_nalu_next_start_code(ptr, inBufferLength, &sc_size); }#ifndef GPAC_DISABLE_LOG switch (ptr[0] & 0x1F) { case GF_AVC_NALU_SEQ_PARAM: case GF_AVC_NALU_SVC_SUBSEQ_PARAM: { u32 sps_id; gf_avc_get_sps_info((char *)ptr, nalu_size, &sps_id, NULL, NULL, NULL, NULL); GF_LOG(GF_LOG_DEBUG, GF_LOG_CODEC, ("[SVC Decoder] ES%d: SPS id=/"%d/" code=/"%d/" size=/"%d/"/n", ES_ID, sps_id, ptr[0] & 0x1F, 
nalu_size)); } break; case GF_AVC_NALU_PIC_PARAM: { u32 sps_id, pps_id; gf_avc_get_pps_info((char *)ptr, nalu_size, &pps_id, &sps_id); GF_LOG(GF_LOG_DEBUG, GF_LOG_CODEC, ("[SVC Decoder] ES%d: PPS id=/"%d/" code=/"%d/" size=/"%d/" sps_id=/"%d/"/n", ES_ID, pps_id, ptr[0] & 0x1F, nalu_size, sps_id)); } break; default: GF_LOG(GF_LOG_DEBUG, GF_LOG_CODEC, ("[SVC Decoder] ES%d: NALU code=/"%d/" size=/"%d/"/n", ES_ID, ptr[0] & 0x1F, nalu_size)); }#endif if (!ctx->state_found) { u8 nal_type = (ptr[0] & 0x1F) ; switch (nal_type) { case GF_AVC_NALU_SEQ_PARAM: case GF_AVC_NALU_PIC_PARAM: if (ctx->baseES_ID == ES_ID) ctx->state_found = GF_TRUE; break; } } if (ctx->state_found) { if (!got_pic)//.........这里部分代码省略.........
开发者ID:ARSekkat,项目名称:gpac,代码行数:101,
示例25: gf_enum_directory//.........这里部分代码省略......... CE_CharToWide(_path, path); CE_CharToWide((char *)filter, w_filter);#elif defined(WIN32) switch (dir[strlen(dir) - 1]) { case '/': case '//': sprintf(path, "%s*", dir); break; default: sprintf(path, "%s%c*", dir, GF_PATH_SEPARATOR); break; }#else strcpy(path, dir); if (path[strlen(path)-1] != '/') strcat(path, "/");#endif#ifdef WIN32 SearchH= FindFirstFile(path, &FindData); if (SearchH == INVALID_HANDLE_VALUE) return GF_IO_ERR;#if defined (_WIN32_WCE) _path[strlen(_path)-1] = 0;#else path[strlen(path)-1] = 0;#endif while (SearchH != INVALID_HANDLE_VALUE) {#else the_dir = opendir(path); if (the_dir == NULL) { GF_LOG(GF_LOG_ERROR, GF_LOG_CORE, ("[Core] Cannot open directory %s for enumeration: %d/n", path, errno)); return GF_IO_ERR; } the_file = readdir(the_dir); while (the_file) {#endif memset(&file_info, 0, sizeof(GF_FileEnumInfo) );#if defined (_WIN32_WCE) if (!wcscmp(FindData.cFileName, _T(".") )) goto next; if (!wcscmp(FindData.cFileName, _T("..") )) goto next;#elif defined(WIN32) if (!strcmp(FindData.cFileName, ".")) goto next; if (!strcmp(FindData.cFileName, "..")) goto next;#else if (!strcmp(the_file->d_name, "..")) goto next; if (the_file->d_name[0] == '.') goto next;#endif#ifdef WIN32 file_info.directory = (FindData.dwFileAttributes & FILE_ATTRIBUTE_DIRECTORY) ? GF_TRUE : GF_FALSE; if (!enum_directory && file_info.directory) goto next; if (enum_directory && !file_info.directory) goto next;#endif if (filter) {#if defined (_WIN32_WCE) short ext[30]; short *sep = wcsrchr(FindData.cFileName, (wchar_t) '.'); if (!sep) goto next;
开发者ID:TotoLulu94,项目名称:gpac,代码行数:67,
示例26: enum_modules
/*
 * Directory-enumeration callback used by the GPAC module manager: inspects one
 * candidate file and, if it looks like a GPAC module ("gm_"/"libgm_" prefix)
 * that is not already loaded, registers a new ModuleInstance for it.
 *
 * cbck      - opaque user pointer, actually the GF_ModuleManager* doing the scan
 * item_name - file name of the candidate module
 * item_path - full path of the candidate module
 *
 * Always returns GF_FALSE so the directory enumeration continues with the
 * next entry (a non-zero return would stop the enumeration).
 */
Bool enum_modules(void *cbck, char *item_name, char *item_path)
{
	ModuleInstance *inst;
#if CHECK_MODULE
	QueryInterface query_func;
	LoadInterface load_func;
	ShutdownInterface del_func;
#ifdef WIN32
	HMODULE ModuleLib;
#else
	void *ModuleLib;
	s32 _flags;
#endif
#endif
	GF_ModuleManager *pm = (GF_ModuleManager*)cbck;

	/* skip the mozilla plugin, anything without the module name prefix,
	   and modules already registered with this manager */
	if (strstr(item_name, "nposmozilla")) return GF_FALSE;
	if (strncmp(item_name, "gm_", 3) && strncmp(item_name, "libgm_", 6)) return GF_FALSE;
	if (gf_module_is_loaded(pm, item_name)) return GF_FALSE;

#if CHECK_MODULE
	/* optional sanity pass: open the library once and verify it exports the
	   three mandatory module entry points, then close it again */
#ifdef WIN32
	ModuleLib = LoadLibrary(item_path);
	if (!ModuleLib) {
		GF_LOG(GF_LOG_ERROR, GF_LOG_CORE, ("[Core] Cannot load module file %s\n", item_name));
		return GF_FALSE;
	}
#ifdef _WIN32_WCE
	query_func = (QueryInterface) GetProcAddress(ModuleLib, _T("QueryInterface"));
	load_func = (LoadInterface) GetProcAddress(ModuleLib, _T("LoadInterface"));
	del_func = (ShutdownInterface) GetProcAddress(ModuleLib, _T("ShutdownInterface"));
#else
	query_func = (QueryInterface) GetProcAddress(ModuleLib, "QueryInterface");
	load_func = (LoadInterface) GetProcAddress(ModuleLib, "LoadInterface");
	del_func = (ShutdownInterface) GetProcAddress(ModuleLib, "ShutdownInterface");
#endif
	FreeLibrary(ModuleLib);
#else
#ifdef RTLD_GLOBAL
	_flags = RTLD_LAZY | RTLD_GLOBAL;
#else
	_flags = RTLD_LAZY;
#endif
	ModuleLib = dlopen(item_name, _flags);
	if (!ModuleLib) {
		GF_LOG(GF_LOG_ERROR, GF_LOG_CORE, ("[Core] Cannot load module file %s, error is %s\n", item_name, dlerror()));
		/* FIX: original did "goto next" but no "next" label exists in this
		   function (compile error); skip this module like the WIN32 branch */
		return GF_FALSE;
	}
	query_func = (QueryInterface) dlsym(ModuleLib, "QueryInterface");
	load_func = (LoadInterface) dlsym(ModuleLib, "LoadInterface");
	del_func = (ShutdownInterface) dlsym(ModuleLib, "ShutdownInterface");
	dlclose(ModuleLib);
#endif
	if (!load_func || !query_func || !del_func) {
		/* FIX: labels and arguments were mismatched in the original
		   (load_func was printed under "QueryInterface=", etc.) */
		GF_LOG(GF_LOG_WARNING, GF_LOG_CORE, ("[Core] Could not find some signatures in module %s: QueryInterface=%p, LoadInterface=%p, ShutdownInterface=%p\n", item_name, query_func, load_func, del_func));
		return GF_FALSE;
	}
#endif

	GF_SAFEALLOC(inst, ModuleInstance);
	/* FIX: GF_SAFEALLOC can leave inst NULL on OOM; guard before dereferencing */
	if (!inst) return GF_FALSE;
	inst->interfaces = gf_list_new();
	inst->plugman = pm;
	inst->name = gf_strdup(item_name);
	inst->dir = gf_strdup(item_path);
	/* presumably rewrites inst->dir in place with the directory part of
	   item_path — TODO confirm against gf_url_get_resource_path */
	gf_url_get_resource_path(item_path, inst->dir);
	GF_LOG(GF_LOG_INFO, GF_LOG_CORE, ("[Core] Added module %s.\n", inst->name));
	gf_list_add(pm->plug_list, inst);
	return GF_FALSE;
}
开发者ID:Keemotion,项目名称:GPAC4iOS,代码行数:81,
示例27: gf_isom_text_set_streaming_mode/*fixme, this doesn't work properly with respect to @expect_type*/static GF_Descriptor *ISOR_GetServiceDesc(GF_InputService *plug, u32 expect_type, const char *sub_url){ u32 count, nb_st, i, trackID; GF_ESD *esd; ISOMReader *read; GF_InitialObjectDescriptor *iod; if (!plug || !plug->priv) return NULL; read = (ISOMReader *) plug->priv; if (!read->mov) return NULL; /*no matter what always read text as TTUs*/ gf_isom_text_set_streaming_mode(read->mov, 1); trackID = 0; if (!sub_url) { trackID = read->base_track_id; read->base_track_id = 0; } else { char *ext = strrchr(sub_url, '#'); if (!ext) { trackID = 0; } else { if (!strnicmp(ext, "#trackID=", 9)) trackID = atoi(ext+9); else if (!stricmp(ext, "#video")) trackID = get_track_id(read->mov, GF_ISOM_MEDIA_VISUAL, 0); else if (!strnicmp(ext, "#video", 6)) { trackID = atoi(ext+6); trackID = get_track_id(read->mov, GF_ISOM_MEDIA_VISUAL, trackID); } else if (!stricmp(ext, "#audio")) trackID = get_track_id(read->mov, GF_ISOM_MEDIA_AUDIO, 0); else if (!strnicmp(ext, "#audio", 6)) { trackID = atoi(ext+6); trackID = get_track_id(read->mov, GF_ISOM_MEDIA_AUDIO, trackID); } else trackID = atoi(ext+1); /*if trackID is 0, assume this is a fragment identifier*/ } } if (!trackID && (expect_type!=GF_MEDIA_OBJECT_SCENE) && (expect_type!=GF_MEDIA_OBJECT_UNDEF)) { for (i=0; i<gf_isom_get_track_count(read->mov); i++) { u32 type = gf_isom_get_media_type(read->mov, i+1); if ( ((type==GF_ISOM_MEDIA_VISUAL) && (expect_type==GF_MEDIA_OBJECT_VIDEO)) || ((type==GF_ISOM_MEDIA_AUDIO) && (expect_type==GF_MEDIA_OBJECT_AUDIO)) ) { trackID = gf_isom_get_track_id(read->mov, i+1); break; } } } if (trackID && (expect_type!=GF_MEDIA_OBJECT_SCENE) ) { u32 track = gf_isom_get_track_by_id(read->mov, trackID); if (!track) return NULL; esd = gf_media_map_esd(read->mov, track); esd->OCRESID = 0; iod = (GF_InitialObjectDescriptor *) gf_isom_get_root_od(read->mov); if (!iod) { iod = (GF_InitialObjectDescriptor *) 
gf_odf_desc_new(GF_ODF_IOD_TAG); iod->OD_profileAndLevel = iod->audio_profileAndLevel = iod->graphics_profileAndLevel = iod->scene_profileAndLevel = iod->visual_profileAndLevel = 0xFE; } else { while (gf_list_count(iod->ESDescriptors)) { GF_ESD *old = (GF_ESD *)gf_list_get(iod->ESDescriptors, 0); gf_odf_desc_del((GF_Descriptor *) old); gf_list_rem(iod->ESDescriptors, 0); } } gf_list_add(iod->ESDescriptors, esd); isor_emulate_chapters(read->mov, iod); return (GF_Descriptor *) iod; } iod = NULL; if (check_mpeg4_systems(plug, read->mov)) { iod = (GF_InitialObjectDescriptor *) gf_isom_get_root_od(read->mov); if (!iod) {#ifndef GPAC_DISABLE_LOG GF_Err e = gf_isom_last_error(read->mov); if (e) { GF_LOG(GF_LOG_ERROR, GF_LOG_NETWORK, ("[IsoMedia] Cannot fetch MPEG-4 IOD (error %s) - generating one/n", gf_error_to_string(e) )); } else { GF_LOG(GF_LOG_DEBUG, GF_LOG_NETWORK, ("[IsoMedia] No MPEG-4 IOD found in file - generating one/n")); }#endif } } if (!iod) return isor_emulate_iod(read); count = gf_list_count(iod->ESDescriptors); if (!count) { gf_odf_desc_del((GF_Descriptor*) iod); return isor_emulate_iod(read); } if (count==1) { esd = (GF_ESD *)gf_list_get(iod->ESDescriptors, 0); switch (esd->decoderConfig->streamType) { case GF_STREAM_SCENE: case GF_STREAM_PRIVATE_SCENE: break;//.........这里部分代码省略.........
开发者ID:supperlitt,项目名称:gpac,代码行数:101,
注:本文中的GF_LOG函数示例整理自Github/MSDocs等源码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。 C++ GF_REGISTER_MODULE_INTERFACE函数代码示例 C++ GF_FREE函数代码示例 |