这篇关于 C++ CSFLogError 函数代码示例的教程写得很实用，希望能帮到您。
本文整理汇总了C++中CSFLogError函数的典型用法代码示例。如果您正苦于以下问题:C++ CSFLogError函数的具体用法?C++ CSFLogError怎么用?C++ CSFLogError使用的例子?那么恭喜您, 这里精选的函数代码示例或许可以为您提供帮助。 在下文中一共展示了CSFLogError函数的14个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于我们的系统推荐出更棒的C++代码示例。 示例1: CheckInputsnsresult SrtpFlow::ProtectRtp(void *in, int in_len, int max_len, int *out_len) { nsresult res = CheckInputs(true, in, in_len, max_len, out_len); if (NS_FAILED(res)) return res; int len = in_len; srtp_err_status_t r = srtp_protect(session_, in, &len); if (r != srtp_err_status_ok) { CSFLogError(LOGTAG, "Error protecting SRTP packet"); return NS_ERROR_FAILURE; } MOZ_ASSERT(len <= max_len); *out_len = len; CSFLogDebug(LOGTAG, "Successfully protected an SRTP packet of len %d", *out_len); return NS_OK;}
开发者ID:luke-chang,项目名称:gecko-1,代码行数:23,
示例2: CSFLogDebug//WebRTC::RTP Callback Implementationint WebrtcAudioConduit::SendPacket(int channel, const void* data, int len){ CSFLogDebug(logTag, "%s : channel %d %s", __FUNCTION__, channel, (mEngineReceiving && mOtherDirection) ? "(using mOtherDirection)" : ""); if (mEngineReceiving) { if (mOtherDirection) { return mOtherDirection->SendPacket(channel, data, len); } CSFLogDebug(logTag, "%s : Asked to send RTP without an RTP sender on channel %d", __FUNCTION__, channel); return -1; } else {#ifdef MOZILLA_INTERNAL_API if (PR_LOG_TEST(GetLatencyLog(), PR_LOG_DEBUG)) { if (mProcessing.Length() > 0) { TimeStamp started = mProcessing[0].mTimeStamp; mProcessing.RemoveElementAt(0); mProcessing.RemoveElementAt(0); // 20ms packetization! Could automate this by watching sizes TimeDuration t = TimeStamp::Now() - started; int64_t delta = t.ToMilliseconds(); LogTime(AsyncLatencyLogger::AudioSendRTP, ((uint64_t) this), delta); } }#endif if(mTransport && (mTransport->SendRtpPacket(data, len) == NS_OK)) { CSFLogDebug(logTag, "%s Sent RTP Packet ", __FUNCTION__); return len; } else { CSFLogError(logTag, "%s RTP Packet Send Failed ", __FUNCTION__); return -1; } }}
开发者ID:hitdream2002,项目名称:gecko-dev,代码行数:38,
示例3: MOZ_ASSERTvoidRemoteSourceStreamInfo::StorePipeline(int aTrack, bool aIsVideo, mozilla::RefPtr<mozilla::MediaPipeline> aPipeline){ MOZ_ASSERT(mPipelines.find(aTrack) == mPipelines.end()); if (mPipelines.find(aTrack) != mPipelines.end()) { CSFLogError(logTag, "%s: Request to store duplicate track %d", __FUNCTION__, aTrack); return; } CSFLogDebug(logTag, "%s track %d %s = %p", __FUNCTION__, aTrack, aIsVideo ? "video" : "audio", aPipeline.get()); // See if we have both audio and video here, and if so cross the streams and sync them // XXX Needs to be adjusted when we support multiple streams of the same type for (std::map<int, bool>::iterator it = mTypes.begin(); it != mTypes.end(); ++it) { if (it->second != aIsVideo) { // Ok, we have one video, one non-video - cross the streams! mozilla::WebrtcAudioConduit *audio_conduit = static_cast<mozilla::WebrtcAudioConduit*> (aIsVideo ? mPipelines[it->first]->Conduit() : aPipeline->Conduit()); mozilla::WebrtcVideoConduit *video_conduit = static_cast<mozilla::WebrtcVideoConduit*> (aIsVideo ? aPipeline->Conduit() : mPipelines[it->first]->Conduit()); video_conduit->SyncTo(audio_conduit); CSFLogDebug(logTag, "Syncing %p to %p, %d to %d", video_conduit, audio_conduit, aTrack, it->first); } } //TODO: Revisit once we start supporting multiple streams or multiple tracks // of same type mPipelines[aTrack] = aPipeline; //TODO: move to attribute on Pipeline mTypes[aTrack] = aIsVideo;}
开发者ID:alessandrod,项目名称:mozilla-central,代码行数:36,
示例4: CSFLogDebugint WebrtcAudioConduit::SendRTCPPacket(int channel, const void* data, int len){ CSFLogDebug(logTag, "%s : channel %d", __FUNCTION__, channel); if (mEngineTransmitting) { if (mOtherDirection) { return mOtherDirection->SendRTCPPacket(channel, data, len); } CSFLogDebug(logTag, "%s : Asked to send RTCP without an RTP receiver on channel %d", __FUNCTION__, channel); return -1; } else { if(mTransport && mTransport->SendRtcpPacket(data, len) == NS_OK) { CSFLogDebug(logTag, "%s Sent RTCP Packet ", __FUNCTION__); return len; } else { CSFLogError(logTag, "%s RTCP Packet Send Failed ", __FUNCTION__); return -1; } }}
开发者ID:RickEyre,项目名称:mozilla-central,代码行数:24,
示例5: do_GetServicensresultPeerConnectionMedia::InitProxy(){#if !defined(MOZILLA_EXTERNAL_LINKAGE) // Allow mochitests to disable this, since mochitest configures a fake proxy // that serves up content. bool disable = Preferences::GetBool("media.peerconnection.disable_http_proxy", false); if (disable) { mProxyResolveCompleted = true; return NS_OK; }#endif nsresult rv; nsCOMPtr<nsIProtocolProxyService> pps = do_GetService(NS_PROTOCOLPROXYSERVICE_CONTRACTID, &rv); if (NS_FAILED(rv)) { CSFLogError(logTag, "%s: Failed to get proxy service: %d", __FUNCTION__, (int)rv); return NS_ERROR_FAILURE; } // We use the following URL to find the "default" proxy address for all HTTPS // connections. We will only attempt one HTTP(S) CONNECT per peer connection. // "example.com" is guaranteed to be unallocated and should return the best default. nsCOMPtr<nsIURI> fakeHttpsLocation; rv = NS_NewURI(getter_AddRefs(fakeHttpsLocation), "https://example.com"); if (NS_FAILED(rv)) { CSFLogError(logTag, "%s: Failed to set URI: %d", __FUNCTION__, (int)rv); return NS_ERROR_FAILURE; } nsCOMPtr<nsIScriptSecurityManager> secMan( do_GetService(NS_SCRIPTSECURITYMANAGER_CONTRACTID, &rv)); if (NS_FAILED(rv)) { CSFLogError(logTag, "%s: Failed to get IOService: %d", __FUNCTION__, (int)rv); CSFLogError(logTag, "%s: Failed to get securityManager: %d", __FUNCTION__, (int)rv); return NS_ERROR_FAILURE; } nsCOMPtr<nsIPrincipal> systemPrincipal; rv = secMan->GetSystemPrincipal(getter_AddRefs(systemPrincipal)); if (NS_FAILED(rv)) { CSFLogError(logTag, "%s: Failed to get systemPrincipal: %d", __FUNCTION__, (int)rv); return NS_ERROR_FAILURE; } nsCOMPtr<nsIChannel> channel; rv = NS_NewChannel(getter_AddRefs(channel), fakeHttpsLocation, systemPrincipal, nsILoadInfo::SEC_ALLOW_CROSS_ORIGIN_DATA_IS_NULL, nsIContentPolicy::TYPE_OTHER); if (NS_FAILED(rv)) { CSFLogError(logTag, "%s: Failed to get channel from URI: %d", __FUNCTION__, (int)rv); return NS_ERROR_FAILURE; } RefPtr<ProtocolProxyQueryHandler> handler = new 
ProtocolProxyQueryHandler(this); rv = pps->AsyncResolve(channel, nsIProtocolProxyService::RESOLVE_PREFER_HTTPS_PROXY | nsIProtocolProxyService::RESOLVE_ALWAYS_TUNNEL, handler, getter_AddRefs(mProxyRequest)); if (NS_FAILED(rv)) { CSFLogError(logTag, "%s: Failed to resolve protocol proxy: %d", __FUNCTION__, (int)rv); return NS_ERROR_FAILURE; } return NS_OK;}
开发者ID:carriercomm,项目名称:gecko-dev,代码行数:73,
示例6: CSFLogDebug/** * Peforms intialization of the MANDATORY components of the Video Engine */MediaConduitErrorCode WebrtcVideoConduit::Init(){ CSFLogDebug(logTag, "%s ", __FUNCTION__); if( !(mVideoEngine = webrtc::VideoEngine::Create()) ) { CSFLogError(logTag, "%s Unable to create video engine ", __FUNCTION__); return kMediaConduitSessionNotInited; }#if 0 // TRACING mVideoEngine->SetTraceFilter(webrtc::kTraceAll); mVideoEngine->SetTraceFile( "Vievideotrace.out" );#endif if( !(mPtrViEBase = ViEBase::GetInterface(mVideoEngine))) { CSFLogError(logTag, "%s Unable to create video engine ", __FUNCTION__); return kMediaConduitSessionNotInited; } if( !(mPtrViECapture = ViECapture::GetInterface(mVideoEngine))) { CSFLogError(logTag, "%s Unable to create video engine ", __FUNCTION__); return kMediaConduitSessionNotInited; } if( !(mPtrViECodec = ViECodec::GetInterface(mVideoEngine))) { CSFLogError(logTag, "%s Unable to create video engine ", __FUNCTION__); return kMediaConduitSessionNotInited; } if( !(mPtrViENetwork = ViENetwork::GetInterface(mVideoEngine))) { CSFLogError(logTag, "%s Unable to create video engine ", __FUNCTION__); return kMediaConduitSessionNotInited; } if( !(mPtrViERender = ViERender::GetInterface(mVideoEngine))) { CSFLogError(logTag, "%s Unable to create video engine ", __FUNCTION__); return kMediaConduitSessionNotInited; } CSFLogDebug(logTag, "%sEngine Created: Init'ng the interfaces ",__FUNCTION__); if(mPtrViEBase->Init() == -1) { CSFLogError(logTag, " %s Video Engine Init Failed %d ",__FUNCTION__, mPtrViEBase->LastError()); return kMediaConduitSessionNotInited; } if(mPtrViEBase->CreateChannel(mChannel) == -1) { CSFLogError(logTag, " %s Channel creation Failed %d ",__FUNCTION__, mPtrViEBase->LastError()); return kMediaConduitChannelError; } if(mPtrViENetwork->RegisterSendTransport(mChannel, *this) == -1) { CSFLogError(logTag, "%s ViENetwork Failed %d ", __FUNCTION__, mPtrViEBase->LastError()); return kMediaConduitTransportRegistrationFail; } 
mPtrExtCapture = 0; if(mPtrViECapture->AllocateExternalCaptureDevice(mCapId, mPtrExtCapture) == -1) { CSFLogError(logTag, "%s Unable to Allocate capture module: %d ", __FUNCTION__, mPtrViEBase->LastError()); return kMediaConduitCaptureError; } if(mPtrViECapture->ConnectCaptureDevice(mCapId,mChannel) == -1) { CSFLogError(logTag, "%s Unable to Connect capture module: %d ", __FUNCTION__,mPtrViEBase->LastError()); return kMediaConduitCaptureError; } if(mPtrViERender->AddRenderer(mChannel, webrtc::kVideoI420, (webrtc::ExternalRenderer*) this) == -1) { CSFLogError(logTag, "%s Failed to added external renderer ", __FUNCTION__); return kMediaConduitInvalidRenderer; }//.........这里部分代码省略.........
开发者ID:AshishNamdev,项目名称:mozilla-central,代码行数:101,
示例7: CSFLogDebug/* * WebRTCAudioConduit Implementation */MediaConduitErrorCode WebrtcAudioConduit::Init(){ CSFLogDebug(logTag, "%s this=%p", __FUNCTION__, this);#ifdef MOZ_WIDGET_ANDROID jobject context = jsjni_GetGlobalContextRef(); // get the JVM JavaVM *jvm = jsjni_GetVM(); JNIEnv* jenv = jsjni_GetJNIForThread(); if (webrtc::VoiceEngine::SetAndroidObjects(jvm, jenv, (void*)context) != 0) { CSFLogError(logTag, "%s Unable to set Android objects", __FUNCTION__); return kMediaConduitSessionNotInited; }#endif // Per WebRTC APIs below function calls return nullptr on failure if(!(mVoiceEngine = webrtc::VoiceEngine::Create())) { CSFLogError(logTag, "%s Unable to create voice engine", __FUNCTION__); return kMediaConduitSessionNotInited; } EnableWebRtcLog(); if(!(mPtrVoEBase = VoEBase::GetInterface(mVoiceEngine))) { CSFLogError(logTag, "%s Unable to initialize VoEBase", __FUNCTION__); return kMediaConduitSessionNotInited; } if(!(mPtrVoENetwork = VoENetwork::GetInterface(mVoiceEngine))) { CSFLogError(logTag, "%s Unable to initialize VoENetwork", __FUNCTION__); return kMediaConduitSessionNotInited; } if(!(mPtrVoECodec = VoECodec::GetInterface(mVoiceEngine))) { CSFLogError(logTag, "%s Unable to initialize VoEBCodec", __FUNCTION__); return kMediaConduitSessionNotInited; } if(!(mPtrVoEProcessing = VoEAudioProcessing::GetInterface(mVoiceEngine))) { CSFLogError(logTag, "%s Unable to initialize VoEProcessing", __FUNCTION__); return kMediaConduitSessionNotInited; } if(!(mPtrVoEXmedia = VoEExternalMedia::GetInterface(mVoiceEngine))) { CSFLogError(logTag, "%s Unable to initialize VoEExternalMedia", __FUNCTION__); return kMediaConduitSessionNotInited; } if(!(mPtrVoERTP_RTCP = VoERTP_RTCP::GetInterface(mVoiceEngine))) { CSFLogError(logTag, "%s Unable to initialize VoERTP_RTCP", __FUNCTION__); return kMediaConduitSessionNotInited; } if(!(mPtrVoEVideoSync = VoEVideoSync::GetInterface(mVoiceEngine))) { CSFLogError(logTag, "%s Unable to initialize VoEVideoSync", __FUNCTION__); return 
kMediaConduitSessionNotInited; } if (!(mPtrRTP = webrtc::VoERTP_RTCP::GetInterface(mVoiceEngine))) { CSFLogError(logTag, "%s Unable to get audio RTP/RTCP interface ", __FUNCTION__); return kMediaConduitSessionNotInited; } // init the engine with our audio device layer if(mPtrVoEBase->Init() == -1) { CSFLogError(logTag, "%s VoiceEngine Base Not Initialized", __FUNCTION__); return kMediaConduitSessionNotInited; } if( (mChannel = mPtrVoEBase->CreateChannel()) == -1) { CSFLogError(logTag, "%s VoiceEngine Channel creation failed",__FUNCTION__); return kMediaConduitChannelError; } CSFLogDebug(logTag, "%s Channel Created %d ",__FUNCTION__, mChannel); if(mPtrVoENetwork->RegisterExternalTransport(mChannel, *this) == -1) { CSFLogError(logTag, "%s VoiceEngine, External Transport Failed",__FUNCTION__); return kMediaConduitTransportRegistrationFail; } if(mPtrVoEXmedia->SetExternalRecordingStatus(true) == -1) { CSFLogError(logTag, "%s SetExternalRecordingStatus Failed %d",__FUNCTION__, mPtrVoEBase->LastError());//.........这里部分代码省略.........
开发者ID:mtjvankuik,项目名称:gecko-dev,代码行数:101,
示例8: CSFLogDebug/** * Note: Setting the send-codec on the Video Engine will restart the encoder, * sets up new SSRC and reset RTP_RTCP module with the new codec setting. * * Note: this is called from MainThread, and the codec settings are read on * videoframe delivery threads (i.e in SendVideoFrame(). With * renegotiation/reconfiguration, this now needs a lock! Alternatively * changes could be queued until the next frame is delivered using an * Atomic pointer and swaps. */MediaConduitErrorCodeWebrtcVideoConduit::ConfigureSendMediaCodec(const VideoCodecConfig* codecConfig){ CSFLogDebug(logTag, "%s for %s", __FUNCTION__, codecConfig ? codecConfig->mName.c_str() : "<null>"); bool codecFound = false; MediaConduitErrorCode condError = kMediaConduitNoError; int error = 0; //webrtc engine errors webrtc::VideoCodec video_codec; std::string payloadName; memset(&video_codec, 0, sizeof(video_codec)); { //validate basic params if((condError = ValidateCodecConfig(codecConfig,true)) != kMediaConduitNoError) { return condError; } } condError = StopTransmitting(); if (condError != kMediaConduitNoError) { return condError; } if (mExternalSendCodec && codecConfig->mType == mExternalSendCodec->mType) { CSFLogError(logTag, "%s Configuring External H264 Send Codec", __FUNCTION__); // width/height will be overridden on the first frame video_codec.width = 320; video_codec.height = 240;#ifdef MOZ_WEBRTC_OMX if (codecConfig->mType == webrtc::kVideoCodecH264) { video_codec.resolution_divisor = 16; } else { video_codec.resolution_divisor = 1; // We could try using it to handle odd resolutions }#else video_codec.resolution_divisor = 1; // We could try using it to handle odd resolutions#endif video_codec.qpMax = 56; video_codec.numberOfSimulcastStreams = 1; video_codec.mode = webrtc::kRealtimeVideo; codecFound = true; } else { // we should be good here to set the new codec. 
for(int idx=0; idx < mPtrViECodec->NumberOfCodecs(); idx++) { if(0 == mPtrViECodec->GetCodec(idx, video_codec)) { payloadName = video_codec.plName; if(codecConfig->mName.compare(payloadName) == 0) { // Note: side-effect of this is that video_codec is filled in // by GetCodec() codecFound = true; break; } } }//for } if(codecFound == false) { CSFLogError(logTag, "%s Codec Mismatch ", __FUNCTION__); return kMediaConduitInvalidSendCodec; } // Note: only for overriding parameters from GetCodec()! CodecConfigToWebRTCCodec(codecConfig, video_codec); if(mPtrViECodec->SetSendCodec(mChannel, video_codec) == -1) { error = mPtrViEBase->LastError(); if(error == kViECodecInvalidCodec) { CSFLogError(logTag, "%s Invalid Send Codec", __FUNCTION__); return kMediaConduitInvalidSendCodec; } CSFLogError(logTag, "%s SetSendCodec Failed %d ", __FUNCTION__, mPtrViEBase->LastError()); return kMediaConduitUnknownError; } if (!mVideoCodecStat) { mVideoCodecStat = new VideoCodecStatistics(mChannel, mPtrViECodec); } mVideoCodecStat->Register(true);//.........这里部分代码省略.........
开发者ID:Antonius32,项目名称:Pale-Moon,代码行数:101,
示例9: RemoveTransportFlownsresultPeerConnectionMedia::UpdateTransportFlow( size_t aLevel, bool aIsRtcp, const JsepTransport& aTransport){ if (aIsRtcp && aTransport.mComponents < 2) { RemoveTransportFlow(aLevel, aIsRtcp); return NS_OK; } if (!aIsRtcp && !aTransport.mComponents) { RemoveTransportFlow(aLevel, aIsRtcp); return NS_OK; } nsresult rv; RefPtr<TransportFlow> flow = GetTransportFlow(aLevel, aIsRtcp); if (flow) { if (IsIceRestarting()) { CSFLogInfo(LOGTAG, "Flow[%s]: detected ICE restart - level: %u rtcp: %d", flow->id().c_str(), (unsigned)aLevel, aIsRtcp); RefPtr<PeerConnectionMedia> pcMedia(this); rv = GetSTSThread()->Dispatch( WrapRunnableNM(AddNewIceStreamForRestart_s, pcMedia, flow, aLevel, aIsRtcp), NS_DISPATCH_NORMAL); if (NS_FAILED(rv)) { CSFLogError(LOGTAG, "Failed to dispatch AddNewIceStreamForRestart_s"); return rv; } } return NS_OK; } std::ostringstream osId; osId << mParentHandle << ":" << aLevel << "," << (aIsRtcp ? "rtcp" : "rtp"); flow = new TransportFlow(osId.str()); // The media streams are made on STS so we need to defer setup. auto ice = MakeUnique<TransportLayerIce>(); auto dtls = MakeUnique<TransportLayerDtls>(); dtls->SetRole(aTransport.mDtls->GetRole() == JsepDtlsTransport::kJsepDtlsClient ? 
TransportLayerDtls::CLIENT : TransportLayerDtls::SERVER); RefPtr<DtlsIdentity> pcid = mParent->Identity(); if (!pcid) { CSFLogError(LOGTAG, "Failed to get DTLS identity."); return NS_ERROR_FAILURE; } dtls->SetIdentity(pcid); const SdpFingerprintAttributeList& fingerprints = aTransport.mDtls->GetFingerprints(); for (const auto& fingerprint : fingerprints.mFingerprints) { std::ostringstream ss; ss << fingerprint.hashFunc; rv = dtls->SetVerificationDigest(ss.str(), &fingerprint.fingerprint[0], fingerprint.fingerprint.size()); if (NS_FAILED(rv)) { CSFLogError(LOGTAG, "Could not set fingerprint"); return rv; } } std::vector<uint16_t> srtpCiphers; srtpCiphers.push_back(SRTP_AES128_CM_HMAC_SHA1_80); srtpCiphers.push_back(SRTP_AES128_CM_HMAC_SHA1_32); rv = dtls->SetSrtpCiphers(srtpCiphers); if (NS_FAILED(rv)) { CSFLogError(LOGTAG, "Couldn't set SRTP ciphers"); return rv; } // Always permits negotiation of the confidential mode. // Only allow non-confidential (which is an allowed default), // if we aren't confidential. std::set<std::string> alpn; std::string alpnDefault = ""; alpn.insert("c-webrtc"); if (!mParent->PrivacyRequested()) { alpnDefault = "webrtc"; alpn.insert(alpnDefault); } rv = dtls->SetAlpn(alpn, alpnDefault); if (NS_FAILED(rv)) { CSFLogError(LOGTAG, "Couldn't set ALPN"); return rv; } nsAutoPtr<PtrVector<TransportLayer> > layers(new PtrVector<TransportLayer>); layers->values.push_back(ice.release()); layers->values.push_back(dtls.release());//.........这里部分代码省略.........
开发者ID:luke-chang,项目名称:gecko-1,代码行数:101,
示例10: CSFLogDebugMediaConduitErrorCodeWebrtcVideoConduit::ConfigureRecvMediaCodecs( const std::vector<VideoCodecConfig* >& codecConfigList){ CSFLogDebug(logTag, "%s ", __FUNCTION__); MediaConduitErrorCode condError = kMediaConduitNoError; int error = 0; //webrtc engine errors bool success = false; std::string payloadName; // are we receiving already? If so, stop receiving and playout // since we can't apply new recv codec when the engine is playing. if(mEngineReceiving) { CSFLogDebug(logTag, "%s Engine Already Receiving . Attemping to Stop ", __FUNCTION__); if(mPtrViEBase->StopReceive(mChannel) == -1) { error = mPtrViEBase->LastError(); if(error == kViEBaseUnknownError) { CSFLogDebug(logTag, "%s StopReceive() Success ", __FUNCTION__); mEngineReceiving = false; } else { CSFLogError(logTag, "%s StopReceive() Failed %d ", __FUNCTION__, mPtrViEBase->LastError()); return kMediaConduitUnknownError; } } } mEngineReceiving = false; if(codecConfigList.empty()) { CSFLogError(logTag, "%s Zero number of codecs to configure", __FUNCTION__); return kMediaConduitMalformedArgument; } webrtc::ViEKeyFrameRequestMethod kf_request = webrtc::kViEKeyFrameRequestNone; bool use_nack_basic = false; //Try Applying the codecs in the list // we treat as success if atleast one codec was applied and reception was // started successfully. for(std::vector<VideoCodecConfig*>::size_type i=0;i < codecConfigList.size();i++) { //if the codec param is invalid or diplicate, return error if((condError = ValidateCodecConfig(codecConfigList[i],false)) != kMediaConduitNoError) { return condError; } // Check for the keyframe request type: PLI is preferred // over FIR, and FIR is preferred over none. 
if (codecConfigList[i]->RtcpFbIsSet(SDP_RTCP_FB_NACK_PLI)) { kf_request = webrtc::kViEKeyFrameRequestPliRtcp; } else if(kf_request == webrtc::kViEKeyFrameRequestNone && codecConfigList[i]->RtcpFbIsSet(SDP_RTCP_FB_CCM_FIR)) { kf_request = webrtc::kViEKeyFrameRequestFirRtcp; } // Check whether NACK is requested if(codecConfigList[i]->RtcpFbIsSet(SDP_RTCP_FB_NACK_BASIC)) { use_nack_basic = true; } webrtc::VideoCodec video_codec; mEngineReceiving = false; memset(&video_codec, 0, sizeof(webrtc::VideoCodec)); //Retrieve pre-populated codec structure for our codec. for(int idx=0; idx < mPtrViECodec->NumberOfCodecs(); idx++) { if(mPtrViECodec->GetCodec(idx, video_codec) == 0) { payloadName = video_codec.plName; if(codecConfigList[i]->mName.compare(payloadName) == 0) { CodecConfigToWebRTCCodec(codecConfigList[i], video_codec); if(mPtrViECodec->SetReceiveCodec(mChannel,video_codec) == -1) { CSFLogError(logTag, "%s Invalid Receive Codec %d ", __FUNCTION__, mPtrViEBase->LastError()); } else { CSFLogError(logTag, "%s Successfully Set the codec %s", __FUNCTION__, codecConfigList[i]->mName.c_str()); if(CopyCodecToDB(codecConfigList[i])) { success = true; } else { CSFLogError(logTag,"%s Unable to updated Codec Database", __FUNCTION__); return kMediaConduitUnknownError; } } break; //we found a match } }//.........这里部分代码省略.........
开发者ID:cbrem,项目名称:gecko-dev,代码行数:101,
示例11: do_GetServicensresult PeerConnectionMedia::Init(const std::vector<NrIceStunServer>& stun_servers, const std::vector<NrIceTurnServer>& turn_servers, NrIceCtx::Policy policy){ nsresult rv; nsCOMPtr<nsIProtocolProxyService> pps = do_GetService(NS_PROTOCOLPROXYSERVICE_CONTRACTID, &rv); if (NS_FAILED(rv)) { CSFLogError(logTag, "%s: Failed to get proxy service: %d", __FUNCTION__, (int)rv); return NS_ERROR_FAILURE; } // We use the following URL to find the "default" proxy address for all HTTPS // connections. We will only attempt one HTTP(S) CONNECT per peer connection. // "example.com" is guaranteed to be unallocated and should return the best default. nsCOMPtr<nsIURI> fakeHttpsLocation; rv = NS_NewURI(getter_AddRefs(fakeHttpsLocation), "https://example.com"); if (NS_FAILED(rv)) { CSFLogError(logTag, "%s: Failed to set URI: %d", __FUNCTION__, (int)rv); return NS_ERROR_FAILURE; } nsCOMPtr<nsIScriptSecurityManager> secMan( do_GetService(NS_SCRIPTSECURITYMANAGER_CONTRACTID, &rv)); if (NS_FAILED(rv)) { CSFLogError(logTag, "%s: Failed to get IOService: %d", __FUNCTION__, (int)rv); CSFLogError(logTag, "%s: Failed to get securityManager: %d", __FUNCTION__, (int)rv); return NS_ERROR_FAILURE; } nsCOMPtr<nsIPrincipal> systemPrincipal; rv = secMan->GetSystemPrincipal(getter_AddRefs(systemPrincipal)); if (NS_FAILED(rv)) { CSFLogError(logTag, "%s: Failed to get systemPrincipal: %d", __FUNCTION__, (int)rv); return NS_ERROR_FAILURE; } nsCOMPtr<nsIChannel> channel; rv = NS_NewChannel(getter_AddRefs(channel), fakeHttpsLocation, systemPrincipal, nsILoadInfo::SEC_ALLOW_CROSS_ORIGIN_DATA_IS_NULL, nsIContentPolicy::TYPE_OTHER); if (NS_FAILED(rv)) { CSFLogError(logTag, "%s: Failed to get channel from URI: %d", __FUNCTION__, (int)rv); return NS_ERROR_FAILURE; } RefPtr<ProtocolProxyQueryHandler> handler = new ProtocolProxyQueryHandler(this); rv = pps->AsyncResolve(channel, nsIProtocolProxyService::RESOLVE_PREFER_HTTPS_PROXY | nsIProtocolProxyService::RESOLVE_ALWAYS_TUNNEL, handler, 
getter_AddRefs(mProxyRequest)); if (NS_FAILED(rv)) { CSFLogError(logTag, "%s: Failed to resolve protocol proxy: %d", __FUNCTION__, (int)rv); return NS_ERROR_FAILURE; }#if !defined(MOZILLA_EXTERNAL_LINKAGE) bool ice_tcp = Preferences::GetBool("media.peerconnection.ice.tcp", false); if (!XRE_IsParentProcess()) { CSFLogError(logTag, "%s: ICE TCP not support on e10s", __FUNCTION__); ice_tcp = false; } bool default_address_only = Preferences::GetBool( "media.peerconnection.ice.default_address_only", false);#else bool ice_tcp = false; bool default_address_only = false;#endif // TODO([email C++ CSFML_CALL函数代码示例 C++ CRegString函数代码示例
|