
Self-study tutorial: C++ AudioUnitGetProperty function code examples

51自学网 2021-06-01 19:48:26
This tutorial on C++ AudioUnitGetProperty code examples is intended to be practical; we hope it helps you.

This article collects typical usage examples of the AudioUnitGetProperty function in C++. If you have been wondering what AudioUnitGetProperty does, how to call it, or what real-world uses of it look like, the hand-picked code examples below should help.

A total of 24 AudioUnitGetProperty code examples are shown below, sorted by popularity by default. You can upvote the ones you like or find useful; your feedback helps the system recommend better C++ code examples.
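As a quick orientation before the examples, here is a minimal sketch of the basic call pattern. It is not taken from any of the projects below; the AudioUnit handle outputUnit and the helper name PrintOutputFormat are assumed purely for illustration. You pass the unit, a property ID, a scope, an element (bus) number, a destination buffer, and a size that is updated on return, then check the returned OSStatus.

#include <AudioToolbox/AudioToolbox.h>
#include <cstdio>

// Minimal sketch: read the stream format on output bus 0 of an already-created AudioUnit.
static void PrintOutputFormat(AudioUnit outputUnit)
{
    AudioStreamBasicDescription asbd = {};
    UInt32 size = sizeof(asbd);                 // in: buffer size; out: bytes actually written

    OSStatus err = AudioUnitGetProperty(outputUnit,
                                        kAudioUnitProperty_StreamFormat,
                                        kAudioUnitScope_Output,
                                        0,      // element (bus) 0
                                        &asbd,
                                        &size);
    if (err != noErr) {
        fprintf(stderr, "AudioUnitGetProperty failed: %d\n", (int)err);
        return;
    }
    printf("sample rate: %.0f Hz, channels: %u\n",
           asbd.mSampleRate, (unsigned)asbd.mChannelsPerFrame);
}

For properties whose size is not known at compile time, the usual pattern, which several examples below follow, is to call AudioUnitGetPropertyInfo first to get the required byte count, allocate a buffer of that size, and then call AudioUnitGetProperty.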

Example 1: sizeof

std::vector<AudioStreamBasicDescription> ofxAudioUnitMatrixMixer::getASBDs() const
{
    std::vector<AudioStreamBasicDescription> ASBDs;
    AudioStreamBasicDescription inputASBD, outputASBD;
    UInt32 inputSize, outputSize;
    // the size passed in must cover the full AudioStreamBasicDescription being fetched
    inputSize = outputSize = sizeof(AudioStreamBasicDescription);

    OFXAU_PRINT(AudioUnitGetProperty(*_unit,
                                     kAudioUnitProperty_StreamFormat,
                                     kAudioUnitScope_Input,
                                     0,
                                     &inputASBD,
                                     &inputSize),
                "getting matrix mixer input ASBD");

    OFXAU_PRINT(AudioUnitGetProperty(*_unit,
                                     kAudioUnitProperty_StreamFormat,
                                     kAudioUnitScope_Output,
                                     0,
                                     &outputASBD,
                                     &outputSize),
                "getting matrix mixer output ASBD");

    ASBDs.push_back(inputASBD);
    ASBDs.push_back(outputASBD);
    return ASBDs;
}

Developer: CLOUDS-Interactive-Documentary, project: ofxAudioUnit, lines: 27


Example 2: Init

    // get the fast dispatch pointers
    void Init()
    {
        UInt32 size = sizeof(AudioUnitRenderProc);
        if (AudioUnitGetProperty(mUnit, kAudioUnitProperty_FastDispatch,
                                kAudioUnitScope_Global, kAudioUnitRenderSelect,
                                &mRenderProc, &size) != noErr)
            mRenderProc = NULL;

        if (AudioUnitGetProperty(mUnit, kAudioUnitProperty_FastDispatch,
                                kAudioUnitScope_Global, kAudioUnitGetParameterSelect,
                                &mGetParamProc, &size) != noErr)
            mGetParamProc = NULL;

        if (AudioUnitGetProperty(mUnit, kAudioUnitProperty_FastDispatch,
                                kAudioUnitScope_Global, kAudioUnitSetParameterSelect,
                                &mSetParamProc, &size) != noErr)
            mSetParamProc = NULL;

        if (AudioUnitGetProperty(mUnit, kAudioUnitProperty_FastDispatch,
                                kAudioUnitScope_Global, kMusicDeviceMIDIEventSelect,
                                &mMIDIEventProc, &size) != noErr)
            mMIDIEventProc = NULL;

        if (mRenderProc || mGetParamProc || mSetParamProc || mMIDIEventProc)
            mConnInstanceStorage = GetComponentInstanceStorage(mUnit);
        else
            mConnInstanceStorage = NULL;
    }

Developer: 63n, project: ardour, lines: 27


Example 3: sizeof

//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
//	AUMixer3DView::SetRenderingFlagsCheckboxes
//
//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
void	AUMixer3DView::SetRenderingFlagsCheckboxes()
{
    // set the check boxes according to the current rendering flags
    UInt32 flags;
    UInt32 size = sizeof(flags);

    AudioUnitGetProperty(	GetEditAudioUnit(),
        kAudioUnitProperty_3DMixerRenderingFlags,
        kAudioUnitScope_Input,
        0,
        &flags,
        &size );

    UInt32 usesReverb = 0;
    AudioUnitGetProperty(	GetEditAudioUnit(),
        kAudioUnitProperty_UsesInternalReverb,
        kAudioUnitScope_Input,
        0,
        &usesReverb,
        &size );

    SetCheckbox('atr0', 1, flags & k3DMixerRenderingFlags_InterAuralDelay );
    SetCheckbox('atr1', 2, flags & k3DMixerRenderingFlags_DopplerShift );
    SetCheckbox('atr2', 3, flags & k3DMixerRenderingFlags_DistanceAttenuation );
    SetCheckbox('atr3', 4, flags & k3DMixerRenderingFlags_DistanceFilter );
    SetCheckbox('atr4', 5, flags & k3DMixerRenderingFlags_DistanceDiffusion );
    SetCheckbox('rvrb', 6, usesReverb );

    ::UpdateControls(GetCarbonWindow(), NULL );
}

Developer: arnelh, project: Examples, lines: 35


Example 4: PrintMatrixMixerVolumes

void	PrintMatrixMixerVolumes (FILE* file, AudioUnit au)
{
    UInt32 dims[2];
    UInt32 theSize = sizeof(UInt32) * 2;
    Float32 *theVols = NULL;
    OSStatus result;

    // this call will fail if the unit is NOT initialized as it would present an incomplete state
    ca_require_noerr (result = AudioUnitGetProperty (au, kAudioUnitProperty_MatrixDimensions,
                            kAudioUnitScope_Global, 0, dims, &theSize), home);

    theSize = ((dims[0] + 1) * (dims[1] + 1)) * sizeof(Float32);

    theVols = static_cast<Float32*> (malloc (theSize));

    ca_require_noerr (result = AudioUnitGetProperty (au, kAudioUnitProperty_MatrixLevels,
                            kAudioUnitScope_Global, 0, theVols, &theSize), home);

home:
    if (result) {
        if (theVols)
            free(theVols);
        return;
    }

    theSize /= sizeof(Float32);

    unsigned int inputs = dims[0];
    unsigned int outputs = dims[1];
    fprintf (file, "\tInput Channels = %d, Output Channels = %d\n", (int)dims[0], (int)dims[1]);
    PrintBuses (file, "Input", au, kAudioUnitScope_Input);
    PrintBuses (file, "Output", au, kAudioUnitScope_Output);
    fprintf (file, "\tGlobal Volume: %.3f\n", theVols [theSize - 1]);
    for (unsigned int i = 0; i < (inputs + 1); ++i) {
        if (i < inputs) {
            fprintf (file, "\t%.3f   ", theVols[(i + 1) * (outputs + 1) - 1]);

            for (unsigned int j = 0; j < outputs; ++j)
                fprintf (file, "(%.3f) ", theVols[(i * (outputs + 1)) + j]);
        } else {
            fprintf (file, "\t        ");
            for (unsigned int j = 0; j < outputs; ++j)
                fprintf (file, " %.3f  ", theVols[(i * (outputs + 1)) + j]);
        }
        fprintf (file, "\n");
    }
#if 0
    for (unsigned int i = 0; i < theSize; ++i)
        printf ("%f, ", theVols[i]);
#endif
    free(theVols);
}

Developer: abscura, project: audiounitjs, lines: 54


Example 5: notification

static OSStatus notification(AudioDeviceID inDevice,
                            UInt32 inChannel,
                            Boolean isInput,
                            AudioDevicePropertyID inPropertyID,
                            void* inClientData)
{
    coreaudio_driver_t* driver = (coreaudio_driver_t*)inClientData;

    switch (inPropertyID) {

        case kAudioDeviceProcessorOverload:
            driver->xrun_detected = 1;
            break;

        case kAudioDevicePropertyNominalSampleRate: {
            UInt32 outSize = sizeof(Float64);
            Float64 sampleRate;
            AudioStreamBasicDescription srcFormat, dstFormat;
            OSStatus err = AudioDeviceGetProperty(driver->device_id, 0, kAudioDeviceSectionGlobal, kAudioDevicePropertyNominalSampleRate, &outSize, &sampleRate);
            if (err != noErr) {
                jack_error("Cannot get current sample rate");
                return kAudioHardwareUnsupportedOperationError;
            }
            JCALog("JackCoreAudioDriver::NotificationCallback kAudioDevicePropertyNominalSampleRate %ld\n", (long)sampleRate);
            outSize = sizeof(AudioStreamBasicDescription);

            // Update SR for input
            err = AudioUnitGetProperty(driver->au_hal, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Input, 0, &srcFormat, &outSize);
            if (err != noErr) {
                jack_error("Error calling AudioUnitSetProperty - kAudioUnitProperty_StreamFormat kAudioUnitScope_Input");
            }
            srcFormat.mSampleRate = sampleRate;
            err = AudioUnitSetProperty(driver->au_hal, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Input, 0, &srcFormat, outSize);
            if (err != noErr) {
                jack_error("Error calling AudioUnitSetProperty - kAudioUnitProperty_StreamFormat kAudioUnitScope_Input");
            }

            // Update SR for output
            err = AudioUnitGetProperty(driver->au_hal, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Output, 1, &dstFormat, &outSize);
            if (err != noErr) {
                jack_error("Error calling AudioUnitSetProperty - kAudioUnitProperty_StreamFormat kAudioUnitScope_Output");
            }
            dstFormat.mSampleRate = sampleRate;
            err = AudioUnitSetProperty(driver->au_hal, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Output, 1, &dstFormat, outSize);
            if (err != noErr) {
                jack_error("Error calling AudioUnitSetProperty - kAudioUnitProperty_StreamFormat kAudioUnitScope_Output");
            }
            break;
        }
    }
    return noErr;
}

Developer: Llefjord, project: jack1, lines: 51


Example 6: sizeof

// kAudioUnitProperty_ClassInfo returns the unit's full state as a CFPropertyList
OSStatus	CAAudioUnit::GetAUPreset (CFPropertyListRef &outData) const
{
    UInt32 dataSize = sizeof(outData);
    return AudioUnitGetProperty (AU(), kAudioUnitProperty_ClassInfo,
                                kAudioUnitScope_Global, 0,
                                &outData, &dataSize);
}

Developer: 63n, project: ardour, lines: 7


Example 7: AudioUnitGetPropertyInfo

bool CCoreAudioUnit::GetSupportedChannelLayouts(AudioChannelLayoutList* pLayouts)
{
  if (!m_audioUnit || !pLayouts)
    return false;

  UInt32 propSize = 0;
  Boolean writable = false;
  OSStatus ret = AudioUnitGetPropertyInfo(m_audioUnit,
    kAudioUnitProperty_SupportedChannelLayoutTags, kAudioUnitScope_Input, 0, &propSize, &writable);
  if (ret)
  {
    CLog::Log(LOGERROR, "CCoreAudioUnit::GetSupportedChannelLayouts: "
      "Unable to retrieve supported channel layout property info. Error = %s", GetError(ret).c_str());
    return false;
  }
  UInt32 layoutCount = propSize / sizeof(AudioChannelLayoutTag);
  AudioChannelLayoutTag* pSuppLayouts = new AudioChannelLayoutTag[layoutCount];
  ret = AudioUnitGetProperty(m_audioUnit,
    kAudioUnitProperty_SupportedChannelLayoutTags, kAudioUnitScope_Output, 0, pSuppLayouts, &propSize);
  if (ret)
  {
    CLog::Log(LOGERROR, "CCoreAudioUnit::GetSupportedChannelLayouts: "
      "Unable to retrieve supported channel layouts. Error = %s", GetError(ret).c_str());
    return false;
  }

  for (UInt32 layout = 0; layout < layoutCount; layout++)
    pLayouts->push_back(pSuppLayouts[layout]);
  delete[] pSuppLayouts;
  return true;
}

Developer: JohnsonAugustine, project: xbmc-rbp, lines: 30


Example 8: sizeof

Float32 CAAUParameter::GetValueFromString(CFStringRef str) const
{
    if (ValuesHaveStrings())
    {
        AudioUnitParameterValueFromString valueString;
        valueString.inParamID = mParameterID;
        valueString.inString = str;
        UInt32 propertySize = sizeof(valueString);

        OSStatus err = AudioUnitGetProperty (mAudioUnit,
                                        kAudioUnitProperty_ParameterValueFromString,
                                        mScope,
                                        mParameterID,
                                        &valueString,
                                        &propertySize);

        if (err == noErr) {
            return valueString.outValue;
        }
    }

    Float32 paramValue = mParamInfo.defaultValue;
    char valstr[32];
    CFStringGetCString(str, valstr, sizeof(valstr), kCFStringEncodingUTF8);
    sscanf(valstr, "%f", &paramValue);
    return paramValue;
}

Developer: DanielAeolusLaude, project: ardour, lines: 27


Example 9: sizeof

// ----------------------------------------------------------
void ofxAudioUnitInput::connectTo(ofxAudioUnit &otherUnit, int destinationBus, int sourceBus)
// ----------------------------------------------------------
{
    AURenderCallbackStruct callback;
    callback.inputProc = pullCallback;
    callback.inputProcRefCon = &_renderContext;

    AudioStreamBasicDescription ASBD;
    UInt32 ASBDSize = sizeof(ASBD);

    OFXAU_RETURN(AudioUnitGetProperty(*otherUnit.getUnit(),
                                      kAudioUnitProperty_StreamFormat,
                                      kAudioUnitScope_Input,
                                      destinationBus,
                                      &ASBD,
                                      &ASBDSize),
                 "getting hardware input destination's format");

    OFXAU_RETURN(AudioUnitSetProperty(*_unit,
                                      kAudioUnitProperty_StreamFormat,
                                      kAudioUnitScope_Output,
                                      1,
                                      &ASBD,
                                      sizeof(ASBD)),
                 "setting hardware input's output format");

    otherUnit.setRenderCallback(callback, destinationBus);
}

Developer: jasonlevine, project: ofxAudioUnit, lines: 29


Example 10: sizeof

// ----------------------------------------------------------
ofxAudioUnit& ofxAudioUnitInput::connectTo(ofxAudioUnit &otherUnit, int destinationBus, int sourceBus)
// ----------------------------------------------------------
{
    AudioStreamBasicDescription ASBD;
    UInt32 ASBDSize = sizeof(ASBD);

    OFXAU_PRINT(AudioUnitGetProperty(otherUnit,
                                     kAudioUnitProperty_StreamFormat,
                                     kAudioUnitScope_Input,
                                     destinationBus,
                                     &ASBD,
                                     &ASBDSize),
                "getting hardware input destination's format");

    OFXAU_PRINT(AudioUnitSetProperty(*_unit,
                                     kAudioUnitProperty_StreamFormat,
                                     kAudioUnitScope_Output,
                                     1,
                                     &ASBD,
                                     sizeof(ASBD)),
                "setting hardware input's output format");

    AURenderCallbackStruct callback = {PullCallback, &_impl->ctx};
    otherUnit.setRenderCallback(callback, destinationBus);
    return otherUnit;
}

Developer: microcosm, project: ofxAudioUnit, lines: 27


Example 11: AudioUnitSetProperty

void AudioDestinationIOS::configure()
{
    // Set render callback
    AURenderCallbackStruct input;
    input.inputProc = inputProc;
    input.inputProcRefCon = this;
    OSStatus result = AudioUnitSetProperty(m_outputUnit, kAudioUnitProperty_SetRenderCallback, kAudioUnitScope_Input, 0, &input, sizeof(input));
    ASSERT(!result);

    // Set stream format
    AudioStreamBasicDescription streamFormat;
    UInt32 size = sizeof(AudioStreamBasicDescription);
    result = AudioUnitGetProperty(m_outputUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Output, 0, (void*)&streamFormat, &size);
    ASSERT(!result);

    const int bytesPerFloat = sizeof(Float32);
    const int bitsPerByte = 8;
    streamFormat.mSampleRate = m_sampleRate;
    streamFormat.mFormatID = kAudioFormatLinearPCM;
    streamFormat.mFormatFlags = kAudioFormatFlagsNativeFloatPacked | kAudioFormatFlagIsNonInterleaved;
    streamFormat.mBytesPerPacket = bytesPerFloat;
    streamFormat.mFramesPerPacket = 1;
    streamFormat.mBytesPerFrame = bytesPerFloat;
    streamFormat.mChannelsPerFrame = 2;
    streamFormat.mBitsPerChannel = bitsPerByte * bytesPerFloat;
    result = AudioUnitSetProperty(m_outputUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Input, 0, (void*)&streamFormat, sizeof(AudioStreamBasicDescription));
    ASSERT(!result);

    AudioSession::sharedSession().setPreferredBufferSize(kPreferredBufferSize);
}

Developer: AndriyKalashnykov, project: webkit, lines: 32


Example 12: sizeof

// read the effect's bypass flag on the global scope (1 = bypassed)
const Boolean AudioUnitNode::doBypass() const
{
    UInt32 doB;
    UInt32 size = sizeof(UInt32);
    AudioUnitGetProperty(mUnit, kAudioUnitProperty_BypassEffect, kAudioUnitScope_Global, 0, &doB, &size);
    return doB == 1 ? true : false;
}

Developer: satoshi-takano, project: ExtendedAudioUnit, lines: 7


Example 13: _impl

// ----------------------------------------------------------
ofxAudioUnitInput::ofxAudioUnitInput(unsigned int samplesToBuffer)
: _impl(new InputImpl)
// ----------------------------------------------------------
{
    _desc = inputDesc;
    initUnit();

    AudioStreamBasicDescription ASBD = {0};
    UInt32 ASBD_size = sizeof(ASBD);
    OFXAU_PRINT(AudioUnitGetProperty(*_unit,
                                     kAudioUnitProperty_StreamFormat,
                                     kAudioUnitScope_Output,
                                     1,
                                     &ASBD,
                                     &ASBD_size),
                "getting input ASBD");

    _impl->ctx.inputUnit  = _unit;
    _impl->ctx.bufferList = AudioBufferListRef(AudioBufferListAlloc(ASBD.mChannelsPerFrame, 1024), AudioBufferListRelease);
    _impl->ctx.circularBuffers.resize(ASBD.mChannelsPerFrame);
    _impl->isReady = false;

#if !TARGET_OS_IPHONE
    _impl->inputDeviceID = DefaultAudioInputDevice();
#endif

    for(int i = 0; i < ASBD.mChannelsPerFrame; i++) {
        TPCircularBufferInit(&_impl->ctx.circularBuffers[i], samplesToBuffer * sizeof(Float32));
    }
}

Developer: microcosm, project: ofxAudioUnit, lines: 31


Example 14: gst_core_audio_get_samples_and_latency_impl

static gboolean
gst_core_audio_get_samples_and_latency_impl (GstCoreAudio * core_audio,
    gdouble rate, guint * samples, gdouble * latency)
{
  OSStatus status;
  UInt32 size = sizeof (double);

  if (core_audio->is_passthrough) {
    *samples = _audio_device_get_latency (core_audio->device_id);
    *samples += _audio_stream_get_latency (core_audio->stream_id);
    *latency = (double) *samples / rate;
  } else {
    status = AudioUnitGetProperty (core_audio->audiounit,
        kAudioUnitProperty_Latency, kAudioUnitScope_Global, 0, /* N/A for global */
        latency, &size);
    if (status) {
      GST_WARNING_OBJECT (core_audio->osxbuf, "Failed to get latency: %d",
          (int) status);
      *samples = 0;
      return FALSE;
    }

    *samples = *latency * rate;
  }
  return TRUE;
}

Developer: Distrotech, project: gst-plugins-good, lines: 26


Example 15: ca_get_hardware_latency

static int64_t ca_get_hardware_latency(struct ao *ao) {
    struct priv *p = ao->priv;

    double audiounit_latency_sec = 0.0;
    uint32_t size = sizeof(audiounit_latency_sec);
    OSStatus err = AudioUnitGetProperty(
            p->audio_unit,
            kAudioUnitProperty_Latency,
            kAudioUnitScope_Global,
            0,
            &audiounit_latency_sec,
            &size);
    CHECK_CA_ERROR("cannot get audio unit latency");

    uint32_t frames = 0;
    err = CA_GET_O(p->device, kAudioDevicePropertyLatency, &frames);
    CHECK_CA_ERROR("cannot get device latency");

    uint64_t audiounit_latency_us = audiounit_latency_sec * 1e6;
    uint64_t device_latency_us    = ca_frames_to_us(ao, frames);

    MP_VERBOSE(ao, "audiounit latency [us]: %lld\n", audiounit_latency_us);
    MP_VERBOSE(ao, "device latency [us]: %lld\n", device_latency_us);

    return audiounit_latency_us + device_latency_us;

coreaudio_error:
    return 0;
}

Developer: BILIHUBSU, project: mpv, lines: 29


Example 16: sizeof

bool CAUMatrixMixer::InitMatrixMixerVolumes()
{
  // Fetch the channel configuration
  UInt32 dims[2];
  UInt32 size = sizeof(dims);
  OSStatus ret = AudioUnitGetProperty(m_audioUnit,
    kAudioUnitProperty_MatrixDimensions, kAudioUnitScope_Global, 0, dims, &size);
  if (ret)
  {
    CLog::Log(LOGERROR, "CAUMatrixMixer::Initialize:: "
      "Get matrix dimesion. Error = %s", GetError(ret).c_str());
    return false;
  }

  // Initialize global, input, and output levels
  if (!SetGlobalVolume(1.0f))
    return false;
  for (UInt32 i = 0; i < dims[0]; i++)
    if (!SetInputVolume(i, 1.0f))
      return false;
  for (UInt32 i = 0; i < dims[1]; i++)
    if (!SetOutputVolume(i, 1.0f))
      return false;

  return true;
}

Developer: JohnsonAugustine, project: xbmc-rbp, lines: 26


Example 17: Disconnect

//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
//	AUInputElement::SetConnection
//
//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
void	AUInputElement::SetConnection(const AudioUnitConnection &conn)
{
    if (conn.sourceAudioUnit == 0) {
        Disconnect();
        return;
    }

    mInputType = kFromConnection;
    mConnection = conn;
    AllocateBuffer();

    mConnInstanceStorage = NULL;

#if !CA_USE_AUDIO_PLUGIN_ONLY
    mConnRenderProc = NULL;
    UInt32 size = sizeof(AudioUnitRenderProc);
    OSStatus result = AudioUnitGetProperty(	conn.sourceAudioUnit,
                            kAudioUnitProperty_FastDispatch,
                            kAudioUnitScope_Global,
                            kAudioUnitRenderSelect,
                            &mConnRenderProc,
                            &size);
    if (result == noErr)
        mConnInstanceStorage = CMgr_GetComponentInstanceStorage (conn.sourceAudioUnit);
    else
        mConnRenderProc = NULL;
#endif
}

Developer: abscura, project: audiounitjs, lines: 32


Example 18: gst_core_audio_open

gboolean
gst_core_audio_open (GstCoreAudio * core_audio)
{
  if (!gst_core_audio_open_impl (core_audio))
    return FALSE;

  if (core_audio->is_src) {
    AudioStreamBasicDescription asbd_in;
    UInt32 propertySize;
    OSStatus status;
    GstOsxAudioSrc *src =
        GST_OSX_AUDIO_SRC (GST_OBJECT_PARENT (core_audio->osxbuf));

    propertySize = sizeof (asbd_in);
    status = AudioUnitGetProperty (core_audio->audiounit,
        kAudioUnitProperty_StreamFormat,
        kAudioUnitScope_Input, 1, &asbd_in, &propertySize);

    if (status) {
      AudioComponentInstanceDispose (core_audio->audiounit);
      core_audio->audiounit = NULL;
      GST_WARNING_OBJECT (core_audio,
          "Unable to obtain device properties: %d", (int) status);
      return FALSE;
    } else {
      src->deviceChannels = asbd_in.mChannelsPerFrame;
    }
  }

  return TRUE;
}

Developer: jcaden, project: gst-plugins-good, lines: 33


Example 19: AudioUnitGetProperty

bool CAUOutputDevice::EnableInputOuput()
{
  if (!m_audioUnit)
    return false;

  OSStatus ret;
  UInt32 enable;
  UInt32 hasio;
  UInt32 size = sizeof(UInt32);

  ret = AudioUnitGetProperty(m_audioUnit, kAudioOutputUnitProperty_HasIO, kAudioUnitScope_Input, 1, &hasio, &size);

  if (hasio)
  {
    enable = 1;
    ret = AudioUnitSetProperty(m_audioUnit, kAudioOutputUnitProperty_EnableIO, kAudioUnitScope_Input, kInputBus, &enable, sizeof(enable));
    if (ret)
    {
      CLog::Log(LOGERROR, "CAUOutputDevice::EnableInputOuput:: Unable to enable input on bus 1. Error = %s", GetError(ret).c_str());
      return false;
    }

    enable = 1;
    ret = AudioUnitSetProperty(m_audioUnit, kAudioOutputUnitProperty_EnableIO, kAudioUnitScope_Output, kOutputBus, &enable, sizeof(enable));
    if (ret)
    {
      CLog::Log(LOGERROR, "CAUOutputDevice::EnableInputOuput:: Unable to disable output on bus 0. Error = %s", GetError(ret).c_str());
      return false;
    }
  }

  return true;
}

Developer: midripps, project: xbmc, lines: 33


Example 20: sizeof

bool CCoreAudioMixMap::SetMixingMatrix(CAUMatrixMixer *mixerUnit,
  CCoreAudioMixMap *mixMap, AudioStreamBasicDescription *inputFormat,
  AudioStreamBasicDescription *fmt, int channelOffset)
{
  if (!mixerUnit || !inputFormat || !fmt)
    return false;

  // Fetch the mixing unit size
  UInt32 dims[2];
  UInt32 size = sizeof(dims);
  AudioUnitGetProperty(mixerUnit->GetUnit(),
    kAudioUnitProperty_MatrixDimensions, kAudioUnitScope_Global, 0, dims, &size);

  if (inputFormat->mChannelsPerFrame + channelOffset > dims[0])
  {
    CLog::Log(LOGERROR, "CCoreAudioMixMap::SetMixingMatrix - input format doesn't fit mixer size %u+%u > %u"
                      , inputFormat->mChannelsPerFrame, channelOffset, dims[0]);
    return false;
  }

  if (fmt->mChannelsPerFrame > dims[1])
  {
    CLog::Log(LOGERROR, "CCoreAudioMixMap::SetMixingMatrix - output format doesn't fit mixer size %u > %u"
              , fmt->mChannelsPerFrame, dims[1]);
    return false;
  }

  if (fmt->mChannelsPerFrame < dims[1])
  {
    CLog::Log(LOGWARNING, "CCoreAudioMixMap::SetMixingMatrix - output format doesn't specify all outputs %u < %u"
              , fmt->mChannelsPerFrame, dims[1]);
  }

  // Configure the mixing matrix
  Float32* val = (Float32*)*mixMap;
  for (UInt32 i = 0; i < inputFormat->mChannelsPerFrame; ++i)
  {
    UInt32 j = 0;
    for (; j < fmt->mChannelsPerFrame; ++j)
    {
      AudioUnitSetParameter(mixerUnit->GetUnit(),
        kMatrixMixerParam_Volume, kAudioUnitScope_Global, ( (i + channelOffset) << 16 ) | j, *val++, 0);
    }
    // zero out additional outputs from this input
    for (; j < dims[1]; ++j)
    {
      AudioUnitSetParameter(mixerUnit->GetUnit(),
        kMatrixMixerParam_Volume, kAudioUnitScope_Global, ( (i + channelOffset) << 16 ) | j, 0.0f, 0);
    }
  }

  CLog::Log(LOGDEBUG, "CCoreAudioGraph::Open: "
    "Mixer Output Format: %d channels, %0.1f kHz, %d bits, %d bytes per frame",
    (int)fmt->mChannelsPerFrame, fmt->mSampleRate / 1000.0f, (int)fmt->mBitsPerChannel, (int)fmt->mBytesPerFrame);

  if (!mixerUnit->InitMatrixMixerVolumes())
    return false;

  return true;
}

Developer: 1c0n, project: xbmc, lines: 60


Example 21: debug

void auLoader::printInfo() const
{
  UInt32 size;
  void *data;
  Boolean write;
  ComponentResult err = noErr;

  debug(LOG_INFO, "Plugin Properties:");
  for(int i = 0; i < sizeof(_AUCODES) / sizeof(UInt32); ++i) {
    data = 0;
    err = AudioUnitGetPropertyInfo(m_plugin, _AUCODES[i], kAudioUnitScope_Global, 0, &size, &write);
    if(size && err == noErr) {
      if(write) {
        debug(LOG_INFO, "  %s: %d bytes (+ writeable)", AUPropertyStr(_AUCODES[i]), size);
      }
      else {
        debug(LOG_INFO, "  %s: %d bytes", AUPropertyStr(_AUCODES[i]), size);
      }
    }

    if(data) {
      free(data);
    }
  }

  // Get parameter information
  AudioUnitParameterInfo auinfo;
  UInt32 *plist;
  int num_params = 0;
  err = AudioUnitGetPropertyInfo(m_plugin, kAudioUnitProperty_ParameterList, kAudioUnitScope_Global, 0, &size, &write);
  if(err == noErr && size > 0) {
    num_params = size / sizeof(UInt32);
    plist = new UInt32[num_params];
    err = AudioUnitGetProperty(m_plugin, kAudioUnitProperty_ParameterList, kAudioUnitScope_Global, 0, plist, &size);
  }

  debug(LOG_INFO, "Parameters (%d total):", num_params);
  for(int i = 0; i < num_params; ++i) {
    err = AudioUnitGetPropertyInfo(m_plugin, kAudioUnitProperty_ParameterInfo, kAudioUnitScope_Global, plist[i], &size, &write);
    if(size == sizeof(AudioUnitParameterInfo) && err == noErr) {
      err = AudioUnitGetProperty(m_plugin, kAudioUnitProperty_ParameterInfo, kAudioUnitScope_Global, plist[i], &auinfo, &size);
      if(err == noErr) {
        debug(LOG_INFO, "  %d: %s, type %d, min %f, max %f", plist[i], auinfo.name, auinfo.unit, auinfo.minValue, auinfo.maxValue);
      }
    }
  }
}

Developer: Epitek, project: KickMaker, lines: 47


Example 22: sizeof

void AudioDestinationIOS::frameSizeChangedProc(void *inRefCon, AudioUnit, AudioUnitPropertyID, AudioUnitScope, AudioUnitElement)
{
    AudioDestinationIOS* audioOutput = static_cast<AudioDestinationIOS*>(inRefCon);
    UInt32 bufferSize = 0;
    UInt32 dataSize = sizeof(bufferSize);
    AudioUnitGetProperty(audioOutput->m_outputUnit, kAudioUnitProperty_MaximumFramesPerSlice, kAudioUnitScope_Global, 0, (void*)&bufferSize, &dataSize);
    fprintf(stderr, ">>>> frameSizeChanged = %lu\n", static_cast<unsigned long>(bufferSize));
}

Developer: AndriyKalashnykov, project: webkit, lines: 8


Example 23: XWindow

AUEditWindow::AUEditWindow(XController *owner, IBNibRef nibRef, CFStringRef name, AudioUnit editUnit, bool forceGeneric) :
    XWindow(owner, nibRef, name),
    mEditUnit(editUnit)
{
    OSStatus err;
    ComponentDescription editorComponentDesc;

    // set up to use generic UI component
    editorComponentDesc.componentType = kAudioUnitCarbonViewComponentType;
    editorComponentDesc.componentSubType = 'gnrc';
    editorComponentDesc.componentManufacturer = 'appl';
    editorComponentDesc.componentFlags = 0;
    editorComponentDesc.componentFlagsMask = 0;

    if (!forceGeneric) {
        // ask the AU for its first editor component
        UInt32 propertySize;
        err = AudioUnitGetPropertyInfo(editUnit, kAudioUnitProperty_GetUIComponentList,
            kAudioUnitScope_Global, 0, &propertySize, NULL);
        if (!err) {
            int nEditors = propertySize / sizeof(ComponentDescription);
            ComponentDescription *editors = new ComponentDescription[nEditors];
            err = AudioUnitGetProperty(editUnit, kAudioUnitProperty_GetUIComponentList,
                kAudioUnitScope_Global, 0, editors, &propertySize);
            if (!err)
                // just pick the first one for now
                editorComponentDesc = editors[0];
            delete[] editors;
        }
    }

    Component editComp = FindNextComponent(NULL, &editorComponentDesc);

    verify_noerr(OpenAComponent(editComp, &mEditView));

    ControlRef rootControl;
    verify_noerr(GetRootControl(mWindow, &rootControl));

    Rect r;
    ControlRef viewPane;
    GetControlBounds(rootControl, &r);
    Float32Point location = { 0., 0. };
    Float32Point size = { Float32(r.right), Float32(r.bottom) };
    verify_noerr(AudioUnitCarbonViewCreate(mEditView, mEditUnit, mWindow, rootControl, &location, &size, &viewPane));

    AudioUnitCarbonViewSetEventListener(mEditView, EventListener, this);

    GetControlBounds(viewPane, &r);
    size.x = r.right - r.left; size.y = r.bottom - r.top;
    SetSize(size);

    Show();

/*
    EventLoopTimerRef timer;
    RequireNoErr(
        InstallEventLoopTimer(
            GetMainEventLoop(), 5., 0., TimerProc, this, &timer));
*/
}

Developer: arnelh, project: Examples, lines: 56


Example 24: sizeof

// query whether the output unit's I/O is currently running
bool CCoreAudioUnit::IsRunning()
{
  if (!m_Component)
    return false;

  UInt32 isRunning = 0;
  UInt32 size = sizeof(isRunning);
  AudioUnitGetProperty(m_Component, kAudioOutputUnitProperty_IsRunning, kAudioUnitScope_Global, 0, &isRunning, &size);
  return (isRunning != 0);
}

Developer: flyingtime, project: boxee, lines: 10



Note: The AudioUnitGetProperty examples in this article were collected from GitHub, MSDocs, and other source-code and documentation platforms. The snippets come from open-source projects contributed by many developers; copyright remains with the original authors. Please consult the corresponding project's license before redistributing or reusing the code, and do not reproduce without permission.

