
Self-Study Tutorial: C++ AudioUnitSetProperty Function Code Examples

51自学网 2021-06-01 19:48:29
This tutorial, C++ AudioUnitSetProperty Function Code Examples, is quite practical; we hope it helps you.

This article collects typical usage examples of the AudioUnitSetProperty function in C++. If you have been struggling with questions such as: What exactly does C++'s AudioUnitSetProperty do? How is AudioUnitSetProperty used? Where can I find usage examples? Then the hand-picked function code examples here may be of help.

A total of 28 code examples of the AudioUnitSetProperty function are shown below, sorted by popularity by default.
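
Before working through the examples, it helps to keep the general call pattern in mind: AudioUnitSetProperty takes the target AudioUnit, a property ID, a scope (input, output, or global), an element (bus) number, a pointer to the new property value, and the size of that value in bytes, and it returns an OSStatus that is noErr on success. The sketch below illustrates this pattern by setting a stream format on an already-created output unit; the unit parameter, the setClientFormat helper name, and the 44.1 kHz stereo float format are illustrative assumptions, not code from any of the projects quoted below.

#include <AudioToolbox/AudioToolbox.h>

// Minimal sketch: set the client-side stream format on an output AudioUnit
// `unit` that is assumed to have been created elsewhere.
static OSStatus setClientFormat(AudioUnit unit)
{
    AudioStreamBasicDescription fmt = {0};
    fmt.mSampleRate       = 44100.0;             // assumed sample rate
    fmt.mFormatID         = kAudioFormatLinearPCM;
    fmt.mFormatFlags      = kAudioFormatFlagsNativeFloatPacked | kAudioFormatFlagIsNonInterleaved;
    fmt.mChannelsPerFrame = 2;
    fmt.mFramesPerPacket  = 1;
    fmt.mBitsPerChannel   = 8 * sizeof(Float32);
    fmt.mBytesPerFrame    = sizeof(Float32);     // non-interleaved: one channel per buffer
    fmt.mBytesPerPacket   = sizeof(Float32);

    // Arguments: unit, property ID, scope, element (bus), value pointer, value size.
    OSStatus err = AudioUnitSetProperty(unit,
                                        kAudioUnitProperty_StreamFormat,
                                        kAudioUnitScope_Input,
                                        0,       // element (bus) 0
                                        &fmt,
                                        sizeof(fmt));
    return err;                                  // noErr (0) on success
}

Most of the examples that follow are variations on this pattern, differing mainly in which property, scope, and bus they target.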

Example 1: zeromem

Error AudioDriverIphone::init() {

	active = false;
	channels = 2;

	AudioStreamBasicDescription strdesc;
	strdesc.mFormatID = kAudioFormatLinearPCM;
	strdesc.mFormatFlags = kLinearPCMFormatFlagIsSignedInteger | kLinearPCMFormatFlagIsPacked;
	strdesc.mChannelsPerFrame = channels;
	strdesc.mSampleRate = 44100;
	strdesc.mFramesPerPacket = 1;
	strdesc.mBitsPerChannel = 16;
	strdesc.mBytesPerFrame =
			strdesc.mBitsPerChannel * strdesc.mChannelsPerFrame / 8;
	strdesc.mBytesPerPacket =
			strdesc.mBytesPerFrame * strdesc.mFramesPerPacket;

	OSStatus result = noErr;
	AURenderCallbackStruct callback;
	AudioComponentDescription desc;
	AudioComponent comp = NULL;
	const AudioUnitElement output_bus = 0;
	const AudioUnitElement bus = output_bus;
	const AudioUnitScope scope = kAudioUnitScope_Input;

	zeromem(&desc, sizeof(desc));
	desc.componentType = kAudioUnitType_Output;
	desc.componentSubType = kAudioUnitSubType_RemoteIO;  /* !!! FIXME: ? */
	comp = AudioComponentFindNext(NULL, &desc);
	desc.componentManufacturer = kAudioUnitManufacturer_Apple;

	result = AudioComponentInstanceNew(comp, &audio_unit);
	ERR_FAIL_COND_V(result != noErr, FAILED);
	ERR_FAIL_COND_V(comp == NULL, FAILED);

	result = AudioUnitSetProperty(audio_unit,
								  kAudioUnitProperty_StreamFormat,
								  scope, bus, &strdesc, sizeof(strdesc));
	ERR_FAIL_COND_V(result != noErr, FAILED);

	zeromem(&callback, sizeof(AURenderCallbackStruct));
	callback.inputProc = &AudioDriverIphone::output_callback;
	callback.inputProcRefCon = this;
	result = AudioUnitSetProperty(audio_unit,
								  kAudioUnitProperty_SetRenderCallback,
								  scope, bus, &callback, sizeof(callback));
	ERR_FAIL_COND_V(result != noErr, FAILED);

	result = AudioUnitInitialize(audio_unit);
	ERR_FAIL_COND_V(result != noErr, FAILED);

	result = AudioOutputUnitStart(audio_unit);
	ERR_FAIL_COND_V(result != noErr, FAILED);

	const int samples = 1024;
	samples_in = memnew_arr(int32_t, samples); // whatever
	buffer_frames = samples / channels;

	return FAILED;
};

Developer: AMG194, Project: godot, Lines of code: 60


Example 2: AUGraphAddNode

OSStatus FCoreAudioSoundSource::CreateAudioUnit( OSType Type, OSType SubType, OSType Manufacturer, AudioStreamBasicDescription* InputFormat, AudioStreamBasicDescription* OutputFormat, AUNode* OutNode, AudioUnit* OutUnit )
{
	AudioComponentDescription Desc;
	Desc.componentFlags = 0;
	Desc.componentFlagsMask = 0;
	Desc.componentType = Type;
	Desc.componentSubType = SubType;
	Desc.componentManufacturer = Manufacturer;

	OSStatus Status = AUGraphAddNode( AudioDevice->GetAudioUnitGraph(), &Desc, OutNode );
	if( Status == noErr )
	{
		Status = AUGraphNodeInfo( AudioDevice->GetAudioUnitGraph(), *OutNode, NULL, OutUnit );
	}

	if( Status == noErr )
	{
		if( InputFormat )
		{
			Status = AudioUnitSetProperty( *OutUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Input, 0, InputFormat, sizeof( AudioStreamBasicDescription ) );
		}
		if( Status == noErr )
		{
			if( OutputFormat )
			{
				Status = AudioUnitSetProperty( *OutUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Output, 0, OutputFormat, sizeof( AudioStreamBasicDescription ) );
			}
		}
	}

	return Status;
}

Developer: Tigrouzen, Project: UnrealEngine-4, Lines of code: 32


Example 3: CFStringCreateWithCString

// ----------------------------------------------------------
void ofxAudioUnitNetReceive::connectToHost(const std::string &address, unsigned long port)
// ----------------------------------------------------------
{
    stringstream ss;
    ss << address << ":" << port;

    CFStringRef hostName = CFStringCreateWithCString(kCFAllocatorDefault,
                                                     ss.str().c_str(),
                                                     kCFStringEncodingUTF8);

    OFXAU_PRINT(AudioUnitSetProperty(*_unit,
                                     kAUNetReceiveProperty_Hostname,
                                     kAudioUnitScope_Global,
                                     0,
                                     &hostName,
                                     sizeof(hostName)),
                "setting net receive host name");

    // setting net send disconnect to 0 to connect net receive because that makes sense
    UInt32 connect = 0;
    OFXAU_PRINT(AudioUnitSetProperty(*_unit,
                                     kAUNetSendProperty_Disconnect,
                                     kAudioUnitScope_Global,
                                     0,
                                     &connect,
                                     sizeof(connect)),
                "connecting net receive");

    CFRelease(hostName);
}

Developer: ruxrux, Project: ofxAudioUnit, Lines of code: 30


Example 4: audiounits_start

static int audiounits_start(void *usr) {
	au_instance_t *ap = (au_instance_t*) usr;
	OSStatus err;
	if (ap->kind == AI_RECORDER) {
#if defined(MAC_OS_X_VERSION_10_5) && (MAC_OS_X_VERSION_MIN_REQUIRED>=MAC_OS_X_VERSION_10_5)
		err = AudioDeviceStart(ap->inDev, ap->inIOProcID);
#else
		err = AudioDeviceStart(ap->inDev, inputRenderProc);
#endif
		if (err) Rf_error("unable to start recording (%08x)", err);
	} else {
		AURenderCallbackStruct renderCallback = { outputRenderProc, usr };
		ap->done = NO;
		/* set format */
		ap->fmtOut.mSampleRate = ap->sample_rate;
		err = AudioUnitSetProperty(ap->outUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Input, 0, &ap->fmtOut, sizeof(ap->fmtOut));
		if (err) Rf_error("unable to set output audio format (%08x)", err);
		/* set callback */
		err = AudioUnitSetProperty(ap->outUnit, kAudioUnitProperty_SetRenderCallback, kAudioUnitScope_Input, 0, &renderCallback, sizeof(renderCallback));
		if (err) Rf_error("unable to register audio callback (%08x)", err);
		/* start audio */
		err = AudioOutputUnitStart(ap->outUnit);
		if (err) Rf_error("unable to start playback (%08x)", err);
	}
	return 1;
}

Developer: brezniczky, Project: audio, Lines of code: 26


Example 5: AudioUnitSetProperty

void AudioDestinationIOS::configure()
{
    // Set render callback
    AURenderCallbackStruct input;
    input.inputProc = inputProc;
    input.inputProcRefCon = this;
    OSStatus result = AudioUnitSetProperty(m_outputUnit, kAudioUnitProperty_SetRenderCallback, kAudioUnitScope_Input, 0, &input, sizeof(input));
    ASSERT(!result);

    // Set stream format
    AudioStreamBasicDescription streamFormat;

    UInt32 size = sizeof(AudioStreamBasicDescription);
    result = AudioUnitGetProperty(m_outputUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Output, 0, (void*)&streamFormat, &size);
    ASSERT(!result);

    const int bytesPerFloat = sizeof(Float32);
    const int bitsPerByte = 8;
    streamFormat.mSampleRate = m_sampleRate;
    streamFormat.mFormatID = kAudioFormatLinearPCM;
    streamFormat.mFormatFlags = kAudioFormatFlagsNativeFloatPacked | kAudioFormatFlagIsNonInterleaved;
    streamFormat.mBytesPerPacket = bytesPerFloat;
    streamFormat.mFramesPerPacket = 1;
    streamFormat.mBytesPerFrame = bytesPerFloat;
    streamFormat.mChannelsPerFrame = 2;
    streamFormat.mBitsPerChannel = bitsPerByte * bytesPerFloat;
    result = AudioUnitSetProperty(m_outputUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Input, 0, (void*)&streamFormat, sizeof(AudioStreamBasicDescription));
    ASSERT(!result);

    AudioSession::sharedSession().setPreferredBufferSize(kPreferredBufferSize);
}

Developer: AndriyKalashnykov, Project: webkit, Lines of code: 32


Example 6: AudioUnitSetProperty

OSStatus CAPlayThrough::EnableIO()
{
	OSStatus err = noErr;
	UInt32 enableIO;

	///////////////
	// ENABLE IO (INPUT)
	// You must enable the Audio Unit (AUHAL) for input and disable output
	// BEFORE setting the AUHAL's current device.

	// Enable input on the AUHAL
	enableIO = 1;
	err = AudioUnitSetProperty(mInputUnit,
							   kAudioOutputUnitProperty_EnableIO,
							   kAudioUnitScope_Input,
							   1, // input element
							   &enableIO,
							   sizeof(enableIO));
	checkErr(err);

	// disable Output on the AUHAL
	enableIO = 0;
	err = AudioUnitSetProperty(mInputUnit,
							   kAudioOutputUnitProperty_EnableIO,
							   kAudioUnitScope_Output,
							   0,   // output element
							   &enableIO,
							   sizeof(enableIO));
	return err;
}

Developer: aranm, Project: CAPlayThrough, Lines of code: 30


Example 7: AudioUnitGetProperty

bool CAUOutputDevice::EnableInputOuput()
{
  if (!m_audioUnit)
    return false;

  OSStatus ret;
  UInt32 enable;
  UInt32 hasio;
  UInt32 size = sizeof(UInt32);

  ret = AudioUnitGetProperty(m_audioUnit, kAudioOutputUnitProperty_HasIO, kAudioUnitScope_Input, 1, &hasio, &size);

  if (hasio)
  {
    enable = 1;
    ret = AudioUnitSetProperty(m_audioUnit, kAudioOutputUnitProperty_EnableIO, kAudioUnitScope_Input, kInputBus, &enable, sizeof(enable));
    if (ret)
    {
      CLog::Log(LOGERROR, "CAUOutputDevice::EnableInputOuput:: Unable to enable input on bus 1. Error = %s", GetError(ret).c_str());
      return false;
    }

    enable = 1;
    ret = AudioUnitSetProperty(m_audioUnit, kAudioOutputUnitProperty_EnableIO, kAudioUnitScope_Output, kOutputBus, &enable, sizeof(enable));
    if (ret)
    {
      CLog::Log(LOGERROR, "CAUOutputDevice::EnableInputOuput:: Unable to disable output on bus 0. Error = %s", GetError(ret).c_str());
      return false;
    }
  }

  return true;
}

Developer: midripps, Project: xbmc, Lines of code: 33


Example 8: AudioUnitSetProperty

void AudioDestinationMac::configure()
{
    // Set render callback
    AURenderCallbackStruct input;
    input.inputProc = inputProc;
    input.inputProcRefCon = this;
    OSStatus result = AudioUnitSetProperty(m_outputUnit, kAudioUnitProperty_SetRenderCallback, kAudioUnitScope_Global, 0, &input, sizeof(input));
    ASSERT(!result);

    // Set stream format
    AudioStreamBasicDescription streamFormat;
    streamFormat.mSampleRate = m_sampleRate;
    streamFormat.mFormatID = kAudioFormatLinearPCM;
    streamFormat.mFormatFlags = kAudioFormatFlagsNativeFloatPacked | kAudioFormatFlagIsNonInterleaved;
    streamFormat.mBitsPerChannel = 8 * sizeof(Float32);
    streamFormat.mChannelsPerFrame = 2;
    streamFormat.mFramesPerPacket = 1;
    streamFormat.mBytesPerPacket = sizeof(Float32);
    streamFormat.mBytesPerFrame = sizeof(Float32);

    result = AudioUnitSetProperty(m_outputUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Input, 0, (void*)&streamFormat, sizeof(AudioStreamBasicDescription));
    ASSERT(!result);

    // Set the buffer frame size.
    UInt32 bufferSize = kBufferSize;
    result = AudioUnitSetProperty(m_outputUnit, kAudioDevicePropertyBufferFrameSize, kAudioUnitScope_Output, 0, (void*)&bufferSize, sizeof(bufferSize));
    ASSERT(!result);

    m_input->configure(streamFormat, bufferSize);
}

Developer: cor3ntin, Project: LabSound, Lines of code: 30


Example 9: RegionForEntireFile

void ofxAudioUnitFilePlayer::prime() {

	_region = RegionForEntireFile(_fileID[0]);

	if(_seekSampleTime) {
		_region.mStartFrame = _seekSampleTime;
		_pauseTimeAccumulator += _seekSampleTime;
	} else if(_pauseTimeStamp.mSampleTime > 0) {
		_region.mStartFrame = _pauseTimeStamp.mSampleTime + _pauseTimeAccumulator;
		_pauseTimeAccumulator += _pauseTimeStamp.mSampleTime;
	} else {
		_pauseTimeAccumulator = 0;
	}

	if(_loopCount > 0) {
		_region.mLoopCount = _loopCount;
	}

	// resetting time-tracking members
	memset(&_pauseTimeStamp, 0, sizeof(_pauseTimeStamp));
	_loopCount = 0;
	_seekSampleTime = 0;

	if(!(_region.mTimeStamp.mFlags & kAudioTimeStampHostTimeValid)) {
		cout << "ofxAudioUnitFilePlayer has no file to play" << endl;
		return;
	}

	OFXAU_RETURN(AudioUnitSetProperty(*_unit,
	                                  kAudioUnitProperty_ScheduledFileIDs,
	                                  kAudioUnitScope_Global,
	                                  0,
	                                  _fileID,
	                                  sizeof(_fileID)),
	             "setting file player's file ID");

	OFXAU_RETURN(AudioUnitSetProperty(*_unit,
	                                  kAudioUnitProperty_ScheduledFileRegion,
	                                  kAudioUnitScope_Global,
	                                  0,
	                                  &_region,
	                                  sizeof(_region)),
	             "setting file player region");

	UInt32 framesToPrime = 0; // 0 = "use the default"
	OFXAU_RETURN(AudioUnitSetProperty(*_unit,
	                                  kAudioUnitProperty_ScheduledFilePrime,
	                                  kAudioUnitScope_Global,
	                                  0,
	                                  &framesToPrime,
	                                  sizeof(framesToPrime)),
	             "priming file player");

	_primed = true;
}

Developer: dferrandizmont, Project: ofxAudioUnit, Lines of code: 55


Example 10: SetupRemoteIO

int SetupRemoteIO (AudioUnit& inRemoteIOUnit, Float64 sampleRate, AURenderCallbackStruct inRenderProc, CAStreamBasicDescription& outFormat)
{
	try {
		// Open the output unit
		AudioComponentDescription desc;
		desc.componentType = kAudioUnitType_Output;
		desc.componentSubType = kAudioUnitSubType_RemoteIO;
		desc.componentManufacturer = kAudioUnitManufacturer_Apple;
		desc.componentFlags = 0;
		desc.componentFlagsMask = 0;

		AudioComponent comp = AudioComponentFindNext(NULL, &desc);

		XThrowIfError(AudioComponentInstanceNew(comp, &inRemoteIOUnit), "couldn't open the remote I/O unit");

		UInt32 zero = 0;
		UInt32 one = 1;

		// enable input
		XThrowIfError(AudioUnitSetProperty(inRemoteIOUnit, kAudioOutputUnitProperty_EnableIO, kAudioUnitScope_Input, 1, &one, sizeof(one)), "couldn't enable input on the remote I/O unit");

		// disable output
		XThrowIfError(AudioUnitSetProperty(inRemoteIOUnit, kAudioOutputUnitProperty_EnableIO, kAudioUnitScope_Output, 0, &zero, sizeof(zero)), "couldn't disable output ");

		// set input callback
		XThrowIfError(AudioUnitSetProperty(inRemoteIOUnit, kAudioOutputUnitProperty_SetInputCallback, kAudioUnitScope_Global, 1, &inRenderProc, sizeof(inRenderProc)), "couldn't set remote i/o input callback");

		// set our required format - LPCM non-interleaved 32 bit floating point
		AudioStreamBasicDescription outFormat;
		outFormat.mSampleRate = sampleRate;
		outFormat.mFormatID = kAudioFormatLinearPCM;
		outFormat.mFormatFlags = kAudioFormatFlagsNativeEndian | kAudioFormatFlagIsPacked | kAudioFormatFlagIsFloat | kAudioFormatFlagIsNonInterleaved;
		outFormat.mFramesPerPacket = 1;
		outFormat.mBytesPerPacket = 4;
		outFormat.mChannelsPerFrame = 1;
		outFormat.mBitsPerChannel = 32;
		outFormat.mBytesPerFrame = 4;

		XThrowIfError(AudioUnitSetProperty(inRemoteIOUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Output, 1, &outFormat, sizeof(outFormat)), "couldn't set the remote I/O unit's output client format");

		XThrowIfError(AudioUnitInitialize(inRemoteIOUnit), "couldn't initialize the remote I/O unit");
	}
	catch (CAXException &e) {
		char buf[256];
		fprintf(stderr, "Error: %s (%s)\n", e.mOperation, e.FormatError(buf));
		return 1;
	}
	catch (...) {
		fprintf(stderr, "An unknown error occurred\n");
		return 1;
	}

	return 0;
}

Developer: 2k13yr, Project: surespot-ios, Lines of code: 54


Example 11: AudioUnitSetProperty

OSStatus	CAAudioUnit::SetPresentPreset (AUPreset &inData)
{
	OSStatus result = AudioUnitSetProperty (AU(), kAudioUnitProperty_PresentPreset,
								kAudioUnitScope_Global, 0,
								&inData, sizeof (AUPreset));
	if (result == kAudioUnitErr_InvalidProperty) {
		result = AudioUnitSetProperty (AU(), kAudioUnitProperty_CurrentPreset,
								kAudioUnitScope_Global, 0,
								&inData, sizeof (AUPreset));
	}
	return result;
}

Developer: 63n, Project: ardour, Lines of code: 12


Example 12: notification

static OSStatus notification(AudioDeviceID inDevice,
							UInt32 inChannel,
							Boolean	isInput,
							AudioDevicePropertyID inPropertyID,
							void* inClientData)
{
    coreaudio_driver_t* driver = (coreaudio_driver_t*)inClientData;
    switch (inPropertyID) {

		case kAudioDeviceProcessorOverload:
			driver->xrun_detected = 1;
			break;

		case kAudioDevicePropertyNominalSampleRate: {
			UInt32 outSize = sizeof(Float64);
			Float64 sampleRate;
			AudioStreamBasicDescription srcFormat, dstFormat;
			OSStatus err = AudioDeviceGetProperty(driver->device_id, 0, kAudioDeviceSectionGlobal, kAudioDevicePropertyNominalSampleRate, &outSize, &sampleRate);
			if (err != noErr) {
				jack_error("Cannot get current sample rate");
				return kAudioHardwareUnsupportedOperationError;
			}
			JCALog("JackCoreAudioDriver::NotificationCallback kAudioDevicePropertyNominalSampleRate %ld\n", (long)sampleRate);
			outSize = sizeof(AudioStreamBasicDescription);

			// Update SR for input
			err = AudioUnitGetProperty(driver->au_hal, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Input, 0, &srcFormat, &outSize);
			if (err != noErr) {
				jack_error("Error calling AudioUnitSetProperty - kAudioUnitProperty_StreamFormat kAudioUnitScope_Input");
			}
			srcFormat.mSampleRate = sampleRate;
			err = AudioUnitSetProperty(driver->au_hal, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Input, 0, &srcFormat, outSize);
			if (err != noErr) {
				jack_error("Error calling AudioUnitSetProperty - kAudioUnitProperty_StreamFormat kAudioUnitScope_Input");
			}

			// Update SR for output
			err = AudioUnitGetProperty(driver->au_hal, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Output, 1, &dstFormat, &outSize);
			if (err != noErr) {
				jack_error("Error calling AudioUnitSetProperty - kAudioUnitProperty_StreamFormat kAudioUnitScope_Output");
			}
			dstFormat.mSampleRate = sampleRate;
			err = AudioUnitSetProperty(driver->au_hal, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Output, 1, &dstFormat, outSize);
			if (err != noErr) {
				jack_error("Error calling AudioUnitSetProperty - kAudioUnitProperty_StreamFormat kAudioUnitScope_Output");
			}
			break;
		}
    }
    return noErr;
}

Developer: Llefjord, Project: jack1, Lines of code: 51


Example 13: sizeof

// ----------------------------------------------------------
ofxAudioUnit& ofxAudioUnitInput::connectTo(ofxAudioUnit &otherUnit, int destinationBus, int sourceBus)
// ----------------------------------------------------------
{
	AudioStreamBasicDescription ASBD;
	UInt32 ASBDSize = sizeof(ASBD);

	OFXAU_PRINT(AudioUnitGetProperty(otherUnit,
									 kAudioUnitProperty_StreamFormat,
									 kAudioUnitScope_Input,
									 destinationBus,
									 &ASBD,
									 &ASBDSize),
				"getting hardware input destination's format");

	OFXAU_PRINT(AudioUnitSetProperty(*_unit,
									 kAudioUnitProperty_StreamFormat,
									 kAudioUnitScope_Output,
									 1,
									 &ASBD,
									 sizeof(ASBD)),
				"setting hardware input's output format");

	AURenderCallbackStruct callback = {PullCallback, &_impl->ctx};
	otherUnit.setRenderCallback(callback, destinationBus);
	return otherUnit;
}

Developer: microcosm, Project: ofxAudioUnit, Lines of code: 27


Example 14: sizeof

OSStatus CAPlayThrough::SetInputDeviceAsCurrent(AudioDeviceID in)
{
    UInt32 size = sizeof(AudioDeviceID);
    OSStatus err = noErr;

	if(in == kAudioDeviceUnknown) //get the default input device if device is unknown
	{
		err = AudioHardwareGetProperty(kAudioHardwarePropertyDefaultInputDevice,
									   &size,
									   &in);
		checkErr(err);
	}

	mInputDevice.Init(in, true);

	//Set the Current Device to the AUHAL.
	//this should be done only after IO has been enabled on the AUHAL.
    err = AudioUnitSetProperty(mInputUnit,
							   kAudioOutputUnitProperty_CurrentDevice,
							   kAudioUnitScope_Global,
							   0,
							   &mInputDevice.mID,
							   sizeof(mInputDevice.mID));
	checkErr(err);
	return err;
}

Developer: aranm, Project: CAPlayThrough, Lines of code: 26


Example 15: Core_CloseAudio

void Core_CloseAudio(_THIS)
{
    OSStatus result;
    struct AURenderCallbackStruct callback;

    /* stop processing the audio unit */
    result = AudioOutputUnitStop (outputAudioUnit);
    if (result != noErr) {
        SDL_SetError("Core_CloseAudio: AudioOutputUnitStop");
        return;
    }

    /* Remove the input callback */
    callback.inputProc = 0;
    callback.inputProcRefCon = 0;

    result = AudioUnitSetProperty (outputAudioUnit,
                                   kAudioUnitProperty_SetRenderCallback,
                                   kAudioUnitScope_Input,
                                   0,
                                   &callback,
                                   sizeof(callback));
    if (result != noErr) {
        SDL_SetError("Core_CloseAudio: AudioUnitSetProperty (kAudioUnitProperty_SetInputCallback)");
        return;
    }

    result = CloseComponent(outputAudioUnit);
    if (result != noErr) {
        SDL_SetError("Core_CloseAudio: CloseComponent");
        return;
    }

    SDL_free(buffer);
}

Developer: ahpho, Project: wowmapviewer, Lines of code: 34


Example 16: GetControl32BitValue

void	AUVPresets::HandleControlChange ()
{
#if !__LP64__
	SInt32 i = GetControl32BitValue(mControl);
	if (i > 0)
	{
		AUPreset* preset = (AUPreset*) CFArrayGetValueAtIndex (mPresets, i-1);

		verify_noerr(AudioUnitSetProperty (mView->GetEditAudioUnit(),
									mPropertyID,	// either currentPreset or PresentPreset depending on which is supported
									kAudioUnitScope_Global,
									0,
									preset,
									sizeof(AUPreset)));

		// when we change a preset we can't expect the AU to update its state
		// as it isn't meant to know that its being viewed!
		// so we broadcast a notification to all listeners that all parameters on this AU have changed
		AudioUnitParameter changedUnit;
		changedUnit.mAudioUnit = mView->GetEditAudioUnit();
		changedUnit.mParameterID = kAUParameterListener_AnyParameter;
		verify_noerr (AUParameterListenerNotify (NULL, NULL, &changedUnit) );
	}
#endif
}

Developer: kdridi, Project: acau, Lines of code: 25


Example 17: gst_core_audio_bind_device

gboolean
gst_core_audio_bind_device (GstCoreAudio * core_audio)
{
  OSStatus status;

  /* Specify which device we're using. */
  GST_DEBUG_OBJECT (core_audio->osxbuf, "Bind AudioUnit to device %d",
      (int) core_audio->device_id);
  status = AudioUnitSetProperty (core_audio->audiounit,
      kAudioOutputUnitProperty_CurrentDevice, kAudioUnitScope_Global, 0,
      &core_audio->device_id, sizeof (AudioDeviceID));
  if (status) {
    GST_ERROR_OBJECT (core_audio->osxbuf, "Failed binding to device: %"
        GST_FOURCC_FORMAT, GST_FOURCC_ARGS (status));
    goto audiounit_error;
  }
  return TRUE;

audiounit_error:
  if (core_audio->recBufferList) {
    buffer_list_free (core_audio->recBufferList);
    core_audio->recBufferList = NULL;
  }
  return FALSE;
}

Developer: pli3, Project: gst-plugins-good, Lines of code: 25


Example 18: CFURLCreateFromFileSystemRepresentation

bool ofxAudioUnitFilePlayer::setFile(const std::string &filePath) {
	CFURLRef fileURL;
	fileURL = CFURLCreateFromFileSystemRepresentation(kCFAllocatorDefault,
	                                                  (const UInt8 *)filePath.c_str(),
	                                                  filePath.length(),
	                                                  NULL);

	if(_fileID[0]) {
		AudioFileClose(_fileID[0]);
		_fileID[0] = NULL;
	}

	OSStatus s = AudioFileOpenURL(fileURL, kAudioFileReadPermission, 0, _fileID);
	CFRelease(fileURL);

	_primed = false;

	if(s != noErr) {
		cout << "Error " << s << " while opening file at " << filePath << endl;
		return false;
	} else {
		// setting the file ID now since it seems to have some overhead.
		// Doing it now ensures you'll get sound pretty much instantly after
		// calling play() (subsequent calls don't have the overhead)
		OFXAU_RET_BOOL(AudioUnitSetProperty(*_unit,
											kAudioUnitProperty_ScheduledFileIDs,
											kAudioUnitScope_Global,
											0,
											_fileID,
											sizeof(_fileID)),
					   "setting file player's file ID");
	}
}

Developer: dferrandizmont, Project: ofxAudioUnit, Lines of code: 33


Example 19: AudioFilePlayer_Connect

static int    AudioFilePlayer_Connect(AudioFilePlayer *afp)
{
#if DEBUG
    printf ("Connect:%x, engaged=%d\n", (int)afp->mPlayUnit, (afp->mConnected ? 1 : 0));
#endif
    if (!afp->mConnected)
    {
        if (!afp->mAudioFileManager->DoConnect(afp->mAudioFileManager))
            return 0;

        /* set the render callback for the file data to be supplied to the sound converter AU */
        afp->mInputCallback.inputProc = afp->mAudioFileManager->FileInputProc;
        afp->mInputCallback.inputProcRefCon = afp->mAudioFileManager;

        OSStatus result = AudioUnitSetProperty (afp->mPlayUnit,
                            kAudioUnitProperty_SetRenderCallback,
                            kAudioUnitScope_Input,
                            0,
                            &afp->mInputCallback,
                            sizeof(afp->mInputCallback));
        if (result) return 0;  /*THROW_RESULT("AudioUnitSetProperty")*/
        afp->mConnected = 1;
    }

    return 1;
}

Developer: ahpho, Project: wowmapviewer, Lines of code: 26


Example 20: CFURLCreateFromFileSystemRepresentation

// ----------------------------------------------------------
bool ofxAudioUnitSampler::setSamples(const std::vector<std::string> &samplePaths)
// ----------------------------------------------------------
{
	CFURLRef sampleURLs[samplePaths.size()];

	for(int i = 0; i < samplePaths.size(); i++)
	{
		sampleURLs[i] = CFURLCreateFromFileSystemRepresentation(kCFAllocatorDefault,
																(const UInt8 *)samplePaths[i].c_str(),
																samplePaths[i].length(),
																NULL);
	}

	CFArrayRef samples = CFArrayCreate(NULL, (const void **)&sampleURLs, samplePaths.size(), &kCFTypeArrayCallBacks);

	OSStatus s = AudioUnitSetProperty(*_unit,
									  kAUSamplerProperty_LoadAudioFiles,
									  kAudioUnitScope_Global,
									  0,
									  &samples,
									  sizeof(samples));

	OFXAU_PRINT(s, "setting ofxAudioUnitSampler's source samples");

	for(int i = 0; i < samplePaths.size(); i++) CFRelease(sampleURLs[i]);

	CFRelease(samples);

	return s == noErr;
}

Developer: CLOUDS-Interactive-Documentary, Project: ofxAudioUnit, Lines of code: 31


Example 21: CoreAudioDrv_PCM_Shutdown

void CoreAudioDrv_PCM_Shutdown(void)
{
	OSStatus result;
	struct AURenderCallbackStruct callback;

	if (!Initialised) {
		return;
	}

	// stop processing the audio unit
	CoreAudioDrv_PCM_StopPlayback();

	// Remove the input callback
	callback.inputProc = 0;
	callback.inputProcRefCon = 0;

	result = AudioUnitSetProperty(output_audio_unit,
                                  kAudioUnitProperty_SetRenderCallback,
                                  kAudioUnitScope_Input,
                                  0,
                                  &callback,
                                  sizeof(callback));

	//result = CloseComponent(output_audio_unit);

	pthread_mutex_destroy(&mutex);

	Initialised = 0;
}

Developer: TermiT, Project: sw-redux, Lines of code: 27


Example 22: gst_core_audio_set_format

gboolean
gst_core_audio_set_format (GstCoreAudio * core_audio,
    AudioStreamBasicDescription format)
{
  /* Configure the output stream and allocate ringbuffer memory */
  OSStatus status;
  UInt32 propertySize;
  int element;
  AudioUnitScope scope;

  GST_DEBUG_OBJECT (core_audio->osxbuf, "Setting format for AudioUnit");

  scope = core_audio->is_src ? kAudioUnitScope_Output : kAudioUnitScope_Input;
  element = core_audio->is_src ? 1 : 0;

  propertySize = sizeof (AudioStreamBasicDescription);
  status = AudioUnitSetProperty (core_audio->audiounit,
      kAudioUnitProperty_StreamFormat, scope, element, &format, propertySize);

  if (status) {
    GST_WARNING_OBJECT (core_audio->osxbuf,
        "Failed to set audio description: %" GST_FOURCC_FORMAT,
        GST_FOURCC_ARGS (status));
    return FALSE;
  }

  return TRUE;
}

Developer: pli3, Project: gst-plugins-good, Lines of code: 28


Example 23: gst_core_audio_remove_render_callback

void
gst_core_audio_remove_render_callback (GstCoreAudio * core_audio)
{
  AURenderCallbackStruct input;
  OSStatus status;

  /* Deactivate the render callback by calling SetRenderCallback
   * with a NULL inputProc.
   */
  input.inputProc = NULL;
  input.inputProcRefCon = NULL;

  status = AudioUnitSetProperty (core_audio->audiounit,
      kAudioUnitProperty_SetRenderCallback, kAudioUnitScope_Global, 0,        /* N/A for global */
      &input, sizeof (input));

  if (status) {
    GST_WARNING_OBJECT (core_audio->osxbuf, "Failed to remove render callback %"
        GST_FOURCC_FORMAT, GST_FOURCC_ARGS (status));
  }

  /* Remove the RenderNotify too */
  status = AudioUnitRemoveRenderNotify (core_audio->audiounit,
      (AURenderCallback) gst_core_audio_render_notify, core_audio);
  if (status) {
    GST_WARNING_OBJECT (core_audio->osxbuf,
        "Failed to remove render notify callback %" GST_FOURCC_FORMAT,
        GST_FOURCC_ARGS (status));
  }

  /* We're deactivated.. */
  core_audio->io_proc_needs_deactivation = FALSE;
  core_audio->io_proc_active = FALSE;
}

Developer: pli3, Project: gst-plugins-good, Lines of code: 34


Example 24: COREAUDIO_CloseDevice

static void
COREAUDIO_CloseDevice(_THIS)
{
    if (this->hidden != NULL) {
        if (this->hidden->audioUnitOpened) {
            OSStatus result = noErr;
            AURenderCallbackStruct callback;
            const AudioUnitElement output_bus = 0;
            const AudioUnitElement input_bus = 1;
            const int iscapture = this->iscapture;
            const AudioUnitElement bus =
                ((iscapture) ? input_bus : output_bus);
            const AudioUnitScope scope =
                ((iscapture) ? kAudioUnitScope_Output :
                 kAudioUnitScope_Input);

            /* stop processing the audio unit */
            result = AudioOutputUnitStop(this->hidden->audioUnit);

            /* Remove the input callback */
            SDL_memset(&callback, '\0', sizeof(AURenderCallbackStruct));
            result = AudioUnitSetProperty(this->hidden->audioUnit,
                                          kAudioUnitProperty_SetRenderCallback,
                                          scope, bus, &callback,
                                          sizeof(callback));

            //CloseComponent(this->hidden->audioUnit);
            this->hidden->audioUnitOpened = 0;
        }
        SDL_free(this->hidden->buffer);
        SDL_free(this->hidden);
        this->hidden = NULL;
    }
}

Developer: arcanon, Project: ipadflash, Lines of code: 34


Example 25: AudioUnitSetProperty

bool CCoreAudioUnit::RemoveRenderProc()
{
  if (!m_audioUnit || !m_renderProc)
    return false;

  AURenderCallbackStruct callbackInfo;
  callbackInfo.inputProc = nil;
  callbackInfo.inputProcRefCon = nil;
  OSStatus ret = AudioUnitSetProperty(m_audioUnit, kAudioUnitProperty_SetRenderCallback,
                                      kAudioUnitScope_Input, 0, &callbackInfo, sizeof(AURenderCallbackStruct));
  if (ret)
  {
    CLog::Log(LOGERROR, "CCoreAudioUnit::RemoveRenderProc: Unable to remove AudioUnit render callback. Error = %s", GetError(ret).c_str());
    return false;
  }

  CLog::Log(LOGDEBUG, "CCoreAudioUnit::RemoveRenderProc: Remove RenderProc 0x%08x for unit 0x%08x.", (unsigned int)m_renderProc, (unsigned int)m_audioUnit);

  m_renderProc = NULL;
  Sleep(100);

  return true;
}

Developer: midripps, Project: xbmc, Lines of code: 25


Example 26: sizeof

// ----------------------------------------------------------
void ofxAudioUnitInput::connectTo(ofxAudioUnit &otherUnit, int destinationBus, int sourceBus)
// ----------------------------------------------------------
{
	AURenderCallbackStruct callback;
	callback.inputProc = pullCallback;
	callback.inputProcRefCon = &_renderContext;

	AudioStreamBasicDescription ASBD;
	UInt32 ASBDSize = sizeof(ASBD);

	OFXAU_RETURN(AudioUnitGetProperty(*otherUnit.getUnit(),
									  kAudioUnitProperty_StreamFormat,
									  kAudioUnitScope_Input,
									  destinationBus,
									  &ASBD,
									  &ASBDSize),
				 "getting hardware input destination's format");

	OFXAU_RETURN(AudioUnitSetProperty(*_unit,
									  kAudioUnitProperty_StreamFormat,
									  kAudioUnitScope_Output,
									  1,
									  &ASBD,
									  sizeof(ASBD)),
				 "setting hardware input's output format");

	otherUnit.setRenderCallback(callback, destinationBus);
}

Developer: jasonlevine, Project: ofxAudioUnit, Lines of code: 29


Example 27: AudioUnitSetProperty

OSStatus OutputImplAudioUnit::Track::renderNotifyCallback( void * audioTrack, AudioUnitRenderActionFlags *ioActionFlags, const AudioTimeStamp *inTimeStamp, UInt32 inBusNumber, UInt32 inNumberFrames, AudioBufferList *ioData )
{
	OSStatus err = noErr;

	if( *ioActionFlags &= kAudioUnitRenderAction_PostRender ) {
		OutputImplAudioUnit::Track * theTrack = reinterpret_cast<OutputImplAudioUnit::Track *>( audioTrack );

		if( ! theTrack->isPlaying() ) {
			//disable render callback
			AURenderCallbackStruct rcbs;
			rcbs.inputProc = NULL;
			rcbs.inputProcRefCon = NULL;
			err = AudioUnitSetProperty( theTrack->mOutput->mMixerUnit, kAudioUnitProperty_SetRenderCallback, kAudioUnitScope_Input, theTrack->mInputBus, &rcbs, sizeof(rcbs) );
			if( err ) {

			}

			err = AudioUnitRemoveRenderNotify( theTrack->mOutput->mMixerUnit, OutputImplAudioUnit::Track::renderNotifyCallback, audioTrack );
			if( err ) {

			}

			theTrack->mOutput->removeTrack( theTrack->getTrackId() );
			//now the track should be dead
		}
	}
	return err;
}

Developer: AaronMeyers, Project: Cinder, Lines of code: 27


Example 28: sizeof

OSStatus CAPlayThrough::SetOutputDeviceAsCurrent(AudioDeviceID out)
{
    UInt32 size = sizeof(AudioDeviceID);
    OSStatus err = noErr;
    //	UInt32 propsize = sizeof(Float32);
    //AudioObjectPropertyScope theScope = mIsInput ? kAudioDevicePropertyScopeInput : kAudioDevicePropertyScopeOutput;

    AudioObjectPropertyAddress theAddress = { kAudioHardwarePropertyDefaultOutputDevice,
                                              kAudioObjectPropertyScopeGlobal,
                                              kAudioObjectPropertyElementMaster };

	if(out == kAudioDeviceUnknown) //Retrieve the default output device
	{
		err = AudioObjectGetPropertyData(kAudioObjectSystemObject, &theAddress, 0, NULL, &size, &out);
        checkErr(err);
	}
	mOutputDevice.Init(out, false);

	//Set the Current Device to the Default Output Unit.
    err = AudioUnitSetProperty(mOutputUnit,
							   kAudioOutputUnitProperty_CurrentDevice,
							   kAudioUnitScope_Global,
							   0,
							   &mOutputDevice.mID,
							   sizeof(mOutputDevice.mID));

	return err;
}

Developer: thepixelheart, Project: PixelPusher, Lines of code: 30



Note: The AudioUnitSetProperty function examples in this article were compiled from source-code and documentation hosting platforms such as GitHub and MSDocs. The snippets were selected from open-source projects contributed by various developers; copyright of the source code remains with the original authors, and distribution or use should follow each project's license. Please do not repost without permission.

