Diffstat (limited to 'pd/portaudio_v18/pa_mac_core/pa_mac_core.c')
-rw-r--r--  pd/portaudio_v18/pa_mac_core/pa_mac_core.c  339
1 file changed, 213 insertions(+), 126 deletions(-)
diff --git a/pd/portaudio_v18/pa_mac_core/pa_mac_core.c b/pd/portaudio_v18/pa_mac_core/pa_mac_core.c
index 5bf24cb6..e0a31374 100644
--- a/pd/portaudio_v18/pa_mac_core/pa_mac_core.c
+++ b/pd/portaudio_v18/pa_mac_core/pa_mac_core.c
@@ -1,5 +1,5 @@
/*
- * $Id: pa_mac_core.c,v 1.8.4.8 2003/03/07 01:34:18 philburk Exp $
+ * $Id: pa_mac_core.c,v 1.8.4.12 2003/04/16 19:06:01 philburk Exp $
* pa_mac_core.c
* Implementation of PortAudio for Mac OS X Core Audio
*
@@ -97,6 +97,17 @@
See code related to "streamInterleavingBuffer".
03.06.2003 - Phil Burk and Ryan Francesconi - fixed numChannels query for MOTU828.
Handle fact that MOTU828 gives you 8 channels even when you ask for 2!
+ 04.06.2003 - Phil Burk - Combine Dominic Mazzoni's technique of using Configuration to query maxChannels
+ with the old technique of scanning formats.
+ Increase channel scan by 1 to handle mono USB microphones.
+ Do not merge or split channels in AudioConverter to handle 2+2 channels
+ of Quattro which has a format of 2 channels.
+ 04.07.2003 - Phil Burk - use AudioGetCurrentHostTime instead of getrusage() which can lock threads.
+ 04.10.2003 - Phil Burk - fixed pointer bug with input deinterleaving loop.
+ Detect and ignore NULL inputData and outputData in CoreAudio callback.
+ Overlap creation and deletion of AudioConverters to prevent thread death when device rate changes.
+ 04.16.2003 - Phil Burk - Fixed input channel scrambling when numChannels != 2^N. Caused by alignment
+ error when filling RingBuffer with 2^N zero bytes.
*/
#include <CoreServices/CoreServices.h>
@@ -107,12 +118,16 @@
#include <AudioUnit/AudioUnit.h>
#include <AudioToolbox/DefaultAudioOutput.h>
#include <AudioToolbox/AudioConverter.h>
+#include <CoreAudio/HostTime.h>
#include "portaudio.h"
#include "pa_host.h"
#include "pa_trace.h"
#include "ringbuffer.h"
+/************************************************* Configuration ********/
+#define PA_ENABLE_LOAD_MEASUREMENT (1)
+
/************************************************* Constants ********/
#define SET_DEVICE_BUFFER_SIZE (1)
@@ -173,8 +188,8 @@ typedef struct PaHostSoundControl
char *ringBufferData;
Boolean formatListenerCalled;
/* For measuring CPU utilization. */
- struct rusage entryRusage;
- double inverseMicrosPerHostBuffer; /* 1/Microseconds of real-time audio per user buffer. */
+ UInt64 entryTime;
+ double inverseHostTicksPerBuffer; /* 1/Ticks of real-time audio per user buffer. */
} PaHostSoundControl;
/**************************************************************
@@ -289,15 +304,7 @@ static void Pa_StartUsageCalculation( internalPortAudioStream *past )
PaHostSoundControl *pahsc = (PaHostSoundControl *) past->past_DeviceData;
if( pahsc == NULL ) return;
/* Query user CPU timer for usage analysis and to prevent overuse of CPU. */
- getrusage( RUSAGE_SELF, &pahsc->entryRusage );
-}
-
-static long SubtractTime_AminusB( struct timeval *timeA, struct timeval *timeB )
-{
- long secs = timeA->tv_sec - timeB->tv_sec;
- long usecs = secs * 1000000;
- usecs += (timeA->tv_usec - timeB->tv_usec);
- return usecs;
+ pahsc->entryTime = AudioGetCurrentHostTime();
}
/******************************************************************************
@@ -306,9 +313,9 @@ static long SubtractTime_AminusB( struct timeval *timeA, struct timeval *timeB )
*/
static void Pa_EndUsageCalculation( internalPortAudioStream *past )
{
- struct rusage currentRusage;
- long usecsElapsed;
- double newUsage;
+ UInt64 exitTime;
+ UInt64 ticksElapsed;
+ double newUsage;
#define LOWPASS_COEFFICIENT_0 (0.95)
#define LOWPASS_COEFFICIENT_1 (0.99999 - LOWPASS_COEFFICIENT_0)
@@ -316,16 +323,15 @@ static void Pa_EndUsageCalculation( internalPortAudioStream *past )
PaHostSoundControl *pahsc = (PaHostSoundControl *) past->past_DeviceData;
if( pahsc == NULL ) return;
- if( getrusage( RUSAGE_SELF, &currentRusage ) == 0 )
- {
- usecsElapsed = SubtractTime_AminusB( &currentRusage.ru_utime, &pahsc->entryRusage.ru_utime );
-
- /* Use inverse because it is faster than the divide. */
- newUsage = usecsElapsed * pahsc->inverseMicrosPerHostBuffer;
+ exitTime = AudioGetCurrentHostTime();
+
+ ticksElapsed = exitTime - pahsc->entryTime;
- past->past_Usage = (LOWPASS_COEFFICIENT_0 * past->past_Usage) +
+ /* Use inverse because it is faster than the divide. */
+ newUsage = ticksElapsed * pahsc->inverseHostTicksPerBuffer;
+ /* Low pass filter result. */
+ past->past_Usage = (LOWPASS_COEFFICIENT_0 * past->past_Usage) +
(LOWPASS_COEFFICIENT_1 * newUsage);
- }
}
/****************************************** END CPU UTILIZATION *******/
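The hunks above replace the getrusage()-based CPU measurement with CoreAudio host-time queries. A minimal standalone sketch of the technique, not part of the patch (sampleRate, framesPerBuffer and renderOneBuffer are placeholder names):

    #include <CoreAudio/HostTime.h>  /* AudioGetCurrentHostTime(), AudioGetHostClockFrequency() */

    /* Fraction of one buffer period spent in the render work; values near 1.0 mean the CPU is saturated. */
    static double MeasureBufferLoad( double sampleRate, long framesPerBuffer,
                                     void (*renderOneBuffer)(void) )
    {
        /* Host ticks of real time available for one buffer of audio. */
        double ticksPerBuffer = AudioGetHostClockFrequency() * framesPerBuffer / sampleRate;

        UInt64 entryTime = AudioGetCurrentHostTime();
        renderOneBuffer();                       /* stands in for PaConvert_Process() */
        UInt64 ticksElapsed = AudioGetCurrentHostTime() - entryTime;

        return (double) ticksElapsed / ticksPerBuffer;
    }

The patch itself precomputes the reciprocal once per stream (inverseHostTicksPerBuffer, set in the PaHost_OpenStream hunk below) and low-pass filters successive readings in Pa_EndUsageCalculation().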
@@ -523,10 +529,61 @@ static int PaOSX_ScanDevices( Boolean isInput )
}
/*************************************************************************
-** Determine the maximum number of channels a device will support.
+** Determine the maximum number of channels based on the configuration.
** @return maxChannels or negative error.
*/
-static int PaOSX_GetMaxChannels( AudioDeviceID devID, Boolean isInput )
+static int PaOSX_GetMaxChannels_Config( AudioDeviceID devID, Boolean isInput )
+{
+ OSStatus err;
+ UInt32 outSize;
+ Boolean outWritable;
+ AudioBufferList *list;
+ int numChannels;
+ int i;
+
+ // Determine maximum number of channels supported.
+ // dmazzoni: new method
+
+ outSize = 0;
+ err = AudioDeviceGetPropertyInfo(devID, 0, isInput,
+ kAudioDevicePropertyStreamConfiguration,
+ &outSize, &outWritable);
+ if ( err != noErr )
+ {
+ PRINT_ERR("PaOSX_GetMaxChannels_Config: Could not get stream configuration info", err);
+ sSavedHostError = err;
+ return paHostError;
+ }
+
+ list = (AudioBufferList *)PaHost_AllocateFastMemory( outSize );
+ err = AudioDeviceGetProperty(devID, 0, isInput,
+ kAudioDevicePropertyStreamConfiguration,
+ &outSize, list);
+ if ( err != noErr )
+ {
+ PRINT_ERR("PaOSX_GetMaxChannels_Config: Could not get stream configuration", err);
+ sSavedHostError = err;
+ return paHostError;
+ }
+
+ numChannels = 0;
+ for( i=0; i<list->mNumberBuffers; i++ )
+ {
+ int bufChannels = list->mBuffers[i].mNumberChannels;
+ DBUG(("PaOSX_GetMaxChannels_Config: buffer %d has %d channels.\n", i, bufChannels ));
+ numChannels += bufChannels;
+ }
+
+ PaHost_FreeFastMemory( list, outSize );
+
+ return numChannels;
+}
+
+/*************************************************************************
+** Determine the maximum number of channels a device will support based on scanning the format.
+** @return maxChannels or negative error.
+*/
+static int PaOSX_GetMaxChannels_Format( AudioDeviceID devID, Boolean isInput )
{
OSStatus err;
UInt32 outSize;
@@ -540,13 +597,14 @@ static int PaOSX_GetMaxChannels( AudioDeviceID devID, Boolean isInput )
// For example, some 8 channel devices return 2 when given 256 as input.
gotMax = false;
maxChannels = 0;
+ numChannels = 0;
while( !gotMax )
{
memset( &formatDesc, 0, sizeof(formatDesc));
- numChannels = maxChannels + 2;
- DBUG(("PaOSX_GetMaxChannels: try numChannels = %d = %d + 2\n",
- numChannels, maxChannels ));
+ numChannels = numChannels + 1;
+ DBUG(("PaOSX_GetMaxChannels_Format: try numChannels = %d = %d + 1\n",
+ numChannels, numChannels - 1 ));
formatDesc.mChannelsPerFrame = numChannels;
outSize = sizeof(formatDesc);
@@ -557,7 +615,10 @@ static int PaOSX_GetMaxChannels( AudioDeviceID devID, Boolean isInput )
err, formatDesc.mChannelsPerFrame ));
if( err != noErr )
{
- gotMax = true;
+ if (numChannels > (maxChannels + 4)) // Try several possibilities above current max
+ {
+ gotMax = true;
+ }
}
else
{
@@ -568,7 +629,10 @@ static int PaOSX_GetMaxChannels( AudioDeviceID devID, Boolean isInput )
}
else if(formatDesc.mChannelsPerFrame < numChannels)
{
- gotMax = true;
+ if (numChannels > (maxChannels + 4)) // Try several possibilities above current max
+ {
+ gotMax = true;
+ }
}
else
{
@@ -579,6 +643,24 @@ static int PaOSX_GetMaxChannels( AudioDeviceID devID, Boolean isInput )
return maxChannels;
}
+
+
+/*************************************************************************
+** Determine the maximum number of channels a device will support.
+** It is not clear at this point which is the better technique, so
+** we do both and use the biggest result.
+**
+** @return maxChannels or negative error.
+*/
+static int PaOSX_GetMaxChannels( AudioDeviceID devID, Boolean isInput )
+{
+ int maxChannelsFormat;
+ int maxChannelsConfig;
+ maxChannelsFormat = PaOSX_GetMaxChannels_Format( devID, isInput );
+ maxChannelsConfig = PaOSX_GetMaxChannels_Config( devID, isInput );
+ return (maxChannelsFormat > maxChannelsConfig) ? maxChannelsFormat : maxChannelsConfig;
+}
+
/*************************************************************************
** Try to fill in the device info for this device.
** Return 1 if a good device that PA can use.
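The new PaOSX_GetMaxChannels() wrapper in the hunk above simply keeps whichever probe reports more channels. A hypothetical caller, illustrative only and not part of the patch, would query each direction separately and treat a negative result as a PaError:

    /* Illustrative use only: probe one device in both directions. */
    static void ReportMaxChannels( AudioDeviceID devID )
    {
        int maxInput  = PaOSX_GetMaxChannels( devID, true );   /* isInput = true */
        int maxOutput = PaOSX_GetMaxChannels( devID, false );  /* isInput = false, i.e. output side */
        if( (maxInput < 0) || (maxOutput < 0) )
        {
            /* Negative results are PaErrors such as paHostError; details are in sSavedHostError. */
            return;
        }
        DBUG(("device %ld supports up to %d in / %d out channels\n",
              (long) devID, maxInput, maxOutput ));
    }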
@@ -733,12 +815,22 @@ static OSStatus PaOSX_LoadAndProcess( internalPortAudioStream *past,
inputBuffer = pahsc->input.converterBuffer;
}
+ /* Measure CPU load. */
+#if PA_ENABLE_LOAD_MEASUREMENT
+ Pa_StartUsageCalculation( past );
+#endif
+
/* Fill part of audio converter buffer by converting input to user format,
* calling user callback, then converting output to native format. */
if( PaConvert_Process( past, inputBuffer, outputBuffer ))
{
past->past_StopSoon = 1;
}
+
+#if PA_ENABLE_LOAD_MEASUREMENT
+ Pa_EndUsageCalculation( past );
+#endif
+
}
return err;
}
@@ -777,11 +869,11 @@ static OSStatus PaOSX_WriteInputRingBuffer( internalPortAudioStream *past,
char *inputNativeBufferfPtr = NULL;
PaHostSoundControl *pahsc = (PaHostSoundControl *) past->past_DeviceData;
- /* Do we need to interleave the buffers first? */
+ /* Do we need to deinterleave the buffers first? */
if( past->past_NumInputChannels != inInputData->mBuffers[0].mNumberChannels )
{
-
- numFramesInInputBuffer = inInputData->mBuffers[0].mDataByteSize / (sizeof(float) * inInputData->mBuffers[0].mNumberChannels);
+ numFramesInInputBuffer = inInputData->mBuffers[0].mDataByteSize /
+ (sizeof(float) * inInputData->mBuffers[0].mNumberChannels);
numBytes = numFramesInInputBuffer * sizeof(float) * past->past_NumInputChannels;
@@ -813,11 +905,14 @@ static OSStatus PaOSX_WriteInputRingBuffer( internalPortAudioStream *past,
for( j=0; j<numChannelsUsedInThisBuffer; j++ )
{
int k;
+ float *dest = &pahsc->input.streamInterleavingBuffer[ currentInterleavedChannelIndex ];
+ float *src = &((float *)inInputData->mBuffers[i].mData)[ j ];
/* Move one channel from CoreAudio buffer to interleaved buffer. */
for( k=0; k<numFramesInInputBuffer; k++ )
{
- pahsc->input.streamInterleavingBuffer[ k*numInterleavedChannels + currentInterleavedChannelIndex ] =
- ((float *)inInputData->mBuffers[i].mData)[ k*numBufChannels + j ];
+ *dest = *src;
+ src += numBufChannels;
+ dest += numInterleavedChannels;
}
currentInterleavedChannelIndex++;
}
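The rewritten inner loop above replaces the per-frame index arithmetic (k*numInterleavedChannels, k*numBufChannels) with two striding pointers. Restated as a standalone helper, purely for illustration and not part of the patch, the per-channel copy is:

    /* Copy one channel of audio between buffers with different frame strides:
     * src advances srcStride floats per frame (e.g. a CoreAudio device buffer),
     * dst advances dstStride floats per frame (e.g. the stream interleaving buffer). */
    static void CopyOneChannelStrided( float *dst, int dstStride,
                                       const float *src, int srcStride,
                                       int numFrames )
    {
        int k;
        for( k = 0; k < numFrames; k++ )
        {
            *dst = *src;
            dst += dstStride;
            src += srcStride;
        }
    }

The output path later in this patch applies the same pattern with the source and destination roles swapped.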
@@ -830,7 +925,7 @@ static OSStatus PaOSX_WriteInputRingBuffer( internalPortAudioStream *past,
else
{
inputNativeBufferfPtr = (char*)inInputData->mBuffers[0].mData;
- numBytes += inInputData->mBuffers[0].mDataByteSize;
+ numBytes = inInputData->mBuffers[0].mDataByteSize;
}
writeRoom = RingBuffer_GetWriteAvailable( &pahsc->ringBuffer );
@@ -838,7 +933,7 @@ static OSStatus PaOSX_WriteInputRingBuffer( internalPortAudioStream *past,
if( numBytes <= writeRoom )
{
RingBuffer_Write( &pahsc->ringBuffer, inputNativeBufferfPtr, numBytes );
- DBUGBACK(("PaOSX_WriteInputRingBuffer: wrote %ld bytes to FIFO.\n", inInputData->mBuffers[0].mDataByteSize));
+ DBUGBACK(("PaOSX_WriteInputRingBuffer: wrote %ld bytes to FIFO.\n", numBytes));
} // FIXME else drop samples on floor, remember overflow???
return noErr;
@@ -854,6 +949,12 @@ static OSStatus PaOSX_HandleInput( internalPortAudioStream *past,
OSStatus err = noErr;
PaHostSoundControl *pahsc = (PaHostSoundControl *) past->past_DeviceData;
+ if( inInputData == NULL )
+ {
+ DBUG(("PaOSX_HandleInput: inInputData == NULL\n"));
+ return noErr;
+ }
+
if( inInputData->mNumberBuffers > 0 )
{
/* Write to FIFO here if we are only using this callback. */
@@ -894,9 +995,16 @@ static OSStatus PaOSX_HandleOutput( internalPortAudioStream *past,
Boolean deinterleavingNeeded;
int numFramesInOutputBuffer;
+ if( outOutputData == NULL )
+ {
+ DBUG(("PaOSX_HandleOutput: outOutputData == NULL\n"));
+ return noErr;
+ }
+
deinterleavingNeeded = past->past_NumOutputChannels != outOutputData->mBuffers[0].mNumberChannels;
- numFramesInOutputBuffer = outOutputData->mBuffers[0].mDataByteSize / (sizeof(float) * outOutputData->mBuffers[0].mNumberChannels);
+ numFramesInOutputBuffer = outOutputData->mBuffers[0].mDataByteSize /
+ (sizeof(float) * outOutputData->mBuffers[0].mNumberChannels);
if( pahsc->mode != PA_MODE_INPUT_ONLY )
{
@@ -931,7 +1039,7 @@ static OSStatus PaOSX_HandleOutput( internalPortAudioStream *past,
outputNativeBufferfPtr = (void*)outOutputData->mBuffers[0].mData;
}
- /* Pull code from PA user through converter. */
+ /* Pull data from PA user through converter. */
err = AudioConverterFillBuffer(
pahsc->output.converter,
PaOSX_OutputConverterCallbackProc,
@@ -962,11 +1070,14 @@ static OSStatus PaOSX_HandleOutput( internalPortAudioStream *past,
for( j=0; j<numChannelsUsedInThisBuffer; j++ )
{
int k;
+ float *dest = &((float *)outOutputData->mBuffers[i].mData)[ j ];
+ float *src = &pahsc->output.streamInterleavingBuffer[ currentInterleavedChannelIndex ];
/* Move one channel from interleaved buffer to CoreAudio buffer. */
for( k=0; k<numFramesInOutputBuffer; k++ )
{
- ((float *)outOutputData->mBuffers[i].mData)[ k*numBufChannels + j ] =
- pahsc->output.streamInterleavingBuffer[ k*numInterleavedChannels + currentInterleavedChannelIndex ];
+ *dest = *src;
+ dest += numBufChannels;
+ src += numInterleavedChannels;
}
currentInterleavedChannelIndex++;
}
@@ -999,7 +1110,7 @@ static OSStatus PaOSX_CoreAudioInputCallback (AudioDeviceID inDevice, const Aud
pahsc = (PaHostSoundControl *) past->past_DeviceData;
/* If there is a FIFO for input then write to it. */
- if( pahsc->ringBufferData != NULL )
+ if( (pahsc->ringBufferData != NULL) && (inInputData != NULL) )
{
err = PaOSX_WriteInputRingBuffer( past, inInputData );
if( err != noErr ) goto error;
@@ -1051,8 +1162,6 @@ static OSStatus PaOSX_CoreAudioIOCallback (AudioDeviceID inDevice, const AudioT
past->past_FrameCount = inInputTime->mSampleTime;
}
- /* Measure CPU load. */
- Pa_StartUsageCalculation( past );
past->past_NumCallbacks += 1;
/* Process full input buffer. */
@@ -1062,8 +1171,6 @@ static OSStatus PaOSX_CoreAudioIOCallback (AudioDeviceID inDevice, const AudioT
/* Fill up empty output buffers. */
err = PaOSX_HandleOutput( past, outOutputData );
if( err != 0 ) goto error;
-
- Pa_EndUsageCalculation( past );
}
if( err != 0 ) DBUG(("PaOSX_CoreAudioIOCallback: returns %ld.\n", err ));
@@ -1198,7 +1305,7 @@ static void PaOSX_FixVolumeScalars( AudioDeviceID devID, Boolean isInput,
kAudioDevicePropertyMute, &dataSize, &uidata32 );
if( err == noErr )
{
- DBUG(("uidata32 for channel %d = %ld\n", iChannel, uidata32));
+ DBUG(("mute for channel %d = %ld\n", iChannel, uidata32));
if( uidata32 == 1 ) // muted?
{
dataSize = sizeof( uidata32 );
@@ -1304,24 +1411,16 @@ static OSStatus PAOSX_DevicePropertyListener (AudioDeviceID inDevice,
UInt32 dataSize;
OSStatus err = noErr;
AudioStreamBasicDescription userStreamFormat, hardwareStreamFormat;
- Boolean updateInverseMicros;
- Boolean updateConverter;
+ PaHostInOut *hostInOut;
+ AudioStreamBasicDescription *destFormatPtr, *srcFormatPtr;
past = (internalPortAudioStream *) inClientData;
pahsc = (PaHostSoundControl *) past->past_DeviceData;
DBUG(("PAOSX_DevicePropertyListener: called with propertyID = 0x%0X\n", (unsigned int) inPropertyID ));
- updateInverseMicros = (inDevice == pahsc->primaryDeviceID) &&
- ((inPropertyID == kAudioDevicePropertyStreamFormat) ||
- (inPropertyID == kAudioDevicePropertyBufferFrameSize));
-
- updateConverter = (inPropertyID == kAudioDevicePropertyStreamFormat);
-
- // Sample rate needed for both.
- if( updateConverter || updateInverseMicros )
+ if(inPropertyID == kAudioDevicePropertyStreamFormat)
{
-
/* Get target device format */
dataSize = sizeof(hardwareStreamFormat);
err = AudioDeviceGetProperty(inDevice, 0, isInput,
@@ -1332,12 +1431,7 @@ static OSStatus PAOSX_DevicePropertyListener (AudioDeviceID inDevice,
sSavedHostError = err;
goto error;
}
- }
-
- if( updateConverter )
- {
- DBUG(("PAOSX_DevicePropertyListener: HW rate = %f\n", hardwareStreamFormat.mSampleRate ));
- DBUG(("PAOSX_DevicePropertyListener: user rate = %f\n", past->past_SampleRate ));
+
DBUG(("PAOSX_DevicePropertyListener: HW mChannelsPerFrame = %d\n", (int)hardwareStreamFormat.mChannelsPerFrame ));
/* Set source user format. */
@@ -1349,69 +1443,48 @@ static OSStatus PAOSX_DevicePropertyListener (AudioDeviceID inDevice,
userStreamFormat.mBytesPerFrame = userStreamFormat.mChannelsPerFrame * sizeof(float);
userStreamFormat.mBytesPerPacket = userStreamFormat.mBytesPerFrame * userStreamFormat.mFramesPerPacket;
- /* Don't use AudioConverter for merging channels. */
- if( hardwareStreamFormat.mChannelsPerFrame > userStreamFormat.mChannelsPerFrame )
- {
- hardwareStreamFormat.mChannelsPerFrame = userStreamFormat.mChannelsPerFrame;
- hardwareStreamFormat.mBytesPerFrame = userStreamFormat.mBytesPerFrame;
- hardwareStreamFormat.mBytesPerPacket = userStreamFormat.mBytesPerPacket;
- }
-
+ /* Don't use AudioConverter for merging or splitting channels. */
+ hardwareStreamFormat.mChannelsPerFrame = userStreamFormat.mChannelsPerFrame;
+ hardwareStreamFormat.mBytesPerFrame = userStreamFormat.mBytesPerFrame;
+ hardwareStreamFormat.mBytesPerPacket = userStreamFormat.mBytesPerPacket;
+
if( isInput )
{
- if( pahsc->input.converter != NULL )
- {
- verify_noerr(AudioConverterDispose (pahsc->input.converter));
- }
-
- // Convert from hardware format to user format.
- err = AudioConverterNew (
- &hardwareStreamFormat,
- &userStreamFormat,
- &pahsc->input.converter );
- if( err != noErr )
- {
- PRINT_ERR("Could not create input format converter", err);
- sSavedHostError = err;
- goto error;
- }
+ hostInOut = &pahsc->input;
+ srcFormatPtr = &hardwareStreamFormat;
+ destFormatPtr = &userStreamFormat;
}
else
{
- if( pahsc->output.converter != NULL )
- {
- verify_noerr(AudioConverterDispose (pahsc->output.converter));
- }
-
- // Convert from user format to hardware format.
- err = AudioConverterNew (
- &userStreamFormat,
- &hardwareStreamFormat,
- &pahsc->output.converter );
- if( err != noErr )
- {
- PRINT_ERR("Could not create output format converter", err);
- sSavedHostError = err;
- goto error;
- }
+ hostInOut = &pahsc->output;
+ srcFormatPtr = &userStreamFormat;
+ destFormatPtr = &hardwareStreamFormat;
}
- }
-
- if( updateInverseMicros )
- {
- // Update coefficient used to calculate CPU Load based on sampleRate and bufferSize.
- UInt32 ioBufferSize;
- dataSize = sizeof(ioBufferSize);
- err = AudioDeviceGetProperty( inDevice, 0, isInput,
- kAudioDevicePropertyBufferFrameSize, &dataSize,
- &ioBufferSize);
- if( err == noErr )
+ DBUG(("PAOSX_DevicePropertyListener: source rate = %f\n", srcFormatPtr->mSampleRate ));
+ DBUG(("PAOSX_DevicePropertyListener: dest rate = %f\n", destFormatPtr->mSampleRate ));
+
+ // Don't delete old converter until we create new one so we don't pull
+ // the rug out from under other audio threads.
+ AudioConverterRef oldConverter = hostInOut->converter;
+
+ // Make converter to change sample rate.
+ err = AudioConverterNew (
+ srcFormatPtr,
+ destFormatPtr,
+ &hostInOut->converter );
+ if( err != noErr )
{
- pahsc->inverseMicrosPerHostBuffer = hardwareStreamFormat.mSampleRate /
- (1000000.0 * ioBufferSize);
+ PRINT_ERR("Could not create format converter", err);
+ sSavedHostError = err;
+ goto error;
+ }
+
+ if( oldConverter != NULL )
+ {
+ verify_noerr( AudioConverterDispose( oldConverter ) );
}
}
-
+
error:
pahsc->formatListenerCalled = true;
return err;
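The reworked listener above also changes the AudioConverter lifecycle: the replacement converter is created before the old one is disposed, matching the changelog note about preventing thread death when the device rate changes. A minimal sketch of that swap pattern in isolation (hypothetical helper name, error handling trimmed to the essentials):

    /* Create the replacement first, publish it, then release the old converter,
     * so a callback that is mid-conversion is not left with a disposed converter. */
    static OSStatus ReplaceConverter( const AudioStreamBasicDescription *srcFormat,
                                      const AudioStreamBasicDescription *destFormat,
                                      AudioConverterRef *converterPtr )
    {
        AudioConverterRef oldConverter = *converterPtr;
        AudioConverterRef newConverter = NULL;
        OSStatus err = AudioConverterNew( srcFormat, destFormat, &newConverter );
        if( err != noErr ) return err;       /* keep the old converter on failure */

        *converterPtr = newConverter;        /* callbacks pick up the new one from here on */

        if( oldConverter != NULL )
        {
            AudioConverterDispose( oldConverter );
        }
        return noErr;
    }

As in the patch, creating before disposing narrows the window of exposure; it does not add any locking around converter use.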
@@ -1430,6 +1503,7 @@ static PaError PaOSX_CreateInputRingBuffer( internalPortAudioStream *past )
UInt32 framesPerHostBuffer;
UInt32 bytesForDevice;
UInt32 bytesForUser;
+ UInt32 bytesPerFrame;
AudioStreamBasicDescription formatDesc;
dataSize = sizeof(formatDesc);
@@ -1437,7 +1511,7 @@ static PaError PaOSX_CreateInputRingBuffer( internalPortAudioStream *past )
kAudioDevicePropertyStreamFormat, &dataSize, &formatDesc);
if( err != noErr )
{
- PRINT_ERR("PaOSX_CreateInputRingBuffer: Could not get I/O buffer size.\n", err);
+ PRINT_ERR("PaOSX_CreateInputRingBuffer: Could not get input format.\n", err);
sSavedHostError = err;
return paHostError;
}
@@ -1452,15 +1526,15 @@ static PaError PaOSX_CreateInputRingBuffer( internalPortAudioStream *past )
&framesPerHostBuffer);
if( err != noErr )
{
- PRINT_ERR("PaOSX_CreateInputRingBuffer: Could not get I/O buffer size.\n", err);
+ PRINT_ERR("PaOSX_CreateInputRingBuffer: Could not get input buffer size.\n", err);
sSavedHostError = err;
return paHostError;
}
- bytesForDevice = framesPerHostBuffer * formatDesc.mChannelsPerFrame * sizeof(Float32) * 2;
+ bytesPerFrame = past->past_NumInputChannels * sizeof(Float32);
- bytesForUser = past->past_FramesPerUserBuffer * past->past_NumInputChannels *
- sizeof(Float32) * 3 * sampleRateRatio;
+ bytesForDevice = framesPerHostBuffer * bytesPerFrame * 2;
+ bytesForUser = past->past_FramesPerUserBuffer * bytesPerFrame * 3 * sampleRateRatio;
// Ring buffer should be large enough to consume audio input from device,
// and to deliver a complete user buffer.
@@ -1475,8 +1549,13 @@ static PaError PaOSX_CreateInputRingBuffer( internalPortAudioStream *past )
return paInsufficientMemory;
}
RingBuffer_Init( &pahsc->ringBuffer, numBytes, pahsc->ringBufferData );
- // make it look full at beginning
- RingBuffer_AdvanceWriteIndex( &pahsc->ringBuffer, numBytes );
+ // Make it look almost full at beginning. We must advance by an integral number of frames
+ // so that the channels don't get scrambled when numChannels is not a power of 2.
+ {
+ int numZeroFrames = numBytes / bytesPerFrame;
+ int numZeroBytes = numZeroFrames * bytesPerFrame;
+ RingBuffer_AdvanceWriteIndex( &pahsc->ringBuffer, numZeroBytes );
+ }
return paNoError;
}
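The change above advances the FIFO write index by whole frames only. A concrete illustration of the failure it prevents, with hypothetical numbers: for 3 input channels, bytesPerFrame is 12, so pre-filling a 1024-byte ring buffer with all 1024 zero bytes leaves a 4-byte (one sample) remainder in the byte stream; every real frame written afterwards is then read one channel slot late and the channels come out rotated, which is the scrambling noted in the 04.16.2003 changelog entry. Advancing by 85 whole frames instead keeps the frame boundaries intact:

    /* Same arithmetic as above, for a hypothetical 3-channel Float32 stream. */
    int bytesPerFrame = 3 * sizeof(Float32);                         /* 12 bytes       */
    int numBytes      = 1024;                                        /* 2^N FIFO size  */
    int numZeroBytes  = (numBytes / bytesPerFrame) * bytesPerFrame;  /* 85 * 12 = 1020 */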
@@ -1750,7 +1829,7 @@ PaError PaHost_OpenStream( internalPortAudioStream *past )
DBUG(("outputDeviceID = %ld\n", pahsc->output.audioDeviceID ));
DBUG(("inputDeviceID = %ld\n", pahsc->input.audioDeviceID ));
DBUG(("primaryDeviceID = %ld\n", pahsc->primaryDeviceID ));
-
+
/* ------------------ OUTPUT */
if( useOutput )
{
@@ -1764,6 +1843,14 @@ PaError PaHost_OpenStream( internalPortAudioStream *past )
result = PaOSX_OpenInputDevice( past );
if( result < 0 ) goto error;
}
+
+#if PA_ENABLE_LOAD_MEASUREMENT
+ pahsc->inverseHostTicksPerBuffer = past->past_SampleRate /
+ (AudioGetHostClockFrequency() * past->past_FramesPerUserBuffer);
+ DBUG(("inverseHostTicksPerBuffer based on buffer size of %d frames.\n", past->past_FramesPerUserBuffer ));
+#else
+ PRINT(("WARNING - Pa_GetCPULoad() measurement disabled in pa_mac_core.c.\n"));
+#endif
return result;