Path: src/java.desktop/macosx/native/libjsound/PLATFORM_API_MacOSX_PCM.cpp
/*
 * Copyright (c) 2002, 2020, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.  Oracle designates this
 * particular file as subject to the "Classpath" exception as provided
 * by Oracle in the LICENSE file that accompanied this code.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 */

//#define USE_ERROR
//#define USE_TRACE
//#define USE_VERBOSE_TRACE

#include <AudioUnit/AudioUnit.h>
#include <AudioToolbox/AudioConverter.h>
#include <pthread.h>
#include <math.h>
/*
#if !defined(__COREAUDIO_USE_FLAT_INCLUDES__)
#include <CoreAudio/CoreAudioTypes.h>
#else
#include <CoreAudioTypes.h>
#endif
*/

#include "PLATFORM_API_MacOSX_Utils.h"

extern "C" {
#include "Utilities.h"
#include "DirectAudio.h"
}

#if USE_DAUDIO == TRUE


#ifdef USE_TRACE
static void PrintStreamDesc(const AudioStreamBasicDescription *inDesc) {
    TRACE4("ID='%c%c%c%c'", (char)(inDesc->mFormatID >> 24), (char)(inDesc->mFormatID >> 16), (char)(inDesc->mFormatID >> 8), (char)(inDesc->mFormatID));
    TRACE2(", %f Hz, flags=0x%lX", (float)inDesc->mSampleRate, (long unsigned)inDesc->mFormatFlags);
    TRACE2(", %ld channels, %ld bits", (long)inDesc->mChannelsPerFrame, (long)inDesc->mBitsPerChannel);
    TRACE1(", %ld bytes per frame\n", (long)inDesc->mBytesPerFrame);
}
#else
static inline void PrintStreamDesc(const AudioStreamBasicDescription *inDesc) { }
#endif


#define MAX(x, y)   ((x) >= (y) ? (x) : (y))
#define MIN(x, y)   ((x) <= (y) ? (x) : (y))


// =======================================
// MixerProvider functions implementation

static DeviceList deviceCache;

INT32 DAUDIO_GetDirectAudioDeviceCount() {
    deviceCache.Refresh();
    int count = deviceCache.GetCount();
    if (count > 0) {
        // add "default" device
        count++;
        TRACE1("DAUDIO_GetDirectAudioDeviceCount: returns %d devices\n", count);
    } else {
        TRACE0("DAUDIO_GetDirectAudioDeviceCount: no devices found\n");
    }
    return count;
}

INT32 DAUDIO_GetDirectAudioDeviceDescription(INT32 mixerIndex, DirectAudioDeviceDescription *desc) {
    bool result = true;
    desc->deviceID = 0;
    if (mixerIndex == 0) {
        // default device
        strncpy(desc->name, "Default Audio Device", DAUDIO_STRING_LENGTH);
        strncpy(desc->description, "Default Audio Device", DAUDIO_STRING_LENGTH);
        desc->maxSimulLines = -1;
    } else {
        AudioDeviceID deviceID;
        result = deviceCache.GetDeviceInfo(mixerIndex-1, &deviceID, DAUDIO_STRING_LENGTH,
                desc->name, desc->vendor, desc->description, desc->version);
        if (result) {
            desc->deviceID = (INT32)deviceID;
            desc->maxSimulLines = -1;
        }
    }
    return result ? TRUE : FALSE;
}


void DAUDIO_GetFormats(INT32 mixerIndex, INT32 deviceID, int isSource, void* creator) {
    TRACE3(">>DAUDIO_GetFormats mixerIndex=%d deviceID=0x%x isSource=%d\n", (int)mixerIndex, (int)deviceID, isSource);

    AudioDeviceID audioDeviceID = deviceID == 0 ? GetDefaultDevice(isSource) : (AudioDeviceID)deviceID;

    if (audioDeviceID == 0) {
        return;
    }

    int totalChannels = GetChannelCount(audioDeviceID, isSource);

    if (totalChannels == 0) {
        TRACE0("<<DAUDIO_GetFormats, no streams!\n");
        return;
    }

    if (isSource && totalChannels < 2) {
        // report 2 channels even if only mono is supported
        totalChannels = 2;
    }

    int channels[] = {1, 2, totalChannels};
    int channelsCount = MIN(totalChannels, 3);

    float hardwareSampleRate = GetSampleRate(audioDeviceID, isSource);
    TRACE2(" DAUDIO_GetFormats: got %d channels, sampleRate == %f\n", totalChannels, hardwareSampleRate);

    // any sample rates are supported
    float sampleRate = -1;

    static int sampleBits[] = {8, 16, 24};
    static int sampleBitsCount = sizeof(sampleBits)/sizeof(sampleBits[0]);

    // the last audio format is the default one (used by DataLine.open() if format is not specified)
    // consider as default 16bit PCM stereo (mono if stereo is not supported) with the current sample rate
    int defBits = 16;
    int defChannels = MIN(2, channelsCount);
    float defSampleRate = hardwareSampleRate;
    // don't add the default format if the sample rate is not specified
    bool addDefault = defSampleRate > 0;

    // TODO: CoreAudio can handle signed/unsigned, little-endian/big-endian
    // TODO: register the formats (to prevent DirectAudio software conversion) - need to fix DirectAudioDevice.createDataLineInfo
    // to avoid software conversions if both signed/unsigned or big-/little-endian are supported
    for (int channelIndex = 0; channelIndex < channelsCount; channelIndex++) {
        for (int bitIndex = 0; bitIndex < sampleBitsCount; bitIndex++) {
            int bits = sampleBits[bitIndex];
            if (addDefault && bits == defBits && channels[channelIndex] != defChannels && sampleRate == defSampleRate) {
                // the format is the default one, don't add it now
                continue;
            }
            DAUDIO_AddAudioFormat(creator,
                bits,                       // sample size in bits
                -1,                         // frame size (auto)
                channels[channelIndex],     // channels
                sampleRate,                 // sample rate
                DAUDIO_PCM,                 // only accept PCM
                bits == 8 ? FALSE : TRUE,   // signed
                bits == 8 ? FALSE           // little-endian for 8bit
                    : UTIL_IsBigEndianPlatform());
        }
    }
    // add default format
    if (addDefault) {
        DAUDIO_AddAudioFormat(creator,
            defBits,                        // 16 bits
            -1,                             // automatically calculate frame size
            defChannels,                    // channels
            defSampleRate,                  // sample rate
            DAUDIO_PCM,                     // PCM
            TRUE,                           // signed
            UTIL_IsBigEndianPlatform());    // native endianness
    }

    TRACE0("<<DAUDIO_GetFormats\n");
}
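
// Worked example (editorial note, not part of the original source): for a stereo
// output device whose current hardware rate is 44100 Hz, the loops above register
//   8-bit  unsigned, 1 and 2 channels, any sample rate (-1)
//   16-bit signed,   1 and 2 channels, any sample rate (-1)
//   24-bit signed,   1 and 2 channels, any sample rate (-1)
// and finally the default format: 16-bit signed stereo at 44100 Hz in native byte
// order, which DataLine.open() falls back to when no format is specified.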

// =======================================
// Source/Target DataLine functions implementation

// ====
/* 1writer-1reader ring buffer class with flush() support */
class RingBuffer {
public:
    RingBuffer() : pBuffer(NULL), nBufferSize(0) {
        pthread_mutex_init(&lockMutex, NULL);
    }
    ~RingBuffer() {
        Deallocate();
        pthread_mutex_destroy(&lockMutex);
    }

    // extraBytes: number of additionally allocated bytes to prevent data
    // overlapping when almost the whole buffer is filled
    // (required only if Write() can override the buffer)
    bool Allocate(int requestedBufferSize, int extraBytes) {
        int fullBufferSize = requestedBufferSize + extraBytes;
        long powerOfTwo = 1;
        while (powerOfTwo < fullBufferSize) {
            powerOfTwo <<= 1;
        }
        if (powerOfTwo > INT_MAX || fullBufferSize < 0) {
            ERROR0("RingBuffer::Allocate: REQUESTED MEMORY SIZE IS TOO BIG\n");
            return false;
        }
        pBuffer = (Byte*)malloc(powerOfTwo);
        if (pBuffer == NULL) {
            ERROR0("RingBuffer::Allocate: OUT OF MEMORY\n");
            return false;
        }

        nBufferSize = requestedBufferSize;
        nAllocatedBytes = powerOfTwo;
        nPosMask = powerOfTwo - 1;
        nWritePos = 0;
        nReadPos = 0;
        nFlushPos = -1;

        TRACE2("RingBuffer::Allocate: OK, bufferSize=%d, allocated:%d\n", nBufferSize, nAllocatedBytes);
        return true;
    }

    void Deallocate() {
        if (pBuffer) {
            free(pBuffer);
            pBuffer = NULL;
            nBufferSize = 0;
        }
    }

    inline int GetBufferSize() {
        return nBufferSize;
    }

    inline int GetAllocatedSize() {
        return nAllocatedBytes;
    }

    // gets number of bytes available for reading
    int GetValidByteCount() {
        lock();
        INT64 result = nWritePos - (nFlushPos >= 0 ? nFlushPos : nReadPos);
        unlock();
        return result > (INT64)nBufferSize ? nBufferSize : (int)result;
    }

    int Write(void *srcBuffer, int len, bool preventOverflow) {
        lock();
        TRACE2("RingBuffer::Write (%d bytes, preventOverflow=%d)\n", len, preventOverflow ? 1 : 0);
        TRACE2(" writePos = %lld (%d)", (long long)nWritePos, Pos2Offset(nWritePos));
        TRACE2(" readPos=%lld (%d)", (long long)nReadPos, Pos2Offset(nReadPos));
        TRACE2(" flushPos=%lld (%d)\n", (long long)nFlushPos, Pos2Offset(nFlushPos));

        INT64 writePos = nWritePos;
        if (preventOverflow) {
            INT64 avail_read = writePos - (nFlushPos >= 0 ? nFlushPos : nReadPos);
            if (avail_read >= (INT64)nBufferSize) {
                // no space
                TRACE0(" preventOverflow: OVERFLOW => len = 0;\n");
                len = 0;
            } else {
                int avail_write = nBufferSize - (int)avail_read;
                if (len > avail_write) {
                    TRACE2(" preventOverflow: decrease len: %d => %d\n", len, avail_write);
                    len = avail_write;
                }
            }
        }
        unlock();

        if (len > 0) {

            write((Byte *)srcBuffer, Pos2Offset(writePos), len);

            lock();
            TRACE4("--RingBuffer::Write writePos: %lld (%d) => %lld, (%d)\n",
                (long long)nWritePos, Pos2Offset(nWritePos), (long long)nWritePos + len, Pos2Offset(nWritePos + len));
            nWritePos += len;
            unlock();
        }
        return len;
    }

    int Read(void *dstBuffer, int len) {
        lock();
        TRACE1("RingBuffer::Read (%d bytes)\n", len);
        TRACE2(" writePos = %lld (%d)", (long long)nWritePos, Pos2Offset(nWritePos));
        TRACE2(" readPos=%lld (%d)", (long long)nReadPos, Pos2Offset(nReadPos));
        TRACE2(" flushPos=%lld (%d)\n", (long long)nFlushPos, Pos2Offset(nFlushPos));

        applyFlush();
        INT64 avail_read = nWritePos - nReadPos;
        // check for overflow
        if (avail_read > (INT64)nBufferSize) {
            nReadPos = nWritePos - nBufferSize;
            avail_read = nBufferSize;
            TRACE0(" OVERFLOW\n");
        }
        INT64 readPos = nReadPos;
        unlock();

        if (len > (int)avail_read) {
            TRACE2(" RingBuffer::Read - don't have enough data, len: %d => %d\n", len, (int)avail_read);
            len = (int)avail_read;
        }

        if (len > 0) {

            read((Byte *)dstBuffer, Pos2Offset(readPos), len);

            lock();
            if (applyFlush()) {
                // just got flush(), results became obsolete
                TRACE0("--RingBuffer::Read, got Flush, return 0\n");
                len = 0;
            } else {
                TRACE4("--RingBuffer::Read readPos: %lld (%d) => %lld (%d)\n",
                    (long long)nReadPos, Pos2Offset(nReadPos), (long long)nReadPos + len, Pos2Offset(nReadPos + len));
                nReadPos += len;
            }
            unlock();
        } else {
            // underrun!
        }
        return len;
    }

    // returns number of the flushed bytes
    int Flush() {
        lock();
        INT64 flushedBytes = nWritePos - (nFlushPos >= 0 ? nFlushPos : nReadPos);
        nFlushPos = nWritePos;
        unlock();
        return flushedBytes > (INT64)nBufferSize ? nBufferSize : (int)flushedBytes;
    }

private:
    Byte *pBuffer;
    int nBufferSize;
    int nAllocatedBytes;
    INT64 nPosMask;

    pthread_mutex_t lockMutex;

    volatile INT64 nWritePos;
    volatile INT64 nReadPos;
    // Flush() sets nFlushPos value to nWritePos;
    // next Read() sets nReadPos to nFlushPos and resets nFlushPos to -1
    volatile INT64 nFlushPos;

    inline void lock() {
        pthread_mutex_lock(&lockMutex);
    }
    inline void unlock() {
        pthread_mutex_unlock(&lockMutex);
    }

    inline bool applyFlush() {
        if (nFlushPos >= 0) {
            nReadPos = nFlushPos;
            nFlushPos = -1;
            return true;
        }
        return false;
    }

    inline int Pos2Offset(INT64 pos) {
        return (int)(pos & nPosMask);
    }

    void write(Byte *srcBuffer, int dstOffset, int len) {
        int dstEndOffset = dstOffset + len;

        int lenAfterWrap = dstEndOffset - nAllocatedBytes;
        if (lenAfterWrap > 0) {
            // dest. buffer does wrap
            len = nAllocatedBytes - dstOffset;
            memcpy(pBuffer+dstOffset, srcBuffer, len);
            memcpy(pBuffer, srcBuffer+len, lenAfterWrap);
        } else {
            // dest. buffer does not wrap
            memcpy(pBuffer+dstOffset, srcBuffer, len);
        }
    }

    void read(Byte *dstBuffer, int srcOffset, int len) {
        int srcEndOffset = srcOffset + len;

        int lenAfterWrap = srcEndOffset - nAllocatedBytes;
        if (lenAfterWrap > 0) {
            // need to unwrap data
            len = nAllocatedBytes - srcOffset;
            memcpy(dstBuffer, pBuffer+srcOffset, len);
            memcpy(dstBuffer+len, pBuffer, lenAfterWrap);
        } else {
            // source buffer is not wrapped
            memcpy(dstBuffer, pBuffer+srcOffset, len);
        }
    }
};
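
// Illustrative usage sketch (editorial addition, not part of the original source):
// how the RingBuffer above is driven by its two sides - a Java-facing thread that
// writes (DAUDIO_Write) and the CoreAudio render callback that reads. The constant
// kSketchBufferBytes and the function name are hypothetical; the Allocate/Write/
// Read/Flush calls follow the signatures declared above. Disabled with #if 0 so it
// is not compiled into the library.
#if 0
static void RingBufferUsageSketch() {
    RingBuffer rb;
    const int kSketchBufferBytes = 16 * 1024;   // hypothetical line buffer size
    if (!rb.Allocate(kSketchBufferBytes, 0)) {  // no extra bytes: the writer passes preventOverflow=true
        return;
    }

    char chunk[512];
    // writer side (playback: DAUDIO_Write) - never overwrite data not yet read
    int written = rb.Write(chunk, sizeof(chunk), true);

    // reader side (playback: OutputCallback) - may return fewer bytes on underrun
    char out[512];
    int read = rb.Read(out, sizeof(out));

    // Flush() discards everything buffered but not yet read
    int flushed = rb.Flush();
    (void)written; (void)read; (void)flushed;
}
#endif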

class Resampler {
private:
    enum {
        kResamplerEndOfInputData = 1 // error to interrupt conversion (end of input data)
    };
public:
    Resampler() : converter(NULL), outBuffer(NULL) { }
    ~Resampler() {
        if (converter != NULL) {
            AudioConverterDispose(converter);
        }
        if (outBuffer != NULL) {
            free(outBuffer);
        }
    }

    // inFormat & outFormat must be interleaved!
    bool Init(const AudioStreamBasicDescription *inFormat, const AudioStreamBasicDescription *outFormat,
            int inputBufferSizeInBytes)
    {
        TRACE0(">>Resampler::Init\n");
        TRACE0(" inFormat: ");
        PrintStreamDesc(inFormat);
        TRACE0(" outFormat: ");
        PrintStreamDesc(outFormat);
        TRACE1(" inputBufferSize: %d bytes\n", inputBufferSizeInBytes);
        OSStatus err;

        if ((outFormat->mFormatFlags & kAudioFormatFlagIsNonInterleaved) != 0 && outFormat->mChannelsPerFrame != 1) {
            ERROR0("Resampler::Init ERROR: outFormat is non-interleaved\n");
            return false;
        }
        if ((inFormat->mFormatFlags & kAudioFormatFlagIsNonInterleaved) != 0 && inFormat->mChannelsPerFrame != 1) {
            ERROR0("Resampler::Init ERROR: inFormat is non-interleaved\n");
            return false;
        }

        memcpy(&asbdIn, inFormat, sizeof(AudioStreamBasicDescription));
        memcpy(&asbdOut, outFormat, sizeof(AudioStreamBasicDescription));

        err = AudioConverterNew(inFormat, outFormat, &converter);

        if (err || converter == NULL) {
            OS_ERROR1(err, "Resampler::Init (AudioConverterNew), converter=%p", converter);
            return false;
        }

        // allocate buffer for output data
        int maximumInFrames = inputBufferSizeInBytes / inFormat->mBytesPerFrame;
        // take into account trailingFrames
        AudioConverterPrimeInfo primeInfo = {0, 0};
        UInt32 sizePrime = sizeof(primeInfo);
        err = AudioConverterGetProperty(converter, kAudioConverterPrimeInfo, &sizePrime, &primeInfo);
        if (err) {
            OS_ERROR0(err, "Resampler::Init (get kAudioConverterPrimeInfo)");
            // ignore the error
        } else {
            // the default primeMethod is kConverterPrimeMethod_Normal, so we need only trailingFrames
            maximumInFrames += primeInfo.trailingFrames;
        }
        float outBufferSizeInFrames = (outFormat->mSampleRate / inFormat->mSampleRate) * ((float)maximumInFrames);
        // to avoid complex calculation just set outBufferSize to double the calculated value
        outBufferSize = (int)outBufferSizeInFrames * outFormat->mBytesPerFrame * 2;
        // safety check - consider 256 frames as the minimum input buffer
        int minOutSize = 256 * outFormat->mBytesPerFrame;
        if (outBufferSize < minOutSize) {
            outBufferSize = minOutSize;
        }

        outBuffer = malloc(outBufferSize);

        if (outBuffer == NULL) {
            ERROR1("Resampler::Init ERROR: malloc failed (%d bytes)\n", outBufferSize);
            AudioConverterDispose(converter);
            converter = NULL;
            return false;
        }

        TRACE1(" allocated: %d bytes for output buffer\n", outBufferSize);

        TRACE0("<<Resampler::Init: OK\n");
        return true;
    }

    // returns size of the internal output buffer
    int GetOutBufferSize() {
        return outBufferSize;
    }

    // process next part of data (writes resampled data to the ringBuffer without overflow check)
    int Process(void *srcBuffer, int len, RingBuffer *ringBuffer) {
        int bytesWritten = 0;
        TRACE2(">>Resampler::Process: %d bytes, converter = %p\n", len, converter);
        if (converter == NULL) {    // sanity check
            bytesWritten = ringBuffer->Write(srcBuffer, len, false);
        } else {
            InputProcData data;
            data.pThis = this;
            data.data = (Byte *)srcBuffer;
            data.dataSize = len;

            OSStatus err;
            do {
                AudioBufferList abl;    // by default it contains 1 AudioBuffer
                abl.mNumberBuffers = 1;
                abl.mBuffers[0].mNumberChannels = asbdOut.mChannelsPerFrame;
                abl.mBuffers[0].mDataByteSize   = outBufferSize;
                abl.mBuffers[0].mData           = outBuffer;

                UInt32 packets = (UInt32)outBufferSize / asbdOut.mBytesPerPacket;

                TRACE2(">>AudioConverterFillComplexBuffer: request %d packets, provide %d bytes buffer\n",
                    (int)packets, (int)abl.mBuffers[0].mDataByteSize);

                err = AudioConverterFillComplexBuffer(converter, ConverterInputProc, &data, &packets, &abl, NULL);

                TRACE2("<<AudioConverterFillComplexBuffer: got %d packets (%d bytes)\n",
                    (int)packets, (int)abl.mBuffers[0].mDataByteSize);
                if (packets > 0) {
                    int bytesToWrite = (int)(packets * asbdOut.mBytesPerPacket);
                    bytesWritten += ringBuffer->Write(abl.mBuffers[0].mData, bytesToWrite, false);
                }

                // if the outputBuffer is too small to store all available frames,
                // we get noErr here; in that case just continue the conversion
            } while (err == noErr);

            if (err != kResamplerEndOfInputData) {
                // unexpected error
                OS_ERROR0(err, "Resampler::Process (AudioConverterFillComplexBuffer)");
            }
        }
        TRACE2("<<Resampler::Process: written %d bytes (converted from %d bytes)\n", bytesWritten, len);

        return bytesWritten;
    }

    // resets internal buffers
    void Discontinue() {
        TRACE0(">>Resampler::Discontinue\n");
        if (converter != NULL) {
            AudioConverterReset(converter);
        }
        TRACE0("<<Resampler::Discontinue\n");
    }

private:
    AudioConverterRef converter;

    // buffer for output data
    // note that there is no problem if the buffer is not big enough to store
    // all converted data - it's only a performance issue
    void *outBuffer;
    int outBufferSize;

    AudioStreamBasicDescription asbdIn;
    AudioStreamBasicDescription asbdOut;

    struct InputProcData {
        Resampler *pThis;
        Byte *data;     // data == NULL means we handle Discontinue(false)
        int dataSize;   // == 0 if all data was already provided to the converter or we handle Discontinue(false)
    };

    static OSStatus ConverterInputProc(AudioConverterRef inAudioConverter, UInt32 *ioNumberDataPackets,
            AudioBufferList *ioData, AudioStreamPacketDescription **outDataPacketDescription, void *inUserData)
    {
        InputProcData *data = (InputProcData *)inUserData;

        TRACE3(" >>ConverterInputProc: requested %d packets, data contains %d bytes (%d packets)\n",
            (int)*ioNumberDataPackets, (int)data->dataSize, (int)(data->dataSize / data->pThis->asbdIn.mBytesPerPacket));
        if (data->dataSize == 0) {
            // already called & provided all input data
            // interrupt conversion by returning error
            *ioNumberDataPackets = 0;
            TRACE0(" <<ConverterInputProc: returns kResamplerEndOfInputData\n");
            return kResamplerEndOfInputData;
        }

        ioData->mNumberBuffers = 1;
        ioData->mBuffers[0].mNumberChannels = data->pThis->asbdIn.mChannelsPerFrame;
        ioData->mBuffers[0].mDataByteSize   = data->dataSize;
        ioData->mBuffers[0].mData           = data->data;

        *ioNumberDataPackets = data->dataSize / data->pThis->asbdIn.mBytesPerPacket;

        // all data has been provided to the converter
        data->dataSize = 0;

        TRACE1(" <<ConverterInputProc: returns %d packets\n", (int)(*ioNumberDataPackets));
        return noErr;
    }

};
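
// Illustrative usage sketch (editorial addition, not part of the original source):
// how the Resampler above is wired together - build interleaved input/output ASBDs,
// Init() with the size of the largest input chunk, then push captured data into a
// RingBuffer via Process(). The sample rates, channel count and buffer sizes here
// are hypothetical; FillASBDForNonInterleavedPCM is the helper defined later in
// this file. Disabled with #if 0 so it is not compiled into the library.
#if 0
static void ResamplerUsageSketch(RingBuffer *ringBuffer) {
    AudioStreamBasicDescription inFmt, outFmt;
    // hardware side: 48 kHz stereo Float32, native endianness
    FillASBDForNonInterleavedPCM(inFmt, 48000.0f, 2, 32, true, false, kAudioFormatFlagsNativeEndian != 0);
    // user side: 44.1 kHz stereo signed 16-bit, little-endian
    FillASBDForNonInterleavedPCM(outFmt, 44100.0f, 2, 16, false, true, false);

    const int kHALBufferBytes = 512 * (int)inFmt.mBytesPerFrame;    // hypothetical HAL slice: 512 frames
    Resampler resampler;
    if (!resampler.Init(&inFmt, &outFmt, kHALBufferBytes)) {
        return;
    }

    Byte captured[512 * 8];     // one HAL slice of Float32 stereo frames (512 * 8 bytes per frame)
    int saved = resampler.Process(captured, kHALBufferBytes, ringBuffer);
    (void)saved;

    // on a timestamp discontinuity the converter state must be dropped
    resampler.Discontinue();
}
#endif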

struct OSX_DirectAudioDevice {
    AudioUnit   audioUnit;
    RingBuffer  ringBuffer;
    AudioStreamBasicDescription asbd;

    // only for target lines
    UInt32      inputBufferSizeInBytes;
    Resampler   *resampler;
    // to detect discontinuity (to reset resampler)
    SInt64      lastWrittenSampleTime;


    OSX_DirectAudioDevice() : audioUnit(NULL), asbd(), resampler(NULL), lastWrittenSampleTime(0) {
    }

    ~OSX_DirectAudioDevice() {
        if (audioUnit) {
            AudioComponentInstanceDispose(audioUnit);
        }
        if (resampler) {
            delete resampler;
        }
    }
};

static AudioUnit CreateOutputUnit(AudioDeviceID deviceID, int isSource)
{
    OSStatus err;
    AudioUnit unit;

    AudioComponentDescription desc;
    desc.componentType         = kAudioUnitType_Output;
    desc.componentSubType      = (deviceID == 0 && isSource) ? kAudioUnitSubType_DefaultOutput : kAudioUnitSubType_HALOutput;
    desc.componentManufacturer = kAudioUnitManufacturer_Apple;
    desc.componentFlags        = 0;
    desc.componentFlagsMask    = 0;

    AudioComponent comp = AudioComponentFindNext(NULL, &desc);
    err = AudioComponentInstanceNew(comp, &unit);

    if (err) {
        OS_ERROR0(err, "CreateOutputUnit:OpenAComponent");
        return NULL;
    }

    if (!isSource) {
        int enableIO = 0;
        err = AudioUnitSetProperty(unit, kAudioOutputUnitProperty_EnableIO, kAudioUnitScope_Output,
                                    0, &enableIO, sizeof(enableIO));
        if (err) {
            OS_ERROR0(err, "SetProperty (output EnableIO)");
        }
        enableIO = 1;
        err = AudioUnitSetProperty(unit, kAudioOutputUnitProperty_EnableIO, kAudioUnitScope_Input,
                                    1, &enableIO, sizeof(enableIO));
        if (err) {
            OS_ERROR0(err, "SetProperty (input EnableIO)");
        }

        if (!deviceID) {
            // get real AudioDeviceID for default input device (macosx current input device)
            deviceID = GetDefaultDevice(isSource);
            if (!deviceID) {
                AudioComponentInstanceDispose(unit);
                return NULL;
            }
        }
    }

    if (deviceID) {
        err = AudioUnitSetProperty(unit, kAudioOutputUnitProperty_CurrentDevice, kAudioUnitScope_Global,
                                    0, &deviceID, sizeof(deviceID));
        if (err) {
            OS_ERROR0(err, "SetProperty (CurrentDevice)");
            AudioComponentInstanceDispose(unit);
            return NULL;
        }
    }

    return unit;
}

static OSStatus OutputCallback(void                         *inRefCon,
                               AudioUnitRenderActionFlags   *ioActionFlags,
                               const AudioTimeStamp         *inTimeStamp,
                               UInt32                       inBusNumber,
                               UInt32                       inNumberFrames,
                               AudioBufferList              *ioData)
{
    OSX_DirectAudioDevice *device = (OSX_DirectAudioDevice*)inRefCon;

    int nchannels = ioData->mNumberBuffers; // should be always == 1 (interleaved channels)
    AudioBuffer *audioBuffer = ioData->mBuffers;

    TRACE3(">>OutputCallback: busNum=%d, requested %d frames (%d bytes)\n",
        (int)inBusNumber, (int)inNumberFrames, (int)(inNumberFrames * device->asbd.mBytesPerFrame));
    TRACE3(" abl: %d buffers, buffer[0].channels=%d, buffer.size=%d\n",
        nchannels, (int)audioBuffer->mNumberChannels, (int)audioBuffer->mDataByteSize);

    int bytesToRead = inNumberFrames * device->asbd.mBytesPerFrame;
    if (bytesToRead > (int)audioBuffer->mDataByteSize) {
        TRACE0("--OutputCallback: !!! audioBuffer IS TOO SMALL!!!\n");
        bytesToRead = audioBuffer->mDataByteSize / device->asbd.mBytesPerFrame * device->asbd.mBytesPerFrame;
    }
    int bytesRead = device->ringBuffer.Read(audioBuffer->mData, bytesToRead);
    if (bytesRead < bytesToRead) {
        // not enough data (underrun)
        TRACE2("--OutputCallback: !!! UNDERRUN (read %d bytes of %d)!!!\n", bytesRead, bytesToRead);
        // silence the rest
        memset((Byte*)audioBuffer->mData + bytesRead, 0, bytesToRead-bytesRead);
        bytesRead = bytesToRead;
    }

    audioBuffer->mDataByteSize = (UInt32)bytesRead;
    // SAFETY: set mDataByteSize for all other AudioBuffers in the AudioBufferList to zero
    while (--nchannels > 0) {
        audioBuffer++;
        audioBuffer->mDataByteSize = 0;
    }
    TRACE1("<<OutputCallback (returns %d)\n", bytesRead);

    return noErr;
}

static OSStatus InputCallback(void                          *inRefCon,
                              AudioUnitRenderActionFlags    *ioActionFlags,
                              const AudioTimeStamp          *inTimeStamp,
                              UInt32                        inBusNumber,
                              UInt32                        inNumberFrames,
                              AudioBufferList               *ioData)
{
    OSX_DirectAudioDevice *device = (OSX_DirectAudioDevice*)inRefCon;

    TRACE4(">>InputCallback: busNum=%d, timeStamp=%lld, %d frames (%d bytes)\n",
        (int)inBusNumber, (long long)inTimeStamp->mSampleTime, (int)inNumberFrames, (int)(inNumberFrames * device->asbd.mBytesPerFrame));

    AudioBufferList abl;    // by default it contains 1 AudioBuffer
    abl.mNumberBuffers = 1;
    abl.mBuffers[0].mNumberChannels = device->asbd.mChannelsPerFrame;
    abl.mBuffers[0].mDataByteSize   = device->inputBufferSizeInBytes;   // assume this is == (inNumberFrames * device->asbd.mBytesPerFrame)
    abl.mBuffers[0].mData           = NULL;     // request for the audioUnit's buffer

    OSStatus err = AudioUnitRender(device->audioUnit, ioActionFlags, inTimeStamp, inBusNumber, inNumberFrames, &abl);
    if (err) {
        OS_ERROR0(err, "<<InputCallback: AudioUnitRender");
    } else {
        if (device->resampler != NULL) {
            // test for discontinuity
            // AUHAL starts timestamps at zero, so test if the current timestamp is less than the last written
            SInt64 sampleTime = inTimeStamp->mSampleTime;
            if (sampleTime < device->lastWrittenSampleTime) {
                // discontinuity, reset the resampler
                TRACE2(" InputCallback (RESAMPLED), DISCONTINUITY (%f -> %f)\n",
                    (float)device->lastWrittenSampleTime, (float)sampleTime);

                device->resampler->Discontinue();
            } else {
                TRACE2(" InputCallback (RESAMPLED), continuous: lastWrittenSampleTime = %f, sampleTime=%f\n",
                    (float)device->lastWrittenSampleTime, (float)sampleTime);
            }
            device->lastWrittenSampleTime = sampleTime + inNumberFrames;

            int bytesWritten = device->resampler->Process(abl.mBuffers[0].mData, (int)abl.mBuffers[0].mDataByteSize, &device->ringBuffer);
            TRACE2("<<InputCallback (RESAMPLED, saved %d bytes of %d)\n", bytesWritten, (int)abl.mBuffers[0].mDataByteSize);
        } else {
            int bytesWritten = device->ringBuffer.Write(abl.mBuffers[0].mData, (int)abl.mBuffers[0].mDataByteSize, false);
            TRACE2("<<InputCallback (saved %d bytes of %d)\n", bytesWritten, (int)abl.mBuffers[0].mDataByteSize);
        }
    }

    return noErr;
}


static void FillASBDForNonInterleavedPCM(AudioStreamBasicDescription& asbd,
        float sampleRate, int channels, int sampleSizeInBits, bool isFloat, int isSigned, bool isBigEndian)
{
    // FillOutASBDForLPCM cannot produce unsigned integer format
    asbd.mSampleRate = sampleRate;
    asbd.mFormatID = kAudioFormatLinearPCM;
    asbd.mFormatFlags = (isFloat ? kAudioFormatFlagIsFloat : (isSigned ? kAudioFormatFlagIsSignedInteger : 0))
        | (isBigEndian ? (kAudioFormatFlagIsBigEndian) : 0)
        | kAudioFormatFlagIsPacked;
    asbd.mBytesPerPacket = channels * ((sampleSizeInBits + 7) / 8);
    asbd.mFramesPerPacket = 1;
    asbd.mBytesPerFrame = asbd.mBytesPerPacket;
    asbd.mChannelsPerFrame = channels;
    asbd.mBitsPerChannel = sampleSizeInBits;
}
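
// Worked example (editorial note, not part of the original source): for a typical
// CD-quality SourceDataLine request - 44100 Hz, 2 channels, 16-bit signed,
// little-endian - FillASBDForNonInterleavedPCM above produces:
//   mSampleRate       = 44100.0
//   mFormatID         = kAudioFormatLinearPCM
//   mFormatFlags      = kAudioFormatFlagIsSignedInteger | kAudioFormatFlagIsPacked
//   mBytesPerPacket   = 2 * ((16 + 7) / 8) = 4
//   mFramesPerPacket  = 1
//   mBytesPerFrame    = 4
//   mChannelsPerFrame = 2
//   mBitsPerChannel   = 16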

void* DAUDIO_Open(INT32 mixerIndex, INT32 deviceID, int isSource,
                  int encoding, float sampleRate, int sampleSizeInBits,
                  int frameSize, int channels,
                  int isSigned, int isBigEndian, int bufferSizeInBytes)
{
    TRACE3(">>DAUDIO_Open: mixerIndex=%d deviceID=0x%x isSource=%d\n", (int)mixerIndex, (unsigned int)deviceID, isSource);
    TRACE3("  sampleRate=%d sampleSizeInBits=%d channels=%d\n", (int)sampleRate, sampleSizeInBits, channels);
#ifdef USE_TRACE
    {
        AudioDeviceID audioDeviceID = deviceID;
        if (audioDeviceID == 0) {
            // default device
            audioDeviceID = GetDefaultDevice(isSource);
        }
        char name[256];
        OSStatus err = GetAudioObjectProperty(audioDeviceID, kAudioUnitScope_Global, kAudioDevicePropertyDeviceName, 256, &name, 0);
        if (err != noErr) {
            OS_ERROR1(err, " audioDeviceID=0x%x, name is N/A:", (int)audioDeviceID);
        } else {
            TRACE2(" audioDeviceID=0x%x, name=%s\n", (int)audioDeviceID, name);
        }
    }
#endif

    if (encoding != DAUDIO_PCM) {
        ERROR1("<<DAUDIO_Open: ERROR: unsupported encoding (%d)\n", encoding);
        return NULL;
    }
    if (channels <= 0) {
        ERROR1("<<DAUDIO_Open: ERROR: Invalid number of channels=%d!\n", channels);
        return NULL;
    }

    OSX_DirectAudioDevice *device = new OSX_DirectAudioDevice();

    AudioUnitScope scope = isSource ? kAudioUnitScope_Input : kAudioUnitScope_Output;
    int element = isSource ? 0 : 1;
    OSStatus err = noErr;
    int extraBufferBytes = 0;

    device->audioUnit = CreateOutputUnit(deviceID, isSource);

    if (!device->audioUnit) {
        delete device;
        return NULL;
    }

    if (!isSource) {
        AudioDeviceID actualDeviceID = deviceID != 0 ? deviceID : GetDefaultDevice(isSource);
        float hardwareSampleRate = GetSampleRate(actualDeviceID, isSource);
        TRACE2("--DAUDIO_Open: sampleRate = %f, hardwareSampleRate=%f\n", sampleRate, hardwareSampleRate);

        if (fabs(sampleRate - hardwareSampleRate) > 1) {
            device->resampler = new Resampler();

            // request HAL for Float32 with native endianness
            FillASBDForNonInterleavedPCM(device->asbd, hardwareSampleRate, channels, 32, true, false, kAudioFormatFlagsNativeEndian != 0);
        } else {
            sampleRate = hardwareSampleRate;    // in case sample rates are not exactly equal
        }
    }

    if (device->resampler == NULL) {
        // no resampling, request HAL for the requested format
        FillASBDForNonInterleavedPCM(device->asbd, sampleRate, channels, sampleSizeInBits, false, isSigned, isBigEndian);
    }

    err = AudioUnitSetProperty(device->audioUnit, kAudioUnitProperty_StreamFormat, scope, element, &device->asbd, sizeof(device->asbd));
    if (err) {
        OS_ERROR0(err, "<<DAUDIO_Open set StreamFormat");
        delete device;
        return NULL;
    }

    AURenderCallbackStruct output;
    output.inputProc       = isSource ? OutputCallback : InputCallback;
    output.inputProcRefCon = device;

    err = AudioUnitSetProperty(device->audioUnit,
                                isSource
                                    ? (AudioUnitPropertyID)kAudioUnitProperty_SetRenderCallback
                                    : (AudioUnitPropertyID)kAudioOutputUnitProperty_SetInputCallback,
                                kAudioUnitScope_Global, 0, &output, sizeof(output));
    if (err) {
        OS_ERROR0(err, "<<DAUDIO_Open set RenderCallback");
        delete device;
        return NULL;
    }

    err = AudioUnitInitialize(device->audioUnit);
    if (err) {
        OS_ERROR0(err, "<<DAUDIO_Open UnitInitialize");
        delete device;
        return NULL;
    }

    if (!isSource) {
        // for target lines we need extra bytes in the ringBuffer
        // to prevent collisions when InputCallback overrides data on overflow
        UInt32 size;
        OSStatus err;

        size = sizeof(device->inputBufferSizeInBytes);
        err = AudioUnitGetProperty(device->audioUnit, kAudioDevicePropertyBufferFrameSize, kAudioUnitScope_Global,
                                    0, &device->inputBufferSizeInBytes, &size);
        if (err) {
            OS_ERROR0(err, "<<DAUDIO_Open (TargetDataLine)GetBufferSize\n");
            delete device;
            return NULL;
        }
        device->inputBufferSizeInBytes *= device->asbd.mBytesPerFrame;  // convert frames to bytes
        extraBufferBytes = (int)device->inputBufferSizeInBytes;
    }

    if (device->resampler != NULL) {
        // the resampler output format is the user-requested format (== ringBuffer format)
        AudioStreamBasicDescription asbdOut; // ringBuffer format
        FillASBDForNonInterleavedPCM(asbdOut, sampleRate, channels, sampleSizeInBits, false, isSigned, isBigEndian);

        // set resampler input buffer size to the HAL buffer size
        if (!device->resampler->Init(&device->asbd, &asbdOut, (int)device->inputBufferSizeInBytes)) {
            ERROR0("<<DAUDIO_Open: resampler.Init() FAILED.\n");
            delete device;
            return NULL;
        }
        // extra bytes in the ringBuffer (extraBufferBytes) should be equal to the resampler output buffer size
        extraBufferBytes = device->resampler->GetOutBufferSize();
    }

    if (!device->ringBuffer.Allocate(bufferSizeInBytes, extraBufferBytes)) {
        ERROR0("<<DAUDIO_Open: Ring buffer allocation error\n");
        delete device;
        return NULL;
    }

    TRACE0("<<DAUDIO_Open: OK\n");
    return device;
}

int DAUDIO_Start(void* id, int isSource) {
    OSX_DirectAudioDevice *device = (OSX_DirectAudioDevice*)id;
    TRACE0("DAUDIO_Start\n");

    OSStatus err = AudioOutputUnitStart(device->audioUnit);

    if (err != noErr) {
        OS_ERROR0(err, "DAUDIO_Start");
    }

    return err == noErr ? TRUE : FALSE;
}

int DAUDIO_Stop(void* id, int isSource) {
    OSX_DirectAudioDevice *device = (OSX_DirectAudioDevice*)id;
    TRACE0("DAUDIO_Stop\n");

    OSStatus err = AudioOutputUnitStop(device->audioUnit);

    return err == noErr ? TRUE : FALSE;
}

void DAUDIO_Close(void* id, int isSource) {
    OSX_DirectAudioDevice *device = (OSX_DirectAudioDevice*)id;
    TRACE0("DAUDIO_Close\n");

    delete device;
}

int DAUDIO_Write(void* id, char* data, int byteSize) {
    OSX_DirectAudioDevice *device = (OSX_DirectAudioDevice*)id;
    TRACE1(">>DAUDIO_Write: %d bytes to write\n", byteSize);

    int result = device->ringBuffer.Write(data, byteSize, true);

    TRACE1("<<DAUDIO_Write: %d bytes written\n", result);
    return result;
}

int DAUDIO_Read(void* id, char* data, int byteSize) {
    OSX_DirectAudioDevice *device = (OSX_DirectAudioDevice*)id;
    TRACE1(">>DAUDIO_Read: %d bytes to read\n", byteSize);

    int result = device->ringBuffer.Read(data, byteSize);

    TRACE1("<<DAUDIO_Read: %d bytes have been read\n", result);
    return result;
}

int DAUDIO_GetBufferSize(void* id, int isSource) {
    OSX_DirectAudioDevice *device = (OSX_DirectAudioDevice*)id;

    int bufferSizeInBytes = device->ringBuffer.GetBufferSize();

    TRACE1("DAUDIO_GetBufferSize returns %d\n", bufferSizeInBytes);
    return bufferSizeInBytes;
}

int DAUDIO_StillDraining(void* id, int isSource) {
    OSX_DirectAudioDevice *device = (OSX_DirectAudioDevice*)id;

    int draining = device->ringBuffer.GetValidByteCount() > 0 ? TRUE : FALSE;

    TRACE1("DAUDIO_StillDraining returns %d\n", draining);
    return draining;
}

int DAUDIO_Flush(void* id, int isSource) {
    OSX_DirectAudioDevice *device = (OSX_DirectAudioDevice*)id;
    TRACE0("DAUDIO_Flush\n");

    device->ringBuffer.Flush();

    return TRUE;
}

int DAUDIO_GetAvailable(void* id, int isSource) {
    OSX_DirectAudioDevice *device = (OSX_DirectAudioDevice*)id;

    int bytesInBuffer = device->ringBuffer.GetValidByteCount();
    if (isSource) {
        return device->ringBuffer.GetBufferSize() - bytesInBuffer;
    } else {
        return bytesInBuffer;
    }
}

INT64 DAUDIO_GetBytePosition(void* id, int isSource, INT64 javaBytePos) {
    OSX_DirectAudioDevice *device = (OSX_DirectAudioDevice*)id;
    INT64 position;

    if (isSource) {
        position = javaBytePos - device->ringBuffer.GetValidByteCount();
    } else {
        position = javaBytePos + device->ringBuffer.GetValidByteCount();
    }

    TRACE2("DAUDIO_GetBytePosition returns %lld (javaBytePos = %lld)\n", (long long)position, (long long)javaBytePos);
    return position;
}

void DAUDIO_SetBytePosition(void* id, int isSource, INT64 javaBytePos) {
    // javaBytePos is not needed here (it's available in DAUDIO_GetBytePosition)
}

int DAUDIO_RequiresServicing(void* id, int isSource) {
    return FALSE;
}

void DAUDIO_Service(void* id, int isSource) {
    // unreachable
}

#endif  // USE_DAUDIO == TRUE