Play and record streaming audio
- by Igor
I'm working on an iPhone app that should be able to play and record streaming audio at the same time. Is that actually possible? I'm trying to mix the SpeakHere and AudioRecorder samples, but all I get is an empty file with no audio data in it...
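From what I've read, simultaneous playback and recording needs the audio session to be in the play-and-record category before any queue is started. I assume the setup would look roughly like this (a minimal sketch using AVAudioSession, e.g. in viewDidLoad; I haven't confirmed whether this is related to my problem):

#import <AVFoundation/AVFoundation.h>

// Minimal sketch (my assumption): put the shared audio session into the
// play-and-record category before creating/starting the input and output queues.
NSError *sessionError = nil;
BOOL ok = [[AVAudioSession sharedInstance] setCategory:AVAudioSessionCategoryPlayAndRecord
                                                 error:&sessionError];
ok = ok && [[AVAudioSession sharedInstance] setActive:YES error:&sessionError];
if (!ok) {
    NSLog(@"audio session setup failed: %@", sessionError);
}
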
Here is my .m code:
import "AzRadioViewController.h"
@implementation AzRadioViewController
static const CFOptionFlags kNetworkEvents = kCFStreamEventOpenCompleted |
kCFStreamEventHasBytesAvailable |
kCFStreamEventEndEncountered |
kCFStreamEventErrorOccurred;
void MyAudioQueueOutputCallback( void* inClientData,
AudioQueueRef inAQ,
AudioQueueBufferRef inBuffer,
const AudioTimeStamp* inStartTime,
UInt32 inNumberPacketDescriptions,
const AudioStreamPacketDescription* inPacketDesc
)
{
NSLog(@"start MyAudioQueueOutputCallback");
MyData* myData = (MyData*)inClientData;
NSLog(@"--- %i", inNumberPacketDescriptions);
if(inNumberPacketDescriptions == 0 && myData->dataFormat.mBytesPerPacket != 0)
{
inNumberPacketDescriptions = inBuffer->mAudioDataByteSize / myData->dataFormat.mBytesPerPacket;
}
OSStatus status = AudioFileWritePackets(myData->audioFile, FALSE, inBuffer->mAudioDataByteSize,
inPacketDesc, myData->currentPacket, &inNumberPacketDescriptions, inBuffer->mAudioData);
if(status == 0)
{
myData->currentPacket += inNumberPacketDescriptions;
}
NSLog(@"status:%i curpac:%i pcdesct: %i", status, myData->currentPacket, inNumberPacketDescriptions);
unsigned int bufIndex = MyFindQueueBuffer(myData, inBuffer);
pthread_mutex_lock(&myData->mutex);
myData->inuse[bufIndex] = false;
pthread_cond_signal(&myData->cond);
pthread_mutex_unlock(&myData->mutex);
}
OSStatus StartQueueIfNeeded(MyData* myData)
{
NSLog(@"start StartQueueIfNeeded");
OSStatus err = noErr;
if (!myData->started) {
err = AudioQueueStart(myData->queue, NULL);
if (err) { PRINTERROR("AudioQueueStart"); myData->failed = true; return err; }
myData->started = true;
printf("started\n");
}
return err;
}
OSStatus MyEnqueueBuffer(MyData* myData)
{
NSLog(@"start MyEnqueueBuffer");
OSStatus err = noErr;
myData->inuse[myData->fillBufferIndex] = true;
AudioQueueBufferRef fillBuf = myData->audioQueueBuffer[myData->fillBufferIndex];
fillBuf->mAudioDataByteSize = myData->bytesFilled;
err = AudioQueueEnqueueBuffer(myData->queue, fillBuf, myData->packetsFilled, myData->packetDescs);
if (err) { PRINTERROR("AudioQueueEnqueueBuffer"); myData->failed = true; return err; }
StartQueueIfNeeded(myData);
return err;
}
void WaitForFreeBuffer(MyData* myData)
{
NSLog(@"start WaitForFreeBuffer");
if (++myData->fillBufferIndex >= kNumAQBufs) myData->fillBufferIndex = 0;
myData->bytesFilled = 0;
myData->packetsFilled = 0;
printf("->lock\n");
pthread_mutex_lock(&myData->mutex);
while (myData->inuse[myData->fillBufferIndex]) {
printf("... WAITING ...\n");
pthread_cond_wait(&myData->cond, &myData->mutex);
}
pthread_mutex_unlock(&myData->mutex);
printf("<-unlock\n");
}
int MyFindQueueBuffer(MyData* myData, AudioQueueBufferRef inBuffer)
{
NSLog(@"start MyFindQueueBuffer");
for (unsigned int i = 0; i < kNumAQBufs; ++i) {
if (inBuffer == myData->audioQueueBuffer[i])
return i;
}
return -1;
}
void MyAudioQueueIsRunningCallback( void* inClientData,
AudioQueueRef inAQ,
AudioQueuePropertyID inID)
{
NSLog(@"start MyAudioQueueIsRunningCallback");
MyData* myData = (MyData*)inClientData;
UInt32 running;
UInt32 size;
OSStatus err = AudioQueueGetProperty(inAQ, kAudioQueueProperty_IsRunning, &running, &size);
if (err) { PRINTERROR("get kAudioQueueProperty_IsRunning"); return; }
if (!running) {
pthread_mutex_lock(&myData->mutex);
pthread_cond_signal(&myData->done);
pthread_mutex_unlock(&myData->mutex);
}
}
void MyPropertyListenerProc( void * inClientData,
AudioFileStreamID inAudioFileStream,
AudioFileStreamPropertyID inPropertyID,
UInt32 * ioFlags)
{
NSLog(@"start MyPropertyListenerProc");
MyData* myData = (MyData*)inClientData;
OSStatus err = noErr;
printf("found property '%c%c%c%c'\n", (inPropertyID24)&255, (inPropertyID16)&255, (inPropertyID8)&255, inPropertyID&255);
switch (inPropertyID) {
case kAudioFileStreamProperty_ReadyToProducePackets :
{
AudioStreamBasicDescription asbd;
UInt32 asbdSize = sizeof(asbd);
err = AudioFileStreamGetProperty(inAudioFileStream, kAudioFileStreamProperty_DataFormat, &asbdSize, &asbd);
if (err) { PRINTERROR("get kAudioFileStreamProperty_DataFormat"); myData-failed = true; break; }
err = AudioQueueNewOutput(&asbd, MyAudioQueueOutputCallback, myData, NULL, NULL, 0, &myData-queue);
if (err) { PRINTERROR("AudioQueueNewOutput"); myData-failed = true; break; }
for (unsigned int i = 0; i < kNumAQBufs; ++i) {
err = AudioQueueAllocateBuffer(myData-queue, kAQBufSize, &myData-audioQueueBuffer[i]);
if (err) { PRINTERROR("AudioQueueAllocateBuffer"); myData-failed = true; break; }
}
UInt32 cookieSize;
Boolean writable;
err = AudioFileStreamGetPropertyInfo(inAudioFileStream, kAudioFileStreamProperty_MagicCookieData, &cookieSize, &writable);
if (err) { PRINTERROR("info kAudioFileStreamProperty_MagicCookieData"); break; }
printf("cookieSize %d\n", cookieSize);
void* cookieData = calloc(1, cookieSize);
err = AudioFileStreamGetProperty(inAudioFileStream, kAudioFileStreamProperty_MagicCookieData, &cookieSize, cookieData);
if (err) { PRINTERROR("get kAudioFileStreamProperty_MagicCookieData"); free(cookieData); break; }
err = AudioQueueSetProperty(myData->queue, kAudioQueueProperty_MagicCookie, cookieData, cookieSize);
free(cookieData);
if (err) { PRINTERROR("set kAudioQueueProperty_MagicCookie"); break; }
err = AudioQueueAddPropertyListener(myData->queue, kAudioQueueProperty_IsRunning, MyAudioQueueIsRunningCallback, myData);
if (err) { PRINTERROR("AudioQueueAddPropertyListener"); myData->failed = true; break; }
break;
}
}
}
static void
ReadStreamClientCallBack(CFReadStreamRef stream, CFStreamEventType type, void *clientCallBackInfo) {
NSLog(@"start ReadStreamClientCallBack");
if(type == kCFStreamEventHasBytesAvailable) {
UInt8 buffer[2048];
CFIndex bytesRead = CFReadStreamRead(stream, buffer, sizeof(buffer));
if (bytesRead < 0) {
}
else if (bytesRead) {
OSStatus err = AudioFileStreamParseBytes(globalMyData->audioFileStream, bytesRead, buffer, 0);
if (err) { PRINTERROR("AudioFileStreamParseBytes"); }
}
}
}
void MyPacketsProc(void * inClientData,
UInt32 inNumberBytes,
UInt32 inNumberPackets,
const void * inInputData,
AudioStreamPacketDescription inPacketDescriptions)
{
NSLog(@"start MyPacketsProc");
MyData* myData = (MyData*)inClientData;
printf("got data. bytes: %d packets: %d\n", inNumberBytes, inNumberPackets);
for (int i = 0; i < inNumberPackets; ++i) {
SInt64 packetOffset = inPacketDescriptions[i].mStartOffset;
SInt64 packetSize = inPacketDescriptions[i].mDataByteSize;
size_t bufSpaceRemaining = kAQBufSize - myData->bytesFilled;
if (bufSpaceRemaining < packetSize) {
MyEnqueueBuffer(myData);
WaitForFreeBuffer(myData);
}
AudioQueueBufferRef fillBuf = myData->audioQueueBuffer[myData->fillBufferIndex];
memcpy((char*)fillBuf->mAudioData + myData->bytesFilled, (const char*)inInputData + packetOffset, packetSize);
myData->packetDescs[myData->packetsFilled] = inPacketDescriptions[i];
myData->packetDescs[myData->packetsFilled].mStartOffset = myData->bytesFilled;
myData->bytesFilled += packetSize;
myData->packetsFilled += 1;
size_t packetsDescsRemaining = kAQMaxPacketDescs - myData->packetsFilled;
if (packetsDescsRemaining == 0) {
MyEnqueueBuffer(myData);
WaitForFreeBuffer(myData);
}
}
}
- (IBAction)buttonPlayPressed:(id)sender
{
label.text = @"Buffering";
[self connectionStart];
}
- (IBAction)buttonSavePressed:(id)sender
{
NSLog(@"save");
AudioFileClose(myData.audioFile);
AudioQueueDispose(myData.queue, TRUE);
}
bool getFilename(char* buffer,int maxBufferLength)
{
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory,
NSUserDomainMask, YES);
NSString *docDir = [paths objectAtIndex:0];
NSString* file = [docDir stringByAppendingString:@"/rec.caf"];
return [file getCString:buffer maxLength:maxBufferLength encoding:NSUTF8StringEncoding];
}
-(void)connectionStart {
@try {
MyData* myData = (MyData*)calloc(1, sizeof(MyData));
globalMyData = myData;
pthread_mutex_init(&myData->mutex, NULL);
pthread_cond_init(&myData->cond, NULL);
pthread_cond_init(&myData->done, NULL);
NSLog(@"Start");
myData->dataFormat.mSampleRate = 16000.0f;
myData->dataFormat.mFormatID = kAudioFormatLinearPCM;
myData->dataFormat.mFramesPerPacket = 1;
myData->dataFormat.mChannelsPerFrame = 1;
myData->dataFormat.mBytesPerFrame = 2;
myData->dataFormat.mBytesPerPacket = 2;
myData->dataFormat.mBitsPerChannel = 16;
myData->dataFormat.mReserved = 0;
myData->dataFormat.mFormatFlags = kLinearPCMFormatFlagIsSignedInteger | kLinearPCMFormatFlagIsPacked;
int i, bufferByteSize;
UInt32 size;
AudioQueueNewInput(
&myData->dataFormat,
MyAudioQueueOutputCallback,
&myData,
NULL /* run loop */, kCFRunLoopCommonModes /* run loop mode */,
0 /* flags */, &myData->queue);
size = sizeof(&myData->dataFormat);
AudioQueueGetProperty(&myData->queue, kAudioQueueProperty_StreamDescription,
&myData->dataFormat, &size);
CFURLRef fileURL;
char path[256];
memset(path,0,sizeof(path));
getFilename(path,256);
fileURL = CFURLCreateFromFileSystemRepresentation(NULL, (UInt8*)path, strlen(path), FALSE);
AudioFileCreateWithURL(fileURL,
kAudioFileCAFType,
&myData->dataFormat,
kAudioFileFlags_EraseFile,
&myData->audioFile);
OSStatus err = AudioFileStreamOpen(myData, MyPropertyListenerProc, MyPacketsProc,
kAudioFileMP3Type, &myData->audioFileStream);
if (err) { PRINTERROR("AudioFileStreamOpen"); return 1; }
CFStreamClientContext ctxt = {0, self, NULL, NULL, NULL};
CFStringRef bodyData = CFSTR(""); // Usually used for POST data
CFStringRef headerFieldName = CFSTR("X-My-Favorite-Field");
CFStringRef headerFieldValue = CFSTR("Dreams");
CFStringRef url = CFSTR(RADIO_LOCATION);
CFURLRef myURL = CFURLCreateWithString(kCFAllocatorDefault, url, NULL);
CFStringRef requestMethod = CFSTR("GET");
CFHTTPMessageRef myRequest = CFHTTPMessageCreateRequest(kCFAllocatorDefault, requestMethod, myURL, kCFHTTPVersion1_1);
CFHTTPMessageSetBody(myRequest, bodyData);
CFHTTPMessageSetHeaderFieldValue(myRequest, headerFieldName, headerFieldValue);
CFReadStreamRef stream = CFReadStreamCreateForHTTPRequest(kCFAllocatorDefault, myRequest);
if (!stream) {
NSLog(@"Creating the stream failed");
return;
}
if (!CFReadStreamSetClient(stream, kNetworkEvents, ReadStreamClientCallBack, &ctxt)) {
CFRelease(stream);
NSLog(@"Setting the stream's client failed.");
return;
}
CFReadStreamScheduleWithRunLoop(stream, CFRunLoopGetCurrent(), kCFRunLoopCommonModes);
if (!CFReadStreamOpen(stream)) {
CFReadStreamSetClient(stream, 0, NULL, NULL);
CFReadStreamUnscheduleFromRunLoop(stream, CFRunLoopGetCurrent(), kCFRunLoopCommonModes);
CFRelease(stream);
NSLog(@"Opening the stream failed.");
return;
}
}
@catch (NSException *exception) {
NSLog(@"main: Caught %@: %@", [exception name], [exception reason]);
}
}
- (void)viewDidLoad {
[[UIApplication sharedApplication] setIdleTimerDisabled:YES];
[super viewDidLoad];
}
- (void)didReceiveMemoryWarning {
[super didReceiveMemoryWarning];
}
- (void)viewDidUnload {
}
- (void)dealloc {
[super dealloc];
}
@end