├── .gitignore ├── CH10_iOSPlayThrough ├── en.lproj │ ├── InfoPlist.strings │ └── MainWindow.xib ├── Icon.png ├── Icon@2x.png ├── main.m ├── CH10_iOSPlayThroughAppDelegate.h ├── CH10_iOSPlayThrough-Info.plist ├── README.txt └── CH10_iOSPlayThroughAppDelegate.m ├── CH11_MIDIWifiSource ├── en.lproj │ └── InfoPlist.strings ├── Icon.png ├── Icon@2x.png ├── main.m ├── CH11_MIDIWifiSourceViewController.h ├── CH11_MIDIWifiSourceAppDelegate.h ├── README.txt ├── CH11_MIDIWifiSource-Info.plist ├── CH11_MIDIWifiSourceAppDelegate.m └── CH11_MIDIWifiSourceViewController.m ├── CH12_MIDIToAUSamplerIOS ├── en.lproj │ └── InfoPlist.strings ├── Icon.png ├── Icon@2x.png ├── chris-coreaudio-c2.caf ├── AppDelegate.h ├── main.m ├── README.txt ├── CH12_MIDIToAUSamplerIOS-Info.plist ├── ch12-aupreset.aupreset └── AppDelegate.m ├── CH10_iOSBackgroundingTone ├── en.lproj │ ├── InfoPlist.strings │ └── MainWindow.xib ├── Icon.png ├── Icon@2x.png ├── main.m ├── CH10_iOSBackgroundingToneAppDelegate.h ├── CH10_iOSBackgroundingTone-Info.plist ├── README.txt └── CH10_iOSBackgroundingToneAppDelegate.m ├── CH12_MIDIToAUSampler ├── chris-coreaudio-c2.caf ├── README.txt ├── README-AUPreset.txt ├── main.c └── ch12-aupreset.aupreset ├── README.md ├── CH07_AUGraphSpeechSynthesis ├── README.txt └── main.c ├── CH05_Player ├── README.txt └── main.c ├── CH07_AUGraphPlayer ├── README.txt └── main.c ├── CH09_OpenALOrbitLoop ├── README.txt └── main.c ├── CH11_MIDIToAUGraph ├── README.txt └── main.c ├── CH04_Recorder ├── README.txt └── main.c ├── CH07_AUGraphSineWave ├── README.txt └── main.c ├── CH09_OpenALOrbitStream ├── README.txt └── main.c ├── CH06_AudioConverter ├── README.txt └── main.c ├── CH06_ExtAudioFileConverter ├── README.txt └── main.c ├── CH03_CAStreamFormatTester ├── README.txt └── main.m ├── CH08_AUGraphInput ├── README.txt └── CARingBuffer-README ├── CH02_CAToneFileGenerator ├── README.txt └── main.m ├── CH01_CAMetadata ├── README.txt └── main.m └── CHANGES.txt /.gitignore: -------------------------------------------------------------------------------- 1 | a.out 2 | *.o 3 | .DS_Store 4 | -------------------------------------------------------------------------------- /CH10_iOSPlayThrough/en.lproj/InfoPlist.strings: -------------------------------------------------------------------------------- 1 | /* Localized versions of Info.plist keys */ 2 | 3 | -------------------------------------------------------------------------------- /CH11_MIDIWifiSource/en.lproj/InfoPlist.strings: -------------------------------------------------------------------------------- 1 | /* Localized versions of Info.plist keys */ 2 | 3 | -------------------------------------------------------------------------------- /CH12_MIDIToAUSamplerIOS/en.lproj/InfoPlist.strings: -------------------------------------------------------------------------------- 1 | /* Localized versions of Info.plist keys */ 2 | 3 | -------------------------------------------------------------------------------- /CH10_iOSBackgroundingTone/en.lproj/InfoPlist.strings: -------------------------------------------------------------------------------- 1 | /* Localized versions of Info.plist keys */ 2 | 3 | -------------------------------------------------------------------------------- /CH10_iOSPlayThrough/Icon.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rweichler/coreaudio-examples/HEAD/CH10_iOSPlayThrough/Icon.png -------------------------------------------------------------------------------- 
/CH11_MIDIWifiSource/Icon.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rweichler/coreaudio-examples/HEAD/CH11_MIDIWifiSource/Icon.png -------------------------------------------------------------------------------- /CH10_iOSPlayThrough/Icon@2x.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rweichler/coreaudio-examples/HEAD/CH10_iOSPlayThrough/Icon@2x.png -------------------------------------------------------------------------------- /CH11_MIDIWifiSource/Icon@2x.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rweichler/coreaudio-examples/HEAD/CH11_MIDIWifiSource/Icon@2x.png -------------------------------------------------------------------------------- /CH12_MIDIToAUSamplerIOS/Icon.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rweichler/coreaudio-examples/HEAD/CH12_MIDIToAUSamplerIOS/Icon.png -------------------------------------------------------------------------------- /CH10_iOSBackgroundingTone/Icon.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rweichler/coreaudio-examples/HEAD/CH10_iOSBackgroundingTone/Icon.png -------------------------------------------------------------------------------- /CH10_iOSBackgroundingTone/Icon@2x.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rweichler/coreaudio-examples/HEAD/CH10_iOSBackgroundingTone/Icon@2x.png -------------------------------------------------------------------------------- /CH12_MIDIToAUSamplerIOS/Icon@2x.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rweichler/coreaudio-examples/HEAD/CH12_MIDIToAUSamplerIOS/Icon@2x.png -------------------------------------------------------------------------------- /CH12_MIDIToAUSampler/chris-coreaudio-c2.caf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rweichler/coreaudio-examples/HEAD/CH12_MIDIToAUSampler/chris-coreaudio-c2.caf -------------------------------------------------------------------------------- /CH12_MIDIToAUSamplerIOS/chris-coreaudio-c2.caf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rweichler/coreaudio-examples/HEAD/CH12_MIDIToAUSamplerIOS/chris-coreaudio-c2.caf -------------------------------------------------------------------------------- /CH10_iOSPlayThrough/main.m: -------------------------------------------------------------------------------- 1 | // 2 | // main.m 3 | // CH10_iOSPlayThrough 4 | // 5 | // Created by Chris Adamson on 7/26/11. 6 | // Copyright 2011 Subsequently and Furthermore, Inc. All rights reserved. 7 | // 8 | 9 | #import 10 | 11 | int main(int argc, char *argv[]) 12 | { 13 | int retVal = UIApplicationMain(argc, argv, nil, nil); 14 | return retVal; 15 | } 16 | -------------------------------------------------------------------------------- /CH11_MIDIWifiSource/main.m: -------------------------------------------------------------------------------- 1 | // 2 | // main.m 3 | // CH11_MIDIWifiSource 4 | // 5 | // Created by Chris Adamson on 9/10/11. 6 | // Copyright 2011 Subsequently and Furthermore, Inc. 
All rights reserved. 7 | // 8 | 9 | #import <UIKit/UIKit.h> 10 | 11 | int main(int argc, char *argv[]) 12 | { 13 | int retVal = UIApplicationMain(argc, argv, nil, nil); 14 | return retVal; 15 | } 16 | -------------------------------------------------------------------------------- /CH10_iOSBackgroundingTone/main.m: -------------------------------------------------------------------------------- 1 | // 2 | // main.m 3 | // CH10_iOSBackgroundingTone 4 | // 5 | // Created by Chris Adamson on 4/22/11. 6 | // Copyright 2011 Subsequently and Furthermore, Inc. All rights reserved. 7 | // 8 | 9 | #import <UIKit/UIKit.h> 10 | 11 | int main(int argc, char *argv[]) 12 | { 13 | int retVal = UIApplicationMain(argc, argv, nil, nil); 14 | return retVal; 15 | } 16 | -------------------------------------------------------------------------------- /CH12_MIDIToAUSamplerIOS/AppDelegate.h: -------------------------------------------------------------------------------- 1 | // 2 | // AppDelegate.h 3 | // CH12_MIDIToAUSamplerIOS 4 | // 5 | // Created by Chris Adamson on 1/2/12. 6 | // Copyright (c) 2012 Subsequently and Furthermore, Inc. All rights reserved. 7 | // 8 | 9 | #import <UIKit/UIKit.h> 10 | 11 | @interface AppDelegate : UIResponder <UIApplicationDelegate> 12 | 13 | @property (strong, nonatomic) UIWindow *window; 14 | 15 | @end 16 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | These examples have been adapted to better suit my workflow. 2 | 3 | Specifically: 4 | 5 | * I have deleted all of the Xcode project files. 6 | * The style is no longer ridiculously ugly. 7 | * Tabs have been replaced with 4 spaces. 8 | 9 | 10 | You will have to compile them by hand from the command line. 11 | 12 | Typically, something like: 13 | 14 | ```bash 15 | clang main.c -framework Foundation -framework AudioToolbox 16 | ``` 17 | 18 | will work. 19 | -------------------------------------------------------------------------------- /CH12_MIDIToAUSamplerIOS/main.m: -------------------------------------------------------------------------------- 1 | // 2 | // main.m 3 | // CH12_MIDIToAUSamplerIOS 4 | // 5 | // Created by Chris Adamson on 1/2/12. 6 | // Copyright (c) 2012 Subsequently and Furthermore, Inc. All rights reserved. 7 | // 8 | 9 | #import <UIKit/UIKit.h> 10 | 11 | #import "AppDelegate.h" 12 | 13 | int main(int argc, char *argv[]) 14 | { 15 | @autoreleasepool { 16 | return UIApplicationMain(argc, argv, nil, NSStringFromClass([AppDelegate class])); 17 | } 18 | } 19 | -------------------------------------------------------------------------------- /CH11_MIDIWifiSource/CH11_MIDIWifiSourceViewController.h: -------------------------------------------------------------------------------- 1 | // 2 | // CH11_MIDIWifiSourceViewController.h 3 | // CH11_MIDIWifiSource 4 | // 5 | // Created by Chris Adamson on 9/10/11. 6 | // Copyright 2011 Subsequently and Furthermore, Inc. All rights reserved. 7 | // 8 | 9 | #import <UIKit/UIKit.h> 10 | 11 | @interface CH11_MIDIWifiSourceViewController : UIViewController 12 | 13 | -(IBAction) handleKeyDown:(id)sender; 14 | -(IBAction) handleKeyUp:(id)sender; 15 | 16 | @end 17 | -------------------------------------------------------------------------------- /CH07_AUGraphSpeechSynthesis/README.txt: -------------------------------------------------------------------------------- 1 | CH07_AUGraphSpeechSynthesis 2 | 3 | This example uses the AUSpeechSynthesis audio unit to speak a string, connected to an AUMatrixReverb unit to provide an "echo" effect. 
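For orientation, here is a minimal sketch of the kind of graph wiring this example performs. It is illustrative only, not the book's exact code: the variable names and the omission of error handling are assumptions made for brevity, and the real example wraps every call in its CheckError() utility.

    // requires <AudioToolbox/AudioToolbox.h>
    AUGraph graph;
    AUNode speechNode, reverbNode, outputNode;

    AudioComponentDescription speechcd = {0};
    speechcd.componentType = kAudioUnitType_Generator;
    speechcd.componentSubType = kAudioUnitSubType_SpeechSynthesis;
    speechcd.componentManufacturer = kAudioUnitManufacturer_Apple;

    AudioComponentDescription reverbcd = {0};
    reverbcd.componentType = kAudioUnitType_Effect;
    reverbcd.componentSubType = kAudioUnitSubType_MatrixReverb;
    reverbcd.componentManufacturer = kAudioUnitManufacturer_Apple;

    AudioComponentDescription outputcd = {0};
    outputcd.componentType = kAudioUnitType_Output;
    outputcd.componentSubType = kAudioUnitSubType_DefaultOutput;
    outputcd.componentManufacturer = kAudioUnitManufacturer_Apple;

    // build the chain: speech synthesis -> matrix reverb -> default output
    NewAUGraph(&graph);
    AUGraphAddNode(graph, &speechcd, &speechNode);
    AUGraphAddNode(graph, &reverbcd, &reverbNode);
    AUGraphAddNode(graph, &outputcd, &outputNode);
    AUGraphOpen(graph);
    AUGraphConnectNodeInput(graph, speechNode, 0, reverbNode, 0);
    AUGraphConnectNodeInput(graph, reverbNode, 0, outputNode, 0);
    AUGraphInitialize(graph);
    AUGraphStart(graph);

The example then retrieves the speech synthesis audio unit from its node (with AUGraphNodeInfo), obtains its speech channel, and speaks the string through that channel while the graph runs.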
4 | 5 | The program runs as a command-line executable and takes no arguments, meaning you need to either run it from the command line in the Derived Data build directory, or directly from Xcode. 6 | 7 | March 13, 2014: This example has been modernized for Xcode 5.1, and has been converted to ARC. There are no code changes compared to what's in the book. -------------------------------------------------------------------------------- /CH05_Player/README.txt: -------------------------------------------------------------------------------- 1 | CH05_Player 2 | 3 | This example uses an Audio Queue to play an audio file in any Core Audio-supported format. The path to the file to play is set as the kPlaybackFileLocation macro at the top of main.c. 4 | 5 | The program runs as a command-line executable and takes no arguments, meaning you need to either run it from the command line in the Derived Data build directory, or directly from Xcode. 6 | 7 | March 13, 2014: This example has been modernized for Xcode 5.1, and has been converted to ARC. There are no code changes compared to what's in the book. -------------------------------------------------------------------------------- /CH07_AUGraphPlayer/README.txt: -------------------------------------------------------------------------------- 1 | CH07_AUGraphPlayer 2 | 3 | This example uses an AUGraph to play a file with the AUFilePlayer audio unit. The path to the file to play is set as the kInputFileLocation macro at the top of main.c. 4 | 5 | The program runs as a command-line executable and takes no arguments, meaning you need to either run it from the command line in the Derived Data build directory, or directly from Xcode. 6 | 7 | March 13, 2014: This example has been modernized for Xcode 5.1, and has been converted to ARC. There are no code changes compared to what's in the book. -------------------------------------------------------------------------------- /CH09_OpenALOrbitLoop/README.txt: -------------------------------------------------------------------------------- 1 | CH09_OpenALOrbitLoop 2 | 3 | This example uses OpenAL to play a looping bicycle sound that orbits on an ellipse in all three dimensions around the listener; this is heard as a stereo mix on headphones or a typical two-speaker setup. 4 | 5 | The program runs as a command-line executable and takes no arguments, meaning you need to either run it from the command line in the Derived Data build directory, or directly from Xcode. 6 | 7 | March 13, 2014: This example has been modernized for Xcode 5.1, and has been converted to ARC. There are no code changes compared to what's in the book. -------------------------------------------------------------------------------- /CH11_MIDIToAUGraph/README.txt: -------------------------------------------------------------------------------- 1 | CH11_MIDIToAUGraph 2 | 3 | This example receives MIDI events and forwards them to an AUDLSSynth audio unit to play them as notes. It requires a MIDI device such as a musical keyboard, typically connected to the Mac via a USB MIDI interface. 4 | 5 | The program runs as a command-line executable and takes no arguments, meaning you need to either run it from the command line in the Derived Data build directory, or directly from Xcode. 6 | 7 | March 13, 2014: This example has been modernized for Xcode 5.1, and has been converted to ARC. There are no code changes compared to what's in the book. 
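For orientation, here is a minimal sketch of how a Core MIDI source is typically connected to the read proc that feeds the synth. It is illustrative only, not the book's exact setupMIDI() code; error handling is omitted, and the client/port names are placeholders, whereas the real example wraps every call in CheckError(). MyMIDIReadProc and MyMIDINotifyProc are the callbacks declared in main.c.

    // requires <CoreMIDI/CoreMIDI.h>
    MIDIClientRef client;
    MIDIPortRef inPort;
    MIDIClientCreate(CFSTR("MIDI to AUGraph client"), MyMIDINotifyProc, player, &client);
    MIDIInputPortCreate(client, CFSTR("Input port"), MyMIDIReadProc, player, &inPort);

    // connect every available MIDI source (e.g., an attached keyboard) to the input port
    ItemCount sourceCount = MIDIGetNumberOfSources();
    for (ItemCount i = 0; i < sourceCount; ++i) {
        MIDIEndpointRef source = MIDIGetSource(i);
        MIDIPortConnectSource(inPort, source, NULL);
    }

Each incoming packet is then handed to MusicDeviceMIDIEvent() on the DLS synth unit, as shown in MyMIDIReadProc in main.c.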
-------------------------------------------------------------------------------- /CH04_Recorder/README.txt: -------------------------------------------------------------------------------- 1 | CH04_Recorder 2 | 3 | This example uses an Audio Queue to record from the default input device to a .caf file. 4 | 5 | The program runs as a command-line executable and takes no arguments, meaning you need to either run it from the command line in the Derived Data build directory, or directly from Xcode. The resulting file is saved in "output.caf" in the local directory (which will be the Derived Data build directory if you run from Xcode) 6 | 7 | March 13, 2014: This example has been modernized for Xcode 5.1, and has been converted to ARC. There are no code changes compared to what's in the book. -------------------------------------------------------------------------------- /CH07_AUGraphSineWave/README.txt: -------------------------------------------------------------------------------- 1 | CH07_AUGraphSineWave 2 | 3 | This example uses the default output Audio Unit connected to a render callback function to produce a sine wave in real-time. The frequency of the sine wave is set as the sineFrequency macro at the top of main.c. 4 | 5 | The program runs as a command-line executable and takes no arguments, meaning you need to either run it from the command line in the Derived Data build directory, or directly from Xcode. 6 | 7 | March 13, 2014: This example has been modernized for Xcode 5.1, and has been converted to ARC. There are no code changes compared to what's in the book. -------------------------------------------------------------------------------- /CH09_OpenALOrbitStream/README.txt: -------------------------------------------------------------------------------- 1 | CH09_OpenALOrbitStream 2 | 3 | This example uses OpenAL's streaming API to play a song sound that orbits on an ellipse in all three dimensions around the listener; this is heard as a stereo mix on headphones or a typical two-speaker setup. 4 | 5 | The program runs as a command-line executable and takes no arguments, meaning you need to either run it from the command line in the Derived Data build directory, or directly from Xcode. 6 | 7 | March 13, 2014: This example has been modernized for Xcode 5.1, and has been converted to ARC. There are no code changes compared to what's in the book. -------------------------------------------------------------------------------- /CH12_MIDIToAUSampler/README.txt: -------------------------------------------------------------------------------- 1 | CH12_MIDIToAUSampler 2 | 3 | This example receives MIDI events and forwards them to an AUSampler audio unit to play them as notes. It requires a MIDI device such as a musical keyboard, typically connected to the Mac via a USB MIDI interface. 4 | 5 | The program runs as a command-line executable and takes no arguments, meaning you need to either run it from the command line in the Derived Data build directory, or directly from Xcode. 6 | 7 | March 13, 2014: This example has been modernized for Xcode 5.1, and has been converted to ARC. There are no code changes compared to what's in the book. -------------------------------------------------------------------------------- /CH11_MIDIWifiSource/CH11_MIDIWifiSourceAppDelegate.h: -------------------------------------------------------------------------------- 1 | // 2 | // CH11_MIDIWifiSourceAppDelegate.h 3 | // CH11_MIDIWifiSource 4 | // 5 | // Created by Chris Adamson on 9/10/11. 
6 | // Copyright 2011 Subsequently and Furthermore, Inc. All rights reserved. 7 | // 8 | 9 | #import <UIKit/UIKit.h> 10 | 11 | @class CH11_MIDIWifiSourceViewController; 12 | 13 | @interface CH11_MIDIWifiSourceAppDelegate : NSObject <UIApplicationDelegate> 14 | 15 | @property (nonatomic, retain) IBOutlet UIWindow *window; 16 | 17 | @property (nonatomic, retain) IBOutlet CH11_MIDIWifiSourceViewController *viewController; 18 | 19 | @end 20 | -------------------------------------------------------------------------------- /CH06_AudioConverter/README.txt: -------------------------------------------------------------------------------- 1 | CH06_AudioConverter 2 | 3 | This example uses Audio Converter Services to convert an audio file in any Core Audio-supported format to 16-bit big-endian LPCM in an .aif file. The path to the file to convert is set as the kInputFileLocation macro at the top of main.c. 4 | 5 | The program runs as a command-line executable and takes no arguments, meaning you need to either run it from the command line in the Derived Data build directory, or directly from Xcode. 6 | 7 | March 13, 2014: This example has been modernized for Xcode 5.1, and has been converted to ARC. There are no code changes compared to what's in the book. -------------------------------------------------------------------------------- /CH06_ExtAudioFileConverter/README.txt: -------------------------------------------------------------------------------- 1 | CH06_ExtAudioFileConverter 2 | 3 | This example uses Extended Audio File Services to convert an audio file in any Core Audio-supported format to 16-bit big-endian LPCM in an .aif file. The path to the file to convert is set as the kInputFileLocation macro at the top of main.c. 4 | 5 | The program runs as a command-line executable and takes no arguments, meaning you need to either run it from the command line in the Derived Data build directory, or directly from Xcode. 6 | 7 | March 13, 2014: This example has been modernized for Xcode 5.1, and has been converted to ARC. There are no code changes compared to what's in the book. -------------------------------------------------------------------------------- /CH03_CAStreamFormatTester/README.txt: -------------------------------------------------------------------------------- 1 | CH03_CAStreamFormatTester 2 | 3 | This example inspects Core Audio's support for a given combination of file format and audio format, logging the supported format flags and bit-depths. 4 | 5 | The program runs as a command-line executable and takes no arguments, meaning you need to either run it from the command line in the Derived Data build directory, or directly from Xcode. 6 | 7 | March 13, 2014: This example has been modernized for Xcode 5.1, and has been converted to ARC. The following lines are different from what's printed in the book: 8 | 9 | Removed line 5: 10 | NSAutoreleasePool * pool = [[NSAutoreleasePool alloc] init]; 11 | 12 | Removed line 46: 13 | [pool drain]; 14 | -------------------------------------------------------------------------------- /CH10_iOSPlayThrough/CH10_iOSPlayThroughAppDelegate.h: -------------------------------------------------------------------------------- 1 | // 2 | // CH10_iOSPlayThroughAppDelegate.h 3 | // CH10_iOSPlayThrough 4 | // 5 | // Created by Chris Adamson on 7/10/11. 6 | // Copyright 2011 Subsequently and Furthermore, Inc. All rights reserved. 
7 | // 8 | 9 | #import 10 | #import 11 | 12 | typedef struct { 13 | AudioUnit rioUnit; 14 | AudioStreamBasicDescription asbd; 15 | float sineFrequency; 16 | float sinePhase; 17 | } EffectState; 18 | 19 | 20 | @interface CH10_iOSPlayThroughAppDelegate : UIResponder { 21 | 22 | } 23 | 24 | @property (nonatomic, retain) UIWindow *window; 25 | @property (assign) EffectState effectState; 26 | 27 | @end 28 | -------------------------------------------------------------------------------- /CH08_AUGraphInput/README.txt: -------------------------------------------------------------------------------- 1 | CH08_AUGraphInput 2 | 3 | This example creates an audio pass-through application by creating an AUHALOutput audio unit for input and an AUDefaultOutput audio unit for output and connecting them by way of the CARingBuffer. The CARingBuffer class is from Apple and is not provided in this zip file -- see CARingBuffer-README for how to get it based on your current version of Xcode (as of this writing, Xcode 5.1 is current, and you will find it as part of the "Core Audio Utility Classes" sample project in Xcode's documentation viewer) 4 | 5 | The program runs as a command-line executable and takes no arguments, meaning you need to either run it from the command line in the Derived Data build directory, or directly from Xcode. 6 | 7 | March 13, 2014: This example has been modernized for Xcode 5.1, and has been converted to ARC. There are no code changes compared to what's in the book. -------------------------------------------------------------------------------- /CH10_iOSBackgroundingTone/CH10_iOSBackgroundingToneAppDelegate.h: -------------------------------------------------------------------------------- 1 | // 2 | // CH10_iOSBackgroundingToneAppDelegate.h 3 | // CH10_iOSBackgroundingTone 4 | // 5 | // Created by Chris Adamson on 4/22/11. 6 | // Copyright 2011 Subsequently and Furthermore, Inc. All rights reserved. 7 | // 8 | 9 | #import 10 | #import 11 | 12 | @interface CH10_iOSBackgroundingToneAppDelegate : NSObject 13 | 14 | @property (nonatomic, retain) IBOutlet UIWindow *window; 15 | 16 | @property (nonatomic, assign) AudioStreamBasicDescription streamFormat; 17 | @property (nonatomic, assign) UInt32 bufferSize; 18 | @property (nonatomic, assign) double currentFrequency; 19 | @property (nonatomic, assign) double startingFrameCount; 20 | @property (nonatomic, assign) AudioQueueRef audioQueue; 21 | 22 | -(OSStatus) fillBuffer: (AudioQueueBufferRef) buffer; 23 | 24 | @end 25 | -------------------------------------------------------------------------------- /CH12_MIDIToAUSampler/README-AUPreset.txt: -------------------------------------------------------------------------------- 1 | The AUPreset files used by this example need to live in specific locations, as described in chapter 12. You can and should use AU Lab to create these files with your own audio. 2 | 3 | In the interest of reproducibility, I'm including my own preset files with this version of the download code. To use these, you would need to do the following: 4 | 5 | 1. Put chris-coreaudio-c2.caf in ~/Library/Audio/Sounds/ 6 | 2. Put ch12-aupreset.aupreset in ~/Library/Audio/Presets/Apple/AU Sampler/ 7 | 3. The .aupreset is a property list file, so you can edit it in Xcode, Property List Editor, or any text editor. Edit the section so that it has the correct path to chris-coreaudio-c2.caf (which you moved into your ~/Library/Audio/Sounds folder in step 1). 8 | 4. 
Change line 128 of main.c to provide the full path to your .aupreset file (which you moved in step 2). 9 | 10 | -------------------------------------------------------------------------------- /CH02_CAToneFileGenerator/README.txt: -------------------------------------------------------------------------------- 1 | CH02_CAToneFileGenerator 2 | 3 | This example writes a sound wave to a AIFF file, sample-by-sample. 4 | 5 | The program runs as a command-line executable and takes as its argument the frequency to produce (in Hz), meaning you need to either run it from the command line in the Derived Data build directory, or supply arguments as part of the Xcode scheme. The resulting file is saved in the local directory (which will be the Derived Data build directory if you run from Xcode) 6 | 7 | March 13, 2014: This example has been modernized for Xcode 5.1, and has been converted to ARC. The following lines are different from what's printed in the book: 8 | 9 | main.m: 10 | Removed line 11: 11 | NSAutoreleasePool * pool = [[NSAutoreleasePool alloc] init]; 12 | 13 | Changed line 41 from: 14 | audioErr = AudioFileCreateWithURL((CFURLRef)fileURL, 15 | to 16 | audioErr = AudioFileCreateWithURL((__bridge CFURLRef)fileURL, 17 | 18 | Removed line 106: 19 | [pool drain]; 20 | -------------------------------------------------------------------------------- /CH11_MIDIWifiSource/README.txt: -------------------------------------------------------------------------------- 1 | CH11_MIDIWifiSource 2 | 3 | This example provides MIDI events over wifi that can run a MIDI app (such as CH11_MIDIToAUGraph) running on OS X, provided you connect the network source in /Applications/Utilities/Audio MIDI Setup, as described in the book. 4 | 5 | March 13, 2014: This example has been modernized for Xcode 5.1, and has been converted to ARC. The following lines are different from what's printed in the book: 6 | 7 | 8 | CH11_MIDIWifiSourceViewController.m 9 | Removed lines 66-71: 10 | - (void)dealloc 11 | { 12 | [_window release]; 13 | [_viewController release]; 14 | [super dealloc]; 15 | } 16 | 17 | 18 | CH11_MIDIWifiSourceViewController.xib: 19 | The buttons' backgrounds have been explicitly colored to resemble white and black piano keys, since iOS 7 buttons are indistinguishable from labels. 20 | 21 | 22 | main.m (not shown in book since it is created for you by Xcode template): 23 | Removed line 13: 24 | NSAutoreleasePool *pool = [[NSAutoreleasePool alloc] init]; 25 | Removed line 15: 26 | [pool release]; 27 | -------------------------------------------------------------------------------- /CH01_CAMetadata/README.txt: -------------------------------------------------------------------------------- 1 | CH01_CAMetadata 2 | 3 | This example illustrates getting metadata from an audio file like a song downloaded from iTunes or Amazon MP3. The available metadata may depend on file format, version of OS X, and what metadata the vendor has included in the file. 4 | 5 | The program runs as a command-line executable and takes as its argument the path to an audio file, meaning you need to either run it from the command line in the Derived Data build directory, or supply arguments as part of the Xcode scheme. 6 | 7 | 8 | March 13, 2014: This example has been modernized for Xcode 5.1, and has been converted to ARC. 
The following lines are different from what's printed in the book: 9 | 10 | main.m: 11 | Removed line 5: 12 | NSAutoreleasePool * pool = [[NSAutoreleasePool alloc] init]; 13 | 14 | Changed line 16 from: 15 | theErr = AudioFileOpenURL((CFURLRef)audioURL, kAudioFileReadPermission, 0, &audioFile); // 6 16 | to 17 | theErr = AudioFileOpenURL((__bridge CFURLRef)audioURL, kAudioFileReadPermission, 0, &audioFile); // 6 18 | 19 | Removed line 33: 20 | [pool drain]; 21 | -------------------------------------------------------------------------------- /CH12_MIDIToAUSamplerIOS/README.txt: -------------------------------------------------------------------------------- 1 | CH12_MIDIToAUSamplerIOS 2 | 3 | This example gets MIDI packets from a connected device (typically a MIDI keyboard connected via a MIDI-to-USB adapter and then into the iOS device via the Camera Connection Kit) and sends MIDI note events to the AUSampler audio unit. 4 | 5 | March 13, 2014: This example has been modernized for Xcode 5.1, and has been converted to ARC. The following lines are different from what's printed in the book: 6 | 7 | AppDelegate.m: 8 | Removed lines 35-39: 9 | - (void)dealloc 10 | { 11 | [_window release]; 12 | [super dealloc]; 13 | } 14 | 15 | Changed line 89 from: 16 | printf("MIDI Notify, messageId=%d,", message->messageID); 17 | to: 18 | printf("MIDI Notify, messageId=%d,", (int)message->messageID); 19 | 20 | Changed line 226 from: 21 | self.window = [[[UIWindow alloc] initWithFrame:[[UIScreen mainScreen] bounds]] autorelease]; 22 | to: 23 | self.window = [[UIWindow alloc] initWithFrame:[[UIScreen mainScreen] bounds]]; 24 | 25 | 26 | 27 | main.m (not shown in book since it is created for you by Xcode template): 28 | Removed line 13: 29 | NSAutoreleasePool *pool = [[NSAutoreleasePool alloc] init]; 30 | Removed line 15: 31 | [pool release]; 32 | -------------------------------------------------------------------------------- /CH01_CAMetadata/main.m: -------------------------------------------------------------------------------- 1 | #import <Foundation/Foundation.h> 2 | #import <AudioToolbox/AudioToolbox.h> 3 | 4 | int main (int argc, const char * argv[]) { 5 | if (argc < 2) { 6 | printf ("Usage: CAMetadata /full/path/to/audiofile\n"); 7 | return -1; 8 | } // 1 9 | 10 | NSString *audioFilePath = [[NSString stringWithUTF8String:argv[1]] 11 | stringByExpandingTildeInPath]; // 2 12 | NSURL *audioURL = [NSURL fileURLWithPath:audioFilePath]; // 3 13 | NSLog (@"audioURL: %@", audioURL); 14 | AudioFileID audioFile; // 4 15 | OSStatus theErr = noErr; // 5 16 | theErr = AudioFileOpenURL((__bridge CFURLRef)audioURL, kAudioFileReadPermission, 0, &audioFile); // 6 17 | assert (theErr == noErr); // 7 18 | UInt32 dictionarySize = 0; // 8 19 | theErr = AudioFileGetPropertyInfo (audioFile, kAudioFilePropertyInfoDictionary, 20 | &dictionarySize, 0); // 9 21 | assert (theErr == noErr); // 10 22 | CFDictionaryRef dictionary; // 11 23 | theErr = AudioFileGetProperty (audioFile, kAudioFilePropertyInfoDictionary, 24 | &dictionarySize, &dictionary); // 12 25 | assert (theErr == noErr); // 13 26 | NSLog (@"dictionary: %@", dictionary); // 14 27 | CFRelease (dictionary); // 15 28 | theErr = AudioFileClose (audioFile); // 16 29 | assert (theErr == noErr); // 17 30 | 31 | return 0; 32 | } 33 | -------------------------------------------------------------------------------- /CH11_MIDIWifiSource/CH11_MIDIWifiSource-Info.plist: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | CFBundleDevelopmentRegion 6 | en 7 | 
CFBundleDisplayName 8 | ${PRODUCT_NAME} 9 | CFBundleExecutable 10 | ${EXECUTABLE_NAME} 11 | CFBundleIconFile 12 | 13 | CFBundleIdentifier 14 | com.subfurther..${PRODUCT_NAME:rfc1034identifier} 15 | CFBundleInfoDictionaryVersion 16 | 6.0 17 | CFBundleName 18 | ${PRODUCT_NAME} 19 | CFBundlePackageType 20 | APPL 21 | CFBundleShortVersionString 22 | 1.0 23 | CFBundleSignature 24 | ???? 25 | CFBundleVersion 26 | 1.0 27 | LSRequiresIPhoneOS 28 | 29 | NSMainNibFile 30 | MainWindow 31 | UISupportedInterfaceOrientations 32 | 33 | UIInterfaceOrientationPortrait 34 | UIInterfaceOrientationLandscapeLeft 35 | UIInterfaceOrientationLandscapeRight 36 | 37 | 38 | 39 | -------------------------------------------------------------------------------- /CH03_CAStreamFormatTester/main.m: -------------------------------------------------------------------------------- 1 | #import 2 | #import 3 | 4 | int main (int argc, const char * argv[]) { 5 | 6 | AudioFileTypeAndFormatID fileTypeAndFormat; 7 | fileTypeAndFormat.mFileType = kAudioFileAIFFType; 8 | fileTypeAndFormat.mFormatID = kAudioFormatLinearPCM; 9 | 10 | OSStatus audioErr = noErr; 11 | UInt32 infoSize = 0; 12 | 13 | audioErr = AudioFileGetGlobalInfoSize 14 | (kAudioFileGlobalInfo_AvailableStreamDescriptionsForFormat, 15 | sizeof (fileTypeAndFormat), 16 | &fileTypeAndFormat, 17 | &infoSize); 18 | if (audioErr != noErr) { 19 | UInt32 err4cc = CFSwapInt32HostToBig(audioErr); 20 | NSLog (@"%4.4s", (char*)&err4cc); 21 | } 22 | assert (audioErr == noErr); 23 | 24 | AudioStreamBasicDescription *asbds = malloc (infoSize); 25 | audioErr = AudioFileGetGlobalInfo 26 | (kAudioFileGlobalInfo_AvailableStreamDescriptionsForFormat, 27 | sizeof (fileTypeAndFormat), 28 | &fileTypeAndFormat, 29 | &infoSize, 30 | asbds); 31 | assert (audioErr == noErr); 32 | 33 | int asbdCount = infoSize / sizeof (AudioStreamBasicDescription); 34 | for (int i=0; i 2 | 3 | 4 | 5 | CFBundleDevelopmentRegion 6 | en 7 | CFBundleDisplayName 8 | ${PRODUCT_NAME} 9 | CFBundleExecutable 10 | ${EXECUTABLE_NAME} 11 | CFBundleIconFile 12 | 13 | CFBundleIconFiles 14 | 15 | Icon.png 16 | Icon@2x.png 17 | 18 | CFBundleIdentifier 19 | com.subfurther.${PRODUCT_NAME:rfc1034identifier} 20 | CFBundleInfoDictionaryVersion 21 | 6.0 22 | CFBundleName 23 | ${PRODUCT_NAME} 24 | CFBundlePackageType 25 | APPL 26 | CFBundleShortVersionString 27 | 1.0 28 | CFBundleSignature 29 | ???? 30 | CFBundleVersion 31 | 1.0 32 | LSRequiresIPhoneOS 33 | 34 | NSMainNibFile 35 | MainWindow 36 | UISupportedInterfaceOrientations 37 | 38 | UIInterfaceOrientationPortrait 39 | UIInterfaceOrientationLandscapeLeft 40 | UIInterfaceOrientationLandscapeRight 41 | 42 | 43 | 44 | -------------------------------------------------------------------------------- /CH10_iOSBackgroundingTone/CH10_iOSBackgroundingTone-Info.plist: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | CFBundleDevelopmentRegion 6 | en 7 | CFBundleDisplayName 8 | ${PRODUCT_NAME} 9 | CFBundleDocumentTypes 10 | 11 | CFBundleExecutable 12 | ${EXECUTABLE_NAME} 13 | CFBundleIconFile 14 | 15 | CFBundleIconFiles 16 | 17 | Icon.png 18 | Icon@2x.png 19 | 20 | CFBundleIdentifier 21 | com.subfurther.${PRODUCT_NAME:rfc1034identifier} 22 | CFBundleInfoDictionaryVersion 23 | 6.0 24 | CFBundleName 25 | ${PRODUCT_NAME} 26 | CFBundlePackageType 27 | APPL 28 | CFBundleShortVersionString 29 | 1.0 30 | CFBundleSignature 31 | ???? 
32 | CFBundleURLTypes 33 | 34 | CFBundleVersion 35 | 1.0 36 | LSRequiresIPhoneOS 37 | 38 | NSMainNibFile 39 | MainWindow 40 | UIBackgroundModes 41 | 42 | audio 43 | 44 | UISupportedInterfaceOrientations 45 | 46 | UIInterfaceOrientationPortrait 47 | UIInterfaceOrientationLandscapeLeft 48 | UIInterfaceOrientationLandscapeRight 49 | 50 | UTExportedTypeDeclarations 51 | 52 | UTImportedTypeDeclarations 53 | 54 | 55 | 56 | -------------------------------------------------------------------------------- /CH12_MIDIToAUSamplerIOS/CH12_MIDIToAUSamplerIOS-Info.plist: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | CFBundleDevelopmentRegion 6 | en 7 | CFBundleDisplayName 8 | ${PRODUCT_NAME} 9 | CFBundleExecutable 10 | ${EXECUTABLE_NAME} 11 | CFBundleIconFiles 12 | 13 | Icon@2x.png 14 | Icon.png 15 | 16 | CFBundleIcons 17 | 18 | CFBundlePrimaryIcon 19 | 20 | CFBundleIconFiles 21 | 22 | Icon@2x.png 23 | Icon.png 24 | 25 | UIPrerenderedIcon 26 | 27 | 28 | 29 | CFBundleIdentifier 30 | com.pearson.learningcoreaudio.${PRODUCT_NAME:rfc1034identifier} 31 | CFBundleInfoDictionaryVersion 32 | 6.0 33 | CFBundleName 34 | ${PRODUCT_NAME} 35 | CFBundlePackageType 36 | APPL 37 | CFBundleShortVersionString 38 | 1.0 39 | CFBundleSignature 40 | ???? 41 | CFBundleVersion 42 | 1.0 43 | LSRequiresIPhoneOS 44 | 45 | UIRequiredDeviceCapabilities 46 | 47 | armv7 48 | 49 | UISupportedInterfaceOrientations 50 | 51 | UIInterfaceOrientationPortrait 52 | UIInterfaceOrientationLandscapeLeft 53 | UIInterfaceOrientationLandscapeRight 54 | 55 | 56 | 57 | -------------------------------------------------------------------------------- /CH10_iOSPlayThrough/README.txt: -------------------------------------------------------------------------------- 1 | CH10_iOSPlayThrough 2 | 3 | This example uses an AURemoteIO audio unit to play captured audio from the mic through to the speaker or headphones, and optionally uses a render callback to apply a ring modulator effect. 4 | 5 | Note that the AudioSession functions used in this example were deprecated after the book was published. AVAudioSession, in the AV Foundation framework, is intended as a drop-in replacement. 6 | 7 | Note that versions of iOS released after the book's publication require explicit user approval to use the microphone. The first time this app is run, the AURemoteIO unit will produce silence because it is waiting for this approval. Once approval is given and the app is re-started, the AURemoteIO unit will be able to capture from the mic. To do this request up front, before attempting to create the AUGraph, you can use AV Foundation's -[AVCaptureDevice requestAccessForMediaType:completionHandler:], which was introduced in iOS 7. 8 | 9 | March 13, 2014: This example has been modernized for Xcode 5.1, and has been converted to ARC. The following lines are different from what's printed in the book: 10 | 11 | CH10_iOSPlayThroughAppDelegate.m: 12 | 13 | Changed lines 43-4 from: 14 | printf ("Interrupted! inInterruptionState=%u\n", inInterruptionState); 15 | CH10_iOSPlayThroughAppDelegate *appDelegate = (CH10_iOSPlayThroughAppDelegate*)inUserData; 16 | to: 17 | printf ("Interrupted! 
inInterruptionState=%u\n", (unsigned int)inInterruptionState); 18 | CH10_iOSPlayThroughAppDelegate *appDelegate = (__bridge CH10_iOSPlayThroughAppDelegate*)inUserData; 19 | 20 | 21 | Changed line 127 from: 22 | self), 23 | to: 24 | (__bridge void *)(self)), 25 | 26 | Removed line 151: 27 | [noInputAlert release]; 28 | 29 | 30 | 31 | main.m (not shown in book since it is created for you by Xcode template): 32 | Removed line 13: 33 | NSAutoreleasePool *pool = [[NSAutoreleasePool alloc] init]; 34 | Removed line 15: 35 | [pool release]; 36 | -------------------------------------------------------------------------------- /CH10_iOSBackgroundingTone/README.txt: -------------------------------------------------------------------------------- 1 | CH10_iOSBackgroundingTone 2 | 3 | This example uses an Audio Queue to play a sine wave, which changes frequency based on whether the app is in the foreground or background. 4 | 5 | Note that the AudioSession functions used in this example were deprecated after the book was published. AVAudioSession, in the AV Foundation framework, is intended as a drop-in replacement. 6 | 7 | March 13, 2014: This example has been modernized for Xcode 5.1, and has been converted to ARC. The following lines are different from what's printed in the book: 8 | 9 | CH10_iOSBackgroundingToneAppDelegate.m 10 | Changed line 79 from: 11 | CH10_iOSBackgroundingToneAppDelegate *appDelegate = (CH10_iOSBackgroundingToneAppDelegate*)inUserData; 12 | to: 13 | CH10_iOSBackgroundingToneAppDelegate *appDelegate = (__bridge CH10_iOSBackgroundingToneAppDelegate*)inUserData; 14 | 15 | Changed lines 93-4 from: 16 | printf ("Interrupted! inInterruptionState=%u\n", inInterruptionState); 17 | CH10_iOSBackgroundingToneAppDelegate *appDelegate = (CH10_iOSBackgroundingToneAppDelegate*)inUserData; 18 | to: 19 | printf ("Interrupted! inInterruptionState=%u\n", (unsigned int)inInterruptionState); 20 | CH10_iOSBackgroundingToneAppDelegate *appDelegate = (__bridge CH10_iOSBackgroundingToneAppDelegate*)inUserData; 21 | 22 | Changed line 117 from: 23 | self), 24 | to: 25 | (__bridge void *)(self)), 26 | 27 | Changed line 140 from: 28 | self, 29 | to: 30 | (__bridge void *)(self), 31 | 32 | Changed line 151 from: 33 | NSLog (@"bufferSize is %u", bufferSize); 34 | to: 35 | NSLog (@"bufferSize is %u", (unsigned int)bufferSize); 36 | 37 | Removed lines 223-7: 38 | - (void)dealloc 39 | { 40 | [_window release]; 41 | [super dealloc]; 42 | } 43 | 44 | 45 | 46 | main.m (not shown in book since it is created for you by Xcode template): 47 | Removed line 13: 48 | NSAutoreleasePool *pool = [[NSAutoreleasePool alloc] init]; 49 | Removed line 15: 50 | [pool release]; 51 | -------------------------------------------------------------------------------- /CH11_MIDIWifiSource/CH11_MIDIWifiSourceAppDelegate.m: -------------------------------------------------------------------------------- 1 | // 2 | // CH11_MIDIWifiSourceAppDelegate.m 3 | // CH11_MIDIWifiSource 4 | // 5 | // Created by Chris Adamson on 9/10/11. 6 | // Copyright 2011 Subsequently and Furthermore, Inc. All rights reserved. 
7 | // 8 | 9 | #import "CH11_MIDIWifiSourceAppDelegate.h" 10 | 11 | #import "CH11_MIDIWifiSourceViewController.h" 12 | 13 | @implementation CH11_MIDIWifiSourceAppDelegate 14 | 15 | @synthesize window = _window; 16 | @synthesize viewController = _viewController; 17 | 18 | - (BOOL)application:(UIApplication *)application didFinishLaunchingWithOptions:(NSDictionary *)launchOptions 19 | { 20 | // Override point for customization after application launch. 21 | 22 | self.window.rootViewController = self.viewController; 23 | [self.window makeKeyAndVisible]; 24 | return YES; 25 | } 26 | 27 | - (void)applicationWillResignActive:(UIApplication *)application 28 | { 29 | /* 30 | Sent when the application is about to move from active to inactive state. This can occur for certain types of temporary interruptions (such as an incoming phone call or SMS message) or when the user quits the application and it begins the transition to the background state. 31 | Use this method to pause ongoing tasks, disable timers, and throttle down OpenGL ES frame rates. Games should use this method to pause the game. 32 | */ 33 | } 34 | 35 | - (void)applicationDidEnterBackground:(UIApplication *)application 36 | { 37 | /* 38 | Use this method to release shared resources, save user data, invalidate timers, and store enough application state information to restore your application to its current state in case it is terminated later. 39 | If your application supports background execution, this method is called instead of applicationWillTerminate: when the user quits. 40 | */ 41 | } 42 | 43 | - (void)applicationWillEnterForeground:(UIApplication *)application 44 | { 45 | /* 46 | Called as part of the transition from the background to the inactive state; here you can undo many of the changes made on entering the background. 47 | */ 48 | } 49 | 50 | - (void)applicationDidBecomeActive:(UIApplication *)application 51 | { 52 | /* 53 | Restart any tasks that were paused (or not yet started) while the application was inactive. If the application was previously in the background, optionally refresh the user interface. 54 | */ 55 | } 56 | 57 | - (void)applicationWillTerminate:(UIApplication *)application 58 | { 59 | /* 60 | Called when the application is about to terminate. 61 | Save data if appropriate. 62 | See also applicationDidEnterBackground:. 63 | */ 64 | } 65 | 66 | 67 | @end 68 | -------------------------------------------------------------------------------- /CH08_AUGraphInput/CARingBuffer-README: -------------------------------------------------------------------------------- 1 | "LEARNING CORE AUDIO" - IMPORTANT NOTE ABOUT THE CARingBuffer CLASS 2 | 3 | As one of our book's few external dependencies, this CARingBuffer class has caused problems for a number of readers. Here's a summary: 4 | 5 | * FOR READERS ON OS X 10.6 AND EARLIER - As mentioned in the p. 167 footnote, an older version of the CARingBuffer class was buggy and was updated on Apple's website. Unfortunately, some versions of Xcode tools would overwrite the good version with the older, buggier one. The way to tell the difference is that the good version of the class has a Fetch() method that takes three arguments, while the buggy one takes a fourth argument (a bool). 
More info in Apple Technical Q&A 1665 (sorry, no URL because Apple re-organizes their website every few months and breaks incoming links) 6 | 7 | * FOR READERS USING XCODE 4.2 OR EARLIER - The CARingBuffer files (.h and .cpp) are installed at /Developer/Extras/CoreAudio/PublicUtility, as described in the book. Then see the 4.5 point below for an important warning about paths. 8 | 9 | * FOR READERS USING XCODE 4.3 AND 4.4 - The CARingBuffer files are an optional download from Apple's website. Use Xcode's "More Developer Tools..." menu item, and search for the "Audio Tools for Xcode" package. Put the PublicUtility directory in /Developer/Extras (as in 4.2 and earlier versions of Xcode), then see the 4.5 point below for an important warning about paths. 10 | 11 | * FOR READERS USING XCODE 4.5 AND UP - The CARingBuffer is no longer in the "Audio Tools for Xcode" package (though you'll still need that package for the AULab application that we use in chapter 12). Instead, go to Xcode's documentation browser and search for "Core Audio Public Utility". The result looks like a sample code project, but will actually unzip a "CoreAudioUtilityClasses" folder (containing a "CoreAudio" folder and a "Readme.rtf" document) in your ~/Downloads directory. The "Readme" directs you to put the files in /Library/Developer. 12 | 13 | The "CoreAudio" folder contains the "PublicUtility" folder, so the path to the CARingBuffer class is /Library/Developer/CoreAudio/PublicUtility/CARingBuffer.h and /Library/Developer/CoreAudio/PublicUtility/CARingBuffer.cpp. 14 | 15 | IMPORTANT: In this version of the downloadable code, we have set the CARingBuffer files to use an absolute reference to this new path. So, if you're using the latest Xcode and you follow Apple's directions, everything will just work. If you have PublicUtility in a different location (maybe because you're on an older version of Xcode), you'll have to re-point those file references with Xcode's File Inspector (cmd-option-1). 
16 | -------------------------------------------------------------------------------- /CH02_CAToneFileGenerator/main.m: -------------------------------------------------------------------------------- 1 | #import 2 | #import 3 | 4 | #define SAMPLE_RATE 44100 // 1 5 | #define DURATION 5.0 // 2 6 | // #define FILENAME_FORMAT @"%0.3f-square.aif" 7 | // #define FILENAME_FORMAT @"%0.3f-saw.aif" 8 | #define FILENAME_FORMAT @"%0.3f-sine.aif" 9 | 10 | int main (int argc, const char * argv[]) { 11 | if (argc < 2) { 12 | printf ("Usage: CAToneFileGenerator n\n(where n is tone in Hz)"); 13 | return -1; 14 | } // 1 15 | 16 | double hz = atof(argv[1]); // 2 17 | assert (hz > 0); 18 | NSLog (@"generating %f hz tone", hz); 19 | 20 | NSString *fileName = [NSString stringWithFormat:FILENAME_FORMAT, hz]; 21 | NSString *filePath = [[[NSFileManager defaultManager] currentDirectoryPath] 22 | stringByAppendingPathComponent: fileName]; 23 | NSURL *fileURL = [NSURL fileURLWithPath: filePath]; 24 | NSLog (@"path: %@", fileURL); 25 | 26 | // prepare the format 27 | AudioStreamBasicDescription asbd; 28 | memset(&asbd, 0, sizeof(asbd)); 29 | asbd.mSampleRate = SAMPLE_RATE; 30 | asbd.mFormatID = kAudioFormatLinearPCM; 31 | asbd.mFormatFlags = kAudioFormatFlagIsBigEndian | kAudioFormatFlagIsSignedInteger | kAudioFormatFlagIsPacked; 32 | asbd.mChannelsPerFrame = 1; 33 | asbd.mFramesPerPacket = 1; 34 | asbd.mBitsPerChannel = 16; 35 | asbd.mBytesPerFrame = 2; 36 | asbd.mBytesPerPacket = 2; 37 | 38 | // set up the file 39 | AudioFileID audioFile; 40 | OSStatus audioErr = noErr; 41 | audioErr = AudioFileCreateWithURL((__bridge CFURLRef)fileURL, 42 | kAudioFileAIFFType, 43 | &asbd, 44 | kAudioFileFlags_EraseFile, 45 | &audioFile); 46 | assert (audioErr == noErr); 47 | 48 | // start writing samples 49 | long maxSampleCount = SAMPLE_RATE * DURATION; 50 | long sampleCount = 0; 51 | UInt32 bytesToWrite = 2; 52 | double wavelengthInSamples = SAMPLE_RATE / hz; 53 | NSLog (@"wavelengthInSamples = %f", wavelengthInSamples); 54 | 55 | while (sampleCount < maxSampleCount) { 56 | for (int i=0; i 2 | 3 | #define sineFrequency 880.0 4 | 5 | 6 | typedef struct MySineWavePlayer 7 | { 8 | AudioUnit outputUnit; 9 | double startingFrameCount; 10 | } MySineWavePlayer; 11 | 12 | OSStatus SineWaveRenderProc(void *inRefCon, 13 | AudioUnitRenderActionFlags *ioActionFlags, 14 | const AudioTimeStamp *inTimeStamp, 15 | UInt32 inBusNumber, 16 | UInt32 inNumberFrames, 17 | AudioBufferList * ioData); 18 | void CreateAndConnectOutputUnit (MySineWavePlayer *player) ; 19 | 20 | #pragma mark - callback function - 21 | OSStatus SineWaveRenderProc(void *inRefCon, 22 | AudioUnitRenderActionFlags *ioActionFlags, 23 | const AudioTimeStamp *inTimeStamp, 24 | UInt32 inBusNumber, 25 | UInt32 inNumberFrames, 26 | AudioBufferList * ioData) 27 | { 28 | // printf ("SineWaveRenderProc needs %ld frames at %f\n", inNumberFrames, CFAbsoluteTimeGetCurrent()); 29 | 30 | MySineWavePlayer *player = (MySineWavePlayer*)inRefCon; 31 | 32 | double j = player->startingFrameCount; 33 | // double cycleLength = 44100. / 2200./*frequency*/; 34 | double cycleLength = 44100. 
/ sineFrequency; 35 | int frame = 0; 36 | for (frame = 0; frame < inNumberFrames; ++frame) 37 | { 38 | Float32 *data = (Float32*)ioData->mBuffers[0].mData; 39 | (data)[frame] = (Float32)sin (2 * M_PI * (j / cycleLength)); 40 | 41 | // copy to right channel too 42 | data = (Float32*)ioData->mBuffers[1].mData; 43 | (data)[frame] = (Float32)sin (2 * M_PI * (j / cycleLength)); 44 | 45 | j += 1.0; 46 | if (j > cycleLength) 47 | j -= cycleLength; 48 | } 49 | 50 | player->startingFrameCount = j; 51 | return noErr; 52 | } 53 | 54 | #pragma mark - utility functions - 55 | 56 | // generic error handler - if err is nonzero, prints error message and exits program. 57 | static void CheckError(OSStatus error, const char *operation) 58 | { 59 | if (error == noErr) return; 60 | 61 | char str[20]; 62 | // see if it appears to be a 4-char-code 63 | *(UInt32 *)(str + 1) = CFSwapInt32HostToBig(error); 64 | if (isprint(str[1]) && isprint(str[2]) && isprint(str[3]) && isprint(str[4])) { 65 | str[0] = str[5] = '\''; 66 | str[6] = '\0'; 67 | } else 68 | // no, format it as an integer 69 | sprintf(str, "%d", (int)error); 70 | 71 | fprintf(stderr, "Error: %s (%s)\n", operation, str); 72 | 73 | exit(1); 74 | } 75 | 76 | 77 | void CreateAndConnectOutputUnit (MySineWavePlayer *player) { 78 | 79 | // 10.6 and later: generate description that will match out output device (speakers) 80 | AudioComponentDescription outputcd = {0}; // 10.6 version 81 | outputcd.componentType = kAudioUnitType_Output; 82 | outputcd.componentSubType = kAudioUnitSubType_DefaultOutput; 83 | outputcd.componentManufacturer = kAudioUnitManufacturer_Apple; 84 | 85 | AudioComponent comp = AudioComponentFindNext (NULL, &outputcd); 86 | if (comp == NULL) { 87 | printf ("can't get output unit"); 88 | exit (-1); 89 | } 90 | CheckError (AudioComponentInstanceNew(comp, &player->outputUnit), 91 | "Couldn't open component for outputUnit"); 92 | 93 | // register render callback 94 | AURenderCallbackStruct input; 95 | input.inputProc = SineWaveRenderProc; 96 | input.inputProcRefCon = player; 97 | CheckError(AudioUnitSetProperty(player->outputUnit, 98 | kAudioUnitProperty_SetRenderCallback, 99 | kAudioUnitScope_Input, 100 | 0, 101 | &input, 102 | sizeof(input)), 103 | "AudioUnitSetProperty failed"); 104 | 105 | // initialize unit 106 | CheckError (AudioUnitInitialize(player->outputUnit), 107 | "Couldn't initialize output unit"); 108 | 109 | } 110 | 111 | #pragma mark main 112 | 113 | int main(int argc, const char *argv[]) 114 | { 115 | MySineWavePlayer player = {0}; 116 | 117 | // set up unit and callback 118 | CreateAndConnectOutputUnit(&player); 119 | 120 | // start playing 121 | CheckError (AudioOutputUnitStart(player.outputUnit), "Couldn't start output unit"); 122 | 123 | printf ("playing\n"); 124 | // play for 5 seconds 125 | sleep(5); 126 | cleanup: 127 | AudioOutputUnitStop(player.outputUnit); 128 | AudioUnitUninitialize(player.outputUnit); 129 | AudioComponentInstanceDispose(player.outputUnit); 130 | 131 | return 0; 132 | } 133 | -------------------------------------------------------------------------------- /CH11_MIDIWifiSource/CH11_MIDIWifiSourceViewController.m: -------------------------------------------------------------------------------- 1 | // 2 | // CH11_MIDIWifiSourceViewController.m 3 | // CH11_MIDIWifiSource 4 | // 5 | // Created by Chris Adamson on 9/10/11. 6 | // Copyright 2011 Subsequently and Furthermore, Inc. All rights reserved. 
7 | // 8 | 9 | #import "CH11_MIDIWifiSourceViewController.h" 10 | #import 11 | 12 | #define DESTINATION_ADDRESS @"192.168.2.108" 13 | 14 | @interface CH11_MIDIWifiSourceViewController() 15 | - (void) connectToHost; 16 | - (void) sendStatus:(Byte)status data1:(Byte)data1 data2:(Byte)data2; 17 | - (void) sendNoteOnEvent:(Byte) note velocity:(Byte)velocity; 18 | - (void) sendNoteOffEvent:(Byte)key velocity:(Byte)velocity; 19 | @property (assign) MIDINetworkSession *midiSession; 20 | @property (assign) MIDIEndpointRef destinationEndpoint; 21 | @property (assign) MIDIPortRef outputPort; 22 | @end 23 | 24 | @implementation CH11_MIDIWifiSourceViewController 25 | 26 | @synthesize midiSession; 27 | @synthesize destinationEndpoint; 28 | @synthesize outputPort; 29 | 30 | 31 | #pragma mark utility functions 32 | static void CheckError(OSStatus error, const char *operation) 33 | { 34 | if (error == noErr) return; 35 | 36 | char str[20]; 37 | // see if it appears to be a 4-char-code 38 | *(UInt32 *)(str + 1) = CFSwapInt32HostToBig(error); 39 | if (isprint(str[1]) && isprint(str[2]) && isprint(str[3]) && isprint(str[4])) { 40 | str[0] = str[5] = '\''; 41 | str[6] = '\0'; 42 | } else 43 | // no, format it as an integer 44 | sprintf(str, "%d", (int)error); 45 | 46 | fprintf(stderr, "Error: %s (%s)\n", operation, str); 47 | 48 | exit(1); 49 | } 50 | 51 | 52 | #pragma mark - View lifecycle 53 | 54 | /* 55 | // Implement viewDidLoad to do additional setup after loading the view, typically from a nib. 56 | */ 57 | - (void)viewDidLoad 58 | { 59 | [super viewDidLoad]; 60 | [self connectToHost]; 61 | } 62 | 63 | 64 | - (BOOL)shouldAutorotateToInterfaceOrientation:(UIInterfaceOrientation)interfaceOrientation 65 | { 66 | // Return YES for supported orientations 67 | return (interfaceOrientation == UIInterfaceOrientationPortrait); 68 | } 69 | 70 | #pragma midi stuff 71 | -(void) connectToHost { 72 | MIDINetworkHost *host = [MIDINetworkHost hostWithName:@"MyMIDIWifi" address:DESTINATION_ADDRESS port:5004]; 73 | if(!host) 74 | return; 75 | 76 | MIDINetworkConnection *connection = [MIDINetworkConnection connectionWithHost:host]; 77 | if(!connection) 78 | return; 79 | 80 | self.midiSession = [MIDINetworkSession defaultSession]; 81 | if (self.midiSession) { 82 | NSLog (@"Got MIDI session"); 83 | [self.midiSession addConnection:connection]; 84 | self.midiSession.enabled = YES; 85 | self.destinationEndpoint = [self.midiSession destinationEndpoint]; 86 | 87 | MIDIClientRef client = NULL; 88 | MIDIPortRef outport = NULL; 89 | CheckError (MIDIClientCreate(CFSTR("MyMIDIWifi Client"), NULL, NULL, &client), 90 | "Couldn't create MIDI client"); 91 | CheckError (MIDIOutputPortCreate(client, CFSTR("MyMIDIWifi Output port"), &outport), 92 | "Couldn't create output port"); 93 | self.outputPort = outport; 94 | NSLog (@"Got output port"); 95 | } 96 | } 97 | 98 | -(void) sendStatus:(Byte)status data1:(Byte)data1 data2:(Byte)data2 { 99 | MIDIPacketList packetList; 100 | 101 | packetList.numPackets = 1; 102 | packetList.packet[0].length = 3; 103 | packetList.packet[0].data[0] = status; 104 | packetList.packet[0].data[1] = data1; 105 | packetList.packet[0].data[2] = data2; 106 | packetList.packet[0].timeStamp = 0; 107 | 108 | CheckError (MIDISend(self.outputPort, self.destinationEndpoint, &packetList), 109 | "Couldn't send MIDI packet list"); 110 | } 111 | 112 | -(void) sendNoteOnEvent:(Byte)key velocity:(Byte)velocity { 113 | [self sendStatus:0x90 data1:key & 0x7F data2:velocity & 0x7F]; 114 | 115 | } 116 | 117 | -(void) 
sendNoteOffEvent:(Byte)key velocity:(Byte)velocity { 118 | [self sendStatus:0x80 data1:key & 0x7F data2:velocity & 0x7F]; 119 | } 120 | 121 | 122 | #pragma mark event handlers 123 | -(IBAction) handleKeyDown:(id)sender { 124 | NSInteger note = [sender tag]; 125 | [self sendNoteOnEvent:(Byte) note velocity:127]; 126 | } 127 | 128 | -(IBAction) handleKeyUp:(id)sender { 129 | NSInteger note = [sender tag]; 130 | [self sendNoteOffEvent:(Byte) note velocity:127]; 131 | 132 | } 133 | 134 | 135 | 136 | @end 137 | -------------------------------------------------------------------------------- /CHANGES.txt: -------------------------------------------------------------------------------- 1 | CHANGES for "Learning Core Audio: A Hands-On Guide to Audio Programming for Mac and iOS" sample code 2 | 3 | March 13, 2014 4 | 5 | All projects: 6 | ------------- 7 | With the release of Xcode 5.1 and iOS 7.1, we are updating all projects to use "Latest OS X" or "Latest iOS" as their target SDK, rather than risk breakage as newer versions of Xcode drop older versions of the OS X and iOS SDKs. We are also modernizing the project files to use LLVM and LLDB, the supported compiler and debugger as of Xcode 5.1. 8 | 9 | Because it has been a source of confusion for some readers, we are also moving all Foundation-based examples to use Automatic Reference Counting (ARC). This means the downloadable code no longer matches the book, in that all uses of retain/release, autorelease pools, and explicit calls to dealloc been removed, and that toll-free bridging casts now take the appropriate __bridge modifier. But we've found that ARC is now sufficiently entrenched that some people don't recognize these pre-ARC memory management techniques. 10 | 11 | There are no code changes other than those required to handle ARC or resolve new compiler warnings (most of which are just explicit casts). 12 | 13 | Each project now has its own README.txt file describing the project. These files include the individual changes made as part of this update. 14 | 15 | Individual examples: 16 | -------------------- 17 | 18 | We've re-colored the piano keyboard buttons in the CH11_MIDIWifiSource .xib, since iOS 7 made them not look like buttons. 19 | 20 | CH12_MIDIToAUSampler now includes the .aupreset file and source sound used when writing the book, though we still recommend building your own preset with AU Lab. This same preset has always been included with the CH12_MIDIToAUSamplerIOS example, since the iOS version needs to find the preset within its own app bundle, unlike OS X, which searches known filesystem paths. 21 | 22 | 23 | +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ 24 | 25 | January 23, 2013 26 | 27 | CH06_AudioConverter/ 28 | main.c 29 | -------------------- 30 | After line 205, added 31 | free (outputBuffer); 32 | to release memory that was malloc()ed on line 163. 33 | (thanks Matt Daugherty) 34 | 35 | CH07_AUGraphSpeechSynthesis/ 36 | main.c 37 | ---------------------------- 38 | After line 196, added 39 | DisposeAUGraph(player.graph); 40 | to release the AUGraph. 
41 | (thanks Seth Willits) 42 | 43 | CH08_AUGraphInput/ 44 | main.cpp 45 | ------------------ 46 | Changed line 58 from: 47 | if ((player->firstOutputSampleTime > 0.0) && 48 | to 49 | if ((player->firstOutputSampleTime > -1.0) && 50 | 51 | Changed line 104 from: 52 | if ((player->firstInputSampleTime > 0.0) && 53 | to 54 | if ((player->firstInputSampleTime > -1.0) && 55 | to properly check against the -1 flag value that is set on these counters before they're first used (see the -1 initializations on lines 481 and 320, respectively). 56 | (thanks Ron Lee) 57 | 58 | CH08_AUGraphInput/ 59 | CH08_AUGraphInput.xcodeproj 60 | ------------------------------- 61 | The project's references to the CARingBuffer.h and CARingBuffer.cpp files now uses an absolute reference to the /Library/Developer/CoreAudio/PublicUtility folder, which is their expected location as of Xcode 4.5 and up. Search Xcode documentation for "Core Audio Utility Classes", a pseudo sample code project, for the latest version of these classes. The Readme.rtf supplied with the classes indicates that this is their new expected path (replacing /Developer/Extras/CoreAudio/PublicUtility, where they were automatically installed prior to Xcode 4.3, and where the book describes them being). 62 | 63 | +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ 64 | 65 | August 29, 2012: 66 | 67 | All projects: 68 | ------------- 69 | Now that Xcode 4.4 has dropped support for OS X 10.6 as a base SDK, we have updated all the OS X project files to build for the OS X 10.7 SDK. The deployment target is still 10.6. iOS projects are still "Latest iOS", and work on iOS 5.1, the current public version as of this writing. 70 | 71 | CH02_CAToneFileGenerator/ 72 | main.m 73 | ------------------------- 74 | Changed line 106 from: 75 | NSLog (@"wrote %ld samples", sampleCount); 76 | to 77 | NSLog (@"wrote %ld samples", sampleCount); 78 | to match the bit-depth of the sampleCount argument 79 | (thanks "jarryd") 80 | 81 | 82 | CH07_AUGraphSineWave/ 83 | main.c 84 | ---------------------- 85 | Changed line 96 from: 86 | input.inputProcRefCon = &player; 87 | to 88 | input.inputProcRefCon = player; 89 | to correct the pointer type of player, and eliminate a crash when the sleep() exits. 90 | (thanks Stefan Frauenfelder and Dmitri Kharlamov) 91 | 92 | 93 | CH10_iOSBackgroundingTone/ 94 | CH10_iOSBackgroundingToneAppDelegate.m 95 | ----------------------------------------------- 96 | Changed line 63 from: 97 | (data)[frame] = (SInt16) (sin (2 * M_PI * (j / cycleLength)) * 0x8000); 98 | to 99 | (data)[frame] = (SInt16) (sin (2 * M_PI * (j / cycleLength)) * SHRT_MAX); 100 | to eliminate malformed waves at certain frequencies / sample rates. 101 | (thanks Markus Boigner) 102 | 103 | 104 | -------------------------------------------------------------------------------- /CH11_MIDIToAUGraph/main.c: -------------------------------------------------------------------------------- 1 | // 2 | // main.c 3 | // CH11_MIDIToAUGraph 4 | // 5 | // Created by Chris Adamson on 9/6/11. 6 | // Copyright 2011 Subsequently and Furthermore, Inc. All rights reserved. 
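// (Editor's summary, not part of the original source:) this command-line example connects every
// available CoreMIDI source to an AUGraph whose instrument node is Apple's DLS software synth.
// MyMIDIReadProc forwards incoming note-on/note-off messages to the synth via MusicDeviceMIDIEvent(),
// and main() spins a CFRunLoop until the process is killed with control-C.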
7 | // 8 | 9 | #include 10 | #import 11 | #import 12 | 13 | #pragma mark - state struct 14 | typedef struct MyMIDIPlayer { 15 | AUGraph graph; 16 | AudioUnit instrumentUnit; 17 | } MyMIDIPlayer; 18 | 19 | #pragma mark - forward declarations 20 | void setupMIDI(MyMIDIPlayer *player); 21 | void setupAUGraph(MyMIDIPlayer *player); 22 | static void MyMIDIReadProc(const MIDIPacketList *pktlist, void *refCon, void *connRefCon); 23 | void MyMIDINotifyProc (const MIDINotification *message, void *refCon); 24 | 25 | #pragma mark utility functions 26 | static void CheckError(OSStatus error, const char *operation) 27 | { 28 | if (error == noErr) return; 29 | 30 | char str[20]; 31 | // see if it appears to be a 4-char-code 32 | *(UInt32 *)(str + 1) = CFSwapInt32HostToBig(error); 33 | if (isprint(str[1]) && isprint(str[2]) && isprint(str[3]) && isprint(str[4])) { 34 | str[0] = str[5] = '\''; 35 | str[6] = '\0'; 36 | } else 37 | // no, format it as an integer 38 | sprintf(str, "%d", (int)error); 39 | 40 | fprintf(stderr, "Error: %s (%s)\n", operation, str); 41 | 42 | exit(1); 43 | } 44 | 45 | #pragma mark - callbacks 46 | static void MyMIDIReadProc(const MIDIPacketList *pktlist, void *refCon, void *connRefCon) { 47 | MyMIDIPlayer *player = (MyMIDIPlayer*) refCon; 48 | 49 | MIDIPacket *packet = (MIDIPacket *)pktlist->packet; 50 | for (int i=0; i < pktlist->numPackets; i++) { 51 | Byte midiStatus = packet->data[0]; 52 | Byte midiCommand = midiStatus >> 4; 53 | // is it a note-on or note-off 54 | if ((midiCommand == 0x09) || 55 | (midiCommand == 0x08)) { 56 | Byte note = packet->data[1] & 0x7F; 57 | Byte velocity = packet->data[2] & 0x7F; 58 | printf("midiCommand=%d. Note=%d, Velocity=%d\n", midiCommand, note, velocity); 59 | 60 | // send to augraph 61 | CheckError(MusicDeviceMIDIEvent (player->instrumentUnit, 62 | midiStatus, 63 | note, 64 | velocity, 65 | 0), 66 | "Couldn't send MIDI event"); 67 | 68 | } 69 | packet = MIDIPacketNext(packet); 70 | } 71 | } 72 | 73 | 74 | void MyMIDINotifyProc (const MIDINotification *message, void *refCon) { 75 | printf("MIDI Notify, messageId=%d,", message->messageID); 76 | } 77 | 78 | 79 | 80 | #pragma mark - augraph 81 | void setupAUGraph(MyMIDIPlayer *player) { 82 | 83 | CheckError(NewAUGraph(&player->graph), 84 | "Couldn't open AU Graph"); 85 | 86 | // generate description that will match our output device (speakers) 87 | AudioComponentDescription outputcd = {0}; 88 | outputcd.componentType = kAudioUnitType_Output; 89 | outputcd.componentSubType = kAudioUnitSubType_DefaultOutput; 90 | outputcd.componentManufacturer = kAudioUnitManufacturer_Apple; 91 | 92 | // adds a node with above description to the graph 93 | AUNode outputNode; 94 | CheckError(AUGraphAddNode(player->graph, &outputcd, &outputNode), 95 | "AUGraphAddNode[kAudioUnitSubType_DefaultOutput] failed"); 96 | 97 | 98 | AudioComponentDescription instrumentcd = {0}; 99 | instrumentcd.componentManufacturer = kAudioUnitManufacturer_Apple; 100 | instrumentcd.componentType = kAudioUnitType_MusicDevice; 101 | instrumentcd.componentSubType = kAudioUnitSubType_DLSSynth; 102 | 103 | AUNode instrumentNode; 104 | CheckError(AUGraphAddNode(player->graph, &instrumentcd, &instrumentNode), 105 | "AUGraphAddNode[kAudioUnitSubType_DLSSynth] failed"); 106 | 107 | // opening the graph opens all contained audio units but does not allocate any resources yet 108 | CheckError(AUGraphOpen(player->graph), 109 | "AUGraphOpen failed"); 110 | 111 | // get the reference to the AudioUnit object for the instrument graph node 112 | 
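// (Editor's note:) AUGraphNodeInfo() can also return the node's AudioComponentDescription through
// its third parameter; NULL is passed below because only the AudioUnit handle is needed here.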
CheckError(AUGraphNodeInfo(player->graph, instrumentNode, NULL, &player->instrumentUnit), 113 | "AUGraphNodeInfo failed"); 114 | 115 | // connect the output source of the instrument AU to the input source of the output node 116 | CheckError(AUGraphConnectNodeInput(player->graph, instrumentNode, 0, outputNode, 0), 117 | "AUGraphConnectNodeInput"); 118 | 119 | // now initialize the graph (causes resources to be allocated) 120 | CheckError(AUGraphInitialize(player->graph), 121 | "AUGraphInitialize failed"); 122 | 123 | 124 | } 125 | 126 | #pragma mark - midi 127 | void setupMIDI(MyMIDIPlayer *player) { 128 | 129 | MIDIClientRef client; 130 | CheckError (MIDIClientCreate(CFSTR("Core MIDI to System Sounds Demo"), MyMIDINotifyProc, player, &client), 131 | "Couldn't create MIDI client"); 132 | 133 | MIDIPortRef inPort; 134 | CheckError (MIDIInputPortCreate(client, CFSTR("Input port"), MyMIDIReadProc, player, &inPort), 135 | "Couldn't create MIDI input port"); 136 | 137 | unsigned long sourceCount = MIDIGetNumberOfSources(); 138 | printf ("%ld sources\n", sourceCount); 139 | for (int i = 0; i < sourceCount; ++i) { 140 | MIDIEndpointRef src = MIDIGetSource(i); 141 | CFStringRef endpointName = NULL; 142 | CheckError(MIDIObjectGetStringProperty(src, kMIDIPropertyName, &endpointName), 143 | "Couldn't get endpoint name"); 144 | char endpointNameC[255]; 145 | CFStringGetCString(endpointName, endpointNameC, 255, kCFStringEncodingUTF8); 146 | printf(" source %d: %s\n", i, endpointNameC); 147 | CheckError (MIDIPortConnectSource(inPort, src, NULL), 148 | "Couldn't connect MIDI port"); 149 | } 150 | 151 | 152 | } 153 | 154 | #pragma mark - main 155 | int main (int argc, const char * argv[]) 156 | { 157 | 158 | MyMIDIPlayer player; 159 | 160 | setupAUGraph(&player); 161 | setupMIDI(&player); 162 | 163 | CheckError (AUGraphStart(player.graph), 164 | "couldn't start graph"); 165 | 166 | CFRunLoopRun(); 167 | // run until aborted with control-C 168 | 169 | return 0; 170 | } 171 | 172 | 173 | -------------------------------------------------------------------------------- /CH06_ExtAudioFileConverter/main.c: -------------------------------------------------------------------------------- 1 | #include 2 | 3 | #define kInputFileLocation CFSTR("/Insert/Path/To/Audio/File.xxx") 4 | // #define kInputFileLocation CFSTR("/Users/kevin/Desktop/tmp_storage/audio_tests/cdsd_scratch.aiff") 5 | // #define kInputFileLocation CFSTR("/Volumes/Sephiroth/iTunes/iTunes Media/Music/Compilations/ESCAFLOWNE - ORIGINAL MOVIE SOUNDTRACK/21 We're flying.m4a") 6 | 7 | // ml: I'm playing with the name of these structs because it's confusing to have a struct called audioConverter and an audioConverter called converter. I'm not married to it, and will see how it flows with the text and as the bigger picture becomes clear. This is generally true with the names of things. 8 | 9 | typedef struct MyAudioConverterSettings 10 | { 11 | AudioStreamBasicDescription outputFormat; // output file's data stream description 12 | 13 | ExtAudioFileRef inputFile; // reference to your input file 14 | AudioFileID outputFile; // reference to your output file 15 | 16 | } MyAudioConverterSettings; 17 | 18 | void Convert(MyAudioConverterSettings *mySettings); 19 | 20 | #pragma mark - utility functions - 21 | 22 | // generic error handler - if result is nonzero, prints error message and exits program. 
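// (Editor's note:) many Core Audio OSStatus values are four-character codes such as 'fmt?', so the
// handler below byte-swaps the status into a char buffer and prints it as characters when all four
// bytes are printable; anything else is printed as a plain integer.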
23 | static void CheckResult(OSStatus result, const char *operation) 24 | { 25 | if (result == noErr) return; 26 | 27 | char errorString[20]; 28 | // see if it appears to be a 4-char-code 29 | *(UInt32 *)(errorString + 1) = CFSwapInt32HostToBig(result); 30 | if (isprint(errorString[1]) && isprint(errorString[2]) && isprint(errorString[3]) && isprint(errorString[4])) { 31 | errorString[0] = errorString[5] = '\''; 32 | errorString[6] = '\0'; 33 | } else 34 | // no, format it as an integer 35 | sprintf(errorString, "%d", (int)result); 36 | 37 | fprintf(stderr, "Error: %s (%s)\n", operation, errorString); 38 | 39 | exit(1); 40 | } 41 | 42 | 43 | #pragma mark - audio converter - 44 | void Convert(MyAudioConverterSettings *mySettings) 45 | { 46 | 47 | UInt32 outputBufferSize = 32 * 1024; // 32 KB is a good starting point 48 | UInt32 sizePerPacket = mySettings->outputFormat.mBytesPerPacket; 49 | UInt32 packetsPerBuffer = outputBufferSize / sizePerPacket; 50 | 51 | // allocate destination buffer 52 | UInt8 *outputBuffer = (UInt8 *)malloc(sizeof(UInt8) * outputBufferSize); 53 | 54 | UInt32 outputFilePacketPosition = 0; //in bytes 55 | while(1) 56 | { 57 | // wrap the destination buffer in an AudioBufferList 58 | AudioBufferList convertedData; 59 | convertedData.mNumberBuffers = 1; 60 | convertedData.mBuffers[0].mNumberChannels = mySettings->outputFormat.mChannelsPerFrame; 61 | convertedData.mBuffers[0].mDataByteSize = outputBufferSize; 62 | convertedData.mBuffers[0].mData = outputBuffer; 63 | 64 | UInt32 frameCount = packetsPerBuffer; 65 | 66 | // read from the extaudiofile 67 | CheckResult(ExtAudioFileRead(mySettings->inputFile, 68 | &frameCount, 69 | &convertedData), 70 | "Couldn't read from input file"); 71 | 72 | if (frameCount == 0) { 73 | printf ("done reading from file"); 74 | return; 75 | } 76 | 77 | // write the converted data to the output file 78 | CheckResult (AudioFileWritePackets(mySettings->outputFile, 79 | FALSE, 80 | frameCount, 81 | NULL, 82 | outputFilePacketPosition / mySettings->outputFormat.mBytesPerPacket, 83 | &frameCount, 84 | convertedData.mBuffers[0].mData), 85 | "Couldn't write packets to file"); 86 | 87 | // advance the output file write location 88 | outputFilePacketPosition += (frameCount * mySettings->outputFormat.mBytesPerPacket); 89 | } 90 | 91 | // AudioConverterDispose(audioConverter); 92 | } 93 | 94 | int main(int argc, const char *argv[]) 95 | { 96 | MyAudioConverterSettings audioConverterSettings = {0}; 97 | 98 | // open the input with ExtAudioFile 99 | CFURLRef inputFileURL = CFURLCreateWithFileSystemPath(kCFAllocatorDefault, kInputFileLocation, kCFURLPOSIXPathStyle, false); 100 | CheckResult(ExtAudioFileOpenURL(inputFileURL, 101 | &audioConverterSettings.inputFile), 102 | "ExtAudioFileOpenURL failed"); 103 | 104 | // define the ouput format. 
AudioConverter requires that one of the data formats be LPCM 105 | audioConverterSettings.outputFormat.mSampleRate = 44100.0; 106 | audioConverterSettings.outputFormat.mFormatID = kAudioFormatLinearPCM; 107 | audioConverterSettings.outputFormat.mFormatFlags = kAudioFormatFlagIsBigEndian | kAudioFormatFlagIsSignedInteger | kAudioFormatFlagIsPacked; 108 | audioConverterSettings.outputFormat.mBytesPerPacket = 4; 109 | audioConverterSettings.outputFormat.mFramesPerPacket = 1; 110 | audioConverterSettings.outputFormat.mBytesPerFrame = 4; 111 | audioConverterSettings.outputFormat.mChannelsPerFrame = 2; 112 | audioConverterSettings.outputFormat.mBitsPerChannel = 16; 113 | 114 | // create output file 115 | CFURLRef outputFileURL = CFURLCreateWithFileSystemPath(kCFAllocatorDefault, CFSTR("output.aif"), kCFURLPOSIXPathStyle, false); 116 | CheckResult (AudioFileCreateWithURL(outputFileURL, kAudioFileAIFFType, &audioConverterSettings.outputFormat, kAudioFileFlags_EraseFile, &audioConverterSettings.outputFile), 117 | "AudioFileCreateWithURL failed"); 118 | CFRelease(outputFileURL); 119 | 120 | // set the PCM format as the client format on the input ext audio file 121 | CheckResult(ExtAudioFileSetProperty(audioConverterSettings.inputFile, 122 | kExtAudioFileProperty_ClientDataFormat, 123 | sizeof (AudioStreamBasicDescription), 124 | &audioConverterSettings.outputFormat), 125 | "Couldn't set client data format on input ext file"); 126 | 127 | fprintf(stdout, "Converting...\n"); 128 | Convert(&audioConverterSettings); 129 | 130 | cleanup: 131 | // AudioFileClose(audioConverterSettings.inputFile); 132 | ExtAudioFileDispose(audioConverterSettings.inputFile); 133 | AudioFileClose(audioConverterSettings.outputFile); 134 | return 0; 135 | } 136 | -------------------------------------------------------------------------------- /CH07_AUGraphPlayer/main.c: -------------------------------------------------------------------------------- 1 | #include 2 | #include // for usleep() 3 | 4 | #define kInputFileLocation CFSTR ("/Insert/Path/To/Audio/File.xxx") 5 | // #define kInputFileLocation CFSTR("/Volumes/Galactica/Music/Dubee - its the crest.mp3") 6 | // #define kInputFileLocation CFSTR("/Volumes/Sephiroth/iTunes/iTunes Media/Music/Metric/Fantasies/06 Gimme Sympathy.mp3") 7 | 8 | typedef struct MyAUGraphPlayer 9 | { 10 | AudioStreamBasicDescription inputFormat; // input file's data stream description 11 | AudioFileID inputFile; // reference to your input file 12 | 13 | AUGraph graph; 14 | AudioUnit fileAU; 15 | 16 | } MyAUGraphPlayer; 17 | 18 | void CreateMyAUGraph(MyAUGraphPlayer *player); 19 | double PrepareFileAU(MyAUGraphPlayer *player); 20 | 21 | #pragma mark - utility functions - 22 | 23 | // generic error handler - if err is nonzero, prints error message and exits program. 
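// (Editor's note:) essentially the same error handler appears in each standalone example in this
// repository (sometimes named CheckResult) rather than being factored into a shared header, so
// each file builds on its own.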
24 | static void CheckError(OSStatus error, const char *operation) 25 | { 26 | if (error == noErr) return; 27 | 28 | char str[20]; 29 | // see if it appears to be a 4-char-code 30 | *(UInt32 *)(str + 1) = CFSwapInt32HostToBig(error); 31 | if (isprint(str[1]) && isprint(str[2]) && isprint(str[3]) && isprint(str[4])) { 32 | str[0] = str[5] = '\''; 33 | str[6] = '\0'; 34 | } else 35 | // no, format it as an integer 36 | sprintf(str, "%d", (int)error); 37 | 38 | fprintf(stderr, "Error: %s (%s)\n", operation, str); 39 | 40 | exit(1); 41 | } 42 | 43 | 44 | #pragma mark - audio converter - 45 | 46 | void CreateMyAUGraph(MyAUGraphPlayer *player) 47 | { 48 | // create a new AUGraph 49 | CheckError(NewAUGraph(&player->graph), 50 | "NewAUGraph failed"); 51 | 52 | // generate description that will match out output device (speakers) 53 | AudioComponentDescription outputcd = {0}; 54 | outputcd.componentType = kAudioUnitType_Output; 55 | outputcd.componentSubType = kAudioUnitSubType_DefaultOutput; 56 | outputcd.componentManufacturer = kAudioUnitManufacturer_Apple; 57 | 58 | // adds a node with above description to the graph 59 | AUNode outputNode; 60 | CheckError(AUGraphAddNode(player->graph, &outputcd, &outputNode), 61 | "AUGraphAddNode[kAudioUnitSubType_DefaultOutput] failed"); 62 | 63 | // generate description that will match a generator AU of type: audio file player 64 | AudioComponentDescription fileplayercd = {0}; 65 | fileplayercd.componentType = kAudioUnitType_Generator; 66 | fileplayercd.componentSubType = kAudioUnitSubType_AudioFilePlayer; 67 | fileplayercd.componentManufacturer = kAudioUnitManufacturer_Apple; 68 | 69 | // adds a node with above description to the graph 70 | AUNode fileNode; 71 | CheckError(AUGraphAddNode(player->graph, &fileplayercd, &fileNode), 72 | "AUGraphAddNode[kAudioUnitSubType_AudioFilePlayer] failed"); 73 | 74 | // opening the graph opens all contained audio units but does not allocate any resources yet 75 | CheckError(AUGraphOpen(player->graph), 76 | "AUGraphOpen failed"); 77 | 78 | // get the reference to the AudioUnit object for the file player graph node 79 | CheckError(AUGraphNodeInfo(player->graph, fileNode, NULL, &player->fileAU), 80 | "AUGraphNodeInfo failed"); 81 | 82 | // connect the output source of the file player AU to the input source of the output node 83 | CheckError(AUGraphConnectNodeInput(player->graph, fileNode, 0, outputNode, 0), 84 | "AUGraphConnectNodeInput"); 85 | 86 | // now initialize the graph (causes resources to be allocated) 87 | CheckError(AUGraphInitialize(player->graph), 88 | "AUGraphInitialize failed"); 89 | } 90 | 91 | double PrepareFileAU(MyAUGraphPlayer *player) 92 | { 93 | 94 | // tell the file player unit to load the file we want to play 95 | CheckError(AudioUnitSetProperty(player->fileAU, kAudioUnitProperty_ScheduledFileIDs, 96 | kAudioUnitScope_Global, 0, &player->inputFile, sizeof(player->inputFile)), 97 | "AudioUnitSetProperty[kAudioUnitProperty_ScheduledFileIDs] failed"); 98 | 99 | UInt64 nPackets; 100 | UInt32 propsize = sizeof(nPackets); 101 | CheckError(AudioFileGetProperty(player->inputFile, kAudioFilePropertyAudioDataPacketCount, 102 | &propsize, &nPackets), 103 | "AudioFileGetProperty[kAudioFilePropertyAudioDataPacketCount] failed"); 104 | 105 | // tell the file player AU to play the entire file 106 | ScheduledAudioFileRegion rgn; 107 | memset (&rgn.mTimeStamp, 0, sizeof(rgn.mTimeStamp)); 108 | rgn.mTimeStamp.mFlags = kAudioTimeStampSampleTimeValid; 109 | rgn.mTimeStamp.mSampleTime = 0; 110 | rgn.mCompletionProc = NULL; 
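// (Editor's note:) no completion callback is installed (mCompletionProc above and
// mCompletionProcUserData below stay NULL); instead, main() takes the duration PrepareFileAU()
// returns and simply usleep()s for that long before tearing the graph down.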
111 | rgn.mCompletionProcUserData = NULL; 112 | rgn.mAudioFile = player->inputFile; 113 | rgn.mLoopCount = 1; 114 | rgn.mStartFrame = 0; 115 | rgn.mFramesToPlay = nPackets * player->inputFormat.mFramesPerPacket; 116 | 117 | CheckError(AudioUnitSetProperty(player->fileAU, kAudioUnitProperty_ScheduledFileRegion, 118 | kAudioUnitScope_Global, 0,&rgn, sizeof(rgn)), 119 | "AudioUnitSetProperty[kAudioUnitProperty_ScheduledFileRegion] failed"); 120 | 121 | // prime the file player AU with default values 122 | UInt32 defaultVal = 0; 123 | CheckError(AudioUnitSetProperty(player->fileAU, kAudioUnitProperty_ScheduledFilePrime, 124 | kAudioUnitScope_Global, 0, &defaultVal, sizeof(defaultVal)), 125 | "AudioUnitSetProperty[kAudioUnitProperty_ScheduledFilePrime] failed"); 126 | 127 | // tell the file player AU when to start playing (-1 sample time means next render cycle) 128 | AudioTimeStamp startTime; 129 | memset (&startTime, 0, sizeof(startTime)); 130 | startTime.mFlags = kAudioTimeStampSampleTimeValid; 131 | startTime.mSampleTime = -1; 132 | CheckError(AudioUnitSetProperty(player->fileAU, kAudioUnitProperty_ScheduleStartTimeStamp, 133 | kAudioUnitScope_Global, 0, &startTime, sizeof(startTime)), 134 | "AudioUnitSetProperty[kAudioUnitProperty_ScheduleStartTimeStamp]"); 135 | 136 | // file duration 137 | return (nPackets * player->inputFormat.mFramesPerPacket) / player->inputFormat.mSampleRate; 138 | } 139 | 140 | #pragma mark - main - 141 | int main(int argc, const char *argv[]) 142 | { 143 | CFURLRef inputFileURL = CFURLCreateWithFileSystemPath(kCFAllocatorDefault, kInputFileLocation, kCFURLPOSIXPathStyle, false); 144 | MyAUGraphPlayer player = {0}; 145 | 146 | // open the input audio file 147 | CheckError(AudioFileOpenURL(inputFileURL, kAudioFileReadPermission, 0, &player.inputFile), 148 | "AudioFileOpenURL failed"); 149 | CFRelease(inputFileURL); 150 | 151 | // get the audio data format from the file 152 | UInt32 propSize = sizeof(player.inputFormat); 153 | CheckError(AudioFileGetProperty(player.inputFile, kAudioFilePropertyDataFormat, 154 | &propSize, &player.inputFormat), 155 | "couldn't get file's data format"); 156 | 157 | // build a basic fileplayer->speakers graph 158 | CreateMyAUGraph(&player); 159 | 160 | // configure the file player 161 | Float64 fileDuration = PrepareFileAU(&player); 162 | 163 | // start playing 164 | CheckError(AUGraphStart(player.graph), 165 | "AUGraphStart failed"); 166 | 167 | // sleep until the file is finished 168 | usleep ((int)(fileDuration * 1000.0 * 1000.0)); 169 | 170 | cleanup: 171 | AUGraphStop (player.graph); 172 | AUGraphUninitialize (player.graph); 173 | AUGraphClose(player.graph); 174 | AudioFileClose(player.inputFile); 175 | 176 | return 0; 177 | } 178 | -------------------------------------------------------------------------------- /CH07_AUGraphSpeechSynthesis/main.c: -------------------------------------------------------------------------------- 1 | #include 2 | #include 3 | 4 | #define PART_II 5 | 6 | typedef struct MyAUGraphPlayer 7 | { 8 | // AudioStreamBasicDescription streamFormat; // ASBD to use in the graph 9 | 10 | AUGraph graph; 11 | AudioUnit speechAU; 12 | 13 | } MyAUGraphPlayer; 14 | 15 | void CreateMyAUGraph(MyAUGraphPlayer *player); 16 | void PrepareSpeechAU(MyAUGraphPlayer *player); 17 | 18 | #pragma mark - utility functions - 19 | 20 | // generic error handler - if err is nonzero, prints error message and exits program. 
21 | static void CheckError(OSStatus error, const char *operation) 22 | { 23 | if (error == noErr) return; 24 | 25 | char str[20]; 26 | // see if it appears to be a 4-char-code 27 | *(UInt32 *)(str + 1) = CFSwapInt32HostToBig(error); 28 | if (isprint(str[1]) && isprint(str[2]) && isprint(str[3]) && isprint(str[4])) { 29 | str[0] = str[5] = '\''; 30 | str[6] = '\0'; 31 | } else 32 | // no, format it as an integer 33 | sprintf(str, "%d", (int)error); 34 | 35 | fprintf(stderr, "Error: %s (%s)\n", operation, str); 36 | 37 | exit(1); 38 | } 39 | 40 | void CreateMyAUGraph(MyAUGraphPlayer *player) 41 | { 42 | // create a new AUGraph 43 | CheckError(NewAUGraph(&player->graph), 44 | "NewAUGraph failed"); 45 | 46 | // generate description that will match our output device (speakers) 47 | AudioComponentDescription outputcd = {0}; 48 | outputcd.componentType = kAudioUnitType_Output; 49 | outputcd.componentSubType = kAudioUnitSubType_DefaultOutput; 50 | outputcd.componentManufacturer = kAudioUnitManufacturer_Apple; 51 | 52 | // adds a node with above description to the graph 53 | AUNode outputNode; 54 | CheckError(AUGraphAddNode(player->graph, &outputcd, &outputNode), 55 | "AUGraphAddNode[kAudioUnitSubType_DefaultOutput] failed"); 56 | 57 | // generate description that will match a generator AU of type: speech synthesizer 58 | AudioComponentDescription speechcd = {0}; 59 | speechcd.componentType = kAudioUnitType_Generator; 60 | speechcd.componentSubType = kAudioUnitSubType_SpeechSynthesis; 61 | speechcd.componentManufacturer = kAudioUnitManufacturer_Apple; 62 | 63 | // adds a node with above description to the graph 64 | AUNode speechNode; 65 | CheckError(AUGraphAddNode(player->graph, &speechcd, &speechNode), 66 | "AUGraphAddNode[kAudioUnitSubType_SpeechSynthesis] failed"); 67 | 68 | // opening the graph opens all contained audio units but does not allocate any resources yet 69 | CheckError(AUGraphOpen(player->graph), 70 | "AUGraphOpen failed"); 71 | 72 | // get the reference to the AudioUnit object for the speech synthesis graph node 73 | CheckError(AUGraphNodeInfo(player->graph, speechNode, NULL, &player->speechAU), 74 | "AUGraphNodeInfo failed"); 75 | 76 | // // debug - get the asbd 77 | // UInt32 propSize = sizeof (AudioStreamBasicDescription); 78 | // CheckError(AudioUnitGetProperty(player->speechAU, 79 | // kAudioUnitProperty_StreamFormat, 80 | // kAudioUnitScope_Output, 81 | // 0, 82 | // &player->streamFormat, 83 | // &propSize), 84 | // "Couldn't get ASBD"); 85 | 86 | #ifdef PART_II 87 | // 88 | // FUN! 
re-route the speech thru a reverb effect before sending to speakers 89 | // 90 | // generate description that will match out reverb effect 91 | AudioComponentDescription reverbcd = {0}; 92 | reverbcd.componentType = kAudioUnitType_Effect; 93 | reverbcd.componentSubType = kAudioUnitSubType_MatrixReverb; 94 | reverbcd.componentManufacturer = kAudioUnitManufacturer_Apple; 95 | 96 | // adds a node with above description to the graph 97 | AUNode reverbNode; 98 | CheckError(AUGraphAddNode(player->graph, &reverbcd, &reverbNode), 99 | "AUGraphAddNode[kAudioUnitSubType_MatrixReverb] failed"); 100 | 101 | // connect the output source of the speech synthesizer AU to the input source of the reverb node 102 | CheckError(AUGraphConnectNodeInput(player->graph, speechNode, 0, reverbNode, 0), 103 | "AUGraphConnectNodeInput"); 104 | 105 | // connect the output source of the reverb AU to the input source of the output node 106 | CheckError(AUGraphConnectNodeInput(player->graph, reverbNode, 0, outputNode, 0), 107 | "AUGraphConnectNodeInput"); 108 | 109 | // get the reference to the AudioUnit object for the reverb graph node 110 | AudioUnit reverbUnit; 111 | CheckError(AUGraphNodeInfo(player->graph, reverbNode, NULL, &reverbUnit), 112 | "AUGraphNodeInfo failed"); 113 | 114 | /* 115 | enum { 116 | kReverbRoomType_SmallRoom = 0, 117 | kReverbRoomType_MediumRoom = 1, 118 | kReverbRoomType_LargeRoom = 2, 119 | kReverbRoomType_MediumHall = 3, 120 | kReverbRoomType_LargeHall = 4, 121 | kReverbRoomType_Plate = 5, 122 | kReverbRoomType_MediumChamber = 6, 123 | kReverbRoomType_LargeChamber = 7, 124 | kReverbRoomType_Cathedral = 8, 125 | kReverbRoomType_LargeRoom2 = 9, 126 | kReverbRoomType_MediumHall2 = 10, 127 | kReverbRoomType_MediumHall3 = 11, 128 | kReverbRoomType_LargeHall2 = 12 129 | }; 130 | 131 | */ 132 | 133 | // now initialize the graph (causes resources to be allocated) 134 | CheckError(AUGraphInitialize(player->graph), 135 | "AUGraphInitialize failed"); 136 | 137 | 138 | // set the reverb preset for room size 139 | // UInt32 roomType = kReverbRoomType_SmallRoom; 140 | // UInt32 roomType = kReverbRoomType_MediumRoom; 141 | UInt32 roomType = kReverbRoomType_LargeHall; 142 | // UInt32 roomType = kReverbRoomType_Cathedral; 143 | 144 | CheckError(AudioUnitSetProperty(reverbUnit, kAudioUnitProperty_ReverbRoomType, 145 | kAudioUnitScope_Global, 0, &roomType, sizeof(UInt32)), 146 | "AudioUnitSetProperty[kAudioUnitProperty_ReverbRoomType] failed"); 147 | 148 | 149 | #else 150 | 151 | // connect the output source of the speech synthesis AU to the input source of the output node 152 | CheckError(AUGraphConnectNodeInput(player->graph, speechNode, 0, outputNode, 0), 153 | "AUGraphConnectNodeInput"); 154 | 155 | // now initialize the graph (causes resources to be allocated) 156 | CheckError(AUGraphInitialize(player->graph), 157 | "AUGraphInitialize failed"); 158 | 159 | #endif 160 | CAShow(player->graph); 161 | } 162 | 163 | void PrepareSpeechAU(MyAUGraphPlayer *player) 164 | { 165 | SpeechChannel chan; 166 | 167 | UInt32 propsize = sizeof(SpeechChannel); 168 | CheckError(AudioUnitGetProperty(player->speechAU, kAudioUnitProperty_SpeechChannel, 169 | kAudioUnitScope_Global, 0, &chan, &propsize), 170 | "AudioUnitGetProperty[kAudioUnitProperty_SpeechChannel] failed"); 171 | 172 | SpeakCFString(chan, CFSTR("hello world"), NULL); 173 | } 174 | 175 | #pragma mark main 176 | 177 | int main(int argc, const char *argv[]) 178 | { 179 | MyAUGraphPlayer player = {0}; 180 | 181 | // build a basic speech->speakers graph 182 | 
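// (Editor's note:) with PART_II defined at the top of this file, CreateMyAUGraph() routes the
// speech synthesis unit through a matrix reverb (kAudioUnitSubType_MatrixReverb) before the
// default output unit, and CAShow() prints the finished graph topology to the console.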
CreateMyAUGraph(&player); 183 | 184 | // configure the speech synthesizer 185 | PrepareSpeechAU(&player); 186 | 187 | // start playing 188 | CheckError(AUGraphStart(player.graph), "AUGraphStart failed"); 189 | 190 | // sleep a while so the speech can play out 191 | usleep ((int)(10 * 1000. * 1000.)); 192 | 193 | cleanup: 194 | AUGraphStop (player.graph); 195 | AUGraphUninitialize (player.graph); 196 | AUGraphClose(player.graph); 197 | DisposeAUGraph(player.graph); 198 | return 0; 199 | } 200 | -------------------------------------------------------------------------------- /CH12_MIDIToAUSampler/main.c: -------------------------------------------------------------------------------- 1 | // 2 | // main.c 3 | // CH11_MIDIToAUGraph 4 | // 5 | // Created by Chris Adamson on 9/6/11. 6 | // Copyright 2011 Subsequently and Furthermore, Inc. All rights reserved. 7 | // 8 | 9 | #include 10 | #import 11 | #import 12 | 13 | #pragma mark - state struct 14 | typedef struct MyMIDIPlayer { 15 | AUGraph graph; 16 | AudioUnit instrumentUnit; 17 | } MyMIDIPlayer; 18 | 19 | #pragma mark - forward declarations 20 | void setupMIDI(MyMIDIPlayer *player); 21 | void setupAUGraph(MyMIDIPlayer *player); 22 | static void MyMIDIReadProc(const MIDIPacketList *pktlist, void *refCon, void *connRefCon); 23 | void MyMIDINotifyProc (const MIDINotification *message, void *refCon); 24 | 25 | #pragma mark utility functions 26 | static void CheckError(OSStatus error, const char *operation) 27 | { 28 | if (error == noErr) return; 29 | 30 | char str[20]; 31 | // see if it appears to be a 4-char-code 32 | *(UInt32 *)(str + 1) = CFSwapInt32HostToBig(error); 33 | if (isprint(str[1]) && isprint(str[2]) && isprint(str[3]) && isprint(str[4])) { 34 | str[0] = str[5] = '\''; 35 | str[6] = '\0'; 36 | } else 37 | // no, format it as an integer 38 | sprintf(str, "%d", (int)error); 39 | 40 | fprintf(stderr, "Error: %s (%s)\n", operation, str); 41 | 42 | exit(1); 43 | } 44 | 45 | #pragma mark - callbacks 46 | static void MyMIDIReadProc(const MIDIPacketList *pktlist, void *refCon, void *connRefCon) { 47 | MyMIDIPlayer *player = (MyMIDIPlayer*) refCon; 48 | 49 | MIDIPacket *packet = (MIDIPacket *)pktlist->packet; 50 | for (int i=0; i < pktlist->numPackets; i++) { 51 | Byte midiStatus = packet->data[0]; 52 | Byte midiCommand = midiStatus >> 4; 53 | // is it a note-on or note-off 54 | if ((midiCommand == 0x09) || 55 | (midiCommand == 0x08)) { 56 | Byte note = packet->data[1] & 0x7F; 57 | Byte velocity = packet->data[2] & 0x7F; 58 | printf("midiCommand=%d. 
Note=%d, Velocity=%d\n", midiCommand, note, velocity); 59 | 60 | // send to augraph 61 | CheckError(MusicDeviceMIDIEvent (player->instrumentUnit, 62 | midiStatus, 63 | note, 64 | velocity, 65 | 0), 66 | "Couldn't send MIDI event"); 67 | 68 | } 69 | packet = MIDIPacketNext(packet); 70 | } 71 | } 72 | 73 | 74 | void MyMIDINotifyProc (const MIDINotification *message, void *refCon) { 75 | printf("MIDI Notify, messageId=%d,", message->messageID); 76 | } 77 | 78 | 79 | 80 | #pragma mark - augraph 81 | void setupAUGraph(MyMIDIPlayer *player) { 82 | 83 | CheckError(NewAUGraph(&player->graph), 84 | "Couldn't open AU Graph"); 85 | 86 | // generate description that will match our output device (speakers) 87 | AudioComponentDescription outputcd = {0}; 88 | outputcd.componentType = kAudioUnitType_Output; 89 | outputcd.componentSubType = kAudioUnitSubType_DefaultOutput; 90 | outputcd.componentManufacturer = kAudioUnitManufacturer_Apple; 91 | 92 | // adds a node with above description to the graph 93 | AUNode outputNode; 94 | CheckError(AUGraphAddNode(player->graph, &outputcd, &outputNode), 95 | "AUGraphAddNode[kAudioUnitSubType_DefaultOutput] failed"); 96 | 97 | 98 | AudioComponentDescription instrumentcd = {0}; 99 | instrumentcd.componentManufacturer = kAudioUnitManufacturer_Apple; 100 | instrumentcd.componentType = kAudioUnitType_MusicDevice; 101 | instrumentcd.componentSubType = kAudioUnitSubType_Sampler; // changed! 102 | 103 | AUNode instrumentNode; 104 | CheckError(AUGraphAddNode(player->graph, &instrumentcd, &instrumentNode), 105 | "AUGraphAddNode[kAudioUnitSubType_DLSSynth] failed"); 106 | 107 | // opening the graph opens all contained audio units but does not allocate any resources yet 108 | CheckError(AUGraphOpen(player->graph), 109 | "AUGraphOpen failed"); 110 | 111 | // get the reference to the AudioUnit object for the instrument graph node 112 | CheckError(AUGraphNodeInfo(player->graph, instrumentNode, NULL, &player->instrumentUnit), 113 | "AUGraphNodeInfo failed"); 114 | 115 | // connect the output source of the instrument AU to the input source of the output node 116 | CheckError(AUGraphConnectNodeInput(player->graph, instrumentNode, 0, outputNode, 0), 117 | "AUGraphConnectNodeInput"); 118 | 119 | // now initialize the graph (causes resources to be allocated) 120 | CheckError(AUGraphInitialize(player->graph), 121 | "AUGraphInitialize failed"); 122 | 123 | 124 | // configure the AUSampler 125 | // 2nd parameter obviously needs to be a full path on your system, and 3rd param is its length in characters 126 | CFURLRef presetURL = CFURLCreateFromFileSystemRepresentation( 127 | kCFAllocatorDefault, 128 | "/Users/cadamson/Library/Audio/Presets/Apple/AUSampler/ch12-aupreset.aupreset", 129 | 77, 130 | false); 131 | 132 | // load preset file into a CFDataRef 133 | CFDataRef presetData = NULL; 134 | SInt32 errorCode = noErr; 135 | Boolean gotPresetData = 136 | CFURLCreateDataAndPropertiesFromResource(kCFAllocatorSystemDefault, 137 | presetURL, 138 | &presetData, 139 | NULL, 140 | NULL, 141 | &errorCode); 142 | CheckError(errorCode, "couldn't load .aupreset data"); 143 | CheckError(!gotPresetData, "couldn't load .aupreset data"); 144 | 145 | // convert this into a property list 146 | CFPropertyListFormat presetPlistFormat = {0}; 147 | CFErrorRef presetPlistError = NULL; 148 | CFPropertyListRef presetPlist = CFPropertyListCreateWithData(kCFAllocatorSystemDefault, 149 | presetData, 150 | kCFPropertyListImmutable, 151 | &presetPlistFormat, 152 | &presetPlistError); 153 | if (presetPlistError) { 154 | 
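// (Editor's note:) presetURL and presetData created above are never CFRelease()d in this function;
// that is harmless in a short-lived command-line demo, but a longer-lived program would want to
// release them (and presetPlistError on this error path).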
printf ("Couldn't create plist object for .aupreset"); 155 | return; 156 | } 157 | 158 | // set this plist as the kAudioUnitProperty_ClassInfo on _auSampler 159 | if (presetPlist) { 160 | CheckError(AudioUnitSetProperty(player->instrumentUnit, 161 | kAudioUnitProperty_ClassInfo, 162 | kAudioUnitScope_Global, 163 | 0, 164 | &presetPlist, 165 | sizeof(presetPlist)), 166 | "Couldn't set aupreset plist as sampler's class info"); 167 | } 168 | 169 | 170 | 171 | } 172 | 173 | #pragma mark - midi 174 | void setupMIDI(MyMIDIPlayer *player) { 175 | 176 | MIDIClientRef client; 177 | CheckError (MIDIClientCreate(CFSTR("Core MIDI to System Sounds Demo"), MyMIDINotifyProc, player, &client), 178 | "Couldn't create MIDI client"); 179 | 180 | MIDIPortRef inPort; 181 | CheckError (MIDIInputPortCreate(client, CFSTR("Input port"), MyMIDIReadProc, player, &inPort), 182 | "Couldn't create MIDI input port"); 183 | 184 | unsigned long sourceCount = MIDIGetNumberOfSources(); 185 | printf ("%ld sources\n", sourceCount); 186 | for (int i = 0; i < sourceCount; ++i) { 187 | MIDIEndpointRef src = MIDIGetSource(i); 188 | CFStringRef endpointName = NULL; 189 | CheckError(MIDIObjectGetStringProperty(src, kMIDIPropertyName, &endpointName), 190 | "Couldn't get endpoint name"); 191 | char endpointNameC[255]; 192 | CFStringGetCString(endpointName, endpointNameC, 255, kCFStringEncodingUTF8); 193 | printf(" source %d: %s\n", i, endpointNameC); 194 | CheckError (MIDIPortConnectSource(inPort, src, NULL), 195 | "Couldn't connect MIDI port"); 196 | } 197 | 198 | 199 | } 200 | 201 | #pragma mark - main 202 | int main (int argc, const char * argv[]) 203 | { 204 | 205 | MyMIDIPlayer player; 206 | 207 | setupAUGraph(&player); 208 | setupMIDI(&player); 209 | 210 | CheckError (AUGraphStart(player.graph), 211 | "couldn't start graph"); 212 | 213 | CFRunLoopRun(); 214 | // run until aborted with control-C 215 | 216 | return 0; 217 | } 218 | 219 | 220 | -------------------------------------------------------------------------------- /CH09_OpenALOrbitLoop/main.c: -------------------------------------------------------------------------------- 1 | #import 2 | #import 3 | #import 4 | 5 | // #define LOOP_PATH CFSTR("/Library/Audio/Apple Loops/Apple/iLife Sound Effects/Stingers/Cartoon Boing Boing.caf") 6 | #define LOOP_PATH CFSTR ("/Library/Audio/Apple Loops/Apple/iLife Sound Effects/Transportation/Bicycle Coasting.caf") 7 | 8 | #define ORBIT_SPEED 1 9 | #define RUN_TIME 20.0 10 | 11 | #pragma mark user-data struct 12 | typedef struct MyLoopPlayer { 13 | AudioStreamBasicDescription dataFormat; 14 | UInt16 *sampleBuffer; 15 | UInt32 bufferSizeBytes; 16 | ALuint sources[1]; 17 | } MyLoopPlayer; 18 | 19 | void updateSourceLocation (MyLoopPlayer player); 20 | OSStatus loadLoopIntoBuffer(MyLoopPlayer* player); 21 | 22 | #pragma mark - utility functions - 23 | // generic error handler - if err is nonzero, prints error message and exits program. 
24 | static void CheckError(OSStatus error, const char *operation) 25 | { 26 | if (error == noErr) return; 27 | 28 | char str[20]; 29 | // see if it appears to be a 4-char-code 30 | *(UInt32 *)(str + 1) = CFSwapInt32HostToBig(error); 31 | if (isprint(str[1]) && isprint(str[2]) && isprint(str[3]) && isprint(str[4])) { 32 | str[0] = str[5] = '\''; 33 | str[6] = '\0'; 34 | } else 35 | // no, format it as an integer 36 | sprintf(str, "%d", (int)error); 37 | 38 | fprintf(stderr, "Error: %s (%s)\n", operation, str); 39 | 40 | exit(1); 41 | } 42 | 43 | static void CheckALError (const char *operation) { 44 | ALenum alErr = alGetError(); 45 | if (alErr == AL_NO_ERROR) return; 46 | char *errFormat = NULL; 47 | switch (alErr) { 48 | case AL_INVALID_NAME: errFormat = "OpenAL Error: %s (AL_INVALID_NAME)"; break; 49 | case AL_INVALID_VALUE: errFormat = "OpenAL Error: %s (AL_INVALID_VALUE)"; break; 50 | case AL_INVALID_ENUM: errFormat = "OpenAL Error: %s (AL_INVALID_ENUM)"; break; 51 | case AL_INVALID_OPERATION: errFormat = "OpenAL Error: %s (AL_INVALID_OPERATION)"; break; 52 | case AL_OUT_OF_MEMORY: errFormat = "OpenAL Error: %s (AL_OUT_OF_MEMORY)"; break; 53 | } 54 | fprintf (stderr, errFormat, operation); 55 | exit(1); 56 | 57 | } 58 | 59 | void updateSourceLocation (MyLoopPlayer player) { 60 | double theta = fmod (CFAbsoluteTimeGetCurrent() * ORBIT_SPEED, M_PI * 2); 61 | // printf ("%f\n", theta); 62 | ALfloat x = 3 * cos (theta); 63 | ALfloat y = 0.5 * sin (theta); 64 | ALfloat z = 1.0 * sin (theta); 65 | printf ("x=%f, y=%f, z=%f\n", x, y, z); 66 | alSource3f(player.sources[0], AL_POSITION, x, y, z); 67 | } 68 | 69 | OSStatus loadLoopIntoBuffer(MyLoopPlayer* player) { 70 | CFURLRef loopFileURL = CFURLCreateWithFileSystemPath(kCFAllocatorDefault, 71 | LOOP_PATH, 72 | kCFURLPOSIXPathStyle, 73 | false); 74 | 75 | // describe the client format - AL needs mono 76 | memset(&player->dataFormat, 0, sizeof(player->dataFormat)); 77 | player->dataFormat.mFormatID = kAudioFormatLinearPCM; 78 | player->dataFormat.mFormatFlags = kAudioFormatFlagIsSignedInteger | kAudioFormatFlagIsPacked; 79 | player->dataFormat.mSampleRate = 44100.0; 80 | player->dataFormat.mChannelsPerFrame = 1; 81 | player->dataFormat.mFramesPerPacket = 1; 82 | player->dataFormat.mBitsPerChannel = 16; 83 | player->dataFormat.mBytesPerFrame = 2; 84 | player->dataFormat.mBytesPerPacket = 2; 85 | 86 | ExtAudioFileRef extAudioFile; 87 | CheckError (ExtAudioFileOpenURL(loopFileURL, &extAudioFile), 88 | "Couldn't open ExtAudioFile for reading"); 89 | 90 | // tell extAudioFile about our format 91 | CheckError(ExtAudioFileSetProperty(extAudioFile, 92 | kExtAudioFileProperty_ClientDataFormat, 93 | sizeof (AudioStreamBasicDescription), 94 | &player->dataFormat), 95 | "Couldn't set client format on ExtAudioFile"); 96 | 97 | // figure out how big a buffer we need 98 | SInt64 fileLengthFrames; 99 | UInt32 propSize = sizeof (fileLengthFrames); 100 | ExtAudioFileGetProperty(extAudioFile, 101 | kExtAudioFileProperty_FileLengthFrames, 102 | &propSize, 103 | &fileLengthFrames); 104 | 105 | printf ("plan on reading %lld frames\n", fileLengthFrames); 106 | player->bufferSizeBytes = fileLengthFrames * player->dataFormat.mBytesPerFrame; 107 | 108 | AudioBufferList *buffers; 109 | UInt32 ablSize = offsetof(AudioBufferList, mBuffers[0]) + (sizeof(AudioBuffer) * 1); // 1 channel 110 | buffers = malloc (ablSize); 111 | 112 | // allocate sample buffer 113 | player->sampleBuffer = malloc(sizeof(UInt16) * player->bufferSizeBytes); // 4/18/11 - fix 1 114 | 115 | 
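// (Editor's note:) the AudioBufferList configured below holds a single mono buffer that simply
// points at sampleBuffer, so ExtAudioFileRead() decodes straight into the memory that OpenAL will
// later copy with alBufferData(). Note that the malloc above multiplies a byte count by
// sizeof(UInt16), so the buffer is twice as large as the 16-bit mono data strictly requires; the
// extra slack is wasted but harmless.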
buffers->mNumberBuffers = 1; 116 | buffers->mBuffers[0].mNumberChannels = 1; 117 | buffers->mBuffers[0].mDataByteSize = player->bufferSizeBytes; 118 | buffers->mBuffers[0].mData = player->sampleBuffer; 119 | 120 | printf ("created AudioBufferList\n"); 121 | 122 | // loop reading into the ABL until buffer is full 123 | UInt32 totalFramesRead = 0; 124 | do { 125 | UInt32 framesRead = fileLengthFrames - totalFramesRead; 126 | buffers->mBuffers[0].mData = player->sampleBuffer + (totalFramesRead * (sizeof(UInt16))); 127 | CheckError(ExtAudioFileRead(extAudioFile, 128 | &framesRead, 129 | buffers), 130 | "ExtAudioFileRead failed"); 131 | totalFramesRead += framesRead; 132 | printf ("read %d frames\n", framesRead); 133 | } while (totalFramesRead < fileLengthFrames); 134 | 135 | // can free the ABL; still have samples in sampleBuffer 136 | free(buffers); 137 | return noErr; 138 | } 139 | 140 | #pragma mark main 141 | 142 | int main (int argc, const char * argv[]) { 143 | MyLoopPlayer player; 144 | 145 | // convert to an OpenAL-friendly format and read into memory 146 | CheckError(loadLoopIntoBuffer(&player), 147 | "Couldn't load loop into buffer") ; 148 | 149 | // set up OpenAL buffer 150 | ALCdevice* alDevice = alcOpenDevice(NULL); 151 | CheckALError ("Couldn't open AL device"); // default device 152 | ALCcontext* alContext = alcCreateContext(alDevice, 0); 153 | CheckALError ("Couldn't open AL context"); 154 | alcMakeContextCurrent (alContext); 155 | CheckALError ("Couldn't make AL context current"); 156 | ALuint buffers[1]; 157 | alGenBuffers(1, buffers); 158 | CheckALError ("Couldn't generate buffers"); 159 | alBufferData(*buffers, 160 | AL_FORMAT_MONO16, 161 | player.sampleBuffer, 162 | player.bufferSizeBytes, 163 | player.dataFormat.mSampleRate); 164 | 165 | // AL copies the samples, so we can free them now 166 | free(player.sampleBuffer); 167 | 168 | // set up OpenAL source 169 | alGenSources(1, player.sources); 170 | CheckALError ("Couldn't generate sources"); 171 | alSourcei(player.sources[0], AL_LOOPING, AL_TRUE); 172 | CheckALError ("Couldn't set source looping property"); 173 | alSourcef(player.sources[0], AL_GAIN, AL_MAX_GAIN); 174 | CheckALError("Couldn't set source gain"); 175 | updateSourceLocation(player); 176 | CheckALError ("Couldn't set initial source position"); 177 | 178 | // connect buffer to source 179 | alSourcei(player.sources[0], AL_BUFFER, buffers[0]); 180 | CheckALError ("Couldn't connect buffer to source"); 181 | 182 | // set up listener 183 | alListener3f (AL_POSITION, 0.0, 0.0, 0.0); 184 | CheckALError("Couldn't set listner position"); 185 | 186 | // ALfloat listenerOrientation[6]; // 3 vectors: forward x,y,z components, then up x,y,z 187 | // listenerOrientation[2] = -1.0; 188 | // listenerOrientation[0] = listenerOrientation [1] = 0.0; 189 | // listenerOrientation[3] = listenerOrientation [4] = listenerOrientation[5] = 0.0; 190 | // alListenerfv (AL_ORIENTATION, listenerOrientation); 191 | 192 | // start playing 193 | // alSourcePlayv (1, player.sources); 194 | alSourcePlay(player.sources[0]); 195 | CheckALError ("Couldn't play"); 196 | 197 | // and wait 198 | printf("Playing...\n"); 199 | time_t startTime = time(NULL); 200 | do 201 | { 202 | // get next theta 203 | updateSourceLocation(player); 204 | CheckALError ("Couldn't set looping source position"); 205 | CFRunLoopRunInMode(kCFRunLoopDefaultMode, 0.1, false); 206 | } while (difftime(time(NULL), startTime) < RUN_TIME); 207 | 208 | // cleanup: 209 | alSourceStop(player.sources[0]); 210 | alDeleteSources(1, 
player.sources); 211 | alDeleteBuffers(1, buffers); 212 | alcDestroyContext(alContext); 213 | alcCloseDevice(alDevice); 214 | printf ("Bottom of main\n"); 215 | } -------------------------------------------------------------------------------- /CH10_iOSBackgroundingTone/CH10_iOSBackgroundingToneAppDelegate.m: -------------------------------------------------------------------------------- 1 | // 2 | // CH10_iOSBackgroundingToneAppDelegate.m 3 | // CH10_iOSBackgroundingTone 4 | // 5 | // Created by Chris Adamson on 4/22/11. 6 | // Copyright 2011 Subsequently and Furthermore, Inc. All rights reserved. 7 | // 8 | 9 | #import "CH10_iOSBackgroundingToneAppDelegate.h" 10 | 11 | #pragma mark - #defines 12 | 13 | #define FOREGROUND_FREQUENCY 880.0 14 | #define BACKGROUND_FREQUENCY 523.25 15 | #define BUFFER_COUNT 3 16 | #define BUFFER_DURATION 0.5 17 | 18 | 19 | @implementation CH10_iOSBackgroundingToneAppDelegate 20 | 21 | #pragma mark - @synthesizes 22 | 23 | @synthesize window=_window; 24 | @synthesize streamFormat=_streamFormat; 25 | @synthesize bufferSize; 26 | @synthesize currentFrequency; 27 | @synthesize startingFrameCount; 28 | @synthesize audioQueue; 29 | 30 | #pragma mark helpers 31 | 32 | // generic error handler - if err is nonzero, prints error message and exits program. 33 | static void CheckError(OSStatus error, const char *operation) 34 | { 35 | if (error == noErr) return; 36 | 37 | char str[20]; 38 | // see if it appears to be a 4-char-code 39 | *(UInt32 *)(str + 1) = CFSwapInt32HostToBig(error); 40 | if (isprint(str[1]) && isprint(str[2]) && isprint(str[3]) && isprint(str[4])) { 41 | str[0] = str[5] = '\''; 42 | str[6] = '\0'; 43 | } else 44 | // no, format it as an integer 45 | sprintf(str, "%d", (int)error); 46 | 47 | fprintf(stderr, "Error: %s (%s)\n", operation, str); 48 | 49 | exit(1); 50 | } 51 | 52 | 53 | #pragma mark callbacks 54 | -(OSStatus) fillBuffer: (AudioQueueBufferRef) buffer { 55 | 56 | double j = self.startingFrameCount; 57 | double cycleLength = 44100. / self.currentFrequency; 58 | int frame = 0; 59 | double frameCount = bufferSize / self.streamFormat.mBytesPerFrame; 60 | for (frame = 0; frame < frameCount; ++frame) 61 | { 62 | SInt16 *data = (SInt16*)buffer->mAudioData; 63 | (data)[frame] = (SInt16) (sin (2 * M_PI * (j / cycleLength)) * SHRT_MAX); 64 | 65 | j += 1.0; 66 | if (j > cycleLength) 67 | j -= cycleLength; 68 | } 69 | 70 | self.startingFrameCount = j; 71 | 72 | buffer->mAudioDataByteSize = bufferSize; 73 | 74 | return noErr; 75 | } 76 | 77 | static void MyAQOutputCallback(void *inUserData, AudioQueueRef inAQ, AudioQueueBufferRef inCompleteAQBuffer) 78 | { 79 | CH10_iOSBackgroundingToneAppDelegate *appDelegate = (__bridge CH10_iOSBackgroundingToneAppDelegate*)inUserData; 80 | CheckError([appDelegate fillBuffer: inCompleteAQBuffer], 81 | "can't refill buffer"); 82 | CheckError(AudioQueueEnqueueBuffer(inAQ, 83 | inCompleteAQBuffer, 84 | 0, 85 | NULL), 86 | "Couldn't enqueue buffer (refill)"); 87 | 88 | } 89 | 90 | static void MyInterruptionListener (void *inUserData, 91 | UInt32 inInterruptionState) { 92 | 93 | printf ("Interrupted! 
inInterruptionState=%u\n", (unsigned int)inInterruptionState); 94 | CH10_iOSBackgroundingToneAppDelegate *appDelegate = (__bridge CH10_iOSBackgroundingToneAppDelegate*)inUserData; 95 | switch (inInterruptionState) { 96 | case kAudioSessionBeginInterruption: 97 | break; 98 | case kAudioSessionEndInterruption: 99 | CheckError(AudioQueueStart(appDelegate.audioQueue, 0), 100 | "Couldn't restart audio queue"); 101 | break; 102 | default: 103 | break; 104 | }; 105 | } 106 | 107 | 108 | 109 | #pragma mark app lifecycle 110 | 111 | - (BOOL)application:(UIApplication *)application didFinishLaunchingWithOptions:(NSDictionary *)launchOptions 112 | { 113 | // set up audio session 114 | CheckError(AudioSessionInitialize(NULL, 115 | kCFRunLoopDefaultMode, 116 | MyInterruptionListener, 117 | (__bridge void *)(self)), 118 | "couldn't initialize audio session"); 119 | 120 | UInt32 category = kAudioSessionCategory_MediaPlayback; 121 | CheckError(AudioSessionSetProperty(kAudioSessionProperty_AudioCategory, 122 | sizeof(category), 123 | &category), 124 | "Couldn't set category on audio session"); 125 | 126 | // set stream format 127 | self.currentFrequency = FOREGROUND_FREQUENCY; 128 | _streamFormat.mSampleRate = 44100.0; 129 | _streamFormat.mFormatID = kAudioFormatLinearPCM; 130 | _streamFormat.mFormatFlags = kAudioFormatFlagsCanonical; 131 | _streamFormat.mChannelsPerFrame = 1; 132 | _streamFormat.mFramesPerPacket = 1; 133 | _streamFormat.mBitsPerChannel = 16; 134 | _streamFormat.mBytesPerFrame = 2; 135 | _streamFormat.mBytesPerPacket = 2; 136 | 137 | // create the audio queue 138 | CheckError( AudioQueueNewOutput(&_streamFormat, 139 | MyAQOutputCallback, 140 | (__bridge void *)(self), 141 | NULL, 142 | kCFRunLoopCommonModes, 143 | 0, 144 | &audioQueue), 145 | "Couldn't create output AudioQueue"); 146 | 147 | 148 | // create and enqueue buffers 149 | AudioQueueBufferRef buffers [BUFFER_COUNT]; 150 | bufferSize = BUFFER_DURATION * self.streamFormat.mSampleRate * self.streamFormat.mBytesPerFrame; 151 | NSLog (@"bufferSize is %u", (unsigned int)bufferSize); 152 | for (int i=0; i 2 | 3 | 4 | 800 5 | 10D540 6 | 760 7 | 1038.29 8 | 460.00 9 | 10 | com.apple.InterfaceBuilder.IBCocoaTouchPlugin 11 | 81 12 | 13 | 14 | YES 15 | 16 | 17 | 18 | YES 19 | com.apple.InterfaceBuilder.IBCocoaTouchPlugin 20 | 21 | 22 | YES 23 | 24 | YES 25 | 26 | 27 | YES 28 | 29 | 30 | 31 | YES 32 | 33 | IBFilesOwner 34 | IBCocoaTouchFramework 35 | 36 | 37 | IBFirstResponder 38 | IBCocoaTouchFramework 39 | 40 | 41 | IBCocoaTouchFramework 42 | 43 | 44 | 45 | 1316 46 | 47 | {320, 480} 48 | 49 | 50 | 1 51 | MSAxIDEAA 52 | 53 | NO 54 | NO 55 | 56 | IBCocoaTouchFramework 57 | YES 58 | 59 | 60 | 61 | 62 | YES 63 | 64 | 65 | delegate 66 | 67 | 68 | 69 | 4 70 | 71 | 72 | 73 | window 74 | 75 | 76 | 77 | 5 78 | 79 | 80 | 81 | 82 | YES 83 | 84 | 0 85 | 86 | 87 | 88 | 89 | 90 | 2 91 | 92 | 93 | YES 94 | 95 | 96 | 97 | 98 | -1 99 | 100 | 101 | File's Owner 102 | 103 | 104 | 3 105 | 106 | 107 | 108 | 109 | -2 110 | 111 | 112 | 113 | 114 | 115 | 116 | YES 117 | 118 | YES 119 | -1.CustomClassName 120 | -2.CustomClassName 121 | 2.IBAttributePlaceholdersKey 122 | 2.IBEditorWindowLastContentRect 123 | 2.IBPluginDependency 124 | 3.CustomClassName 125 | 3.IBPluginDependency 126 | 127 | 128 | YES 129 | UIApplication 130 | UIResponder 131 | 132 | YES 133 | 134 | 135 | YES 136 | 137 | 138 | {{198, 376}, {320, 480}} 139 | com.apple.InterfaceBuilder.IBCocoaTouchPlugin 140 | CH10_iOSPlayThroughAppDelegate 141 | com.apple.InterfaceBuilder.IBCocoaTouchPlugin 142 | 143 
| 144 | 145 | YES 146 | 147 | 148 | YES 149 | 150 | 151 | 152 | 153 | YES 154 | 155 | 156 | YES 157 | 158 | 159 | 160 | 9 161 | 162 | 163 | 164 | YES 165 | 166 | CH10_iOSPlayThroughAppDelegate 167 | NSObject 168 | 169 | window 170 | UIWindow 171 | 172 | 173 | IBProjectSource 174 | CH10_iOSPlayThroughAppDelegate.h 175 | 176 | 177 | 178 | CH10_iOSPlayThroughAppDelegate 179 | NSObject 180 | 181 | IBUserSource 182 | 183 | 184 | 185 | 186 | 187 | 0 188 | IBCocoaTouchFramework 189 | 190 | com.apple.InterfaceBuilder.CocoaTouchPlugin.InterfaceBuilder3 191 | 192 | 193 | YES 194 | CH10_iOSPlayThrough.xcodeproj 195 | 3 196 | 81 197 | 198 | 199 | -------------------------------------------------------------------------------- /CH10_iOSBackgroundingTone/en.lproj/MainWindow.xib: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 800 5 | 10D540 6 | 760 7 | 1038.29 8 | 460.00 9 | 10 | com.apple.InterfaceBuilder.IBCocoaTouchPlugin 11 | 81 12 | 13 | 14 | YES 15 | 16 | 17 | 18 | YES 19 | com.apple.InterfaceBuilder.IBCocoaTouchPlugin 20 | 21 | 22 | YES 23 | 24 | YES 25 | 26 | 27 | YES 28 | 29 | 30 | 31 | YES 32 | 33 | IBFilesOwner 34 | IBCocoaTouchFramework 35 | 36 | 37 | IBFirstResponder 38 | IBCocoaTouchFramework 39 | 40 | 41 | IBCocoaTouchFramework 42 | 43 | 44 | 45 | 1316 46 | 47 | {320, 480} 48 | 49 | 50 | 1 51 | MSAxIDEAA 52 | 53 | NO 54 | NO 55 | 56 | IBCocoaTouchFramework 57 | YES 58 | 59 | 60 | 61 | 62 | YES 63 | 64 | 65 | delegate 66 | 67 | 68 | 69 | 4 70 | 71 | 72 | 73 | window 74 | 75 | 76 | 77 | 5 78 | 79 | 80 | 81 | 82 | YES 83 | 84 | 0 85 | 86 | 87 | 88 | 89 | 90 | 2 91 | 92 | 93 | YES 94 | 95 | 96 | 97 | 98 | -1 99 | 100 | 101 | File's Owner 102 | 103 | 104 | 3 105 | 106 | 107 | 108 | 109 | -2 110 | 111 | 112 | 113 | 114 | 115 | 116 | YES 117 | 118 | YES 119 | -1.CustomClassName 120 | -2.CustomClassName 121 | 2.IBAttributePlaceholdersKey 122 | 2.IBEditorWindowLastContentRect 123 | 2.IBPluginDependency 124 | 3.CustomClassName 125 | 3.IBPluginDependency 126 | 127 | 128 | YES 129 | UIApplication 130 | UIResponder 131 | 132 | YES 133 | 134 | 135 | YES 136 | 137 | 138 | {{198, 376}, {320, 480}} 139 | com.apple.InterfaceBuilder.IBCocoaTouchPlugin 140 | CH10_iOSBackgroundingToneAppDelegate 141 | com.apple.InterfaceBuilder.IBCocoaTouchPlugin 142 | 143 | 144 | 145 | YES 146 | 147 | 148 | YES 149 | 150 | 151 | 152 | 153 | YES 154 | 155 | 156 | YES 157 | 158 | 159 | 160 | 9 161 | 162 | 163 | 164 | YES 165 | 166 | CH10_iOSBackgroundingToneAppDelegate 167 | NSObject 168 | 169 | window 170 | UIWindow 171 | 172 | 173 | IBProjectSource 174 | CH10_iOSBackgroundingToneAppDelegate.h 175 | 176 | 177 | 178 | CH10_iOSBackgroundingToneAppDelegate 179 | NSObject 180 | 181 | IBUserSource 182 | 183 | 184 | 185 | 186 | 187 | 0 188 | IBCocoaTouchFramework 189 | 190 | com.apple.InterfaceBuilder.CocoaTouchPlugin.InterfaceBuilder3 191 | 192 | 193 | YES 194 | CH10_iOSBackgroundingTone.xcodeproj 195 | 3 196 | 81 197 | 198 | 199 | -------------------------------------------------------------------------------- /CH12_MIDIToAUSamplerIOS/ch12-aupreset.aupreset: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | AU version 6 | 1 7 | Instrument 8 | 9 | Layers 10 | 11 | 12 | Amplifier 13 | 14 | ID 15 | 0 16 | enabled 17 | 18 | 19 | Connections 20 | 21 | 22 | ID 23 | 0 24 | bipolar 25 | 26 | control 27 | 0 28 | destination 29 | 816840704 30 | enabled 31 | 32 | inverse 33 | 34 | scale 35 | 12800 36 | source 37 | 300 38 | transform 39 | 1 40 
| 41 | 42 | ID 43 | 1 44 | bipolar 45 | 46 | control 47 | 0 48 | destination 49 | 1343225856 50 | enabled 51 | 52 | inverse 53 | 54 | scale 55 | -96 56 | source 57 | 301 58 | transform 59 | 2 60 | 61 | 62 | ID 63 | 2 64 | bipolar 65 | 66 | control 67 | 0 68 | destination 69 | 1343225856 70 | enabled 71 | 72 | inverse 73 | 74 | scale 75 | -96 76 | source 77 | 7 78 | transform 79 | 2 80 | 81 | 82 | ID 83 | 3 84 | bipolar 85 | 86 | control 87 | 0 88 | destination 89 | 1343225856 90 | enabled 91 | 92 | inverse 93 | 94 | scale 95 | -96 96 | source 97 | 11 98 | transform 99 | 2 100 | 101 | 102 | ID 103 | 4 104 | bipolar 105 | 106 | control 107 | 0 108 | destination 109 | 1344274432 110 | enabled 111 | 112 | inverse 113 | 114 | scale 115 | 0.5080000162124634 116 | source 117 | 10 118 | transform 119 | 1 120 | 121 | 122 | ID 123 | 7 124 | bipolar 125 | 126 | control 127 | 241 128 | destination 129 | 816840704 130 | enabled 131 | 132 | inverse 133 | 134 | scale 135 | 12800 136 | source 137 | 224 138 | transform 139 | 1 140 | 141 | 142 | ID 143 | 8 144 | bipolar 145 | 146 | control 147 | 0 148 | destination 149 | 816840704 150 | enabled 151 | 152 | inverse 153 | 154 | scale 155 | 100 156 | source 157 | 242 158 | transform 159 | 1 160 | 161 | 162 | ID 163 | 6 164 | bipolar 165 | 166 | control 167 | 1 168 | destination 169 | 816840704 170 | enabled 171 | 172 | inverse 173 | 174 | scale 175 | 50 176 | source 177 | 268435456 178 | transform 179 | 1 180 | 181 | 182 | ID 183 | 5 184 | bipolar 185 | 186 | control 187 | 0 188 | destination 189 | 1343225856 190 | enabled 191 | 192 | inverse 193 | 194 | scale 195 | -96 196 | source 197 | 536870912 198 | transform 199 | 1 200 | 201 | 202 | Envelopes 203 | 204 | 205 | ID 206 | 0 207 | Stages 208 | 209 | 210 | curve 211 | 20 212 | stage 213 | 0 214 | time 215 | 0 216 | 217 | 218 | curve 219 | 22 220 | stage 221 | 1 222 | time 223 | 0 224 | 225 | 226 | curve 227 | 20 228 | stage 229 | 2 230 | time 231 | 0 232 | 233 | 234 | curve 235 | 20 236 | stage 237 | 3 238 | time 239 | 0 240 | 241 | 242 | level 243 | 1 244 | stage 245 | 4 246 | 247 | 248 | curve 249 | 20 250 | stage 251 | 5 252 | time 253 | 0 254 | 255 | 256 | curve 257 | 20 258 | stage 259 | 6 260 | time 261 | 0.004999999888241291 262 | 263 | 264 | enabled 265 | 266 | 267 | 268 | Filters 269 | 270 | ID 271 | 0 272 | cutoff 273 | 10000 274 | enabled 275 | 276 | resonance 277 | 0 278 | 279 | ID 280 | 0 281 | LFOs 282 | 283 | 284 | ID 285 | 0 286 | enabled 287 | 288 | 289 | 290 | Oscillator 291 | 292 | ID 293 | 0 294 | enabled 295 | 296 | 297 | Zones 298 | 299 | 300 | ID 301 | 0 302 | enabled 303 | 304 | loop enabled 305 | 306 | root key 307 | 36 308 | waveform 309 | 1 310 | 311 | 312 | 313 | 314 | name 315 | Default Instrument 316 | 317 | coarse tune 318 | 0 319 | data 320 | AAAAAAAAAAAAAAAEAAADhAAAAAAAAAOFAAAAAAAAA4YAAAAAAAADhwAAAAA= 321 | file-references 322 | 323 | Sample:1 324 | /Users/cadamson/Library/Audio/Sounds/chris-coreaudio-c2.caf 325 | 326 | fine tune 327 | 0 328 | gain 329 | 0 330 | manufacturer 331 | 1634758764 332 | name 333 | ch12-aupreset 334 | output 335 | 0 336 | pan 337 | 0 338 | subtype 339 | 1935764848 340 | type 341 | 1635085685 342 | version 343 | 0 344 | voice count 345 | 64 346 | 347 | 348 | -------------------------------------------------------------------------------- /CH12_MIDIToAUSampler/ch12-aupreset.aupreset: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | AU version 6 | 1 7 | Instrument 8 | 9 | Layers 10 | 11 | 12 | 
Amplifier 13 | 14 | ID 15 | 0 16 | enabled 17 | 18 | 19 | Connections 20 | 21 | 22 | ID 23 | 0 24 | bipolar 25 | 26 | control 27 | 0 28 | destination 29 | 816840704 30 | enabled 31 | 32 | inverse 33 | 34 | scale 35 | 12800 36 | source 37 | 300 38 | transform 39 | 1 40 | 41 | 42 | ID 43 | 1 44 | bipolar 45 | 46 | control 47 | 0 48 | destination 49 | 1343225856 50 | enabled 51 | 52 | inverse 53 | 54 | scale 55 | -96 56 | source 57 | 301 58 | transform 59 | 2 60 | 61 | 62 | ID 63 | 2 64 | bipolar 65 | 66 | control 67 | 0 68 | destination 69 | 1343225856 70 | enabled 71 | 72 | inverse 73 | 74 | scale 75 | -96 76 | source 77 | 7 78 | transform 79 | 2 80 | 81 | 82 | ID 83 | 3 84 | bipolar 85 | 86 | control 87 | 0 88 | destination 89 | 1343225856 90 | enabled 91 | 92 | inverse 93 | 94 | scale 95 | -96 96 | source 97 | 11 98 | transform 99 | 2 100 | 101 | 102 | ID 103 | 4 104 | bipolar 105 | 106 | control 107 | 0 108 | destination 109 | 1344274432 110 | enabled 111 | 112 | inverse 113 | 114 | scale 115 | 0.50800001621246338 116 | source 117 | 10 118 | transform 119 | 1 120 | 121 | 122 | ID 123 | 7 124 | bipolar 125 | 126 | control 127 | 241 128 | destination 129 | 816840704 130 | enabled 131 | 132 | inverse 133 | 134 | scale 135 | 12800 136 | source 137 | 224 138 | transform 139 | 1 140 | 141 | 142 | ID 143 | 8 144 | bipolar 145 | 146 | control 147 | 0 148 | destination 149 | 816840704 150 | enabled 151 | 152 | inverse 153 | 154 | scale 155 | 100 156 | source 157 | 242 158 | transform 159 | 1 160 | 161 | 162 | ID 163 | 6 164 | bipolar 165 | 166 | control 167 | 1 168 | destination 169 | 816840704 170 | enabled 171 | 172 | inverse 173 | 174 | scale 175 | 50 176 | source 177 | 268435456 178 | transform 179 | 1 180 | 181 | 182 | ID 183 | 5 184 | bipolar 185 | 186 | control 187 | 0 188 | destination 189 | 1343225856 190 | enabled 191 | 192 | inverse 193 | 194 | scale 195 | -96 196 | source 197 | 536870912 198 | transform 199 | 1 200 | 201 | 202 | Envelopes 203 | 204 | 205 | ID 206 | 0 207 | Stages 208 | 209 | 210 | curve 211 | 20 212 | stage 213 | 0 214 | time 215 | 0.0 216 | 217 | 218 | curve 219 | 22 220 | stage 221 | 1 222 | time 223 | 0.0 224 | 225 | 226 | curve 227 | 20 228 | stage 229 | 2 230 | time 231 | 0.0 232 | 233 | 234 | curve 235 | 20 236 | stage 237 | 3 238 | time 239 | 0.0 240 | 241 | 242 | level 243 | 1 244 | stage 245 | 4 246 | 247 | 248 | curve 249 | 20 250 | stage 251 | 5 252 | time 253 | 0.0 254 | 255 | 256 | curve 257 | 20 258 | stage 259 | 6 260 | time 261 | 0.004999999888241291 262 | 263 | 264 | enabled 265 | 266 | 267 | 268 | Filters 269 | 270 | ID 271 | 0 272 | cutoff 273 | 10000 274 | enabled 275 | 276 | resonance 277 | 0.0 278 | 279 | ID 280 | 0 281 | LFOs 282 | 283 | 284 | ID 285 | 0 286 | enabled 287 | 288 | 289 | 290 | Oscillator 291 | 292 | ID 293 | 0 294 | enabled 295 | 296 | 297 | Zones 298 | 299 | 300 | ID 301 | 0 302 | enabled 303 | 304 | loop enabled 305 | 306 | root key 307 | 36 308 | waveform 309 | 1 310 | 311 | 312 | 313 | 314 | name 315 | Default Instrument 316 | 317 | coarse tune 318 | 0 319 | data 320 | 321 | AAAAAAAAAAAAAAAEAAADhAAAAAAAAAOFAAAAAAAAA4YAAAAAAAADhwAAAAA= 322 | 323 | file-references 324 | 325 | Sample:1 326 | /Users/cadamson/Library/Audio/Sounds/chris-coreaudio-c2.caf 327 | 328 | fine tune 329 | 0.0 330 | gain 331 | 0.0 332 | manufacturer 333 | 1634758764 334 | name 335 | ch12-aupreset 336 | output 337 | 0 338 | pan 339 | 0.0 340 | subtype 341 | 1935764848 342 | type 343 | 1635085685 344 | version 345 | 0 346 | voice count 347 | 64 348 | 349 | 350 
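The "type", "subtype", and "manufacturer" entries near the end of this preset are four-character codes stored as plain integers: 1635085685 is 'aumu' (kAudioUnitType_MusicDevice), 1935764848 is 'samp' (kAudioUnitSubType_Sampler), and 1634758764 is 'appl' (kAudioUnitManufacturer_Apple), matching the AUSampler instrument that the CH12 app delegate adds to its AUGraph. Below is a minimal sketch for decoding such values, using the same byte-swap-and-print idea the CheckError handlers in these examples apply to OSStatus codes; the FourCCToString helper is invented here for illustration and is not part of the project.

#include <stdio.h>
#include <string.h>
#include <CoreFoundation/CoreFoundation.h>

// hypothetical helper: unpack a four-char code held in a UInt32 into a printable C string
static void FourCCToString(UInt32 code, char outString[5]) {
    UInt32 bigEndian = CFSwapInt32HostToBig(code); // bytes now sit in memory in character order
    memcpy(outString, &bigEndian, 4);
    outString[4] = '\0';
}

int main(void) {
    char code[5];
    FourCCToString(1635085685, code); printf("type:         '%s'\n", code); // 'aumu', kAudioUnitType_MusicDevice
    FourCCToString(1935764848, code); printf("subtype:      '%s'\n", code); // 'samp', kAudioUnitSubType_Sampler
    FourCCToString(1634758764, code); printf("manufacturer: '%s'\n", code); // 'appl', kAudioUnitManufacturer_Apple
    return 0;
}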
| -------------------------------------------------------------------------------- /CH09_OpenALOrbitStream/main.c: -------------------------------------------------------------------------------- 1 | #import 2 | #import 3 | #import 4 | 5 | #define STREAM_PATH CFSTR ("/Library/Audio/Apple Loops/Apple/iLife Sound Effects/Jingles/Kickflip Long.caf") 6 | //#define STREAM_PATH CFSTR ("/Volumes/Sephiroth/Tunes/Yes/Highlights - The Very Best Of Yes/Long Distance Runaround.m4a") 7 | 8 | #define ORBIT_SPEED 1 9 | #define BUFFER_DURATION_SECONDS 1.0 10 | #define BUFFER_COUNT 3 11 | #define RUN_TIME 20.0 12 | 13 | typedef struct MyStreamPlayer { 14 | AudioStreamBasicDescription dataFormat; 15 | UInt32 bufferSizeBytes; 16 | SInt64 fileLengthFrames; 17 | SInt64 totalFramesRead; 18 | ALuint sources[1]; 19 | // ALuint buffers[BUFFER_COUNT]; // might need this back for static buffers in ch12/13? 20 | ExtAudioFileRef extAudioFile; 21 | } MyStreamPlayer; 22 | 23 | void updateSourceLocation (MyStreamPlayer player); 24 | OSStatus setUpExtAudioFile (MyStreamPlayer* player); 25 | void refillALBuffers (MyStreamPlayer* player); 26 | void fillALBuffer (MyStreamPlayer* player, ALuint alBuffer); 27 | 28 | #pragma mark - utility functions - 29 | // generic error handler - if err is nonzero, prints error message and exits program. 30 | static void CheckError(OSStatus error, const char *operation) 31 | { 32 | if (error == noErr) return; 33 | 34 | char str[20]; 35 | // see if it appears to be a 4-char-code 36 | *(UInt32 *)(str + 1) = CFSwapInt32HostToBig(error); 37 | if (isprint(str[1]) && isprint(str[2]) && isprint(str[3]) && isprint(str[4])) { 38 | str[0] = str[5] = '\''; 39 | str[6] = '\0'; 40 | } else 41 | // no, format it as an integer 42 | sprintf(str, "%d", (int)error); 43 | 44 | fprintf(stderr, "Error: %s (%s)\n", operation, str); 45 | 46 | exit(1); 47 | } 48 | 49 | static void CheckALError (const char *operation) { 50 | ALenum alErr = alGetError(); 51 | if (alErr == AL_NO_ERROR) return; 52 | char *errFormat = NULL; 53 | switch (alErr) { 54 | case AL_INVALID_NAME: errFormat = "OpenAL Error: %s (AL_INVALID_NAME)"; break; 55 | case AL_INVALID_VALUE: errFormat = "OpenAL Error: %s (AL_INVALID_VALUE)"; break; 56 | case AL_INVALID_ENUM: errFormat = "OpenAL Error: %s (AL_INVALID_ENUM)"; break; 57 | case AL_INVALID_OPERATION: errFormat = "OpenAL Error: %s (AL_INVALID_OPERATION)"; break; 58 | case AL_OUT_OF_MEMORY: errFormat = "OpenAL Error: %s (AL_OUT_OF_MEMORY)"; break; 59 | default: errFormat = "OpenAL Error: %s (unknown error code)"; break; 60 | } 61 | fprintf (stderr, errFormat, operation); 62 | exit(1); 63 | 64 | } 65 | 66 | void updateSourceLocation (MyStreamPlayer player) { 67 | double theta = fmod (CFAbsoluteTimeGetCurrent() * ORBIT_SPEED, M_PI * 2); 68 | // printf ("%f\n", theta); 69 | ALfloat x = 3 * cos (theta); 70 | ALfloat y = 0.5 * sin (theta); 71 | ALfloat z = 1.0 * sin (theta); 72 | printf ("x=%f, y=%f, z=%f\n", x, y, z); 73 | alSource3f(player.sources[0], AL_POSITION, x, y, z); 74 | } 75 | 76 | 77 | OSStatus setUpExtAudioFile (MyStreamPlayer* player) { 78 | CFURLRef streamFileURL = CFURLCreateWithFileSystemPath(kCFAllocatorDefault, 79 | STREAM_PATH, 80 | kCFURLPOSIXPathStyle, 81 | false); 82 | 83 | // describe the client format - AL needs mono 84 | memset(&player->dataFormat, 0, sizeof(player->dataFormat)); 85 | player->dataFormat.mFormatID = kAudioFormatLinearPCM; 86 | player->dataFormat.mFormatFlags = kAudioFormatFlagIsSignedInteger | kAudioFormatFlagIsPacked; 87 | player->dataFormat.mSampleRate = 
44100.0; 88 | player->dataFormat.mChannelsPerFrame = 1; 89 | player->dataFormat.mFramesPerPacket = 1; 90 | player->dataFormat.mBitsPerChannel = 16; 91 | player->dataFormat.mBytesPerFrame = 2; 92 | player->dataFormat.mBytesPerPacket = 2; 93 | 94 | CheckError (ExtAudioFileOpenURL(streamFileURL, &player->extAudioFile), 95 | "Couldn't open ExtAudioFile for reading"); 96 | 97 | // tell extAudioFile about our format 98 | CheckError(ExtAudioFileSetProperty(player->extAudioFile, 99 | kExtAudioFileProperty_ClientDataFormat, 100 | sizeof (AudioStreamBasicDescription), 101 | &player->dataFormat), 102 | "Couldn't set client format on ExtAudioFile"); 103 | 104 | // figure out how big file is 105 | UInt32 propSize = sizeof (player->fileLengthFrames); 106 | ExtAudioFileGetProperty(player->extAudioFile, 107 | kExtAudioFileProperty_FileLengthFrames, 108 | &propSize, 109 | &player->fileLengthFrames); 110 | 111 | printf ("fileLengthFrames = %lld frames\n", player->fileLengthFrames); 112 | 113 | player->bufferSizeBytes = BUFFER_DURATION_SECONDS * 114 | player->dataFormat.mSampleRate * 115 | player->dataFormat.mBytesPerFrame; 116 | 117 | printf ("bufferSizeBytes = %d\n", player->bufferSizeBytes); 118 | 119 | printf ("Bottom of setUpExtAudioFile\n"); 120 | return noErr; 121 | } 122 | 123 | void fillALBuffer (MyStreamPlayer* player, ALuint alBuffer) { 124 | AudioBufferList *bufferList; 125 | UInt32 ablSize = offsetof(AudioBufferList, mBuffers[0]) + (sizeof(AudioBuffer) * 1); // 1 channel 126 | bufferList = malloc (ablSize); 127 | 128 | // allocate sample buffer 129 | UInt16 *sampleBuffer = malloc(sizeof(UInt16) * player->bufferSizeBytes); // 4/18/11 - fix 2 130 | 131 | bufferList->mNumberBuffers = 1; 132 | bufferList->mBuffers[0].mNumberChannels = 1; 133 | bufferList->mBuffers[0].mDataByteSize = player->bufferSizeBytes; 134 | bufferList->mBuffers[0].mData = sampleBuffer; 135 | printf ("allocated %d byte buffer for ABL\n", 136 | player->bufferSizeBytes); 137 | 138 | // read from ExtAudioFile into sampleBuffer 139 | // TODO: handle end-of-file wraparound 140 | UInt32 framesReadIntoBuffer = 0; 141 | do { 142 | UInt32 framesRead = player->fileLengthFrames - framesReadIntoBuffer; 143 | bufferList->mBuffers[0].mData = sampleBuffer + (framesReadIntoBuffer * (sizeof(UInt16))); // 4/18/11 - fix 3 144 | CheckError(ExtAudioFileRead(player->extAudioFile, 145 | &framesRead, 146 | bufferList), 147 | "ExtAudioFileRead failed"); 148 | framesReadIntoBuffer += framesRead; 149 | player->totalFramesRead += framesRead; 150 | printf ("read %d frames\n", framesRead); 151 | } while (framesReadIntoBuffer < (player->bufferSizeBytes / sizeof(UInt16))); // 4/18/11 - fix 4 152 | 153 | // copy from sampleBuffer to AL buffer 154 | alBufferData(alBuffer, 155 | AL_FORMAT_MONO16, 156 | sampleBuffer, 157 | player->bufferSizeBytes, 158 | player->dataFormat.mSampleRate); 159 | 160 | free (bufferList); 161 | free (sampleBuffer); 162 | } 163 | 164 | void refillALBuffers (MyStreamPlayer* player) { 165 | ALint processed; 166 | alGetSourcei (player->sources[0], AL_BUFFERS_PROCESSED, &processed); 167 | CheckALError ("couldn't get al_buffers_processed"); 168 | 169 | while (processed > 0) { 170 | ALuint freeBuffer; 171 | alSourceUnqueueBuffers(player->sources[0], 1, &freeBuffer); 172 | CheckALError("couldn't unqueue buffer"); 173 | printf ("refilling buffer %d\n", freeBuffer); 174 | fillALBuffer(player, freeBuffer); 175 | alSourceQueueBuffers(player->sources[0], 1, &freeBuffer); 176 | CheckALError ("couldn't queue refilled buffer"); 177 | printf 
("re-queued buffer %d\n", freeBuffer); 178 | processed--; 179 | } 180 | 181 | } 182 | 183 | #pragma mark main 184 | 185 | int main (int argc, const char * argv[]) { 186 | MyStreamPlayer player; 187 | 188 | // prepare the ExtAudioFile for reading 189 | CheckError(setUpExtAudioFile(&player), 190 | "Couldn't open ExtAudioFile") ; 191 | 192 | // set up OpenAL buffers 193 | ALCdevice* alDevice = alcOpenDevice(NULL); 194 | CheckALError ("Couldn't open AL device"); // default device 195 | ALCcontext* alContext = alcCreateContext(alDevice, 0); 196 | CheckALError ("Couldn't open AL context"); 197 | alcMakeContextCurrent (alContext); 198 | CheckALError ("Couldn't make AL context current"); 199 | ALuint buffers[BUFFER_COUNT]; 200 | alGenBuffers(BUFFER_COUNT, buffers); 201 | CheckALError ("Couldn't generate buffers"); 202 | 203 | for (int i=0; i 11 | #import 12 | 13 | #pragma mark - state struct 14 | typedef struct MyMIDIPlayer { 15 | AUGraph graph; 16 | AudioUnit instrumentUnit; 17 | } MyMIDIPlayer; 18 | 19 | #pragma mark - forward declarations 20 | void setupMIDI(MyMIDIPlayer *player); 21 | void setupAUGraph(MyMIDIPlayer *player); 22 | static void MyMIDIReadProc(const MIDIPacketList *pktlist, void *refCon, void *connRefCon); 23 | void MyMIDINotifyProc (const MIDINotification *message, void *refCon); 24 | 25 | 26 | @interface AppDelegate() 27 | @property MyMIDIPlayer player; 28 | @end 29 | 30 | @implementation AppDelegate 31 | 32 | @synthesize window = _window; 33 | @synthesize player = _player; 34 | 35 | 36 | 37 | #pragma mark utility functions 38 | static void CheckError(OSStatus error, const char *operation) 39 | { 40 | if (error == noErr) return; 41 | 42 | char str[20]; 43 | // see if it appears to be a 4-char-code 44 | *(UInt32 *)(str + 1) = CFSwapInt32HostToBig(error); 45 | if (isprint(str[1]) && isprint(str[2]) && isprint(str[3]) && isprint(str[4])) { 46 | str[0] = str[5] = '\''; 47 | str[6] = '\0'; 48 | } else 49 | // no, format it as an integer 50 | sprintf(str, "%d", (int)error); 51 | 52 | fprintf(stderr, "Error: %s (%s)\n", operation, str); 53 | 54 | exit(1); 55 | } 56 | 57 | #pragma mark - callbacks 58 | static void MyMIDIReadProc(const MIDIPacketList *pktlist, void *refCon, void *connRefCon) { 59 | MyMIDIPlayer *player = (MyMIDIPlayer*) refCon; 60 | NSLog (@"MyMIDIReadProc\n"); 61 | 62 | MIDIPacket *packet = (MIDIPacket *)pktlist->packet; 63 | for (int i=0; i < pktlist->numPackets; i++) { 64 | NSLog (@"i=%d", i); 65 | Byte midiStatus = packet->data[0]; 66 | Byte midiCommand = midiStatus >> 4; 67 | // is it a note-on or note-off 68 | if ((midiCommand == 0x09) || 69 | (midiCommand == 0x08)) { 70 | Byte note = packet->data[1] & 0x7F; 71 | Byte velocity = packet->data[2] & 0x7F; 72 | NSLog(@"midiCommand=%d. 
Note=%d, Velocity=%d\n", midiCommand, note, velocity); 73 | 74 | // send to augraph 75 | CheckError(MusicDeviceMIDIEvent (player->instrumentUnit, 76 | midiStatus, 77 | note, 78 | velocity, 79 | 0), 80 | "Couldn't send MIDI event"); 81 | 82 | } 83 | packet = MIDIPacketNext(packet); 84 | } 85 | } 86 | 87 | 88 | void MyMIDINotifyProc (const MIDINotification *message, void *refCon) { 89 | printf("MIDI Notify, messageId=%d,", (int)message->messageID); 90 | } 91 | 92 | 93 | 94 | #pragma mark - augraph 95 | void setupAUGraph(MyMIDIPlayer *player) { 96 | 97 | CheckError(NewAUGraph(&player->graph), 98 | "Couldn't open AU Graph"); 99 | 100 | // generate description that will match our output device (speakers) 101 | AudioComponentDescription outputcd = {0}; 102 | outputcd.componentType = kAudioUnitType_Output; 103 | outputcd.componentSubType = kAudioUnitSubType_RemoteIO; 104 | outputcd.componentManufacturer = kAudioUnitManufacturer_Apple; 105 | 106 | // adds a node with above description to the graph 107 | AUNode outputNode; 108 | CheckError(AUGraphAddNode(player->graph, &outputcd, &outputNode), 109 | "AUGraphAddNode[kAudioUnitSubType_DefaultOutput] failed"); 110 | 111 | 112 | AudioComponentDescription instrumentcd = {0}; 113 | instrumentcd.componentManufacturer = kAudioUnitManufacturer_Apple; 114 | instrumentcd.componentType = kAudioUnitType_MusicDevice; 115 | instrumentcd.componentSubType = kAudioUnitSubType_Sampler; // changed! 116 | 117 | AUNode instrumentNode; 118 | CheckError(AUGraphAddNode(player->graph, &instrumentcd, &instrumentNode), 119 | "AUGraphAddNode[kAudioUnitSubType_DLSSynth] failed"); 120 | 121 | // opening the graph opens all contained audio units but does not allocate any resources yet 122 | CheckError(AUGraphOpen(player->graph), 123 | "AUGraphOpen failed"); 124 | 125 | // get the reference to the AudioUnit object for the instrument graph node 126 | CheckError(AUGraphNodeInfo(player->graph, instrumentNode, NULL, &player->instrumentUnit), 127 | "AUGraphNodeInfo failed"); 128 | 129 | // connect the output source of the instrument AU to the input source of the output node 130 | CheckError(AUGraphConnectNodeInput(player->graph, instrumentNode, 0, outputNode, 0), 131 | "AUGraphConnectNodeInput"); 132 | 133 | // now initialize the graph (causes resources to be allocated) 134 | CheckError(AUGraphInitialize(player->graph), 135 | "AUGraphInitialize failed"); 136 | 137 | 138 | // configure the AUSampler 139 | 140 | // TODO: path to .aupreset file 141 | NSString *presetPath = [[NSBundle mainBundle] pathForResource:@"ch12-aupreset" 142 | ofType:@"aupreset"]; 143 | const char* presetPathC = [presetPath cStringUsingEncoding:NSUTF8StringEncoding]; 144 | NSLog (@"presetPathC: %s", presetPathC); 145 | 146 | CFURLRef presetURL = CFURLCreateFromFileSystemRepresentation( 147 | kCFAllocatorDefault, 148 | presetPathC, 149 | [presetPath length], 150 | false); 151 | 152 | // load preset file into a CFDataRef 153 | CFDataRef presetData = NULL; 154 | SInt32 errorCode = noErr; 155 | Boolean gotPresetData = 156 | CFURLCreateDataAndPropertiesFromResource(kCFAllocatorSystemDefault, 157 | presetURL, 158 | &presetData, 159 | NULL, 160 | NULL, 161 | &errorCode); 162 | CheckError(errorCode, "couldn't load .aupreset data"); 163 | CheckError(!gotPresetData, "couldn't load .aupreset data"); 164 | 165 | // convert this into a property list 166 | CFPropertyListFormat presetPlistFormat = {0}; 167 | CFErrorRef presetPlistError = NULL; 168 | CFPropertyListRef presetPlist = 
CFPropertyListCreateWithData(kCFAllocatorSystemDefault, 169 | presetData, 170 | kCFPropertyListImmutable, 171 | &presetPlistFormat, 172 | &presetPlistError); 173 | if (presetPlistError) { 174 | printf ("Couldn't create plist object for .aupreset"); 175 | return; 176 | } 177 | 178 | // set this plist as the kAudioUnitProperty_ClassInfo on _auSampler 179 | if (presetPlist) { 180 | CheckError(AudioUnitSetProperty(player->instrumentUnit, 181 | kAudioUnitProperty_ClassInfo, 182 | kAudioUnitScope_Global, 183 | 0, 184 | &presetPlist, 185 | sizeof(presetPlist)), 186 | "Couldn't set aupreset plist as sampler's class info"); 187 | } 188 | 189 | NSLog (@"AUGraph ready"); 190 | CAShow(player->graph); 191 | 192 | } 193 | 194 | #pragma mark - midi 195 | void setupMIDI(MyMIDIPlayer *player) { 196 | 197 | MIDIClientRef client; 198 | CheckError (MIDIClientCreate(CFSTR("Core MIDI to System Sounds Demo"), MyMIDINotifyProc, player, &client), 199 | "Couldn't create MIDI client"); 200 | 201 | MIDIPortRef inPort; 202 | CheckError (MIDIInputPortCreate(client, CFSTR("Input port"), MyMIDIReadProc, player, &inPort), 203 | "Couldn't create MIDI input port"); 204 | 205 | unsigned long sourceCount = MIDIGetNumberOfSources(); 206 | printf ("%ld sources\n", sourceCount); 207 | for (int i = 0; i < sourceCount; ++i) { 208 | MIDIEndpointRef src = MIDIGetSource(i); 209 | CFStringRef endpointName = NULL; 210 | CheckError(MIDIObjectGetStringProperty(src, kMIDIPropertyName, &endpointName), 211 | "Couldn't get endpoint name"); 212 | char endpointNameC[255]; 213 | CFStringGetCString(endpointName, endpointNameC, 255, kCFStringEncodingUTF8); 214 | printf(" source %d: %s\n", i, endpointNameC); 215 | CheckError (MIDIPortConnectSource(inPort, src, NULL), 216 | "Couldn't connect MIDI port"); 217 | } 218 | 219 | NSLog (@"MIDI ready"); 220 | 221 | } 222 | 223 | 224 | - (BOOL)application:(UIApplication *)application didFinishLaunchingWithOptions:(NSDictionary *)launchOptions 225 | { 226 | self.window = [[UIWindow alloc] initWithFrame:[[UIScreen mainScreen] bounds]]; 227 | 228 | setupAUGraph(&_player); 229 | setupMIDI(&_player); 230 | 231 | CheckError (AUGraphStart(_player.graph), 232 | "couldn't start graph"); 233 | 234 | 235 | self.window.backgroundColor = [UIColor whiteColor]; 236 | [self.window makeKeyAndVisible]; 237 | NSLog(@"MIDI app running\n"); 238 | return YES; 239 | } 240 | 241 | - (void)applicationWillResignActive:(UIApplication *)application 242 | { 243 | /* 244 | Sent when the application is about to move from active to inactive state. This can occur for certain types of temporary interruptions (such as an incoming phone call or SMS message) or when the user quits the application and it begins the transition to the background state. 245 | Use this method to pause ongoing tasks, disable timers, and throttle down OpenGL ES frame rates. Games should use this method to pause the game. 246 | */ 247 | } 248 | 249 | - (void)applicationDidEnterBackground:(UIApplication *)application 250 | { 251 | /* 252 | Use this method to release shared resources, save user data, invalidate timers, and store enough application state information to restore your application to its current state in case it is terminated later. 253 | If your application supports background execution, this method is called instead of applicationWillTerminate: when the user quits. 
254 | */ 255 | } 256 | 257 | - (void)applicationWillEnterForeground:(UIApplication *)application 258 | { 259 | /* 260 | Called as part of the transition from the background to the inactive state; here you can undo many of the changes made on entering the background. 261 | */ 262 | } 263 | 264 | - (void)applicationDidBecomeActive:(UIApplication *)application 265 | { 266 | /* 267 | Restart any tasks that were paused (or not yet started) while the application was inactive. If the application was previously in the background, optionally refresh the user interface. 268 | */ 269 | } 270 | 271 | - (void)applicationWillTerminate:(UIApplication *)application 272 | { 273 | /* 274 | Called when the application is about to terminate. 275 | Save data if appropriate. 276 | See also applicationDidEnterBackground:. 277 | */ 278 | } 279 | 280 | @end 281 | -------------------------------------------------------------------------------- /CH05_Player/main.c: -------------------------------------------------------------------------------- 1 | #include 2 | 3 | #define kPlaybackFileLocation CFSTR("/Insert/Path/To/Audio/File.xxx") 4 | //#define kPlaybackFileLocation CFSTR("/Users/cadamson/Library/Developer/Xcode/DerivedData/CH04_Recorder-dvninfofohfiwcgyndnhzarhsipp/Build/Products/Debug/output.caf") 5 | //#define kPlaybackFileLocation CFSTR("/Users/cadamson/audiofile.m4a") 6 | //#define kPlaybackFileLocation CFSTR("/Volumes/Sephiroth/iTunes/iTunes Media/Music/The Tubes/Tubes World Tour 2001/Wild Women of Wongo.m4p") 7 | //#define kPlaybackFileLocation CFSTR("/Volumes/Sephiroth/iTunes/iTunes Media/Music/Compilations/ESCAFLOWNE - ORIGINAL MOVIE SOUNDTRACK/21 We're flying.m4a") 8 | 9 | void CalculateBytesForTime (AudioFileID inAudioFile, AudioStreamBasicDescription inDesc, Float64 inSeconds, UInt32 *outBufferSize, UInt32 *outNumPackets); 10 | 11 | 12 | #define kNumberPlaybackBuffers 3 13 | typedef struct MyPlayer { 14 | // AudioQueueRef queue; // the audio queue object 15 | // AudioStreamBasicDescription dataFormat; // file's data stream description 16 | AudioFileID playbackFile; // reference to your output file 17 | SInt64 packetPosition; // current packet index in output file 18 | UInt32 numPacketsToRead; // number of packets to read from file 19 | AudioStreamPacketDescription *packetDescs; // array of packet descriptions for read buffer 20 | // AudioQueueBufferRef buffers[kNumberPlaybackBuffers]; 21 | Boolean isDone; // playback has completed 22 | } MyPlayer; 23 | 24 | 25 | #pragma mark - utility functions - 26 | 27 | // generic error handler - if err is nonzero, prints error message and exits program. 
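// (Core Audio OSStatus values are frequently four-character codes, e.g. 'fmt?' for an unsupported
// data format; the function below byte-swaps the error and prints it as characters when all four
// bytes are printable, and falls back to printing it as a plain integer otherwise.)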
28 | static void CheckError(OSStatus error, const char *operation) 29 | { 30 | if (error == noErr) return; 31 | 32 | char str[20]; 33 | // see if it appears to be a 4-char-code 34 | *(UInt32 *)(str + 1) = CFSwapInt32HostToBig(error); 35 | if (isprint(str[1]) && isprint(str[2]) && isprint(str[3]) && isprint(str[4])) { 36 | str[0] = str[5] = '\''; 37 | str[6] = '\0'; 38 | } else 39 | // no, format it as an integer 40 | sprintf(str, "%d", (int)error); 41 | 42 | fprintf(stderr, "Error: %s (%s)\n", operation, str); 43 | 44 | exit(1); 45 | } 46 | 47 | // we only use time here as a guideline 48 | // we're really trying to get somewhere between 16K and 64K buffers, but not allocate too much if we don't need it 49 | void CalculateBytesForTime (AudioFileID inAudioFile, AudioStreamBasicDescription inDesc, Float64 inSeconds, UInt32 *outBufferSize, UInt32 *outNumPackets) 50 | { 51 | 52 | // we need to calculate how many packets we read at a time, and how big a buffer we need. 53 | // we base this on the size of the packets in the file and an approximate duration for each buffer. 54 | // 55 | // first check to see what the max size of a packet is, if it is bigger than our default 56 | // allocation size, that needs to become larger 57 | UInt32 maxPacketSize; 58 | UInt32 propSize = sizeof(maxPacketSize); 59 | CheckError(AudioFileGetProperty(inAudioFile, kAudioFilePropertyPacketSizeUpperBound, 60 | &propSize, &maxPacketSize), "couldn't get file's max packet size"); 61 | 62 | static const int maxBufferSize = 0x10000; // limit size to 64K 63 | static const int minBufferSize = 0x4000; // limit size to 16K 64 | 65 | if (inDesc.mFramesPerPacket) { 66 | Float64 numPacketsForTime = inDesc.mSampleRate / inDesc.mFramesPerPacket * inSeconds; 67 | *outBufferSize = numPacketsForTime * maxPacketSize; 68 | } else { 69 | // if frames per packet is zero, then the codec has no predictable packet == time 70 | // so we can't tailor this (we don't know how many Packets represent a time period 71 | // we'll just return a default buffer size 72 | *outBufferSize = maxBufferSize > maxPacketSize ? maxBufferSize : maxPacketSize; 73 | } 74 | 75 | // we're going to limit our size to our default 76 | if (*outBufferSize > maxBufferSize && *outBufferSize > maxPacketSize) 77 | *outBufferSize = maxBufferSize; 78 | else { 79 | // also make sure we're not too small - we don't want to go the disk for too small chunks 80 | if (*outBufferSize < minBufferSize) 81 | *outBufferSize = minBufferSize; 82 | } 83 | *outNumPackets = *outBufferSize / maxPacketSize; 84 | } 85 | 86 | 87 | // many encoded formats require a 'magic cookie'. 
if the file has a cookie we get it 88 | // and configure the queue with it 89 | static void MyCopyEncoderCookieToQueue(AudioFileID theFile, AudioQueueRef queue ) { 90 | UInt32 propertySize; 91 | OSStatus result = AudioFileGetPropertyInfo (theFile, kAudioFilePropertyMagicCookieData, &propertySize, NULL); 92 | if (result == noErr && propertySize > 0) 93 | { 94 | Byte* magicCookie = (UInt8*)malloc(sizeof(UInt8) * propertySize); 95 | CheckError(AudioFileGetProperty (theFile, kAudioFilePropertyMagicCookieData, &propertySize, magicCookie), "get cookie from file failed"); 96 | CheckError(AudioQueueSetProperty(queue, kAudioQueueProperty_MagicCookie, magicCookie, propertySize), "set cookie on queue failed"); 97 | free(magicCookie); 98 | } 99 | } 100 | 101 | 102 | #pragma mark - audio queue - 103 | 104 | static void MyAQOutputCallback(void *inUserData, AudioQueueRef inAQ, AudioQueueBufferRef inCompleteAQBuffer) 105 | { 106 | MyPlayer *aqp = (MyPlayer*)inUserData; 107 | if (aqp->isDone) return; 108 | 109 | // read audio data from file into supplied buffer 110 | UInt32 numBytes; 111 | UInt32 nPackets = aqp->numPacketsToRead; 112 | CheckError(AudioFileReadPackets(aqp->playbackFile, 113 | false, 114 | &numBytes, 115 | aqp->packetDescs, 116 | aqp->packetPosition, 117 | &nPackets, 118 | inCompleteAQBuffer->mAudioData), 119 | "AudioFileReadPackets failed"); 120 | 121 | // enqueue buffer into the Audio Queue 122 | // if nPackets == 0 it means we are EOF (all data has been read from file) 123 | if (nPackets > 0) 124 | { 125 | inCompleteAQBuffer->mAudioDataByteSize = numBytes; 126 | AudioQueueEnqueueBuffer(inAQ, 127 | inCompleteAQBuffer, 128 | (aqp->packetDescs ? nPackets : 0), 129 | aqp->packetDescs); 130 | aqp->packetPosition += nPackets; 131 | } 132 | else 133 | { 134 | CheckError(AudioQueueStop(inAQ, false), "AudioQueueStop failed"); 135 | aqp->isDone = true; 136 | } 137 | } 138 | 139 | int main(int argc, const char *argv[]) 140 | { 141 | MyPlayer player = {0}; 142 | 143 | CFURLRef myFileURL = CFURLCreateWithFileSystemPath(kCFAllocatorDefault, kPlaybackFileLocation, kCFURLPOSIXPathStyle, false); 144 | 145 | // open the audio file 146 | // CheckError(AudioFileOpenURL(myFileURL, fsRdPerm, 0, &player.playbackFile), "AudioFileOpenURL failed"); 147 | CheckError(AudioFileOpenURL(myFileURL, kAudioFileReadPermission, 0, &player.playbackFile), "AudioFileOpenURL failed"); 148 | CFRelease(myFileURL); 149 | 150 | // get the audio data format from the file 151 | AudioStreamBasicDescription dataFormat; 152 | UInt32 propSize = sizeof(dataFormat); 153 | CheckError(AudioFileGetProperty(player.playbackFile, kAudioFilePropertyDataFormat, 154 | &propSize, &dataFormat), "couldn't get file's data format"); 155 | 156 | // create a output (playback) queue 157 | AudioQueueRef queue; 158 | CheckError(AudioQueueNewOutput(&dataFormat, // ASBD 159 | MyAQOutputCallback, // Callback 160 | &player, // user data 161 | NULL, // run loop 162 | NULL, // run loop mode 163 | 0, // flags (always 0) 164 | &queue), // output: reference to AudioQueue object 165 | "AudioQueueNewOutput failed"); 166 | 167 | 168 | // adjust buffer size to represent about a half second (0.5) of audio based on this format 169 | UInt32 bufferByteSize; 170 | CalculateBytesForTime(player.playbackFile, dataFormat, 0.5, &bufferByteSize, &player.numPacketsToRead); 171 | 172 | // check if we are dealing with a VBR file. ASBDs for VBR files always have 173 | // mBytesPerPacket and mFramesPerPacket as 0 since they can fluctuate at any time. 
174 | // If we are dealing with a VBR file, we allocate memory to hold the packet descriptions 175 | bool isFormatVBR = (dataFormat.mBytesPerPacket == 0 || dataFormat.mFramesPerPacket == 0); 176 | if (isFormatVBR) 177 | player.packetDescs = (AudioStreamPacketDescription*)malloc(sizeof(AudioStreamPacketDescription) * player.numPacketsToRead); 178 | else 179 | player.packetDescs = NULL; // we don't provide packet descriptions for constant bit rate formats (like linear PCM) 180 | 181 | // get magic cookie from file and set on queue 182 | MyCopyEncoderCookieToQueue(player.playbackFile, queue); 183 | 184 | // allocate the buffers and prime the queue with some data before starting 185 | AudioQueueBufferRef buffers[kNumberPlaybackBuffers]; 186 | player.isDone = false; 187 | player.packetPosition = 0; 188 | int i; 189 | for (i = 0; i < kNumberPlaybackBuffers; ++i) 190 | { 191 | CheckError(AudioQueueAllocateBuffer(queue, bufferByteSize, &buffers[i]), "AudioQueueAllocateBuffer failed"); 192 | 193 | // manually invoke callback to fill buffers with data 194 | MyAQOutputCallback(&player, queue, buffers[i]); 195 | 196 | // EOF (the entire file's contents fit in the buffers) 197 | if (player.isDone) 198 | break; 199 | } 200 | 201 | 202 | //CheckError(AudioQueueAddPropertyListener(aqp.queue, kAudioQueueProperty_IsRunning, MyAQPropertyListenerCallback, &aqp), "AudioQueueAddPropertyListener(kAudioQueueProperty_IsRunning) failed"); 203 | 204 | // start the queue. this function returns immedatly and begins 205 | // invoking the callback, as needed, asynchronously. 206 | CheckError(AudioQueueStart(queue, NULL), "AudioQueueStart failed"); 207 | 208 | // and wait 209 | printf("Playing...\n"); 210 | do 211 | { 212 | CFRunLoopRunInMode(kCFRunLoopDefaultMode, 0.25, false); 213 | } while (!player.isDone /*|| gIsRunning*/); 214 | 215 | // isDone represents the state of the Audio File enqueuing. This does not mean the 216 | // Audio Queue is actually done playing yet. Since we have 3 half-second buffers in-flight 217 | // run for continue to run for a short additional time so they can be processed 218 | CFRunLoopRunInMode(kCFRunLoopDefaultMode, 2, false); 219 | 220 | // end playback 221 | player.isDone = true; 222 | CheckError(AudioQueueStop(queue, TRUE), "AudioQueueStop failed"); 223 | 224 | cleanup: 225 | AudioQueueDispose(queue, TRUE); 226 | AudioFileClose(player.playbackFile); 227 | 228 | return 0; 229 | } 230 | -------------------------------------------------------------------------------- /CH04_Recorder/main.c: -------------------------------------------------------------------------------- 1 | #include 2 | 3 | #define kNumberRecordBuffers 3 4 | 5 | 6 | typedef struct MyRecorder { 7 | AudioFileID recordFile; // reference to your output file 8 | SInt64 recordPacket; // current packet index in output file 9 | Boolean running; // recording state 10 | } MyRecorder; 11 | 12 | 13 | OSStatus MyGetDefaultInputDeviceSampleRate(Float64 *outSampleRate); 14 | 15 | 16 | #pragma mark - utility functions - 17 | 18 | // generic error handler - if error is nonzero, prints error message and exits program. 
19 | static void CheckError(OSStatus error, const char *operation) 20 | { 21 | if (error == noErr) return; 22 | 23 | char errorString[20]; 24 | // see if it appears to be a 4-char-code 25 | *(UInt32 *)(errorString + 1) = CFSwapInt32HostToBig(error); 26 | if (isprint(errorString[1]) && isprint(errorString[2]) && isprint(errorString[3]) && isprint(errorString[4])) { 27 | errorString[0] = errorString[5] = '\''; 28 | errorString[6] = '\0'; 29 | } else { 30 | // no, format it as an integer 31 | sprintf(errorString, "%d", (int)error); 32 | } 33 | 34 | fprintf(stderr, "Error: %s (%s)\n", operation, errorString); 35 | 36 | exit(1); 37 | } 38 | 39 | // get sample rate of the default input device 40 | OSStatus MyGetDefaultInputDeviceSampleRate(Float64 *outSampleRate) 41 | { 42 | OSStatus error; 43 | AudioDeviceID deviceID = 0; 44 | 45 | // get the default input device 46 | AudioObjectPropertyAddress propertyAddress; 47 | UInt32 propertySize; 48 | propertyAddress.mSelector = kAudioHardwarePropertyDefaultInputDevice; 49 | propertyAddress.mScope = kAudioObjectPropertyScopeGlobal; 50 | propertyAddress.mElement = 0; 51 | propertySize = sizeof(AudioDeviceID); 52 | error = AudioHardwareServiceGetPropertyData(kAudioObjectSystemObject, &propertyAddress, 0, NULL, &propertySize, &deviceID); 53 | if (error) return error; 54 | 55 | // get its sample rate 56 | propertyAddress.mSelector = kAudioDevicePropertyNominalSampleRate; 57 | propertyAddress.mScope = kAudioObjectPropertyScopeGlobal; 58 | propertyAddress.mElement = 0; 59 | propertySize = sizeof(Float64); 60 | error = AudioHardwareServiceGetPropertyData(deviceID, &propertyAddress, 0, NULL, &propertySize, outSampleRate); 61 | 62 | return error; 63 | } 64 | 65 | 66 | // Determine the size, in bytes, of a buffer necessary to represent the supplied number 67 | // of seconds of audio data. 68 | static int MyComputeRecordBufferSize(const AudioStreamBasicDescription *format, AudioQueueRef queue, float seconds) 69 | { 70 | int packets, frames, bytes; 71 | 72 | frames = (int)ceil(seconds * format->mSampleRate); 73 | 74 | if (format->mBytesPerFrame > 0) { // 1 75 | bytes = frames * format->mBytesPerFrame; 76 | } else { 77 | UInt32 maxPacketSize; 78 | if (format->mBytesPerPacket > 0) { // 2 79 | maxPacketSize = format->mBytesPerPacket; 80 | } else { 81 | // get the largest single packet size possible 82 | UInt32 propertySize = sizeof(maxPacketSize); // 3 83 | CheckError(AudioQueueGetProperty(queue, kAudioConverterPropertyMaximumOutputPacketSize, &maxPacketSize, 84 | &propertySize), "couldn't get queue's maximum output packet size"); 85 | } 86 | if (format->mFramesPerPacket > 0) { 87 | packets = frames / format->mFramesPerPacket; // 4 88 | } else { 89 | // worst-case scenario: 1 frame in a packet 90 | packets = frames; // 5 91 | } 92 | 93 | if (packets == 0) { // sanity check 94 | packets = 1; 95 | } 96 | bytes = packets * maxPacketSize; // 6 97 | } 98 | return bytes; 99 | } 100 | 101 | // Copy a queue's encoder's magic cookie to an audio file. 
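// (a magic cookie is an opaque, codec-specific blob of configuration data; for formats like AAC it
// has to be stored in the file along with the encoded packets so a decoder can be configured later.
// The queue exposes it through kAudioConverterCompressionMagicCookie / kAudioQueueProperty_MagicCookie,
// and the file accepts it through kAudioFilePropertyMagicCookieData, as the function below shows.)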
102 | static void MyCopyEncoderCookieToFile(AudioQueueRef queue, AudioFileID theFile) 103 | { 104 | UInt32 propertySize; 105 | 106 | // get the magic cookie, if any, from the queue's converter 107 | OSStatus result = AudioQueueGetPropertySize(queue, 108 | kAudioConverterCompressionMagicCookie, &propertySize); 109 | 110 | if (result == noErr && propertySize > 0) { 111 | // there is valid cookie data to be fetched; get it 112 | Byte *magicCookie = (Byte *)malloc(propertySize); 113 | CheckError(AudioQueueGetProperty(queue, kAudioQueueProperty_MagicCookie, magicCookie, 114 | &propertySize), "get audio queue's magic cookie"); 115 | 116 | // now set the magic cookie on the output file 117 | CheckError(AudioFileSetProperty(theFile, kAudioFilePropertyMagicCookieData, propertySize, magicCookie), 118 | "set audio file's magic cookie"); 119 | free(magicCookie); 120 | } 121 | } 122 | 123 | #pragma mark - audio queue - 124 | 125 | // Audio Queue callback function, called when an input buffer has been filled. 126 | static void MyAQInputCallback(void *inUserData, AudioQueueRef inQueue, 127 | AudioQueueBufferRef inBuffer, 128 | const AudioTimeStamp *inStartTime, 129 | UInt32 inNumPackets, 130 | const AudioStreamPacketDescription *inPacketDesc) 131 | { 132 | MyRecorder *recorder = (MyRecorder *)inUserData; 133 | 134 | // if inNumPackets is greater then zero, our buffer contains audio data 135 | // in the format we specified (AAC) 136 | if (inNumPackets > 0) { 137 | // write packets to file 138 | CheckError(AudioFileWritePackets(recorder->recordFile, FALSE, inBuffer->mAudioDataByteSize, 139 | inPacketDesc, recorder->recordPacket, &inNumPackets, 140 | inBuffer->mAudioData), "AudioFileWritePackets failed"); 141 | // increment packet index 142 | recorder->recordPacket += inNumPackets; 143 | } 144 | 145 | // if we're not stopping, re-enqueue the buffer so that it gets filled again 146 | if (recorder->running) { 147 | CheckError(AudioQueueEnqueueBuffer(inQueue, inBuffer, 148 | 0, NULL), "AudioQueueEnqueueBuffer failed"); 149 | } 150 | } 151 | 152 | int main(int argc, const char *argv[]) 153 | { 154 | MyRecorder recorder = {0}; 155 | AudioStreamBasicDescription recordFormat = {0}; 156 | memset(&recordFormat, 0, sizeof(recordFormat)); 157 | 158 | // Configure the output data format to be AAC 159 | recordFormat.mFormatID = kAudioFormatMPEG4AAC; 160 | recordFormat.mChannelsPerFrame = 2; 161 | 162 | // get the sample rate of the default input device 163 | // we use this to adapt the output data format to match hardware capabilities 164 | MyGetDefaultInputDeviceSampleRate(&recordFormat.mSampleRate); 165 | 166 | // ProTip: Use the AudioFormat API to trivialize ASBD creation. 
167 | // input: at least the mFormatID, however, at this point we already have 168 | // mSampleRate, mFormatID, and mChannelsPerFrame 169 | // output: the remainder of the ASBD will be filled out as much as possible 170 | // given the information known about the format 171 | UInt32 propSize = sizeof(recordFormat); 172 | CheckError(AudioFormatGetProperty(kAudioFormatProperty_FormatInfo, 0, NULL, 173 | &propSize, &recordFormat), "AudioFormatGetProperty failed"); 174 | 175 | // create a input (recording) queue 176 | AudioQueueRef queue = {0}; 177 | CheckError(AudioQueueNewInput(&recordFormat, // ASBD 178 | MyAQInputCallback, // Callback 179 | &recorder, // user data 180 | NULL, // run loop 181 | NULL, // run loop mode 182 | 0, // flags (always 0) 183 | // &recorder.queue), // output: reference to AudioQueue object 184 | &queue), 185 | "AudioQueueNewInput failed"); 186 | 187 | // since the queue is now initilized, we ask it's Audio Converter object 188 | // for the ASBD it has configured itself with. The file may require a more 189 | // specific stream description than was necessary to create the audio queue. 190 | // 191 | // for example: certain fields in an ASBD cannot possibly be known until it's 192 | // codec is instantiated (in this case, by the AudioQueue's Audio Converter object) 193 | UInt32 size = sizeof(recordFormat); 194 | CheckError(AudioQueueGetProperty(queue, kAudioConverterCurrentOutputStreamDescription, 195 | &recordFormat, &size), "couldn't get queue's format"); 196 | 197 | // create the audio file 198 | CFURLRef myFileURL = CFURLCreateWithFileSystemPath(kCFAllocatorDefault, CFSTR("./output.caf"), kCFURLPOSIXPathStyle, false); 199 | CFShow (myFileURL); 200 | CheckError(AudioFileCreateWithURL(myFileURL, kAudioFileCAFType, &recordFormat, 201 | kAudioFileFlags_EraseFile, &recorder.recordFile), "AudioFileCreateWithURL failed"); 202 | CFRelease(myFileURL); 203 | 204 | // many encoded formats require a 'magic cookie'. we set the cookie first 205 | // to give the file object as much info as we can about the data it will be receiving 206 | MyCopyEncoderCookieToFile(queue, recorder.recordFile); 207 | 208 | // allocate and enqueue buffers 209 | int bufferByteSize = MyComputeRecordBufferSize(&recordFormat, queue, 0.5); // enough bytes for half a second 210 | int bufferIndex; 211 | for (bufferIndex = 0; bufferIndex < kNumberRecordBuffers; ++bufferIndex) 212 | { 213 | AudioQueueBufferRef buffer; 214 | CheckError(AudioQueueAllocateBuffer(queue, bufferByteSize, &buffer), 215 | "AudioQueueAllocateBuffer failed"); 216 | CheckError(AudioQueueEnqueueBuffer(queue, buffer, 0, NULL), 217 | "AudioQueueEnqueueBuffer failed"); 218 | } 219 | 220 | // start the queue. this function return immedatly and begins 221 | // invoking the callback, as needed, asynchronously. 
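221 | invoking the callback, as needed, asynchronously.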
222 | recorder.running = TRUE; 223 | CheckError(AudioQueueStart(queue, NULL), "AudioQueueStart failed"); 224 | 225 | // and wait 226 | printf("Recording, press to stop:\n"); 227 | getchar(); 228 | 229 | // end recording 230 | printf("* recording done *\n"); 231 | recorder.running = FALSE; 232 | CheckError(AudioQueueStop(queue, TRUE), "AudioQueueStop failed"); 233 | 234 | // a codec may update its magic cookie at the end of an encoding session 235 | // so reapply it to the file now 236 | MyCopyEncoderCookieToFile(queue, recorder.recordFile); 237 | 238 | cleanup: 239 | AudioQueueDispose(queue, TRUE); 240 | AudioFileClose(recorder.recordFile); 241 | 242 | return 0; 243 | } 244 | -------------------------------------------------------------------------------- /CH10_iOSPlayThrough/CH10_iOSPlayThroughAppDelegate.m: -------------------------------------------------------------------------------- 1 | // 2 | // CH10_iOSPlayThroughAppDelegate.m 3 | // CH10_iOSPlayThrough 4 | // 5 | // Created by Chris Adamson on 7/10/11. 6 | // Copyright 2011 Subsequently and Furthermore, Inc. All rights reserved. 7 | // 8 | 9 | #import "CH10_iOSPlayThroughAppDelegate.h" 10 | 11 | @implementation CH10_iOSPlayThroughAppDelegate 12 | 13 | @synthesize window = _window; 14 | @synthesize effectState = _effectState; 15 | 16 | #pragma mark helpers 17 | 18 | // generic error handler - if err is nonzero, prints error message and exits program. 19 | static void CheckError(OSStatus error, const char *operation) 20 | { 21 | if (error == noErr) return; 22 | 23 | char str[20]; 24 | // see if it appears to be a 4-char-code 25 | *(UInt32 *)(str + 1) = CFSwapInt32HostToBig(error); 26 | if (isprint(str[1]) && isprint(str[2]) && isprint(str[3]) && isprint(str[4])) { 27 | str[0] = str[5] = '\''; 28 | str[6] = '\0'; 29 | } else 30 | // no, format it as an integer 31 | sprintf(str, "%d", (int)error); 32 | 33 | fprintf(stderr, "Error: %s (%s)\n", operation, str); 34 | 35 | exit(1); 36 | } 37 | 38 | 39 | #pragma mark callbacks 40 | static void MyInterruptionListener (void *inUserData, 41 | UInt32 inInterruptionState) { 42 | 43 | printf ("Interrupted! inInterruptionState=%u\n", (unsigned int)inInterruptionState); 44 | CH10_iOSPlayThroughAppDelegate *appDelegate = (__bridge CH10_iOSPlayThroughAppDelegate*)inUserData; 45 | switch (inInterruptionState) { 46 | case kAudioSessionBeginInterruption: 47 | break; 48 | case kAudioSessionEndInterruption: 49 | // TODO: doesn't work! 
50 | CheckError(AudioSessionSetActive(true), 51 | "Couldn't set audio session active"); 52 | CheckError(AudioUnitInitialize(appDelegate.effectState.rioUnit), 53 | "Couldn't initialize RIO unit"); 54 | CheckError (AudioOutputUnitStart (appDelegate.effectState.rioUnit), 55 | "Couldn't start RIO unit"); 56 | break; 57 | default: 58 | break; 59 | }; 60 | } 61 | 62 | static OSStatus InputModulatingRenderCallback ( 63 | void * inRefCon, 64 | AudioUnitRenderActionFlags * ioActionFlags, 65 | const AudioTimeStamp * inTimeStamp, 66 | UInt32 inBusNumber, 67 | UInt32 inNumberFrames, 68 | AudioBufferList * ioData) { 69 | EffectState *effectState = (EffectState*) inRefCon; 70 | 71 | // just copy samples 72 | UInt32 bus1 = 1; 73 | CheckError(AudioUnitRender(effectState->rioUnit, 74 | ioActionFlags, 75 | inTimeStamp, 76 | bus1, 77 | inNumberFrames, 78 | ioData), 79 | "Couldn't render from RemoteIO unit"); 80 | 81 | // walk the samples 82 | AudioSampleType sample = 0; 83 | UInt32 bytesPerChannel = effectState->asbd.mBytesPerFrame/effectState->asbd.mChannelsPerFrame; 84 | for (int bufCount=0; bufCountmNumberBuffers; bufCount++) { 85 | AudioBuffer buf = ioData->mBuffers[bufCount]; 86 | int currentFrame = 0; 87 | while ( currentFrame < inNumberFrames ) { 88 | // copy sample to buffer, across all channels 89 | for (int currentChannel=0; currentChannelasbd.mBytesPerFrame) + 92 | (currentChannel * bytesPerChannel), 93 | sizeof(AudioSampleType)); 94 | 95 | float theta = effectState->sinePhase * M_PI * 2; 96 | 97 | sample = (sin(theta) * sample); 98 | 99 | memcpy(buf.mData + (currentFrame * effectState->asbd.mBytesPerFrame) + 100 | (currentChannel * bytesPerChannel), 101 | &sample, 102 | sizeof(AudioSampleType)); 103 | 104 | effectState->sinePhase += 1.0 / (effectState->asbd.mSampleRate / effectState->sineFrequency); 105 | if (effectState->sinePhase > 1.0) { 106 | effectState->sinePhase -= 1.0; 107 | } 108 | } 109 | currentFrame++; 110 | } 111 | } 112 | return noErr; 113 | } 114 | 115 | 116 | 117 | 118 | 119 | #pragma mark app lifecycle 120 | 121 | - (BOOL)application:(UIApplication *)application didFinishLaunchingWithOptions:(NSDictionary *)launchOptions 122 | { 123 | // set up audio session 124 | CheckError(AudioSessionInitialize(NULL, 125 | kCFRunLoopDefaultMode, 126 | MyInterruptionListener, 127 | (__bridge void *)(self)), 128 | "couldn't initialize audio session"); 129 | 130 | UInt32 category = kAudioSessionCategory_PlayAndRecord; 131 | CheckError(AudioSessionSetProperty(kAudioSessionProperty_AudioCategory, 132 | sizeof(category), 133 | &category), 134 | "Couldn't set category on audio session"); 135 | 136 | // is audio input available? 137 | UInt32 ui32PropertySize = sizeof (UInt32); 138 | UInt32 inputAvailable; 139 | CheckError(AudioSessionGetProperty(kAudioSessionProperty_AudioInputAvailable, 140 | &ui32PropertySize, 141 | &inputAvailable), 142 | "Couldn't get current audio input available prop"); 143 | if (! inputAvailable) { 144 | UIAlertView *noInputAlert = 145 | [[UIAlertView alloc] initWithTitle:@"No audio input" 146 | message:@"No audio input device is currently attached" 147 | delegate:nil 148 | cancelButtonTitle:@"OK" 149 | otherButtonTitles:nil]; 150 | [noInputAlert show]; 151 | // TODO: do we have to die? couldn't we tolerate an incoming connection 152 | // TODO: need another example to show audio routes? 
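// (one possible approach, not implemented here: instead of bailing out, register an
// AudioSessionAddPropertyListener for kAudioSessionProperty_AudioInputAvailable, or for
// kAudioSessionProperty_AudioRouteChange, and start the RIO unit once input becomes available)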
153 | return YES; 154 | } 155 | 156 | // inspect the hardware input rate 157 | Float64 hardwareSampleRate; 158 | UInt32 propSize = sizeof (hardwareSampleRate); 159 | CheckError(AudioSessionGetProperty(kAudioSessionProperty_CurrentHardwareSampleRate, 160 | &propSize, 161 | &hardwareSampleRate), 162 | "Couldn't get hardwareSampleRate"); 163 | NSLog (@"hardwareSampleRate = %f", hardwareSampleRate); 164 | 165 | // CheckError(AudioSessionSetActive(true), 166 | // "Couldn't set AudioSession active"); 167 | 168 | // describe unit 169 | AudioComponentDescription audioCompDesc; 170 | audioCompDesc.componentType = kAudioUnitType_Output; 171 | audioCompDesc.componentSubType = kAudioUnitSubType_RemoteIO; 172 | audioCompDesc.componentManufacturer = kAudioUnitManufacturer_Apple; 173 | audioCompDesc.componentFlags = 0; 174 | audioCompDesc.componentFlagsMask = 0; 175 | 176 | // get rio unit from audio component manager 177 | AudioComponent rioComponent = AudioComponentFindNext(NULL, &audioCompDesc); 178 | CheckError(AudioComponentInstanceNew(rioComponent, &_effectState.rioUnit), 179 | "Couldn't get RIO unit instance"); 180 | 181 | // set up the rio unit for playback 182 | UInt32 oneFlag = 1; 183 | AudioUnitElement bus0 = 0; 184 | CheckError(AudioUnitSetProperty (_effectState.rioUnit, 185 | kAudioOutputUnitProperty_EnableIO, 186 | kAudioUnitScope_Output, 187 | bus0, 188 | &oneFlag, 189 | sizeof(oneFlag)), 190 | "Couldn't enable RIO output"); 191 | 192 | // enable rio input 193 | AudioUnitElement bus1 = 1; 194 | CheckError(AudioUnitSetProperty(_effectState.rioUnit, 195 | kAudioOutputUnitProperty_EnableIO, 196 | kAudioUnitScope_Input, 197 | bus1, 198 | &oneFlag, 199 | sizeof(oneFlag)), 200 | "Couldn't enable RIO input"); 201 | 202 | // setup an asbd in the iphone canonical format 203 | AudioStreamBasicDescription myASBD; 204 | memset (&myASBD, 0, sizeof (myASBD)); 205 | myASBD.mSampleRate = hardwareSampleRate; 206 | myASBD.mFormatID = kAudioFormatLinearPCM; 207 | myASBD.mFormatFlags = kAudioFormatFlagsCanonical; 208 | myASBD.mBytesPerPacket = 4; 209 | myASBD.mFramesPerPacket = 1; 210 | myASBD.mBytesPerFrame = 4; 211 | myASBD.mChannelsPerFrame = 2; 212 | myASBD.mBitsPerChannel = 16; 213 | 214 | /* 215 | // set format for output (bus 0) on rio's input scope 216 | */ 217 | CheckError(AudioUnitSetProperty (_effectState.rioUnit, 218 | kAudioUnitProperty_StreamFormat, 219 | kAudioUnitScope_Input, 220 | bus0, 221 | &myASBD, 222 | sizeof (myASBD)), 223 | "Couldn't set ASBD for RIO on input scope / bus 0"); 224 | 225 | 226 | // set asbd for mic input 227 | CheckError(AudioUnitSetProperty (_effectState.rioUnit, 228 | kAudioUnitProperty_StreamFormat, 229 | kAudioUnitScope_Output, 230 | bus1, 231 | &myASBD, 232 | sizeof (myASBD)), 233 | "Couldn't set ASBD for RIO on output scope / bus 1"); 234 | 235 | // more info on ring modulator and dalek voices at: 236 | // http://homepage.powerup.com.au/~spratleo/Tech/Dalek_Voice_Primer.html 237 | _effectState.asbd = myASBD; 238 | _effectState.sineFrequency = 30; 239 | _effectState.sinePhase = 0; 240 | 241 | // set callback method 242 | AURenderCallbackStruct callbackStruct; 243 | callbackStruct.inputProc = InputModulatingRenderCallback; // callback function 244 | callbackStruct.inputProcRefCon = &_effectState; 245 | 246 | CheckError(AudioUnitSetProperty(_effectState.rioUnit, 247 | kAudioUnitProperty_SetRenderCallback, 248 | kAudioUnitScope_Global, 249 | bus0, 250 | &callbackStruct, 251 | sizeof (callbackStruct)), 252 | "Couldn't set RIO render callback on bus 0"); 253 | 254 | 255 
| // initialize and start remoteio unit 256 | CheckError(AudioUnitInitialize(_effectState.rioUnit), 257 | "Couldn't initialize RIO unit"); 258 | CheckError (AudioOutputUnitStart (_effectState.rioUnit), 259 | "Couldn't start RIO unit"); 260 | 261 | 262 | printf("RIO started!\n"); 263 | 264 | // Override point for customization after application launch. 265 | [self.window makeKeyAndVisible]; 266 | return YES; 267 | 268 | } 269 | 270 | 271 | 272 | // CHRIS NOTE TO SELF: NO CHANGES TO XCODE TEMPLATE BELOW THIS POINT 273 | 274 | 275 | - (void)applicationWillResignActive:(UIApplication *)application 276 | { 277 | /* 278 | Sent when the application is about to move from active to inactive state. This can occur for certain types of temporary interruptions (such as an incoming phone call or SMS message) or when the user quits the application and it begins the transition to the background state. 279 | Use this method to pause ongoing tasks, disable timers, and throttle down OpenGL ES frame rates. Games should use this method to pause the game. 280 | */ 281 | } 282 | 283 | - (void)applicationDidEnterBackground:(UIApplication *)application 284 | { 285 | /* 286 | Use this method to release shared resources, save user data, invalidate timers, and store enough application state information to restore your application to its current state in case it is terminated later. 287 | If your application supports background execution, this method is called instead of applicationWillTerminate: when the user quits. 288 | */ 289 | } 290 | 291 | - (void)applicationWillEnterForeground:(UIApplication *)application 292 | { 293 | /* 294 | Called as part of the transition from the background to the inactive state; here you can undo many of the changes made on entering the background. 295 | */ 296 | } 297 | 298 | - (void)applicationDidBecomeActive:(UIApplication *)application 299 | { 300 | /* 301 | Restart any tasks that were paused (or not yet started) while the application was inactive. If the application was previously in the background, optionally refresh the user interface. 302 | */ 303 | } 304 | 305 | - (void)applicationWillTerminate:(UIApplication *)application 306 | { 307 | /* 308 | Called when the application is about to terminate. 309 | Save data if appropriate. 310 | See also applicationDidEnterBackground:. 
311 | */ 312 | } 313 | 314 | @end 315 | -------------------------------------------------------------------------------- /CH06_AudioConverter/main.c: -------------------------------------------------------------------------------- 1 | #include 2 | 3 | #ifndef MAC_OS_X_VERSION_10_7 4 | // CoreServices defines eofErr, replaced in 10.7 by kAudioFileEndOfFileError 5 | #include 6 | #endif 7 | 8 | 9 | #define kInputFileLocation CFSTR("/Insert/Path/To/Audio/File.xxx") 10 | // #define kInputFileLocation CFSTR("/Users/kevin/Desktop/tmp_storage/audio_tests/cdsd_scratch.aiff") 11 | // #define kInputFileLocation CFSTR("/Volumes/Sephiroth/iTunes/iTunes Media/Music/Compilations/ESCAFLOWNE - ORIGINAL MOVIE SOUNDTRACK/21 We're flying.m4a") 12 | 13 | typedef struct MyAudioConverterSettings 14 | { 15 | AudioStreamBasicDescription inputFormat; // input file's data stream description 16 | AudioStreamBasicDescription outputFormat; // output file's data stream description 17 | 18 | AudioFileID inputFile; // reference to your input file 19 | AudioFileID outputFile; // reference to your output file 20 | 21 | UInt64 inputFilePacketIndex; // current packet index in input file 22 | UInt64 inputFilePacketCount; // total number of packts in input file 23 | UInt32 inputFilePacketMaxSize; // maximum size a packet in the input file can be 24 | AudioStreamPacketDescription *inputFilePacketDescriptions; // array of packet descriptions for read buffer 25 | 26 | // KEVIN: sourceBuffer is never used outside of the callback. why couldn't it be a local there? 27 | void *sourceBuffer; 28 | 29 | } MyAudioConverterSettings; 30 | 31 | 32 | OSStatus MyAudioConverterCallback(AudioConverterRef inAudioConverter, 33 | UInt32 *ioDataPacketCount, 34 | AudioBufferList *ioData, 35 | AudioStreamPacketDescription **outDataPacketDescription, 36 | void *inUserData); 37 | void Convert(MyAudioConverterSettings *mySettings); 38 | 39 | 40 | #pragma mark - utility functions - 41 | 42 | // generic error handler - if result is nonzero, prints error message and exits program. 
43 | static void CheckResult(OSStatus result, const char *operation) 44 | { 45 | if (result == noErr) return; 46 | 47 | char errorString[20]; 48 | // see if it appears to be a 4-char-code 49 | *(UInt32 *)(errorString + 1) = CFSwapInt32HostToBig(result); 50 | if (isprint(errorString[1]) && isprint(errorString[2]) && isprint(errorString[3]) && isprint(errorString[4])) { 51 | errorString[0] = errorString[5] = '\''; 52 | errorString[6] = '\0'; 53 | } else 54 | // no, format it as an integer 55 | sprintf(errorString, "%d", (int)result); 56 | 57 | fprintf(stderr, "Error: %s (%s)\n", operation, errorString); 58 | 59 | exit(1); 60 | } 61 | 62 | 63 | #pragma mark - audio converter - 64 | 65 | OSStatus MyAudioConverterCallback(AudioConverterRef inAudioConverter, 66 | UInt32 *ioDataPacketCount, 67 | AudioBufferList *ioData, 68 | AudioStreamPacketDescription **outDataPacketDescription, 69 | void *inUserData) 70 | { 71 | MyAudioConverterSettings *audioConverterSettings = (MyAudioConverterSettings *)inUserData; 72 | 73 | // initialize in case of failure 74 | ioData->mBuffers[0].mData = NULL; 75 | ioData->mBuffers[0].mDataByteSize = 0; 76 | 77 | // if there are not enough packets to satisfy request, then read what's left 78 | if (audioConverterSettings->inputFilePacketIndex + *ioDataPacketCount > audioConverterSettings->inputFilePacketCount) 79 | *ioDataPacketCount = audioConverterSettings->inputFilePacketCount - audioConverterSettings->inputFilePacketIndex; 80 | 81 | if(*ioDataPacketCount == 0) 82 | return noErr; 83 | 84 | if (audioConverterSettings->sourceBuffer != NULL) 85 | { 86 | free(audioConverterSettings->sourceBuffer); 87 | audioConverterSettings->sourceBuffer = NULL; 88 | } 89 | 90 | audioConverterSettings->sourceBuffer = (void *)calloc(1, *ioDataPacketCount * audioConverterSettings->inputFilePacketMaxSize); 91 | 92 | UInt32 outByteCount = 0; 93 | OSStatus result = AudioFileReadPackets(audioConverterSettings->inputFile, 94 | true, 95 | &outByteCount, 96 | audioConverterSettings->inputFilePacketDescriptions, 97 | audioConverterSettings->inputFilePacketIndex, 98 | ioDataPacketCount, 99 | audioConverterSettings->sourceBuffer); 100 | 101 | // it's not an error if we just read the remainder of the file 102 | #ifdef MAC_OS_X_VERSION_10_7 103 | if (result == kAudioFileEndOfFileError && *ioDataPacketCount) result = noErr; 104 | #else 105 | if (result == eofErr && *ioDataPacketCount) result = noErr; 106 | #endif 107 | else if (result != noErr) return result; 108 | 109 | audioConverterSettings->inputFilePacketIndex += *ioDataPacketCount; 110 | 111 | // KEVIN: in "// initialize in case of failure", we assumed there was only 1 112 | // buffer (since we set it up ourselves in Convert()). so why be careful to 113 | // iterate over potentially multiple buffers here? 
114 | /* 115 | UInt32 bufferIndex; 116 | for (bufferIndex = 0; bufferIndex < ioData->mNumberBuffers; bufferIndex++) 117 | { 118 | ioData->mBuffers[bufferIndex].mData = audioConverterSettings->sourceBuffer; 119 | ioData->mBuffers[bufferIndex].mDataByteSize = outByteCount; 120 | } 121 | */ 122 | // chris' hacky assume-one-buffer equivalent 123 | ioData->mBuffers[0].mData = audioConverterSettings->sourceBuffer; 124 | ioData->mBuffers[0].mDataByteSize = outByteCount; 125 | 126 | if (outDataPacketDescription) 127 | *outDataPacketDescription = audioConverterSettings->inputFilePacketDescriptions; 128 | 129 | return result; 130 | } 131 | 132 | void Convert(MyAudioConverterSettings *mySettings) 133 | { 134 | // create audioConverter object 135 | AudioConverterRef audioConverter; 136 | CheckResult (AudioConverterNew(&mySettings->inputFormat, &mySettings->outputFormat, &audioConverter), 137 | "AudioConverterNew failed"); 138 | 139 | // allocate packet descriptions if the input file is VBR 140 | UInt32 packetsPerBuffer = 0; 141 | UInt32 outputBufferSize = 32 * 1024; // 32 KB is a good starting point 142 | UInt32 sizePerPacket = mySettings->inputFormat.mBytesPerPacket; 143 | if (sizePerPacket == 0) 144 | { 145 | UInt32 size = sizeof(sizePerPacket); 146 | CheckResult(AudioConverterGetProperty(audioConverter, kAudioConverterPropertyMaximumOutputPacketSize, &size, &sizePerPacket), 147 | "Couldn't get kAudioConverterPropertyMaximumOutputPacketSize"); 148 | 149 | // make sure the buffer is large enough to hold at least one packet 150 | if (sizePerPacket > outputBufferSize) 151 | outputBufferSize = sizePerPacket; 152 | 153 | packetsPerBuffer = outputBufferSize / sizePerPacket; 154 | mySettings->inputFilePacketDescriptions = (AudioStreamPacketDescription*)malloc(sizeof(AudioStreamPacketDescription) * packetsPerBuffer); 155 | 156 | } 157 | else 158 | { 159 | packetsPerBuffer = outputBufferSize / sizePerPacket; 160 | } 161 | 162 | // allocate destination buffer 163 | UInt8 *outputBuffer = (UInt8 *)malloc(sizeof(UInt8) * outputBufferSize); // CHRIS: not sizeof(UInt8*). check book text! 164 | 165 | UInt32 outputFilePacketPosition = 0; //in bytes 166 | while(1) 167 | { 168 | // wrap the destination buffer in an AudioBufferList 169 | AudioBufferList convertedData; 170 | convertedData.mNumberBuffers = 1; 171 | convertedData.mBuffers[0].mNumberChannels = mySettings->inputFormat.mChannelsPerFrame; 172 | convertedData.mBuffers[0].mDataByteSize = outputBufferSize; 173 | convertedData.mBuffers[0].mData = outputBuffer; 174 | 175 | // now call the audioConverter to transcode the data. This function will call 176 | // the callback function as many times as required to fulfill the request. 177 | UInt32 ioOutputDataPackets = packetsPerBuffer; 178 | OSStatus error = AudioConverterFillComplexBuffer(audioConverter, 179 | MyAudioConverterCallback, 180 | mySettings, 181 | &ioOutputDataPackets, 182 | &convertedData, 183 | (mySettings->inputFilePacketDescriptions ? mySettings->inputFilePacketDescriptions : nil)); 184 | if (error || !ioOutputDataPackets) 185 | { 186 | // fprintf(stderr, "err: %ld, packets: %ld\n", (long)error, (long)ioOutputDataPackets); 187 | break; // this is our termination condition 188 | } 189 | 190 | // write the converted data to the output file 191 | // KEVIN: QUESTION: 3rd arg seems like it should be a byte count, not packets. why does this work?
192 | CheckResult (AudioFileWritePackets(mySettings->outputFile, 193 | FALSE, 194 | ioOutputDataPackets, 195 | NULL, 196 | outputFilePacketPosition / mySettings->outputFormat.mBytesPerPacket, 197 | &ioOutputDataPackets, 198 | convertedData.mBuffers[0].mData), 199 | "Couldn't write packets to file"); 200 | 201 | // advance the output file write location 202 | outputFilePacketPosition += (ioOutputDataPackets * mySettings->outputFormat.mBytesPerPacket); 203 | } 204 | 205 | AudioConverterDispose(audioConverter); 206 | free (outputBuffer); 207 | } 208 | 209 | int main(int argc, const char *argv[]) 210 | { 211 | MyAudioConverterSettings audioConverterSettings = {0}; 212 | 213 | // open the input audio file 214 | CFURLRef inputFileURL = CFURLCreateWithFileSystemPath(kCFAllocatorDefault, kInputFileLocation, kCFURLPOSIXPathStyle, false); 215 | CheckResult (AudioFileOpenURL(inputFileURL, kAudioFileReadPermission, 0, &audioConverterSettings.inputFile), 216 | "AudioFileOpenURL failed"); 217 | CFRelease(inputFileURL); 218 | 219 | // get the audio data format from the file 220 | UInt32 propSize = sizeof(audioConverterSettings.inputFormat); 221 | CheckResult (AudioFileGetProperty(audioConverterSettings.inputFile, kAudioFilePropertyDataFormat, &propSize, &audioConverterSettings.inputFormat), 222 | "couldn't get file's data format"); 223 | 224 | // get the total number of packets in the file 225 | propSize = sizeof(audioConverterSettings.inputFilePacketCount); 226 | CheckResult (AudioFileGetProperty(audioConverterSettings.inputFile, kAudioFilePropertyAudioDataPacketCount, &propSize, &audioConverterSettings.inputFilePacketCount), 227 | "couldn't get file's packet count"); 228 | 229 | // get size of the largest possible packet 230 | propSize = sizeof(audioConverterSettings.inputFilePacketMaxSize); 231 | CheckResult(AudioFileGetProperty(audioConverterSettings.inputFile, kAudioFilePropertyMaximumPacketSize, &propSize, &audioConverterSettings.inputFilePacketMaxSize), 232 | "couldn't get file's max packet size"); 233 | 234 | // define the output format. AudioConverter requires that one of the data formats be LPCM 235 | audioConverterSettings.outputFormat.mSampleRate = 44100.0; 236 | audioConverterSettings.outputFormat.mFormatID = kAudioFormatLinearPCM; 237 | audioConverterSettings.outputFormat.mFormatFlags = kAudioFormatFlagIsBigEndian | kAudioFormatFlagIsSignedInteger | kAudioFormatFlagIsPacked; 238 | audioConverterSettings.outputFormat.mBytesPerPacket = 4; 239 | audioConverterSettings.outputFormat.mFramesPerPacket = 1; 240 | audioConverterSettings.outputFormat.mBytesPerFrame = 4; 241 | audioConverterSettings.outputFormat.mChannelsPerFrame = 2; 242 | audioConverterSettings.outputFormat.mBitsPerChannel = 16; 243 | 244 | // create output file 245 | // KEVIN: TODO: this fails if file exists. isn't there an overwrite flag we can use?
246 | CFURLRef outputFileURL = CFURLCreateWithFileSystemPath(kCFAllocatorDefault, CFSTR("output.aif"), kCFURLPOSIXPathStyle, false); 247 | CheckResult (AudioFileCreateWithURL(outputFileURL, kAudioFileAIFFType, &audioConverterSettings.outputFormat, kAudioFileFlags_EraseFile, &audioConverterSettings.outputFile), 248 | "AudioFileCreateWithURL failed"); 249 | CFRelease(outputFileURL); 250 | 251 | fprintf(stdout, "Converting...\n"); 252 | Convert(&audioConverterSettings); 253 | 254 | cleanup: 255 | AudioFileClose(audioConverterSettings.inputFile); 256 | AudioFileClose(audioConverterSettings.outputFile); 257 | printf("Done\n"); 258 | return 0; 259 | } 260 | --------------------------------------------------------------------------------
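A minimal verification sketch, not part of the original chapter sources, assuming the converter above has already produced output.aif in the current working directory. It reopens that file and prints back the data format that was written, reusing only AudioToolbox calls already used in main() above (CFURLCreateWithFileSystemPath, AudioFileOpenURL, AudioFileGetProperty, AudioFileClose). The file name verify_output.c and the build command in the comment are illustrative assumptions.

/* verify_output.c -- sketch: reopen output.aif and print its data format */
/* assumed build: clang verify_output.c -framework AudioToolbox -framework CoreFoundation -o verify_output */
#include <AudioToolbox/AudioToolbox.h>
#include <stdio.h>

int main(int argc, const char *argv[])
{
    // open the AIFF written by CH06_AudioConverter (assumed to be in the working directory)
    CFURLRef url = CFURLCreateWithFileSystemPath(kCFAllocatorDefault, CFSTR("output.aif"), kCFURLPOSIXPathStyle, false);
    AudioFileID file;
    OSStatus result = AudioFileOpenURL(url, kAudioFileReadPermission, 0, &file);
    CFRelease(url);
    if (result != noErr) { fprintf(stderr, "couldn't open output.aif (%d)\n", (int)result); return 1; }

    // read back the AudioStreamBasicDescription the converter wrote
    AudioStreamBasicDescription asbd = {0};
    UInt32 propSize = sizeof(asbd);
    result = AudioFileGetProperty(file, kAudioFilePropertyDataFormat, &propSize, &asbd);
    if (result == noErr)
        printf("%.0f Hz, %u channels, %u bits/channel, %u bytes/packet\n",
               asbd.mSampleRate,
               (unsigned)asbd.mChannelsPerFrame,
               (unsigned)asbd.mBitsPerChannel,
               (unsigned)asbd.mBytesPerPacket);

    AudioFileClose(file);
    return (result == noErr) ? 0 : 1;
}

For the output format defined in main() above, this should report 44100 Hz, 2 channels, 16 bits/channel, and 4 bytes/packet, since with one frame per packet mBytesPerFrame = (16 bits / 8) * 2 channels = 4 bytes.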