├── CH08_AUGraphInput ├── CH08_AUGraphInput-Bridging-Header.h ├── Utility.swift └── main.swift ├── SwiftCoreAudio.xcodeproj ├── project.xcworkspace │ └── contents.xcworkspacedata └── xcuserdata │ └── Doug.xcuserdatad │ └── xcschemes │ ├── xcschememanagement.plist │ ├── CH04_Recorder.xcscheme │ ├── CH01_CAMetadata.xcscheme │ ├── CH02_CAToneFileGenerator.xcscheme │ └── CH03_CAStreamFormatTester.xcscheme ├── README.md ├── CH01_CAMetadata └── main.swift ├── CH07_AUGraphPlayer ├── Utility.swift └── main.swift ├── CH06_AudioConverter ├── Utility.swift └── main.swift ├── CH07_AUGraphSineWave ├── Utility.swift └── main.swift ├── CH06_ExtAudioFileConverter ├── Utility.swift └── main.swift ├── CH07_AUGraphSpeechSynthesis ├── Utility.swift └── main.swift ├── CH--_OpenALSineWave ├── Utility.swift └── main.swift ├── CH09_OpenALOrbitLoop ├── Utility.swift └── main.swift ├── CH09_OpenALOrbitStream ├── Utility.swift └── main.swift ├── CH03_CAStreamFormatTester └── main.swift ├── CH02_CAToneFileGenerator └── main.swift ├── CH05_Player ├── Utility.swift └── main.swift └── CH04_Recorder ├── Utility.swift └── main.swift /CH08_AUGraphInput/CH08_AUGraphInput-Bridging-Header.h: -------------------------------------------------------------------------------- 1 | // 2 | // Use this file to import your target's public headers that you would like to expose to Swift. 
3 | // 4 | 5 | #include "CARingBuffer.h" 6 | #include 7 | -------------------------------------------------------------------------------- /SwiftCoreAudio.xcodeproj/project.xcworkspace/contents.xcworkspacedata: -------------------------------------------------------------------------------- 1 | 2 | 4 | 6 | 7 | 8 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # SwiftCoreAudio 2 | Swift versions of the examples from "Learning Core Audio" (Adamson & Avila) 3 | 4 | As I work my way through the excellent book "Learning Core Audio" by Chris Adamson and Kevin Avila I'm running the example code in it's original Objective-C / C form and then converting each example to run in XCode 8 (beta) using Swift 3. 5 | 6 | 7 | NOTE: CH08_AUGraphInput compiles with errors. I haven't worked out (yet) how to access the C++ CARingBuffer from Swift. 8 | 9 | NOTE: CH--_OpenALSineWave is not from the book. It's my own combination of CH07_AUGraphSineWave and CH09_OpenALOrbitStream (without the "orbit"). It demonstrates how to use two Apple Extensions to OpenAL. 
10 | -------------------------------------------------------------------------------- /SwiftCoreAudio.xcodeproj/xcuserdata/Doug.xcuserdatad/xcschemes/xcschememanagement.plist: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | SchemeUserState 6 | 7 | CH01_CAMetadata.xcscheme 8 | 9 | orderHint 10 | 0 11 | 12 | CH02_CAToneFileGenerator.xcscheme 13 | 14 | orderHint 15 | 1 16 | 17 | CH03_CAStreamFormatTester.xcscheme 18 | 19 | orderHint 20 | 2 21 | 22 | CH04_Recorder.xcscheme 23 | 24 | orderHint 25 | 3 26 | 27 | 28 | SuppressBuildableAutocreation 29 | 30 | 91F84E851D289A11005DF80F 31 | 32 | primary 33 | 34 | 35 | 91F84E931D289A51005DF80F 36 | 37 | primary 38 | 39 | 40 | 91F84E9E1D289B32005DF80F 41 | 42 | primary 43 | 44 | 45 | 91F84EA91D289B9A005DF80F 46 | 47 | primary 48 | 49 | 50 | 91F84EB41D289BE9005DF80F 51 | 52 | primary 53 | 54 | 55 | 56 | 57 | 58 | -------------------------------------------------------------------------------- /CH01_CAMetadata/main.swift: -------------------------------------------------------------------------------- 1 | // 2 | // main.swift 3 | // CH01_CAMetadata 4 | // 5 | // Created by Douglas Adams on 6/28/16. 6 | // 7 | 8 | import CoreFoundation 9 | import AudioToolbox 10 | 11 | //-------------------------------------------------------------------------------------------------- 12 | // MARK: Properties 13 | 14 | var audioFile: AudioFileID? 
15 | var theErr: OSStatus = noErr 16 | var dictionarySize: UInt32 = 0 17 | var isWritable: UInt32 = 0 18 | var dictionary: CFDictionary = [:] 19 | 20 | //-------------------------------------------------------------------------------------------------- 21 | // MARK: Main 22 | 23 | if Process.arguments.count < 2 { 24 | Swift.print("Usage: CAMetadata /full/path/to/audiofile\n") 25 | exit(-1) 26 | } 27 | 28 | let audioFilePath = (Process.arguments[1] as NSString).expandingTildeInPath 29 | 30 | Swift.print("audioFilePath = \(audioFilePath)") 31 | 32 | let audioURL = URL(fileURLWithPath: audioFilePath as String) 33 | 34 | Swift.print("audioURL: \(audioURL)") 35 | 36 | theErr = AudioFileOpenURL(audioURL, .readPermission, 0, &audioFile) 37 | 38 | assert (theErr == noErr) 39 | 40 | theErr = AudioFileGetPropertyInfo(audioFile!, kAudioFilePropertyInfoDictionary, &dictionarySize, &isWritable) 41 | 42 | assert (theErr == noErr) 43 | 44 | theErr = AudioFileGetProperty(audioFile!, kAudioFilePropertyInfoDictionary, &dictionarySize, &dictionary) 45 | 46 | assert (theErr == noErr) 47 | 48 | Swift.print("dictionary: \(dictionary)") 49 | 50 | theErr = AudioFileClose(audioFile!) 51 | 52 | assert (theErr == noErr) 53 | -------------------------------------------------------------------------------- /CH07_AUGraphPlayer/Utility.swift: -------------------------------------------------------------------------------- 1 | // 2 | // Utility.swift 3 | // CH07_AUGraphPlayer 4 | // 5 | // Created by Douglas Adams on 6/30/16. 
6 | // 7 | 8 | import Foundation 9 | 10 | class Utility { 11 | // 12 | // convert a Core Audio error code to a printable string 13 | // 14 | static func codeToString(_ error: OSStatus) -> String { 15 | 16 | // byte swap the error 17 | let errorCode = CFSwapInt32HostToBig(UInt32(bitPattern: error)) 18 | 19 | // separate the UInt32 into 4 bytes 20 | var bytes = [UInt8](repeating: 0, count: 4) 21 | bytes[0] = UInt8(errorCode & 0x000000ff) 22 | bytes[1] = UInt8( (errorCode & 0x0000ff00) >> 8) 23 | bytes[2] = UInt8( (errorCode & 0x00ff0000) >> 16) 24 | bytes[3] = UInt8( (errorCode & 0xff000000) >> 24) 25 | 26 | // do the four bytes all represent printable characters? 27 | if isprint(Int32(bytes[0])) != 0 && isprint(Int32(bytes[1])) != 0 && 28 | isprint(Int32(bytes[2])) != 0 && isprint(Int32(bytes[3])) != 0 { 29 | 30 | // YES, return a String made from them 31 | return String(bytes: bytes, encoding: String.Encoding.ascii)! 32 | 33 | } else { 34 | 35 | // NO, treat the UInt32 as a number and create a String of the number 36 | return String(format: "%d", error) 37 | } 38 | } 39 | // 40 | // generic error handler - if error is nonzero, prints error message and exits program. 41 | // 42 | static func check(error: OSStatus , operation: String) { 43 | 44 | // return if no error 45 | if error == noErr { return } 46 | 47 | // print either four characters or the numeric value 48 | Swift.print("Error: \(operation), returned: \(codeToString(error))") 49 | 50 | // terminate the program 51 | exit(1) 52 | } 53 | } 54 | -------------------------------------------------------------------------------- /CH08_AUGraphInput/Utility.swift: -------------------------------------------------------------------------------- 1 | // 2 | // Utility.swift 3 | // CH08_AUGraphInput 4 | // 5 | // Created by Douglas Adams on 6/30/16. 
6 | // 7 | 8 | import Foundation 9 | 10 | class Utility { 11 | // 12 | // convert a Core Audio error code to a printable string 13 | // 14 | static func codeToString(_ error: OSStatus) -> String { 15 | 16 | // byte swap the error 17 | let errorCode = CFSwapInt32HostToBig(UInt32(bitPattern: error)) 18 | 19 | // separate the UInt32 into 4 bytes 20 | var bytes = [UInt8](repeating: 0, count: 4) 21 | bytes[0] = UInt8(errorCode & 0x000000ff) 22 | bytes[1] = UInt8( (errorCode & 0x0000ff00) >> 8) 23 | bytes[2] = UInt8( (errorCode & 0x00ff0000) >> 16) 24 | bytes[3] = UInt8( (errorCode & 0xff000000) >> 24) 25 | 26 | // do the four bytes all represent printable characters? 27 | if isprint(Int32(bytes[0])) != 0 && isprint(Int32(bytes[1])) != 0 && 28 | isprint(Int32(bytes[2])) != 0 && isprint(Int32(bytes[3])) != 0 { 29 | 30 | // YES, return a String made from them 31 | return String(bytes: bytes, encoding: String.Encoding.ascii)! 32 | 33 | } else { 34 | 35 | // NO, treat the UInt32 as a number and create a String of the number 36 | return String(format: "%d", error) 37 | } 38 | } 39 | // 40 | // generic error handler - if error is nonzero, prints error message and exits program. 41 | // 42 | static func check(error: OSStatus , operation: String) { 43 | 44 | // return if no error 45 | if error == noErr { return } 46 | 47 | // print either four characters or the numeric value 48 | Swift.print("Error: \(operation), returned: \(codeToString(error))") 49 | 50 | // terminate the program 51 | exit(1) 52 | } 53 | } 54 | -------------------------------------------------------------------------------- /CH06_AudioConverter/Utility.swift: -------------------------------------------------------------------------------- 1 | // 2 | // Utility.swift 3 | // CH06_AudioConverter 4 | // 5 | // Created by Douglas Adams on 6/30/16. 
6 | // 7 | 8 | import Foundation 9 | 10 | class Utility { 11 | // 12 | // convert a Core Audio error code to a printable string 13 | // 14 | static func codeToString(_ error: OSStatus) -> String { 15 | 16 | // byte swap the error 17 | let errorCode = CFSwapInt32HostToBig(UInt32(bitPattern: error)) 18 | 19 | // separate the UInt32 into 4 bytes 20 | var bytes = [UInt8](repeating: 0, count: 4) 21 | bytes[0] = UInt8(errorCode & 0x000000ff) 22 | bytes[1] = UInt8( (errorCode & 0x0000ff00) >> 8) 23 | bytes[2] = UInt8( (errorCode & 0x00ff0000) >> 16) 24 | bytes[3] = UInt8( (errorCode & 0xff000000) >> 24) 25 | 26 | // do the four bytes all represent printable characters? 27 | if isprint(Int32(bytes[0])) != 0 && isprint(Int32(bytes[1])) != 0 && 28 | isprint(Int32(bytes[2])) != 0 && isprint(Int32(bytes[3])) != 0 { 29 | 30 | // YES, return a String made from them 31 | return String(bytes: bytes, encoding: String.Encoding.ascii)! 32 | 33 | } else { 34 | 35 | // NO, treat the UInt32 as a number and create a String of the number 36 | return String(format: "%d", error) 37 | } 38 | } 39 | // 40 | // generic error handler - if error is nonzero, prints error message and exits program. 41 | // 42 | static func check(error: OSStatus , operation: String) { 43 | 44 | // return if no error 45 | if error == noErr { return } 46 | 47 | // print either four characters or the numeric value 48 | Swift.print("Error: \(operation), returned: \(codeToString(error))") 49 | 50 | // terminate the program 51 | exit(1) 52 | } 53 | } 54 | -------------------------------------------------------------------------------- /CH07_AUGraphSineWave/Utility.swift: -------------------------------------------------------------------------------- 1 | // 2 | // Utility.swift 3 | // CH07_AUGraphSineWave 4 | // 5 | // Created by Douglas Adams on 6/30/16. 
6 | // 7 | 8 | import Foundation 9 | 10 | class Utility { 11 | // 12 | // convert a Core Audio error code to a printable string 13 | // 14 | static func codeToString(_ error: OSStatus) -> String { 15 | 16 | // byte swap the error 17 | let errorCode = CFSwapInt32HostToBig(UInt32(bitPattern: error)) 18 | 19 | // separate the UInt32 into 4 bytes 20 | var bytes = [UInt8](repeating: 0, count: 4) 21 | bytes[0] = UInt8(errorCode & 0x000000ff) 22 | bytes[1] = UInt8( (errorCode & 0x0000ff00) >> 8) 23 | bytes[2] = UInt8( (errorCode & 0x00ff0000) >> 16) 24 | bytes[3] = UInt8( (errorCode & 0xff000000) >> 24) 25 | 26 | // do the four bytes all represent printable characters? 27 | if isprint(Int32(bytes[0])) != 0 && isprint(Int32(bytes[1])) != 0 && 28 | isprint(Int32(bytes[2])) != 0 && isprint(Int32(bytes[3])) != 0 { 29 | 30 | // YES, return a String made from them 31 | return String(bytes: bytes, encoding: String.Encoding.ascii)! 32 | 33 | } else { 34 | 35 | // NO, treat the UInt32 as a number and create a String of the number 36 | return String(format: "%d", error) 37 | } 38 | } 39 | // 40 | // generic error handler - if error is nonzero, prints error message and exits program. 41 | // 42 | static func check(error: OSStatus , operation: String) { 43 | 44 | // return if no error 45 | if error == noErr { return } 46 | 47 | // print either four characters or the numeric value 48 | Swift.print("Error: \(operation), returned: \(codeToString(error))") 49 | 50 | // terminate the program 51 | exit(1) 52 | } 53 | } 54 | -------------------------------------------------------------------------------- /CH06_ExtAudioFileConverter/Utility.swift: -------------------------------------------------------------------------------- 1 | // 2 | // Utility.swift 3 | // CH06_ExtAudioFileConverter 4 | // 5 | // Created by Douglas Adams on 6/30/16. 
6 | // 7 | 8 | import Foundation 9 | 10 | class Utility { 11 | // 12 | // convert a Core Audio error code to a printable string 13 | // 14 | static func codeToString(_ error: OSStatus) -> String { 15 | 16 | // byte swap the error 17 | let errorCode = CFSwapInt32HostToBig(UInt32(bitPattern: error)) 18 | 19 | // separate the UInt32 into 4 bytes 20 | var bytes = [UInt8](repeating: 0, count: 4) 21 | bytes[0] = UInt8(errorCode & 0x000000ff) 22 | bytes[1] = UInt8( (errorCode & 0x0000ff00) >> 8) 23 | bytes[2] = UInt8( (errorCode & 0x00ff0000) >> 16) 24 | bytes[3] = UInt8( (errorCode & 0xff000000) >> 24) 25 | 26 | // do the four bytes all represent printable characters? 27 | if isprint(Int32(bytes[0])) != 0 && isprint(Int32(bytes[1])) != 0 && 28 | isprint(Int32(bytes[2])) != 0 && isprint(Int32(bytes[3])) != 0 { 29 | 30 | // YES, return a String made from them 31 | return String(bytes: bytes, encoding: String.Encoding.ascii)! 32 | 33 | } else { 34 | 35 | // NO, treat the UInt32 as a number and create a String of the number 36 | return String(format: "%d", error) 37 | } 38 | } 39 | // 40 | // generic error handler - if error is nonzero, prints error message and exits program. 41 | // 42 | static func check(error: OSStatus , operation: String) { 43 | 44 | // return if no error 45 | if error == noErr { return } 46 | 47 | // print either four characters or the numeric value 48 | Swift.print("Error: \(operation), returned: \(codeToString(error))") 49 | 50 | // terminate the program 51 | exit(1) 52 | } 53 | } 54 | -------------------------------------------------------------------------------- /CH07_AUGraphSpeechSynthesis/Utility.swift: -------------------------------------------------------------------------------- 1 | // 2 | // Utility.swift 3 | // CH07_AUGraphSpeechSynthesis 4 | // 5 | // Created by Douglas Adams on 6/30/16. 
6 | // 7 | 8 | import Foundation 9 | 10 | class Utility { 11 | // 12 | // convert a Core Audio error code to a printable string 13 | // 14 | static func codeToString(_ error: OSStatus) -> String { 15 | 16 | // byte swap the error 17 | let errorCode = CFSwapInt32HostToBig(UInt32(bitPattern: error)) 18 | 19 | // separate the UInt32 into 4 bytes 20 | var bytes = [UInt8](repeating: 0, count: 4) 21 | bytes[0] = UInt8(errorCode & 0x000000ff) 22 | bytes[1] = UInt8( (errorCode & 0x0000ff00) >> 8) 23 | bytes[2] = UInt8( (errorCode & 0x00ff0000) >> 16) 24 | bytes[3] = UInt8( (errorCode & 0xff000000) >> 24) 25 | 26 | // do the four bytes all represent printable characters? 27 | if isprint(Int32(bytes[0])) != 0 && isprint(Int32(bytes[1])) != 0 && 28 | isprint(Int32(bytes[2])) != 0 && isprint(Int32(bytes[3])) != 0 { 29 | 30 | // YES, return a String made from them 31 | return String(bytes: bytes, encoding: String.Encoding.ascii)! 32 | 33 | } else { 34 | 35 | // NO, treat the UInt32 as a number and create a String of the number 36 | return String(format: "%d", error) 37 | } 38 | } 39 | // 40 | // generic error handler - if error is nonzero, prints error message and exits program. 41 | // 42 | static func check(error: OSStatus , operation: String) { 43 | 44 | // return if no error 45 | if error == noErr { return } 46 | 47 | // print either four characters or the numeric value 48 | Swift.print("Error: \(operation), returned: \(codeToString(error))") 49 | 50 | // terminate the program 51 | exit(1) 52 | } 53 | } 54 | -------------------------------------------------------------------------------- /CH--_OpenALSineWave/Utility.swift: -------------------------------------------------------------------------------- 1 | // 2 | // Utility.swift 3 | // CH--_OpenALSineWave 4 | // 5 | // Created by Douglas Adams on 6/30/16. 
6 | // 7 | 8 | import Foundation 9 | import OpenAL 10 | 11 | class Utility { 12 | // 13 | // convert a Core Audio error code to a printable string 14 | // 15 | static func codeToString (_ error: OSStatus) -> String { 16 | 17 | // byte swap the error 18 | let errorCode = CFSwapInt32HostToBig(UInt32(bitPattern: error)) 19 | 20 | // separate the UInt32 into 4 bytes 21 | var bytes = [UInt8](repeating: 0, count: 4) 22 | bytes[0] = UInt8(errorCode & 0x000000ff) 23 | bytes[1] = UInt8( (errorCode & 0x0000ff00) >> 8) 24 | bytes[2] = UInt8( (errorCode & 0x00ff0000) >> 16) 25 | bytes[3] = UInt8( (errorCode & 0xff000000) >> 24) 26 | 27 | // do the four bytes all represent printable characters? 28 | if isprint(Int32(bytes[0])) != 0 && isprint(Int32(bytes[1])) != 0 && 29 | isprint(Int32(bytes[2])) != 0 && isprint(Int32(bytes[3])) != 0 { 30 | 31 | // YES, return a String made from them 32 | return String(bytes: bytes, encoding: String.Encoding.ascii)! 33 | 34 | } else { 35 | 36 | // NO, treat the UInt32 as a number and create a String of the number 37 | return String(format: "%d", error) 38 | } 39 | } 40 | // 41 | // generic error handler - if error is nonzero, prints error message and exits program. 
42 | // 43 | static func check (error: OSStatus , operation: String) { 44 | 45 | // return if no error 46 | if error == noErr { return } 47 | 48 | // print either four characters or the numeric value 49 | Swift.print("Error: \(operation), returned: \(codeToString(error))") 50 | 51 | // terminate the program 52 | exit(1) 53 | } 54 | // 55 | // OpenAL error handler 56 | // 57 | static func checkAL (operation: String) { 58 | 59 | let alErr = alGetError() 60 | 61 | if alErr == AL_NO_ERROR { return } 62 | 63 | var errFormat = "" 64 | switch alErr { 65 | case AL_INVALID_NAME: 66 | errFormat = "OpenAL Error: AL_INVALID_NAME" 67 | case AL_INVALID_VALUE: 68 | errFormat = "OpenAL Error: AL_INVALID_VALUE" 69 | case AL_INVALID_ENUM: 70 | errFormat = "OpenAL Error: AL_INVALID_ENUM" 71 | case AL_INVALID_OPERATION: 72 | errFormat = "OpenAL Error: AL_INVALID_OPERATION" 73 | case AL_OUT_OF_MEMORY: 74 | errFormat = "OpenAL Error: AL_OUT_OF_MEMORY" 75 | default: 76 | errFormat = "OpenAL Error: unknown error" 77 | } 78 | 79 | Swift.print("\(errFormat), \(operation)") 80 | 81 | exit(1) 82 | 83 | } 84 | } 85 | -------------------------------------------------------------------------------- /CH09_OpenALOrbitLoop/Utility.swift: -------------------------------------------------------------------------------- 1 | // 2 | // Utility.swift 3 | // CH09_OpenALOrbitLoop 4 | // 5 | // Created by Douglas Adams on 6/30/16. 
6 | // 7 | 8 | import Foundation 9 | import OpenAL 10 | 11 | class Utility { 12 | // 13 | // convert a Core Audio error code to a printable string 14 | // 15 | static func codeToString(_ error: OSStatus) -> String { 16 | 17 | // byte swap the error 18 | let errorCode = CFSwapInt32HostToBig(UInt32(bitPattern: error)) 19 | 20 | // separate the UInt32 into 4 bytes 21 | var bytes = [UInt8](repeating: 0, count: 4) 22 | bytes[0] = UInt8(errorCode & 0x000000ff) 23 | bytes[1] = UInt8( (errorCode & 0x0000ff00) >> 8) 24 | bytes[2] = UInt8( (errorCode & 0x00ff0000) >> 16) 25 | bytes[3] = UInt8( (errorCode & 0xff000000) >> 24) 26 | 27 | // do the four bytes all represent printable characters? 28 | if isprint(Int32(bytes[0])) != 0 && isprint(Int32(bytes[1])) != 0 && 29 | isprint(Int32(bytes[2])) != 0 && isprint(Int32(bytes[3])) != 0 { 30 | 31 | // YES, return a String made from them 32 | return String(bytes: bytes, encoding: String.Encoding.ascii)! 33 | 34 | } else { 35 | 36 | // NO, treat the UInt32 as a number and create a String of the number 37 | return String(format: "%d", error) 38 | } 39 | } 40 | // 41 | // generic error handler - if error is nonzero, prints error message and exits program. 
42 | // 43 | static func check(error: OSStatus , operation: String) { 44 | 45 | // return if no error 46 | if error == noErr { return } 47 | 48 | // print either four characters or the numeric value 49 | Swift.print("Error: \(operation), returned: \(codeToString(error))") 50 | 51 | // terminate the program 52 | exit(1) 53 | } 54 | // 55 | // OpenAL error handler 56 | // 57 | static func checkAL(operation: String) { 58 | 59 | let alErr = alGetError() 60 | 61 | if alErr == AL_NO_ERROR { return } 62 | 63 | var errFormat = "" 64 | switch alErr { 65 | case AL_INVALID_NAME: 66 | errFormat = "OpenAL Error: AL_INVALID_NAME" 67 | case AL_INVALID_VALUE: 68 | errFormat = "OpenAL Error: AL_INVALID_VALUE" 69 | case AL_INVALID_ENUM: 70 | errFormat = "OpenAL Error: AL_INVALID_ENUM" 71 | case AL_INVALID_OPERATION: 72 | errFormat = "OpenAL Error: AL_INVALID_OPERATION" 73 | case AL_OUT_OF_MEMORY: 74 | errFormat = "OpenAL Error: AL_OUT_OF_MEMORY" 75 | default: 76 | errFormat = "OpenAL Error: unknown error" 77 | } 78 | 79 | Swift.print("\(errFormat), \(operation)") 80 | 81 | exit(1) 82 | 83 | } 84 | } 85 | -------------------------------------------------------------------------------- /CH09_OpenALOrbitStream/Utility.swift: -------------------------------------------------------------------------------- 1 | // 2 | // Utility.swift 3 | // CH09_OpenALOrbitStream 4 | // 5 | // Created by Douglas Adams on 6/30/16. 
6 | // 7 | 8 | import Foundation 9 | import OpenAL 10 | 11 | class Utility { 12 | // 13 | // convert a Core Audio error code to a printable string 14 | // 15 | static func codeToString (_ error: OSStatus) -> String { 16 | 17 | // byte swap the error 18 | let errorCode = CFSwapInt32HostToBig(UInt32(bitPattern: error)) 19 | 20 | // separate the UInt32 into 4 bytes 21 | var bytes = [UInt8](repeating: 0, count: 4) 22 | bytes[0] = UInt8(errorCode & 0x000000ff) 23 | bytes[1] = UInt8( (errorCode & 0x0000ff00) >> 8) 24 | bytes[2] = UInt8( (errorCode & 0x00ff0000) >> 16) 25 | bytes[3] = UInt8( (errorCode & 0xff000000) >> 24) 26 | 27 | // do the four bytes all represent printable characters? 28 | if isprint(Int32(bytes[0])) != 0 && isprint(Int32(bytes[1])) != 0 && 29 | isprint(Int32(bytes[2])) != 0 && isprint(Int32(bytes[3])) != 0 { 30 | 31 | // YES, return a String made from them 32 | return String(bytes: bytes, encoding: String.Encoding.ascii)! 33 | 34 | } else { 35 | 36 | // NO, treat the UInt32 as a number and create a String of the number 37 | return String(format: "%d", error) 38 | } 39 | } 40 | // 41 | // generic error handler - if error is nonzero, prints error message and exits program. 
42 | // 43 | static func check (error: OSStatus , operation: String) { 44 | 45 | // return if no error 46 | if error == noErr { return } 47 | 48 | // print either four characters or the numeric value 49 | Swift.print("Error: \(operation), returned: \(codeToString(error))") 50 | 51 | // terminate the program 52 | exit(1) 53 | } 54 | // 55 | // OpenAL error handler 56 | // 57 | static func checkAL (operation: String) { 58 | 59 | let alErr = alGetError() 60 | 61 | if alErr == AL_NO_ERROR { return } 62 | 63 | var errFormat = "" 64 | switch alErr { 65 | case AL_INVALID_NAME: 66 | errFormat = "OpenAL Error: AL_INVALID_NAME" 67 | case AL_INVALID_VALUE: 68 | errFormat = "OpenAL Error: AL_INVALID_VALUE" 69 | case AL_INVALID_ENUM: 70 | errFormat = "OpenAL Error: AL_INVALID_ENUM" 71 | case AL_INVALID_OPERATION: 72 | errFormat = "OpenAL Error: AL_INVALID_OPERATION" 73 | case AL_OUT_OF_MEMORY: 74 | errFormat = "OpenAL Error: AL_OUT_OF_MEMORY" 75 | default: 76 | errFormat = "OpenAL Error: unknown error" 77 | } 78 | 79 | Swift.print("\(errFormat), \(operation)") 80 | 81 | exit(1) 82 | 83 | } 84 | } 85 | -------------------------------------------------------------------------------- /CH03_CAStreamFormatTester/main.swift: -------------------------------------------------------------------------------- 1 | // 2 | // main.swift 3 | // CH03_CAStreamFormatTester 4 | // 5 | // Created by Douglas Adams on 6/30/16. 
6 | // 7 | 8 | import Foundation 9 | import AudioToolbox 10 | 11 | //-------------------------------------------------------------------------------------------------- 12 | // MARK: Supporting methods 13 | 14 | // 15 | // convert a formatId to a String 16 | // 17 | func idToString(_ formatId: UInt32) -> String { 18 | 19 | var x = [UInt8](repeating: 0, count: 4) 20 | x[0] = UInt8(formatId & 0x000000ff) 21 | x[1] = UInt8( (formatId & 0x0000ff00) >> 8) 22 | x[2] = UInt8( (formatId & 0x00ff0000) >> 16) 23 | x[3] = UInt8( (formatId & 0xff000000) >> 24) 24 | 25 | return String(bytes: x, encoding: String.Encoding.utf8)! 26 | } 27 | 28 | //-------------------------------------------------------------------------------------------------- 29 | // MARK: Main 30 | 31 | // create and populate an Audio File Type And FormatID struct 32 | var fileTypeAndFormat = AudioFileTypeAndFormatID() 33 | fileTypeAndFormat.mFileType = kAudioFileAIFFType 34 | fileTypeAndFormat.mFormatID = kAudioFormatLinearPCM 35 | 36 | // get the size of the property 37 | var audioErr: OSStatus = noErr 38 | var infoSize: UInt32 = 0 39 | audioErr = AudioFileGetGlobalInfoSize(kAudioFileGlobalInfo_AvailableStreamDescriptionsForFormat, 40 | UInt32(strideof(AudioFileTypeAndFormatID.self)), 41 | &fileTypeAndFormat, 42 | &infoSize); 43 | // Check for errors (exit if an error) 44 | if audioErr != noErr { 45 | let err4cc: UInt32 = CFSwapInt32HostToBig(UInt32(audioErr)) 46 | Swift.print(String(format: "%4.4s", err4cc)) 47 | exit(-1) 48 | } 49 | 50 | // get the property (a pointer to an array of AudioStreamBasicDescription's) 51 | var asbdArrayPtr: UnsafeMutablePointer = malloc(Int(infoSize)) 52 | audioErr = AudioFileGetGlobalInfo(kAudioFileGlobalInfo_AvailableStreamDescriptionsForFormat, 53 | UInt32(sizeof (AudioFileTypeAndFormatID.self)), 54 | &fileTypeAndFormat, 55 | &infoSize, 56 | asbdArrayPtr) 57 | // check for error 58 | assert (audioErr == noErr) 59 | 60 | // calculate how many AudioStreamBasicDescription 
structs were found 61 | let asbdCount: Int = Int(infoSize) / sizeof (AudioStreamBasicDescription.self) 62 | 63 | // for each AudioStreamBasicDescription 64 | for i in 0..(asbdArrayPtr.advanced(by: i * sizeof (AudioStreamBasicDescription.self))) 68 | 69 | // get the formatId 70 | let idString = idToString(CFSwapInt32HostToBig(asbdPtr.pointee.mFormatID)) 71 | 72 | // print the AudioStreamBasicDescription fields 73 | Swift.print("\(i): mFormatId: \(idString), mFormatFlags: \(asbdPtr.pointee.mFormatFlags), mBitsPerChannel: \(asbdPtr.pointee.mBitsPerChannel)") 74 | } 75 | 76 | // free the malloc'd memory 77 | free (asbdArrayPtr); 78 | 79 | exit(0) 80 | 81 | -------------------------------------------------------------------------------- /CH02_CAToneFileGenerator/main.swift: -------------------------------------------------------------------------------- 1 | // 2 | // main.swift 3 | // CH02_CAToneFileGenerator 4 | // 5 | // Created by Douglas Adams on 6/29/16. 6 | // 7 | 8 | import Foundation 9 | import AudioToolbox 10 | 11 | //-------------------------------------------------------------------------------------------------- 12 | // MARK: Properties 13 | 14 | let kSampleRate: Double = 44100.0 15 | let kDuration: Double = 5.0 16 | //let kFilenameFormat = "%0.3f-square.aif" 17 | //let kFilenameFormat = "%0.3f-saw.aif" 18 | let kFilenameFormat = "%0.3f-sine.aif" 19 | let kMinValue: UInt16 = 0x8000 20 | let kMaxValue: UInt16 = 0x7fff 21 | 22 | //-------------------------------------------------------------------------------------------------- 23 | // MARK: Main 24 | 25 | // if command line argument missing, show usage 26 | if Process.arguments.count < 2 { 27 | Swift.print("Usage: CAToneFileGenerator n (where n is tone in Hz)") 28 | exit(-1) 29 | } 30 | 31 | // make the command line argument into a Double (check for 0) 32 | var tone: Double = atof(Process.arguments[1]) 33 | assert (tone > 0) 34 | 35 | Swift.print("generating \(tone) hz tone") 36 | 37 | // convert the file 
path into a URL 38 | let fileName = String(format: kFilenameFormat, tone) 39 | let filePath = NSString(string: FileManager.default.currentDirectoryPath).appendingPathComponent( fileName) 40 | let fileURL = URL(fileURLWithPath: filePath) 41 | 42 | Swift.print("path: \(fileURL)") 43 | 44 | // prepare the format (an Audio Stream Basic Description) 45 | var asbd: AudioStreamBasicDescription? = AudioStreamBasicDescription() 46 | asbd!.mSampleRate = kSampleRate 47 | asbd!.mFormatID = kAudioFormatLinearPCM 48 | asbd!.mFormatFlags = kAudioFormatFlagIsBigEndian | kAudioFormatFlagIsSignedInteger | kAudioFormatFlagIsPacked 49 | asbd!.mChannelsPerFrame = 1 50 | asbd!.mFramesPerPacket = 1 51 | asbd!.mBitsPerChannel = 16 52 | asbd!.mBytesPerFrame = 2 53 | asbd!.mBytesPerPacket = 2 54 | 55 | // create the file using the URL 56 | var audioFile: AudioFileID? 57 | var audioErr: OSStatus = noErr 58 | audioErr = AudioFileCreateWithURL(fileURL, UInt32(kAudioFileAIFFType), &asbd!, .eraseFile, &audioFile) 59 | assert (audioErr == noErr); 60 | 61 | var maxSampleCount = CLong(kSampleRate * kDuration) 62 | var sampleCount = 0 63 | var bytesToWrite: UInt32 = 2 64 | var wavelengthInSamples = kSampleRate / tone 65 | 66 | Swift.print("wavelengthInSamples = \(wavelengthInSamples)") 67 | 68 | // start writing samples to the file 69 | while sampleCount < maxSampleCount { 70 | for i in 0.. String { 16 | 17 | // byte swap the error 18 | let errorCode = CFSwapInt32HostToBig(UInt32(bitPattern: error)) 19 | 20 | // separate the UInt32 into 4 bytes 21 | var bytes = [UInt8](repeating: 0, count: 4) 22 | bytes[0] = UInt8(errorCode & 0x000000ff) 23 | bytes[1] = UInt8( (errorCode & 0x0000ff00) >> 8) 24 | bytes[2] = UInt8( (errorCode & 0x00ff0000) >> 16) 25 | bytes[3] = UInt8( (errorCode & 0xff000000) >> 24) 26 | 27 | // do the four bytes all represent printable characters? 
28 | if isprint(Int32(bytes[0])) != 0 && isprint(Int32(bytes[1])) != 0 && 29 | isprint(Int32(bytes[2])) != 0 && isprint(Int32(bytes[3])) != 0 { 30 | 31 | // YES, return a String made from them 32 | return String(bytes: bytes, encoding: String.Encoding.ascii)! 33 | 34 | } else { 35 | 36 | // NO, treat the UInt32 as a number and create a String of the number 37 | return String(format: "%d", error) 38 | } 39 | } 40 | // 41 | // generic error handler - if error is nonzero, prints error message and exits program. 42 | // 43 | static func check(error: OSStatus , operation: String) { 44 | 45 | // return if no error 46 | if error == noErr { return } 47 | 48 | // print either four characters or the numeric value 49 | Swift.print("Error: \(operation), returned: \(codeToString(error))") 50 | 51 | // terminate the program 52 | exit(1) 53 | } 54 | // 55 | // Copy a file's magic cookie to a queue 56 | // 57 | static func applyEncoderCookie(fromFile file: AudioFileID, toQueue queue: AudioQueueRef) { 58 | var propertySize: UInt32 = 0 59 | 60 | // get the magic cookie, if any, from the file 61 | let result = AudioFileGetPropertyInfo (file, kAudioFilePropertyMagicCookieData, &propertySize, nil) 62 | 63 | // is there a cookie? 
64 | if result == noErr && propertySize > 0 { 65 | 66 | // YES, allocate space for it 67 | let magicCookie: UnsafeMutablePointer = malloc(4) 68 | 69 | // get the cookie 70 | Utility.check(error: AudioFileGetProperty (file, 71 | kAudioFilePropertyMagicCookieData, 72 | &propertySize, 73 | magicCookie), 74 | operation: "get cookie from file failed"); 75 | 76 | // now set the magic cookie on the queue 77 | Utility.check(error: AudioQueueSetProperty(queue, 78 | kAudioQueueProperty_MagicCookie, 79 | magicCookie, 80 | propertySize), 81 | operation: "set cookie on queue failed"); 82 | 83 | // release the malloc'd memory 84 | free(magicCookie); 85 | } 86 | } 87 | 88 | } 89 | -------------------------------------------------------------------------------- /SwiftCoreAudio.xcodeproj/xcuserdata/Doug.xcuserdatad/xcschemes/CH04_Recorder.xcscheme: -------------------------------------------------------------------------------- 1 | 2 | 5 | 8 | 9 | 15 | 21 | 22 | 23 | 24 | 25 | 30 | 31 | 32 | 33 | 39 | 40 | 41 | 42 | 43 | 44 | 54 | 56 | 62 | 63 | 64 | 65 | 66 | 67 | 73 | 75 | 81 | 82 | 83 | 84 | 86 | 87 | 90 | 91 | 92 | -------------------------------------------------------------------------------- /SwiftCoreAudio.xcodeproj/xcuserdata/Doug.xcuserdatad/xcschemes/CH01_CAMetadata.xcscheme: -------------------------------------------------------------------------------- 1 | 2 | 5 | 8 | 9 | 15 | 21 | 22 | 23 | 24 | 25 | 30 | 31 | 32 | 33 | 39 | 40 | 41 | 42 | 43 | 44 | 54 | 56 | 62 | 63 | 64 | 65 | 66 | 67 | 73 | 75 | 81 | 82 | 83 | 84 | 86 | 87 | 90 | 91 | 92 | -------------------------------------------------------------------------------- /SwiftCoreAudio.xcodeproj/xcuserdata/Doug.xcuserdatad/xcschemes/CH02_CAToneFileGenerator.xcscheme: -------------------------------------------------------------------------------- 1 | 2 | 5 | 8 | 9 | 15 | 21 | 22 | 23 | 24 | 25 | 30 | 31 | 32 | 33 | 39 | 40 | 41 | 42 | 43 | 44 | 54 | 56 | 62 | 63 | 64 | 65 | 66 | 67 | 73 | 75 | 81 | 82 | 83 | 84 | 
86 | 87 | 90 | 91 | 92 | -------------------------------------------------------------------------------- /SwiftCoreAudio.xcodeproj/xcuserdata/Doug.xcuserdatad/xcschemes/CH03_CAStreamFormatTester.xcscheme: -------------------------------------------------------------------------------- 1 | 2 | 5 | 8 | 9 | 15 | 21 | 22 | 23 | 24 | 25 | 30 | 31 | 32 | 33 | 39 | 40 | 41 | 42 | 43 | 44 | 54 | 56 | 62 | 63 | 64 | 65 | 66 | 67 | 73 | 75 | 81 | 82 | 83 | 84 | 86 | 87 | 90 | 91 | 92 | -------------------------------------------------------------------------------- /CH04_Recorder/Utility.swift: -------------------------------------------------------------------------------- 1 | // 2 | // Utility.swift 3 | // CH04_Recorder 4 | // 5 | // Created by Douglas Adams on 6/30/16. 6 | // 7 | 8 | import Foundation 9 | import AudioToolbox 10 | 11 | class Utility { 12 | // 13 | // convert a Core Audio error code to a printable string 14 | // 15 | static func codeToString(_ error: OSStatus) -> String { 16 | 17 | // byte swap the error 18 | let errorCode = CFSwapInt32HostToBig(UInt32(bitPattern: error)) 19 | 20 | // separate the UInt32 into 4 bytes 21 | var bytes = [UInt8](repeating: 0, count: 4) 22 | bytes[0] = UInt8(errorCode & 0x000000ff) 23 | bytes[1] = UInt8( (errorCode & 0x0000ff00) >> 8) 24 | bytes[2] = UInt8( (errorCode & 0x00ff0000) >> 16) 25 | bytes[3] = UInt8( (errorCode & 0xff000000) >> 24) 26 | 27 | // do the four bytes all represent printable characters? 28 | if isprint(Int32(bytes[0])) != 0 && isprint(Int32(bytes[1])) != 0 && 29 | isprint(Int32(bytes[2])) != 0 && isprint(Int32(bytes[3])) != 0 { 30 | 31 | // YES, return a String made from them 32 | return String(bytes: bytes, encoding: String.Encoding.ascii)! 33 | 34 | } else { 35 | 36 | // NO, treat the UInt32 as a number and create a String of the number 37 | return String(format: "%d", error) 38 | } 39 | } 40 | // 41 | // generic error handler - if error is nonzero, prints error message and exits program. 
//
static func check(error: OSStatus , operation: String) {

    // nothing to do on success
    guard error != noErr else { return }

    // report the failed operation together with the four-char code (or numeric value)
    Swift.print("Error: \(operation), returned: \(codeToString(error))")

    // a Core Audio failure is unrecoverable for these samples
    exit(1)
}
//
// Determine the size, in bytes, of a buffer necessary to represent the supplied number
// of seconds of audio data
//
static func bufferSizeFor(seconds: Float, usingFormat format: AudioStreamBasicDescription, andQueue queue: AudioQueueRef ) -> Int {

    // frames needed for the requested duration, rounded up
    let frameCount = Int(ceil(Double(seconds) * format.mSampleRate))

    // constant bit rate format: a simple multiply gives the answer
    if format.mBytesPerFrame > 0 {
        return frameCount * Int(format.mBytesPerFrame)
    }

    // variable bit rate: first determine the largest possible packet size
    var largestPacket: UInt32
    if format.mBytesPerPacket > 0 {
        // packets are a fixed size
        largestPacket = format.mBytesPerPacket
    } else {
        // ask the queue's converter for its maximum output packet size
        var propertySize: UInt32 = 4
        largestPacket = 4
        check(error: AudioQueueGetProperty(queue, kAudioConverterPropertyMaximumOutputPacketSize, &largestPacket, &propertySize), operation: "couldn't get queue's maximum output packet size")
    }

    // translate frames into packets (worst case: one frame per packet)
    var packetCount = format.mFramesPerPacket > 0 ? frameCount / Int(format.mFramesPerPacket) : frameCount

    // never size a zero-length buffer
    if packetCount == 0 { packetCount = 1 }

    return packetCount * Int(largestPacket)
}
//
// Copy a queue's encoder's magic cookie to an audio file.
//
static func applyEncoderCookie(fromQueue queue: AudioQueueRef, toFile file: AudioFileID) {
    var cookieSize: UInt32 = 0

    // probe the queue's converter for a magic cookie
    let status: OSStatus = AudioQueueGetPropertySize(queue, kAudioConverterCompressionMagicCookie, &cookieSize)

    // no cookie (or probe failed) -> nothing to copy
    guard status == noErr && cookieSize > 0 else { return }

    // space to hold the cookie bytes
    let cookie = malloc(Int(cookieSize))!

    // fetch the cookie from the queue ...
    check(error: AudioQueueGetProperty(queue, kAudioQueueProperty_MagicCookie, cookie, &cookieSize), operation: "get audio queue's magic cookie")

    // ... and stamp it onto the output file
    check(error: AudioFileSetProperty(file, kAudioFilePropertyMagicCookieData, cookieSize, cookie), operation: "set audio file's magic cookie")

    // release the malloc'd memory
    free(cookie)
}
}
--------------------------------------------------------------------------------
/CH07_AUGraphSineWave/main.swift:
--------------------------------------------------------------------------------
//
// main.swift
// CH07_AUGraphSineWave
//
// Created by Douglas Adams on 7/13/16.
//

import AudioToolbox

//--------------------------------------------------------------------------------------------------
// MARK: Struct definition

struct SineWavePlayer
{
    var outputUnit: AudioUnit?          // pointer to a ComponentInstanceRecord
    var startingPhase: Double = 0.0     // starting waveform phase
}

//--------------------------------------------------------------------------------------------------
// MARK: Supporting methods

// AURenderCallback function
//
// must have the following signature:
// @convention(c) (UnsafeMutableRawPointer,                          // pointer to the SineWavePlayer struct
//                 UnsafeMutablePointer<AudioUnitRenderActionFlags>, // pointer to the AudioUnitRenderActionFlags
//                 UnsafePointer<AudioTimeStamp>,                    // pointer to an AudioTimeStamp
//                 UInt32,                                           // input Bus Number
//                 UInt32,                                           // number of frames required
//                 UnsafeMutablePointer<AudioBufferList>?) -> OSStatus // pointer to the AudioBufferList
//
// NOTE(review): the generic parameters above were lost during extraction and have been
// restored to match the AURenderCallback typealias — verify against the original repo.
//
func SineWaveRenderProc(userData: UnsafeMutableRawPointer,
                        actionFlags: UnsafeMutablePointer<AudioUnitRenderActionFlags>,
                        timeStamp: UnsafePointer<AudioTimeStamp>,
                        busNumber: UInt32,
                        numberOfFrames: UInt32,
                        bufferList: UnsafeMutablePointer<AudioBufferList>?) -> OSStatus
{

    let kSampleRate = 44100.0
    let kSineFrequency = 880.0

    // recover the player struct passed as the callback's refCon
    let player = userData.assumingMemoryBound(to: SineWavePlayer.self)

    // get the starting phase of the waveform
    var phase: Double = player.pointee.startingPhase

    // calculate the length of one cycle (one wavelength) in frames
    let cycleLength: Double = kSampleRate / kSineFrequency

    if let bufferList = bufferList {

        // FIX: the buffer pointers are loop-invariant, so construct them ONCE here
        // instead of re-deriving them on every frame iteration as the original did.
        // Assumes a single buffer holding the left channel followed by the right
        // channel (numberOfFrames samples each) — TODO confirm against the original.
        let channels = bufferList.pointee.mBuffers.mData!.assumingMemoryBound(to: Float32.self)
        let left = UnsafeMutableBufferPointer(start: channels, count: Int(numberOfFrames))
        let right = UnsafeMutableBufferPointer(start: channels.advanced(by: Int(numberOfFrames)), count: Int(numberOfFrames))

        for frame in 0..<Int(numberOfFrames) {

            // populate each channel with the same data
            left[frame] = Float32(sin(2 * M_PI * (phase / cycleLength)))
            right[frame] = left[frame]

            // increment the current frame number
            phase += 1.0

            // the phase repeats going from zero through the cycleLength over and over
            if phase > cycleLength { phase -= cycleLength }
        }
    }

    // save the current phase as the starting phase for the next iteration
    player.pointee.startingPhase = phase

    return noErr
}
//
// Find the default output AudioUnit, attach the sine-wave render callback to its
// input scope, and initialize it.  Exits (via Utility.check) on any Core Audio error.
//
func CreateAndConnectOutputUnit (player: UnsafeMutablePointer<SineWavePlayer>) {

    // 10.6 and later: generate description that will match our output device (speakers)
    var outputCd = AudioComponentDescription()
    outputCd.componentType = kAudioUnitType_Output
    outputCd.componentSubType = kAudioUnitSubType_DefaultOutput
    outputCd.componentManufacturer = kAudioUnitManufacturer_Apple

    guard let component = AudioComponentFindNext(nil, &outputCd) else {
        Swift.print("can't get output unit")
        exit(-1)
    }

    Utility.check(error: AudioComponentInstanceNew(component,
                                                   &player.pointee.outputUnit),
                  operation: "Couldn't open component for outputUnit")

    // register render callback
    var renderCallback = AURenderCallbackStruct()
    renderCallback.inputProc = SineWaveRenderProc
    renderCallback.inputProcRefCon = UnsafeMutableRawPointer(player)
    Utility.check(error: AudioUnitSetProperty(player.pointee.outputUnit!,
                                              kAudioUnitProperty_SetRenderCallback,
                                              kAudioUnitScope_Input,
                                              0,
                                              &renderCallback,
                                              UInt32(sizeof(AURenderCallbackStruct.self))),
                  operation: "AudioUnitSetProperty failed")

    // initialize unit
    Utility.check(error: AudioUnitInitialize(player.pointee.outputUnit!),
                  operation: "Couldn't initialize output unit")
}

//--------------------------------------------------------------------------------------------------
// MARK: Main

var player = SineWavePlayer()

// set up unit and callback
CreateAndConnectOutputUnit(player: &player)

// start playing
Utility.check(error: AudioOutputUnitStart(player.outputUnit!), operation: "Couldn't start output unit")

Swift.print("playing\n")

// play for 5 seconds
sleep(5)

// cleanup
AudioOutputUnitStop(player.outputUnit!)
AudioUnitUninitialize(player.outputUnit!)
AudioComponentInstanceDispose(player.outputUnit!)

exit(0)
--------------------------------------------------------------------------------
/CH06_ExtAudioFileConverter/main.swift:
--------------------------------------------------------------------------------
//
// main.swift
// CH06_ExtAudioFileConverter
//
// Created by Douglas Adams on 7/15/16.
//

import AudioToolbox

//--------------------------------------------------------------------------------------------------
// MARK: Struct definition

struct AudioConverterSettings
{
    var outputFormat = AudioStreamBasicDescription() // output file's data stream description
    var inputFile: ExtAudioFileRef?                  // reference to your input file
    var outputFile: AudioFileID?
// reference to your output file
}

//--------------------------------------------------------------------------------------------------
// MARK: Supporting methods

//
// Pull converted LPCM frames from the ExtAudioFile (which converts on read) and
// write them as packets to the output AudioFileID until the input is exhausted.
// Exits (via Utility.check) on any Core Audio error.
//
func Convert(mySettings: UnsafeMutablePointer<AudioConverterSettings>) {

    let outputBufferSize: UInt32 = 32 * 1024 // 32 KB is a good starting point
    // NOTE(review): assumes outputFormat.mBytesPerPacket is non-zero (it is set to 4
    // by the caller below) — a zero value would trap on the division here.
    let sizePerPacket: UInt32 = mySettings.pointee.outputFormat.mBytesPerPacket
    let packetsPerBuffer: UInt32 = outputBufferSize / sizePerPacket

    // allocate destination buffer
    let outputBuffer = malloc(sizeof(UInt8.self) * Int(outputBufferSize))

    // FIX: the original returned from inside the loop without releasing outputBuffer,
    // leaking it; defer guarantees the free on every exit path.
    defer { free(outputBuffer) }

    var outputFilePacketPosition: UInt32 = 0 // in bytes
    while true {

        // wrap the destination buffer in an AudioBufferList
        var convertedData = AudioBufferList()
        convertedData.mNumberBuffers = 1
        convertedData.mBuffers.mNumberChannels = mySettings.pointee.outputFormat.mChannelsPerFrame
        convertedData.mBuffers.mDataByteSize = outputBufferSize
        convertedData.mBuffers.mData = outputBuffer

        var frameCount: UInt32 = packetsPerBuffer

        // read (and implicitly convert) from the ext audio file
        Utility.check(error: ExtAudioFileRead(mySettings.pointee.inputFile!,
                                              &frameCount,
                                              &convertedData),
                      operation: "Couldn't read from input file")

        // a zero read means end-of-file
        if frameCount == 0 {
            Swift.print("done reading from file")
            return
        }

        // write the converted data to the output file
        Utility.check(error: AudioFileWritePackets(mySettings.pointee.outputFile!,
                                                   false,
                                                   frameCount,
                                                   nil,
                                                   Int64(outputFilePacketPosition / mySettings.pointee.outputFormat.mBytesPerPacket),
                                                   &frameCount,
                                                   convertedData.mBuffers.mData!),
                      operation: "Couldn't write packets to file")

        // advance the output file write location
        outputFilePacketPosition += (frameCount * mySettings.pointee.outputFormat.mBytesPerPacket)
    }
}

//--------------------------------------------------------------------------------------------------
// MARK: Properties

let kInputFileLocation = CFStringCreateWithCString(kCFAllocatorDefault, "/Users/Doug/x.mp3", CFStringBuiltInEncodings.UTF8.rawValue)
let kOutputFileLocation = CFStringCreateWithCString(kCFAllocatorDefault, "/Users/Doug/x.aif", CFStringBuiltInEncodings.UTF8.rawValue)

//--------------------------------------------------------------------------------------------------
// MARK: Main

var audioConverterSettings = AudioConverterSettings()

// open the input with ExtAudioFile
let inputFileURL = CFURLCreateWithFileSystemPath(kCFAllocatorDefault, kInputFileLocation, .cfurlposixPathStyle, false)
Utility.check(error: ExtAudioFileOpenURL(inputFileURL!,
                                         &audioConverterSettings.inputFile),
              operation: "ExtAudioFileOpenURL failed")

// define the output format. AudioConverter requires that one of the data formats be LPCM
audioConverterSettings.outputFormat.mSampleRate = 44100.0
audioConverterSettings.outputFormat.mFormatID = kAudioFormatLinearPCM
audioConverterSettings.outputFormat.mFormatFlags = kAudioFormatFlagIsBigEndian | kAudioFormatFlagIsSignedInteger | kAudioFormatFlagIsPacked
audioConverterSettings.outputFormat.mBytesPerPacket = 4
audioConverterSettings.outputFormat.mFramesPerPacket = 1
audioConverterSettings.outputFormat.mBytesPerFrame = 4
audioConverterSettings.outputFormat.mChannelsPerFrame = 2
audioConverterSettings.outputFormat.mBitsPerChannel = 16

// create output file
let outputFileURL = CFURLCreateWithFileSystemPath(kCFAllocatorDefault, kOutputFileLocation, .cfurlposixPathStyle, false)!
Utility.check(error: AudioFileCreateWithURL(outputFileURL,
                                            kAudioFileAIFFType,
                                            &audioConverterSettings.outputFormat,
                                            .eraseFile,
                                            &audioConverterSettings.outputFile),
              operation: "AudioFileCreateWithURL failed")

// set the PCM format as the client format on the input ext audio file
Utility.check(error: ExtAudioFileSetProperty(audioConverterSettings.inputFile!,
                                             kExtAudioFileProperty_ClientDataFormat,
                                             UInt32(sizeof (AudioStreamBasicDescription.self)),
                                             &audioConverterSettings.outputFormat),
              operation: "Couldn't set client data format on input ext file")

Swift.print("Converting...\n")

Convert(mySettings: &audioConverterSettings)

// cleanup
// FIX: inputFile is an ExtAudioFileRef, not an AudioFileID — passing it to
// AudioFileClose (as the original did) is undefined behavior.  ExtAudioFileDispose
// alone closes the underlying file for an ExtAudioFile opened via ExtAudioFileOpenURL.
ExtAudioFileDispose(audioConverterSettings.inputFile!)
AudioFileClose(audioConverterSettings.outputFile!)

exit(0)
--------------------------------------------------------------------------------
/CH09_OpenALOrbitLoop/main.swift:
--------------------------------------------------------------------------------
//
// main.swift
// CH09_OpenALOrbitLoop
//
// Created by Douglas Adams on 7/17/16.
6 | // 7 | 8 | import AudioToolbox 9 | import OpenAL 10 | 11 | //-------------------------------------------------------------------------------------------------- 12 | // MARK: Constants 13 | 14 | let loopPath = CFStringCreateWithCString(kCFAllocatorDefault, 15 | "/Library/Audio/Apple Loops/Apple/iLife Sound Effects/Transportation/Bicycle Coasting.caf", 16 | CFStringBuiltInEncodings.UTF8.rawValue) 17 | 18 | let kOrbitSpeed: Double = 1 // speed in seconds 19 | let kRunTime = 20.0 // run time in seconds 20 | 21 | //-------------------------------------------------------------------------------------------------- 22 | // MARK: Struct definition 23 | 24 | struct MyLoopPlayer { 25 | var dataFormat = AudioStreamBasicDescription() // loop AudioStreamBasicDescription 26 | var sampleBuffer: UnsafeMutablePointer? // pointer to the Sample buffer 27 | var bufferSizeBytes: UInt32 = 0 // buffer size in bytes 28 | var sources = [ALuint](repeating: 0, count: 1) // OpenAL source handles 29 | } 30 | 31 | //-------------------------------------------------------------------------------------------------- 32 | // MARK: Supporting methods 33 | 34 | // 35 | // calculate and set a new player position 36 | // 37 | func updateSourceLocation(player: UnsafeMutablePointer) { 38 | 39 | let theta: Double = fmod(CFAbsoluteTimeGetCurrent() * kOrbitSpeed, M_PI * 2) 40 | let x = ALfloat(3.0 * cos(theta)) 41 | let y = ALfloat(0.5 * sin (theta)) 42 | let z = ALfloat(1.0 * sin (theta)) 43 | 44 | Swift.print("x = \(x), y = \(y), z = \(z)\n") 45 | 46 | alSource3f(player.pointee.sources[0], AL_POSITION, x, y, z) 47 | } 48 | // 49 | // use ExtAudioFile to load a loop into the Player's buffer 50 | // 51 | func loadLoopIntoBuffer(player: UnsafeMutablePointer) -> OSStatus { 52 | 53 | let loopFileURL: CFURL = CFURLCreateWithFileSystemPath(kCFAllocatorDefault, loopPath, .cfurlposixPathStyle, false) 54 | 55 | // describe the client format - AL needs mono 56 | player.pointee.dataFormat.mFormatID = 
kAudioFormatLinearPCM 57 | player.pointee.dataFormat.mFormatFlags = kAudioFormatFlagIsSignedInteger | kAudioFormatFlagIsPacked 58 | player.pointee.dataFormat.mSampleRate = 44100.0 59 | player.pointee.dataFormat.mChannelsPerFrame = 1 60 | player.pointee.dataFormat.mFramesPerPacket = 1 61 | player.pointee.dataFormat.mBitsPerChannel = 16 62 | player.pointee.dataFormat.mBytesPerFrame = 2 63 | player.pointee.dataFormat.mBytesPerPacket = 2 64 | 65 | var extAudioFile: ExtAudioFileRef? 66 | Utility.check(error: ExtAudioFileOpenURL(loopFileURL, 67 | &extAudioFile), 68 | operation: "Couldn't open ExtAudioFile for reading") 69 | 70 | // tell extAudioFile about our format 71 | Utility.check(error: ExtAudioFileSetProperty(extAudioFile!, 72 | kExtAudioFileProperty_ClientDataFormat, 73 | UInt32(sizeof(AudioStreamBasicDescription.self)), 74 | &player.pointee.dataFormat), 75 | operation: "Couldn't set client format on ExtAudioFile") 76 | 77 | // figure out how big a buffer we need 78 | var fileLengthFrames: Int64 = 0 79 | var propSize = UInt32(sizeof(Int64.self)) 80 | ExtAudioFileGetProperty(extAudioFile!, 81 | kExtAudioFileProperty_FileLengthFrames, 82 | &propSize, 83 | &fileLengthFrames) 84 | 85 | Swift.print("plan on reading \(fileLengthFrames) frames\n") 86 | 87 | player.pointee.bufferSizeBytes = UInt32(fileLengthFrames) * player.pointee.dataFormat.mBytesPerFrame 88 | 89 | // allocate sample buffer 90 | player.pointee.sampleBuffer = UnsafeMutablePointer(malloc(sizeof(UInt16.self) * Int(player.pointee.bufferSizeBytes))) // 4/18/11 - fix 1 91 | 92 | var bufferList = AudioBufferList() 93 | bufferList.mNumberBuffers = 1 94 | bufferList.mBuffers.mNumberChannels = 1 95 | bufferList.mBuffers.mDataByteSize = player.pointee.bufferSizeBytes 96 | bufferList.mBuffers.mData = UnsafeMutablePointer(player.pointee.sampleBuffer) 97 | 98 | Swift.print("created AudioBufferList\n") 99 | 100 | // loop reading into the ABL until buffer is full 101 | var totalFramesRead: UInt32 = 0 102 | repeat { 103 
| var framesRead: UInt32 = UInt32(fileLengthFrames) - totalFramesRead 104 | bufferList.mBuffers.mData = UnsafeMutablePointer(player.pointee.sampleBuffer?.advanced(by: Int(totalFramesRead) * sizeof(UInt16.self))) 105 | Utility.check(error: ExtAudioFileRead(extAudioFile!, 106 | &framesRead, 107 | &bufferList), 108 | operation: "ExtAudioFileRead failed") 109 | 110 | totalFramesRead += framesRead 111 | 112 | Swift.print("read \(framesRead) frames\n") 113 | 114 | } while (totalFramesRead < UInt32(fileLengthFrames)) 115 | 116 | return noErr 117 | } 118 | 119 | //-------------------------------------------------------------------------------------------------- 120 | // MARK: Main 121 | 122 | // create the player 123 | var player = MyLoopPlayer() 124 | 125 | // convert to an OpenAL-friendly format and read into memory 126 | Utility.check(error: loadLoopIntoBuffer(player: &player), 127 | operation: "Couldn't load loop into buffer") 128 | 129 | // set up OpenAL buffer 130 | var alDevice: OpaquePointer 131 | alDevice = alcOpenDevice(nil) 132 | Utility.checkAL(operation: "Couldn't open AL device") // default device 133 | 134 | var alContext: OpaquePointer 135 | var attrList: ALCint = 0 136 | alContext = alcCreateContext(alDevice, &attrList) 137 | Utility.checkAL(operation: "Couldn't open AL context") 138 | 139 | alcMakeContextCurrent(alContext) 140 | Utility.checkAL(operation: "Couldn't make AL context current") 141 | 142 | var buffers: ALuint = 0 // only one buffer 143 | alGenBuffers(1, &buffers) 144 | Utility.checkAL(operation: "Couldn't generate buffers") 145 | 146 | alBufferData(buffers, 147 | AL_FORMAT_MONO16, 148 | player.sampleBuffer, 149 | ALsizei(player.bufferSizeBytes), 150 | ALsizei(player.dataFormat.mSampleRate)) 151 | 152 | // AL copies the samples, so we can free them now 153 | free(player.sampleBuffer) 154 | 155 | // set up OpenAL source 156 | alGenSources(1, &player.sources) 157 | Utility.checkAL(operation: "Couldn't generate sources") 158 | 159 | // set the 
source to Looping mode 160 | alSourcei(player.sources[0], AL_LOOPING, AL_TRUE) 161 | Utility.checkAL(operation: "Couldn't set source looping property") 162 | 163 | // set the gain 164 | alSourcef(player.sources[0], AL_GAIN, ALfloat(AL_MAX_GAIN)) 165 | Utility.checkAL(operation: "Couldn't set source gain") 166 | 167 | // set the initial sound position 168 | updateSourceLocation(player: &player) 169 | Utility.checkAL(operation: "Couldn't set initial source position") 170 | 171 | // connect buffer to source 172 | alSourcei(player.sources[0], AL_BUFFER, ALint(buffers)) 173 | Utility.checkAL(operation: "Couldn't connect buffer to source") 174 | 175 | // set up listener 176 | alListener3f (AL_POSITION, 0.0, 0.0, 0.0) 177 | Utility.checkAL(operation: "Couldn't set listner position") 178 | 179 | // ALfloat listenerOrientation[6]; // 3 vectors: forward x,y,z components, then up x,y,z 180 | // listenerOrientation[2] = -1.0; 181 | // listenerOrientation[0] = listenerOrientation [1] = 0.0; 182 | // listenerOrientation[3] = listenerOrientation [4] = listenerOrientation[5] = 0.0; 183 | // alListenerfv (AL_ORIENTATION, listenerOrientation); 184 | 185 | // start playing 186 | // alSourcePlayv (1, player.sources); 187 | alSourcePlay(player.sources[0]) 188 | Utility.checkAL(operation: "Couldn't play") 189 | 190 | // and wait 191 | Swift.print("Playing...\n") 192 | 193 | // start now and loop for kRunTime seconds 194 | var startTime: time_t = time(nil) 195 | repeat 196 | { 197 | // get next theta 198 | updateSourceLocation(player: &player) 199 | Utility.checkAL(operation: "Couldn't set looping source position") 200 | 201 | // pause 202 | CFRunLoopRunInMode(CFRunLoopMode.defaultMode, 0.1, false) 203 | 204 | } while (difftime(time(nil), startTime) < kRunTime) 205 | 206 | // cleanup 207 | alSourceStop(player.sources[0]) 208 | alDeleteSources(1, player.sources) 209 | alDeleteBuffers(1, &buffers) 210 | alcDestroyContext(alContext) 211 | alcCloseDevice(alDevice) 212 | 213 | 
Swift.print("Bottom of main\n") 214 | -------------------------------------------------------------------------------- /CH07_AUGraphSpeechSynthesis/main.swift: -------------------------------------------------------------------------------- 1 | // 2 | // main.swift 3 | // CH07_AUGraphSpeechSynthesis 4 | // 5 | // Created by Douglas Adams on 7/14/16. 6 | // 7 | 8 | import AudioToolbox 9 | 10 | //-------------------------------------------------------------------------------------------------- 11 | // MARK: Struct definition 12 | 13 | struct MyAUGraphPlayer 14 | { 15 | // var streamFormat = AudioStreamBasicDescription() // ASBD to use in the graph 16 | var graph: AUGraph? // Opaque pointer to the AUGraph 17 | var speechAU: AudioUnit? // pointer to a ComponentInstanceRecord 18 | } 19 | 20 | //-------------------------------------------------------------------------------------------------- 21 | // MARK: Supporting methods 22 | 23 | // 24 | // 25 | // 26 | func CreateMyAUGraph(player: UnsafeMutablePointer) { 27 | 28 | // create a new AUGraph 29 | Utility.check(error: NewAUGraph(&player.pointee.graph), 30 | operation: "NewAUGraph failed") 31 | 32 | // generate description that will match our output device (speakers) 33 | var outputCd = AudioComponentDescription() 34 | outputCd.componentType = kAudioUnitType_Output 35 | outputCd.componentSubType = kAudioUnitSubType_DefaultOutput 36 | outputCd.componentManufacturer = kAudioUnitManufacturer_Apple 37 | 38 | // adds a node with above description to the graph 39 | var outputNode = AUNode() 40 | Utility.check(error: AUGraphAddNode(player.pointee.graph!, 41 | &outputCd, 42 | &outputNode), 43 | operation: "AUGraphAddNode[kAudioUnitSubType_DefaultOutput] failed") 44 | 45 | // generate description that will match a generator AU of type: speech synthesizer 46 | var speechCd = AudioComponentDescription() 47 | speechCd.componentType = kAudioUnitType_Generator 48 | speechCd.componentSubType = kAudioUnitSubType_SpeechSynthesis 49 | 
speechCd.componentManufacturer = kAudioUnitManufacturer_Apple 50 | 51 | // adds a node with above description to the graph 52 | var speechNode = AUNode() 53 | Utility.check(error: AUGraphAddNode(player.pointee.graph!, 54 | &speechCd, 55 | &speechNode), 56 | operation: "AUGraphAddNode[kAudioUnitSubType_SpeechSynthesis] failed") 57 | 58 | // opening the graph opens all contained audio units but does not allocate any resources yet 59 | Utility.check(error: AUGraphOpen(player.pointee.graph!), 60 | operation: "AUGraphOpen failed") 61 | 62 | // get the reference to the AudioUnit object for the speech synthesis graph node 63 | Utility.check(error: AUGraphNodeInfo(player.pointee.graph!, 64 | speechNode, 65 | nil, 66 | &player.pointee.speechAU), 67 | operation: "AUGraphNodeInfo failed") 68 | 69 | // debug - get the asbd 70 | // var propSize = UInt32(sizeof(AudioStreamBasicDescription.self)) 71 | // Utility.check(error: AudioUnitGetProperty(player.pointee.speechAU!, 72 | // kAudioUnitProperty_StreamFormat, 73 | // kAudioUnitScope_Output, 74 | // 0, 75 | // &player.pointee.streamFormat, 76 | // &propSize), 77 | // operation: "Couldn't get ASBD") 78 | 79 | // 80 | // FUN! 
re-route the speech thru a reverb effect before sending to speakers 81 | // 82 | // generate description that will match out reverb effect 83 | var reverbCd = AudioComponentDescription() 84 | reverbCd.componentType = kAudioUnitType_Effect 85 | reverbCd.componentSubType = kAudioUnitSubType_MatrixReverb 86 | reverbCd.componentManufacturer = kAudioUnitManufacturer_Apple 87 | 88 | // adds a node with above description to the graph 89 | var reverbNode = AUNode() 90 | Utility.check(error: AUGraphAddNode(player.pointee.graph!, 91 | &reverbCd, 92 | &reverbNode), 93 | operation: "AUGraphAddNode[kAudioUnitSubType_MatrixReverb] failed") 94 | 95 | // connect the output source of the reverb AU to the input source of the output node 96 | Utility.check(error: AUGraphConnectNodeInput(player.pointee.graph!, 97 | reverbNode, 98 | 0, 99 | outputNode, 100 | 0), 101 | operation: "AUGraphConnectNodeInput, reverb->output to output->input") 102 | 103 | // connect the output source of the speech synthesizer AU to the input source of the reverb node 104 | Utility.check(error: AUGraphConnectNodeInput(player.pointee.graph!, 105 | speechNode, 106 | 0, 107 | reverbNode, 108 | 0), 109 | operation: "AUGraphConnectNodeInput, synth->output to reverb->input") 110 | 111 | // get the reference to the AudioUnit object for the reverb graph node 112 | var reverbUnit: AudioUnit? 
= nil 113 | Utility.check(error: AUGraphNodeInfo(player.pointee.graph!, 114 | reverbNode, 115 | nil, 116 | &reverbUnit), 117 | operation: "AUGraphNodeInfo failed") 118 | 119 | /* 120 | enum { 121 | reverbRoomType_SmallRoom = 0, 122 | reverbRoomType_MediumRoom = 1, 123 | reverbRoomType_LargeRoom = 2, 124 | reverbRoomType_MediumHall = 3, 125 | reverbRoomType_LargeHall = 4, 126 | reverbRoomType_Plate = 5, 127 | reverbRoomType_MediumChamber = 6, 128 | reverbRoomType_LargeChamber = 7, 129 | reverbRoomType_Cathedral = 8, 130 | reverbRoomType_LargeRoom2 = 9, 131 | reverbRoomType_MediumHall2 = 10, 132 | reverbRoomType_MediumHall3 = 11, 133 | reverbRoomType_LargeHall2 = 12 134 | } 135 | 136 | */ 137 | 138 | // now initialize the graph (causes resources to be allocated) 139 | Utility.check(error: AUGraphInitialize(player.pointee.graph!), 140 | operation: "AUGraphInitialize failed") 141 | 142 | 143 | // set the reverb preset for room size 144 | var roomType: AUReverbRoomType = .reverbRoomType_SmallRoom 145 | // var roomType: AUReverbRoomType = .reverbRoomType_MediumRoom 146 | // var roomType: AUReverbRoomType = .reverbRoomType_LargeHall 147 | // var roomType: AUReverbRoomType = .reverbRoomType_Cathedral 148 | 149 | Utility.check(error: AudioUnitSetProperty(reverbUnit!, 150 | kAudioUnitProperty_ReverbRoomType, 151 | kAudioUnitScope_Global, 152 | 0, 153 | &roomType, 154 | UInt32(sizeof(UInt32.self))), 155 | operation: "AudioUnitSetProperty[kAudioUnitProperty_ReverbRoomType] failed") 156 | 157 | CAShow(UnsafeMutablePointer(player.pointee.graph!)) 158 | } 159 | // 160 | // 161 | // 162 | func PrepareSpeechAU(player: UnsafeMutablePointer) { 163 | var chan: SpeechChannel? 
= nil 164 | 165 | var propsize = UInt32(sizeof(SpeechChannel.self)) 166 | Utility.check(error: AudioUnitGetProperty(player.pointee.speechAU!, 167 | kAudioUnitProperty_SpeechChannel, 168 | kAudioUnitScope_Global, 169 | 0, 170 | &chan, 171 | &propsize), 172 | operation: "AudioUnitGetProperty[kAudioUnitProperty_SpeechChannel] failed") 173 | 174 | 175 | let myString = CFStringCreateWithCString(kCFAllocatorDefault, "hello world", CFStringBuiltInEncodings.UTF8.rawValue)! 176 | SpeakCFString(chan!, myString, nil) 177 | } 178 | 179 | //-------------------------------------------------------------------------------------------------- 180 | // MARK: Main 181 | 182 | var player = MyAUGraphPlayer() 183 | 184 | // build a basic speech->speakers graph 185 | CreateMyAUGraph(player: &player) 186 | 187 | // configure the speech synthesizer 188 | PrepareSpeechAU(player: &player) 189 | 190 | // start playing 191 | Utility.check(error: AUGraphStart(player.graph!), 192 | operation: "AUGraphStart failed") 193 | 194 | // sleep a while so the speech can play out 195 | usleep(10 * 1000 * 1000) 196 | 197 | // cleanup 198 | AUGraphStop (player.graph!) 199 | AUGraphUninitialize (player.graph!) 200 | AUGraphClose(player.graph!) 201 | DisposeAUGraph(player.graph!) 202 | 203 | exit(0) 204 | 205 | -------------------------------------------------------------------------------- /CH07_AUGraphPlayer/main.swift: -------------------------------------------------------------------------------- 1 | // 2 | // main.swift 3 | // CH07_AUGraphPlayer 4 | // 5 | // Created by Douglas Adams on 7/14/16. 
//

import AudioToolbox


//
// NOTE: This is needed because (apparently) the initializer for ScheduledAudioFileRegion
// is currently missing in Swift 3, so a hand-written memberwise initializer is supplied.
//
public extension ScheduledAudioFileRegion {

    init(mTimeStamp: AudioTimeStamp, mCompletionProc: ScheduledAudioFileRegionCompletionProc?, mCompletionProcUserData: UnsafeMutablePointer?, mAudioFile: OpaquePointer, mLoopCount: UInt32, mStartFrame: Int64, mFramesToPlay: UInt32) {

        // the file being scheduled and its timing
        self.mAudioFile = mAudioFile
        self.mTimeStamp = mTimeStamp

        // region extent and looping behavior
        self.mStartFrame = mStartFrame
        self.mFramesToPlay = mFramesToPlay
        self.mLoopCount = mLoopCount

        // optional completion notification
        self.mCompletionProc = mCompletionProc
        self.mCompletionProcUserData = mCompletionProcUserData
    }
}

//--------------------------------------------------------------------------------------------------
// MARK: Struct definition

struct AUGraphPlayer
{
    var inputFormat = AudioStreamBasicDescription() // input file's data stream description
    var inputFile: AudioFileID?                     // Opaque pointer to the input file's AudioFileID
    var graph: AUGraph?                             // Opaque pointer to the AUGraph
    var fileAU: AudioUnit?
// pointer to a ComponentInstanceRecord 37 | } 38 | 39 | //-------------------------------------------------------------------------------------------------- 40 | // MARK: Supporting methods 41 | 42 | // 43 | // create and setup the AUGraph 44 | // 45 | func CreateAUGraph(player: UnsafeMutablePointer) 46 | { 47 | // create a new AUGraph 48 | Utility.check( error: NewAUGraph(&player.pointee.graph), 49 | operation: "NewAUGraph failed") 50 | 51 | // generate description that will match out output device (speakers) 52 | var outputCd = AudioComponentDescription() 53 | outputCd.componentType = kAudioUnitType_Output 54 | outputCd.componentSubType = kAudioUnitSubType_DefaultOutput 55 | outputCd.componentManufacturer = kAudioUnitManufacturer_Apple 56 | 57 | // adds a node with above description to the graph 58 | var outputNode = AUNode() 59 | Utility.check( error: AUGraphAddNode(player.pointee.graph!, 60 | &outputCd, 61 | &outputNode), 62 | operation: "AUGraphAddNode[kAudioUnitSubType_DefaultOutput] failed") 63 | 64 | // generate description that will match a generator AU of type: audio file player 65 | var fileplayerCd = AudioComponentDescription() 66 | fileplayerCd.componentType = kAudioUnitType_Generator 67 | fileplayerCd.componentSubType = kAudioUnitSubType_AudioFilePlayer 68 | fileplayerCd.componentManufacturer = kAudioUnitManufacturer_Apple 69 | 70 | // adds a node with above description to the graph 71 | var fileNode = AUNode() 72 | Utility.check( error: AUGraphAddNode(player.pointee.graph!, 73 | &fileplayerCd, 74 | &fileNode), 75 | operation: "AUGraphAddNode[kAudioUnitSubType_AudioFilePlayer] failed") 76 | 77 | // opening the graph opens all contained audio units but does not allocate any resources yet 78 | Utility.check( error: AUGraphOpen(player.pointee.graph!), 79 | operation: "AUGraphOpen failed") 80 | 81 | // get the reference to the AudioUnit object for the file player graph node 82 | Utility.check( error: AUGraphNodeInfo(player.pointee.graph!, 83 | fileNode, 
84 | nil, 85 | &player.pointee.fileAU), 86 | operation: "AUGraphNodeInfo failed") 87 | 88 | // connect the output source of the file player AU to the input source of the output node 89 | Utility.check( error: AUGraphConnectNodeInput(player.pointee.graph!, 90 | fileNode, 91 | 0, 92 | outputNode, 93 | 0), 94 | operation: "AUGraphConnectNodeInput") 95 | 96 | // now initialize the graph (causes resources to be allocated) 97 | Utility.check( error: AUGraphInitialize(player.pointee.graph!), 98 | operation: "AUGraphInitialize failed") 99 | } 100 | // 101 | // configure the Player 102 | // 103 | func PrepareFileAU(player: UnsafeMutablePointer) -> Double 104 | { 105 | 106 | // tell the file player unit to load the file we want to play 107 | Utility.check( error: AudioUnitSetProperty(player.pointee.fileAU!, 108 | kAudioUnitProperty_ScheduledFileIDs, 109 | kAudioUnitScope_Global, 110 | 0, 111 | &player.pointee.inputFile, 112 | UInt32(sizeof(AudioFileID.self))), 113 | operation: "AudioUnitSetProperty[kAudioUnitProperty_ScheduledFileIDs] failed") 114 | 115 | var nPackets: UInt64 = 0 116 | var propsize = UInt32(sizeof(UInt64.self)) 117 | Utility.check( error: AudioFileGetProperty(player.pointee.inputFile!, 118 | kAudioFilePropertyAudioDataPacketCount, 119 | &propsize, 120 | &nPackets), 121 | operation: "AudioFileGetProperty[kAudioFilePropertyAudioDataPacketCount] failed") 122 | 123 | 124 | 125 | // tell the file player AU to play the entire file 126 | let timeStamp = AudioTimeStamp(mSampleTime: 0, mHostTime: 0, mRateScalar: 0, mWordClockTime: 0, mSMPTETime: SMPTETime(), mFlags: .sampleTimeValid, mReserved: 0) 127 | var rgn = ScheduledAudioFileRegion(mTimeStamp: timeStamp, mCompletionProc: nil, mCompletionProcUserData: nil, mAudioFile: player.pointee.inputFile!, mLoopCount: 1, mStartFrame: 0, mFramesToPlay: UInt32(nPackets) * player.pointee.inputFormat.mFramesPerPacket) 128 | 129 | Utility.check( error: AudioUnitSetProperty(player.pointee.fileAU!, 130 | 
kAudioUnitProperty_ScheduledFileRegion, 131 | kAudioUnitScope_Global, 132 | 0, 133 | &rgn, 134 | UInt32(sizeof(ScheduledAudioFileRegion.self))), 135 | operation: "AudioUnitSetProperty[kAudioUnitProperty_ScheduledFileRegion] failed") 136 | 137 | // prime the file player AU with default values 138 | var defaultVal: UInt32 = 0 139 | Utility.check( error: AudioUnitSetProperty(player.pointee.fileAU!, 140 | kAudioUnitProperty_ScheduledFilePrime, 141 | kAudioUnitScope_Global, 142 | 0, 143 | &defaultVal, 144 | UInt32(sizeof(UInt32.self))), 145 | operation: "AudioUnitSetProperty[kAudioUnitProperty_ScheduledFilePrime] failed") 146 | 147 | // tell the file player AU when to start playing (-1 sample time means next render cycle) 148 | var startTime = AudioTimeStamp(mSampleTime: -1, mHostTime: 0, mRateScalar: 0, mWordClockTime: 0, mSMPTETime: SMPTETime(), mFlags: .sampleTimeValid, mReserved: 0) 149 | Utility.check( error: AudioUnitSetProperty(player.pointee.fileAU!, 150 | kAudioUnitProperty_ScheduleStartTimeStamp, 151 | kAudioUnitScope_Global, 152 | 0, 153 | &startTime, 154 | UInt32(sizeof(AudioTimeStamp.self))), 155 | operation: "AudioUnitSetProperty[kAudioUnitProperty_ScheduleStartTimeStamp]") 156 | 157 | // file duration 158 | return Double( UInt32(nPackets) * player.pointee.inputFormat.mFramesPerPacket) / player.pointee.inputFormat.mSampleRate 159 | } 160 | 161 | //-------------------------------------------------------------------------------------------------- 162 | // MARK: Properties 163 | 164 | let kInputFileLocation = CFStringCreateWithCString(kCFAllocatorDefault, "/Users/Doug/x.mp3", CFStringBuiltInEncodings.UTF8.rawValue) 165 | 166 | //-------------------------------------------------------------------------------------------------- 167 | // MARK: Main 168 | 169 | let inputFileURL: CFURL = CFURLCreateWithFileSystemPath(kCFAllocatorDefault, kInputFileLocation, .cfurlposixPathStyle, false) 170 | var player = AUGraphPlayer() 171 | 172 | // open the input audio file 173 
Utility.check(error: AudioFileOpenURL(inputFileURL,
                                      .readPermission,
                                      0,
                                      &player.inputFile),
              operation: "AudioFileOpenURL failed")

// get the audio data format from the file
var propSize = UInt32(sizeof(AudioStreamBasicDescription.self))
Utility.check(error: AudioFileGetProperty(player.inputFile!,
                                          kAudioFilePropertyDataFormat,
                                          &propSize,
                                          &player.inputFormat),
              operation: "couldn't get file's data format")

// build a basic fileplayer->speakers graph
CreateAUGraph(player: &player)

// configure the file player
var fileDuration: Float64 = PrepareFileAU(player: &player)

// start playing
Utility.check(error: AUGraphStart(player.graph!),
              operation: "AUGraphStart failed")

// sleep until the file is finished
usleep(useconds_t(fileDuration * 1000.0 * 1000.0))

// cleanup
AUGraphStop(player.graph!)
AUGraphUninitialize(player.graph!)
AUGraphClose(player.graph!)
AudioFileClose(player.inputFile!)

exit(0)

// ================================================================================================
// /CH04_Recorder/main.swift
// ================================================================================================
//
// main.swift
// CH04_Recorder
//
// Created by Douglas Adams on 6/30/16.
//

import AudioToolbox

//--------------------------------------------------------------------------------------------------
// MARK: Struct definition

struct Recorder {                                       // Struct to use in the Callback

    var recordFile: AudioFileID?                        // reference to the output file
    var recordPacket: Int64 = 0                         // current packet index in output file
    var running = false                                 // recording state
}

//--------------------------------------------------------------------------------------------------
// MARK: Supporting methods

//
// set the output sample rate to be the same as the default input Device
//
// NOTE(review): the generic parameter on outSampleRate was stripped by the scrape
// this file came from; reconstructed as Float64 (the type of ASBD.mSampleRate) —
// confirm against the original source.
//
func setOutputSampleRate(_ outSampleRate: UnsafeMutablePointer<Float64>) -> OSStatus {
    var error: OSStatus = noErr
    var deviceID: AudioDeviceID = 0

    var propertyAddress: AudioObjectPropertyAddress = AudioObjectPropertyAddress()
    var propertySize: UInt32 = 0

    // get the default input device
    propertyAddress.mSelector = kAudioHardwarePropertyDefaultInputDevice
    propertyAddress.mScope = kAudioObjectPropertyScopeGlobal
    propertyAddress.mElement = 0
    propertySize = 4                                    // sizeof(AudioDeviceID)

    error = AudioObjectGetPropertyData(AudioObjectID(kAudioObjectSystemObject), &propertyAddress, 0, nil, &propertySize, &deviceID)

    if error != noErr { return error }

    // get its sample rate
    propertyAddress.mSelector = kAudioDevicePropertyNominalSampleRate
    propertyAddress.mScope = kAudioObjectPropertyScopeGlobal
    propertyAddress.mElement = 0
    propertySize = 8                                    // sizeof(Float64)

    error = AudioObjectGetPropertyData(deviceID, &propertyAddress, 0, nil, &propertySize, outSampleRate)

    return error
}
//
// Write the contents of a buffer to a file
//
// AudioQueueInputCallback function
//
// must have the following signature:
// @convention(c) (UnsafeMutablePointer<Void>?,             // Void pointer to data
//                 AudioQueueRef,                           // reference to the queue
//                 AudioQueueBufferRef,                     // reference to the buffer (in the queue)
//                 UnsafePointer<AudioTimeStamp>,           // pointer to an AudioTimeStamp
//                 UInt32,                                  // number of packets to be written
//                 UnsafePointer<AudioStreamPacketDescription>?) -> Swift.Void
//
func inputCallback(userData: UnsafeMutablePointer<Void>?,
                   queue: AudioQueueRef,
                   bufferToEmpty: UnsafeMutablePointer<AudioQueueBuffer>,
                   startTime: UnsafePointer<AudioTimeStamp>,
                   numPackets: UInt32,
                   packetDesc: UnsafePointer<AudioStreamPacketDescription>?) {

    // cast the inUserData Void pointer to a Recorder struct pointer
    let recorder = UnsafeMutablePointer<Recorder>(userData)

    // if inNumPackets is greater then zero, our buffer contains audio data
    // in the format we specified (AAC)
    if numPackets > 0 {

        // write packets to file
        var ioNumPackets = numPackets
        Utility.check(error: AudioFileWritePackets(recorder!.pointee.recordFile!,           // AudioFileID
                                                   false,                                   // use cache?
                                                   bufferToEmpty.pointee.mAudioDataByteSize, // number of bytes to be written
                                                   packetDesc,                              // pointer to an array of PacketDescriptors
                                                   recorder!.pointee.recordPacket,          // index of first packet to be written
                                                   &ioNumPackets,                           // number of packets to be written
                                                   bufferToEmpty.pointee.mAudioData),       // buffer of audio to be written
                      operation: "AudioFileWritePackets failed")

        // increment packet index
        // FIX: recordPacket is Int64 — the original added Int(numPackets),
        // which does not match the field's type; widen to Int64 instead
        recorder!.pointee.recordPacket = recorder!.pointee.recordPacket + Int64(numPackets)
    }

    // if we're not stopping, re-enqueue the buffer so that it gets filled again
    if recorder!.pointee.running {
        Utility.check(error: AudioQueueEnqueueBuffer(queue,                                 // queue
                                                     bufferToEmpty,                         // buffer to enqueue
                                                     0,                                     // always 0 for recording
                                                     nil),                                  // always nil for recording
                      operation: "AudioQueueEnqueueBuffer failed")
    }
}

//--------------------------------------------------------------------------------------------------
// MARK: Properties

var recorder = Recorder()                               // Callback struct
var recordFormat = AudioStreamBasicDescription()        // ASBD
var error: OSStatus = noErr                             // error code

let kNumberRecordBuffers = 3                            // use 3 buffers

//--------------------------------------------------------------------------------------------------
// MARK: Main

// Configure the output data format to be AAC
recordFormat.mFormatID = kAudioFormatMPEG4AAC
recordFormat.mChannelsPerFrame = 2

// set the output sample rate to be the same as the default input Device
Utility.check(error: setOutputSampleRate(&recordFormat.mSampleRate),
              operation: "Unable to get Sample Rate")

// ProTip: Use the AudioFormat API to trivialize ASBD creation.
// input: at least the mFormatID, however, at this point we already have
// mSampleRate, mFormatID, and mChannelsPerFrame
// output: the remainder of the ASBD will be filled out as much as possible
// given the information known about the format
var propSizeASBD: UInt32 = UInt32(sizeof(AudioStreamBasicDescription.self))
Utility.check(error: AudioFormatGetProperty(kAudioFormatProperty_FormatInfo,
                                            0,
                                            nil,
                                            &propSizeASBD,
                                            &recordFormat),
              operation: "AudioFormatGetProperty failed")

// create an input (recording) queue
var queue: AudioQueueRef?
Utility.check(error: AudioQueueNewInput(&recordFormat,  // ASBD
                                        inputCallback,  // callback function
                                        &recorder,      // user data
                                        nil,            // run loop
                                        nil,            // run loop mode
                                        0,              // flags (always 0)
                                        &queue),        // input queue
              operation: "AudioQueueNewInput failed")

// since the queue is now initialized, we ask its Audio Converter object
// for the ASBD it has configured itself with. The file may require a more
// specific stream description than was necessary to create the audio queue.
152 | // 153 | // for example: certain fields in an ASBD cannot possibly be known until it's 154 | // codec is instantiated (in this case, by the AudioQueue's Audio Converter object) 155 | var size: UInt32 = UInt32(sizeof(AudioStreamBasicDescription.self)) 156 | Utility.check(error: AudioQueueGetProperty(queue!, 157 | kAudioConverterCurrentOutputStreamDescription, 158 | &recordFormat, 159 | &size), 160 | operation: "couldn't get queue's format") 161 | 162 | // create the audio file 163 | guard let fileURL = CFURLCreateWithFileSystemPath(kCFAllocatorDefault, "./output.caf", .cfurlposixPathStyle, false) else { 164 | 165 | // unable to create file 166 | exit(-1) 167 | } 168 | Utility.check(error: AudioFileCreateWithURL(fileURL, // file URL 169 | kAudioFileCAFType, // type of file (CAF) 170 | &recordFormat, // pointer to an AudioStreamBasicDescription 171 | .eraseFile, // erase 172 | &recorder.recordFile), // AudioFileID 173 | operation: "AudioFileCreateWithURL failed") 174 | 175 | Swift.print("\(fileURL)") 176 | 177 | // many encoded formats require a 'magic cookie'. we set the cookie first 178 | // to give the file object as much info as we can about the data it will be receiving 179 | Utility.applyEncoderCookie(fromQueue: queue!, toFile: recorder.recordFile!) 180 | 181 | // allocate and enqueue buffers 182 | let bufferByteSize = Utility.bufferSizeFor(seconds: 0.5, usingFormat: recordFormat, andQueue: queue!) 183 | var bufferIndex = 0 184 | 185 | // for each buffer 186 | for bufferIndex in 0.. 
to stop:\n") 205 | 206 | // wait for a key to be pressed 207 | getchar() 208 | 209 | // end recording 210 | Swift.print("* recording done *\n") 211 | recorder.running = false 212 | 213 | // stop the Queue 214 | Utility.check(error: AudioQueueStop(queue!, true), 215 | operation: "AudioQueueStop failed") 216 | 217 | // a codec may update its magic cookie at the end of an encoding session 218 | // so reapply it to the file now 219 | Utility.applyEncoderCookie(fromQueue: queue!, toFile: recorder.recordFile!) 220 | 221 | // cleanup 222 | AudioQueueDispose(queue!, true) 223 | AudioFileClose(recorder.recordFile!) 224 | 225 | exit(0) 226 | -------------------------------------------------------------------------------- /CH--_OpenALSineWave/main.swift: -------------------------------------------------------------------------------- 1 | // 2 | // main.swift 3 | // CH--_OpenALSineWave 4 | // 5 | // Created by Douglas Adams on 7/26/16. 6 | // 7 | 8 | import AudioToolbox 9 | import OpenAL 10 | 11 | //-------------------------------------------------------------------------------------------------- 12 | // MARK: Constants 13 | 14 | let kBufferDuration: Double = 0.01 // duration in seconds of a buffer 15 | let kBufferCount = 8 // number of buffers 16 | let kRefreshInterval: CFTimeInterval = 1.0 // interval in seconds between checks for completion 17 | let kRunTime = 10.0 // program run time in seconds 18 | 19 | let kSampleRate: Double = 24_000.0 // sample rate 20 | let kSineFrequency: Double = 440.0 // sine wave frequency 21 | 22 | 23 | //-------------------------------------------------------------------------------------------------- 24 | // MARK: Struct definition 25 | 26 | struct MyStreamPlayer { 27 | var dataFormat = AudioStreamBasicDescription() // stream AudioStreamBasicDescription 28 | var bufferSizeBytes: UInt32 = 0 // buffer size in bytes 29 | var bufferList: AudioBufferList! 
30 | var sources = [ALuint](repeating: 0, count: 1) // OpenAL source handles 31 | var phase: Double = 0.0 // current phase of sine wave 32 | } 33 | 34 | //-------------------------------------------------------------------------------------------------- 35 | // MARK: Supporting methods 36 | 37 | // 38 | // fill an OpenAL buffer with Sine Wave data 39 | // 40 | func fillALBuffer (player: UnsafeMutablePointer, alBuffer: ALuint) { 41 | 42 | // get the starting phase of the waveform 43 | var phase: Double = player.pointee.phase 44 | 45 | // calculate the length of one cycle (one wavelength) 46 | let cycleLength: Double = kSampleRate / kSineFrequency 47 | 48 | for frame in 0..(player.pointee.bufferList.mBuffers.mData)! 52 | let left = UnsafeMutableBufferPointer(start: channels, count: Int(player.pointee.bufferSizeBytes) / sizeof(UInt16.self)) 53 | let right = UnsafeMutableBufferPointer(start: channels.advanced(by: 1), count: Int(player.pointee.bufferSizeBytes) / sizeof(UInt16.self)) 54 | 55 | // populate each channel with the same data 56 | left[frame * 2] = Int16( sin (2 * M_PI * (phase / cycleLength)) * Double(Int16.max)) 57 | right[frame * 2] = left[frame * 2] 58 | 59 | // increment the phase 60 | phase += 1.0 61 | 62 | // the phase repeats going from zero through the cycleLength over and over 63 | if phase > cycleLength { phase -= cycleLength } 64 | } 65 | 66 | // save the current phase as the starting phase for the next iteration 67 | player.pointee.phase = phase 68 | 69 | // copy from the AudioBufferList to the OpenAL buffer 70 | alBufferData(alBuffer, AL_FORMAT_STEREO16, player.pointee.bufferList.mBuffers.mData, ALsizei(player.pointee.bufferSizeBytes), ALsizei(player.pointee.dataFormat.mSampleRate)) 71 | } 72 | // 73 | // re-fill an OpenAL buffer 74 | // 75 | func refillALBuffers (player: UnsafeMutablePointer) { 76 | var processed: ALint = 0 77 | 78 | // get a count of "processed" OpenAL buffers 79 | alGetSourcei(player.pointee.sources[0], AL_BUFFERS_PROCESSED, 
&processed) 80 | Utility.checkAL(operation: "couldn't get al_buffers_processed") 81 | 82 | // Swift.print("processed = \(processed)") 83 | 84 | // re-fill & re-queue as many buffers as have been processed 85 | while (processed > 0) { 86 | var freeBuffer: ALuint = 0 87 | 88 | // get a free buffer (one that was processed) 89 | alSourceUnqueueBuffers(player.pointee.sources[0], 1, &freeBuffer) 90 | Utility.checkAL(operation: "couldn't unqueue buffer") 91 | 92 | // Swift.print("refilling buffer \(freeBuffer)\n") 93 | 94 | // fill the OpenAL buffer from the player buffer 95 | fillALBuffer(player: player, alBuffer: freeBuffer) 96 | 97 | // queue the buffer 98 | alSourceQueueBuffers(player.pointee.sources[0], 1, &freeBuffer) 99 | Utility.checkAL(operation: "couldn't queue refilled buffer") 100 | 101 | // Swift.print("re-queued buffer \(freeBuffer)\n") 102 | 103 | // decrement the number of processed buffers 104 | processed = processed - 1 105 | } 106 | 107 | } 108 | 109 | //-------------------------------------------------------------------------------------------------- 110 | // MARK: Main 111 | 112 | var bufferDataStaticProc: alBufferDataStaticProcPtr 113 | 114 | var sourceAddNotificationProc: alSourceAddNotificationProcPtr 115 | var sourceNotificationProc: @convention(c) (sid: ALuint, notificationID: ALuint, userData: UnsafeMutablePointer?) 
-> Swift.Void 116 | 117 | // create the player 118 | var player = MyStreamPlayer() 119 | 120 | // describe the client format - AL needs mono 121 | player.dataFormat.mFormatID = kAudioFormatLinearPCM // uncompressed PCM 122 | player.dataFormat.mFormatFlags = kAudioFormatFlagIsSignedInteger | kAudioFormatFlagIsPacked // signed integer & packed 123 | player.dataFormat.mSampleRate = kSampleRate // sample rate = 44,100 124 | player.dataFormat.mChannelsPerFrame = 2 // 2 channels 125 | player.dataFormat.mFramesPerPacket = 1 // 1 frame per packet 126 | player.dataFormat.mBitsPerChannel = 16 // 16 bit signed integer 127 | player.dataFormat.mBytesPerFrame = 4 // 2 bytes per frame 128 | player.dataFormat.mBytesPerPacket = 4 // 2 bytes per packet 129 | 130 | // calcuate the buffer needed (buffer duration * sample rate * bytes per frame = number of bytes) 131 | player.bufferSizeBytes = UInt32(kBufferDuration * player.dataFormat.mSampleRate * Double(player.dataFormat.mBytesPerFrame)) 132 | 133 | // create & setup an AudioBufferList for the samples 134 | var audioBuffer = AudioBuffer(mNumberChannels: 2, mDataByteSize: player.bufferSizeBytes, mData: nil) 135 | player.bufferList = AudioBufferList(mNumberBuffers: 1, mBuffers: audioBuffer) 136 | 137 | // allocate a buffer for the samples 138 | let sampleBuffer = UnsafeMutablePointer(malloc( Int(player.bufferSizeBytes) )) 139 | 140 | // use the sample buffer as the AudioBufferList's buffer 141 | player.bufferList.mBuffers.mData = UnsafeMutablePointer(sampleBuffer) 142 | 143 | // set up OpenAL buffers 144 | var alDevice: OpaquePointer 145 | alDevice = alcOpenDevice(nil) 146 | Utility.checkAL(operation: "Couldn't open AL device") // default device 147 | 148 | var alContext: OpaquePointer 149 | var attrList: ALCint = 0 150 | alContext = alcCreateContext(alDevice, &attrList) 151 | Utility.checkAL(operation: "Couldn't open AL context") 152 | 153 | alcMakeContextCurrent (alContext) 154 | Utility.checkAL(operation: "Couldn't make AL context 
current") 155 | 156 | // create kBufferCount OpenAL buffers 157 | var buffers = [ALuint](repeating: 0, count: kBufferCount) 158 | alGenBuffers(ALsizei(kBufferCount), &buffers) 159 | Utility.checkAL(operation: "Couldn't generate buffers") 160 | 161 | // set up OpenAL source 162 | alGenSources(1, &player.sources) 163 | Utility.checkAL(operation: "Couldn't generate sources") 164 | 165 | // set the gain 166 | alSourcef(player.sources[0], AL_GAIN, ALfloat(AL_MAX_GAIN)) 167 | Utility.checkAL(operation: "Couldn't set source gain") 168 | 169 | // set the initial sound position 170 | alSource3f(player.sources[0], AL_POSITION, 0.0, 0.0, 0.0) 171 | Utility.checkAL(operation: "Couldn't set sound position") 172 | 173 | // create a closure to use as the callback proc for AL_EXT_SOURCE_NOTIFICATIONS 174 | // NOTE: may be called while previous callback is still executing 175 | // 176 | sourceNotificationProc = {sid, notificationID, userData in 177 | 178 | // is it an AL_BUFFER_PROCESSED notification? 179 | if notificationID == ALuint(AL_BUFFERS_PROCESSED) { 180 | 181 | // YES, refill buffers if needed (enforce sequential order) 182 | DispatchQueue.main.async { 183 | refillALBuffers (player: UnsafeMutablePointer(userData!)) 184 | } 185 | } 186 | } 187 | 188 | // determine if the AL_EXT_SOURCE_NOTIFICATIONS extension is present 189 | var extName = "AL_EXT_SOURCE_NOTIFICATIONS" 190 | if alIsExtensionPresent(extName) == ALboolean(AL_TRUE) { 191 | 192 | // can we get the Proc's address? 
193 | if let ptr = alGetProcAddress("alSourceAddNotification") { 194 | 195 | // YES, cast it 196 | sourceAddNotificationProc = unsafeBitCast(ptr, to: alSourceAddNotificationProcPtr.self) 197 | 198 | // set the callback (exit if unsuccessful) 199 | let x = sourceAddNotificationProc(player.sources[0], ALuint(AL_BUFFERS_PROCESSED), sourceNotificationProc, &player) 200 | if x != AL_NO_ERROR { 201 | 202 | Swift.print("Couldn't perform alSourceAddNotification") 203 | exit(1) 204 | } 205 | 206 | } else { 207 | 208 | // NO, exit 209 | Swift.print("Couldn't get alSourceAddNotification ProcAddress") 210 | exit(1) 211 | } 212 | } 213 | 214 | // determine if the AL_EXT_STATIC_BUFFER extension is present 215 | extName = "AL_EXT_STATIC_BUFFER" 216 | if alIsExtensionPresent("AL_EXT_STATIC_BUFFER") == ALboolean(AL_TRUE) { 217 | 218 | // can we get the Proc's address? 219 | if let ptr = alGetProcAddress("alBufferDataStatic") { 220 | 221 | // YES, cast it 222 | bufferDataStaticProc = unsafeBitCast(ptr, to: alBufferDataStaticProcPtr.self) 223 | 224 | for i in 0..) 
{ 42 | 43 | let theta: Double = fmod(CFAbsoluteTimeGetCurrent() * kOrbitSpeed, M_PI * 2) 44 | let x = ALfloat(3.0 * cos(theta)) 45 | let y = ALfloat(0.5 * sin (theta)) 46 | let z = ALfloat(1.0 * sin (theta)) 47 | 48 | Swift.print("x = \(x), y = \(y), z = \(z)\n") 49 | 50 | alSource3f(player.pointee.sources[0], AL_POSITION, x, y, z) 51 | } 52 | // 53 | // setup an ExtAudioFile 54 | // 55 | func setUpExtAudioFile (player: UnsafeMutablePointer) -> OSStatus { 56 | 57 | // create a URL to the sound source 58 | let streamFileURL: CFURL = CFURLCreateWithFileSystemPath(kCFAllocatorDefault, streamPath, .cfurlposixPathStyle, false) 59 | 60 | // describe the client format - AL needs mono 61 | player.pointee.dataFormat.mFormatID = kAudioFormatLinearPCM // uncompressed PCM 62 | player.pointee.dataFormat.mFormatFlags = kAudioFormatFlagIsSignedInteger | kAudioFormatFlagIsPacked // signed integer & packed 63 | player.pointee.dataFormat.mSampleRate = 44100.0 // sample rate = 44,100 64 | player.pointee.dataFormat.mChannelsPerFrame = 1 // 1 channel 65 | player.pointee.dataFormat.mFramesPerPacket = 1 // 1 frame per packet 66 | player.pointee.dataFormat.mBitsPerChannel = 16 // 16 bit signed integer 67 | player.pointee.dataFormat.mBytesPerFrame = 2 // 2 bytes per frame 68 | player.pointee.dataFormat.mBytesPerPacket = 2 // 2 bytes per packet 69 | 70 | // open the source URL 71 | Utility.check(error: ExtAudioFileOpenURL(streamFileURL, &player.pointee.extAudioFile), 72 | operation: "Couldn't open ExtAudioFile for reading") 73 | 74 | // tell extAudioFile about our format 75 | Utility.check(error: ExtAudioFileSetProperty(player.pointee.extAudioFile!, 76 | kExtAudioFileProperty_ClientDataFormat, 77 | UInt32(sizeof (AudioStreamBasicDescription.self)), 78 | &player.pointee.dataFormat), 79 | operation: "Couldn't set client format on ExtAudioFile") 80 | 81 | // figure out how big file is 82 | var propSize = UInt32(sizeof(Int64.self)) 83 | ExtAudioFileGetProperty(player.pointee.extAudioFile!, 84 | 
kExtAudioFileProperty_FileLengthFrames, 85 | &propSize, 86 | &player.pointee.fileLengthFrames) 87 | 88 | Swift.print("fileLengthFrames = \(player.pointee.fileLengthFrames) frames\n") 89 | 90 | // calcuate the buffer needed (duration * sample rate * bytes per frame) 91 | player.pointee.bufferSizeBytes = kBufferDuration * UInt32(player.pointee.dataFormat.mSampleRate) * player.pointee.dataFormat.mBytesPerFrame 92 | 93 | Swift.print("bufferSizeBytes = \(player.pointee.bufferSizeBytes)\n") 94 | 95 | Swift.print("Bottom of setUpExtAudioFile\n") 96 | 97 | return noErr 98 | } 99 | // 100 | // fill an OpenAL buffer with data read from the ExtAudioFile 101 | // 102 | func fillALBuffer (player: UnsafeMutablePointer, alBuffer: ALuint) { 103 | 104 | // allocate a buffer for the samples 105 | let sampleBuffer = UnsafeMutablePointer(malloc(sizeof(UInt16.self) * Int(player.pointee.bufferSizeBytes))) 106 | 107 | // setup an AudioBufferList 108 | var bufferList = AudioBufferList() 109 | bufferList.mNumberBuffers = 1 110 | bufferList.mBuffers.mNumberChannels = 1 111 | bufferList.mBuffers.mDataByteSize = player.pointee.bufferSizeBytes 112 | 113 | // use the sample buffer as the AudioBufferList's buffer 114 | bufferList.mBuffers.mData = UnsafeMutablePointer(sampleBuffer) 115 | 116 | Swift.print("allocated \(player.pointee.bufferSizeBytes) byte buffer for ABL\n") 117 | 118 | // read from ExtAudioFile into the AudioBufferList (i.e. 
into the sampleBuffer) 119 | // TODO: handle end-of-file wraparound 120 | var framesReadIntoBuffer: UInt32 = 0 121 | 122 | // repeat until the AudioBufferList's buffer has been filled 123 | repeat { 124 | var framesRead = UInt32(player.pointee.fileLengthFrames) - framesReadIntoBuffer 125 | 126 | // point to the start of the current segment 127 | bufferList.mBuffers.mData = UnsafeMutablePointer(sampleBuffer?.advanced(by: Int(framesReadIntoBuffer) * sizeof(UInt16.self))) 128 | 129 | // read a segment 130 | Utility.check(error: ExtAudioFileRead(player.pointee.extAudioFile!, 131 | &framesRead, 132 | &bufferList), 133 | operation: "ExtAudioFileRead failed") 134 | 135 | // update the count of frames read during this pass 136 | framesReadIntoBuffer += framesRead 137 | 138 | // update the Player's running total of frames read 139 | player.pointee.totalFramesRead = player.pointee.totalFramesRead + Int64(framesRead) 140 | 141 | Swift.print("read \(framesRead) frames\n") 142 | 143 | } while (framesReadIntoBuffer < (player.pointee.bufferSizeBytes / UInt32(sizeof(UInt16.self)))) 144 | 145 | // copy from the AudioBufferList to the OpenAL buffer 146 | alBufferData(alBuffer, AL_FORMAT_MONO16, sampleBuffer, ALsizei(player.pointee.bufferSizeBytes), ALsizei(player.pointee.dataFormat.mSampleRate)) 147 | 148 | // freee the malloc'd memory (the sample buffer) 149 | free(sampleBuffer) 150 | } 151 | // 152 | // re-fill an OpenAL buffer 153 | // 154 | func refillALBuffers (player: UnsafeMutablePointer) { 155 | var processed: ALint = 0 156 | 157 | // get a count of "processed" OpenAL buffers 158 | alGetSourcei(player.pointee.sources[0], AL_BUFFERS_PROCESSED, &processed) 159 | Utility.checkAL(operation: "couldn't get al_buffers_processed") 160 | 161 | // re-fill & re-queue as many buffers as have been processed 162 | while (processed > 0) { 163 | var freeBuffer: ALuint = 0 164 | 165 | // get a free buffer (one that was processed) 166 | alSourceUnqueueBuffers(player.pointee.sources[0], 1, 
&freeBuffer) 167 | Utility.checkAL(operation: "couldn't unqueue buffer") 168 | 169 | Swift.print("refilling buffer \(freeBuffer)\n") 170 | 171 | // fill the OpenAL buffer from the player buffer 172 | fillALBuffer(player: player, alBuffer: freeBuffer) 173 | 174 | // queue the buffer 175 | alSourceQueueBuffers(player.pointee.sources[0], 1, &freeBuffer) 176 | Utility.checkAL(operation: "couldn't queue refilled buffer") 177 | 178 | Swift.print("re-queued buffer \(freeBuffer)\n") 179 | 180 | // decrement the number of processed buffers 181 | processed = processed - 1 182 | } 183 | 184 | } 185 | 186 | //-------------------------------------------------------------------------------------------------- 187 | // MARK: Main 188 | 189 | // create the player 190 | var player = MyStreamPlayer() 191 | 192 | // prepare the ExtAudioFile for reading 193 | Utility.check(error: setUpExtAudioFile(player: &player), 194 | operation: "Couldn't open ExtAudioFile") 195 | 196 | // set up OpenAL buffers 197 | var alDevice: OpaquePointer 198 | alDevice = alcOpenDevice(nil) 199 | Utility.checkAL(operation: "Couldn't open AL device") // default device 200 | 201 | var alContext: OpaquePointer 202 | var attrList: ALCint = 0 203 | alContext = alcCreateContext(alDevice, &attrList) 204 | Utility.checkAL(operation: "Couldn't open AL context") 205 | 206 | alcMakeContextCurrent (alContext) 207 | Utility.checkAL(operation: "Couldn't make AL context current") 208 | 209 | var buffers = [ALuint](repeating: 0, count: kBufferCount) 210 | alGenBuffers(ALsizei(kBufferCount), &buffers) 211 | Utility.checkAL(operation: "Couldn't generate buffers") 212 | 213 | // do the initial filling of the OpenAL buffers 214 | for i in 0..? 
// NOTE(review): the opening of this struct was destroyed by the text extraction
// (an unbalanced "<" on the previous file's last line swallowed the intervening text).
// Fields are reconstructed from their visible uses in CalculateBytesForTime,
// outputCallback and the Main section below -- confirm against the original repository.
struct Player {
    var playbackFile: AudioFileID?                    // reference to the input (playback) file
    var packetPosition: Int64 = 0                     // index of the next packet to read
    var numPacketsToRead: UInt32 = 0                  // how many packets to read per callback
    var packetDescs: UnsafeMutablePointer<AudioStreamPacketDescription>?  // array of packet descriptions for read buffer
    var isDone = false                                // playback has completed
}

//--------------------------------------------------------------------------------------------------
// MARK: Supporting methods

//
// Calculate a buffer size (bytes) and packet count for a given duration.
//
// we only use time here as a guideline:
// we're really trying to get somewhere between kMinBufferSize and kMaxBufferSize,
// but not allocate too much if we don't need it
//
func CalculateBytesForTime (inAudioFile: AudioFileID,
                            inDesc: AudioStreamBasicDescription,
                            inSeconds: Double,
                            outBufferSize: UnsafeMutablePointer<UInt32>,
                            outNumPackets: UnsafeMutablePointer<UInt32>) {

    let kMaxBufferSize: UInt32 = 0x10000    // limit size to 64K
    let kMinBufferSize: UInt32 = 0x4000     // limit size to 16K

    // we need to calculate how many packets we read at a time, and how big a buffer we need.
    // we base this on the size of the packets in the file and an approximate duration for each buffer.
    //
    // first check to see what the max size of a packet is; if it is bigger than our default
    // allocation size, that needs to become larger
    var maxPacketSize: UInt32 = 0
    var propSize: UInt32 = 4
    Utility.check(error: AudioFileGetProperty(inAudioFile,
                                              kAudioFilePropertyPacketSizeUpperBound,
                                              &propSize,
                                              &maxPacketSize),
                  operation: "couldn't get file's max packet size")

    if inDesc.mFramesPerPacket > 0 {

        // packets for the requested duration = inSeconds * packets-per-second
        //                                    = inSeconds * (mSampleRate / mFramesPerPacket)
        // BUG FIX: the previous code divided by inSeconds -- mSampleRate / (mFramesPerPacket * inSeconds) --
        // which made the buffer *smaller* for longer durations instead of larger.
        let numPacketsForTime = UInt32(inDesc.mSampleRate / Double(inDesc.mFramesPerPacket) * inSeconds)

        outBufferSize.pointee = numPacketsForTime * maxPacketSize

    } else {
        // if frames per packet is zero, the codec has no predictable packet == time,
        // so we can't tailor this (we don't know how many packets represent a time period);
        // we'll just return a default buffer size
        outBufferSize.pointee = (kMaxBufferSize > maxPacketSize ? kMaxBufferSize : maxPacketSize)
    }

    // we're going to limit our size to our default
    if outBufferSize.pointee > kMaxBufferSize && outBufferSize.pointee > maxPacketSize {

        outBufferSize.pointee = kMaxBufferSize

    } else {
        // also make sure we're not too small - we don't want to go to the disk for too-small chunks
        if outBufferSize.pointee < kMinBufferSize {
            outBufferSize.pointee = kMinBufferSize
        }
    }
    outNumPackets.pointee = outBufferSize.pointee / maxPacketSize
}

//
// Read packets from the file into the supplied Audio Queue buffer, then re-enqueue it.
//
// AudioQueueOutputCallback function; must have the following signature
// (NOTE(review): generic parameters below were stripped by the extraction and have been
// restored to the Swift 3 beta signatures -- confirm against the original repository):
//   @convention(c) (UnsafeMutablePointer<Void>?,  // Void pointer to Player struct
//                   AudioQueueRef,                // reference to the queue
//                   AudioQueueBufferRef) -> Void  // reference to the buffer in the queue
//
func outputCallback(userData: UnsafeMutablePointer<Void>?, queue: AudioQueueRef, bufferToFill: AudioQueueBufferRef) {

    // cast the userData Void pointer to a Player struct pointer
    if let player = UnsafeMutablePointer<Player>(userData) {

        if player.pointee.isDone { return }

        // read audio data from the file into the supplied buffer
        var numBytes: UInt32 = bufferToFill.pointee.mAudioDataBytesCapacity
        var nPackets = player.pointee.numPacketsToRead

        Utility.check(error: AudioFileReadPacketData(player.pointee.playbackFile!,    // AudioFileID
                                                     false,                           // use cache?
                                                     &numBytes,                       // in: buffer capacity, out: bytes actually read
                                                     player.pointee.packetDescs,      // pointer to an array of PacketDescriptions
                                                     player.pointee.packetPosition,   // index of first packet to be read
                                                     &nPackets,                       // number of packets
                                                     bufferToFill.pointee.mAudioData), // output buffer
                      operation: "AudioFileReadPacketData failed")

        // enqueue the buffer into the Audio Queue
        // if nPackets == 0 it means we are at EOF (all data has been read from the file)
        if nPackets > 0 {
            bufferToFill.pointee.mAudioDataByteSize = numBytes

            Utility.check(error: AudioQueueEnqueueBuffer(queue,                       // queue
                                                         bufferToFill,                // buffer to enqueue
                                                         (player.pointee.packetDescs == nil ? 0 : nPackets), // number of packet descriptions
                                                         player.pointee.packetDescs), // pointer to a PacketDescriptions array
                          operation: "AudioQueueEnqueueBuffer failed")

            player.pointee.packetPosition += Int64(nPackets)

        } else {

            // EOF: stop the queue asynchronously so already-queued buffers play out
            Utility.check(error: AudioQueueStop(queue, false),
                          operation: "AudioQueueStop failed")

            player.pointee.isDone = true
        }
    }
}

//--------------------------------------------------------------------------------------------------
// MARK: Properties

let kPlaybackFileLocation = CFStringCreateWithCString(kCFAllocatorDefault, "/Users/Doug/x.mp3", CFStringBuiltInEncodings.UTF8.rawValue)

//#define kPlaybackFileLocation CFSTR("/Users/cadamson/Library/Developer/Xcode/DerivedData/CH04_Recorder-dvninfofohfiwcgyndnhzarhsipp/Build/Products/Debug/output.caf")
//#define kPlaybackFileLocation CFSTR("/Users/cadamson/audiofile.m4a")
//#define kPlaybackFileLocation CFSTR("/Volumes/Sephiroth/iTunes/iTunes Media/Music/The Tubes/Tubes World Tour 2001/Wild Women of Wongo.m4p")
//#define kPlaybackFileLocation CFSTR("/Volumes/Sephiroth/iTunes/iTunes Media/Music/Compilations/ESCAFLOWNE - ORIGINAL MOVIE
// SOUNDTRACK/21 We're flying.m4a")  <- continuation of the commented-out sample path above

let kNumberPlaybackBuffers = 3    // number of buffers used by the Audio Queue

//--------------------------------------------------------------------------------------------------
// MARK: Main

var player = Player()

let fileURL: CFURL = CFURLCreateWithFileSystemPath(kCFAllocatorDefault, kPlaybackFileLocation, .cfurlposixPathStyle, false)

// open the audio file, set the playbackFile property in the player struct
Utility.check(error: AudioFileOpenURL(fileURL,                // file URL to open
                                      .readPermission,        // open to read
                                      0,                      // hint
                                      &player.playbackFile),  // set on output to the AudioFileID
              operation: "AudioFileOpenURL failed")

// get the audio data format from the file
var dataFormat = AudioStreamBasicDescription()
var propSize = UInt32(sizeof(AudioStreamBasicDescription.self))
Utility.check(error: AudioFileGetProperty(player.playbackFile!,          // AudioFileID
                                          kAudioFilePropertyDataFormat,  // desired property
                                          &propSize,                     // size of the property
                                          &dataFormat),                  // set on output to the ASBD
              operation: "couldn't get file's data format")

// create an output (playback) queue
var queue: AudioQueueRef?
Utility.check(error: AudioQueueNewOutput(&dataFormat,     // pointer to the ASBD
                                         outputCallback,  // callback function
                                         &player,         // user data: pointer to the player struct
                                         nil,             // run loop (nil = queue's internal thread)
                                         nil,             // run loop mode
                                         0,               // flags (always 0)
                                         &queue),         // set on output to the queue
              operation: "AudioQueueNewOutput failed")

// adjust buffer size to represent about a half second (0.5) of audio based on this format
var bufferByteSize: UInt32 = 0
CalculateBytesForTime(inAudioFile: player.playbackFile!, inDesc: dataFormat, inSeconds: 0.5, outBufferSize: &bufferByteSize, outNumPackets: &player.numPacketsToRead)

// check if we are dealing with a variable-bit-rate (VBR) file. ASBDs for VBR files always have
// mBytesPerPacket and mFramesPerPacket as 0 since they can fluctuate at any time.
// If we are dealing with a VBR file, we allocate memory to hold the packet descriptions
if (dataFormat.mBytesPerPacket == 0 || dataFormat.mFramesPerPacket == 0) {

    // variable bit rate format
    player.packetDescs = UnsafeMutablePointer<AudioStreamPacketDescription>(malloc(sizeof(AudioStreamPacketDescription.self) * Int(player.numPacketsToRead)))

} else {

    // constant bit rate format (we don't provide packet descriptions, e.g. linear PCM)
    player.packetDescs = nil
}

// get magic cookie from file and set on queue
Utility.applyEncoderCookie(fromFile: player.playbackFile!, toQueue: queue!)

// allocate the buffers
var buffers = [AudioQueueBufferRef?](repeating: nil, count: kNumberPlaybackBuffers)

player.isDone = false
player.packetPosition = 0

// NOTE(review): everything from here to the end of this file was swallowed by the text
// extraction (an unbalanced "<" ate the remainder, including the file separator).
// Reconstructed from the book's CH05 listing -- verify against the original repository.

// prime the queue with some data before starting
for i in 0..<kNumberPlaybackBuffers {

    Utility.check(error: AudioQueueAllocateBuffer(queue!, bufferByteSize, &buffers[i]),
                  operation: "AudioQueueAllocateBuffer failed")

    // fill the buffer (may set player.isDone for a very short file)
    outputCallback(userData: &player, queue: queue!, bufferToFill: buffers[i]!)

    if player.isDone { break }
}

// start the queue
Utility.check(error: AudioQueueStart(queue!, nil), operation: "AudioQueueStart failed")

Swift.print("Playing...\n")

// run the run loop until the output callback sets isDone
repeat {
    CFRunLoopRunInMode(CFRunLoopMode.defaultMode, 0.25, false)
} while !player.isDone

// isDone marks the end of *reading* the file, not of playback; run a little longer
// so the already-enqueued buffers can finish playing
CFRunLoopRunInMode(CFRunLoopMode.defaultMode, 2, false)

// stop & dispose of the queue, close the file
player.isDone = true
Utility.check(error: AudioQueueStop(queue!, true), operation: "AudioQueueStop failed")
AudioQueueDispose(queue!, true)
AudioFileClose(player.playbackFile!)

exit(0)

//--------------------------------------------------------------------------------
// /CH06_AudioConverter/main.swift:
//--------------------------------------------------------------------------------
//
// main.swift
// CH06_AudioConverter
//

import AudioToolbox

//--------------------------------------------------------------------------------------------------
// MARK: Struct definition

// NOTE(review): the opening of this struct was also swallowed by the extraction; only the
// closing brace and the last field's trailing comment survived. Fields are reconstructed
// from their visible uses in audioConverterCallback, Convert and the Main section.
struct AudioConverterSettings {
    var inputFormat = AudioStreamBasicDescription()   // input file's data stream description
    var outputFormat = AudioStreamBasicDescription()  // output file's data stream description
    var inputFile: AudioFileID?                       // reference to the input file
    var outputFile: AudioFileID?                      // reference to the output file
    var inputFilePacketIndex: UInt64 = 0              // current packet index in the input file
    var inputFilePacketCount: UInt64 = 0              // total number of packets in the input file
    var inputFilePacketMaxSize: UInt32 = 0            // largest packet possible in the input file
    var inputFilePacketDescriptions: UnsafeMutablePointer<AudioStreamPacketDescription>? // pointer to an array of AudioStreamPacketDescriptions for read buffer
}

//--------------------------------------------------------------------------------------------------
// MARK: Supporting methods

//
// audioConverterCallback
//
// AudioConverterComplexInputDataProc function
//
// must have the following signature:
//   @convention(c) (AudioConverterRef,                       // reference to the Converter
//                   UnsafeMutablePointer<UInt32>,            // pointer to a UInt32 packet count
//                   UnsafeMutablePointer<AudioBufferList>,   // pointer to an AudioBufferList
//                   UnsafeMutablePointer<UnsafeMutablePointer<AudioStreamPacketDescription>?>?, // pointer to a pointer to AudioStreamPacketDescription(s)
//                   UnsafeMutablePointer<Void>?)
//                   -> OSStatus                              // Void pointer to the AudioConverterSettings struct
//
// NOTE(review): generic parameters in this file were stripped by the extraction and have
// been restored to the Swift 3 beta signatures -- confirm against the original repository.
//
func audioConverterCallback(audioConverter: AudioConverterRef,
                            packetCount: UnsafeMutablePointer<UInt32>,
                            bufferList: UnsafeMutablePointer<AudioBufferList>,
                            packetDescriptions: UnsafeMutablePointer<UnsafeMutablePointer<AudioStreamPacketDescription>?>?,
                            userData: UnsafeMutablePointer<Void>?) -> OSStatus {

    // cast the userData Void pointer to an AudioConverterSettings struct pointer
    if let settings = UnsafeMutablePointer<AudioConverterSettings>(userData) {

        // initialize in case of failure (there will be only one buffer in the AudioBufferList)
        bufferList.pointee.mBuffers.mData = nil
        bufferList.pointee.mBuffers.mDataByteSize = 0

        // if fewer packets remain in the file than were requested, clamp the request
        // (BUG FIX: the old comment labelled this branch "YES, enough packets", but the
        // condition is true when there are NOT enough packets left)
        if settings.pointee.inputFilePacketIndex + UInt64(packetCount.pointee) > settings.pointee.inputFilePacketCount {
            packetCount.pointee = UInt32(settings.pointee.inputFilePacketCount - settings.pointee.inputFilePacketIndex)
        }

        // return if no packets are available
        if packetCount.pointee == 0 { return noErr }

        // calculate the intended size & allocate a buffer.
        // NOTE(review): this buffer is deliberately not freed before returning -- the converter
        // reads from it after we return -- but as written it is never freed at all, leaking one
        // buffer per callback. The proper fix is to keep the pointer in AudioConverterSettings
        // and free the previous allocation on the next call / after conversion completes.
        var outByteCount: UInt32 = packetCount.pointee * settings.pointee.inputFilePacketMaxSize
        let sourceBuffer = calloc(1, Int(outByteCount))

        // read packets into the buffer
        var result = AudioFileReadPacketData(settings.pointee.inputFile!,                     // AudioFileID
                                             true,                                            // use cache?
                                             &outByteCount,                                   // in: buffer capacity, out: bytes actually read
                                             settings.pointee.inputFilePacketDescriptions,    // pointer to an array of PacketDescriptions
                                             Int64(settings.pointee.inputFilePacketIndex),    // index of first packet to be read
                                             packetCount,                                     // number of packets
                                             sourceBuffer)                                    // output buffer

        // reading the tail of the file returns kAudioFileEndOfFileError;
        // as long as some packets were read it is not an error here
        if result == kAudioFileEndOfFileError && (packetCount.pointee > 0) {

            result = noErr

        } else if result != noErr {

            // some other error occurred
            return result
        }

        // update the position in the file
        settings.pointee.inputFilePacketIndex += UInt64(packetCount.pointee)

        // hand the data, byte count and packet descriptions back to the converter
        bufferList.pointee.mBuffers.mData = sourceBuffer
        bufferList.pointee.mBuffers.mDataByteSize = outByteCount
        packetDescriptions?.pointee = settings.pointee.inputFilePacketDescriptions

        return result
    }

    // no user data - nothing was done
    return noErr
}

//
// convert from the input format to the output format
//
func Convert(settings: UnsafeMutablePointer<AudioConverterSettings>) {

    // create the AudioConverter object
    Utility.check(error: AudioConverterNew(&settings.pointee.inputFormat,
                                           &settings.pointee.outputFormat,
                                           &audioConverter),
                  operation: "AudioConverterNew failed")   // BUG FIX: message previously read "AudioConveterNew"

    // allocate packet descriptions if the input file is variable-bit-rate (VBR)
    var packetsPerBuffer: UInt32 = 0
    var outputBufferSize: UInt32 = 32 * 1024   // 32 KB is a good starting point
    var sizePerPacket: UInt32 = settings.pointee.inputFormat.mBytesPerPacket

    // mBytesPerPacket == 0 means the input format is VBR
    if sizePerPacket == 0 {

        // YES, get the packet size from the converter.
        // NOTE(review): kAudioConverterPropertyMaximumOutputPacketSize is queried here even
        // though the value is used to size the *input* packet-description array (as in the
        // book's listing) -- confirm intent against the original.
        var size: UInt32 = UInt32(sizeof(UInt32.self))
        Utility.check(error: AudioConverterGetProperty(audioConverter!,
                                                       kAudioConverterPropertyMaximumOutputPacketSize,
                                                       &size,
                                                       &sizePerPacket),
                      operation: "Couldn't get kAudioConverterPropertyMaximumOutputPacketSize")

        // make sure the buffer is large enough to hold at least one packet
        if sizePerPacket > outputBufferSize { outputBufferSize = sizePerPacket }

        // calculate the number of packets that fit into the buffer
        packetsPerBuffer = outputBufferSize / sizePerPacket

        // allocate space for the AudioStreamPacketDescription(s)
        settings.pointee.inputFilePacketDescriptions = UnsafeMutablePointer<AudioStreamPacketDescription>(malloc(sizeof(AudioStreamPacketDescription.self) * Int(packetsPerBuffer)))

    } else {

        // NO (CBR), calculate the number of packets that fit into the buffer
        packetsPerBuffer = outputBufferSize / sizePerPacket
    }

    // allocate space for the output buffer
    let outputBuffer = malloc(Int(outputBufferSize))

    // loop until the conversion is complete (or an error occurs)
    var outputFilePacketPosition: UInt32 = 0    // in bytes
    while true {

        // wrap the destination buffer in an AudioBufferList
        var convertedData = AudioBufferList()
        convertedData.mNumberBuffers = 1
        convertedData.mBuffers.mNumberChannels = settings.pointee.inputFormat.mChannelsPerFrame
        convertedData.mBuffers.mDataByteSize = outputBufferSize
        convertedData.mBuffers.mData = outputBuffer

        // now call the audioConverter to transcode the data. This function will call
        // the callback function as many times as required to fulfill the request.
        var ioOutputDataPackets: UInt32 = packetsPerBuffer
        let error = AudioConverterFillComplexBuffer(audioConverter!,
                                                    audioConverterCallback,
                                                    settings,
                                                    &ioOutputDataPackets,
                                                    &convertedData,
                                                    settings.pointee.inputFilePacketDescriptions)

        // termination condition: an error, or nothing more was produced
        if error != noErr || ioOutputDataPackets == 0 {
            break
        }

        // write the converted data to the output file
        // KEVIN: QUESTION: 3rd arg seems like it should be a byte count, not packets. why does this work?
        Utility.check(error: AudioFileWritePackets(settings.pointee.outputFile!,
                                                   false,
                                                   ioOutputDataPackets,
                                                   nil,
                                                   Int64(outputFilePacketPosition / settings.pointee.outputFormat.mBytesPerPacket),
                                                   &ioOutputDataPackets,
                                                   convertedData.mBuffers.mData!),
                      operation: "Couldn't write packets to file")

        // advance the output file write location
        outputFilePacketPosition += (ioOutputDataPackets * settings.pointee.outputFormat.mBytesPerPacket)
    }

    // cleanup
    AudioConverterDispose(audioConverter!)
    free(outputBuffer)
}

//--------------------------------------------------------------------------------------------------
// MARK: Properties

let kInputFileLocation = CFStringCreateWithCString(kCFAllocatorDefault, "/Users/Doug/x.mp3", CFStringBuiltInEncodings.UTF8.rawValue)

//--------------------------------------------------------------------------------------------------
// MARK: Main

var audioConverterSettings = AudioConverterSettings()

// open the input audio file
let inputFileURL = CFURLCreateWithFileSystemPath(kCFAllocatorDefault, kInputFileLocation, .cfurlposixPathStyle, false)!
Utility.check(error: AudioFileOpenURL(inputFileURL,
                                      .readPermission,
                                      0,
                                      &audioConverterSettings.inputFile),
              operation: "AudioFileOpenURL failed")

// get the audio data format from the file
var propSize: UInt32 = UInt32(sizeof(AudioStreamBasicDescription.self))
Utility.check(error: AudioFileGetProperty(audioConverterSettings.inputFile!,
                                          kAudioFilePropertyDataFormat,
                                          &propSize,
                                          &audioConverterSettings.inputFormat),
              operation: "couldn't get file's data format")

// get the total number of packets in the file
propSize = UInt32(sizeof(UInt64.self))
Utility.check(error: AudioFileGetProperty(audioConverterSettings.inputFile!,
                                          kAudioFilePropertyAudioDataPacketCount,
                                          &propSize,
                                          &audioConverterSettings.inputFilePacketCount),
              operation: "couldn't get file's packet count")

// get the size of the largest possible packet
propSize = UInt32(sizeof(UInt32.self))
Utility.check(error: AudioFileGetProperty(audioConverterSettings.inputFile!,
                                          kAudioFilePropertyMaximumPacketSize,
                                          &propSize, &audioConverterSettings.inputFilePacketMaxSize),
              operation: "couldn't get file's max packet size")

// define the output format. AudioConverter requires that one of the data formats be LPCM
audioConverterSettings.outputFormat.mSampleRate = 44100.0
audioConverterSettings.outputFormat.mFormatID = kAudioFormatLinearPCM
audioConverterSettings.outputFormat.mFormatFlags = kAudioFormatFlagIsBigEndian | kAudioFormatFlagIsSignedInteger | kAudioFormatFlagIsPacked
audioConverterSettings.outputFormat.mBytesPerPacket = 4      // 2 channels x 2 bytes (16 bits) per sample
audioConverterSettings.outputFormat.mFramesPerPacket = 1
audioConverterSettings.outputFormat.mBytesPerFrame = 4
audioConverterSettings.outputFormat.mChannelsPerFrame = 2
audioConverterSettings.outputFormat.mBitsPerChannel = 16

// create the output file (overwrites any existing file)
let outputFileURL: CFURL = CFURLCreateWithFileSystemPath(kCFAllocatorDefault, "output.aif", .cfurlposixPathStyle, false)
Utility.check(error: AudioFileCreateWithURL(outputFileURL,
                                            kAudioFileAIFFType,
                                            &audioConverterSettings.outputFormat,
                                            .eraseFile,
                                            &audioConverterSettings.outputFile),
              operation: "AudioFileCreateWithURL failed")

Swift.print("Converting...\n")

// perform the conversion
Convert(settings: &audioConverterSettings)

// cleanup
AudioFileClose(audioConverterSettings.inputFile!)
AudioFileClose(audioConverterSettings.outputFile!)

Swift.print("Done\r")

exit(0)

//--------------------------------------------------------------------------------
// /CH08_AUGraphInput/main.swift:
//--------------------------------------------------------------------------------
//
// main.swift
// CH08_AUGraphInput
//
// Created by Douglas Adams on 7/16/16.
6 | // 7 | 8 | import AudioToolbox 9 | import ApplicationServices 10 | 11 | let part2 = false 12 | 13 | //-------------------------------------------------------------------------------------------------- 14 | // MARK: Struct definition 15 | 16 | struct MyAUGraphPlayer 17 | { 18 | var streamFormat = AudioStreamBasicDescription() 19 | 20 | var graph: AUGraph? 21 | var inputUnit: AudioUnit? 22 | var outputUnit: AudioUnit? 23 | var speechUnit: AudioUnit? 24 | 25 | var inputBuffer: AudioBufferList? 26 | var ringBuffer: CARingBuffer? 27 | 28 | var firstInputSampleTime: Float64 = 0 29 | var firstOutputSampleTime: Float64 = 0 30 | var inToOutSampleTimeOffset: Float64 = 0 31 | 32 | } 33 | 34 | //OSStatus InputRenderProc(void *inRefCon, 35 | // AudioUnitRenderActionFlags *ioActionFlags, 36 | // const AudioTimeStamp *inTimeStamp, 37 | // UInt32 inBusNumber, 38 | // UInt32 inNumberFrames, 39 | // AudioBufferList * ioData); 40 | //OSStatus GraphRenderProc(void *inRefCon, 41 | // AudioUnitRenderActionFlags *ioActionFlags, 42 | // const AudioTimeStamp *inTimeStamp, 43 | // UInt32 inBusNumber, 44 | // UInt32 inNumberFrames, 45 | // AudioBufferList * ioData); 46 | //void CreateInputUnit (MyAUGraphPlayer *player); 47 | //void CreateMyAUGraph(MyAUGraphPlayer *player); 48 | 49 | //-------------------------------------------------------------------------------------------------- 50 | // MARK: Supporting methods 51 | 52 | func InputRenderProc(userData: UnsafeMutablePointer, 53 | actionFlags: UnsafeMutablePointer, 54 | timeStamp: UnsafePointer, 55 | busNumber: UInt32, 56 | numberOfFrames: UInt32, 57 | bufferList: UnsafeMutablePointer?) -> OSStatus { 58 | 59 | // printf ("InputRenderProc!\n"); 60 | let player = UnsafeMutablePointer(userData) 61 | 62 | // have we ever logged input timing? 
(for offset calculation) 63 | if (player.pointee.firstInputSampleTime < 0.0) { 64 | 65 | player.pointee.firstInputSampleTime = timeStamp.pointee.mSampleTime 66 | 67 | if player.pointee.firstOutputSampleTime > -1.0 && player.pointee.inToOutSampleTimeOffset < 0.0 { 68 | 69 | player.pointee.inToOutSampleTimeOffset = player.pointee.firstInputSampleTime - player.pointee.firstOutputSampleTime 70 | } 71 | } 72 | 73 | // render into our buffer 74 | var inputProcErr: OSStatus = noErr 75 | inputProcErr = AudioUnitRender(player.pointee.inputUnit!, 76 | actionFlags, 77 | timeStamp, 78 | busNumber, 79 | numberOfFrames, 80 | player.pointee.inputBuffer) 81 | // copy from our buffer to ring buffer 82 | if inputProcErr == noErr { 83 | inputProcErr = player.pointee.ringBuffer.pointee.Store(player.pointee.inputBuffer, 84 | numberOfFrames, 85 | timeStamp.pointee.mSampleTime) 86 | 87 | // printf ("stored %d frames at time %f\n", inNumberFrames, inTimeStamp->mSampleTime); 88 | } 89 | // else { 90 | // printf ("input renderErr: %d\n", inputProcErr); 91 | // } 92 | // 93 | 94 | return inputProcErr 95 | } 96 | 97 | 98 | func GraphRenderProc(userData: UnsafeMutablePointer, 99 | actionFlags: UnsafeMutablePointer, 100 | timeStamp: UnsafePointer, 101 | busNumber: UInt32, 102 | numberOfFrames: UInt32, 103 | bufferList: UnsafeMutablePointer?) -> OSStatus { 104 | 105 | // printf ("GraphRenderProc! need %d frames for time %f \n", inNumberFrames, inTimeStamp->mSampleTime); 106 | 107 | let player = UnsafeMutablePointer(userData) 108 | 109 | // have we ever logged output timing? 
(for offset calculation) 110 | if (player.pointee.firstOutputSampleTime < 0.0) { 111 | 112 | player.pointee.firstOutputSampleTime = inTimeStamp->mSampleTime; 113 | 114 | if ((player.pointee.firstInputSampleTime > -1.0) && 115 | 116 | (player.pointee.inToOutSampleTimeOffset < 0.0)) { 117 | 118 | player.pointee.inToOutSampleTimeOffset = player.pointee.firstInputSampleTime - player.pointee.firstOutputSampleTime 119 | } 120 | } 121 | 122 | // copy samples out of ring buffer 123 | var outputProcErr: OSStatus = noErr; 124 | 125 | // new CARingBuffer doesn't take bool 4th arg 126 | outputProcErr = player.pointee.ringBuffer.Fetch(bufferList, 127 | numberOfFrames, 128 | timeStamp.pointee.mSampleTime + player.pointee.inToOutSampleTimeOffset) 129 | 130 | // printf ("fetched %d frames at time %f\n", inNumberFrames, inTimeStamp->mSampleTime); 131 | return outputProcErr; 132 | } 133 | 134 | // 135 | // 136 | // 137 | func CreateInputUnit (player: UnsafeMutablePointer) { 138 | 139 | // generate description that will match audio HAL 140 | var inputCd = AudioComponentDescription() 141 | inputCd.componentType = kAudioUnitType_Output 142 | inputCd.componentSubType = kAudioUnitSubType_HALOutput 143 | inputCd.componentManufacturer = kAudioUnitManufacturer_Apple 144 | 145 | var comp: AudioComponent? 
= AudioComponentFindNext(nil, &inputCd) 146 | if comp == nil { 147 | Swift.print("can't get output unit") 148 | exit(-1) 149 | } 150 | 151 | Utility.check(error: AudioComponentInstanceNew(comp, &player.pointee.inputUnit), 152 | operation: "Couldn't open component for inputUnit"); 153 | 154 | // enable/io 155 | var disableFlag: UInt32 = 0 156 | var enableFlag: UInt32 = 1 157 | var outputBus: AudioUnitScope = 0 158 | var inputBus: AudioUnitScope = 1 159 | Utility.check(error: AudioUnitSetProperty(player.pointee.inputUnit, 160 | kAudioOutputUnitProperty_EnableIO, 161 | kAudioUnitScope_Input, 162 | inputBus, 163 | &enableFlag, 164 | sizeof(enableFlag)), 165 | operation: "Couldn't enable input on I/O unit"); 166 | 167 | Utility.check(error: AudioUnitSetProperty(player.pointee.inputUnit, 168 | kAudioOutputUnitProperty_EnableIO, 169 | kAudioUnitScope_Output, 170 | outputBus, 171 | &disableFlag, // well crap, have to disable 172 | sizeof(enableFlag)), 173 | operation: "Couldn't disable output on I/O unit"); 174 | 175 | // set device (osx only... iphone has only one device) 176 | var defaultDevice: AudioDeviceID = kAudioObjectUnknown 177 | var propertySize: UInt32 = sizeof(defaultDevice) 178 | 179 | // AudioHardwareGetProperty() is deprecated 180 | // CheckError (AudioHardwareGetProperty(kAudioHardwarePropertyDefaultInputDevice, 181 | // &propertySize, 182 | // &defaultDevice), 183 | // "Couldn't get default input device"); 184 | 185 | // AudioObjectProperty stuff new in 10.6, replaces AudioHardwareGetProperty() call 186 | // TODO: need to update ch08 to explain, use this call. 
need CoreAudio.framework 187 | var defaultDeviceProperty = AudioObjectPropertyAddress() 188 | defaultDeviceProperty.mSelector = kAudioHardwarePropertyDefaultInputDevice 189 | defaultDeviceProperty.mScope = kAudioObjectPropertyScopeGlobal 190 | defaultDeviceProperty.mElement = kAudioObjectPropertyElementMaster 191 | 192 | Utility.check(error: AudioObjectGetPropertyData(kAudioObjectSystemObject, 193 | &defaultDeviceProperty, 194 | 0, 195 | nil, 196 | &propertySize, 197 | &defaultDevice), 198 | operation: "Couldn't get default input device") 199 | 200 | // set this defaultDevice as the input's property 201 | // kAudioUnitErr_InvalidPropertyValue if output is enabled on inputUnit 202 | Utility.check(error: AudioUnitSetProperty(player.pointee.inputUnit, 203 | kAudioOutputUnitProperty_CurrentDevice, 204 | kAudioUnitScope_Global, 205 | outputBus, 206 | &defaultDevice, 207 | sizeof(defaultDevice)), 208 | operation: "Couldn't set default device on I/O unit") 209 | 210 | // use the stream format coming out of the AUHAL (should be de-interleaved) 211 | propertySize = sizeof(AudioStreamBasicDescription) 212 | Utility.check(error: AudioUnitGetProperty(player.pointee.inputUnit, 213 | kAudioUnitProperty_StreamFormat, 214 | kAudioUnitScope_Output, 215 | inputBus, 216 | &player.pointee.streamFormat, 217 | &propertySize), 218 | operation: "Couldn't get ASBD from input unit") 219 | 220 | // 9/6/10 - check the input device's stream format 221 | var deviceFormat = AudioStreamBasicDescription() 222 | Utility.check(error: AudioUnitGetProperty(player.pointee.inputUnit, 223 | kAudioUnitProperty_StreamFormat, 224 | kAudioUnitScope_Input, 225 | inputBus, 226 | &deviceFormat, 227 | &propertySize), 228 | operation: "Couldn't get ASBD from input unit") 229 | 230 | Swift.print("Device rate \(deviceFormat.mSampleRate), graph rate \(player.pointee.streamFormat.mSampleRate)\n") 231 | 232 | player.pointee.streamFormat.mSampleRate = deviceFormat.mSampleRate 233 | 234 | propertySize = 
sizeof(AudioStreamBasicDescription) 235 | Utility.check(error: AudioUnitSetProperty(player.pointee.inputUnit, 236 | kAudioUnitProperty_StreamFormat, 237 | kAudioUnitScope_Output, 238 | inputBus, 239 | &player.pointee.streamFormat, 240 | propertySize), 241 | operation: "Couldn't set ASBD on input unit") 242 | 243 | /* allocate some buffers to hold samples between input and output callbacks 244 | (this part largely copied from CAPlayThrough) */ 245 | //Get the size of the IO buffer(s) 246 | var bufferSizeFrames: UInt32 = 0 247 | propertySize = sizeof(UInt32) 248 | Utility.check(error: AudioUnitGetProperty(player.pointee.inputUnit, 249 | kAudioDevicePropertyBufferFrameSize, 250 | kAudioUnitScope_Global, 251 | 0, 252 | &bufferSizeFrames, 253 | &propertySize), 254 | operation: "Couldn't get buffer frame size from input unit") 255 | 256 | var bufferSizeBytes: UInt32 = bufferSizeFrames * sizeof(Float32) 257 | 258 | if (player.pointee.streamFormat.mFormatFlags & kAudioFormatFlagIsNonInterleaved) { 259 | 260 | Swift.print("format is non-interleaved\n") 261 | 262 | // allocate an AudioBufferList plus enough space for array of AudioBuffers 263 | var propsize: UInt32 = offsetof(AudioBufferList, mBuffers[0]) + (sizeof(AudioBuffer) * player.pointee.streamFormat.mChannelsPerFrame) 264 | 265 | //malloc buffer lists 266 | player.pointee.inputBuffer = UnsafeMutablePointer(malloc(propsize)) 267 | player.pointee.inputBuffer->mNumberBuffers = player.pointee.streamFormat.mChannelsPerFrame 268 | 269 | //pre-malloc buffers for AudioBufferLists 270 | for i in 0..mBuffers[i].mNumberChannels = 1 273 | player.pointee.inputBuffer->mBuffers[i].mDataByteSize = bufferSizeBytes 274 | player.pointee.inputBuffer->mBuffers[i].mData = malloc(bufferSizeBytes) 275 | } 276 | } else { 277 | printf ("format is interleaved\n"); 278 | // allocate an AudioBufferList plus enough space for array of AudioBuffers 279 | var propsize: UInt32 = offsetof(AudioBufferList, mBuffers[0]) + (sizeof(AudioBuffer) * 1) 280 | 
281 | //malloc buffer lists 282 | player.pointee.inputBuffer = UnsafeMutablePointer(malloc(propsize)) 283 | player.pointee.inputBuffer.mNumberBuffers = 1 284 | 285 | //pre-malloc buffers for AudioBufferLists 286 | player.pointee.inputBuffer->mBuffers[0].mNumberChannels = player.pointee.streamFormat.mChannelsPerFrame 287 | player.pointee.inputBuffer->mBuffers[0].mDataByteSize = bufferSizeBytes 288 | player.pointee.inputBuffer->mBuffers[0].mData = malloc(bufferSizeBytes) 289 | } 290 | 291 | //Alloc ring buffer that will hold data between the two audio devices 292 | player.pointee.ringBuffer = CARingBuffer() 293 | player.pointee.ringBuffer->Allocate(player.pointee.streamFormat.mChannelsPerFrame, player.pointee.streamFormat.mBytesPerFrame, bufferSizeFrames * 3) 294 | 295 | // set render proc to supply samples from input unit 296 | var callbackStruct = AURenderCallbackStruct() 297 | callbackStruct.inputProc = InputRenderProc 298 | callbackStruct.inputProcRefCon = player 299 | 300 | Utility.check(error: AudioUnitSetProperty(player.pointee.inputUnit, 301 | kAudioOutputUnitProperty_SetInputCallback, 302 | kAudioUnitScope_Global, 303 | 0, 304 | &callbackStruct, 305 | sizeof(callbackStruct)), 306 | operation: "Couldn't set input callback") 307 | 308 | Utility.check(error: AudioUnitInitialize(player.pointee.inputUnit), 309 | operation: "Couldn't initialize input unit") 310 | 311 | player.pointee.firstInputSampleTime = -1 312 | player.pointee.inToOutSampleTimeOffset = -1 313 | 314 | Swift.print("Bottom of CreateInputUnit()\n") 315 | } 316 | 317 | 318 | func CreateMyAUGraph(player: UnsafeMutablePointer) { 319 | 320 | // create a new AUGraph 321 | Utility.check(error: NewAUGraph(&player.pointee.graph), 322 | operation: "NewAUGraph failed"); 323 | 324 | // generate description that will match default output 325 | // ComponentDescription outputcd = {0}; 326 | // outputcd.componentType = kAudioUnitType_Output; 327 | // outputcd.componentSubType = kAudioUnitSubType_DefaultOutput; 
// (original Objective-C/C retained for reference)
// outputcd.componentManufacturer = kAudioUnitManufacturer_Apple;
//
// Component comp = FindNextComponent(NULL, &outputcd);
// if (comp == NULL) {
//     printf ("can't get output unit"); exit (-1);
// }

    // description matching the default output device's AudioUnit
    var outputCd = AudioComponentDescription()
    outputCd.componentType = kAudioUnitType_Output
    outputCd.componentSubType = kAudioUnitSubType_DefaultOutput
    outputCd.componentManufacturer = kAudioUnitManufacturer_Apple

    // FIX: Swift has no NULL — AudioComponentFindNext takes an Optional, pass nil
    let comp: AudioComponent? = AudioComponentFindNext(nil, &outputCd)
    if comp == nil {
        Swift.print("can't get output unit")
        exit(-1)
    }

    // adds a node with the above description to the graph
    var outputNode = AUNode()
    Utility.check(error: AUGraphAddNode(player.pointee.graph,
                                        &outputCd,
                                        &outputNode),
                  operation: "AUGraphAddNode[kAudioUnitSubType_DefaultOutput] failed")

    // nodes that exist only in the part-2 (mixer + speech) configuration
    var mixerNode = AUNode()
    var speechNode = AUNode()

    if part2 {

        // add a stereo mixer to the graph
        var mixerCd = AudioComponentDescription()
        mixerCd.componentType = kAudioUnitType_Mixer
        mixerCd.componentSubType = kAudioUnitSubType_StereoMixer // doesn't work: kAudioUnitSubType_MatrixMixer
        mixerCd.componentManufacturer = kAudioUnitManufacturer_Apple

        Utility.check(error: AUGraphAddNode(player.pointee.graph,
                                            &mixerCd,
                                            &mixerNode),
                      operation: "AUGraphAddNode[kAudioUnitSubType_StereoMixer] failed")

        // adds a speech-synthesis generator node to the graph
        var speechCd = AudioComponentDescription()
        speechCd.componentType = kAudioUnitType_Generator
        speechCd.componentSubType = kAudioUnitSubType_SpeechSynthesis
        speechCd.componentManufacturer = kAudioUnitManufacturer_Apple

        Utility.check(error: AUGraphAddNode(player.pointee.graph,
                                            &speechCd,
                                            &speechNode),
                      // FIX: message previously named kAudioUnitSubType_AudioFilePlayer (copy/paste error)
                      operation: "AUGraphAddNode[kAudioUnitSubType_SpeechSynthesis] failed")
    }

    // opening the graph opens all contained audio units but does not allocate any resources yet.
    // (hoisted: both branches performed this identically, after all nodes were added)
    Utility.check(error: AUGraphOpen(player.pointee.graph),
                  operation: "AUGraphOpen failed")

    // get the reference to the AudioUnit object for the output graph node (common to both branches)
    Utility.check(error: AUGraphNodeInfo(player.pointee.graph,
                                         outputNode,
                                         nil,
                                         &player.pointee.outputUnit),
                  operation: "AUGraphNodeInfo failed")

    // FIX: 'sizeof' was removed in Swift 3 — use MemoryLayout instead
    let propertySize = UInt32(MemoryLayout<AudioStreamBasicDescription>.size)

    if part2 {

        Utility.check(error: AUGraphNodeInfo(player.pointee.graph,
                                             speechNode,
                                             nil,
                                             &player.pointee.speechUnit),
                      operation: "AUGraphNodeInfo failed")

        // FIX: AudioUnit is an opaque pointer type and cannot be constructed with
        // AudioUnit(); use an Optional that AUGraphNodeInfo fills in, then unwrap.
        var mixerUnitOpt: AudioUnit?
        Utility.check(error: AUGraphNodeInfo(player.pointee.graph,
                                             mixerNode,
                                             nil,
                                             &mixerUnitOpt),
                      operation: "AUGraphNodeInfo failed")
        guard let mixerUnit = mixerUnitOpt else {
            Swift.print("can't get mixer unit")
            exit(-1)
        }

        // set ASBDs here
        Utility.check(error: AudioUnitSetProperty(player.pointee.outputUnit,
                                                  kAudioUnitProperty_StreamFormat,
                                                  kAudioUnitScope_Input,
                                                  0,
                                                  &player.pointee.streamFormat,
                                                  propertySize),
                      operation: "Couldn't set stream format on output unit")

        // problem: badComponentInstance (-2147450879)
        Utility.check(error: AudioUnitSetProperty(mixerUnit,
                                                  kAudioUnitProperty_StreamFormat,
                                                  kAudioUnitScope_Input,
                                                  0,
                                                  &player.pointee.streamFormat,
                                                  propertySize),
                      operation: "Couldn't set stream format on mixer unit bus 0")

        Utility.check(error: AudioUnitSetProperty(mixerUnit,
                                                  kAudioUnitProperty_StreamFormat,
                                                  kAudioUnitScope_Input,
                                                  1,
                                                  &player.pointee.streamFormat,
                                                  propertySize),
                      operation: "Couldn't set stream format on mixer unit bus 1")

        // connections
        //   mixer output scope / bus 0 -> outputUnit input scope / bus 0
        //   mixer input  scope / bus 0 -> render callback (from ring buffer, fed by inputUnit)
        //   mixer input  scope / bus 1 -> speech unit output scope / bus 0
        Utility.check(error: AUGraphConnectNodeInput(player.pointee.graph,
                                                     mixerNode,
                                                     0,
                                                     outputNode,
                                                     0),
                      operation: "Couldn't connect mixer output(0) to outputNode (0)")

        Utility.check(error: AUGraphConnectNodeInput(player.pointee.graph,
                                                     speechNode,
                                                     0,
                                                     mixerNode,
                                                     1),
                      operation: "Couldn't connect speech synth unit output (0) to mixer input (1)")

        // feed the ring buffer into mixer bus 0 via a render callback
        var callbackStruct = AURenderCallbackStruct()
        callbackStruct.inputProc = GraphRenderProc
        callbackStruct.inputProcRefCon = player
        Utility.check(error: AudioUnitSetProperty(mixerUnit,
                                                  kAudioUnitProperty_SetRenderCallback,
                                                  kAudioUnitScope_Global,
                                                  0,
                                                  &callbackStruct,
                                                  // FIX: sizeof(value) removed in Swift 3
                                                  UInt32(MemoryLayout<AURenderCallbackStruct>.size)),
                      operation: "Couldn't set render callback on mixer unit")

    } else {

        // set the stream format on the output unit's input scope
        Utility.check(error: AudioUnitSetProperty(player.pointee.outputUnit,
                                                  kAudioUnitProperty_StreamFormat,
                                                  kAudioUnitScope_Input,
                                                  0,
                                                  &player.pointee.streamFormat,
                                                  propertySize),
                      operation: "Couldn't set stream format on output unit")

        // feed the ring buffer straight into the output unit
        var callbackStruct = AURenderCallbackStruct()
        callbackStruct.inputProc = GraphRenderProc
        callbackStruct.inputProcRefCon = player

        Utility.check(error: AudioUnitSetProperty(player.pointee.outputUnit,
                                                  kAudioUnitProperty_SetRenderCallback,
                                                  kAudioUnitScope_Global,
                                                  0,
                                                  &callbackStruct,
                                                  UInt32(MemoryLayout<AURenderCallbackStruct>.size)),
                      operation: "Couldn't set render callback on output unit")
    }

    // now initialize the graph (causes resources to be allocated)
    Utility.check(error: AUGraphInitialize(player.pointee.graph),
                  operation: "AUGraphInitialize failed")

    player.pointee.firstOutputSampleTime = -1

    // FIX: message previously said "CreateSimpleAUGraph" — this function is CreateMyAUGraph
    Swift.print("Bottom of CreateMyAUGraph()\n")
}

//--------------------------------------------------------------------------------------------------
// MARK: Speech

// Fetch the speech unit's SpeechChannel and start speaking through the graph.
// FIX: this was declared inside a top-level `if part2 { ... }` block, which limits
// its scope to that block — the later call site (in a *different* `if part2` block)
// could never see it. Declare it unconditionally; it is simply not called otherwise.
// NOTE(review): generic parameter restored — the dump stripped "<MyAUGraphPlayer>"
// along with all other angle-bracketed text; confirm against the original source.
func PrepareSpeechAU(player: UnsafeMutablePointer<MyAUGraphPlayer>) {

    // FIX: SpeechChannel is an opaque pointer type and was used uninitialized;
    // make it Optional so AudioUnitGetProperty can fill it in.
    var chan: SpeechChannel?

    // FIX: 'sizeof' removed in Swift 3
    var propsize = UInt32(MemoryLayout<SpeechChannel>.size)
    Utility.check(error: AudioUnitGetProperty(player.pointee.speechUnit,
                                              kAudioUnitProperty_SpeechChannel,
                                              kAudioUnitScope_Global,
                                              0,
                                              &chan,
                                              &propsize),
                  operation: "AudioFileGetProperty[kAudioUnitProperty_SpeechChannel] failed")

    guard let speechChannel = chan else {
        Swift.print("couldn't get speech channel")
        exit(-1)
    }

    let myString = CFStringCreateWithCString(kCFAllocatorDefault,
                                             "Please purchase as many copies of our\n Core Audio book as you possibly can",
                                             CFStringBuiltInEncodings.UTF8.rawValue)!
    SpeakCFString(speechChannel, myString, nil)
}

//--------------------------------------------------------------------------------------------------
// MARK: Main

var player = MyAUGraphPlayer()

// create the input unit
CreateInputUnit(player: &player)

// build a graph with output unit
CreateMyAUGraph(player: &player)

if part2 {
    // configure the speech synthesizer
    PrepareSpeechAU(player: &player)
}

// start playing
Utility.check(error: AudioOutputUnitStart(player.inputUnit),
              operation: "AudioOutputUnitStart failed")
Utility.check(error: AUGraphStart(player.graph),
              operation: "AUGraphStart failed")

// and wait
// NOTE(review): restored "<return>" — the angle-bracketed text appears to have been
// stripped by the same extraction that emptied the bridging header's #include.
Swift.print("Capturing, press <return> to stop:\n")
getchar()

// cleanup — OSStatus results deliberately ignored; the process is exiting anyway
AUGraphStop(player.graph)
AUGraphUninitialize(player.graph)
AUGraphClose(player.graph)