├── ios
├── Assets
│ └── .gitkeep
├── Classes
│ ├── FlutterPcmSoundPlugin.h
│ └── FlutterPcmSoundPlugin.m
└── flutter_pcm_sound.podspec
├── android
├── settings.gradle
├── .gitignore
├── src
│ └── main
│ │ ├── AndroidManifest.xml
│ │ └── java
│ │ └── com
│ │ └── lib
│ │ └── flutter_pcm_sound
│ │ └── FlutterPcmSoundPlugin.java
└── build.gradle
├── macos
├── Classes
│ ├── FlutterPcmSoundPlugin.h
│ └── FlutterPcmSoundPlugin.m
└── flutter_pcm_sound.podspec
├── site
└── logo.png
├── .gitignore
├── example
├── pubspec.yaml
└── lib
│ └── main.dart
├── pubspec.yaml
├── LICENSE
├── CHANGELOG.md
├── README.md
└── lib
└── flutter_pcm_sound.dart
/ios/Assets/.gitkeep:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/android/settings.gradle:
--------------------------------------------------------------------------------
// Gradle module name for the plugin's Android library.
rootProject.name = 'flutter_pcm_sound'
2 |
--------------------------------------------------------------------------------
/macos/Classes/FlutterPcmSoundPlugin.h:
--------------------------------------------------------------------------------
1 | ../../ios/Classes/FlutterPcmSoundPlugin.h
--------------------------------------------------------------------------------
/macos/Classes/FlutterPcmSoundPlugin.m:
--------------------------------------------------------------------------------
1 | ../../ios/Classes/FlutterPcmSoundPlugin.m
--------------------------------------------------------------------------------
/site/logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/chipweinberger/flutter_pcm_sound/HEAD/site/logo.png
--------------------------------------------------------------------------------
/android/.gitignore:
--------------------------------------------------------------------------------
1 | *.iml
2 | .gradle
3 | /local.properties
4 | /.idea/workspace.xml
5 | /.idea/libraries
6 | .DS_Store
7 | /build
8 | /captures
9 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | .DS_Store
2 | .vscode
3 | .dart_tool/
4 | pubspec.lock
5 | example/*
6 | !example/lib/
7 | !example/lib/main.dart
8 | !example/pubspec.yaml
9 |
--------------------------------------------------------------------------------
/android/src/main/AndroidManifest.xml:
--------------------------------------------------------------------------------
<manifest xmlns:android="http://schemas.android.com/apk/res/android">
    <!-- NOTE(review): tags reconstructed — the original manifest markup was lost
         in extraction. The package/namespace is declared in android/build.gradle
         as com.lib.flutter_pcm_sound; confirm against the upstream repository. -->
</manifest>
--------------------------------------------------------------------------------
/ios/Classes/FlutterPcmSoundPlugin.h:
--------------------------------------------------------------------------------
// Platform-channel entry point for the flutter_pcm_sound plugin.
// This single header is shared by the iOS and macOS builds (the macos/
// Classes files are symlinks back to ios/Classes).
#if TARGET_OS_OSX
#import <FlutterMacOS/FlutterMacOS.h>
#else
#import <Flutter/Flutter.h>
#endif

// Namespace used when constructing the plugin's method channel name.
#define NAMESPACE @"flutter_pcm_sound"

// Conforms to FlutterPlugin so the engine can register the plugin and
// route method calls from the Dart side to it.
@interface FlutterPcmSoundPlugin : NSObject<FlutterPlugin>
@end
--------------------------------------------------------------------------------
/example/pubspec.yaml:
--------------------------------------------------------------------------------
1 | name: example
2 | description: flutter_pcm_sound example app.
3 |
4 | # The following line prevents the package from being accidentally published to
5 | # pub.dev using `flutter pub publish`. This is preferred for private packages.
6 | publish_to: 'none'
7 |
8 | environment:
9 | sdk: ">=2.15.1 <4.0.0"
10 |
11 | dependencies:
12 | flutter:
13 | sdk: flutter
14 |
15 | flutter_pcm_sound:
16 | path: ../
17 |
18 | dev_dependencies:
19 | flutter_test:
20 | sdk: flutter
21 |
--------------------------------------------------------------------------------
/pubspec.yaml:
--------------------------------------------------------------------------------
1 | name: flutter_pcm_sound
2 | description: Send real-time PCM audio (16-bit integer) to your device speakers
3 | version: 3.3.3
4 | homepage: https://github.com/chipweinberger/flutter_pcm_sound
5 |
6 | environment:
7 | sdk: ">=2.15.1 <4.0.0"
8 | flutter: ">=2.5.0"
9 |
10 | dependencies:
11 | flutter:
12 | sdk: flutter
13 |
14 | flutter:
15 | plugin:
16 | platforms:
17 | android:
18 | package: com.lib.flutter_pcm_sound
19 | pluginClass: FlutterPcmSoundPlugin
20 | ios:
21 | pluginClass: FlutterPcmSoundPlugin
22 | macos:
23 | pluginClass: FlutterPcmSoundPlugin
24 |
25 |
--------------------------------------------------------------------------------
/android/build.gradle:
--------------------------------------------------------------------------------
// Maven coordinates of the plugin's Android artifact.
group 'com.lib.flutter_pcm_sound'
version '1.0'

apply plugin: 'com.android.library'

android {
    compileSdkVersion 33

    // Plugin Java sources are compiled against Java 17.
    compileOptions {
        sourceCompatibility JavaVersion.VERSION_17
        targetCompatibility JavaVersion.VERSION_17
    }

    defaultConfig {
        minSdkVersion 19
    }

    // Surface deprecation and unchecked-cast warnings at compile time.
    gradle.projectsEvaluated {
        tasks.withType(JavaCompile) {
            options.compilerArgs << "-Xlint:deprecation"
            options.compilerArgs << "-Xlint:unchecked"
        }
    }

    namespace 'com.lib.flutter_pcm_sound'
}
27 |
--------------------------------------------------------------------------------
/ios/flutter_pcm_sound.podspec:
--------------------------------------------------------------------------------
1 | #
2 | # To learn more about a Podspec see http://guides.cocoapods.org/syntax/podspec.html.
3 | # Run `pod lib lint flutter_pcm_sound.podspec` to validate before publishing.
4 | #
Pod::Spec.new do |s|
  s.name = 'flutter_pcm_sound'
  s.version = '0.0.1'
  s.summary = 'Flutter plugin for PCM sound'
  s.description = 'Flutter plugin for PCM sound'
  s.homepage = 'https://github.com/chipweinberger/flutter_pcm_sound'
  s.license = { :file => '../LICENSE' }
  s.author = { 'Chip Weinberger' => 'weinbergerc@gmail.com' }
  s.source = { :path => '.' }
  s.source_files = 'Classes/**/*'
  s.public_header_files = 'Classes/**/*.h'
  s.dependency 'Flutter'
  s.platform = :ios, '9.0'
  # Link CoreAudio for the native PCM playback implementation.
  s.framework = 'CoreAudio'
  s.pod_target_xcconfig = { 'DEFINES_MODULE' => 'YES', }
end
21 |
--------------------------------------------------------------------------------
/macos/flutter_pcm_sound.podspec:
--------------------------------------------------------------------------------
1 | #
2 | # To learn more about a Podspec see http://guides.cocoapods.org/syntax/podspec.html.
# Run `pod lib lint flutter_pcm_sound.podspec` to validate before publishing.
4 | #
Pod::Spec.new do |s|
  s.name = 'flutter_pcm_sound'
  s.version = '0.0.1'
  s.summary = 'Flutter plugin for PCM sound'
  s.description = 'Flutter plugin for PCM sound'
  s.homepage = 'https://github.com/chipweinberger/flutter_pcm_sound'
  s.license = { :file => '../LICENSE' }
  s.author = { 'Chip Weinberger' => 'weinbergerc@gmail.com' }
  s.source = { :path => '.' }
  s.source_files = 'Classes/**/*'
  s.public_header_files = 'Classes/**/*.h'
  s.dependency 'FlutterMacOS'
  s.platform = :osx, '10.11'
  # Link CoreAudio for the native PCM playback implementation.
  s.framework = 'CoreAudio'
  s.pod_target_xcconfig = {
    'DEFINES_MODULE' => 'YES',
  }
end
23 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | This is free and unencumbered software released into the public domain.
2 |
3 | Anyone is free to copy, modify, publish, use, compile, sell, or
4 | distribute this software, either in source code form or as a compiled
5 | binary, for any purpose, commercial or non-commercial, and by any
6 | means.
7 |
8 | In jurisdictions that recognize copyright laws, the author or authors
9 | of this software dedicate any and all copyright interest in the
10 | software to the public domain. We make this dedication for the benefit
11 | of the public at large and to the detriment of our heirs and
12 | successors. We intend this dedication to be an overt act of
13 | relinquishment in perpetuity of all present and future rights to this
14 | software under copyright law.
15 |
16 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
17 | EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
18 | MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
19 | IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR
20 | OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
21 | ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
22 | OTHER DEALINGS IN THE SOFTWARE.
23 |
For more information, please refer to <https://unlicense.org/>
30 |
--------------------------------------------------------------------------------
/CHANGELOG.md:
--------------------------------------------------------------------------------
1 | ## 3.3.3
2 | * **[Docs]** update readme
3 |
4 | ## 3.3.2
5 | * **[Docs]** update readme
6 |
7 | ## 3.3.1
8 | * **[Fix]** more potential race conditions (theoretical)
9 |
10 | ## 3.3.0
11 | * **[Fix]** potential race conditions that could theoretically cause `OnFeedSamples` to misfire
12 |
13 | ## 3.2.6
14 | * **[Docs]** more docs clarifications
15 |
16 | ## 3.2.5
17 | * **[Docs]** document when the feed callback is invoked
18 |
19 | ## 3.2.4
20 | * **[Fix]** calling `start` could cause null deref
21 |
22 | ## 3.2.3
23 | * **[Fix]** iOS: prevent audio blips when locking screen
24 |
25 | ## 3.2.2
26 | * **[Fix]** iOS: avoid `AudioOutputUnitStart` error 561015905 when resuming after screen lock → unlock
27 |
28 | ## 3.2.1
29 | (redacted)
30 |
31 | ## 3.2.0
32 | * **[Feature]** `start` returns true if it invoked your callback
33 |
34 | ## 3.1.7
35 | * **[Fix]** Android: allow `feed` to be called with zero length buffer
36 |
37 | ## 3.1.6
38 | * **[Improve]** calling `start` again when already started will do nothing, as expected.
39 |
40 | ## 3.1.5
41 | * **[Feature]** Android & iOS: invoke feed callback when remainingFrames=0 to signal that playback stopped
42 |
43 | ## 3.1.4
44 | * **[Fix]** Android: callback could be skipped sometimes
45 |
46 | ## 3.1.3
47 | * **[Feature]** add `playAndRecord` iOS category
48 |
49 | ## 3.1.2
50 | * **[Improve]** add `setup` check before feed
51 |
52 | ## 3.1.1
53 | * **[Improve]** Android: feed 200 samples at a time, to align with common feed rates
54 | * **[Improve]** iOS: don't try to play until setup is called
55 |
56 | ## 3.1.0
57 | * **[API]** remove -1 feed threshold support
58 |
59 | ## 3.0.1
60 | * **[API]** iOS: clear input samples to prevent annoying noises when debugging
61 |
62 | ## 3.0.0
63 | * **[API]** simplify api: remove `start`
64 | * **[API]** simplify api: remove `stop`
65 | * **[API]** simplify api: to start & stop, just feed & stop feeding.
* **[API]** simplify api: remove `remainingSamples`. unneeded.
67 |
68 | ## 2.0.0
69 | * **[API]** simplify api: combine `pause` & `stop` into single function
70 |
71 | ## 1.2.7
72 | * **[Fix]** Example: accidentally pushed changes
73 |
74 | ## 1.2.6
75 | * **[Improve]** Android: continue to refine htz
76 |
77 | ## 1.2.5
* **[Improve]** Android: target 100htz when feed threshold is not set
79 |
80 | ## 1.2.4
81 | * **[Feature]** `setFeedThreshold(-1)` will ignore the threshold
82 |
83 | ## 1.2.3
84 | * **[Fix]** Android: setLogLevel would hang
85 |
86 | ## 1.2.2
87 | * **[Fix]** MacOS: fix warnings
88 |
89 | ## 1.2.1
90 | * **[Fix]** Android: Fix crash when releasing PCM player
91 |
92 | ## 1.2.0
93 | * **[Feature]** iOS: add support for AVAudioSessionCategory
94 |
95 | ## 1.1.0
96 | * **[Fix]** android: fix crash when `release` is called
97 |
98 | ## 1.0.1
99 | * **[Readme]** update
100 |
101 | ## 1.0.0
102 | * **[Feature]** Initial Release.
103 |
--------------------------------------------------------------------------------
/example/lib/main.dart:
--------------------------------------------------------------------------------
1 | import 'package:flutter/material.dart';
2 | import 'package:flutter_pcm_sound/flutter_pcm_sound.dart';
3 |
/// Application entry point.
void main() => runApp(const MyApp());
7 |
/// Root widget; simply hosts the [PcmSoundApp] demo page.
class MyApp extends StatelessWidget {
  const MyApp({Key? key}) : super(key: key);

  @override
  Widget build(BuildContext context) => PcmSoundApp();
}
16 |
/// Stateful host for the PCM sound demo; all logic lives in the state class.
class PcmSoundApp extends StatefulWidget {
  @override
  _PcmSoundAppState createState() => _PcmSoundAppState();
}
21 |
/// Demo page state: plays a looping C-major scale via flutter_pcm_sound
/// and displays how many frames are still queued on the platform side.
///
/// NOTE(review): the generic type arguments below (`State<PcmSoundApp>`,
/// `GlobalKey<ScaffoldMessengerState>`, `List<int>`) were stripped to raw
/// types in the extracted dump; restored here.
class _PcmSoundAppState extends State<PcmSoundApp> with WidgetsBindingObserver {
  static const int sampleRate = 48000;
  bool _isPlaying = false;
  bool _isActive = true; // true while the app is in the foreground (resumed)
  int _remainingFrames = 0;
  MajorScale scale = MajorScale(sampleRate: sampleRate, noteDuration: 0.20);

  // Lets _showError display snack bars without a Scaffold BuildContext.
  final GlobalKey<ScaffoldMessengerState> _scaffoldMessengerKey =
      GlobalKey<ScaffoldMessengerState>();

  @override
  void initState() {
    super.initState();
    WidgetsBinding.instance.addObserver(this); // register lifecycle observer

    FlutterPcmSound.setLogLevel(LogLevel.verbose).onError(_showError);
    FlutterPcmSound.setup(sampleRate: sampleRate, channelCount: 1).onError(_showError);
    FlutterPcmSound.setFeedThreshold(sampleRate ~/ 10).onError(_showError);
    FlutterPcmSound.setFeedCallback(_onFeed);
  }

  @override
  void dispose() {
    WidgetsBinding.instance.removeObserver(this); // clean up observer
    FlutterPcmSound.release().onError(_showError);
    super.dispose();
  }

  @override
  void didChangeAppLifecycleState(AppLifecycleState state) {
    setState(() {
      _isActive = state == AppLifecycleState.resumed;
    });
  }

  /// Surfaces plugin errors in a snack bar.
  /// Returns true so `Future.onError` treats the error as handled.
  bool _showError(Object? err, StackTrace st) {
    _scaffoldMessengerKey.currentState?.showSnackBar(
      SnackBar(content: Text("feed failed: $err")),
    );
    return true;
  }

  /// Feed callback: invoked by the plugin when the buffer runs low or drains.
  void _onFeed(int remainingFrames) async {
    setState(() {
      _remainingFrames = remainingFrames;
    });

    // Only feed if playing AND app is active
    if (_isPlaying && _isActive) {
      List<int> frames = scale.generate(periods: 20);
      await FlutterPcmSound.feed(PcmArrayInt16.fromList(frames))
          .onError(_showError);
    }
  }

  @override
  Widget build(BuildContext context) {
    return MaterialApp(
      scaffoldMessengerKey: _scaffoldMessengerKey,
      theme: ThemeData(primarySwatch: Colors.blue),
      home: Scaffold(
        appBar: AppBar(centerTitle: true, title: Text('Flutter PCM Sound')),
        body: Center(
          child: Column(
            mainAxisAlignment: MainAxisAlignment.spaceEvenly,
            children: [
              ElevatedButton(
                onPressed: () {
                  _isPlaying = true;
                  FlutterPcmSound.start();
                },
                child: Text('Play'),
              ),
              ElevatedButton(
                onPressed: () {
                  _isPlaying = false;
                },
                child: Text('Stop'),
              ),
              Text('$_remainingFrames Remaining Frames'),
            ],
          ),
        ),
      ),
    );
  }
}
109 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | [](https://pub.dartlang.org/packages/flutter_pcm_sound)
2 |
3 |
4 |
5 |
6 |
7 | Send real-time PCM audio (16-bit integer) to your device speakers, from your Flutter app!
8 |
9 | ## No Dependencies
10 |
11 | FlutterPcmSound has zero dependencies besides Flutter, Android, iOS, and MacOS themselves.
12 |
13 | ## *Not* for Audio Files
14 |
15 | Unlike other plugins, `flutter_pcm_sound` does *not* use audio files (For example: [flutter_soloud](https://pub.dev/packages/flutter_soloud), [sound_pool](https://pub.dev/packages/soundpool)).
16 |
17 | Instead, `flutter_pcm_sound` is for apps that generate audio in realtime a few milliseconds before you hear it. For example, using [dart_melty_soundfont](https://pub.dev/packages/dart_melty_soundfont).
18 |
19 |
20 | ## Callback Based, For Real-Time Audio
21 |
22 | In contrast to [raw_sound](https://pub.dev/packages/raw_sound), FlutterPcmSound uses a callback `setFeedCallback` to signal when to feed more samples.
23 |
24 | You can lower the feed threshold using `setFeedThreshold` to achieve real time audio, or increase it to have a cushy buffer.
25 |
26 | ## Event Based Feeding
27 |
28 | Unlike traditional audio libraries which use a high-frequency timer-based audio callback, `flutter_pcm_sound` uses a low-frequency event-based callback. This integrates more seamlessly in the existing Flutter event loop, without necessitating an Isolate to ensure precise timing.
29 |
30 | Your feed callback is invoked _once_ for each of these events:
31 | - **Low-buffer event** – when the number of buffered frames falls **below** the threshold set with `setFeedThreshold`, i.e `remainingFrames < threshold`.
32 | - **Zero event** – when the buffer is fully drained, i.e. `remainingFrames == 0`.
33 |
34 | **Note:** _once_ means once per `feed()` — every time you feed new data, the plugin will trigger another low-buffer or zero event when necessary.
35 |
36 | > 💡 **Tip:** By altering how many extra samples you `feed` beyond your threshold, you can control how often `flutter_pcm_sound` invokes your feed callback.
37 |
38 | > 💡 **Tip:** You should still consider running your sound code in a Dart `Isolate`, so that it is decoupled from UI framedrops.
39 |
40 | ## Timer Based Feeding
41 |
If you prefer, it's easy to wrap `flutter_pcm_sound` to simulate traditional timer-based feeding, i.e. invoking your feed callback at a specific preferred rate, using a Dart timer.
43 |
44 | 1) Set a large feed threshold so that `flutter_pcm_sound` regularly tells you its `remainingFrames`
45 |
46 | 2) Start a Dart-side `Timer.periodic(...)` or `Ticker`
47 |
48 | 3) On each timer tick, call your feed callback with the estimated remaining frames. i.e. `estimatedRemainingFrames = remainingFrames - (elapsedSeconds * sampleRate)`
49 |
50 |
51 | For an example, click here
52 |
53 | ```dart
54 | import 'dart:async';
55 | import 'package:flutter_pcm_sound/flutter_pcm_sound.dart';
56 |
typedef FeedCallback = List<int> Function(int remainingFrames);
58 |
59 | /// wraps FlutterPcmSound w/ timer-based feeding
60 | class FlutterPcmTimer {
61 | // --- config ---
62 | static int _sampleRate = 48000;
63 | static int _channelCount = 1;
64 | static int _tickHz = 60;
65 |
66 | // --- state ---
67 | static FeedCallback? _onFeed;
68 | static bool _isSetup = false;
69 | static bool _playing = false;
70 | static Timer? _timer;
71 |
72 | // last native event snapshot
73 | static int _lastEventFrames = 0;
74 | static int _lastEventMicros = 0; // timestamp
75 |
76 | // for UI
77 | static bool get isPlaying => _playing;
78 |
static Future<void> setup({
80 | int sampleRate = 48000,
81 | int channelCount = 1,
82 | int tickHz = 60,
83 | IosAudioCategory iosAudioCategory = IosAudioCategory.playback,
84 | bool iosAllowBackgroundAudio = false,
85 | }) async {
86 | _sampleRate = sampleRate;
87 | _channelCount = channelCount;
88 | _tickHz = tickHz;
89 |
90 | await FlutterPcmSound.setup(
91 | sampleRate: _sampleRate,
92 | channelCount: _channelCount,
93 | iosAudioCategory: iosAudioCategory,
94 | iosAllowBackgroundAudio: iosAllowBackgroundAudio,
95 | );
96 |
97 | // Huge threshold → plugin regularly reports `remainingFrames`.
98 | await FlutterPcmSound.setFeedThreshold(_sampleRate * 60 * 60 * 24 * 365);
99 |
100 | FlutterPcmSound.setFeedCallback((remaining) {
101 | _lastEventFrames = remaining;
102 | _lastEventMicros = _nowMicros();
103 | if (remaining == 0 && _playing) {
104 | // Refill ASAP, but outside the native callback.
105 | scheduleMicrotask(_tick);
106 | }
107 | });
108 |
109 | _isSetup = true;
110 | }
111 |
112 | static void setFeedCallback(FeedCallback? cb) => _onFeed = cb;
113 |
114 | static void start() {
115 | if (!_isSetup) throw StateError('Call SoundTimer.setup(...) first.');
116 | if (_playing) return;
117 |
118 | _playing = true;
119 | FlutterPcmSound.start();
120 |
121 | final period = Duration(milliseconds: (1000 / _tickHz).round());
122 | _timer ??= Timer.periodic(period, (_) => _tick());
123 | }
124 |
125 | static void stop() {
126 | if (!_playing && _timer == null) return;
127 | _playing = false;
128 | _timer?.cancel();
129 | _timer = null;
130 | }
131 |
132 | // --- internals ---
133 |
134 | static void _tick() async {
135 | if (!_playing) return;
136 |
137 | final estRemaining = _estimatedRemainingFramesNow();
138 | if (_onFeed == null) return;
139 |
140 | final samples = _onFeed!(estRemaining);
141 | if (samples.isEmpty) return;
142 |
143 | if (!_playing) return; // guard before async
144 | await FlutterPcmSound.feed(PcmArrayInt16.fromList(samples));
145 | }
146 |
147 | static int _estimatedRemainingFramesNow() {
148 | final lastFrames = _lastEventFrames;
149 | final lastMicros = _lastEventMicros;
150 | if (lastMicros == 0) return 0;
151 |
152 | final elapsedMicros = _nowMicros() - lastMicros;
153 | final elapsedFrames = ((elapsedMicros / 1e6) * _sampleRate).round();
154 | final est = lastFrames - elapsedFrames;
155 | return est > 0 ? est : 0;
156 | }
157 |
158 | static int _nowMicros() => DateTime.now().microsecondsSinceEpoch;
159 | }
160 | ```
161 |
162 |
163 | ## One-Pedal Driving
164 |
165 | To play audio, just keep calling `feed`.
166 |
167 | To stop audio, just stop calling `feed`.
168 |
169 | > 💡 **Tip:** If you prefer a traditional timer-based API with `start()` and `stop()`, I recommend wrapping `flutter_pcm_sound` as described in [Timer Based Feeding](#timer-based-feeding).
170 |
171 | ## Is Playing?
172 |
173 | When your feed callback hits `remainingFrames=0` you know playing stopped.
174 |
175 | ## Usage
176 |
177 | ```dart
178 | // for testing purposes, a C-Major scale
179 | MajorScale scale = MajorScale(sampleRate: 44100, noteDuration: 0.25);
180 |
181 | // invoked whenever we need to feed more samples to the platform
182 | void onFeed(int remainingFrames) async {
183 | // you could use 'remainingFrames' to feed very precisely.
// But here we just load a few thousand samples every time we run low.
List<int> frame = scale.generate(periods: 20);
186 | await FlutterPcmSound.feed(PcmArrayInt16.fromList(frame));
187 | }
188 |
189 | await FlutterPcmSound.setup(sampleRate: 44100, channelCount: 1);
190 | await FlutterPcmSound.setFeedThreshold(8000);
191 | FlutterPcmSound.setFeedCallback(onFeed);
192 | FlutterPcmSound.start(); // for convenience. Equivalent to calling onFeed(0);
193 | ```
194 |
195 | ## ⭐ Stars ⭐
196 |
197 | Please star this repo & on [pub.dev](https://pub.dev/packages/flutter_pcm_sound). We all benefit from having a larger community.
198 |
199 | ## Example App
200 |
201 | Enable the platforms you need.
202 |
203 | ```
204 | cd ./example
205 | flutter config --enable-macos-desktop
206 | flutter config --enable-android
207 | flutter config --enable-ios
208 | flutter create .
209 | flutter run
210 | ```
211 |
212 |
213 |
214 |
--------------------------------------------------------------------------------
/lib/flutter_pcm_sound.dart:
--------------------------------------------------------------------------------
1 | import 'dart:math' as math;
2 | import 'dart:async';
3 | import 'dart:typed_data';
4 | import 'package:flutter/services.dart';
5 |
/// Verbosity of the plugin's console logging (see `setLogLevel`).
enum LogLevel {
  none,
  error,
  standard,
  verbose,
}
12 |
// Apple Documentation: https://developer.apple.com/documentation/avfaudio/avaudiosessioncategory
enum IosAudioCategory {
  soloAmbient, // same as ambient, but other apps will be muted
  ambient, // same as soloAmbient, but other apps are not muted
  playback, // audio will play when phone is locked, like the music app
  playAndRecord // like playback, but also permits audio input — see Apple docs
}
20 |
/// Dart-side API of the flutter_pcm_sound plugin.
///
/// Playback is "one pedal": keep calling [feed] to play, stop calling it to
/// stop. The platform invokes the callback set by [setFeedCallback] when the
/// queued frame count falls below the [setFeedThreshold] threshold or drains
/// to zero.
///
/// NOTE(review): `Future<void>`/`Future<dynamic>` type arguments were stripped
/// to bare `Future` in the extracted dump; restored here.
class FlutterPcmSound {
  static const MethodChannel _channel = const MethodChannel('flutter_pcm_sound/methods');

  /// Called with the number of frames still queued on the platform side.
  static Function(int)? onFeedSamplesCallback;

  static LogLevel _logLevel = LogLevel.standard;

  // True while playback is stopped (never fed, or buffer fully drained),
  // so `start` knows whether it must kick the feed callback.
  static bool _needsStart = true;

  /// set log level
  static Future<void> setLogLevel(LogLevel level) async {
    _logLevel = level;
    return await _invokeMethod('setLogLevel', {'log_level': level.index});
  }

  /// setup audio
  /// 'iosAudioCategory' and 'iosAllowBackgroundAudio' are for iOS only;
  /// they are ignored on other platforms
  static Future<void> setup(
      {required int sampleRate,
      required int channelCount,
      IosAudioCategory iosAudioCategory = IosAudioCategory.playback,
      bool iosAllowBackgroundAudio = false,
      }) async {
    return await _invokeMethod('setup', {
      'sample_rate': sampleRate,
      'num_channels': channelCount,
      'ios_audio_category': iosAudioCategory.name,
      'ios_allow_background_audio' : iosAllowBackgroundAudio,
    });
  }

  /// queue 16-bit samples (little endian)
  static Future<void> feed(PcmArrayInt16 buffer) async {
    // A non-empty feed means playback is (re)starting.
    if (_needsStart && buffer.count != 0) _needsStart = false;
    return await _invokeMethod('feed', {'buffer': buffer.bytes.buffer.asUint8List()});
  }

  /// set the threshold at which we call the
  /// feed callback. i.e. if we have less than X
  /// queued frames, the feed callback will be invoked
  static Future<void> setFeedThreshold(int threshold) async {
    return await _invokeMethod('setFeedThreshold', {'feed_threshold': threshold});
  }

  /// Your feed callback is invoked _once_ for each of these events:
  /// - Low-buffer event: when the number of buffered frames falls below the threshold set with `setFeedThreshold`
  /// - Zero event: when the buffer is fully drained (`remainingFrames == 0`)
  /// Note: once means once per `feed()`. Every time you feed new data, it allows
  /// the plugin to trigger another low-buffer or zero event.
  static void setFeedCallback(Function(int)? callback) {
    onFeedSamplesCallback = callback;
    _channel.setMethodCallHandler(_methodCallHandler);
  }

  /// convenience function:
  /// * if needed, invokes your feed callback to start playback
  /// * returns true if your callback was invoked
  static bool start() {
    if (_needsStart && onFeedSamplesCallback != null) {
      onFeedSamplesCallback!(0);
      return true;
    } else {
      return false;
    }
  }

  /// release all audio resources
  static Future<void> release() async {
    return await _invokeMethod('release');
  }

  /// Invokes a platform method, logging the call (with a truncated preview
  /// of feed buffers) when the log level is standard or above.
  static Future<dynamic> _invokeMethod(String method, [dynamic arguments]) async {
    if (_logLevel.index >= LogLevel.standard.index) {
      String args = '';
      if (method == 'feed') {
        Uint8List data = arguments['buffer'];
        if (data.lengthInBytes > 6) {
          args = '(${data.lengthInBytes ~/ 2} samples) ${data.sublist(0, 6)} ...';
        } else {
          args = '(${data.lengthInBytes ~/ 2} samples) $data';
        }
      } else if (arguments != null) {
        args = arguments.toString();
      }
      print("[PCM] invoke: $method $args");
    }
    return await _channel.invokeMethod(method, arguments);
  }

  /// Handles calls arriving from the platform side ('OnFeedSamples' only).
  static Future<dynamic> _methodCallHandler(MethodCall call) async {
    if (_logLevel.index >= LogLevel.standard.index) {
      String func = '[[ ${call.method} ]]';
      String args = call.arguments.toString();
      print("[PCM] $func $args");
    }
    switch (call.method) {
      case 'OnFeedSamples':
        int remainingFrames = call.arguments["remaining_frames"];
        // Zero remaining frames means playback stopped; `start` must re-kick.
        _needsStart = remainingFrames == 0;
        if (onFeedSamplesCallback != null) {
          onFeedSamplesCallback!(remainingFrames);
        }
        break;
      default:
        print('Method not implemented');
    }
  }
}
130 |
/// A packed array of 16-bit PCM samples (host endianness) backed by [ByteData].
///
/// NOTE(review): the `List<int>` parameter type below was stripped to a raw
/// `List` in the extracted dump; restored here. The index setter previously
/// `return`ed the void result of `setInt16`; it now declares `void` properly.
class PcmArrayInt16 {
  final ByteData bytes;

  PcmArrayInt16({required this.bytes});

  /// An array of [count] zeroed samples.
  factory PcmArrayInt16.zeros({required int count}) {
    Uint8List list = Uint8List(count * 2);
    return PcmArrayInt16(bytes: list.buffer.asByteData());
  }

  /// An array with zero samples.
  factory PcmArrayInt16.empty() {
    return PcmArrayInt16.zeros(count: 0);
  }

  /// Copies [list] into a new array, 2 bytes per sample.
  factory PcmArrayInt16.fromList(List<int> list) {
    var byteData = ByteData(list.length * 2);
    for (int i = 0; i < list.length; i++) {
      byteData.setInt16(i * 2, list[i], Endian.host);
    }
    return PcmArrayInt16(bytes: byteData);
  }

  /// Number of 16-bit samples.
  int get count => bytes.lengthInBytes ~/ 2;

  /// Reads the sample at [idx].
  int operator [](int idx) {
    return bytes.getInt16(idx * 2, Endian.host);
  }

  /// Writes [value] to the sample at [idx].
  void operator []=(int idx, int value) {
    bytes.setInt16(idx * 2, value, Endian.host);
  }
}
164 |
// for testing
/// Generates a looping C-major scale as 16-bit PCM frames.
///
/// NOTE(review): `List<double>` / `List<int>` type arguments were stripped to
/// raw `List` in the extracted dump; restored here.
class MajorScale {
  int _periodCount = 0; // periods emitted so far; wraps at _periodsForScale
  int sampleRate = 44100;
  double noteDuration = 0.25; // seconds per note

  MajorScale({required this.sampleRate, required this.noteDuration});

  // C Major Scale (Just Intonation), ascending then descending
  List<double> get scale {
    List<double> c = [261.63, 294.33, 327.03, 348.83, 392.44, 436.05, 490.55, 523.25];
    return [c[0]] + c + c.reversed.toList().sublist(0, c.length - 1);
  }

  // total periods needed to play the entire note
  int _periodsForNote(double freq) {
    int nFramesPerPeriod = (sampleRate / freq).round();
    int totalFramesForDuration = (noteDuration * sampleRate).round();
    return totalFramesForDuration ~/ nFramesPerPeriod;
  }

  // total periods needed to play the whole scale
  int get _periodsForScale {
    int total = 0;
    for (double freq in scale) {
      total += _periodsForNote(freq);
    }
    return total;
  }

  // what note are we currently playing
  int get noteIdx {
    int accum = 0;
    for (int n = 0; n < scale.length; n++) {
      accum += _periodsForNote(scale[n]);
      if (_periodCount < accum) {
        return n;
      }
    }
    return scale.length - 1;
  }

  // generate whole periods of a cosine wave as 16-bit samples
  List<int> cosineWave({int periods = 1, int sampleRate = 44100, double freq = 440, double volume = 0.5}) {
    final period = 1.0 / freq;
    final nFramesPerPeriod = (period * sampleRate).toInt();
    final totalFrames = nFramesPerPeriod * periods;
    final step = math.pi * 2 / nFramesPerPeriod;
    List<int> data = List.filled(totalFrames, 0);
    for (int i = 0; i < totalFrames; i++) {
      // NOTE(review): the -16384 offset biases the wave into [-32768, 0]
      // rather than centering it; kept as-is to preserve original output.
      data[i] = (math.cos(step * (i % nFramesPerPeriod)) * volume * 32768).toInt() - 16384;
    }
    return data;
  }

  /// restart the scale from the beginning
  void reset() {
    _periodCount = 0;
  }

  // generate the next X periods of the major scale
  List<int> generate({required int periods, double volume = 0.5}) {
    List<int> frames = [];
    for (int i = 0; i < periods; i++) {
      _periodCount %= _periodsForScale; // wrap so the scale loops forever
      frames += cosineWave(periods: 1, sampleRate: sampleRate, freq: scale[noteIdx], volume: volume);
      _periodCount++;
    }
    return frames;
  }
}
235 |
--------------------------------------------------------------------------------
/android/src/main/java/com/lib/flutter_pcm_sound/FlutterPcmSoundPlugin.java:
--------------------------------------------------------------------------------
1 | package com.lib.flutter_pcm_sound;
2 |
3 | import android.os.Build;
4 | import android.media.AudioFormat;
5 | import android.media.AudioManager;
6 | import android.media.AudioTrack;
7 | import android.media.AudioAttributes;
8 | import android.os.Handler;
9 | import android.os.Looper;
10 | import android.os.SystemClock;
11 |
12 | import androidx.annotation.NonNull;
13 |
14 | import java.util.Map;
15 | import java.util.HashMap;
16 | import java.util.List;
17 | import java.util.ArrayList;
18 | import java.util.concurrent.LinkedBlockingQueue;
19 | import java.util.concurrent.TimeUnit;
20 | import java.io.StringWriter;
21 | import java.io.PrintWriter;
22 | import java.nio.ByteBuffer;
23 |
24 | import io.flutter.embedding.engine.plugins.FlutterPlugin;
25 | import io.flutter.plugin.common.BinaryMessenger;
26 | import io.flutter.plugin.common.MethodCall;
27 | import io.flutter.plugin.common.MethodChannel;
28 |
29 | /**
30 | * FlutterPcmSoundPlugin implements a "one pedal" PCM sound playback mechanism.
31 | * Playback starts automatically when samples are fed and stops when no more samples are available.
32 | */
33 | public class FlutterPcmSoundPlugin implements
34 | FlutterPlugin,
35 | MethodChannel.MethodCallHandler
36 | {
37 | private static final String CHANNEL_NAME = "flutter_pcm_sound/methods";
38 | private static final int MAX_FRAMES_PER_BUFFER = 200;
39 |
40 | private MethodChannel mMethodChannel;
41 | private Handler mainThreadHandler = new Handler(Looper.getMainLooper());
42 | private Thread playbackThread;
43 | private volatile boolean mShouldCleanup = false;
44 |
45 | private AudioTrack mAudioTrack;
46 | private int mNumChannels;
47 | private int mMinBufferSize;
48 | private boolean mDidSetup = false;
49 |
50 | private long mFeedThreshold = 8000;
51 | private long mTotalFeeds = 0;
52 | private long mLastLowBufferFeed = 0;
53 | private long mLastZeroFeed = 0;
54 |
55 | // Thread-safe queue for storing audio samples
56 | private final LinkedBlockingQueue mSamples = new LinkedBlockingQueue<>();
57 |
58 | // Log level enum (kept for potential future use)
59 | private enum LogLevel {
60 | NONE,
61 | ERROR,
62 | STANDARD,
63 | VERBOSE
64 | }
65 |
66 | private LogLevel mLogLevel = LogLevel.VERBOSE;
67 |
68 | @Override
69 | public void onAttachedToEngine(@NonNull FlutterPluginBinding binding) {
70 | BinaryMessenger messenger = binding.getBinaryMessenger();
71 | mMethodChannel = new MethodChannel(messenger, CHANNEL_NAME);
72 | mMethodChannel.setMethodCallHandler(this);
73 | }
74 |
75 | @Override
76 | public void onDetachedFromEngine(@NonNull FlutterPluginBinding binding) {
77 | mMethodChannel.setMethodCallHandler(null);
78 | cleanup();
79 | }
80 |
81 | @Override
82 | @SuppressWarnings("deprecation") // Needed for compatibility with Android < 23
83 | public void onMethodCall(@NonNull MethodCall call, @NonNull MethodChannel.Result result) {
84 | try {
85 | switch (call.method) {
86 | case "setLogLevel": {
87 | result.success(true);
88 | break;
89 | }
90 | case "setup": {
91 | int sampleRate = call.argument("sample_rate");
92 | mNumChannels = call.argument("num_channels");
93 |
94 | // Cleanup existing resources if any
95 | if (mAudioTrack != null) {
96 | cleanup();
97 | }
98 |
99 | int channelConfig = (mNumChannels == 2) ?
100 | AudioFormat.CHANNEL_OUT_STEREO :
101 | AudioFormat.CHANNEL_OUT_MONO;
102 |
103 | mMinBufferSize = AudioTrack.getMinBufferSize(
104 | sampleRate, channelConfig, AudioFormat.ENCODING_PCM_16BIT);
105 |
106 | if (mMinBufferSize == AudioTrack.ERROR || mMinBufferSize == AudioTrack.ERROR_BAD_VALUE) {
107 | result.error("AudioTrackError", "Invalid buffer size.", null);
108 | return;
109 | }
110 |
111 | if (Build.VERSION.SDK_INT >= 23) { // Android 6 (Marshmallow) and above
112 | mAudioTrack = new AudioTrack.Builder()
113 | .setAudioAttributes(new AudioAttributes.Builder()
114 | .setUsage(AudioAttributes.USAGE_MEDIA)
115 | .setContentType(AudioAttributes.CONTENT_TYPE_MUSIC)
116 | .build())
117 | .setAudioFormat(new AudioFormat.Builder()
118 | .setEncoding(AudioFormat.ENCODING_PCM_16BIT)
119 | .setSampleRate(sampleRate)
120 | .setChannelMask(channelConfig)
121 | .build())
122 | .setBufferSizeInBytes(mMinBufferSize)
123 | .setTransferMode(AudioTrack.MODE_STREAM)
124 | .build();
125 | } else {
126 | mAudioTrack = new AudioTrack(
127 | AudioManager.STREAM_MUSIC,
128 | sampleRate,
129 | channelConfig,
130 | AudioFormat.ENCODING_PCM_16BIT,
131 | mMinBufferSize,
132 | AudioTrack.MODE_STREAM);
133 | }
134 |
135 | if (mAudioTrack.getState() != AudioTrack.STATE_INITIALIZED) {
136 | result.error("AudioTrackError", "AudioTrack initialization failed.", null);
137 | mAudioTrack.release();
138 | mAudioTrack = null;
139 | return;
140 | }
141 |
142 | // reset
143 | mSamples.clear();
144 | mShouldCleanup = false;
145 |
146 | // start playback thread
147 | playbackThread = new Thread(this::playbackThreadLoop, "PCMPlaybackThread");
148 | playbackThread.setPriority(Thread.MAX_PRIORITY);
149 | playbackThread.start();
150 |
151 | mDidSetup = true;
152 |
153 | result.success(true);
154 | break;
155 | }
156 | case "feed": {
157 |
158 | // check setup (to match iOS behavior)
159 | if (mDidSetup == false) {
160 | result.error("Setup", "must call setup first", null);
161 | return;
162 | }
163 |
164 | byte[] buffer = call.argument("buffer");
165 |
166 | // Split for better performance
167 | List chunks = split(buffer, MAX_FRAMES_PER_BUFFER);
168 |
169 | // Push samples
170 | synchronized (mSamples) {
171 | for (ByteBuffer chunk : chunks) {
172 | mSamples.add(chunk);
173 | }
174 | mTotalFeeds += 1;
175 | }
176 |
177 | result.success(true);
178 | break;
179 | }
180 | case "setFeedThreshold": {
181 | long feedThreshold = ((Number) call.argument("feed_threshold")).longValue();
182 |
183 | synchronized (mSamples) {
184 | mFeedThreshold = feedThreshold;
185 | }
186 |
187 | result.success(true);
188 | break;
189 | }
190 | case "release": {
191 | cleanup();
192 | result.success(true);
193 | break;
194 | }
195 | default:
196 | result.notImplemented();
197 | break;
198 | }
199 |
200 |
201 | } catch (Exception e) {
202 | StringWriter sw = new StringWriter();
203 | PrintWriter pw = new PrintWriter(sw);
204 | e.printStackTrace(pw);
205 | String stackTrace = sw.toString();
206 | result.error("androidException", e.toString(), stackTrace);
207 | return;
208 | }
209 | }
210 |
211 | /**
212 | * Cleans up resources by stopping the playback thread and releasing AudioTrack.
213 | */
214 | private void cleanup() {
215 | // stop playback thread
216 | if (playbackThread != null) {
217 | mShouldCleanup = true;
218 | playbackThread.interrupt();
219 | try {
220 | playbackThread.join();
221 | } catch (InterruptedException e) {
222 | Thread.currentThread().interrupt();
223 | }
224 | playbackThread = null;
225 | mDidSetup = false;
226 | }
227 | }
228 |
229 | /**
230 | * Invokes the 'OnFeedSamples' callback with the number of remaining frames.
231 | */
232 | private void invokeFeedCallback(long remainingFrames) {
233 | Map response = new HashMap<>();
234 | response.put("remaining_frames", remainingFrames);
235 | mMethodChannel.invokeMethod("OnFeedSamples", response);
236 | }
237 |
238 | /**
239 | * The main loop of the playback thread.
240 | */
241 | private void playbackThreadLoop() {
242 | android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_AUDIO);
243 |
244 | mAudioTrack.play();
245 |
246 | while (!mShouldCleanup) {
247 | ByteBuffer data = null;
248 | try {
249 | // blocks indefinitely until new data
250 | data = mSamples.take();
251 | } catch (InterruptedException e) {
252 | Thread.currentThread().interrupt();
253 | continue;
254 | }
255 |
256 | // write
257 | mAudioTrack.write(data, data.remaining(), AudioTrack.WRITE_BLOCKING);
258 |
259 | long remainingFrames;
260 | long totalFeeds;
261 | long feedThreshold;
262 |
263 | // grab shared data
264 | synchronized (mSamples) {
265 | long totalBytes = 0;
266 | for (ByteBuffer sampleBuffer : mSamples) {
267 | totalBytes += sampleBuffer.remaining();
268 | }
269 | remainingFrames = totalBytes / (2 * mNumChannels);
270 | totalFeeds = mTotalFeeds;
271 | feedThreshold = mFeedThreshold;
272 | }
273 |
274 | // check for events
275 | boolean isLowBufferEvent = (remainingFrames <= feedThreshold) && (mLastLowBufferFeed != totalFeeds);
276 | boolean isZeroCrossingEvent = (remainingFrames == 0) && (mLastZeroFeed != totalFeeds);
277 |
278 | // send events
279 | if (isLowBufferEvent || isZeroCrossingEvent) {
280 | if (isLowBufferEvent) {mLastLowBufferFeed = totalFeeds;}
281 | if (isZeroCrossingEvent) {mLastZeroFeed = totalFeeds;}
282 | mainThreadHandler.post(() -> invokeFeedCallback(remainingFrames));
283 | }
284 | }
285 |
286 | mAudioTrack.stop();
287 | mAudioTrack.flush();
288 | mAudioTrack.release();
289 | mAudioTrack = null;
290 | }
291 |
292 |
293 | private List split(byte[] buffer, int maxSize) {
294 | List chunks = new ArrayList<>();
295 | int offset = 0;
296 | while (offset < buffer.length) {
297 | int length = Math.min(buffer.length - offset, maxSize);
298 | ByteBuffer b = ByteBuffer.wrap(buffer, offset, length);
299 | chunks.add(b);
300 | offset += length;
301 | }
302 | return chunks;
303 | }
304 | }
305 |
--------------------------------------------------------------------------------
/ios/Classes/FlutterPcmSoundPlugin.m:
--------------------------------------------------------------------------------
#import "FlutterPcmSoundPlugin.h"
#import <AudioToolbox/AudioToolbox.h>

#if TARGET_OS_IOS
#import <AVFoundation/AVFoundation.h>
#import <UIKit/UIKit.h>
#endif
7 |
#define kOutputBus 0 // output element of the I/O audio unit
#define NAMESPACE @"flutter_pcm_sound" // method channel name prefix

// Log verbosity levels, mirroring the Dart-side values.
typedef NS_ENUM(NSUInteger, LogLevel) {
    none = 0,
    error = 1,
    standard = 2,
    verbose = 3,
};

// Private class extension: internal state for the audio-unit pipeline.
// NOTE(review): some declarations appear to have lost protocol/generic
// parameters during extraction (e.g. NSObject<FlutterPluginRegistrar> *) —
// confirm against the original header.
@interface FlutterPcmSoundPlugin ()
@property(nonatomic) NSObject *registrar; // NOTE(review): never assigned in this file
@property(nonatomic) FlutterMethodChannel *mMethodChannel; // Dart <-> native channel
@property(nonatomic) LogLevel mLogLevel;
@property(nonatomic) AudioComponentInstance mAudioUnit; // output unit; nil until setup
@property(nonatomic) NSMutableData *mSamples; // pending PCM bytes; guarded by @synchronized(self.mSamples)
@property(nonatomic) int mNumChannels; // channels per frame (1 = mono, 2 = stereo)
@property(nonatomic) int mFeedThreshold; // remaining-frame count at which OnFeedSamples fires
@property(nonatomic) NSUInteger mTotalFeeds; // feed counter, used to dedupe callbacks
@property(nonatomic) NSUInteger mLastLowBufferFeed; // last feed that fired a low-buffer event
@property(nonatomic) NSUInteger mLastZeroFeed; // last feed that fired a zero-remaining event
@property(nonatomic) bool mDidSetup; // true after a successful 'setup' call
@property(nonatomic) BOOL mIsAppActive; // tracked via UIApplication notifications (iOS only)
@property(nonatomic) BOOL mAllowBackgroundAudio; // from 'ios_allow_background_audio' setup arg
@end
33 |
34 | @implementation FlutterPcmSoundPlugin
35 |
// Flutter plugin entry point: wires up the method channel, builds the plugin
// instance with its default state, and (on iOS) subscribes to app lifecycle
// notifications so feeds arriving while inactive can be handled gracefully.
+ (void)registerWithRegistrar:(NSObject *)registrar
{
    FlutterMethodChannel *channel = [FlutterMethodChannel methodChannelWithName:NAMESPACE @"/methods"
                                                                binaryMessenger:[registrar messenger]];

    FlutterPcmSoundPlugin *plugin = [[FlutterPcmSoundPlugin alloc] init];

    // channel + defaults
    plugin.mMethodChannel = channel;
    plugin.mLogLevel = verbose;
    plugin.mSamples = [NSMutableData new];
    plugin.mFeedThreshold = 8000;

    // feed bookkeeping starts at zero
    plugin.mTotalFeeds = 0;
    plugin.mLastLowBufferFeed = 0;
    plugin.mLastZeroFeed = 0;

    // lifecycle flags
    plugin.mDidSetup = false;
    plugin.mIsAppActive = true;
    plugin.mAllowBackgroundAudio = false;

#if TARGET_OS_IOS
    NSNotificationCenter *center = [NSNotificationCenter defaultCenter];
    [center addObserver:plugin selector:@selector(onWillResignActive:) name:UIApplicationWillResignActiveNotification object:nil];
    [center addObserver:plugin selector:@selector(onDidBecomeActive:) name:UIApplicationDidBecomeActiveNotification object:nil];
#endif

    [registrar addMethodCallDelegate:plugin channel:channel];
}
61 |
#if TARGET_OS_IOS
// App is leaving the foreground (lock screen, app switcher, incoming call).
- (void)onWillResignActive:(NSNotification *)note {
    self.mIsAppActive = NO;
}

// App returned to the foreground.
- (void)onDidBecomeActive:(NSNotification *)note {
    self.mIsAppActive = YES;
}
#endif
71 |
// Handles all method calls from Dart: setLogLevel, setup, feed,
// setFeedThreshold, release. Every branch calls result(...) exactly once;
// failures are reported as FlutterError rather than thrown.
- (void)handleMethodCall:(FlutterMethodCall *)call result:(FlutterResult)result
{
    @try
    {
        if ([@"setLogLevel" isEqualToString:call.method])
        {
            NSDictionary *args = (NSDictionary*)call.arguments;
            NSNumber *logLevelNumber = args[@"log_level"];

            self.mLogLevel = (LogLevel)[logLevelNumber integerValue];

            result(@YES);
        }
        else if ([@"setup" isEqualToString:call.method])
        {
            NSDictionary *args = (NSDictionary*)call.arguments;
            NSNumber *sampleRate = args[@"sample_rate"];
            NSNumber *numChannels = args[@"num_channels"];
#if TARGET_OS_IOS
            NSString *iosAudioCategory = args[@"ios_audio_category"];
            self.mAllowBackgroundAudio = [args[@"ios_allow_background_audio"] boolValue];
#endif

            self.mNumChannels = [numChannels intValue];

#if TARGET_OS_IOS
            // iOS audio category
            AVAudioSessionCategory category = AVAudioSessionCategorySoloAmbient;
            if ([iosAudioCategory isEqualToString:@"ambient"]) {
                category = AVAudioSessionCategoryAmbient;
            } else if ([iosAudioCategory isEqualToString:@"soloAmbient"]) {
                category = AVAudioSessionCategorySoloAmbient;
            } else if ([iosAudioCategory isEqualToString:@"playback"]) {
                category = AVAudioSessionCategoryPlayback;
            } else if ([iosAudioCategory isEqualToString:@"playAndRecord"]) {
                category = AVAudioSessionCategoryPlayAndRecord;
            }

            // Set the AVAudioSession category based on the string value.
            // Check the BOOL return value, not the NSError out-parameter —
            // the error pointer is only guaranteed meaningful on failure.
            NSError *error = nil;
            if (![[AVAudioSession sharedInstance] setCategory:category error:&error]) {
                NSLog(@"Error setting AVAudioSession category: %@", error);
                result([FlutterError errorWithCode:@"AVAudioSessionError"
                                           message:@"Error setting AVAudioSession category"
                                           details:[error localizedDescription]]);
                return;
            }

            // Activate the audio session
            if (![[AVAudioSession sharedInstance] setActive:YES error:&error]) {
                NSLog(@"Error activating AVAudioSession: %@", error);
                result([FlutterError errorWithCode:@"AVAudioSessionError"
                                           message:@"Error activating AVAudioSession"
                                           details:[error localizedDescription]]);
                return;
            }
#endif

            // cleanup any previous audio unit before creating a new one
            if (_mAudioUnit != nil) {
                [self cleanup];
            }

            // create the output audio unit
            AudioComponentDescription desc;
            desc.componentType = kAudioUnitType_Output;
#if TARGET_OS_IOS
            desc.componentSubType = kAudioUnitSubType_RemoteIO;
#else // MacOS
            desc.componentSubType = kAudioUnitSubType_DefaultOutput;
#endif
            desc.componentFlags = 0;
            desc.componentFlagsMask = 0;
            desc.componentManufacturer = kAudioUnitManufacturer_Apple;

            AudioComponent inputComponent = AudioComponentFindNext(NULL, &desc);
            OSStatus status = AudioComponentInstanceNew(inputComponent, &_mAudioUnit);
            if (status != noErr) {
                NSString* message = [NSString stringWithFormat:@"AudioComponentInstanceNew failed. OSStatus: %@", @(status)];
                result([FlutterError errorWithCode:@"AudioUnitError" message:message details:nil]);
                return;
            }

            // set stream format: interleaved signed 16-bit linear PCM
            AudioStreamBasicDescription audioFormat;
            audioFormat.mSampleRate = [sampleRate intValue];
            audioFormat.mFormatID = kAudioFormatLinearPCM;
            audioFormat.mFormatFlags = kAudioFormatFlagIsSignedInteger | kAudioFormatFlagIsPacked;
            audioFormat.mFramesPerPacket = 1;
            audioFormat.mChannelsPerFrame = self.mNumChannels;
            audioFormat.mBitsPerChannel = 16;
            audioFormat.mBytesPerFrame = self.mNumChannels * (audioFormat.mBitsPerChannel / 8);
            audioFormat.mBytesPerPacket = audioFormat.mBytesPerFrame * audioFormat.mFramesPerPacket;

            status = AudioUnitSetProperty(_mAudioUnit,
                                        kAudioUnitProperty_StreamFormat,
                                        kAudioUnitScope_Input,
                                        kOutputBus,
                                        &audioFormat,
                                        sizeof(audioFormat));
            if (status != noErr) {
                NSString* message = [NSString stringWithFormat:@"AudioUnitSetProperty StreamFormat failed. OSStatus: %@", @(status)];
                result([FlutterError errorWithCode:@"AudioUnitError" message:message details:nil]);
                return;
            }

            // set render callback (pulls bytes from mSamples on the audio thread)
            AURenderCallbackStruct callback;
            callback.inputProc = RenderCallback;
            callback.inputProcRefCon = (__bridge void *)(self);

            status = AudioUnitSetProperty(_mAudioUnit,
                                        kAudioUnitProperty_SetRenderCallback,
                                        kAudioUnitScope_Global,
                                        kOutputBus,
                                        &callback,
                                        sizeof(callback));
            if (status != noErr) {
                NSString* message = [NSString stringWithFormat:@"AudioUnitSetProperty SetRenderCallback failed. OSStatus: %@", @(status)];
                result([FlutterError errorWithCode:@"AudioUnitError" message:message details:nil]);
                return;
            }

            // initialize
            status = AudioUnitInitialize(_mAudioUnit);
            if (status != noErr) {
                NSString* message = [NSString stringWithFormat:@"AudioUnitInitialize failed. OSStatus: %@", @(status)];
                result([FlutterError errorWithCode:@"AudioUnitError" message:message details:nil]);
                return;
            }

            self.mDidSetup = true;

            result(@YES);
        }
        else if ([@"feed" isEqualToString:call.method])
        {
            // setup check
            if (!self.mDidSetup) {
                result([FlutterError errorWithCode:@"Setup" message:@"must call setup first" details:nil]);
                return;
            }

            // If background audio is not allowed, feeding immediately after a lock→unlock
            // can cause AudioOutputUnitStart to fail with code 561015905 because the app is not
            // fully active yet. Rather than surfacing this transient error, we report success
            // and tell Dart the frames were consumed, prompting it to continue feeding.
            // This hides the temporary failure and keeps the API simple.
            if (!self.mIsAppActive && !self.mAllowBackgroundAudio) {
                @synchronized (self.mSamples) {[self.mSamples setLength:0];}
                [self.mMethodChannel invokeMethod:@"OnFeedSamples" arguments:@{@"remaining_frames": @(0)}];
                result(@YES);
                return;
            }

            NSDictionary *args = (NSDictionary*)call.arguments;
            FlutterStandardTypedData *buffer = args[@"buffer"];

            @synchronized (self.mSamples) {
                [self.mSamples appendData:buffer.data];
                self.mTotalFeeds += 1;
            }

            // start (safe to call when already running)
            OSStatus status = AudioOutputUnitStart(_mAudioUnit);
            if (status != noErr) {
                NSString* message = [NSString stringWithFormat:@"AudioOutputUnitStart failed. OSStatus: %@", @(status)];
                result([FlutterError errorWithCode:@"AudioUnitError" message:message details:nil]);
                return;
            }

            result(@YES);
        }
        else if ([@"setFeedThreshold" isEqualToString:call.method])
        {
            NSDictionary *args = (NSDictionary*)call.arguments;
            NSNumber *feedThreshold = args[@"feed_threshold"];

            @synchronized (self.mSamples) {
                self.mFeedThreshold = [feedThreshold intValue];
            }

            result(@YES);
        }
        else if([@"release" isEqualToString:call.method])
        {
            [self cleanup];
            result(@YES);
        }
        else
        {
            result([FlutterError errorWithCode:@"functionNotImplemented" message:call.method details:nil]);
        }
    }
    @catch (NSException *e)
    {
        NSString *stackTrace = [[e callStackSymbols] componentsJoinedByString:@"\n"];
        NSDictionary *details = @{@"stackTrace": stackTrace};
        result([FlutterError errorWithCode:@"iosException" message:[e reason] details:details]);
    }
}
276 |
// Tears down the audio unit (stop → uninitialize → dispose, in that order)
// and drops any queued samples. Safe to call when no unit exists.
- (void)cleanup
{
    if (_mAudioUnit != nil) {
        AudioOutputUnitStop(_mAudioUnit);
        AudioUnitUninitialize(_mAudioUnit);
        AudioComponentInstanceDispose(_mAudioUnit);
        _mAudioUnit = nil;
        self.mDidSetup = false;
    }
    // Clear pending samples under the same lock used by the render callback.
    @synchronized (self.mSamples) {
        [self.mSamples setLength:0];
    }
}
290 |
// Stops the audio unit, but only if it exists and is currently running.
// Called from the main queue when the render callback drains the last sample.
- (void)stopAudioUnit
{
    if (_mAudioUnit == nil) {
        return;
    }

    // Query whether the output unit is currently running.
    UInt32 running = 0;
    UInt32 propSize = sizeof(running);
    OSStatus status = AudioUnitGetProperty(_mAudioUnit,
                                           kAudioOutputUnitProperty_IsRunning,
                                           kAudioUnitScope_Global,
                                           0,
                                           &running,
                                           &propSize);
    if (status != noErr) {
        NSLog(@"AudioUnitGetProperty IsRunning failed. OSStatus: %@", @(status));
        return;
    }
    if (!running) {
        return;
    }

    status = AudioOutputUnitStop(_mAudioUnit);
    if (status != noErr) {
        NSLog(@"AudioOutputUnitStop failed. OSStatus: %@", @(status));
    } else {
        NSLog(@"AudioUnit stopped because no more samples");
    }
}
316 |
317 |
// Core Audio render callback. Runs on the real-time audio I/O thread:
// fills ioData with queued PCM bytes (zero-padding any shortfall, i.e.
// silence on underrun), pops the consumed bytes, then schedules stop and
// feed notifications on the main queue.
// NOTE(review): @synchronized and NSMutableData mutation on the render
// thread can block/allocate, which is discouraged for real-time audio —
// confirm this is an accepted trade-off here.
static OSStatus RenderCallback(void *inRefCon,
                                AudioUnitRenderActionFlags *ioActionFlags,
                                const AudioTimeStamp *inTimeStamp,
                                UInt32 inBusNumber,
                                UInt32 inNumberFrames,
                                AudioBufferList *ioData)
{
    FlutterPcmSoundPlugin *instance = (__bridge FlutterPcmSoundPlugin *)(inRefCon);

    NSUInteger totalFeeds = 0;
    NSUInteger remainingFrames;
    NSUInteger feedThreshold = 0;

    @synchronized (instance.mSamples) {

        // clear (any bytes not overwritten below play as silence)
        memset(ioData->mBuffers[0].mData, 0, ioData->mBuffers[0].mDataByteSize);

        NSUInteger bytesToCopy = MIN(ioData->mBuffers[0].mDataByteSize, [instance.mSamples length]);

        // provide samples
        memcpy(ioData->mBuffers[0].mData, [instance.mSamples bytes], bytesToCopy);

        // pop front bytes
        NSRange range = NSMakeRange(0, bytesToCopy);
        [instance.mSamples replaceBytesInRange:range withBytes:NULL length:0];

        // grab shared data (frames = bytes / (channels * 2 bytes per 16-bit sample))
        remainingFrames = [instance.mSamples length] / (instance.mNumChannels * sizeof(short));
        totalFeeds = instance.mTotalFeeds;
        feedThreshold = (NSUInteger)instance.mFeedThreshold;
    }

    // check for events (each fires at most once per feed)
    BOOL isLowBufferEvent = (remainingFrames <= feedThreshold) && (instance.mLastLowBufferFeed != totalFeeds);
    BOOL isZeroCrossingEvent = (remainingFrames == 0) && (instance.mLastZeroFeed != totalFeeds);

    // stop running, if needed
    if (remainingFrames == 0) {
        dispatch_async(dispatch_get_main_queue(), ^{
            @synchronized (instance.mSamples) { // re-check: a feed may have arrived meanwhile
                if ([instance.mSamples length] != 0) {return;}
            }
            [instance stopAudioUnit];
        });
    }

    // send events
    if (isLowBufferEvent || isZeroCrossingEvent) {
        if(isLowBufferEvent) {instance.mLastLowBufferFeed = totalFeeds;}
        if(isZeroCrossingEvent) {instance.mLastZeroFeed = totalFeeds;}
        NSDictionary *response = @{@"remaining_frames": @(remainingFrames)};
        dispatch_async(dispatch_get_main_queue(), ^{
            [instance.mMethodChannel invokeMethod:@"OnFeedSamples" arguments:response];
        });
    }

    return noErr;
}
377 |
378 |
379 | @end
380 |
--------------------------------------------------------------------------------