package org.libsdl.app;

import android.media.AudioFormat;
import android.media.AudioManager;
import android.media.AudioRecord;
import android.media.AudioTrack;
import android.media.MediaRecorder;
import android.os.Build;
import android.util.Log;
11 public class SDLAudioManager
13 protected static final String TAG = "SDLAudio";
15 protected static AudioTrack mAudioTrack;
16 protected static AudioRecord mAudioRecord;
18 public static void initialize() {
25 protected static String getAudioFormatString(int audioFormat) {
26 switch (audioFormat) {
27 case AudioFormat.ENCODING_PCM_8BIT:
29 case AudioFormat.ENCODING_PCM_16BIT:
31 case AudioFormat.ENCODING_PCM_FLOAT:
34 return Integer.toString(audioFormat);
38 protected static int[] open(boolean isCapture, int sampleRate, int audioFormat, int desiredChannels, int desiredFrames) {
43 Log.v(TAG, "Opening " + (isCapture ? "capture" : "playback") + ", requested " + desiredFrames + " frames of " + desiredChannels + " channel " + getAudioFormatString(audioFormat) + " audio at " + sampleRate + " Hz");
45 /* On older devices let's use known good settings */
46 if (Build.VERSION.SDK_INT < 21) {
47 if (desiredChannels > 2) {
52 /* AudioTrack has sample rate limitation of 48000 (fixed in 5.0.2) */
53 if (Build.VERSION.SDK_INT < 22) {
54 if (sampleRate < 8000) {
56 } else if (sampleRate > 48000) {
61 if (audioFormat == AudioFormat.ENCODING_PCM_FLOAT) {
62 int minSDKVersion = (isCapture ? 23 : 21);
63 if (Build.VERSION.SDK_INT < minSDKVersion) {
64 audioFormat = AudioFormat.ENCODING_PCM_16BIT;
69 case AudioFormat.ENCODING_PCM_8BIT:
72 case AudioFormat.ENCODING_PCM_16BIT:
75 case AudioFormat.ENCODING_PCM_FLOAT:
79 Log.v(TAG, "Requested format " + audioFormat + ", getting ENCODING_PCM_16BIT");
80 audioFormat = AudioFormat.ENCODING_PCM_16BIT;
86 switch (desiredChannels) {
88 channelConfig = AudioFormat.CHANNEL_IN_MONO;
91 channelConfig = AudioFormat.CHANNEL_IN_STEREO;
94 Log.v(TAG, "Requested " + desiredChannels + " channels, getting stereo");
96 channelConfig = AudioFormat.CHANNEL_IN_STEREO;
100 switch (desiredChannels) {
102 channelConfig = AudioFormat.CHANNEL_OUT_MONO;
105 channelConfig = AudioFormat.CHANNEL_OUT_STEREO;
108 channelConfig = AudioFormat.CHANNEL_OUT_STEREO | AudioFormat.CHANNEL_OUT_FRONT_CENTER;
111 channelConfig = AudioFormat.CHANNEL_OUT_QUAD;
114 channelConfig = AudioFormat.CHANNEL_OUT_QUAD | AudioFormat.CHANNEL_OUT_FRONT_CENTER;
117 channelConfig = AudioFormat.CHANNEL_OUT_5POINT1;
120 channelConfig = AudioFormat.CHANNEL_OUT_5POINT1 | AudioFormat.CHANNEL_OUT_BACK_CENTER;
123 if (Build.VERSION.SDK_INT >= 23) {
124 channelConfig = AudioFormat.CHANNEL_OUT_7POINT1_SURROUND;
126 Log.v(TAG, "Requested " + desiredChannels + " channels, getting 5.1 surround");
128 channelConfig = AudioFormat.CHANNEL_OUT_5POINT1;
132 Log.v(TAG, "Requested " + desiredChannels + " channels, getting stereo");
134 channelConfig = AudioFormat.CHANNEL_OUT_STEREO;
139 Log.v(TAG, "Speaker configuration (and order of channels):");
141 if ((channelConfig & 0x00000004) != 0) {
142 Log.v(TAG, " CHANNEL_OUT_FRONT_LEFT");
144 if ((channelConfig & 0x00000008) != 0) {
145 Log.v(TAG, " CHANNEL_OUT_FRONT_RIGHT");
147 if ((channelConfig & 0x00000010) != 0) {
148 Log.v(TAG, " CHANNEL_OUT_FRONT_CENTER");
150 if ((channelConfig & 0x00000020) != 0) {
151 Log.v(TAG, " CHANNEL_OUT_LOW_FREQUENCY");
153 if ((channelConfig & 0x00000040) != 0) {
154 Log.v(TAG, " CHANNEL_OUT_BACK_LEFT");
156 if ((channelConfig & 0x00000080) != 0) {
157 Log.v(TAG, " CHANNEL_OUT_BACK_RIGHT");
159 if ((channelConfig & 0x00000100) != 0) {
160 Log.v(TAG, " CHANNEL_OUT_FRONT_LEFT_OF_CENTER");
162 if ((channelConfig & 0x00000200) != 0) {
163 Log.v(TAG, " CHANNEL_OUT_FRONT_RIGHT_OF_CENTER");
165 if ((channelConfig & 0x00000400) != 0) {
166 Log.v(TAG, " CHANNEL_OUT_BACK_CENTER");
168 if ((channelConfig & 0x00000800) != 0) {
169 Log.v(TAG, " CHANNEL_OUT_SIDE_LEFT");
171 if ((channelConfig & 0x00001000) != 0) {
172 Log.v(TAG, " CHANNEL_OUT_SIDE_RIGHT");
176 frameSize = (sampleSize * desiredChannels);
178 // Let the user pick a larger buffer if they really want -- but ye
179 // gods they probably shouldn't, the minimums are horrifyingly high
183 minBufferSize = AudioRecord.getMinBufferSize(sampleRate, channelConfig, audioFormat);
185 minBufferSize = AudioTrack.getMinBufferSize(sampleRate, channelConfig, audioFormat);
187 desiredFrames = Math.max(desiredFrames, (minBufferSize + frameSize - 1) / frameSize);
189 int[] results = new int[4];
192 if (mAudioRecord == null) {
193 mAudioRecord = new AudioRecord(MediaRecorder.AudioSource.DEFAULT, sampleRate,
194 channelConfig, audioFormat, desiredFrames * frameSize);
196 // see notes about AudioTrack state in audioOpen(), above. Probably also applies here.
197 if (mAudioRecord.getState() != AudioRecord.STATE_INITIALIZED) {
198 Log.e(TAG, "Failed during initialization of AudioRecord");
199 mAudioRecord.release();
204 mAudioRecord.startRecording();
207 results[0] = mAudioRecord.getSampleRate();
208 results[1] = mAudioRecord.getAudioFormat();
209 results[2] = mAudioRecord.getChannelCount();
212 if (mAudioTrack == null) {
213 mAudioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, sampleRate, channelConfig, audioFormat, desiredFrames * frameSize, AudioTrack.MODE_STREAM);
215 // Instantiating AudioTrack can "succeed" without an exception and the track may still be invalid
216 // Ref: https://android.googlesource.com/platform/frameworks/base/+/refs/heads/master/media/java/android/media/AudioTrack.java
217 // Ref: http://developer.android.com/reference/android/media/AudioTrack.html#getState()
218 if (mAudioTrack.getState() != AudioTrack.STATE_INITIALIZED) {
219 /* Try again, with safer values */
221 Log.e(TAG, "Failed during initialization of Audio Track");
222 mAudioTrack.release();
230 results[0] = mAudioTrack.getSampleRate();
231 results[1] = mAudioTrack.getAudioFormat();
232 results[2] = mAudioTrack.getChannelCount();
234 results[3] = desiredFrames;
236 Log.v(TAG, "Opening " + (isCapture ? "capture" : "playback") + ", got " + results[3] + " frames of " + results[2] + " channel " + getAudioFormatString(results[1]) + " audio at " + results[0] + " Hz");
242 * This method is called by SDL using JNI.
244 public static int[] audioOpen(int sampleRate, int audioFormat, int desiredChannels, int desiredFrames) {
245 return open(false, sampleRate, audioFormat, desiredChannels, desiredFrames);
249 * This method is called by SDL using JNI.
251 public static void audioWriteFloatBuffer(float[] buffer) {
252 if (mAudioTrack == null) {
253 Log.e(TAG, "Attempted to make audio call with uninitialized audio!");
257 for (int i = 0; i < buffer.length;) {
258 int result = mAudioTrack.write(buffer, i, buffer.length - i, AudioTrack.WRITE_BLOCKING);
261 } else if (result == 0) {
264 } catch(InterruptedException e) {
268 Log.w(TAG, "SDL audio: error return from write(float)");
275 * This method is called by SDL using JNI.
277 public static void audioWriteShortBuffer(short[] buffer) {
278 if (mAudioTrack == null) {
279 Log.e(TAG, "Attempted to make audio call with uninitialized audio!");
283 for (int i = 0; i < buffer.length;) {
284 int result = mAudioTrack.write(buffer, i, buffer.length - i);
287 } else if (result == 0) {
290 } catch(InterruptedException e) {
294 Log.w(TAG, "SDL audio: error return from write(short)");
301 * This method is called by SDL using JNI.
303 public static void audioWriteByteBuffer(byte[] buffer) {
304 if (mAudioTrack == null) {
305 Log.e(TAG, "Attempted to make audio call with uninitialized audio!");
309 for (int i = 0; i < buffer.length; ) {
310 int result = mAudioTrack.write(buffer, i, buffer.length - i);
313 } else if (result == 0) {
316 } catch(InterruptedException e) {
320 Log.w(TAG, "SDL audio: error return from write(byte)");
327 * This method is called by SDL using JNI.
329 public static int[] captureOpen(int sampleRate, int audioFormat, int desiredChannels, int desiredFrames) {
330 return open(true, sampleRate, audioFormat, desiredChannels, desiredFrames);
333 /** This method is called by SDL using JNI. */
334 public static int captureReadFloatBuffer(float[] buffer, boolean blocking) {
335 return mAudioRecord.read(buffer, 0, buffer.length, blocking ? AudioRecord.READ_BLOCKING : AudioRecord.READ_NON_BLOCKING);
338 /** This method is called by SDL using JNI. */
339 public static int captureReadShortBuffer(short[] buffer, boolean blocking) {
340 if (Build.VERSION.SDK_INT < 23) {
341 return mAudioRecord.read(buffer, 0, buffer.length);
343 return mAudioRecord.read(buffer, 0, buffer.length, blocking ? AudioRecord.READ_BLOCKING : AudioRecord.READ_NON_BLOCKING);
347 /** This method is called by SDL using JNI. */
348 public static int captureReadByteBuffer(byte[] buffer, boolean blocking) {
349 if (Build.VERSION.SDK_INT < 23) {
350 return mAudioRecord.read(buffer, 0, buffer.length);
352 return mAudioRecord.read(buffer, 0, buffer.length, blocking ? AudioRecord.READ_BLOCKING : AudioRecord.READ_NON_BLOCKING);
356 /** This method is called by SDL using JNI. */
357 public static void audioClose() {
358 if (mAudioTrack != null) {
360 mAudioTrack.release();
365 /** This method is called by SDL using JNI. */
366 public static void captureClose() {
367 if (mAudioRecord != null) {
369 mAudioRecord.release();
374 /** This method is called by SDL using JNI. */
375 public static void audioSetThreadPriority(boolean iscapture, int device_id) {
378 /* Set thread name */
380 Thread.currentThread().setName("SDLAudioC" + device_id);
382 Thread.currentThread().setName("SDLAudioP" + device_id);
385 /* Set thread priority */
386 android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_AUDIO);
388 } catch (Exception e) {
389 Log.v(TAG, "modify thread properties failed " + e.toString());
393 public static native int nativeSetupJNI();