1 package org.libsdl.app;
3 import android.media.*;
4 import android.os.Build;
5 import android.util.Log;
/**
 * Bridge between SDL's native audio backend and the Android audio APIs.
 * All members are static: SDL drives this class through JNI calls, and at
 * most one playback stream and one capture stream exist at a time.
 */
public class SDLAudioManager
// Log tag shared by all messages emitted from this class.
protected static final String TAG = "SDLAudio";

// Current playback / capture streams; null until opened by open() and
// reset to null again when closed. Accessed from SDL's audio threads.
protected static AudioTrack mAudioTrack;
protected static AudioRecord mAudioRecord;
14 public static void initialize() {
21 protected static String getAudioFormatString(int audioFormat) {
22 switch (audioFormat) {
23 case AudioFormat.ENCODING_PCM_8BIT:
25 case AudioFormat.ENCODING_PCM_16BIT:
27 case AudioFormat.ENCODING_PCM_FLOAT:
30 return Integer.toString(audioFormat);
34 protected static int[] open(boolean isCapture, int sampleRate, int audioFormat, int desiredChannels, int desiredFrames) {
39 Log.v(TAG, "Opening " + (isCapture ? "capture" : "playback") + ", requested " + desiredFrames + " frames of " + desiredChannels + " channel " + getAudioFormatString(audioFormat) + " audio at " + sampleRate + " Hz");
41 /* On older devices let's use known good settings */
42 if (Build.VERSION.SDK_INT < 21) {
43 if (desiredChannels > 2) {
46 if (sampleRate < 8000) {
48 } else if (sampleRate > 48000) {
53 if (audioFormat == AudioFormat.ENCODING_PCM_FLOAT) {
54 int minSDKVersion = (isCapture ? 23 : 21);
55 if (Build.VERSION.SDK_INT < minSDKVersion) {
56 audioFormat = AudioFormat.ENCODING_PCM_16BIT;
61 case AudioFormat.ENCODING_PCM_8BIT:
64 case AudioFormat.ENCODING_PCM_16BIT:
67 case AudioFormat.ENCODING_PCM_FLOAT:
71 Log.v(TAG, "Requested format " + audioFormat + ", getting ENCODING_PCM_16BIT");
72 audioFormat = AudioFormat.ENCODING_PCM_16BIT;
78 switch (desiredChannels) {
80 channelConfig = AudioFormat.CHANNEL_IN_MONO;
83 channelConfig = AudioFormat.CHANNEL_IN_STEREO;
86 Log.v(TAG, "Requested " + desiredChannels + " channels, getting stereo");
88 channelConfig = AudioFormat.CHANNEL_IN_STEREO;
92 switch (desiredChannels) {
94 channelConfig = AudioFormat.CHANNEL_OUT_MONO;
97 channelConfig = AudioFormat.CHANNEL_OUT_STEREO;
100 channelConfig = AudioFormat.CHANNEL_OUT_STEREO | AudioFormat.CHANNEL_OUT_FRONT_CENTER;
103 channelConfig = AudioFormat.CHANNEL_OUT_QUAD;
106 channelConfig = AudioFormat.CHANNEL_OUT_QUAD | AudioFormat.CHANNEL_OUT_FRONT_CENTER;
109 channelConfig = AudioFormat.CHANNEL_OUT_5POINT1;
112 channelConfig = AudioFormat.CHANNEL_OUT_5POINT1 | AudioFormat.CHANNEL_OUT_BACK_CENTER;
115 if (Build.VERSION.SDK_INT >= 23) {
116 channelConfig = AudioFormat.CHANNEL_OUT_7POINT1_SURROUND;
118 Log.v(TAG, "Requested " + desiredChannels + " channels, getting 5.1 surround");
120 channelConfig = AudioFormat.CHANNEL_OUT_5POINT1;
124 Log.v(TAG, "Requested " + desiredChannels + " channels, getting stereo");
126 channelConfig = AudioFormat.CHANNEL_OUT_STEREO;
131 Log.v(TAG, "Speaker configuration (and order of channels):");
133 if ((channelConfig & 0x00000004) != 0) {
134 Log.v(TAG, " CHANNEL_OUT_FRONT_LEFT");
136 if ((channelConfig & 0x00000008) != 0) {
137 Log.v(TAG, " CHANNEL_OUT_FRONT_RIGHT");
139 if ((channelConfig & 0x00000010) != 0) {
140 Log.v(TAG, " CHANNEL_OUT_FRONT_CENTER");
142 if ((channelConfig & 0x00000020) != 0) {
143 Log.v(TAG, " CHANNEL_OUT_LOW_FREQUENCY");
145 if ((channelConfig & 0x00000040) != 0) {
146 Log.v(TAG, " CHANNEL_OUT_BACK_LEFT");
148 if ((channelConfig & 0x00000080) != 0) {
149 Log.v(TAG, " CHANNEL_OUT_BACK_RIGHT");
151 if ((channelConfig & 0x00000100) != 0) {
152 Log.v(TAG, " CHANNEL_OUT_FRONT_LEFT_OF_CENTER");
154 if ((channelConfig & 0x00000200) != 0) {
155 Log.v(TAG, " CHANNEL_OUT_FRONT_RIGHT_OF_CENTER");
157 if ((channelConfig & 0x00000400) != 0) {
158 Log.v(TAG, " CHANNEL_OUT_BACK_CENTER");
160 if ((channelConfig & 0x00000800) != 0) {
161 Log.v(TAG, " CHANNEL_OUT_SIDE_LEFT");
163 if ((channelConfig & 0x00001000) != 0) {
164 Log.v(TAG, " CHANNEL_OUT_SIDE_RIGHT");
168 frameSize = (sampleSize * desiredChannels);
170 // Let the user pick a larger buffer if they really want -- but ye
171 // gods they probably shouldn't, the minimums are horrifyingly high
175 minBufferSize = AudioRecord.getMinBufferSize(sampleRate, channelConfig, audioFormat);
177 minBufferSize = AudioTrack.getMinBufferSize(sampleRate, channelConfig, audioFormat);
179 desiredFrames = Math.max(desiredFrames, (minBufferSize + frameSize - 1) / frameSize);
181 int[] results = new int[4];
184 if (mAudioRecord == null) {
185 mAudioRecord = new AudioRecord(MediaRecorder.AudioSource.DEFAULT, sampleRate,
186 channelConfig, audioFormat, desiredFrames * frameSize);
188 // see notes about AudioTrack state in audioOpen(), above. Probably also applies here.
189 if (mAudioRecord.getState() != AudioRecord.STATE_INITIALIZED) {
190 Log.e(TAG, "Failed during initialization of AudioRecord");
191 mAudioRecord.release();
196 mAudioRecord.startRecording();
199 results[0] = mAudioRecord.getSampleRate();
200 results[1] = mAudioRecord.getAudioFormat();
201 results[2] = mAudioRecord.getChannelCount();
202 results[3] = desiredFrames;
205 if (mAudioTrack == null) {
206 mAudioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, sampleRate, channelConfig, audioFormat, desiredFrames * frameSize, AudioTrack.MODE_STREAM);
208 // Instantiating AudioTrack can "succeed" without an exception and the track may still be invalid
209 // Ref: https://android.googlesource.com/platform/frameworks/base/+/refs/heads/master/media/java/android/media/AudioTrack.java
210 // Ref: http://developer.android.com/reference/android/media/AudioTrack.html#getState()
211 if (mAudioTrack.getState() != AudioTrack.STATE_INITIALIZED) {
212 /* Try again, with safer values */
214 Log.e(TAG, "Failed during initialization of Audio Track");
215 mAudioTrack.release();
223 results[0] = mAudioTrack.getSampleRate();
224 results[1] = mAudioTrack.getAudioFormat();
225 results[2] = mAudioTrack.getChannelCount();
226 results[3] = desiredFrames;
229 Log.v(TAG, "Opening " + (isCapture ? "capture" : "playback") + ", got " + results[3] + " frames of " + results[2] + " channel " + getAudioFormatString(results[1]) + " audio at " + results[0] + " Hz");
235 * This method is called by SDL using JNI.
237 public static int[] audioOpen(int sampleRate, int audioFormat, int desiredChannels, int desiredFrames) {
238 return open(false, sampleRate, audioFormat, desiredChannels, desiredFrames);
242 * This method is called by SDL using JNI.
244 public static void audioWriteFloatBuffer(float[] buffer) {
245 if (mAudioTrack == null) {
246 Log.e(TAG, "Attempted to make audio call with uninitialized audio!");
250 for (int i = 0; i < buffer.length;) {
251 int result = mAudioTrack.write(buffer, i, buffer.length - i, AudioTrack.WRITE_BLOCKING);
254 } else if (result == 0) {
257 } catch(InterruptedException e) {
261 Log.w(TAG, "SDL audio: error return from write(float)");
268 * This method is called by SDL using JNI.
270 public static void audioWriteShortBuffer(short[] buffer) {
271 if (mAudioTrack == null) {
272 Log.e(TAG, "Attempted to make audio call with uninitialized audio!");
276 for (int i = 0; i < buffer.length;) {
277 int result = mAudioTrack.write(buffer, i, buffer.length - i);
280 } else if (result == 0) {
283 } catch(InterruptedException e) {
287 Log.w(TAG, "SDL audio: error return from write(short)");
294 * This method is called by SDL using JNI.
296 public static void audioWriteByteBuffer(byte[] buffer) {
297 if (mAudioTrack == null) {
298 Log.e(TAG, "Attempted to make audio call with uninitialized audio!");
302 for (int i = 0; i < buffer.length; ) {
303 int result = mAudioTrack.write(buffer, i, buffer.length - i);
306 } else if (result == 0) {
309 } catch(InterruptedException e) {
313 Log.w(TAG, "SDL audio: error return from write(byte)");
320 * This method is called by SDL using JNI.
322 public static int[] captureOpen(int sampleRate, int audioFormat, int desiredChannels, int desiredFrames) {
323 return open(true, sampleRate, audioFormat, desiredChannels, desiredFrames);
326 /** This method is called by SDL using JNI. */
327 public static int captureReadFloatBuffer(float[] buffer, boolean blocking) {
328 return mAudioRecord.read(buffer, 0, buffer.length, blocking ? AudioRecord.READ_BLOCKING : AudioRecord.READ_NON_BLOCKING);
331 /** This method is called by SDL using JNI. */
332 public static int captureReadShortBuffer(short[] buffer, boolean blocking) {
333 if (Build.VERSION.SDK_INT < 23) {
334 return mAudioRecord.read(buffer, 0, buffer.length);
336 return mAudioRecord.read(buffer, 0, buffer.length, blocking ? AudioRecord.READ_BLOCKING : AudioRecord.READ_NON_BLOCKING);
340 /** This method is called by SDL using JNI. */
341 public static int captureReadByteBuffer(byte[] buffer, boolean blocking) {
342 if (Build.VERSION.SDK_INT < 23) {
343 return mAudioRecord.read(buffer, 0, buffer.length);
345 return mAudioRecord.read(buffer, 0, buffer.length, blocking ? AudioRecord.READ_BLOCKING : AudioRecord.READ_NON_BLOCKING);
349 /** This method is called by SDL using JNI. */
350 public static void audioClose() {
351 if (mAudioTrack != null) {
353 mAudioTrack.release();
358 /** This method is called by SDL using JNI. */
359 public static void captureClose() {
360 if (mAudioRecord != null) {
362 mAudioRecord.release();
367 /** This method is called by SDL using JNI. */
368 public static void audioSetThreadPriority(boolean iscapture, int device_id) {
371 /* Set thread name */
373 Thread.currentThread().setName("SDLAudioC" + device_id);
375 Thread.currentThread().setName("SDLAudioP" + device_id);
378 /* Set thread priority */
379 android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_AUDIO);
381 } catch (Exception e) {
382 Log.v(TAG, "modify thread properties failed " + e.toString());
386 public static native int nativeSetupJNI();