I am using an audio recorder in my app to record audio. It worked fine up to Android 13, but on Android 14 it does not work properly.

Basically, inside the application the user records audio for 15 seconds and can then play the recorded audio back.

I created the audio record thread like this:
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.media.MediaRecorder;
import android.os.Build;
import android.util.Log;

import java.io.File;
import java.io.IOException;
import java.io.OutputStream;
import java.util.ArrayList;

public class AudioRecordThread implements Runnable {

    private static final String TAG = AudioRecordThread.class.getSimpleName();

    public static final int MAX_FREQUENCY = 10000;
    public static final int SAMPLE_RATE = 44100;
    public static final int BUFFER_SIZE = 1024;
    private static final int SAMPLE_RATE_INDEX = 4; // ADTS sampling frequency index for 44100 Hz
    private static final int CHANNELS = 1;
    private static final int BIT_RATE = 32000;

    // run(), outputStream(File) and the AudioData / OnRecorderListener types are
    // part of my project and omitted here for brevity.
    private final int bufferSize;
    private final AudioRecord audioRecord;
    private final MediaCodec mediaCodec;
    private final AudioData audioData;
    private final OutputStream outputStream;
    private final OnRecorderListener listener;
    private final ArrayList<Integer> list_frequency; // element type assumed

    public AudioRecordThread(File file, OnRecorderListener listener) throws IOException {
        this.bufferSize = BUFFER_SIZE;
        this.audioRecord = createAudioRecord(this.bufferSize);
        this.mediaCodec = createMediaCodec(this.bufferSize);
        this.audioData = new AudioData(this.bufferSize);
        this.outputStream = outputStream(file);
        this.listener = listener;
        this.list_frequency = new ArrayList<>();

        this.mediaCodec.start();
        try {
            audioRecord.startRecording();
        } catch (Exception e) {
            e.printStackTrace();
            mediaCodec.release();
            throw new IOException(e);
        }
    }

    // Builds the 7-byte ADTS header that is prepended to every encoded AAC frame;
    // frameLength is the encoded frame size plus the 7 header bytes.
    private byte[] createAdtsHeader(int length) {
        int frameLength = length + 7;
        byte[] adtsHeader = new byte[7];

        adtsHeader[0] = (byte) 0xFF; // sync word
        adtsHeader[1] = (byte) 0xF1; // MPEG-4, Layer (0), no CRC
        adtsHeader[2] = (byte) ((MediaCodecInfo.CodecProfileLevel.AACObjectLC - 1) << 6); // profile: AAC LC
        adtsHeader[2] |= (((byte) SAMPLE_RATE_INDEX) << 2); // sampling frequency index (4 = 44100 Hz)
        adtsHeader[2] |= (((byte) CHANNELS) >> 2); // channel configuration, high bit
        adtsHeader[3] = (byte) (((CHANNELS & 3) << 6) | ((frameLength >> 11) & 0x03)); // channels + frame length (high bits)
        adtsHeader[4] = (byte) ((frameLength >> 3) & 0xFF); // frame length (middle bits)
        adtsHeader[5] = (byte) (((frameLength & 0x07) << 5) | 0x1f); // frame length (low bits) + buffer fullness
        adtsHeader[6] = (byte) 0xFC; // buffer fullness + number of raw data blocks
        return adtsHeader;
    }

    private AudioRecord createAudioRecord(int bufferSize) {
        AudioRecord audioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC, SAMPLE_RATE,
                AudioFormat.CHANNEL_IN_MONO,
                AudioFormat.ENCODING_PCM_16BIT, bufferSize * 10);

        if (audioRecord.getState() != AudioRecord.STATE_INITIALIZED) {
            Log.e(TAG, "Unable to initialize AudioRecord");
            throw new RuntimeException("Unable to initialize AudioRecord");
        }

        // Enable noise suppression and automatic gain control when the device supports them.
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN) {
            if (android.media.audiofx.NoiseSuppressor.isAvailable()) {
                android.media.audiofx.NoiseSuppressor noiseSuppressor =
                        android.media.audiofx.NoiseSuppressor.create(audioRecord.getAudioSessionId());
                if (noiseSuppressor != null) {
                    noiseSuppressor.setEnabled(true);
                }
            }
            if (android.media.audiofx.AutomaticGainControl.isAvailable()) {
                android.media.audiofx.AutomaticGainControl automaticGainControl =
                        android.media.audiofx.AutomaticGainControl.create(audioRecord.getAudioSessionId());
                if (automaticGainControl != null) {
                    automaticGainControl.setEnabled(true);
                }
            }
        }

        return audioRecord;
    }

    private MediaCodec createMediaCodec(int bufferSize) throws IOException {
        MediaCodec mediaCodec = MediaCodec.createEncoderByType("audio/mp4a-latm");
        MediaFormat mediaFormat = new MediaFormat();
        mediaFormat.setString(MediaFormat.KEY_MIME, "audio/mp4a-latm");
        mediaFormat.setInteger(MediaFormat.KEY_SAMPLE_RATE, SAMPLE_RATE);
        mediaFormat.setInteger(MediaFormat.KEY_CHANNEL_COUNT, CHANNELS);
        mediaFormat.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, bufferSize);
        mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, BIT_RATE);
        mediaFormat.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC);
        try {
            mediaCodec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        } catch (Exception e) {
            e.printStackTrace();
            mediaCodec.release();
            throw new IOException(e);
        }
        return mediaCodec;
    }
}
Now, on Android 14, the app still records, but a 15-second recording plays back in only about 5 seconds, i.e. at roughly triple speed (which suggests that either only about a third of the PCM samples end up in the file, or the data is played back at three times the intended sample rate). How can I record 15 seconds of audio on Android 14 so that it plays back at the normal 1x speed?
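In case it helps narrow this down, below is a minimal diagnostic sketch (a hypothetical logCaptureRate() helper, not something that exists in my class above) of what I could run right after createAudioRecord(): it counts how many PCM bytes AudioRecord.read() delivers in one second, which would show whether the device really captures 44100 Hz worth of 16-bit mono data (about 88200 bytes per second) or only about a third of that.

// Hypothetical diagnostic helper, not part of the class above: measures how much
// PCM data the AudioRecord actually delivers in one second. With 16-bit mono at
// 44100 Hz this should be roughly 88200 bytes; a much smaller number would
// explain the sped-up playback.
private void logCaptureRate(AudioRecord record) {
    int expectedBytesPerSecond = SAMPLE_RATE * 2; // 2 bytes per 16-bit mono frame
    byte[] buffer = new byte[BUFFER_SIZE];
    long totalBytes = 0;
    long start = System.currentTimeMillis();
    while (System.currentTimeMillis() - start < 1000) {
        int read = record.read(buffer, 0, buffer.length);
        if (read > 0) {
            totalBytes += read;
        }
    }
    Log.d(TAG, "configured sample rate = " + record.getSampleRate()
            + " Hz, bytes read in 1 s = " + totalBytes
            + " (expected about " + expectedBytesPerSecond + ")");
}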