Record, play and visualize raw audio data in Android
ShortBuffer mSamples; // the samples to play
int mNumSamples;      // number of samples to play

void playAudio() {
    new Thread(new Runnable() {
        @Override
        public void run() {
            int bufferSize = AudioTrack.getMinBufferSize(SAMPLE_RATE, AudioFormat.CHANNEL_OUT_MONO,
                    AudioFormat.ENCODING_PCM_16BIT);
            if (bufferSize == AudioTrack.ERROR || bufferSize == AudioTrack.ERROR_BAD_VALUE) {
                // Fall back to roughly one second of 16-bit mono audio
                bufferSize = SAMPLE_RATE * 2;
            }

            AudioTrack audioTrack = new AudioTrack(
                    AudioManager.STREAM_MUSIC,
                    SAMPLE_RATE,
                    AudioFormat.CHANNEL_OUT_MONO,
                    AudioFormat.ENCODING_PCM_16BIT,
                    bufferSize,
                    AudioTrack.MODE_STREAM);

            audioTrack.play();
            Log.v(LOG_TAG, "Audio streaming started");

            short[] buffer = new short[bufferSize];
            mSamples.rewind();
            int limit = mNumSamples;
            int totalWritten = 0;
            while (mSamples.position() < limit && mShouldContinue) {
                int numSamplesLeft = limit - mSamples.position();
                int samplesToWrite;
                if (numSamplesLeft >= buffer.length) {
                    mSamples.get(buffer);
                    samplesToWrite = buffer.length;
                } else {
                    // Zero-pad the part of the buffer past the remaining samples
                    for (int i = numSamplesLeft; i < buffer.length; i++) {
                        buffer[i] = 0;
                    }
                    mSamples.get(buffer, 0, numSamplesLeft);
                    samplesToWrite = numSamplesLeft;
                }

                totalWritten += samplesToWrite;
                audioTrack.write(buffer, 0, samplesToWrite);
            }

            // Release the track whether playback finished or was stopped early
            audioTrack.release();

            Log.v(LOG_TAG, "Audio streaming finished. Samples written: " + totalWritten);
        }
    }).start();
}
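For context, here is a minimal sketch (not part of the original gist) of how mSamples and mNumSamples might be filled from a plain short[] of recorded 16-bit samples before starting playback; startPlayback and recordedSamples are hypothetical names.

// Hypothetical caller: wrap recorded 16-bit samples (java.nio.ShortBuffer) and start playback.
void startPlayback(short[] recordedSamples) {
    mSamples = ShortBuffer.wrap(recordedSamples);
    mNumSamples = recordedSamples.length;
    mShouldContinue = true;
    playAudio();
}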
final int SAMPLE_RATE = 44100; // the sampling rate, in Hz
boolean mShouldContinue; // set to false to stop recording / playback
void recordAudio() {
    new Thread(new Runnable() {
        @Override
        public void run() {
            android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_AUDIO);

            // buffer size in bytes
            int bufferSize = AudioRecord.getMinBufferSize(SAMPLE_RATE,
                    AudioFormat.CHANNEL_IN_MONO,
                    AudioFormat.ENCODING_PCM_16BIT);
            if (bufferSize == AudioRecord.ERROR || bufferSize == AudioRecord.ERROR_BAD_VALUE) {
                bufferSize = SAMPLE_RATE * 2;
            }

            // 2 bytes per 16-bit sample
            short[] audioBuffer = new short[bufferSize / 2];

            AudioRecord record = new AudioRecord(MediaRecorder.AudioSource.DEFAULT,
                    SAMPLE_RATE,
                    AudioFormat.CHANNEL_IN_MONO,
                    AudioFormat.ENCODING_PCM_16BIT,
                    bufferSize);

            if (record.getState() != AudioRecord.STATE_INITIALIZED) {
                Log.e(LOG_TAG, "Audio Record can't initialize!");
                return;
            }
            record.startRecording();

            Log.v(LOG_TAG, "Start recording");

            long shortsRead = 0;
            while (mShouldContinue) {
                int numberOfShort = record.read(audioBuffer, 0, audioBuffer.length);
                shortsRead += numberOfShort;

                // Do something with the audioBuffer
            }

            record.stop();
            record.release();

            Log.v(LOG_TAG, String.format("Recording stopped. Samples read: %d", shortsRead));
        }
    }).start();
}
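The "Do something with the audioBuffer" step is where visualization would hook in. A minimal sketch, assuming a simple level meter is enough: compute the peak amplitude of each chunk, normalized to 0..1, and hand it to whatever view draws the level (peakLevel is a hypothetical helper, not part of the gist).

// Hypothetical helper: peak amplitude of one chunk of 16-bit samples, normalized to 0..1.
float peakLevel(short[] samples, int length) {
    int peak = 0;
    for (int i = 0; i < length; i++) {
        int value = Math.abs(samples[i]);
        if (value > peak) {
            peak = value;
        }
    }
    return peak / (float) Short.MAX_VALUE;
}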
audioTrack.setPlaybackPositionUpdateListener(new AudioTrack.OnPlaybackPositionUpdateListener() {
    @Override
    public void onPeriodicNotification(AudioTrack track) {
        if (track.getPlayState() == AudioTrack.PLAYSTATE_PLAYING) {
            int currentFrame = track.getPlaybackHeadPosition();
            final int elapsedMillis = (int) ((long) currentFrame * 1000 / SAMPLE_RATE);
            // Update the UI with elapsedMillis, e.g. a progress bar
        }
    }

    @Override
    public void onMarkerReached(AudioTrack track) {
        Log.v(LOG_TAG, "Audio file end reached");
        track.release();
    }
});
audioTrack.setPositionNotificationPeriod(SAMPLE_RATE / 30); // notify roughly 30 times per second
audioTrack.setNotificationMarkerPosition(mNumSamples); // fire onMarkerReached at the last sample
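The periodic callback is not guaranteed to arrive on the main thread, so UI updates should be posted there. A minimal sketch, assuming an android.os.Handler bound to the main looper and a hypothetical updateProgress(int) method on the activity:

Handler mainHandler = new Handler(Looper.getMainLooper());

// Inside onPeriodicNotification, once elapsedMillis has been computed:
final int progressMillis = elapsedMillis;
mainHandler.post(new Runnable() {
    @Override
    public void run() {
        updateProgress(progressMillis); // hypothetical UI hook
    }
});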
int bufferSize = AudioRecord.getMinBufferSize(SAMPLE_RATE,
        AudioFormat.CHANNEL_IN_MONO,
        AudioFormat.ENCODING_PCM_16BIT);

AudioRecord record = new AudioRecord(MediaRecorder.AudioSource.DEFAULT,
        SAMPLE_RATE, // must match the rate used for getMinBufferSize
        AudioFormat.CHANNEL_IN_MONO,
        AudioFormat.ENCODING_PCM_16BIT,
        bufferSize);
AudioRecord record = new AudioRecord.Builder()
        .setAudioSource(MediaRecorder.AudioSource.DEFAULT)
        .setAudioFormat(new AudioFormat.Builder()
                .setEncoding(AudioFormat.ENCODING_PCM_16BIT)
                .setSampleRate(SAMPLE_RATE)
                .setChannelMask(AudioFormat.CHANNEL_IN_MONO)
                .build())
        .setBufferSizeInBytes(bufferSize)
        .build();
int mBufferSize = AudioTrack.getMinBufferSize(SAMPLE_RATE, AudioFormat.CHANNEL_OUT_MONO,
        AudioFormat.ENCODING_PCM_16BIT);
if (mBufferSize == AudioTrack.ERROR || mBufferSize == AudioTrack.ERROR_BAD_VALUE) {
    // For some reason we couldn't obtain a buffer size; fall back to one second
    // of 16-bit audio (CHANNELS is 1 for mono)
    mBufferSize = SAMPLE_RATE * CHANNELS * 2;
}

AudioTrack mAudioTrack = new AudioTrack(
        AudioManager.STREAM_MUSIC,
        SAMPLE_RATE,
        AudioFormat.CHANNEL_OUT_MONO,
        AudioFormat.ENCODING_PCM_16BIT,
        mBufferSize,
        AudioTrack.MODE_STREAM);
AudioTrack audioTrack = new AudioTrack.Builder()
        .setAudioAttributes(new AudioAttributes.Builder()
                .setUsage(AudioAttributes.USAGE_MEDIA)
                .setContentType(AudioAttributes.CONTENT_TYPE_MUSIC)
                .build())
        .setAudioFormat(new AudioFormat.Builder()
                .setEncoding(AudioFormat.ENCODING_PCM_16BIT)
                .setSampleRate(SAMPLE_RATE)
                .setChannelMask(AudioFormat.CHANNEL_OUT_MONO)
                .build())
        .setBufferSizeInBytes(bufferSize)
        .setTransferMode(AudioTrack.MODE_STREAM)
        .build();
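AudioRecord.Builder and AudioTrack.Builder require API 23, and the stream-type AudioTrack constructor used earlier is deprecated from API 26, so projects that still support older devices sometimes branch on the runtime version. A minimal sketch; buildTrackWithBuilder and buildTrackWithConstructor are hypothetical wrappers around the two snippets above.

// Pick a construction path based on the device's API level.
AudioTrack track;
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
    track = buildTrackWithBuilder();      // AudioTrack.Builder, API 23+
} else {
    track = buildTrackWithConstructor();  // legacy stream-type constructor
}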
@Nikhil-Chacharkar

Does this code work for real-time audio output from the microphone, i.e. recording and playing audio simultaneously?

@fazliraziq

I also need some suggestions on real-time recording and playing simultaneously.
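For reference on the two questions above: wiring the snippets together gives a rough full-duplex loop, reading from AudioRecord and writing each chunk straight to a streaming AudioTrack. A minimal sketch with no latency tuning (expect noticeable delay on most devices); loopback() is a hypothetical method, not part of the gist.

void loopback() {
    int recBuf = AudioRecord.getMinBufferSize(SAMPLE_RATE,
            AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
    int playBuf = AudioTrack.getMinBufferSize(SAMPLE_RATE,
            AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT);

    AudioRecord record = new AudioRecord(MediaRecorder.AudioSource.DEFAULT,
            SAMPLE_RATE, AudioFormat.CHANNEL_IN_MONO,
            AudioFormat.ENCODING_PCM_16BIT, recBuf);
    AudioTrack track = new AudioTrack(AudioManager.STREAM_MUSIC, SAMPLE_RATE,
            AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT,
            playBuf, AudioTrack.MODE_STREAM);

    short[] buffer = new short[recBuf / 2];
    record.startRecording();
    track.play();

    while (mShouldContinue) {
        int read = record.read(buffer, 0, buffer.length);
        if (read > 0) {
            track.write(buffer, 0, read);
        }
    }

    record.stop();
    record.release();
    track.stop();
    track.release();
}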
