Extract, decode, repeat cycle: pull the AAC audio track out of an MP4 with MediaExtractor, decode it to PCM with MediaCodec, and hand the decoded bytes back one at a time.
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaExtractor;
import android.media.MediaFormat;
import android.util.Log;

import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.Queue;
import java.util.concurrent.LinkedBlockingQueue;

/**
 * Created by ltpquang on 12/20/16.
 * <p>
 * "Whether you think you can, or you think you can't - you're right."
 * --Henry Ford--
 */
public class SimpleAudioExtractor {
    private static final String TAG = SimpleAudioExtractor.class.getSimpleName();
    private static final int TIMEOUT_USEC = 10000; // 10 ms

    // AssetFileDescriptor fileDescriptor;
    // String filePath;
    private MediaCodec decoder = null;
    private MediaExtractor extractor = null;
    private int trackIndex;
    private boolean setupDone;
    //private boolean decodeDone;
    private boolean autoLoop;
    private Queue<Byte> data = new LinkedBlockingQueue<>();
    /**
     * Alternative source example:
     * AssetFileDescriptor afd = getAssets().openFd("edm_overlay_video.mp4");
     */
    public SimpleAudioExtractor(String filePath, boolean autoLoop) throws IOException {
        File file = new File(filePath);
        FileInputStream inputStream = new FileInputStream(file);
        extractor = new MediaExtractor();
        extractor.setDataSource(inputStream.getFD());
        setup(extractor);
        setupDone = true;
        this.autoLoop = autoLoop;
    }
    private void setup(MediaExtractor extractor) {
        try {
            trackIndex = selectTrack(extractor);
            if (trackIndex < 0) {
                throw new RuntimeException("No audio track found");
            }
            extractor.selectTrack(trackIndex);
            MediaFormat format = extractor.getTrackFormat(trackIndex);
            decoder = createDecoder(format);
            decoder.start();
        } catch (Exception e) {
            Log.e(TAG, "setup: ", e);
            if (decoder != null) {
                decoder.stop();
                decoder.release();
                decoder = null;
            }
            if (this.extractor != null) {
                // The parameter shadows the field, so release and null the field itself.
                this.extractor.release();
                this.extractor = null;
            }
        }
    }
    public void destroy() {
        if (decoder != null) {
            decoder.stop();
            decoder.release();
            decoder = null;
        }
        if (extractor != null) {
            extractor.release();
            extractor = null;
        }
    }
    public byte getNextByte() {
        try {
            // Refill the queue when it runs low.
            if (data.size() < 1000) {
                byte[] newData = doExtract(extractor, decoder);
                for (byte b : newData) {
                    data.add(b);
                }
            }
            Byte next = data.poll();
            return next != null ? next : 0;
        } catch (Exception e) {
            Log.e(TAG, "getNextByte: ", e);
            return 0;
        }
    }
    private MediaCodec createDecoder(MediaFormat mediaFormat) throws IOException {
        int sampleRate = mediaFormat.getInteger(MediaFormat.KEY_SAMPLE_RATE);
        int channelConfig = mediaFormat.getInteger(MediaFormat.KEY_CHANNEL_COUNT);
        int[] samplingFreq = {
                96000, 88200, 64000, 48000, 44100, 32000, 24000, 22050,
                16000, 12000, 11025, 8000
        };
        // Find the AAC sampling-frequency index for this sample rate.
        int sampleIndex = -1;
        for (int i = 0; i < samplingFreq.length; ++i) {
            if (samplingFreq[i] == sampleRate) {
                Log.d(TAG, "kSamplingFreq " + samplingFreq[i] + " i : " + i);
                sampleIndex = i;
                break;
            }
        }
        if (sampleIndex == -1) {
            return null;
        }
        // Build the 2-byte AAC AudioSpecificConfig (csd-0):
        // 5 bits object type (AAC-LC), 4 bits sampling-frequency index, 4 bits channel config.
        // For example, 44100 Hz stereo AAC-LC yields 0x12 0x10.
        int audioProfile = MediaCodecInfo.CodecProfileLevel.AACObjectLC;
        ByteBuffer csd = ByteBuffer.allocate(2);
        csd.put((byte) ((audioProfile << 3) | (sampleIndex >> 1)));
        csd.position(1);
        csd.put((byte) ((byte) ((sampleIndex << 7) & 0x80) | (channelConfig << 3)));
        csd.flip();
        mediaFormat.setByteBuffer("csd-0", csd); // add csd-0
        for (int k = 0; k < csd.capacity(); ++k) {
            Log.d(TAG, "csd : " + csd.array()[k]);
        }
        MediaCodec decoder = MediaCodec.createDecoderByType("audio/mp4a-latm");
        decoder.configure(mediaFormat, null, null, 0);
        return decoder;
    }
    /**
     * Selects the audio track, if any.
     *
     * @return the track index, or -1 if no audio track is found.
     */
    private int selectTrack(MediaExtractor extractor) {
        // Select the first audio track we find, ignore the rest.
        int numTracks = extractor.getTrackCount();
        for (int i = 0; i < numTracks; i++) {
            MediaFormat format = extractor.getTrackFormat(i);
            String mime = format.getString(MediaFormat.KEY_MIME);
            if (mime.startsWith("audio/")) {
                Log.d(TAG, "Extractor selected track " + i + " (" + mime + "): " + format);
                return i;
            }
        }
        return -1;
    }
    private boolean extractDone = false;
    private boolean decodeDone = false;

    private boolean shouldExtract() {
        return autoLoop || !extractDone;
    }

    private boolean shouldDecode() {
        return !decodeDone;
    }
    /**
     * Work loop: feed encoded samples to the decoder and return the next chunk of
     * decoded audio, or an empty array once both extraction and decoding are done.
     */
    private byte[] doExtract(MediaExtractor extractor, MediaCodec decoder) throws IOException {
        ByteBuffer[] decoderInputBuffers = decoder.getInputBuffers();
        MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
        while (shouldExtract() || shouldDecode()) {
            // Feed more data to the decoder.
            if (shouldExtract()) {
                int inputBufIndex = decoder.dequeueInputBuffer(TIMEOUT_USEC);
                if (inputBufIndex >= 0) {
                    ByteBuffer inputBuf = decoderInputBuffers[inputBufIndex];
                    // Read the sample data into the ByteBuffer. This neither respects nor
                    // updates inputBuf's position, limit, etc.
                    int chunkSize = extractor.readSampleData(inputBuf, 0);
                    if (chunkSize < 0) {
                        // End of stream -- send an empty buffer with the EOS flag set.
                        decoder.queueInputBuffer(inputBufIndex, 0, 0, 0L,
                                MediaCodec.BUFFER_FLAG_END_OF_STREAM);
                        extractDone = true;
                    } else {
                        decoder.queueInputBuffer(
                                inputBufIndex, 0, chunkSize,
                                extractor.getSampleTime(),
                                extractor.getSampleFlags());
                        if (!extractor.advance() && autoLoop) {
                            // Looping: jump back to the start so extraction never runs dry.
                            extractor.seekTo(0, MediaExtractor.SEEK_TO_CLOSEST_SYNC);
                        }
                    }
                } else {
                    Log.d(TAG, "input buffer not available");
                }
            }
            if (shouldDecode()) {
                int decoderStatus = decoder.dequeueOutputBuffer(info, TIMEOUT_USEC);
                if (decoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
                    // no output available yet
                    Log.d(TAG, "no output from decoder available");
                } else if (decoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                    // The output buffers changed; they are re-fetched below on every
                    // pass, so there is nothing to update here.
                    Log.d(TAG, "decoder output buffers changed");
                } else if (decoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                    MediaFormat newFormat = decoder.getOutputFormat();
                    Log.d(TAG, "decoder output format changed: " + newFormat);
                } else if (decoderStatus < 0) {
                    throw new RuntimeException(
                            "unexpected result from decoder.dequeueOutputBuffer: " + decoderStatus);
                } else { // decoderStatus >= 0
                    Log.d(TAG, "decoder returned buffer " + decoderStatus + " (size=" + info.size + ")");
                    if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                        Log.d(TAG, "output EOS");
                        decodeDone = true;
                    }
                    // Copy the decoded PCM out of the output buffer before releasing it.
                    ByteBuffer byteBuffer = decoder.getOutputBuffers()[decoderStatus];
                    byteBuffer.position(info.offset);
                    byteBuffer.limit(info.offset + info.size);
                    byte[] result = new byte[info.size];
                    byteBuffer.get(result);
                    byteBuffer.clear();
                    // There is no Surface, so never ask MediaCodec to render the buffer.
                    decoder.releaseOutputBuffer(decoderStatus, false);
                    return result;
                }
            }
        }
        return new byte[0];
    }
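
A minimal usage sketch, assuming an MP4 with an AAC audio track at an illustrative path (the path and buffer size below are not part of the gist; the constructor throws IOException, and the returned bytes are raw PCM in whatever format the decoder reports, typically 16-bit little-endian):

// Usage sketch -- "/sdcard/edm_overlay_video.mp4" is a hypothetical input file.
SimpleAudioExtractor audioExtractor =
        new SimpleAudioExtractor("/sdcard/edm_overlay_video.mp4", true);
byte[] pcmChunk = new byte[4096];
for (int i = 0; i < pcmChunk.length; i++) {
    pcmChunk[i] = audioExtractor.getNextByte(); // one decoded PCM byte per call
}
// ... feed pcmChunk to an android.media.AudioTrack, write it to a WAV file, etc.
audioExtractor.destroy();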