Starting with API level 16, the codecs a device supports can be used flexibly without any additional NDK implementation. In other words, with MediaCodec an application can easily obtain the output data produced by a codec. I spent some time writing the test code below; I hope it is useful as a reference for using MediaCodec, MediaExtractor, MediaFormat, and the other classes added in API level 16.
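Before the full listing, here is a minimal sketch of the core decode loop the code below is built around. This is my own simplified illustration, not part of the test app: it assumes a MediaExtractor ("extractor") with a track already selected and a decoder ("codec") that has already been configured and started, and it omits error handling, audio output, and A/V sync.

// Minimal MediaCodec decode loop (API 16 style) - illustrative sketch only
ByteBuffer[] inputBuffers = codec.getInputBuffers();
ByteBuffer[] outputBuffers = codec.getOutputBuffers();
MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
boolean inputDone = false;
while (true) {
    if (!inputDone) {
        int inIndex = codec.dequeueInputBuffer(10000);   // wait up to 10 ms for a free input buffer
        if (inIndex >= 0) {
            int size = extractor.readSampleData(inputBuffers[inIndex], 0);
            if (size < 0) {
                // no more samples: tell the codec the stream has ended
                codec.queueInputBuffer(inIndex, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
                inputDone = true;
            } else {
                codec.queueInputBuffer(inIndex, 0, size, extractor.getSampleTime(), 0);
                extractor.advance();                      // move on to the next sample
            }
        }
    }
    int outIndex = codec.dequeueOutputBuffer(info, 10000);
    if (outIndex >= 0) {
        // decoded data is in outputBuffers[outIndex], or rendered to the Surface on release
        codec.releaseOutputBuffer(outIndex, true /* render if a Surface was configured */);
        if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) break;
    } else if (outIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
        outputBuffers = codec.getOutputBuffers();         // buffers were replaced, re-query them
    } else if (outIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
        MediaFormat newFormat = codec.getOutputFormat();  // e.g. actual sample rate / color format
    }
}

The test code below follows the same pattern, but packs several extractor samples into one input buffer, writes decoded audio to an AudioTrack, and lets MediaCodec render video frames to the SurfaceView.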
/*
* Test code for MediaCodec, MediaExtractor, MediaFormat class since API 16
*
* I've verified that only H.264 (AVC) video and MP3 audio work correctly;
* the other codecs I tried returned errors.
*
* TODO: handle two MediaCodec instances to decode audio and video simultaneously,
* and render video frames in sync with the audio playback time.
*/
package com.ek.mediacodec;
//import android.R;
import java.io.IOException;
import java.nio.ByteBuffer;
import android.media.AudioFormat;
import android.media.AudioManager;
import android.media.AudioTrack;
import android.media.MediaCodec;
import android.media.MediaCodec.BufferInfo;
import android.media.MediaCodecInfo;
import android.media.MediaCodecList;
import android.media.MediaExtractor;
import android.media.MediaFormat;
import android.media.MediaPlayer;
import android.media.MediaPlayer.OnErrorListener;
import android.media.MediaPlayer.OnVideoSizeChangedListener;
import android.os.Bundle;
import android.os.Handler;
import android.os.Looper;
import android.os.Message;
import android.app.Activity;
import android.util.Log;
import android.view.Menu;
import android.view.SurfaceHolder;
import android.view.View;
import android.view.SurfaceHolder.Callback;
import android.view.SurfaceView;
import android.widget.Button;
import android.widget.CheckBox;
import android.widget.TextView;
import android.widget.Toast;
public class MainActivity extends Activity implements
OnErrorListener, OnVideoSizeChangedListener, SurfaceHolder.Callback {
public static final String TAG = "Client.MediaCodec";
private int mVideoWidth;
private int mVideoHeight;
private MediaPlayer mMediaPlayer;
private SurfaceView mVideoView;
private SurfaceHolder mSurfaceHolder;
private Bundle extra;
private String mPath;
private Button button;
private Button mediacodec_button;
private CheckBox mediacodec_checkbox;
private TextView status;
private boolean bStart = false;
private Thread mThread;
private Handler mHandler;
private boolean mRender_flag = false;
private MediaExtractor mExtractor;
private MediaCodec mMediaCodec;
//private MediaCodec mMediaCodec_Audio;
private ByteBuffer[] mInputBuffers = null;
private ByteBuffer[] mOutputBuffers = null;
//temp buffer to get sampledata.
private ByteBuffer mTempInputBuffer;
private AudioTrack mAudioTrack;
private MediaFormat mFormat;
private String mMime;
private static int mInput_buff_size;
private static int mOutput_buff_size;
private int input_cnt = 0;
private int output_cnt = 0;
private int mOffset = 0;
private String status_update;
private String mSelectedmime;
private static final int LOCAL_VIDEO = 0;
// TODO: change file path
private static final String mVideofile_path = "/mnt/sdcard/[AVC_AAC]D1_25fps_1500kbps_Secret_Magic.avi";
private static final String mAudiofile_path = "/mnt/sdcard/[MP3]Good_Day.mp3";
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
mVideoView = (SurfaceView) findViewById(R.id.videoView1);
mSurfaceHolder = mVideoView.getHolder();
mSurfaceHolder.addCallback((SurfaceHolder.Callback) this);
extra = getIntent().getExtras();
mMediaPlayer = new MediaPlayer();
//MediaPlayer playback
button = (Button) findViewById(R.id.button);
button.setOnClickListener(new View.OnClickListener(){
@Override
public void onClick(View v) {
if(bStart == false ) {
Log.v(TAG, "START PLAYBACK using MEDIAPLAYER");
playVideo(LOCAL_VIDEO);
button.setText(R.string.stop);
}
else {
Log.v(TAG, "STOP PLAYBACK using MEDIAPLAYER");
bStart = false;
mMediaPlayer.stop();
mMediaPlayer.reset();
button.setText(R.string.start);
}
}
});
//MediaCodec checkbox - if unchecked, the video file is selected; if checked, the audio file.
mediacodec_checkbox = (CheckBox) findViewById(R.id.checkBox1);
//MediaCodec input/output buffer working status
status = (TextView) findViewById(R.id.status);
//MediaCodec playback
mediacodec_button = (Button) findViewById(R.id.mediacodec_button);
mediacodec_button.setOnClickListener(new View.OnClickListener(){
@Override
public void onClick(View v) {
if(bStart == false ) {
Log.v(TAG, "START PLAYBACK using MEDIACODEC");
if( StartMediaCodec() == false )
{
Toast.makeText(getApplicationContext(), "Can't play", Toast.LENGTH_LONG).show();
}
else
{
bStart = true;
mediacodec_button.setText(R.string.stop);
}
}
else {
Log.v(TAG, "STOP PLAYBACK using MEDIACODEC");
bStart = false;
StopMediaCodec();
mediacodec_button.setText(R.string.start);
}
}
});
mHandler = new Handler();
}
private boolean IsVideo(String mime)
{
if( mime.contains("video") )
return true;
else
return false;
}
private void StopMediaCodec() {
if(mThread != null && mThread.isAlive()) {
mThread.interrupt();
// wait for the decode thread to exit so it can't use the codec after it is released below
try { mThread.join(); } catch (InterruptedException ignored) { }
}
if( mAudioTrack != null )
{
mAudioTrack.flush();
mAudioTrack.stop();
mAudioTrack.release();
}
mMediaCodec.flush();
mMediaCodec.stop();
mMediaCodec.release();
mMediaCodec = null;
mExtractor.release();
mExtractor = null;
input_cnt = 0;
output_cnt = 0;
}
private boolean StartMediaCodec() {
if( mediacodec_checkbox.isChecked())
{
mPath = mAudiofile_path;
mSelectedmime = "audio";
}
else
{
mPath = mVideofile_path;
mSelectedmime = "video";
}
/* init Extractor for queue sample data to Mediacodec */
mExtractor = new MediaExtractor();
try {
mExtractor.setDataSource(mPath);
} catch (IOException e) {
// setDataSource throws IOException if the file can't be opened
Log.e(TAG, "setDataSource failed: " + mPath, e);
return false;
}
int i = 0;
int numTracks = mExtractor.getTrackCount();
for ( i = 0; i < numTracks; ++i ) {
mFormat = mExtractor.getTrackFormat(i);
if( mFormat == null)
{
Log.v(TAG, "Fail to get getTrackFormat");
return false;
}
mMime = mFormat.getString(MediaFormat.KEY_MIME);
Log.v(TAG, "mime:" + mMime);
if( mMime.contains(mSelectedmime))
break;
else
mMime = ""; //can't find wanted format
}
if (!mMime.isEmpty()) {
Log.v(TAG, "selected mime:" + mMime + ", selected trackid:" + i);
mExtractor.selectTrack(i);
}
else
{
Log.v(TAG, "This file doesn't contain " + mSelectedmime );
return false;
}
/* GetCodec info */
/*
MediaCodecInfo codecinfo;
int codecnum = MediaCodecList.getCodecCount();
int i = 0;
for ( i=0 ; i < codecnum; i++ )
{
codecinfo = MediaCodecList.getCodecInfoAt(i);
}
*/
mMediaCodec = MediaCodec.createDecoderByType(mMime);
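// Note: building a fresh MediaFormat from only width/height or sample rate/channels drops any
// codec-specific data (e.g. "csd-0") that getTrackFormat() may have provided; passing mFormat
// to configure() directly is another option.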
MediaFormat mMediaFormat;
if( mMime.contains("audio"))
{
mMediaFormat = MediaFormat.createAudioFormat(mMime, mFormat.getInteger(MediaFormat.KEY_SAMPLE_RATE), mFormat.getInteger(MediaFormat.KEY_CHANNEL_COUNT));
}
else //video
{
mMediaFormat = MediaFormat.createVideoFormat(mMime, mFormat.getInteger(MediaFormat.KEY_WIDTH), mFormat.getInteger(MediaFormat.KEY_HEIGHT));
}
/* Init MediaCodec */
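// For video, the SurfaceHolder's Surface lets the codec render decoded frames directly;
// for an audio decoder the surface argument is presumably ignored (null would also do).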
mMediaCodec.configure(mMediaFormat, mSurfaceHolder.getSurface(), null, 0);
mMediaCodec.start();
mInputBuffers = mMediaCodec.getInputBuffers();
mOutputBuffers = mMediaCodec.getOutputBuffers();
if( mInputBuffers == null || mOutputBuffers == null )
{
Log.v(TAG, "Fail to get mInputBuffers or mOutputBuffers");
return false;
}
mInput_buff_size = mInputBuffers[0].capacity();
mOutput_buff_size = mOutputBuffers[0].capacity();
mTempInputBuffer = ByteBuffer.allocate(mInput_buff_size/2);
Log.v(TAG, "Got buffer from MediaCodec. inputbuffer_size:"+mInput_buff_size+", outputbuffer_size:"+mOutput_buff_size);
/* Init AudioTrack */
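// Note: the channel mask is hard-coded to stereo below; it would be safer to derive it from
// KEY_CHANNEL_COUNT. MediaCodec audio decoders normally emit 16-bit PCM, so
// ENCODING_PCM_16BIT is the usual explicit choice instead of ENCODING_DEFAULT.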
if( mMime.contains("audio"))
{
mAudioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, mFormat.getInteger(MediaFormat.KEY_SAMPLE_RATE),
AudioFormat.CHANNEL_OUT_STEREO,
AudioFormat.ENCODING_DEFAULT,
mOutput_buff_size,
AudioTrack.MODE_STREAM);
if(mAudioTrack == null || mAudioTrack.getState() != AudioTrack.STATE_INITIALIZED)
return false;
mAudioTrack.play();
}
/* Thread */
mThread = new Thread(new Runnable() {
public void run() {
long presentationTimeUs = 0;
int sample_size = 0;
int inputqueue_flag = 0;
boolean not_eof = true;
int sample_flag = 0;
int inputBufferIndex = 0;
int outputBufferIndex = 0;
int trackIndex = 0;
for (;;) {
if( Thread.interrupted()) break;
if( not_eof == true )
{
//Get an available input buffer index. (Why doesn't the dequeueInputBuffer timeout give me a 30 ms interval?)
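// (The timeout is only the maximum time dequeueInputBuffer() blocks waiting for a free buffer;
// it returns immediately when one is already available, so pacing the loop at 30 ms would need
// an explicit sleep.)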
inputBufferIndex = mMediaCodec.dequeueInputBuffer(0); //(30000);
Log.v(TAG, "inputBufferIndex:"+inputBufferIndex);
if ( inputBufferIndex >= 0 ) {
mInputBuffers[inputBufferIndex].clear();
// fill inputBuffers[inputBufferIndex] with valid data
input_cnt++;
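// Pack as many extractor samples as fit into this single input buffer.
// Note: advance() is called before readSampleData(), so the first sample of the selected track
// appears to be skipped; the usual pattern is to read the current sample first, then advance().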
while( mOffset+sample_size < mInput_buff_size )
{
not_eof = mExtractor.advance();
sample_flag = mExtractor.getSampleFlags();
mTempInputBuffer.clear();
sample_size = mExtractor.readSampleData(mTempInputBuffer, 0);
Log.v(TAG, "sample_size:" + sample_size + ", offset:" + mOffset + ", not_eof:" + not_eof + ", sample_flag:" + sample_flag);
if( sample_size >= 0) {
mTempInputBuffer.limit(sample_size);
trackIndex = mExtractor.getSampleTrackIndex();
presentationTimeUs = mExtractor.getSampleTime();
mOffset += sample_size;
if( mOffset < mInput_buff_size)
{
mInputBuffers[inputBufferIndex].put(mTempInputBuffer.array(), 0, sample_size);
Log.v(TAG, "mInputBuffers position:" + mInputBuffers[inputBufferIndex].position());
}
else
{
mOffset -= sample_size;
Log.v(TAG, "Read oversize!");
break;
}
}
else
break;
if( not_eof == false )
{
inputqueue_flag = MediaCodec.BUFFER_FLAG_END_OF_STREAM;
sample_size = 0;
Log.v(TAG, "EOF reached");
break;
}
else if( (sample_flag & MediaExtractor.SAMPLE_FLAG_SYNC) != 0 )
{
inputqueue_flag = MediaCodec.BUFFER_FLAG_SYNC_FRAME;
}
else
{
inputqueue_flag = 0;
}
}
//deliver valid input buffer to MediaCodec
Log.v(TAG, "queueInputBuffer:" + inputBufferIndex + ", presentationTimeUs:" + presentationTimeUs + ", sample_flag:" + sample_flag);
mMediaCodec.queueInputBuffer(inputBufferIndex, 0, mOffset, presentationTimeUs, inputqueue_flag);
mOffset = 0;
sample_size = 0;
}
}
MediaCodec.BufferInfo buf_info = new MediaCodec.BufferInfo();
//Check to get available outputBuffer index
outputBufferIndex = mMediaCodec.dequeueOutputBuffer(buf_info, 0);
Log.v(TAG, "outputBufferIndex:"+outputBufferIndex);
if (outputBufferIndex >= 0) {
// outputBuffer is ready to be processed or rendered.
output_cnt++;
Log.v(TAG, "buf_info offset:" + buf_info.offset + ", size:" + buf_info.size + ", timeus:" + buf_info.presentationTimeUs + ", remain:" + mOutputBuffers[outputBufferIndex].remaining());
// The output ByteBuffer has no accessible backing array (hasArray() returns false),
// so copy the decoded PCM into a byte[] manually before handing it to AudioTrack.
if( mMime.contains("audio"))
{
byte[] pcm = new byte[buf_info.size];
mOutputBuffers[outputBufferIndex].get(pcm, 0, buf_info.size);
//deliver pcm data to audiotrack.
mAudioTrack.write(pcm, 0, buf_info.size);
mRender_flag = false;
}
else
{
// let MediaCodec render the frame to the Surface when the buffer is released below.
mRender_flag = true;
}
mMediaCodec.releaseOutputBuffer(outputBufferIndex, mRender_flag);
mOutputBuffers[outputBufferIndex].clear();
} else if (outputBufferIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
mOutputBuffers = mMediaCodec.getOutputBuffers();
} else if (outputBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
// Subsequent data will conform to new format.
MediaFormat format = mMediaCodec.getOutputFormat();
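// (For audio, the new format's KEY_SAMPLE_RATE / KEY_CHANNEL_COUNT could be used here to
// reconfigure the AudioTrack; not handled in this test code.)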
}
//buf_info = null;
status_update = "inbuf:"+input_cnt + " outbuf:" + output_cnt;
mHandler.post(new Runnable() {
public void run() {
status.setText(status_update);
}
});
}
}
});
mThread.start();
return true;
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
getMenuInflater().inflate(R.menu.activity_main, menu);
return true;
}
private void playVideo(Integer Media) {
doCleanUp();
try {
switch(Media) {
case LOCAL_VIDEO:
mPath = mVideofile_path;
break;
}
mMediaPlayer.setDataSource(mPath);
mMediaPlayer.setDisplay(mSurfaceHolder);
mMediaPlayer.prepare();
mMediaPlayer.setOnVideoSizeChangedListener((OnVideoSizeChangedListener) this);
} catch (Exception e) {
Log.e(TAG, "playVideo failed", e);
}
}
public void onPrepared(MediaPlayer mediaplayer) {
}
public void surfaceCreated(SurfaceHolder holder) {
}
private void doCleanUp() {
mVideoWidth = 0;
mVideoHeight = 0;
}
private void startVideoPlayback() {
mMediaPlayer.start();
bStart = true;
}
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width,
int height) {
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
}
@Override
public void onVideoSizeChanged(MediaPlayer arg0, int arg1, int arg2) {
mVideoWidth = arg1;
mVideoHeight = arg2;
mSurfaceHolder.setFixedSize(mVideoWidth, mVideoHeight);
Log.v(TAG, "setFixedSize " + mVideoWidth + " X " + mVideoHeight);
startVideoPlayback();
}
@Override
public boolean onError(MediaPlayer mp, int what, int extra) {
String error;
error = "error(" + what + "," + extra + ")";
Toast.makeText(getApplicationContext(), error, Toast.LENGTH_LONG).show();
mp.release();
bStart = false;
return false;
}
}