Skip to content

Instantly share code, notes, and snippets.

@salememd
Created May 4, 2018 14:44
Show Gist options
  • Save salememd/e78bb82559585239ee6f9128b10913a4 to your computer and use it in GitHub Desktop.
Save salememd/e78bb82559585239ee6f9128b10913a4 to your computer and use it in GitHub Desktop.
MediaCodec & ImageReader to grab frames
package ly.umbrella.opencvtest2;
import android.content.Context;
import android.graphics.Bitmap;
import android.media.Image;
import android.media.ImageReader;
import android.util.Log;
import org.opencv.android.Utils;
import org.opencv.core.Mat;
import org.opencv.imgproc.Imgproc;
import ly.umbrella.opencvtest2.helpers.MultiMediaSource;
import ly.umbrella.opencvtest2.helpers.VideoProcessor;
import static ly.umbrella.opencvtest2.helpers.Utilits.YUV_420_888_toRGB;
/**
* Created by Dexter on 11/29/2017.
*/
/**
 * Drives video playback: runs a {@link VideoProcessor} decode loop on a worker
 * thread, receives each decoded frame via {@link ImageReader.OnImageAvailableListener},
 * and exposes the most recent frame as an OpenCV BGR {@link Mat} for preview.
 */
public class VideoPreview extends AudioPreview implements ImageReader.OnImageAvailableListener {

    private static final String TAG = "VideoPreview";

    /** Set once start() has run; volatile because it is written on the caller
     *  thread and read on whichever thread calls getPreviewFrame(). */
    private volatile boolean isVideoStarted;

    /** Guards viewFrame: written on the ImageReader handler thread,
     *  cloned/released on other threads. */
    private final Object frameLock = new Object();
    /** Latest decoded frame, BGR order; null until the first frame arrives. */
    private Mat viewFrame = null;

    private VideoProcessor videoProcessor;
    private Thread videoExecutor;

    public VideoPreview(Context context) {
        super(context);
        this.isVideoStarted = false;
    }

    // prepare video: build (but do not start) the decode-loop thread
    public void init() {
        super.init();
        final MultiMediaSource ds = super.getDataSource();
        videoExecutor = new Thread(new Runnable() {
            @Override
            public void run() {
                try {
                    videoProcessor = new VideoProcessor(VideoPreview.this, ds);
                } catch (Exception e) {
                    // Was Log.d("errer", e.getMessage()) — getMessage() can be
                    // null, which makes Log.d itself throw. Log the throwable.
                    Log.d(TAG, "failed to create VideoProcessor", e);
                }
                // ~30fps pacing; VideoProcessor.run() returns false at EOS.
                while (videoProcessor != null && videoProcessor.run()) {
                    try {
                        Thread.sleep(34);
                    } catch (InterruptedException e) {
                        // Restore the interrupt flag and stop the loop rather
                        // than silently swallowing the interruption.
                        Thread.currentThread().interrupt();
                        break;
                    }
                }
                VideoPreview.this.clean();
            }
        });
    }

    // play video
    public void start() {
        super.start();
        try {
            this.isVideoStarted = true;
            videoExecutor.start();
        } catch (Exception e) {
            // e.g. init() not called (videoExecutor null) or thread already started
            Log.d(TAG, "failed to start video thread", e);
        }
    }

    /**
     * Returns a copy of the most recent decoded frame, or null if playback has
     * not produced a frame yet. Lazily starts playback on first call.
     */
    public Mat getPreviewFrame() {
        if (!this.isVideoStarted) {
            start();
            return null;
        }
        synchronized (frameLock) {
            // Explicit null check instead of the old catch-NPE control flow.
            return viewFrame == null ? null : viewFrame.clone();
        }
    }

    // destroy objects
    public void clean() {
        super.clean();
        if (videoExecutor != null) {
            videoExecutor.interrupt();
            videoExecutor = null;
        }
        if (videoProcessor != null) {
            videoProcessor.clean();
            videoProcessor = null;
        }
        synchronized (frameLock) {
            if (viewFrame != null) {
                viewFrame.release();
                viewFrame = null;
            }
        }
    }

    // Each time a new frame is available, convert it to RGB and then to an
    // OpenCV Mat (BGR) for preview.
    @Override
    public void onImageAvailable(ImageReader reader) {
        Image image = null;
        try {
            Log.d("newFrame", "img");
            image = reader.acquireNextImage();
            if (image == null) {
                return; // nothing queued
            }
            // assumes the stream decodes to 640x360 — TODO confirm against the
            // track's MediaFormat instead of hard-coding the size.
            Bitmap p = YUV_420_888_toRGB(image, 640, 360, super.getContext());
            Mat mat = new Mat();
            Utils.bitmapToMat(p, mat);
            Imgproc.cvtColor(mat, mat, Imgproc.COLOR_RGBA2BGR);
            Mat previous;
            synchronized (frameLock) {
                previous = viewFrame;
                viewFrame = mat;
            }
            if (previous != null) {
                // Was leaked: each superseded frame's native buffer was dropped
                // without release; only the final frame was freed in clean().
                previous.release();
            }
        } catch (Exception e) {
            // Pass the throwable — e.getMessage() may be null (NPE in Log.d).
            Log.d(TAG, "frame conversion failed", e);
        } finally {
            // Always return the Image to the reader, even on failure; otherwise
            // the reader's buffer queue fills and decoding stalls.
            if (image != null) {
                image.close();
            }
        }
    }
}
package ly.umbrella.opencvtest2.helpers;
import android.graphics.ImageFormat;
import android.media.ImageReader;
import android.media.MediaCodec;
import android.media.MediaDataSource;
import android.media.MediaExtractor;
import android.media.MediaFormat;
import android.os.Handler;
import android.os.HandlerThread;
import android.util.Log;
import android.view.Surface;
import java.nio.ByteBuffer;
/**
* Created by Dexter on 12/14/2017.
*/
/**
 * Decodes the first video track of a data source with MediaCodec, rendering
 * each decoded frame into an ImageReader surface so the supplied listener
 * receives it via onImageAvailable. Playback is paced to the stream's
 * presentation timestamps against wall-clock time.
 */
public class VideoProcessor {

    private static final String TAG = "VideoProcessor";
    /** Max wait for a codec input/output buffer, in microseconds (1 second). */
    private static final int TIMEOUT_US = 1000000;

    private MediaExtractor mediaExtractor;
    private long counterTime;        // start of the current FPS-counting window (ms)
    private long deltaTime;          // elapsed time in the current FPS window (ms)
    private int frameCount;          // frames rendered in the current FPS window
    private boolean eos;             // input-side end of stream reached
    private MediaCodec.BufferInfo bufferInfo;
    private MediaCodec decoder;
    private long playStartTime = System.currentTimeMillis();
    private long frameDisplayTime = playStartTime;  // wall-clock due time of the next frame (ms)
    Surface mReaderSurface;
    ImageReader mReader;
    /** Delivers onImageAvailable callbacks; quit in clean() to avoid a thread leak. */
    private HandlerThread mHandlerThread;

    public VideoProcessor(ImageReader.OnImageAvailableListener r, MultiMediaSource ds) throws Exception {
        // Reader size is irrelevant: the codec renders at the stream's own size.
        mReader = ImageReader.newInstance(1/*don't need it*/, 1/*don't need it*/, ImageFormat.YUV_420_888, 2);
        mReaderSurface = mReader.getSurface();
        mHandlerThread = new HandlerThread("hh");
        mHandlerThread.start();
        Handler mHandler = new Handler(mHandlerThread.getLooper());
        mReader.setOnImageAvailableListener(r, mHandler);
        mediaExtractor = new MediaExtractor();
        mediaExtractor.setDataSource(ds);//"http://192.168.1.220/ARplatform/upload/preview/c090c1bf088251748ccc.mp4"
        int numTracks = mediaExtractor.getTrackCount();
        String mimeType = null;
        MediaFormat format = null;
        for (int i = 0; i < numTracks; ++i) {
            MediaFormat trackFormat = mediaExtractor.getTrackFormat(i);
            String trackMime = trackFormat.getString(MediaFormat.KEY_MIME);
            if (trackMime != null && trackMime.startsWith("video/")) {
                // Must select the track we are going to get data by readSampleData()
                mediaExtractor.selectTrack(i);
                // Set required key for MediaCodec in decoder mode
                // Check http://developer.android.com/reference/android/media/MediaFormat.html
                trackFormat.setInteger(MediaFormat.KEY_CAPTURE_RATE, 24);
                trackFormat.setInteger(MediaFormat.KEY_PUSH_BLANK_BUFFERS_ON_STOP, 1);
                mimeType = trackMime;
                format = trackFormat;
                break;
            }
        }
        // Resolves the old TODO: fail fast with a clear message instead of
        // hitting createDecoderByType(null) when the source has no video track.
        if (mimeType == null || format == null) {
            throw new IllegalArgumentException("data source contains no video track");
        }
        frameCount = 0;
        eos = false;
        decoder = MediaCodec.createDecoderByType(mimeType);
        decoder.configure(format, mReaderSurface, null, 0 /* 0:decoder 1:encoder */);
        decoder.start();
        // Count FPS
        counterTime = System.currentTimeMillis();
        bufferInfo = new MediaCodec.BufferInfo();
    }

    /**
     * Feeds one sample to the decoder and renders one decoded frame if ready.
     *
     * @return true while playback should continue; false at end of stream or
     *         after an unrecoverable codec error.
     */
    public boolean run() {
        try {
            if (eos) {
                return false;
            }
            int inputBufferIndex = decoder.dequeueInputBuffer(TIMEOUT_US);
            if (inputBufferIndex >= 0) {
                ByteBuffer inputBuffer = decoder.getInputBuffer(inputBufferIndex);
                int sampleSize = mediaExtractor.readSampleData(inputBuffer, 0);
                if (sampleSize > 0) {
                    // Sample time is in microseconds; divide by 1000 for ms.
                    // (The old ">> 10" divided by 1024 — ~2.4% playback drift.)
                    frameDisplayTime = (mediaExtractor.getSampleTime() / 1000) + playStartTime;
                    // Video data is valid, send input buffer to MediaCodec for decode
                    decoder.queueInputBuffer(inputBufferIndex, 0, sampleSize,
                            mediaExtractor.getSampleTime(), 0);
                    mediaExtractor.advance();
                } else {
                    // End-Of-Stream (EOS): signal the decoder, stop feeding input.
                    decoder.queueInputBuffer(inputBufferIndex, 0, 0, 0,
                            MediaCodec.BUFFER_FLAG_END_OF_STREAM);
                    eos = true;
                }
            }
            int outputBufferIndex = decoder.dequeueOutputBuffer(bufferInfo, TIMEOUT_US);
            if (outputBufferIndex >= 0) {
                // Pace playback: wait until this frame's wall-clock due time.
                while (frameDisplayTime > System.currentTimeMillis()) {
                    try {
                        Thread.sleep(10);
                    } catch (InterruptedException ie) {
                        // Preserve the interrupt so the caller's loop can exit.
                        Thread.currentThread().interrupt();
                        break;
                    }
                }
                // outputBuffer is ready; render it to the ImageReader surface.
                decoder.releaseOutputBuffer(outputBufferIndex, true /*true:render to surface*/);
                // Count FPS over ~1-second windows.
                frameCount++;
                deltaTime = System.currentTimeMillis() - counterTime;
                if (deltaTime > 1000) {
                    Log.v("SampleMediaCodec", (((float) frameCount / (float) deltaTime) * 1000) + " fps");
                    counterTime = System.currentTimeMillis();
                    frameCount = 0;
                }
            }
            return true;
        } catch (Exception e) {
            // Was "return true", which spins forever on a persistent codec
            // error (e.g. clean() racing on another thread). Stop instead.
            Log.d(TAG, "decode step failed", e);
            return false;
        }
    }

    /** Releases codec, surface, reader, extractor and callback thread. Idempotent. */
    public synchronized void clean() {
        // Stop the decoder BEFORE releasing the surface it renders into —
        // the original order released the output surface out from under a
        // possibly-running codec.
        if (decoder != null) {
            decoder.stop();
            decoder.release();
            decoder = null;
        }
        if (mReaderSurface != null) {
            mReaderSurface.release();
            mReaderSurface = null;
        }
        if (mReader != null) {
            mReader.close();
            mReader = null;
        }
        if (mediaExtractor != null) {
            mediaExtractor.release();
            mediaExtractor = null;
        }
        if (mHandlerThread != null) {
            // Was leaked: the callback thread kept running after clean().
            mHandlerThread.quitSafely();
            mHandlerThread = null;
        }
    }
}
@Pascal66
Copy link

Do you have the other missing parts?
Thank you

Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment