I am using Android camera2 to extract frames in real time in preview mode and stitch them on the NDK side (OpenCV). I use an ImageReader surface to get the frames, but it is limited to a maximum image count:
ImageReader.newInstance(800, 600, ImageFormat.YUV_420_888, MAX_FRAME);
It works, but since we need every frame, we have to remove the ImageReader target surface from the CaptureRequest.Builder and add it back again, so there is a lag at that point.
How can we have an ImageReader without a maximum image count, or how can we get every frame from the live video in camera2 without any lag in the preview?
My code:
private void startPreview() {
    if (null == mCameraDevice || !mTextureView.isAvailable() || null == mPreviewSize) {
        return;
    }
    try {
        SurfaceTexture texture = mTextureView.getSurfaceTexture();
        assert texture != null;
        texture.setDefaultBufferSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());
        Surface previewSurface = new Surface(texture);
        surfaces = new ArrayList<>();
        mPreviewBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_RECORD);
        surfaces.add(previewSurface);
        mPreviewBuilder.addTarget(previewSurface);
        mPreviewCaptureCallback = new CameraCaptureSession.StateCallback() {
            @Override
            public void onConfigured(@NonNull CameraCaptureSession session) {
                // closePreviewSession();
                mPreviewSession = session;
                try {
                    session.setRepeatingRequest(mPreviewBuilder.build(), null, mBackgroundHandler);
                } catch (CameraAccessException e) {
                    e.printStackTrace();
                }
            }

            @Override
            public void onConfigureFailed(@NonNull CameraCaptureSession session) {
                Activity activity = getActivity();
                if (null != activity) {
                    Toast.makeText(activity, "Failed", Toast.LENGTH_SHORT).show();
                }
            }
        };
        mCameraDevice.createCaptureSession(surfaces, mPreviewCaptureCallback, mBackgroundHandler);
    } catch (CameraAccessException e) {
        e.printStackTrace();
    }
}

private void addImageReaderSurface() {
    try {
        SurfaceTexture texture = mTextureView.getSurfaceTexture();
        assert texture != null;
        texture.setDefaultBufferSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());
        Surface previewSurface = new Surface(texture);
        surfaces = new ArrayList<>();
        mPreviewBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_RECORD);
        surfaces.add(previewSurface);
        mPreviewBuilder.addTarget(previewSurface);
        newImageReader(10);
        surfaces.add(mImageReader.getSurface());
        mPreviewBuilder.addTarget(mImageReader.getSurface());
        mPreviewCaptureCallback = new CameraCaptureSession.StateCallback() {
            @Override
            public void onConfigured(@NonNull CameraCaptureSession session) {
                mPreviewSession = session;
                try {
                    mPreviewSession.setRepeatingRequest(mPreviewBuilder.build(), null, mBackgroundHandler);
                } catch (CameraAccessException e) {
                    e.printStackTrace();
                }
            }

            @Override
            public void onConfigureFailed(@NonNull CameraCaptureSession session) {
                Activity activity = getActivity();
                if (null != activity) {
                    Toast.makeText(activity, "Failed", Toast.LENGTH_SHORT).show();
                }
            }
        };
        mCameraDevice.createCaptureSession(surfaces, mPreviewCaptureCallback, mBackgroundHandler);
    } catch (Exception e) {
        e.printStackTrace();
    }
}

private void newImageReader(final int count) {
    mImageReader = ImageReader.newInstance(800, 600, ImageFormat.YUV_420_888, count);
    mImageReader.setOnImageAvailableListener(new ImageReader.OnImageAvailableListener() {
        @Override
        public void onImageAvailable(ImageReader reader) {
            index++;
            Log.e("xxx", "frame " + index + " received");
            if (index == count) {
                index = 0;
                // remove and re-add the reader before the max image count is reached
                addImageReaderSurface();
            }
        }
    }, mBackgroundHandler);
}
After acquiring an image from the reader with Image img = reader.acquireLatestImage(), you must call img.close().
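The maxImages argument to ImageReader.newInstance() is not a cap on how many frames the reader can ever deliver; it is the maximum number of Image objects you may hold open from that reader at the same time. As long as every acquired Image is closed, its slot returns to the queue and the same reader keeps delivering frames for as long as the repeating request runs, so there is no need to tear down and rebuild the capture session. Below is a minimal sketch of such a listener, reusing the fields from the question; processFrame(...) is a hypothetical native (JNI) method standing in for the OpenCV stitching call, not a real API.
private void newImageReader(final int count) {
    mImageReader = ImageReader.newInstance(800, 600, ImageFormat.YUV_420_888, count);
    mImageReader.setOnImageAvailableListener(new ImageReader.OnImageAvailableListener() {
        @Override
        public void onImageAvailable(ImageReader reader) {
            // acquireNextImage() returns frames in order; it may return null
            // if the queue has already been drained.
            Image img = reader.acquireNextImage();
            if (img == null) {
                return;
            }
            try {
                // Hand the YUV planes to native code. processFrame(...) is a
                // placeholder for your own JNI call.
                Image.Plane[] planes = img.getPlanes();
                processFrame(planes[0].getBuffer(),      // Y
                             planes[1].getBuffer(),      // U
                             planes[2].getBuffer(),      // V
                             planes[0].getRowStride(),
                             planes[1].getRowStride(),
                             planes[1].getPixelStride(),
                             img.getWidth(),
                             img.getHeight());
            } finally {
                // Closing the Image frees its slot in the reader's queue,
                // so the reader never hits the maxImages limit.
                img.close();
            }
        }
    }, mBackgroundHandler);
}
Because you need every frame, the sketch uses acquireNextImage() rather than acquireLatestImage(), which silently drops older frames. If the native processing is slower than the camera's frame rate, copy the plane data and hand it to a worker thread (or raise maxImages slightly); if all maxImages slots stay open at once, the camera will stall.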