下面列出了 android.media.ImageReader#acquireNextImage() 的实例代码；也可以点击链接到 GitHub 查看源代码，或在右侧发表评论。
@Override
public void onImageAvailable(ImageReader reader) {
    synchronized (mLock) {
        // Hold at most one image at a time; while borrowedImage is set,
        // further frames stay queued inside the ImageReader until the
        // borrowed image is released elsewhere.
        if (borrowedImage == null) {
            Image i = reader.acquireNextImage();
            // FIX: acquireNextImage() returns null when no image is queued;
            // the original dereferenced it unconditionally (NPE in debug logging).
            if (i == null) {
                return;
            }
            borrowedImage = i;
            if (bDebug) {
                // Dump the frame geometry and the native start address of each
                // YUV plane for debugging the native render path.
                Log.d(TAG, "==== OnImageReady ====");
                Log.d(TAG, "Size:" + i.getWidth() + "x" + i.getHeight());
                Log.d(TAG, "Format:" + i.getFormat());
                Log.d(TAG, "#Planes:" + i.getPlanes().length);
                Log.d(TAG, "Y-Plane Pixel Stride:" + i.getPlanes()[0].getPixelStride());
                Log.d(TAG, "Y-Planes Row Stride" + i.getPlanes()[0].getRowStride());
                Log.d(TAG, "Y(i[0]) Start Address:" + NativeRender.getPointerFromByteBuffer(i.getPlanes()[0].getBuffer(), 0));
                Log.d(TAG, "U(i[0]) Start Address:" + NativeRender.getPointerFromByteBuffer(i.getPlanes()[1].getBuffer(), 0));
                Log.d(TAG, "V(i[0]) Start Address:" + NativeRender.getPointerFromByteBuffer(i.getPlanes()[2].getBuffer(), 0));
                Log.d(TAG, "======================");
            }
            // Notify the consumer that a new frame is available for pickup.
            if (mCallback != null) mCallback.onFrameReady();
        }
    }
}
@Override
public void onImageAvailable(ImageReader reader) {
    // try-with-resources closes the Image even if the callback throws;
    // leaked images exhaust the reader's buffer queue.
    try (Image image = reader.acquireNextImage()) {
        // FIX: acquireNextImage() may return null when no image is queued;
        // the original would NPE at image.getPlanes(). A null resource is
        // legal in try-with-resources (close() is simply skipped).
        if (image == null) {
            return;
        }
        Image.Plane[] planes = image.getPlanes();
        if (planes.length > 0) {
            // Copy the first plane (the full JPEG payload for JPEG-format
            // readers) out of the direct buffer before the Image is closed.
            ByteBuffer buffer = planes[0].getBuffer();
            byte[] data = new byte[buffer.remaining()];
            buffer.get(data);
            mCallback.onPictureTaken(data);
        }
    }
}
@Override
public void onImageAvailable(ImageReader reader) {
    // FIX: the original called close() only on the success path, so an
    // exception from conversion or the frame processor leaked the Image
    // and eventually stalled the ImageReader. try-with-resources closes
    // it on every path; a null resource is legal (close() is skipped).
    try (Image image = reader.acquireNextImage()) {
        if (image == null) {
            return;
        }
        // Convert the YUV_420_888 frame to NV21 and hand it to the processor.
        mFrameProcessor.setNextFrame(convertYUV420888ToNV21(image));
    }
}
@Override
public void onImageAvailable(ImageReader reader) {
    // try-with-resources closes the Image even if the callback throws;
    // leaked images exhaust the reader's buffer queue.
    try (Image image = reader.acquireNextImage()) {
        // FIX: acquireNextImage() may return null when no image is queued;
        // the original would NPE at image.getPlanes(). A null resource is
        // legal in try-with-resources (close() is simply skipped).
        if (image == null) {
            return;
        }
        Image.Plane[] planes = image.getPlanes();
        if (planes.length > 0) {
            // Copy the first plane out of the direct buffer before the
            // Image is closed, then deliver it to the picture callback.
            ByteBuffer buffer = planes[0].getBuffer();
            byte[] data = new byte[buffer.remaining()];
            buffer.get(data);
            mCallback.onPictureTaken(data);
        }
    }
}
@Override
public void onImageAvailable(ImageReader reader) {
    // FIX: the original called close() only on the success path, so an
    // exception from conversion or the frame processor leaked the Image
    // and eventually stalled the ImageReader. try-with-resources closes
    // it on every path; a null resource is legal (close() is skipped).
    try (Image image = reader.acquireNextImage()) {
        if (image == null) {
            return;
        }
        // Convert the YUV_420_888 frame to NV21 and hand it to the processor.
        mFrameProcessor.setNextFrame(convertYUV420888ToNV21(image));
    }
}
@Override
public void onImageAvailable(ImageReader reader) {
    // try-with-resources closes the Image even if the callback throws;
    // leaked images exhaust the reader's buffer queue.
    try (Image image = reader.acquireNextImage()) {
        // FIX: acquireNextImage() may return null when no image is queued;
        // the original would NPE at image.getPlanes(). A null resource is
        // legal in try-with-resources (close() is simply skipped).
        if (image == null) {
            return;
        }
        Image.Plane[] planes = image.getPlanes();
        if (planes.length > 0) {
            // Copy the first plane out of the direct buffer before the
            // Image is closed, then deliver it to the picture callback.
            ByteBuffer buffer = planes[0].getBuffer();
            byte[] data = new byte[buffer.remaining()];
            buffer.get(data);
            mCallback.onPictureTaken(data);
        }
    }
}
@Override
public void onImageAvailable(ImageReader reader) {
    // try-with-resources closes the Image even if the callback throws;
    // leaked images exhaust the reader's buffer queue.
    try (Image image = reader.acquireNextImage()) {
        // FIX: acquireNextImage() may return null when no image is queued;
        // the original would NPE at image.getPlanes(). A null resource is
        // legal in try-with-resources (close() is simply skipped).
        if (image == null) {
            return;
        }
        Image.Plane[] planes = image.getPlanes();
        if (planes.length > 0) {
            // Copy the first plane out of the direct buffer before the
            // Image is closed, then deliver it to the picture callback.
            ByteBuffer buffer = planes[0].getBuffer();
            byte[] data = new byte[buffer.remaining()];
            buffer.get(data);
            mCallback.onPictureTaken(data);
        }
    }
}
/**
 * Initiates a still capture and routes the shutter/JPEG events back to the
 * caller on the supplied handler.
 *
 * The {@code raw} and {@code postview} callbacks are accepted for interface
 * compatibility but never invoked (see TODO below).
 */
@Override
public void takePicture(final Handler handler,
final CameraShutterCallback shutter,
CameraPictureCallback raw,
CameraPictureCallback postview,
final CameraPictureCallback jpeg) {
// TODO: We never call raw or postview
// Listener that bridges capture-session events to the app-level callbacks.
final CaptureAvailableListener picListener =
new CaptureAvailableListener() {
@Override
public void onCaptureStarted(CameraCaptureSession session, CaptureRequest request,
long timestamp, long frameNumber) {
// Play the shutter sound (if enabled) and fire the shutter callback on
// the caller-provided handler, not on the camera callback thread.
if (shutter != null) {
handler.post(new Runnable() {
@Override
public void run() {
if (mShutterSoundEnabled) {
mNoisemaker.play(MediaActionSound.SHUTTER_CLICK);
}
shutter.onShutter(AndroidCamera2ProxyImpl.this);
}});
}
}
@Override
public void onImageAvailable(ImageReader reader) {
// try-with-resources closes the Image after the bytes are copied out.
// NOTE(review): acquireNextImage() can return null — this dereferences
// it unconditionally; confirm the reader is always backed by a frame here.
try (Image image = reader.acquireNextImage()) {
if (jpeg != null) {
// Copy the JPEG payload out of the direct buffer before the Image
// is closed, then deliver it on the caller's handler.
ByteBuffer buffer = image.getPlanes()[0].getBuffer();
final byte[] pixels = new byte[buffer.remaining()];
buffer.get(pixels);
handler.post(new Runnable() {
@Override
public void run() {
jpeg.onPictureTaken(pixels, AndroidCamera2ProxyImpl.this);
}});
}
}
}};
try {
// The capture request itself is queued on the dispatch thread so it runs
// only once the camera has reached (at least) the preview-active state.
mDispatchThread.runJob(new Runnable() {
@Override
public void run() {
// Wait until PREVIEW_ACTIVE or better
mCameraState.waitForStates(
~(AndroidCamera2StateHolder.CAMERA_PREVIEW_ACTIVE - 1));
mCameraHandler.obtainMessage(CameraActions.CAPTURE_PHOTO, picListener)
.sendToTarget();
}
});
} catch (RuntimeException ex) {
// Dispatch-thread failures are reported through the agent's exception
// handler rather than crashing the caller.
mCameraAgent.getCameraExceptionHandler().onDispatchThreadException(ex);
}
}
/**
 * Handles a captured frame: preprocesses it into a Bitmap, shows it in the
 * UI, runs the TensorFlow classifier, renders the result text, and speaks
 * the result via TTS when an engine is available.
 */
@Override
public void onImageAvailable(ImageReader reader) {
final Bitmap bitmap;
// try-with-resources releases the Image back to the reader as soon as the
// preprocessed Bitmap copy exists.
// NOTE(review): acquireNextImage() can return null — this passes it straight
// to preprocessImage(); confirm a frame is always available in this callback.
try (Image image = reader.acquireNextImage()) {
bitmap = mImagePreprocessor.preprocessImage(image);
}
// Show the captured frame; UI mutations must happen on the main thread.
runOnUiThread(new Runnable() {
@Override
public void run() {
mImage.setImageBitmap(bitmap);
}
});
// Classification runs here, off the UI thread.
final Collection<Recognition> results = mTensorFlowClassifier.doRecognize(bitmap);
Log.d(TAG, "Got the following results from Tensorflow: " + results);
runOnUiThread(new Runnable() {
@Override
public void run() {
if (results == null || results.isEmpty()) {
mResultText.setText("I don't understand what I see");
} else {
// Join the recognition titles into a human-readable list:
// "a, b or c" — comma-separated, with " or " before the last item.
StringBuilder sb = new StringBuilder();
Iterator<Recognition> it = results.iterator();
int counter = 0;
while (it.hasNext()) {
Recognition r = it.next();
sb.append(r.getTitle());
counter++;
if (counter < results.size() - 1 ) {
sb.append(", ");
} else if (counter == results.size() - 1) {
sb.append(" or ");
}
}
mResultText.setText(sb.toString());
}
}
});
if (mTtsEngine != null) {
// speak out loud the result of the image recognition
mTtsSpeaker.speakResults(mTtsEngine, results);
} else {
// If there's no TTS we don't need to wait until the utterance is spoken,
// so we mark the pipeline ready for the next capture right away.
setReady(true);
}
}
@Override
public void onImageAvailable(ImageReader reader) {
    // FIX: the original never closed the acquired Image, leaking one buffer
    // per frame until the ImageReader's queue was exhausted and the camera
    // stalled. try-with-resources closes it on every path; the null guard
    // covers the case where no image is actually queued.
    try (Image image = reader.acquireNextImage()) {
        if (image == null) {
            return;
        }
        Log.d("image", "size [ w: " + image.getWidth() + " h: " + image.getHeight() + " ]");
    }
}