Refactor and implement camera API 1 support; fixes ()

* Fallback to legacy camera API for problematic devices (a condensed sketch of this selection logic appears just before the file diffs)

* Refactor and implement camera API 1 support

* Refactor and implement camera API 1 support; fixes

* Apply corrections from @andrewharp

* Additional fixes for @andrewharp

* Fix a missing line

* Some more sanity fixes

* Optimize the preview size calculation,
and add validation to check whether the preview size has already been chosen

* Fixed some lines that were over 100 characters

* Update CameraActivity.java

Fixed indentations.

* Update ClassifierActivity.java

Fixed indentations

* Update StylizeActivity.java

Fixed indentations.

* Update StylizeActivity.java

Fixed spaces.

* Update StylizeActivity.java

Fix indentation.
osdamv 2017-07-24 13:05:02 -06:00 committed by Vijay Vasudevan
parent 0e4d9bc200
commit 003deb88b7
6 changed files with 507 additions and 250 deletions
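
As context for the per-file diffs below, here is a condensed, self-contained sketch of the camera-API selection logic this change adds to CameraActivity: the camera2 path is taken only when the chosen back-facing camera reports FULL (or better) hardware support, and LEGACY-level devices fall back to the old android.hardware.Camera API. The standalone CameraApiChooser wrapper, its method signatures, and the null guard on the hardware level are illustrative additions for this sketch; only the selection criteria mirror the diff.

import android.content.Context;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.params.StreamConfigurationMap;

/** Illustrative helper; in the actual change this logic lives directly in CameraActivity. */
public class CameraApiChooser {
  private boolean useCamera2Api;

  /** Returns true if the device supports the required hardware level, or better. */
  static boolean isHardwareLevelSupported(
      final CameraCharacteristics characteristics, final int requiredLevel) {
    final Integer deviceLevel =
        characteristics.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL);
    if (deviceLevel == null) {
      // Defensive guard for this sketch; the committed code assumes the key is present.
      return false;
    }
    if (deviceLevel == CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY) {
      // LEGACY sorts above FULL numerically even though it supports less, so require an exact match.
      return requiredLevel == deviceLevel;
    }
    return requiredLevel <= deviceLevel;
  }

  /**
   * Picks the first usable back-facing camera and records whether the camera2 API should be used
   * for it. Returns null if no suitable camera is found or camera access is denied.
   */
  String chooseCamera(final Context context) {
    final CameraManager manager = (CameraManager) context.getSystemService(Context.CAMERA_SERVICE);
    try {
      for (final String cameraId : manager.getCameraIdList()) {
        final CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);
        // The demo does not use a front-facing camera.
        final Integer facing = characteristics.get(CameraCharacteristics.LENS_FACING);
        if (facing != null && facing == CameraCharacteristics.LENS_FACING_FRONT) {
          continue;
        }
        final StreamConfigurationMap map =
            characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
        if (map == null) {
          continue;
        }
        // Only FULL-or-better devices take the camera2 path; LEGACY devices use the old API.
        useCamera2Api = isHardwareLevelSupported(
            characteristics, CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_FULL);
        return cameraId;
      }
    } catch (final CameraAccessException e) {
      // Not allowed to access the camera; the caller treats this as "no camera chosen".
    }
    return null;
  }

  boolean useCamera2Api() {
    return useCamera2Api;
  }
}

In the change itself, setFragment() uses this decision to instantiate either CameraConnectionFragment (camera2) or the new LegacyCameraConnectionFragment.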

CameraActivity.java

@@ -19,22 +19,34 @@ package org.tensorflow.demo;
import android.Manifest;
import android.app.Activity;
import android.app.Fragment;
import android.content.Context;
import android.content.pm.PackageManager;
import android.graphics.Bitmap;
import android.hardware.Camera;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.media.Image;
import android.media.Image.Plane;
import android.media.ImageReader;
import android.media.ImageReader.OnImageAvailableListener;
import android.os.Build;
import android.os.Bundle;
import android.os.Handler;
import android.os.HandlerThread;
import android.os.Trace;
import android.util.Size;
import android.view.KeyEvent;
import android.view.WindowManager;
import android.widget.Toast;
import java.nio.ByteBuffer;
import org.tensorflow.demo.env.Logger;
import org.tensorflow.demo.R;
public abstract class CameraActivity extends Activity implements OnImageAvailableListener {
import org.tensorflow.demo.env.ImageUtils;
import org.tensorflow.demo.env.Logger;
public abstract class CameraActivity extends Activity implements OnImageAvailableListener, Camera.PreviewCallback {
private static final Logger LOGGER = new Logger();
private static final int PERMISSIONS_REQUEST = 1;
@@ -46,6 +58,20 @@ public abstract class CameraActivity extends Activity implements OnImageAvailabl
private Handler handler;
private HandlerThread handlerThread;
private boolean useCamera2API;
protected Bitmap rgbFrameBitmap = null;
private int[] rgbBytes = null;
protected int previewWidth = 0;
protected int previewHeight = 0;
protected Bitmap croppedBitmap = null;
protected static final boolean SAVE_PREVIEW_BITMAP = false;
protected long lastProcessingTimeMs;
protected Bitmap cropCopyBitmap;
protected ResultsView resultsView;
protected boolean computing = false;
protected Runnable postInferenceCallback;
protected byte[][] yuvBytes = new byte[3][];
protected int yRowStride;
@Override
protected void onCreate(final Bundle savedInstanceState) {
@@ -62,6 +88,93 @@ public abstract class CameraActivity extends Activity implements OnImageAvailabl
}
}
/**
* Callback for android.hardware.Camera API
*/
@Override
public void onPreviewFrame(final byte[] bytes, final Camera camera) {
if (computing) {
return;
}
computing = true;
yuvBytes[0] = bytes;
try {
// Initialize the storage bitmaps once when the resolution is known.
if (rgbBytes == null) {
Camera.Size previewSize = camera.getParameters().getPreviewSize();
previewHeight = previewSize.height;
previewWidth = previewSize.width;
rgbBytes = new int[previewWidth * previewHeight];
onPreviewSizeChosen(new Size(previewSize.width, previewSize.height), 90);
}
ImageUtils.convertYUV420SPToARGB8888(bytes, rgbBytes, previewWidth, previewHeight, false);
} catch (final Exception e) {
LOGGER.e(e, "Exception!");
return;
}
postInferenceCallback = new Runnable() {
@Override
public void run() {
camera.addCallbackBuffer(bytes);
}
};
processImageRGBbytes(rgbBytes);
}
/**
* Callback for Camera2 API
*/
@Override
public void onImageAvailable(final ImageReader reader) {
Image image = null;
// We need to wait until we have a size from onPreviewSizeChosen
if (previewWidth == 0 || previewHeight == 0) {
return;
}
rgbBytes = new int[previewWidth * previewHeight];
try {
image = reader.acquireLatestImage();
if (image == null) {
return;
}
if (computing) {
image.close();
return;
}
computing = true;
Trace.beginSection("imageAvailable");
final Plane[] planes = image.getPlanes();
fillBytes(planes, yuvBytes);
yRowStride = planes[0].getRowStride();
final int uvRowStride = planes[1].getRowStride();
final int uvPixelStride = planes[1].getPixelStride();
ImageUtils.convertYUV420ToARGB8888(
yuvBytes[0],
yuvBytes[1],
yuvBytes[2],
rgbBytes,
previewWidth,
previewHeight,
yRowStride,
uvRowStride,
uvPixelStride,
false);
image.close();
} catch (final Exception e) {
if (image != null) {
image.close();
}
LOGGER.e(e, "Exception!");
Trace.endSection();
return;
}
processImageRGBbytes(rgbBytes);
Trace.endSection();
}
@Override
public synchronized void onStart() {
LOGGER.d("onStart " + this);
@@ -123,8 +236,8 @@ public abstract class CameraActivity extends Activity implements OnImageAvailabl
switch (requestCode) {
case PERMISSIONS_REQUEST: {
if (grantResults.length > 0
&& grantResults[0] == PackageManager.PERMISSION_GRANTED
&& grantResults[1] == PackageManager.PERMISSION_GRANTED) {
&& grantResults[0] == PackageManager.PERMISSION_GRANTED
&& grantResults[1] == PackageManager.PERMISSION_GRANTED) {
setFragment();
} else {
requestPermission();
@@ -135,7 +248,8 @@ public abstract class CameraActivity extends Activity implements OnImageAvailabl
private boolean hasPermission() {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
return checkSelfPermission(PERMISSION_CAMERA) == PackageManager.PERMISSION_GRANTED && checkSelfPermission(PERMISSION_STORAGE) == PackageManager.PERMISSION_GRANTED;
return checkSelfPermission(PERMISSION_CAMERA) == PackageManager.PERMISSION_GRANTED &&
checkSelfPermission(PERMISSION_STORAGE) == PackageManager.PERMISSION_GRANTED;
} else {
return true;
}
@@ -143,25 +257,80 @@ public abstract class CameraActivity extends Activity implements OnImageAvailabl
private void requestPermission() {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
if (shouldShowRequestPermissionRationale(PERMISSION_CAMERA) || shouldShowRequestPermissionRationale(PERMISSION_STORAGE)) {
Toast.makeText(CameraActivity.this, "Camera AND storage permission are required for this demo", Toast.LENGTH_LONG).show();
if (shouldShowRequestPermissionRationale(PERMISSION_CAMERA) ||
shouldShowRequestPermissionRationale(PERMISSION_STORAGE)) {
Toast.makeText(CameraActivity.this,
"Camera AND storage permission are required for this demo", Toast.LENGTH_LONG).show();
}
requestPermissions(new String[] {PERMISSION_CAMERA, PERMISSION_STORAGE}, PERMISSIONS_REQUEST);
}
}
// Returns true if the device supports the required hardware level, or better.
boolean isHardwareLevelSupported(CameraCharacteristics characteristics, int requiredLevel) {
int deviceLevel = characteristics.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL);
if (deviceLevel == CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY) {
return requiredLevel == deviceLevel;
}
// deviceLevel is not LEGACY, so a numerical comparison is valid
return requiredLevel <= deviceLevel;
}
private String chooseCamera() {
final CameraManager manager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);
try {
for (final String cameraId : manager.getCameraIdList()) {
final CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);
// We don't use a front facing camera in this sample.
final Integer facing = characteristics.get(CameraCharacteristics.LENS_FACING);
if (facing != null && facing == CameraCharacteristics.LENS_FACING_FRONT) {
continue;
}
final StreamConfigurationMap map =
characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
if (map == null) {
continue;
}
useCamera2API = isHardwareLevelSupported(characteristics,
CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_FULL);
LOGGER.i("Camera API lv2?: %s", useCamera2API);
return cameraId;
}
} catch (CameraAccessException e) {
LOGGER.e(e, "Not allowed to access camera");
}
return null;
}
protected void setFragment() {
final Fragment fragment =
CameraConnectionFragment.newInstance(
new CameraConnectionFragment.ConnectionCallback() {
@Override
public void onPreviewSizeChosen(final Size size, final int rotation) {
CameraActivity.this.onPreviewSizeChosen(size, rotation);
}
},
this,
getLayoutId(),
getDesiredPreviewFrameSize());
String cameraId = chooseCamera();
Fragment fragment;
if (useCamera2API) {
CameraConnectionFragment camera2Fragment =
CameraConnectionFragment.newInstance(
new CameraConnectionFragment.ConnectionCallback() {
@Override
public void onPreviewSizeChosen(final Size size, final int rotation) {
previewHeight = size.getHeight();
previewWidth = size.getWidth();
CameraActivity.this.onPreviewSizeChosen(size, rotation);
}
},
this,
getLayoutId(),
getDesiredPreviewFrameSize());
camera2Fragment.setCamera(cameraId);
fragment = camera2Fragment;
} else {
fragment = new LegacyCameraConnectionFragment(this, getLayoutId());
}
getFragmentManager()
.beginTransaction()
@@ -213,6 +382,7 @@ public abstract class CameraActivity extends Activity implements OnImageAvailabl
return super.onKeyDown(keyCode, event);
}
protected abstract void processImageRGBbytes(int[] rgbBytes);
protected abstract void onPreviewSizeChosen(final Size size, final int rotation);
protected abstract int getLayoutId();
protected abstract Size getDesiredPreviewFrameSize();

CameraConnectionFragment.java

@@ -353,58 +353,44 @@ public class CameraConnectionFragment extends Fragment {
super.onPause();
}
public void setCamera(String cameraId) {
this.cameraId = cameraId;
}
/**
* Sets up member variables related to camera.
*
* @param width The width of available size for camera preview
* @param height The height of available size for camera preview
*/
private void setUpCameraOutputs(final int width, final int height) {
private void setUpCameraOutputs() {
final Activity activity = getActivity();
final CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
try {
for (final String cameraId : manager.getCameraIdList()) {
final CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);
final CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);
// We don't use a front facing camera in this sample.
final Integer facing = characteristics.get(CameraCharacteristics.LENS_FACING);
if (facing != null && facing == CameraCharacteristics.LENS_FACING_FRONT) {
continue;
}
final StreamConfigurationMap map =
characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
final StreamConfigurationMap map =
characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
// For still image captures, we use the largest available size.
final Size largest =
Collections.max(
Arrays.asList(map.getOutputSizes(ImageFormat.YUV_420_888)),
new CompareSizesByArea());
if (map == null) {
continue;
}
sensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
// For still image captures, we use the largest available size.
final Size largest =
Collections.max(
Arrays.asList(map.getOutputSizes(ImageFormat.YUV_420_888)),
new CompareSizesByArea());
// Danger, W.R.! Attempting to use too large a preview size could exceed the camera
// bus' bandwidth limitation, resulting in gorgeous previews but the storage of
// garbage capture data.
previewSize =
chooseOptimalSize(map.getOutputSizes(SurfaceTexture.class),
inputSize.getWidth(),
inputSize.getHeight());
sensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
// Danger, W.R.! Attempting to use too large a preview size could exceed the camera
// bus' bandwidth limitation, resulting in gorgeous previews but the storage of
// garbage capture data.
previewSize =
chooseOptimalSize(
map.getOutputSizes(SurfaceTexture.class),
inputSize.getWidth(),
inputSize.getHeight());
// We fit the aspect ratio of TextureView to the size of preview we picked.
final int orientation = getResources().getConfiguration().orientation;
if (orientation == Configuration.ORIENTATION_LANDSCAPE) {
textureView.setAspectRatio(previewSize.getWidth(), previewSize.getHeight());
} else {
textureView.setAspectRatio(previewSize.getHeight(), previewSize.getWidth());
}
CameraConnectionFragment.this.cameraId = cameraId;
// We fit the aspect ratio of TextureView to the size of preview we picked.
final int orientation = getResources().getConfiguration().orientation;
if (orientation == Configuration.ORIENTATION_LANDSCAPE) {
textureView.setAspectRatio(previewSize.getWidth(), previewSize.getHeight());
} else {
textureView.setAspectRatio(previewSize.getHeight(), previewSize.getWidth());
}
} catch (final CameraAccessException e) {
LOGGER.e(e, "Exception!");
@@ -425,7 +411,7 @@ public class CameraConnectionFragment extends Fragment {
* Opens the camera specified by {@link CameraConnectionFragment#cameraId}.
*/
private void openCamera(final int width, final int height) {
setUpCameraOutputs(width, height);
setUpCameraOutputs();
configureTransform(width, height);
final Activity activity = getActivity();
final CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);

ClassifierActivity.java

@@ -22,22 +22,19 @@ import android.graphics.Canvas;
import android.graphics.Matrix;
import android.graphics.Paint;
import android.graphics.Typeface;
import android.media.Image;
import android.media.Image.Plane;
import android.media.ImageReader;
import android.media.ImageReader.OnImageAvailableListener;
import android.os.SystemClock;
import android.os.Trace;
import android.util.Size;
import android.util.TypedValue;
import android.view.Display;
import java.util.List;
import java.util.Vector;
import org.tensorflow.demo.OverlayView.DrawCallback;
import org.tensorflow.demo.env.BorderedText;
import org.tensorflow.demo.env.ImageUtils;
import org.tensorflow.demo.env.Logger;
import org.tensorflow.demo.R;
public class ClassifierActivity extends CameraActivity implements OnImageAvailableListener {
private static final Logger LOGGER = new Logger();
@@ -64,39 +61,25 @@ public class ClassifierActivity extends CameraActivity implements OnImageAvailab
private static final String INPUT_NAME = "input";
private static final String OUTPUT_NAME = "output";
private static final String MODEL_FILE = "file:///android_asset/tensorflow_inception_graph.pb";
private static final String LABEL_FILE =
"file:///android_asset/imagenet_comp_graph_label_strings.txt";
private static final boolean SAVE_PREVIEW_BITMAP = false;
private static final boolean MAINTAIN_ASPECT = true;
private static final Size DESIRED_PREVIEW_SIZE = new Size(640, 480);
private Classifier classifier;
private Integer sensorOrientation;
private int previewWidth = 0;
private int previewHeight = 0;
private byte[][] yuvBytes;
private int[] rgbBytes = null;
private Bitmap rgbFrameBitmap = null;
private Bitmap croppedBitmap = null;
private Bitmap cropCopyBitmap;
private boolean computing = false;
private Classifier classifier;
private Matrix frameToCropTransform;
private Matrix cropToFrameTransform;
private ResultsView resultsView;
private BorderedText borderedText;
private long lastProcessingTimeMs;
@Override
protected int getLayoutId() {
@@ -112,9 +95,8 @@ public class ClassifierActivity extends CameraActivity implements OnImageAvailab
@Override
public void onPreviewSizeChosen(final Size size, final int rotation) {
final float textSizePx =
TypedValue.applyDimension(
TypedValue.COMPLEX_UNIT_DIP, TEXT_SIZE_DIP, getResources().getDisplayMetrics());
final float textSizePx = TypedValue.applyDimension(
TypedValue.COMPLEX_UNIT_DIP, TEXT_SIZE_DIP, getResources().getDisplayMetrics());
borderedText = new BorderedText(textSizePx);
borderedText.setTypeface(Typeface.MONOSPACE);
@@ -129,7 +111,6 @@ public class ClassifierActivity extends CameraActivity implements OnImageAvailab
INPUT_NAME,
OUTPUT_NAME);
resultsView = (ResultsView) findViewById(R.id.results);
previewWidth = size.getWidth();
previewHeight = size.getHeight();
@@ -141,15 +122,13 @@ public class ClassifierActivity extends CameraActivity implements OnImageAvailab
sensorOrientation = rotation + screenOrientation;
LOGGER.i("Initializing at size %dx%d", previewWidth, previewHeight);
rgbBytes = new int[previewWidth * previewHeight];
rgbFrameBitmap = Bitmap.createBitmap(previewWidth, previewHeight, Config.ARGB_8888);
croppedBitmap = Bitmap.createBitmap(INPUT_SIZE, INPUT_SIZE, Config.ARGB_8888);
frameToCropTransform =
ImageUtils.getTransformationMatrix(
previewWidth, previewHeight,
INPUT_SIZE, INPUT_SIZE,
sensorOrientation, MAINTAIN_ASPECT);
frameToCropTransform = ImageUtils.getTransformationMatrix(
previewWidth, previewHeight,
INPUT_SIZE, INPUT_SIZE,
sensorOrientation, MAINTAIN_ASPECT);
cropToFrameTransform = new Matrix();
frameToCropTransform.invert(cropToFrameTransform);
@@ -165,52 +144,7 @@ public class ClassifierActivity extends CameraActivity implements OnImageAvailab
});
}
@Override
public void onImageAvailable(final ImageReader reader) {
Image image = null;
try {
image = reader.acquireLatestImage();
if (image == null) {
return;
}
if (computing) {
image.close();
return;
}
computing = true;
Trace.beginSection("imageAvailable");
final Plane[] planes = image.getPlanes();
fillBytes(planes, yuvBytes);
final int yRowStride = planes[0].getRowStride();
final int uvRowStride = planes[1].getRowStride();
final int uvPixelStride = planes[1].getPixelStride();
ImageUtils.convertYUV420ToARGB8888(
yuvBytes[0],
yuvBytes[1],
yuvBytes[2],
previewWidth,
previewHeight,
yRowStride,
uvRowStride,
uvPixelStride,
rgbBytes);
image.close();
} catch (final Exception e) {
if (image != null) {
image.close();
}
LOGGER.e(e, "Exception!");
Trace.endSection();
return;
}
protected void processImageRGBbytes(int[] rgbBytes) {
rgbFrameBitmap.setPixels(rgbBytes, 0, previewWidth, 0, 0, previewWidth, previewHeight);
final Canvas canvas = new Canvas(croppedBitmap);
canvas.drawBitmap(rgbFrameBitmap, frameToCropTransform, null);
@@ -219,7 +153,6 @@ public class ClassifierActivity extends CameraActivity implements OnImageAvailab
if (SAVE_PREVIEW_BITMAP) {
ImageUtils.saveBitmap(croppedBitmap);
}
runInBackground(
new Runnable() {
@Override
@@ -227,15 +160,19 @@ public class ClassifierActivity extends CameraActivity implements OnImageAvailab
final long startTime = SystemClock.uptimeMillis();
final List<Classifier.Recognition> results = classifier.recognizeImage(croppedBitmap);
lastProcessingTimeMs = SystemClock.uptimeMillis() - startTime;
LOGGER.i("Detect: %s", results);
cropCopyBitmap = Bitmap.createBitmap(croppedBitmap);
if (resultsView == null) {
resultsView = (ResultsView) findViewById(R.id.results);
}
resultsView.setResults(results);
requestRender();
computing = false;
if (postInferenceCallback != null) {
postInferenceCallback.run();
}
}
});
Trace.endSection();
}
@Override

DetectorActivity.java

@@ -126,6 +126,7 @@ public class DetectorActivity extends CameraActivity implements OnImageAvailable
tracker = new MultiBoxTracker(this);
if (USE_YOLO) {
detector =
TensorFlowYoloDetector.create(
@@ -270,15 +271,17 @@ public class DetectorActivity extends CameraActivity implements OnImageAvailable
final int uvRowStride = planes[1].getRowStride();
final int uvPixelStride = planes[1].getPixelStride();
ImageUtils.convertYUV420ToARGB8888(
yuvBytes[0],
yuvBytes[1],
yuvBytes[2],
previewWidth,
previewHeight,
yRowStride,
uvRowStride,
uvPixelStride,
rgbBytes);
yuvBytes[0],
yuvBytes[1],
yuvBytes[2],
rgbBytes,
previewWidth,
previewHeight,
yRowStride,
uvRowStride,
uvPixelStride,
false);
image.close();
} catch (final Exception e) {
@@ -344,6 +347,8 @@ public class DetectorActivity extends CameraActivity implements OnImageAvailable
Trace.endSection();
}
protected void processImageRGBbytes(int[] rgbBytes) {}
@Override
protected int getLayoutId() {
return R.layout.camera_connection_fragment_tracking;

LegacyCameraConnectionFragment.java (new file)

@@ -0,0 +1,205 @@
package org.tensorflow.demo;
/*
* Copyright 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import android.app.Fragment;
import android.graphics.SurfaceTexture;
import android.os.Bundle;
import android.os.Handler;
import android.os.HandlerThread;
import android.util.SparseIntArray;
import android.view.LayoutInflater;
import android.view.Surface;
import android.view.TextureView;
import android.view.View;
import android.view.ViewGroup;
import java.io.IOException;
import android.hardware.Camera;
import android.hardware.Camera.CameraInfo;
import org.tensorflow.demo.env.Logger;
public class LegacyCameraConnectionFragment extends Fragment {
private Camera camera;
private static final Logger LOGGER = new Logger();
private Camera.PreviewCallback imageListener;
/**
* The layout identifier to inflate for this Fragment.
*/
private int layout;
public LegacyCameraConnectionFragment(
final Camera.PreviewCallback imageListener,
final int layout) {
this.imageListener = imageListener;
this.layout = layout;
}
/**
* Conversion from screen rotation to JPEG orientation.
*/
private static final SparseIntArray ORIENTATIONS = new SparseIntArray();
static {
ORIENTATIONS.append(Surface.ROTATION_0, 90);
ORIENTATIONS.append(Surface.ROTATION_90, 0);
ORIENTATIONS.append(Surface.ROTATION_180, 270);
ORIENTATIONS.append(Surface.ROTATION_270, 180);
}
/**
* {@link android.view.TextureView.SurfaceTextureListener} handles several lifecycle events on a
* {@link TextureView}.
*/
private final TextureView.SurfaceTextureListener surfaceTextureListener =
new TextureView.SurfaceTextureListener() {
@Override
public void onSurfaceTextureAvailable(
final SurfaceTexture texture, final int width, final int height) {
int index = getCameraId();
camera = Camera.open(index);
try {
Camera.Parameters parameters = camera.getParameters();
parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE);
camera.setDisplayOrientation(90);
camera.setParameters(parameters);
camera.setPreviewTexture(texture);
} catch (IOException exception) {
camera.release();
}
camera.setPreviewCallbackWithBuffer(imageListener);
Camera.Size s = camera.getParameters().getPreviewSize();
int bufferSize = s.height * s.width * 3 / 2;
camera.addCallbackBuffer(new byte[bufferSize]);
textureView.setAspectRatio(s.height, s.width);
camera.startPreview();
}
@Override
public void onSurfaceTextureSizeChanged(
final SurfaceTexture texture, final int width, final int height) {
}
@Override
public boolean onSurfaceTextureDestroyed(final SurfaceTexture texture) {
return true;
}
@Override
public void onSurfaceTextureUpdated(final SurfaceTexture texture) {
}
};
/**
* An {@link AutoFitTextureView} for camera preview.
*/
private AutoFitTextureView textureView;
/**
* An additional thread for running tasks that shouldn't block the UI.
*/
private HandlerThread backgroundThread;
@Override
public View onCreateView(
final LayoutInflater inflater, final ViewGroup container, final Bundle savedInstanceState) {
return inflater.inflate(layout, container, false);
}
@Override
public void onViewCreated(final View view, final Bundle savedInstanceState) {
textureView = (AutoFitTextureView) view.findViewById(R.id.texture);
}
@Override
public void onActivityCreated(final Bundle savedInstanceState) {
super.onActivityCreated(savedInstanceState);
}
@Override
public void onResume() {
super.onResume();
startBackgroundThread();
// When the screen is turned off and turned back on, the SurfaceTexture is already
// available, and "onSurfaceTextureAvailable" will not be called. In that case, we can open
// a camera and start preview from here (otherwise, we wait until the surface is ready in
// the SurfaceTextureListener).
if (textureView.isAvailable()) {
camera.startPreview();
} else {
textureView.setSurfaceTextureListener(surfaceTextureListener);
}
}
@Override
public void onPause() {
stopCamera();
stopBackgroundThread();
super.onPause();
}
/**
* Starts a background thread and its {@link Handler}.
*/
private void startBackgroundThread() {
backgroundThread = new HandlerThread("CameraBackground");
backgroundThread.start();
}
/**
* Stops the background thread and its {@link Handler}.
*/
private void stopBackgroundThread() {
backgroundThread.quitSafely();
try {
backgroundThread.join();
backgroundThread = null;
} catch (final InterruptedException e) {
LOGGER.e(e, "Exception!");
}
}
protected void stopCamera() {
if (camera != null) {
camera.stopPreview();
camera.setPreviewCallback(null);
camera.release();
camera = null;
}
}
private int getCameraId() {
CameraInfo ci = new CameraInfo();
for (int i = 0; i < Camera.getNumberOfCameras(); i++) {
Camera.getCameraInfo(i, ci);
if (ci.facing == CameraInfo.CAMERA_FACING_BACK)
return i;
}
return -1; // No camera found
}
}

StylizeActivity.java

@@ -28,6 +28,7 @@ import android.graphics.Paint;
import android.graphics.Paint.Style;
import android.graphics.Rect;
import android.graphics.Typeface;
import android.hardware.Camera;
import android.media.Image;
import android.media.Image.Plane;
import android.media.ImageReader;
@@ -58,7 +59,6 @@ import org.tensorflow.demo.OverlayView.DrawCallback;
import org.tensorflow.demo.env.BorderedText;
import org.tensorflow.demo.env.ImageUtils;
import org.tensorflow.demo.env.Logger;
import org.tensorflow.demo.R;
/**
* Sample activity that stylizes the camera preview according to "A Learned Representation For
@@ -97,10 +97,6 @@ public class StylizeActivity extends CameraActivity implements OnImageAvailableL
private int previewWidth = 0;
private int previewHeight = 0;
private byte[][] yuvBytes;
private int[] rgbBytes = null;
private Bitmap rgbFrameBitmap = null;
private Bitmap croppedBitmap = null;
private final float[] styleVals = new float[NUM_STYLES];
private int[] intValues;
@@ -108,18 +104,13 @@ public class StylizeActivity extends CameraActivity implements OnImageAvailableL
private int frameNum = 0;
private Bitmap cropCopyBitmap;
private Bitmap textureCopyBitmap;
private boolean computing = false;
private Matrix frameToCropTransform;
private Matrix cropToFrameTransform;
private BorderedText borderedText;
private long lastProcessingTimeMs;
private TensorFlowInferenceInterface inferenceInterface;
private int lastOtherStyle = 1;
@@ -363,9 +354,8 @@ public class StylizeActivity extends CameraActivity implements OnImageAvailableL
@Override
public void onPreviewSizeChosen(final Size size, final int rotation) {
final float textSizePx =
TypedValue.applyDimension(
TypedValue.COMPLEX_UNIT_DIP, TEXT_SIZE_DIP, getResources().getDisplayMetrics());
final float textSizePx = TypedValue.applyDimension(
TypedValue.COMPLEX_UNIT_DIP, TEXT_SIZE_DIP, getResources().getDisplayMetrics());
borderedText = new BorderedText(textSizePx);
borderedText.setTypeface(Typeface.MONOSPACE);
@@ -393,7 +383,6 @@ public class StylizeActivity extends CameraActivity implements OnImageAvailableL
grid = (GridView) findViewById(R.id.grid_layout);
grid.setAdapter(adapter);
grid.setOnTouchListener(gridTouchAdapter);
setStyle(adapter.items[0], 1.0f);
}
@@ -455,78 +444,42 @@ public class StylizeActivity extends CameraActivity implements OnImageAvailableL
}
}
@Override
public void onImageAvailable(final ImageReader reader) {
Image image = null;
private void resetPreviewBuffers() {
croppedBitmap = Bitmap.createBitmap(desiredSize, desiredSize, Config.ARGB_8888);
try {
image = reader.acquireLatestImage();
frameToCropTransform = ImageUtils.getTransformationMatrix(
previewWidth, previewHeight,
desiredSize, desiredSize,
sensorOrientation, true);
if (image == null) {
return;
}
cropToFrameTransform = new Matrix();
frameToCropTransform.invert(cropToFrameTransform);
yuvBytes = new byte[3][];
intValues = new int[desiredSize * desiredSize];
floatValues = new float[desiredSize * desiredSize * 3];
initializedSize = desiredSize;
}
if (computing) {
image.close();
return;
}
protected void processImageRGBbytes(int[] rgbBytes) {
if (desiredSize != initializedSize) {
LOGGER.i(
"Initializing at size preview size %dx%d, stylize size %d",
previewWidth, previewHeight, desiredSize);
rgbFrameBitmap = Bitmap.createBitmap(previewWidth, previewHeight, Config.ARGB_8888);
croppedBitmap = Bitmap.createBitmap(desiredSize, desiredSize, Config.ARGB_8888);
frameToCropTransform = ImageUtils.getTransformationMatrix(
previewWidth, previewHeight,
desiredSize, desiredSize,
sensorOrientation, true);
if (desiredSize != initializedSize) {
LOGGER.i(
"Initializing at size preview size %dx%d, stylize size %d",
previewWidth, previewHeight, desiredSize);
rgbBytes = new int[previewWidth * previewHeight];
rgbFrameBitmap = Bitmap.createBitmap(previewWidth, previewHeight, Config.ARGB_8888);
croppedBitmap = Bitmap.createBitmap(desiredSize, desiredSize, Config.ARGB_8888);
frameToCropTransform =
ImageUtils.getTransformationMatrix(
previewWidth, previewHeight,
desiredSize, desiredSize,
sensorOrientation, true);
cropToFrameTransform = new Matrix();
frameToCropTransform.invert(cropToFrameTransform);
yuvBytes = new byte[3][];
intValues = new int[desiredSize * desiredSize];
floatValues = new float[desiredSize * desiredSize * 3];
initializedSize = desiredSize;
}
computing = true;
Trace.beginSection("imageAvailable");
final Plane[] planes = image.getPlanes();
fillBytes(planes, yuvBytes);
final int yRowStride = planes[0].getRowStride();
final int uvRowStride = planes[1].getRowStride();
final int uvPixelStride = planes[1].getPixelStride();
ImageUtils.convertYUV420ToARGB8888(
yuvBytes[0],
yuvBytes[1],
yuvBytes[2],
previewWidth,
previewHeight,
yRowStride,
uvRowStride,
uvPixelStride,
rgbBytes);
image.close();
} catch (final Exception e) {
if (image != null) {
image.close();
}
LOGGER.e(e, "Exception!");
Trace.endSection();
return;
cropToFrameTransform = new Matrix();
frameToCropTransform.invert(cropToFrameTransform);
yuvBytes = new byte[3][];
intValues = new int[desiredSize * desiredSize];
floatValues = new float[desiredSize * desiredSize * 3];
initializedSize = desiredSize;
}
rgbFrameBitmap.setPixels(rgbBytes, 0, previewWidth, 0, 0, previewWidth, previewHeight);
final Canvas canvas = new Canvas(croppedBitmap);
canvas.drawBitmap(rgbFrameBitmap, frameToCropTransform, null);
@@ -536,24 +489,24 @@ public class StylizeActivity extends CameraActivity implements OnImageAvailableL
ImageUtils.saveBitmap(croppedBitmap);
}
runInBackground(
new Runnable() {
@Override
public void run() {
cropCopyBitmap = Bitmap.createBitmap(croppedBitmap);
final long startTime = SystemClock.uptimeMillis();
stylizeImage(croppedBitmap);
lastProcessingTimeMs = SystemClock.uptimeMillis() - startTime;
textureCopyBitmap = Bitmap.createBitmap(croppedBitmap);
requestRender();
computing = false;
}
});
Trace.endSection();
runInBackground(new Runnable() {
@Override
public void run() {
cropCopyBitmap = Bitmap.createBitmap(croppedBitmap);
final long startTime = SystemClock.uptimeMillis();
stylizeImage(croppedBitmap);
lastProcessingTimeMs = SystemClock.uptimeMillis() - startTime;
textureCopyBitmap = Bitmap.createBitmap(croppedBitmap);
requestRender();
computing = false;
if (postInferenceCallback != null) {
postInferenceCallback.run();
}
}
});
if (desiredSize != initializedSize) {
resetPreviewBuffers();
}
}
private void stylizeImage(final Bitmap bitmap) {
@@ -584,6 +537,7 @@ public class StylizeActivity extends CameraActivity implements OnImageAvailableL
}
// Copy the input data into TensorFlow.
LOGGER.i("Width: %s , Height: %s",bitmap.getWidth(),bitmap.getHeight());
inferenceInterface.feed(
INPUT_NODE, floatValues, 1, bitmap.getWidth(), bitmap.getHeight(), 3);
inferenceInterface.feed(STYLE_NODE, styleVals, NUM_STYLES);