Remove lite/examples/android example

This example has been migrated to the new TensorFlow Examples
repo @ https://github.com/tensorflow/examples/tree/master/lite.

RELNOTES=Removed TensorFlow Lite Android example (moved to new examples repo).
PiperOrigin-RevId: 246173733
Jared Duke 2019-05-01 12:04:38 -07:00 committed by TensorFlower Gardener
parent 935c86ac55
commit 195678bf99
66 changed files with 8 additions and 6181 deletions

View File

@@ -157,7 +157,7 @@ public class DetectorActivity extends CameraActivity implements OnImageAvailable
getAssets(), TF_OD_API_MODEL_FILE, TF_OD_API_LABELS_FILE, TF_OD_API_INPUT_SIZE);
cropSize = TF_OD_API_INPUT_SIZE;
} catch (final IOException e) {
LOGGER.e("Exception initializing classifier!", e);
LOGGER.e(e, "Exception initializing classifier!");
Toast toast =
Toast.makeText(
getApplicationContext(), "Classifier could not be initialized", Toast.LENGTH_SHORT);

View File

@@ -1,61 +0,0 @@
# Description:
# TensorFlow camera demo app for Android.
load("@build_bazel_rules_android//android:rules.bzl", "android_binary")
package(default_visibility = ["//visibility:public"])
licenses(["notice"]) # Apache 2.0
exports_files(["LICENSE"])
# Build the native demo lib from the original directory to reduce code
# duplication. Note that the Java counterparts (ObjectTracker.java and
# ImageUtils.java) are still duplicated.
cc_library(
name = "tensorflow_native_libs",
srcs = [
"//tensorflow/examples/android:libtensorflow_demo.so",
],
tags = [
"manual",
"notap",
],
)
android_binary(
name = "tflite_demo",
srcs = glob([
"app/src/main/java/**/*.java",
]),
aapt_version = "aapt",
# Package assets from assets dir as well as all model targets.
# Remove undesired models (and corresponding Activities in source)
# to reduce APK size.
assets = [
"//tensorflow/lite/examples/android/app/src/main/assets:labels_mobilenet_quant_v1_224.txt",
"@tflite_mobilenet_quant//:mobilenet_v1_1.0_224_quant.tflite",
"@tflite_conv_actions_frozen//:conv_actions_frozen.tflite",
"//tensorflow/lite/examples/android/app/src/main/assets:conv_actions_labels.txt",
"@tflite_mobilenet_ssd//:mobilenet_ssd.tflite",
"@tflite_mobilenet_ssd_quant//:detect.tflite",
"//tensorflow/lite/examples/android/app/src/main/assets:box_priors.txt",
"//tensorflow/lite/examples/android/app/src/main/assets:labelmap.txt",
],
assets_dir = "",
custom_package = "org.tensorflow.lite.demo",
inline_constants = 1,
manifest = "app/src/main/AndroidManifest.xml",
nocompress_extensions = [
".tflite",
],
resource_files = glob(["app/src/main/res/**"]),
tags = [
"manual",
"notap",
],
deps = [
":tensorflow_native_libs",
"//tensorflow/lite/java:tensorflowlite",
],
)

View File

@@ -1,19 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<module external.linked.project.id="android" external.linked.project.path="$MODULE_DIR$" external.root.project.path="$MODULE_DIR$" external.system.id="GRADLE" type="JAVA_MODULE" version="4">
<component name="FacetManager">
<facet type="java-gradle" name="Java-Gradle">
<configuration>
<option name="BUILD_FOLDER_PATH" value="$MODULE_DIR$/build" />
<option name="BUILDABLE" value="false" />
</configuration>
</facet>
</component>
<component name="NewModuleRootManager" LANGUAGE_LEVEL="JDK_1_8" inherit-compiler-output="true">
<exclude-output />
<content url="file://$MODULE_DIR$">
<excludeFolder url="file://$MODULE_DIR$/.gradle" />
</content>
<orderEntry type="inheritedJdk" />
<orderEntry type="sourceFolder" forTests="false" />
</component>
</module>

View File

@@ -1,54 +1,9 @@
# TF Lite Android App Example
# TF Lite Android Example (Deprecated)
A simple Android example that demonstrates image classification and object
detection using the camera, as well as speech recognition using the microphone.
This example has been moved to the new
[TensorFlow examples repo](https://github.com/tensorflow/examples), and split
into several distinct examples:
## Building in Android Studio with TensorFlow Lite AAR from JCenter.
The build.gradle is configured to use TensorFlow Lite's nightly build.
If you see a build error related to compatibility with TensorFlow Lite's Java
API (for example: method X is undefined for type Interpreter), there has likely been
a backwards compatible change to the API. You will need to pull new app code
that's compatible with the nightly build, and you may first need to wait a few days
for our external and internal code to merge.
## Building from Source with Bazel
1. Follow the [Bazel steps for the TF Demo App](https://github.com/tensorflow/tensorflow/tree/master/tensorflow/examples/android#bazel):
1. [Install Bazel and Android Prerequisites](https://github.com/tensorflow/tensorflow/tree/master/tensorflow/examples/android#install-bazel-and-android-prerequisites).
It's easiest with Android Studio.
- You'll need at least SDK version 23.
- Make sure to install the latest version of Bazel. Some distributions
ship with Bazel 0.5.4, which is too old.
- Bazel requires Android Build Tools `26.0.1` or higher.
- You also need to install the Android Support Repository, available
through Android Studio under `Android SDK Manager -> SDK Tools ->
Android Support Repository`.
2. [Edit your `WORKSPACE`](https://github.com/tensorflow/tensorflow/tree/master/tensorflow/examples/android#edit-workspace)
to add SDK and NDK targets.
NOTE: As long as you have the SDK and NDK installed, the `./configure`
script will create these rules for you. Answer "Yes" when the script asks
to automatically configure the `./WORKSPACE`.
- Make sure the `api_level` in `WORKSPACE` is set to an SDK version that
you have installed.
- By default, Android Studio will install the SDK to `~/Android/Sdk` and
the NDK to `~/Android/Sdk/ndk-bundle`.
2. Build this demo app with Bazel. The demo needs C++11. We configure the `--fat_apk_cpu` flag to package support for 4 hardware variants. You may replace it with `--config=android_arm64` on a 64-bit device or `--config=android_arm` for a 32-bit device:
```shell
bazel build -c opt --cxxopt='--std=c++11' --fat_apk_cpu=x86,x86_64,arm64-v8a,armeabi-v7a \
//tensorflow/lite/examples/android:tflite_demo
```
3. Install the demo on a
[debug-enabled device](https://github.com/tensorflow/tensorflow/tree/master/tensorflow/examples/android#install):
```shell
adb install bazel-bin/tensorflow/lite/examples/android/tflite_demo.apk
```
* [Image Classification](https://github.com/tensorflow/examples/tree/master/lite/examples/image_classification/android)
* [Object Detection](https://github.com/tensorflow/examples/tree/master/lite/examples/object_detection/android)
* [Speech Commands](https://github.com/tensorflow/examples/tree/master/lite/examples/speech_commands/android)
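For reference, the Java API the README refers to centers on `org.tensorflow.lite.Interpreter`. Below is a minimal, illustrative sketch of how the demo's classifiers load a bundled model and run one inference; the asset name and the 1001-label output shape are assumptions taken from the quantized MobileNet settings used elsewhere in this app, and the class itself is hypothetical, not part of the removed sources.

```java
import android.content.res.AssetFileDescriptor;
import android.content.res.AssetManager;
import java.io.FileInputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.MappedByteBuffer;
import java.nio.channels.FileChannel;
import org.tensorflow.lite.Interpreter;

/** Hypothetical sketch of the TF Lite Java API calls used by the demo classifiers. */
public final class TfliteUsageSketch {

  /** Memory-maps a .tflite model bundled in the APK's assets. */
  static MappedByteBuffer loadModelFile(AssetManager assets, String modelFilename)
      throws IOException {
    try (AssetFileDescriptor fd = assets.openFd(modelFilename);
        FileInputStream input = new FileInputStream(fd.getFileDescriptor());
        FileChannel channel = input.getChannel()) {
      return channel.map(
          FileChannel.MapMode.READ_ONLY, fd.getStartOffset(), fd.getDeclaredLength());
    }
  }

  /** Runs one inference; imgData must hold 224x224x3 uint8 pixel values. */
  static byte[][] classify(AssetManager assets, ByteBuffer imgData) throws IOException {
    byte[][] labelProb = new byte[1][1001]; // Assumed label count of the quantized MobileNet.
    try (Interpreter tflite =
        new Interpreter(loadModelFile(assets, "mobilenet_v1_1.0_224_quant.tflite"))) {
      tflite.run(imgData, labelProb);
    }
    return labelProb;
  }
}
```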

View File

@@ -1,50 +0,0 @@
apply plugin: 'com.android.application'
// import DownloadModels task
project.ext.ASSET_DIR = projectDir.toString() + '/src/main/assets'
project.ext.TMP_DIR = project.buildDir.toString() + '/downloads'
// Download default models; if you wish to use your own models then
// place them in the "assets" directory and comment out this line.
apply from: "download-models.gradle"
android {
compileSdkVersion 26
buildToolsVersion '28.0.3'
defaultConfig {
applicationId "org.tensorflow.lite.demo"
minSdkVersion 15
targetSdkVersion 26
versionCode 1
versionName "1.0"
}
lintOptions {
abortOnError false
}
buildTypes {
release {
minifyEnabled false
proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro'
}
}
aaptOptions {
noCompress "tflite"
}
compileOptions {
sourceCompatibility JavaVersion.VERSION_1_8
targetCompatibility JavaVersion.VERSION_1_8
}
}
repositories {
maven {
url 'https://google.bintray.com/tensorflow'
}
}
dependencies {
implementation fileTree(dir: 'libs', include: ['*.jar'])
implementation 'org.tensorflow:tensorflow-lite:0.0.0-nightly'
}

View File

@@ -1,78 +0,0 @@
/*
* download-models.gradle
* Downloads model files from ${MODEL_URL} into application's asset folder
* Input:
* project.ext.TMP_DIR: absolute path to hold downloaded zip files
* project.ext.ASSET_DIR: absolute path to save unzipped model files
* Output:
* 3 model files will be downloaded into given folder of ext.ASSET_DIR
*/
// hard coded model files
def models = ['https://storage.googleapis.com/download.tensorflow.org/models/tflite/conv_actions_tflite.zip',
'https://storage.googleapis.com/download.tensorflow.org/models/tflite/mobilenet_ssd_tflite_v1.zip',
'https://storage.googleapis.com/download.tensorflow.org/models/tflite/coco_ssd_mobilenet_v1_1.0_quant_2018_06_29.zip',
'http://download.tensorflow.org/models/mobilenet_v1_2018_02_22/mobilenet_v1_1.0_224.tgz',
'http://download.tensorflow.org/models/mobilenet_v1_2018_08_02/mobilenet_v1_1.0_224_quant.tgz']
// Root URL for model archives
def MODEL_URL = 'https://storage.googleapis.com/download.tensorflow.org/models/tflite'
buildscript {
repositories {
jcenter()
}
dependencies {
classpath 'de.undercouch:gradle-download-task:3.2.0'
}
}
import de.undercouch.gradle.tasks.download.Download
task downloadFile(type: Download){
for (modelUrl in models) {
def localFile = modelUrl.split("/")[-1]
println "Downloading ${localFile} from ${modelUrl}"
src modelUrl
}
dest new File(project.ext.TMP_DIR)
overwrite true
}
task extractModels(type: Copy) {
for (f in models) {
def localFile = f.split("/")[-1]
def localExt = localFile.split("[.]")[-1]
if (localExt == "tgz") {
from tarTree(project.ext.TMP_DIR + '/' + localFile)
} else {
from zipTree(project.ext.TMP_DIR + '/' + localFile)
}
}
into file(project.ext.ASSET_DIR)
fileMode 0644
exclude '**/LICENSE'
def needDownload = false
for (f in models) {
def localFile = f.split("/")[-1]
if (!(new File(project.ext.TMP_DIR + '/' + localFile)).exists()) {
needDownload = true
}
}
if (needDownload) {
dependsOn downloadFile
}
}
tasks.whenTaskAdded { task ->
if (task.name == 'assembleDebug') {
task.dependsOn 'extractModels'
}
if (task.name == 'assembleRelease') {
task.dependsOn 'extractModels'
}
}

View File

@@ -1,60 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<!--
Copyright 2016 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="org.tensorflow.lite.demo">
<uses-permission android:name="android.permission.CAMERA" />
<uses-feature android:name="android.hardware.camera" />
<uses-feature android:name="android.hardware.camera.autofocus" />
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE"/>
<uses-permission android:name="android.permission.RECORD_AUDIO" />
<application android:allowBackup="true"
android:label="@string/app_name"
android:icon="@drawable/ic_launcher"
android:theme="@style/MaterialTheme">
<activity android:name="org.tensorflow.demo.ClassifierActivity"
android:screenOrientation="portrait"
android:label="@string/activity_name_classification">
<intent-filter>
<action android:name="android.intent.action.MAIN" />
<category android:name="android.intent.category.LAUNCHER" />
</intent-filter>
</activity>
<activity android:name="org.tensorflow.demo.DetectorActivity"
android:screenOrientation="portrait"
android:label="@string/activity_name_detection">
<intent-filter>
<action android:name="android.intent.action.MAIN" />
<category android:name="android.intent.category.LAUNCHER" />
</intent-filter>
</activity>
<activity android:name="org.tensorflow.demo.SpeechActivity"
android:screenOrientation="portrait"
android:label="@string/activity_name_speech">
<intent-filter>
<action android:name="android.intent.action.MAIN" />
<category android:name="android.intent.category.LAUNCHER" />
</intent-filter>
</activity>
</application>
</manifest>

View File

@@ -1,12 +0,0 @@
_silence_
_unknown_
yes
no
up
down
left
right
on
off
stop
go

View File

@@ -1,91 +0,0 @@
???
person
bicycle
car
motorcycle
airplane
bus
train
truck
boat
traffic light
fire hydrant
???
stop sign
parking meter
bench
bird
cat
dog
horse
sheep
cow
elephant
bear
zebra
giraffe
???
backpack
umbrella
???
???
handbag
tie
suitcase
frisbee
skis
snowboard
sports ball
kite
baseball bat
baseball glove
skateboard
surfboard
tennis racket
bottle
???
wine glass
cup
fork
knife
spoon
bowl
banana
apple
sandwich
orange
broccoli
carrot
hot dog
pizza
donut
cake
chair
couch
potted plant
bed
???
dining table
???
???
toilet
???
tv
laptop
mouse
remote
keyboard
cell phone
microwave
oven
toaster
sink
refrigerator
???
book
clock
vase
scissors
teddy bear
hair drier
toothbrush

View File

@@ -1,74 +0,0 @@
/*
* Copyright 2016 The TensorFlow Authors. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.tensorflow.demo;
import android.content.Context;
import android.util.AttributeSet;
import android.view.TextureView;
/**
* A {@link TextureView} that can be adjusted to a specified aspect ratio.
*/
public class AutoFitTextureView extends TextureView {
private int ratioWidth = 0;
private int ratioHeight = 0;
public AutoFitTextureView(final Context context) {
this(context, null);
}
public AutoFitTextureView(final Context context, final AttributeSet attrs) {
this(context, attrs, 0);
}
public AutoFitTextureView(final Context context, final AttributeSet attrs, final int defStyle) {
super(context, attrs, defStyle);
}
/**
* Sets the aspect ratio for this view. The size of the view will be measured based on the ratio
* calculated from the parameters. Note that the actual sizes of the parameters don't matter;
* that is, calling setAspectRatio(2, 3) and setAspectRatio(4, 6) produce the same result.
*
* @param width Relative horizontal size
* @param height Relative vertical size
*/
public void setAspectRatio(final int width, final int height) {
if (width < 0 || height < 0) {
throw new IllegalArgumentException("Size cannot be negative.");
}
ratioWidth = width;
ratioHeight = height;
requestLayout();
}
@Override
protected void onMeasure(final int widthMeasureSpec, final int heightMeasureSpec) {
super.onMeasure(widthMeasureSpec, heightMeasureSpec);
final int width = MeasureSpec.getSize(widthMeasureSpec);
final int height = MeasureSpec.getSize(heightMeasureSpec);
if (0 == ratioWidth || 0 == ratioHeight) {
setMeasuredDimension(width, height);
} else {
if (width < height * ratioWidth / ratioHeight) {
setMeasuredDimension(width, width * ratioHeight / ratioWidth);
} else {
setMeasuredDimension(height * ratioWidth / ratioHeight, height);
}
}
}
}

View File

@@ -1,450 +0,0 @@
/*
* Copyright 2016 The TensorFlow Authors. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.tensorflow.demo;
import android.Manifest;
import android.app.Activity;
import android.app.Fragment;
import android.content.Context;
import android.content.pm.PackageManager;
import android.hardware.Camera;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.media.Image;
import android.media.Image.Plane;
import android.media.ImageReader;
import android.media.ImageReader.OnImageAvailableListener;
import android.os.Build;
import android.os.Bundle;
import android.os.Handler;
import android.os.HandlerThread;
import android.os.Trace;
import android.util.Size;
import android.view.KeyEvent;
import android.view.Surface;
import android.view.WindowManager;
import android.widget.Toast;
import java.nio.ByteBuffer;
import org.tensorflow.demo.env.ImageUtils;
import org.tensorflow.demo.env.Logger;
import org.tensorflow.lite.demo.R; // Explicit import needed for internal Google builds.
public abstract class CameraActivity extends Activity
implements OnImageAvailableListener, Camera.PreviewCallback {
private static final Logger LOGGER = new Logger();
private static final int PERMISSIONS_REQUEST = 1;
private static final String PERMISSION_CAMERA = Manifest.permission.CAMERA;
private static final String PERMISSION_STORAGE = Manifest.permission.WRITE_EXTERNAL_STORAGE;
private boolean debug = false;
private Handler handler;
private HandlerThread handlerThread;
private boolean useCamera2API;
private boolean isProcessingFrame = false;
private byte[][] yuvBytes = new byte[3][];
private int[] rgbBytes = null;
private int yRowStride;
protected int previewWidth = 0;
protected int previewHeight = 0;
private Runnable postInferenceCallback;
private Runnable imageConverter;
@Override
protected void onCreate(final Bundle savedInstanceState) {
LOGGER.d("onCreate " + this);
super.onCreate(null);
getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
setContentView(R.layout.activity_camera);
if (hasPermission()) {
setFragment();
} else {
requestPermission();
}
}
protected int[] getRgbBytes() {
imageConverter.run();
return rgbBytes;
}
protected int getLuminanceStride() {
return yRowStride;
}
protected byte[] getLuminance() {
return yuvBytes[0];
}
/**
* Callback for android.hardware.Camera API
*/
@Override
public void onPreviewFrame(final byte[] bytes, final Camera camera) {
if (isProcessingFrame) {
LOGGER.w("Dropping frame!");
return;
}
try {
// Initialize the storage bitmaps once when the resolution is known.
if (rgbBytes == null) {
Camera.Size previewSize = camera.getParameters().getPreviewSize();
previewHeight = previewSize.height;
previewWidth = previewSize.width;
rgbBytes = new int[previewWidth * previewHeight];
onPreviewSizeChosen(new Size(previewSize.width, previewSize.height), 90);
}
} catch (final Exception e) {
LOGGER.e(e, "Exception!");
return;
}
isProcessingFrame = true;
yuvBytes[0] = bytes;
yRowStride = previewWidth;
imageConverter =
new Runnable() {
@Override
public void run() {
ImageUtils.convertYUV420SPToARGB8888(bytes, previewWidth, previewHeight, rgbBytes);
}
};
postInferenceCallback =
new Runnable() {
@Override
public void run() {
camera.addCallbackBuffer(bytes);
isProcessingFrame = false;
}
};
processImage();
}
/**
* Callback for Camera2 API
*/
@Override
public void onImageAvailable(final ImageReader reader) {
// We need to wait until we have some size from onPreviewSizeChosen
if (previewWidth == 0 || previewHeight == 0) {
return;
}
if (rgbBytes == null) {
rgbBytes = new int[previewWidth * previewHeight];
}
try {
final Image image = reader.acquireLatestImage();
if (image == null) {
return;
}
if (isProcessingFrame) {
image.close();
return;
}
isProcessingFrame = true;
Trace.beginSection("imageAvailable");
final Plane[] planes = image.getPlanes();
fillBytes(planes, yuvBytes);
yRowStride = planes[0].getRowStride();
final int uvRowStride = planes[1].getRowStride();
final int uvPixelStride = planes[1].getPixelStride();
imageConverter =
new Runnable() {
@Override
public void run() {
ImageUtils.convertYUV420ToARGB8888(
yuvBytes[0],
yuvBytes[1],
yuvBytes[2],
previewWidth,
previewHeight,
yRowStride,
uvRowStride,
uvPixelStride,
rgbBytes);
}
};
postInferenceCallback =
new Runnable() {
@Override
public void run() {
image.close();
isProcessingFrame = false;
}
};
processImage();
} catch (final Exception e) {
LOGGER.e(e, "Exception!");
Trace.endSection();
return;
}
Trace.endSection();
}
@Override
public synchronized void onStart() {
LOGGER.d("onStart " + this);
super.onStart();
}
@Override
public synchronized void onResume() {
LOGGER.d("onResume " + this);
super.onResume();
handlerThread = new HandlerThread("inference");
handlerThread.start();
handler = new Handler(handlerThread.getLooper());
}
@Override
public synchronized void onPause() {
LOGGER.d("onPause " + this);
if (!isFinishing()) {
LOGGER.d("Requesting finish");
finish();
}
handlerThread.quitSafely();
try {
handlerThread.join();
handlerThread = null;
handler = null;
} catch (final InterruptedException e) {
LOGGER.e(e, "Exception!");
}
super.onPause();
}
@Override
public synchronized void onStop() {
LOGGER.d("onStop " + this);
super.onStop();
}
@Override
public synchronized void onDestroy() {
LOGGER.d("onDestroy " + this);
super.onDestroy();
}
protected synchronized void runInBackground(final Runnable r) {
if (handler != null) {
handler.post(r);
}
}
@Override
public void onRequestPermissionsResult(
final int requestCode, final String[] permissions, final int[] grantResults) {
if (requestCode == PERMISSIONS_REQUEST) {
if (grantResults.length > 0
&& grantResults[0] == PackageManager.PERMISSION_GRANTED
&& grantResults[1] == PackageManager.PERMISSION_GRANTED) {
setFragment();
} else {
requestPermission();
}
}
}
private boolean hasPermission() {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
return checkSelfPermission(PERMISSION_CAMERA) == PackageManager.PERMISSION_GRANTED &&
checkSelfPermission(PERMISSION_STORAGE) == PackageManager.PERMISSION_GRANTED;
} else {
return true;
}
}
private void requestPermission() {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
if (shouldShowRequestPermissionRationale(PERMISSION_CAMERA) ||
shouldShowRequestPermissionRationale(PERMISSION_STORAGE)) {
Toast.makeText(CameraActivity.this,
"Camera AND storage permission are required for this demo", Toast.LENGTH_LONG).show();
}
requestPermissions(new String[] {PERMISSION_CAMERA, PERMISSION_STORAGE}, PERMISSIONS_REQUEST);
}
}
// Returns true if the device supports the required hardware level, or better.
private boolean isHardwareLevelSupported(
CameraCharacteristics characteristics, int requiredLevel) {
int deviceLevel = characteristics.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL);
if (deviceLevel == CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY) {
return requiredLevel == deviceLevel;
}
// deviceLevel is not LEGACY, can use numerical sort
return requiredLevel <= deviceLevel;
}
private String chooseCamera() {
final CameraManager manager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);
try {
for (final String cameraId : manager.getCameraIdList()) {
final CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);
// We don't use a front facing camera in this sample.
final Integer facing = characteristics.get(CameraCharacteristics.LENS_FACING);
if (facing != null && facing == CameraCharacteristics.LENS_FACING_FRONT) {
continue;
}
final StreamConfigurationMap map =
characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
if (map == null) {
continue;
}
// Fallback to camera1 API for internal cameras that don't have full support.
// This should help with legacy situations where using the camera2 API causes
// distorted or otherwise broken previews.
useCamera2API = (facing == CameraCharacteristics.LENS_FACING_EXTERNAL)
|| isHardwareLevelSupported(characteristics,
CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_FULL);
LOGGER.i("Camera API lv2?: %s", useCamera2API);
return cameraId;
}
} catch (CameraAccessException e) {
LOGGER.e(e, "Not allowed to access camera");
}
return null;
}
protected void setFragment() {
String cameraId = chooseCamera();
Fragment fragment;
if (useCamera2API) {
CameraConnectionFragment camera2Fragment =
CameraConnectionFragment.newInstance(
new CameraConnectionFragment.ConnectionCallback() {
@Override
public void onPreviewSizeChosen(final Size size, final int rotation) {
previewHeight = size.getHeight();
previewWidth = size.getWidth();
CameraActivity.this.onPreviewSizeChosen(size, rotation);
}
},
this,
getLayoutId(),
getDesiredPreviewFrameSize());
camera2Fragment.setCamera(cameraId);
fragment = camera2Fragment;
} else {
fragment =
new LegacyCameraConnectionFragment(this, getLayoutId(), getDesiredPreviewFrameSize());
}
getFragmentManager()
.beginTransaction()
.replace(R.id.container, fragment)
.commit();
}
protected void fillBytes(final Plane[] planes, final byte[][] yuvBytes) {
// Because of the variable row stride it's not possible to know in
// advance the actual necessary dimensions of the yuv planes.
for (int i = 0; i < planes.length; ++i) {
final ByteBuffer buffer = planes[i].getBuffer();
if (yuvBytes[i] == null) {
LOGGER.d("Initializing buffer %d at size %d", i, buffer.capacity());
yuvBytes[i] = new byte[buffer.capacity()];
}
buffer.get(yuvBytes[i]);
}
}
public boolean isDebug() {
return debug;
}
public void requestRender() {
final OverlayView overlay = (OverlayView) findViewById(R.id.debug_overlay);
if (overlay != null) {
overlay.postInvalidate();
}
}
public void addCallback(final OverlayView.DrawCallback callback) {
final OverlayView overlay = (OverlayView) findViewById(R.id.debug_overlay);
if (overlay != null) {
overlay.addCallback(callback);
}
}
public void onSetDebug(final boolean debug) {}
@Override
public boolean onKeyDown(final int keyCode, final KeyEvent event) {
if (keyCode == KeyEvent.KEYCODE_VOLUME_DOWN || keyCode == KeyEvent.KEYCODE_VOLUME_UP) {
debug = !debug;
requestRender();
onSetDebug(debug);
return true;
}
return super.onKeyDown(keyCode, event);
}
protected void readyForNextImage() {
if (postInferenceCallback != null) {
postInferenceCallback.run();
}
}
protected int getScreenOrientation() {
switch (getWindowManager().getDefaultDisplay().getRotation()) {
case Surface.ROTATION_270:
return 270;
case Surface.ROTATION_180:
return 180;
case Surface.ROTATION_90:
return 90;
default:
return 0;
}
}
protected abstract void processImage();
protected abstract void onPreviewSizeChosen(final Size size, final int rotation);
protected abstract int getLayoutId();
protected abstract Size getDesiredPreviewFrameSize();
}

View File

@@ -1,634 +0,0 @@
/*
* Copyright 2016 The TensorFlow Authors. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.tensorflow.demo;
import android.app.Activity;
import android.app.AlertDialog;
import android.app.Dialog;
import android.app.DialogFragment;
import android.app.Fragment;
import android.content.Context;
import android.content.DialogInterface;
import android.content.res.Configuration;
import android.graphics.ImageFormat;
import android.graphics.Matrix;
import android.graphics.RectF;
import android.graphics.SurfaceTexture;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.CaptureResult;
import android.hardware.camera2.TotalCaptureResult;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.media.ImageReader;
import android.media.ImageReader.OnImageAvailableListener;
import android.os.Bundle;
import android.os.Handler;
import android.os.HandlerThread;
import android.text.TextUtils;
import android.util.Size;
import android.util.SparseIntArray;
import android.view.LayoutInflater;
import android.view.Surface;
import android.view.TextureView;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Toast;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;
import org.tensorflow.demo.env.Logger;
import org.tensorflow.lite.demo.R; // Explicit import needed for internal Google builds.
public class CameraConnectionFragment extends Fragment {
private static final Logger LOGGER = new Logger();
/**
* The camera preview size will be chosen to be the smallest frame by pixel size capable of
* containing a DESIRED_SIZE x DESIRED_SIZE square.
*/
private static final int MINIMUM_PREVIEW_SIZE = 320;
/**
* Conversion from screen rotation to JPEG orientation.
*/
private static final SparseIntArray ORIENTATIONS = new SparseIntArray();
private static final String FRAGMENT_DIALOG = "dialog";
static {
ORIENTATIONS.append(Surface.ROTATION_0, 90);
ORIENTATIONS.append(Surface.ROTATION_90, 0);
ORIENTATIONS.append(Surface.ROTATION_180, 270);
ORIENTATIONS.append(Surface.ROTATION_270, 180);
}
/**
* {@link android.view.TextureView.SurfaceTextureListener} handles several lifecycle events on a
* {@link TextureView}.
*/
private final TextureView.SurfaceTextureListener surfaceTextureListener =
new TextureView.SurfaceTextureListener() {
@Override
public void onSurfaceTextureAvailable(
final SurfaceTexture texture, final int width, final int height) {
openCamera(width, height);
}
@Override
public void onSurfaceTextureSizeChanged(
final SurfaceTexture texture, final int width, final int height) {
configureTransform(width, height);
}
@Override
public boolean onSurfaceTextureDestroyed(final SurfaceTexture texture) {
return true;
}
@Override
public void onSurfaceTextureUpdated(final SurfaceTexture texture) {}
};
/**
* Callback for Activities to use to initialize their data once the
* selected preview size is known.
*/
public interface ConnectionCallback {
void onPreviewSizeChosen(Size size, int cameraRotation);
}
/**
* ID of the current {@link CameraDevice}.
*/
private String cameraId;
/**
* An {@link AutoFitTextureView} for camera preview.
*/
private AutoFitTextureView textureView;
/**
* A {@link CameraCaptureSession } for camera preview.
*/
private CameraCaptureSession captureSession;
/**
* A reference to the opened {@link CameraDevice}.
*/
private CameraDevice cameraDevice;
/**
* The rotation in degrees of the camera sensor from the display.
*/
private Integer sensorOrientation;
/**
* The {@link android.util.Size} of camera preview.
*/
private Size previewSize;
/**
* {@link android.hardware.camera2.CameraDevice.StateCallback}
* is called when {@link CameraDevice} changes its state.
*/
private final CameraDevice.StateCallback stateCallback =
new CameraDevice.StateCallback() {
@Override
public void onOpened(final CameraDevice cd) {
// This method is called when the camera is opened. We start camera preview here.
cameraOpenCloseLock.release();
cameraDevice = cd;
createCameraPreviewSession();
}
@Override
public void onDisconnected(final CameraDevice cd) {
cameraOpenCloseLock.release();
cd.close();
cameraDevice = null;
}
@Override
public void onError(final CameraDevice cd, final int error) {
cameraOpenCloseLock.release();
cd.close();
cameraDevice = null;
final Activity activity = getActivity();
if (null != activity) {
activity.finish();
}
}
};
/**
* An additional thread for running tasks that shouldn't block the UI.
*/
private HandlerThread backgroundThread;
/**
* A {@link Handler} for running tasks in the background.
*/
private Handler backgroundHandler;
/**
* An {@link ImageReader} that handles preview frame capture.
*/
private ImageReader previewReader;
/**
* {@link android.hardware.camera2.CaptureRequest.Builder} for the camera preview
*/
private CaptureRequest.Builder previewRequestBuilder;
/**
* {@link CaptureRequest} generated by {@link #previewRequestBuilder}
*/
private CaptureRequest previewRequest;
/**
* A {@link Semaphore} to prevent the app from exiting before closing the camera.
*/
private final Semaphore cameraOpenCloseLock = new Semaphore(1);
/**
* A {@link OnImageAvailableListener} to receive frames as they are available.
*/
private final OnImageAvailableListener imageListener;
/** The input size in pixels desired by TensorFlow (width and height of a square bitmap). */
private final Size inputSize;
/**
* The layout identifier to inflate for this Fragment.
*/
private final int layout;
private final ConnectionCallback cameraConnectionCallback;
private CameraConnectionFragment(
final ConnectionCallback connectionCallback,
final OnImageAvailableListener imageListener,
final int layout,
final Size inputSize) {
this.cameraConnectionCallback = connectionCallback;
this.imageListener = imageListener;
this.layout = layout;
this.inputSize = inputSize;
}
/**
* Shows a {@link Toast} on the UI thread.
*
* @param text The message to show
*/
private void showToast(final String text) {
final Activity activity = getActivity();
if (activity != null) {
activity.runOnUiThread(
new Runnable() {
@Override
public void run() {
Toast.makeText(activity, text, Toast.LENGTH_SHORT).show();
}
});
}
}
/**
* Given {@code choices} of {@code Size}s supported by a camera, chooses the smallest one whose
* width and height are at least as large as the minimum of both, or an exact match if possible.
*
* @param choices The list of sizes that the camera supports for the intended output class
* @param width The minimum desired width
* @param height The minimum desired height
* @return The optimal {@code Size}, or an arbitrary one if none were big enough
*/
protected static Size chooseOptimalSize(final Size[] choices, final int width, final int height) {
final int minSize = Math.max(Math.min(width, height), MINIMUM_PREVIEW_SIZE);
final Size desiredSize = new Size(width, height);
// Collect the supported resolutions that are at least as big as the preview Surface
boolean exactSizeFound = false;
final List<Size> bigEnough = new ArrayList<Size>();
final List<Size> tooSmall = new ArrayList<Size>();
for (final Size option : choices) {
if (option.equals(desiredSize)) {
// Set the size but don't return yet so that remaining sizes will still be logged.
exactSizeFound = true;
}
if (option.getHeight() >= minSize && option.getWidth() >= minSize) {
bigEnough.add(option);
} else {
tooSmall.add(option);
}
}
LOGGER.i("Desired size: " + desiredSize + ", min size: " + minSize + "x" + minSize);
LOGGER.i("Valid preview sizes: [" + TextUtils.join(", ", bigEnough) + "]");
LOGGER.i("Rejected preview sizes: [" + TextUtils.join(", ", tooSmall) + "]");
if (exactSizeFound) {
LOGGER.i("Exact size match found.");
return desiredSize;
}
// Pick the smallest of those, assuming we found any
if (bigEnough.size() > 0) {
final Size chosenSize = Collections.min(bigEnough, new CompareSizesByArea());
LOGGER.i("Chosen size: " + chosenSize.getWidth() + "x" + chosenSize.getHeight());
return chosenSize;
} else {
LOGGER.e("Couldn't find any suitable preview size");
return choices[0];
}
}
public static CameraConnectionFragment newInstance(
final ConnectionCallback callback,
final OnImageAvailableListener imageListener,
final int layout,
final Size inputSize) {
return new CameraConnectionFragment(callback, imageListener, layout, inputSize);
}
@Override
public View onCreateView(
final LayoutInflater inflater, final ViewGroup container, final Bundle savedInstanceState) {
return inflater.inflate(layout, container, false);
}
@Override
public void onViewCreated(final View view, final Bundle savedInstanceState) {
textureView = (AutoFitTextureView) view.findViewById(R.id.texture);
}
@Override
public void onActivityCreated(final Bundle savedInstanceState) {
super.onActivityCreated(savedInstanceState);
}
@Override
public void onResume() {
super.onResume();
startBackgroundThread();
// When the screen is turned off and turned back on, the SurfaceTexture is already
// available, and "onSurfaceTextureAvailable" will not be called. In that case, we can open
// a camera and start preview from here (otherwise, we wait until the surface is ready in
// the SurfaceTextureListener).
if (textureView.isAvailable()) {
openCamera(textureView.getWidth(), textureView.getHeight());
} else {
textureView.setSurfaceTextureListener(surfaceTextureListener);
}
}
@Override
public void onPause() {
closeCamera();
stopBackgroundThread();
super.onPause();
}
public void setCamera(String cameraId) {
this.cameraId = cameraId;
}
/**
* Sets up member variables related to camera.
*/
private void setUpCameraOutputs() {
final Activity activity = getActivity();
final CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
try {
final CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);
final StreamConfigurationMap map =
characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
sensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
// Danger, W.R.! Attempting to use too large a preview size could exceed the camera
// bus' bandwidth limitation, resulting in gorgeous previews but the storage of
// garbage capture data.
previewSize =
chooseOptimalSize(map.getOutputSizes(SurfaceTexture.class),
inputSize.getWidth(),
inputSize.getHeight());
// We fit the aspect ratio of TextureView to the size of preview we picked.
final int orientation = getResources().getConfiguration().orientation;
if (orientation == Configuration.ORIENTATION_LANDSCAPE) {
textureView.setAspectRatio(previewSize.getWidth(), previewSize.getHeight());
} else {
textureView.setAspectRatio(previewSize.getHeight(), previewSize.getWidth());
}
} catch (final CameraAccessException e) {
LOGGER.e(e, "Exception!");
} catch (final NullPointerException e) {
// Currently an NPE is thrown when the Camera2 API is used but not supported on the
// device this code runs on.
// TODO(andrewharp): abstract ErrorDialog/RuntimeException handling out into new method and
// reuse throughout app.
ErrorDialog.newInstance(getString(R.string.camera_error))
.show(getChildFragmentManager(), FRAGMENT_DIALOG);
throw new RuntimeException(getString(R.string.camera_error));
}
cameraConnectionCallback.onPreviewSizeChosen(previewSize, sensorOrientation);
}
/**
* Opens the camera specified by {@link CameraConnectionFragment#cameraId}.
*/
private void openCamera(final int width, final int height) {
setUpCameraOutputs();
configureTransform(width, height);
final Activity activity = getActivity();
final CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
try {
if (!cameraOpenCloseLock.tryAcquire(2500, TimeUnit.MILLISECONDS)) {
throw new RuntimeException("Time out waiting to lock camera opening.");
}
manager.openCamera(cameraId, stateCallback, backgroundHandler);
} catch (final CameraAccessException e) {
LOGGER.e(e, "Exception!");
} catch (final InterruptedException e) {
throw new RuntimeException("Interrupted while trying to lock camera opening.", e);
}
}
/**
* Closes the current {@link CameraDevice}.
*/
private void closeCamera() {
try {
cameraOpenCloseLock.acquire();
if (null != captureSession) {
captureSession.close();
captureSession = null;
}
if (null != cameraDevice) {
cameraDevice.close();
cameraDevice = null;
}
if (null != previewReader) {
previewReader.close();
previewReader = null;
}
} catch (final InterruptedException e) {
throw new RuntimeException("Interrupted while trying to lock camera closing.", e);
} finally {
cameraOpenCloseLock.release();
}
}
/**
* Starts a background thread and its {@link Handler}.
*/
private void startBackgroundThread() {
backgroundThread = new HandlerThread("ImageListener");
backgroundThread.start();
backgroundHandler = new Handler(backgroundThread.getLooper());
}
/**
* Stops the background thread and its {@link Handler}.
*/
private void stopBackgroundThread() {
backgroundThread.quitSafely();
try {
backgroundThread.join();
backgroundThread = null;
backgroundHandler = null;
} catch (final InterruptedException e) {
LOGGER.e(e, "Exception!");
}
}
private final CameraCaptureSession.CaptureCallback captureCallback =
new CameraCaptureSession.CaptureCallback() {
@Override
public void onCaptureProgressed(
final CameraCaptureSession session,
final CaptureRequest request,
final CaptureResult partialResult) {}
@Override
public void onCaptureCompleted(
final CameraCaptureSession session,
final CaptureRequest request,
final TotalCaptureResult result) {}
};
/**
* Creates a new {@link CameraCaptureSession} for camera preview.
*/
private void createCameraPreviewSession() {
try {
final SurfaceTexture texture = textureView.getSurfaceTexture();
assert texture != null;
// We configure the size of default buffer to be the size of camera preview we want.
texture.setDefaultBufferSize(previewSize.getWidth(), previewSize.getHeight());
// This is the output Surface we need to start preview.
final Surface surface = new Surface(texture);
// We set up a CaptureRequest.Builder with the output Surface.
previewRequestBuilder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
previewRequestBuilder.addTarget(surface);
LOGGER.i("Opening camera preview: " + previewSize.getWidth() + "x" + previewSize.getHeight());
// Create the reader for the preview frames.
previewReader =
ImageReader.newInstance(
previewSize.getWidth(), previewSize.getHeight(), ImageFormat.YUV_420_888, 2);
previewReader.setOnImageAvailableListener(imageListener, backgroundHandler);
previewRequestBuilder.addTarget(previewReader.getSurface());
// Here, we create a CameraCaptureSession for camera preview.
cameraDevice.createCaptureSession(
Arrays.asList(surface, previewReader.getSurface()),
new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(final CameraCaptureSession cameraCaptureSession) {
// The camera is already closed
if (null == cameraDevice) {
return;
}
// When the session is ready, we start displaying the preview.
captureSession = cameraCaptureSession;
try {
// Auto focus should be continuous for camera preview.
previewRequestBuilder.set(
CaptureRequest.CONTROL_AF_MODE,
CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
// Flash is automatically enabled when necessary.
previewRequestBuilder.set(
CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH);
// Finally, we start displaying the camera preview.
previewRequest = previewRequestBuilder.build();
captureSession.setRepeatingRequest(
previewRequest, captureCallback, backgroundHandler);
} catch (final CameraAccessException e) {
LOGGER.e(e, "Exception!");
}
}
@Override
public void onConfigureFailed(final CameraCaptureSession cameraCaptureSession) {
showToast("Failed");
}
},
null);
} catch (final CameraAccessException e) {
LOGGER.e(e, "Exception!");
}
}
/**
* Configures the necessary {@link android.graphics.Matrix} transformation to `mTextureView`.
* This method should be called after the camera preview size is determined in
* setUpCameraOutputs and also the size of `mTextureView` is fixed.
*
* @param viewWidth The width of `mTextureView`
* @param viewHeight The height of `mTextureView`
*/
private void configureTransform(final int viewWidth, final int viewHeight) {
final Activity activity = getActivity();
if (null == textureView || null == previewSize || null == activity) {
return;
}
final int rotation = activity.getWindowManager().getDefaultDisplay().getRotation();
final Matrix matrix = new Matrix();
final RectF viewRect = new RectF(0, 0, viewWidth, viewHeight);
final RectF bufferRect = new RectF(0, 0, previewSize.getHeight(), previewSize.getWidth());
final float centerX = viewRect.centerX();
final float centerY = viewRect.centerY();
if (Surface.ROTATION_90 == rotation || Surface.ROTATION_270 == rotation) {
bufferRect.offset(centerX - bufferRect.centerX(), centerY - bufferRect.centerY());
matrix.setRectToRect(viewRect, bufferRect, Matrix.ScaleToFit.FILL);
final float scale =
Math.max(
(float) viewHeight / previewSize.getHeight(),
(float) viewWidth / previewSize.getWidth());
matrix.postScale(scale, scale, centerX, centerY);
matrix.postRotate(90 * (rotation - 2), centerX, centerY);
} else if (Surface.ROTATION_180 == rotation) {
matrix.postRotate(180, centerX, centerY);
}
textureView.setTransform(matrix);
}
/**
* Compares two {@code Size}s based on their areas.
*/
static class CompareSizesByArea implements Comparator<Size> {
@Override
public int compare(final Size lhs, final Size rhs) {
// We cast here to ensure the multiplications won't overflow
return Long.signum(
(long) lhs.getWidth() * lhs.getHeight() - (long) rhs.getWidth() * rhs.getHeight());
}
}
/**
* Shows an error message dialog.
*/
public static class ErrorDialog extends DialogFragment {
private static final String ARG_MESSAGE = "message";
public static ErrorDialog newInstance(final String message) {
final ErrorDialog dialog = new ErrorDialog();
final Bundle args = new Bundle();
args.putString(ARG_MESSAGE, message);
dialog.setArguments(args);
return dialog;
}
@Override
public Dialog onCreateDialog(final Bundle savedInstanceState) {
final Activity activity = getActivity();
return new AlertDialog.Builder(activity)
.setMessage(getArguments().getString(ARG_MESSAGE))
.setPositiveButton(
android.R.string.ok,
new DialogInterface.OnClickListener() {
@Override
public void onClick(final DialogInterface dialogInterface, final int i) {
activity.finish();
}
})
.create();
}
}
}

View File

@@ -1,107 +0,0 @@
/* Copyright 2015 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
package org.tensorflow.demo;
import android.graphics.Bitmap;
import android.graphics.RectF;
import java.util.List;
/**
* Generic interface for interacting with different recognition engines.
*/
public interface Classifier {
/**
* An immutable result returned by a Classifier describing what was recognized.
*/
public class Recognition {
/**
* A unique identifier for what has been recognized. Specific to the class, not the instance of
* the object.
*/
private final String id;
/**
* Display name for the recognition.
*/
private final String title;
/**
* A sortable score for how good the recognition is relative to others. Higher should be better.
*/
private final Float confidence;
/** Optional location within the source image for the location of the recognized object. */
private RectF location;
public Recognition(
final String id, final String title, final Float confidence, final RectF location) {
this.id = id;
this.title = title;
this.confidence = confidence;
this.location = location;
}
public String getId() {
return id;
}
public String getTitle() {
return title;
}
public Float getConfidence() {
return confidence;
}
public RectF getLocation() {
return new RectF(location);
}
public void setLocation(RectF location) {
this.location = location;
}
@Override
public String toString() {
String resultString = "";
if (id != null) {
resultString += "[" + id + "] ";
}
if (title != null) {
resultString += title + " ";
}
if (confidence != null) {
resultString += String.format("(%.1f%%) ", confidence * 100.0f);
}
if (location != null) {
resultString += location + " ";
}
return resultString.trim();
}
}
List<Recognition> recognizeImage(Bitmap bitmap);
void enableStatLogging(final boolean debug);
String getStatString();
void close();
}
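As a usage note, here is a short hedged sketch of how a caller might consume this interface; the helper class is hypothetical and not part of the demo sources.

```java
import android.graphics.Bitmap;
import java.util.List;
import org.tensorflow.demo.Classifier;
import org.tensorflow.demo.Classifier.Recognition;

/** Hypothetical helper illustrating typical use of the Classifier interface. */
final class RecognitionLogger {

  /** Classifies a bitmap and prints every result above a confidence cutoff. */
  static void logResults(Classifier classifier, Bitmap bitmap, float minConfidence) {
    List<Recognition> results = classifier.recognizeImage(bitmap);
    for (Recognition result : results) {
      Float confidence = result.getConfidence();
      if (confidence != null && confidence >= minConfidence) {
        System.out.printf(
            "%s %s (%.1f%%)%n", result.getId(), result.getTitle(), confidence * 100f);
      }
    }
  }
}
```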

View File

@@ -1,197 +0,0 @@
/*
* Copyright 2016 The TensorFlow Authors. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.tensorflow.demo;
import android.graphics.Bitmap;
import android.graphics.Bitmap.Config;
import android.graphics.Canvas;
import android.graphics.Matrix;
import android.graphics.Paint;
import android.graphics.Typeface;
import android.media.ImageReader.OnImageAvailableListener;
import android.os.SystemClock;
import android.util.Size;
import android.util.TypedValue;
import java.util.List;
import java.util.Vector;
import org.tensorflow.demo.OverlayView.DrawCallback;
import org.tensorflow.demo.env.BorderedText;
import org.tensorflow.demo.env.ImageUtils;
import org.tensorflow.demo.env.Logger;
import org.tensorflow.lite.demo.R; // Explicit import needed for internal Google builds.
public class ClassifierActivity extends CameraActivity implements OnImageAvailableListener {
private static final Logger LOGGER = new Logger();
protected static final boolean SAVE_PREVIEW_BITMAP = false;
private ResultsView resultsView;
private Bitmap rgbFrameBitmap = null;
private Bitmap croppedBitmap = null;
private Bitmap cropCopyBitmap = null;
private long lastProcessingTimeMs;
// These are the settings for the original v1 Inception model. If you want to
// use a model that's been produced from the TensorFlow for Poets codelab,
// you'll need to set IMAGE_SIZE = 299, IMAGE_MEAN = 128, IMAGE_STD = 128,
// INPUT_NAME = "Mul", and OUTPUT_NAME = "final_result".
// You'll also need to update the MODEL_FILE and LABEL_FILE paths to point to
// the ones you produced.
//
// To use v3 Inception model, strip the DecodeJpeg Op from your retrained
// model first:
//
// python strip_unused.py \
// --input_graph=<retrained-pb-file> \
// --output_graph=<your-stripped-pb-file> \
// --input_node_names="Mul" \
// --output_node_names="final_result" \
// --input_binary=true
private static final int INPUT_SIZE = 224;
private static final String MODEL_FILE = "mobilenet_v1_1.0_224_quant.tflite";
private static final String LABEL_FILE = "labels_mobilenet_quant_v1_224.txt";
private static final boolean MAINTAIN_ASPECT = true;
private static final Size DESIRED_PREVIEW_SIZE = new Size(640, 480);
private Integer sensorOrientation;
private Classifier classifier;
private Matrix frameToCropTransform;
private Matrix cropToFrameTransform;
private BorderedText borderedText;
@Override
protected int getLayoutId() {
return R.layout.camera_connection_fragment;
}
@Override
protected Size getDesiredPreviewFrameSize() {
return DESIRED_PREVIEW_SIZE;
}
private static final float TEXT_SIZE_DIP = 10;
@Override
public void onPreviewSizeChosen(final Size size, final int rotation) {
final float textSizePx = TypedValue.applyDimension(
TypedValue.COMPLEX_UNIT_DIP, TEXT_SIZE_DIP, getResources().getDisplayMetrics());
borderedText = new BorderedText(textSizePx);
borderedText.setTypeface(Typeface.MONOSPACE);
classifier = TFLiteImageClassifier.create(getAssets(), MODEL_FILE, LABEL_FILE, INPUT_SIZE);
previewWidth = size.getWidth();
previewHeight = size.getHeight();
sensorOrientation = rotation - getScreenOrientation();
LOGGER.i("Camera orientation relative to screen canvas: %d", sensorOrientation);
LOGGER.i("Initializing at size %dx%d", previewWidth, previewHeight);
rgbFrameBitmap = Bitmap.createBitmap(previewWidth, previewHeight, Config.ARGB_8888);
croppedBitmap = Bitmap.createBitmap(INPUT_SIZE, INPUT_SIZE, Config.ARGB_8888);
frameToCropTransform = ImageUtils.getTransformationMatrix(
previewWidth, previewHeight,
INPUT_SIZE, INPUT_SIZE,
sensorOrientation, MAINTAIN_ASPECT);
cropToFrameTransform = new Matrix();
frameToCropTransform.invert(cropToFrameTransform);
addCallback(
new DrawCallback() {
@Override
public void drawCallback(final Canvas canvas) {
renderDebug(canvas);
}
});
}
@Override
protected void processImage() {
rgbFrameBitmap.setPixels(getRgbBytes(), 0, previewWidth, 0, 0, previewWidth, previewHeight);
final Canvas canvas = new Canvas(croppedBitmap);
canvas.drawBitmap(rgbFrameBitmap, frameToCropTransform, null);
// For examining the actual TF input.
if (SAVE_PREVIEW_BITMAP) {
ImageUtils.saveBitmap(croppedBitmap);
}
runInBackground(
new Runnable() {
@Override
public void run() {
final long startTime = SystemClock.uptimeMillis();
final List<Classifier.Recognition> results = classifier.recognizeImage(croppedBitmap);
lastProcessingTimeMs = SystemClock.uptimeMillis() - startTime;
LOGGER.i("Detect: %s", results);
cropCopyBitmap = Bitmap.createBitmap(croppedBitmap);
if (resultsView == null) {
resultsView = (ResultsView) findViewById(R.id.results);
}
resultsView.setResults(results);
requestRender();
readyForNextImage();
}
});
}
@Override
public void onSetDebug(boolean debug) {
classifier.enableStatLogging(debug);
}
private void renderDebug(final Canvas canvas) {
if (!isDebug()) {
return;
}
final Bitmap copy = cropCopyBitmap;
if (copy != null) {
final Matrix matrix = new Matrix();
final float scaleFactor = 2;
matrix.postScale(scaleFactor, scaleFactor);
matrix.postTranslate(
canvas.getWidth() - copy.getWidth() * scaleFactor,
canvas.getHeight() - copy.getHeight() * scaleFactor);
canvas.drawBitmap(copy, matrix, new Paint());
final Vector<String> lines = new Vector<String>();
if (classifier != null) {
String statString = classifier.getStatString();
String[] statLines = statString.split("\n");
for (String line : statLines) {
lines.add(line);
}
}
lines.add("Frame: " + previewWidth + "x" + previewHeight);
lines.add("Crop: " + copy.getWidth() + "x" + copy.getHeight());
lines.add("View: " + canvas.getWidth() + "x" + canvas.getHeight());
lines.add("Rotation: " + sensorOrientation);
lines.add("Inference time: " + lastProcessingTimeMs + "ms");
borderedText.drawLines(canvas, 10, canvas.getHeight() - 10, lines);
}
}
}

View File

@@ -1,301 +0,0 @@
/*
* Copyright 2018 The TensorFlow Authors. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.tensorflow.demo;
import android.graphics.Bitmap;
import android.graphics.Bitmap.Config;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Matrix;
import android.graphics.Paint;
import android.graphics.Paint.Style;
import android.graphics.RectF;
import android.graphics.Typeface;
import android.media.ImageReader.OnImageAvailableListener;
import android.os.SystemClock;
import android.util.Size;
import android.util.TypedValue;
import android.widget.Toast;
import java.io.IOException;
import java.util.LinkedList;
import java.util.List;
import java.util.Vector;
import org.tensorflow.demo.OverlayView.DrawCallback;
import org.tensorflow.demo.env.BorderedText;
import org.tensorflow.demo.env.ImageUtils;
import org.tensorflow.demo.env.Logger;
import org.tensorflow.demo.tracking.MultiBoxTracker;
import org.tensorflow.lite.demo.R; // Explicit import needed for internal Google builds.
/**
* An activity that uses a TFLiteObjectDetectionAPIModel and MultiBoxTracker to detect and then
* track objects.
*/
public class DetectorActivity extends CameraActivity implements OnImageAvailableListener {
private static final Logger LOGGER = new Logger();
// Configuration values for the prepackaged SSD model.
private static final int TF_OD_API_INPUT_SIZE = 300;
private static final boolean TF_OD_API_IS_QUANTIZED = true;
private static final String TF_OD_API_MODEL_FILE = "detect.tflite";
private static final String TF_OD_API_LABELS_FILE = "labelmap.txt";
// Which detection model to use: by default uses the TensorFlow Object Detection API frozen
// checkpoints.
private enum DetectorMode {
TF_OD_API;
}
private static final DetectorMode MODE = DetectorMode.TF_OD_API;
// Minimum detection confidence to track a detection.
private static final float MINIMUM_CONFIDENCE_TF_OD_API = 0.6f;
private static final boolean MAINTAIN_ASPECT = false;
private static final Size DESIRED_PREVIEW_SIZE = new Size(640, 480);
private static final boolean SAVE_PREVIEW_BITMAP = false;
private static final float TEXT_SIZE_DIP = 10;
private Integer sensorOrientation;
private Classifier detector;
private long lastProcessingTimeMs;
private Bitmap rgbFrameBitmap = null;
private Bitmap croppedBitmap = null;
private Bitmap cropCopyBitmap = null;
private boolean computingDetection = false;
private long timestamp = 0;
private Matrix frameToCropTransform;
private Matrix cropToFrameTransform;
private MultiBoxTracker tracker;
private byte[] luminanceCopy;
private BorderedText borderedText;
@Override
public void onPreviewSizeChosen(final Size size, final int rotation) {
final float textSizePx =
TypedValue.applyDimension(
TypedValue.COMPLEX_UNIT_DIP, TEXT_SIZE_DIP, getResources().getDisplayMetrics());
borderedText = new BorderedText(textSizePx);
borderedText.setTypeface(Typeface.MONOSPACE);
tracker = new MultiBoxTracker(this);
int cropSize = TF_OD_API_INPUT_SIZE;
try {
detector =
TFLiteObjectDetectionAPIModel.create(
getAssets(),
TF_OD_API_MODEL_FILE,
TF_OD_API_LABELS_FILE,
TF_OD_API_INPUT_SIZE,
TF_OD_API_IS_QUANTIZED);
cropSize = TF_OD_API_INPUT_SIZE;
} catch (final IOException e) {
LOGGER.e("Exception initializing classifier!", e);
Toast toast =
Toast.makeText(
getApplicationContext(), "Classifier could not be initialized", Toast.LENGTH_SHORT);
toast.show();
finish();
}
previewWidth = size.getWidth();
previewHeight = size.getHeight();
sensorOrientation = rotation - getScreenOrientation();
LOGGER.i("Camera orientation relative to screen canvas: %d", sensorOrientation);
LOGGER.i("Initializing at size %dx%d", previewWidth, previewHeight);
rgbFrameBitmap = Bitmap.createBitmap(previewWidth, previewHeight, Config.ARGB_8888);
croppedBitmap = Bitmap.createBitmap(cropSize, cropSize, Config.ARGB_8888);
frameToCropTransform =
ImageUtils.getTransformationMatrix(
previewWidth, previewHeight,
cropSize, cropSize,
sensorOrientation, MAINTAIN_ASPECT);
cropToFrameTransform = new Matrix();
frameToCropTransform.invert(cropToFrameTransform);
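// frameToCropTransform maps preview-frame coordinates into the model's cropSize x cropSize
// input space; its inverse, cropToFrameTransform, is used in processImage() below to map
// detected boxes back onto the full preview frame for tracking.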
trackingOverlay = (OverlayView) findViewById(R.id.tracking_overlay);
trackingOverlay.addCallback(
new DrawCallback() {
@Override
public void drawCallback(final Canvas canvas) {
tracker.draw(canvas);
if (isDebug()) {
tracker.drawDebug(canvas);
}
}
});
addCallback(
new DrawCallback() {
@Override
public void drawCallback(final Canvas canvas) {
if (!isDebug()) {
return;
}
final Bitmap copy = cropCopyBitmap;
if (copy == null) {
return;
}
final int backgroundColor = Color.argb(100, 0, 0, 0);
canvas.drawColor(backgroundColor);
final Matrix matrix = new Matrix();
final float scaleFactor = 2;
matrix.postScale(scaleFactor, scaleFactor);
matrix.postTranslate(
canvas.getWidth() - copy.getWidth() * scaleFactor,
canvas.getHeight() - copy.getHeight() * scaleFactor);
canvas.drawBitmap(copy, matrix, new Paint());
final Vector<String> lines = new Vector<String>();
if (detector != null) {
final String statString = detector.getStatString();
final String[] statLines = statString.split("\n");
for (final String line : statLines) {
lines.add(line);
}
}
lines.add("");
lines.add("Frame: " + previewWidth + "x" + previewHeight);
lines.add("Crop: " + copy.getWidth() + "x" + copy.getHeight());
lines.add("View: " + canvas.getWidth() + "x" + canvas.getHeight());
lines.add("Rotation: " + sensorOrientation);
lines.add("Inference time: " + lastProcessingTimeMs + "ms");
borderedText.drawLines(canvas, 10, canvas.getHeight() - 10, lines);
}
});
}
OverlayView trackingOverlay;
@Override
protected void processImage() {
++timestamp;
final long currTimestamp = timestamp;
byte[] originalLuminance = getLuminance();
tracker.onFrame(
previewWidth,
previewHeight,
getLuminanceStride(),
sensorOrientation,
originalLuminance,
timestamp);
trackingOverlay.postInvalidate();
// No mutex needed as this method is not reentrant.
if (computingDetection) {
readyForNextImage();
return;
}
computingDetection = true;
LOGGER.i("Preparing image " + currTimestamp + " for detection in bg thread.");
rgbFrameBitmap.setPixels(getRgbBytes(), 0, previewWidth, 0, 0, previewWidth, previewHeight);
if (luminanceCopy == null) {
luminanceCopy = new byte[originalLuminance.length];
}
System.arraycopy(originalLuminance, 0, luminanceCopy, 0, originalLuminance.length);
readyForNextImage();
final Canvas canvas = new Canvas(croppedBitmap);
canvas.drawBitmap(rgbFrameBitmap, frameToCropTransform, null);
// For examining the actual TF input.
if (SAVE_PREVIEW_BITMAP) {
ImageUtils.saveBitmap(croppedBitmap);
}
runInBackground(
new Runnable() {
@Override
public void run() {
LOGGER.i("Running detection on image " + currTimestamp);
final long startTime = SystemClock.uptimeMillis();
final List<Classifier.Recognition> results = detector.recognizeImage(croppedBitmap);
lastProcessingTimeMs = SystemClock.uptimeMillis() - startTime;
cropCopyBitmap = Bitmap.createBitmap(croppedBitmap);
final Canvas canvas = new Canvas(cropCopyBitmap);
final Paint paint = new Paint();
paint.setColor(Color.RED);
paint.setStyle(Style.STROKE);
paint.setStrokeWidth(2.0f);
float minimumConfidence = MINIMUM_CONFIDENCE_TF_OD_API;
switch (MODE) {
case TF_OD_API:
minimumConfidence = MINIMUM_CONFIDENCE_TF_OD_API;
break;
}
final List<Classifier.Recognition> mappedRecognitions =
new LinkedList<Classifier.Recognition>();
for (final Classifier.Recognition result : results) {
final RectF location = result.getLocation();
if (location != null && result.getConfidence() >= minimumConfidence) {
canvas.drawRect(location, paint);
cropToFrameTransform.mapRect(location);
result.setLocation(location);
mappedRecognitions.add(result);
}
}
tracker.trackResults(mappedRecognitions, luminanceCopy, currTimestamp);
trackingOverlay.postInvalidate();
requestRender();
computingDetection = false;
}
});
}
@Override
protected int getLayoutId() {
return R.layout.camera_connection_fragment_tracking;
}
@Override
protected Size getDesiredPreviewFrameSize() {
return DESIRED_PREVIEW_SIZE;
}
@Override
public void onSetDebug(final boolean debug) {
detector.enableStatLogging(debug);
}
}

View File

@ -1,216 +0,0 @@
package org.tensorflow.demo;
/*
* Copyright 2017 The TensorFlow Authors. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import android.app.Fragment;
import android.graphics.SurfaceTexture;
import android.hardware.Camera;
import android.hardware.Camera.CameraInfo;
import android.os.Bundle;
import android.os.Handler;
import android.os.HandlerThread;
import android.util.Size;
import android.util.SparseIntArray;
import android.view.LayoutInflater;
import android.view.Surface;
import android.view.TextureView;
import android.view.View;
import android.view.ViewGroup;
import java.io.IOException;
import java.util.List;
import org.tensorflow.demo.env.ImageUtils;
import org.tensorflow.demo.env.Logger;
import org.tensorflow.lite.demo.R; // Explicit import needed for internal Google builds.
public class LegacyCameraConnectionFragment extends Fragment {
private Camera camera;
private static final Logger LOGGER = new Logger();
private Camera.PreviewCallback imageListener;
private Size desiredSize;
/**
* The layout identifier to inflate for this Fragment.
*/
private int layout;
public LegacyCameraConnectionFragment(
final Camera.PreviewCallback imageListener, final int layout, final Size desiredSize) {
this.imageListener = imageListener;
this.layout = layout;
this.desiredSize = desiredSize;
}
/**
* Conversion from screen rotation to JPEG orientation.
*/
private static final SparseIntArray ORIENTATIONS = new SparseIntArray();
static {
ORIENTATIONS.append(Surface.ROTATION_0, 90);
ORIENTATIONS.append(Surface.ROTATION_90, 0);
ORIENTATIONS.append(Surface.ROTATION_180, 270);
ORIENTATIONS.append(Surface.ROTATION_270, 180);
}
/**
* {@link android.view.TextureView.SurfaceTextureListener} handles several lifecycle events on a
* {@link TextureView}.
*/
private final TextureView.SurfaceTextureListener surfaceTextureListener =
new TextureView.SurfaceTextureListener() {
@Override
public void onSurfaceTextureAvailable(
final SurfaceTexture texture, final int width, final int height) {
int index = getCameraId();
camera = Camera.open(index);
try {
Camera.Parameters parameters = camera.getParameters();
List<String> focusModes = parameters.getSupportedFocusModes();
if (focusModes != null
&& focusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE)) {
parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE);
}
List<Camera.Size> cameraSizes = parameters.getSupportedPreviewSizes();
Size[] sizes = new Size[cameraSizes.size()];
int i = 0;
for (Camera.Size size : cameraSizes) {
sizes[i++] = new Size(size.width, size.height);
}
Size previewSize =
CameraConnectionFragment.chooseOptimalSize(
sizes, desiredSize.getWidth(), desiredSize.getHeight());
parameters.setPreviewSize(previewSize.getWidth(), previewSize.getHeight());
camera.setDisplayOrientation(90);
camera.setParameters(parameters);
camera.setPreviewTexture(texture);
} catch (IOException exception) {
camera.release();
}
camera.setPreviewCallbackWithBuffer(imageListener);
Camera.Size s = camera.getParameters().getPreviewSize();
camera.addCallbackBuffer(new byte[ImageUtils.getYUVByteSize(s.height, s.width)]);
textureView.setAspectRatio(s.height, s.width);
camera.startPreview();
}
@Override
public void onSurfaceTextureSizeChanged(
final SurfaceTexture texture, final int width, final int height) {}
@Override
public boolean onSurfaceTextureDestroyed(final SurfaceTexture texture) {
return true;
}
@Override
public void onSurfaceTextureUpdated(final SurfaceTexture texture) {}
};
/**
* An {@link AutoFitTextureView} for camera preview.
*/
private AutoFitTextureView textureView;
/**
* An additional thread for running tasks that shouldn't block the UI.
*/
private HandlerThread backgroundThread;
@Override
public View onCreateView(
final LayoutInflater inflater, final ViewGroup container, final Bundle savedInstanceState) {
return inflater.inflate(layout, container, false);
}
@Override
public void onViewCreated(final View view, final Bundle savedInstanceState) {
textureView = (AutoFitTextureView) view.findViewById(R.id.texture);
}
@Override
public void onActivityCreated(final Bundle savedInstanceState) {
super.onActivityCreated(savedInstanceState);
}
@Override
public void onResume() {
super.onResume();
startBackgroundThread();
// When the screen is turned off and turned back on, the SurfaceTexture is already
// available, and "onSurfaceTextureAvailable" will not be called. In that case, we can open
// a camera and start preview from here (otherwise, we wait until the surface is ready in
// the SurfaceTextureListener).
if (textureView.isAvailable()) {
camera.startPreview();
} else {
textureView.setSurfaceTextureListener(surfaceTextureListener);
}
}
@Override
public void onPause() {
stopCamera();
stopBackgroundThread();
super.onPause();
}
/**
* Starts a background thread and its {@link Handler}.
*/
private void startBackgroundThread() {
backgroundThread = new HandlerThread("CameraBackground");
backgroundThread.start();
}
/**
* Stops the background thread and its {@link Handler}.
*/
private void stopBackgroundThread() {
backgroundThread.quitSafely();
try {
backgroundThread.join();
backgroundThread = null;
} catch (final InterruptedException e) {
LOGGER.e(e, "Exception!");
}
}
protected void stopCamera() {
if (camera != null) {
camera.stopPreview();
camera.setPreviewCallback(null);
camera.release();
camera = null;
}
}
private int getCameraId() {
CameraInfo ci = new CameraInfo();
for (int i = 0; i < Camera.getNumberOfCameras(); i++) {
Camera.getCameraInfo(i, ci);
if (ci.facing == CameraInfo.CAMERA_FACING_BACK)
return i;
}
return -1; // No camera found
}
}

View File

@ -1,52 +0,0 @@
/* Copyright 2016 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
package org.tensorflow.demo;
import android.content.Context;
import android.graphics.Canvas;
import android.util.AttributeSet;
import android.view.View;
import java.util.LinkedList;
import java.util.List;
/**
* A simple View providing a render callback to other classes.
*/
public class OverlayView extends View {
private final List<DrawCallback> callbacks = new LinkedList<DrawCallback>();
public OverlayView(final Context context, final AttributeSet attrs) {
super(context, attrs);
}
/**
* Interface defining the callback for client classes.
*/
public interface DrawCallback {
public void drawCallback(final Canvas canvas);
}
public void addCallback(final DrawCallback callback) {
callbacks.add(callback);
}
@Override
public synchronized void draw(final Canvas canvas) {
for (final DrawCallback callback : callbacks) {
callback.drawCallback(canvas);
}
}
}

View File

@ -1,67 +0,0 @@
/* Copyright 2015 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
package org.tensorflow.demo;
import android.content.Context;
import android.graphics.Canvas;
import android.graphics.Paint;
import android.util.AttributeSet;
import android.util.TypedValue;
import android.view.View;
import java.util.List;
import org.tensorflow.demo.Classifier.Recognition;
public class RecognitionScoreView extends View implements ResultsView {
private static final float TEXT_SIZE_DIP = 24;
private List<Recognition> results;
private final float textSizePx;
private final Paint fgPaint;
private final Paint bgPaint;
public RecognitionScoreView(final Context context, final AttributeSet set) {
super(context, set);
textSizePx =
TypedValue.applyDimension(
TypedValue.COMPLEX_UNIT_DIP, TEXT_SIZE_DIP, getResources().getDisplayMetrics());
fgPaint = new Paint();
fgPaint.setTextSize(textSizePx);
bgPaint = new Paint();
bgPaint.setColor(0xcc4285f4);
}
@Override
public void setResults(final List<Recognition> results) {
this.results = results;
postInvalidate();
}
@Override
public void onDraw(final Canvas canvas) {
final int x = 10;
int y = (int) (fgPaint.getTextSize() * 1.5f);
canvas.drawPaint(bgPaint);
if (results != null) {
for (final Recognition recog : results) {
canvas.drawText(recog.getTitle() + ": " + recog.getConfidence(), x, y, fgPaint);
y += (int) (fgPaint.getTextSize() * 1.5f);
}
}
}
}

View File

@ -1,186 +0,0 @@
/*
* Copyright 2017 The TensorFlow Authors. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.tensorflow.demo;
import android.util.Log;
import android.util.Pair;
import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Deque;
import java.util.List;
/** Reads in results from an instantaneous audio recognition model and smoothes them over time. */
public class RecognizeCommands {
// Configuration settings.
private List<String> labels = new ArrayList<String>();
private long averageWindowDurationMs;
private float detectionThreshold;
private int suppressionMs;
private int minimumCount;
private long minimumTimeBetweenSamplesMs;
// Working variables.
private Deque<Pair<Long, float[]>> previousResults = new ArrayDeque<Pair<Long, float[]>>();
private String previousTopLabel;
private int labelsCount;
private long previousTopLabelTime;
private float previousTopLabelScore;
private static final String SILENCE_LABEL = "_silence_";
private static final long MINIMUM_TIME_FRACTION = 4;
public RecognizeCommands(
List<String> inLabels,
long inAverageWindowDurationMs,
float inDetectionThreshold,
int inSuppressionMS,
int inMinimumCount,
long inMinimumTimeBetweenSamplesMS) {
labels = inLabels;
averageWindowDurationMs = inAverageWindowDurationMs;
detectionThreshold = inDetectionThreshold;
suppressionMs = inSuppressionMS;
minimumCount = inMinimumCount;
labelsCount = inLabels.size();
previousTopLabel = SILENCE_LABEL;
previousTopLabelTime = Long.MIN_VALUE;
previousTopLabelScore = 0.0f;
minimumTimeBetweenSamplesMs = inMinimumTimeBetweenSamplesMS;
}
/** Holds information about what's been recognized. */
public static class RecognitionResult {
public final String foundCommand;
public final float score;
public final boolean isNewCommand;
public RecognitionResult(String inFoundCommand, float inScore, boolean inIsNewCommand) {
foundCommand = inFoundCommand;
score = inScore;
isNewCommand = inIsNewCommand;
}
}
private static class ScoreForSorting implements Comparable<ScoreForSorting> {
public final float score;
public final int index;
public ScoreForSorting(float inScore, int inIndex) {
score = inScore;
index = inIndex;
}
@Override
public int compareTo(ScoreForSorting other) {
if (this.score > other.score) {
return -1;
} else if (this.score < other.score) {
return 1;
} else {
return 0;
}
}
}
public RecognitionResult processLatestResults(float[] currentResults, long currentTimeMS) {
if (currentResults.length != labelsCount) {
throw new RuntimeException(
"The results for recognition should contain "
+ labelsCount
+ " elements, but there are "
+ currentResults.length);
}
if ((!previousResults.isEmpty()) && (currentTimeMS < previousResults.getFirst().first)) {
throw new RuntimeException(
"You must feed results in increasing time order, but received a timestamp of "
+ currentTimeMS
+ " that was earlier than the previous one of "
+ previousResults.getFirst().first);
}
final int howManyResults = previousResults.size();
// Ignore any results that are coming in too frequently.
if (howManyResults > 1) {
final long timeSinceMostRecent = currentTimeMS - previousResults.getLast().first;
if (timeSinceMostRecent < minimumTimeBetweenSamplesMs) {
return new RecognitionResult(previousTopLabel, previousTopLabelScore, false);
}
}
// Add the latest result to the end of the queue.
previousResults.addLast(new Pair<Long, float[]>(currentTimeMS, currentResults));
// Prune any earlier results that are too old for the averaging window.
final long timeLimit = currentTimeMS - averageWindowDurationMs;
while (previousResults.getFirst().first < timeLimit) {
previousResults.removeFirst();
}
// If there are too few results, assume the result will be unreliable and
// bail.
final long earliestTime = previousResults.getFirst().first;
final long samplesDuration = currentTimeMS - earliestTime;
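// Illustrative check with the defaults passed in from SpeechActivity: averageWindowDurationMs
// is 500 and MINIMUM_TIME_FRACTION is 4, so at least 125 ms of results (and at least
// minimumCount of them) must be buffered before a detection is trusted.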
if ((howManyResults < minimumCount)
|| (samplesDuration < (averageWindowDurationMs / MINIMUM_TIME_FRACTION))) {
Log.v("RecognizeResult", "Too few results");
return new RecognitionResult(previousTopLabel, 0.0f, false);
}
// Calculate the average score across all the results in the window.
float[] averageScores = new float[labelsCount];
for (Pair<Long, float[]> previousResult : previousResults) {
final float[] scoresTensor = previousResult.second;
int i = 0;
while (i < scoresTensor.length) {
averageScores[i] += scoresTensor[i] / howManyResults;
++i;
}
}
// Sort the averaged results in descending score order.
ScoreForSorting[] sortedAverageScores = new ScoreForSorting[labelsCount];
for (int i = 0; i < labelsCount; ++i) {
sortedAverageScores[i] = new ScoreForSorting(averageScores[i], i);
}
Arrays.sort(sortedAverageScores);
// See if the latest top score is enough to trigger a detection.
final int currentTopIndex = sortedAverageScores[0].index;
final String currentTopLabel = labels.get(currentTopIndex);
final float currentTopScore = sortedAverageScores[0].score;
// If we've recently had another label trigger, assume one that occurs too
// soon afterwards is a bad result.
long timeSinceLastTop;
if (previousTopLabel.equals(SILENCE_LABEL) || (previousTopLabelTime == Long.MIN_VALUE)) {
timeSinceLastTop = Long.MAX_VALUE;
} else {
timeSinceLastTop = currentTimeMS - previousTopLabelTime;
}
boolean isNewCommand;
if ((currentTopScore > detectionThreshold) && (timeSinceLastTop > suppressionMs)) {
previousTopLabel = currentTopLabel;
previousTopLabelTime = currentTimeMS;
previousTopLabelScore = currentTopScore;
isNewCommand = true;
} else {
isNewCommand = false;
}
return new RecognitionResult(currentTopLabel, currentTopScore, isNewCommand);
}
}

View File

@ -1,23 +0,0 @@
/* Copyright 2016 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
package org.tensorflow.demo;
import java.util.List;
import org.tensorflow.demo.Classifier.Recognition;
public interface ResultsView {
public void setResults(final List<Recognition> results);
}

View File

@ -1,381 +0,0 @@
/*
* Copyright 2017 The TensorFlow Authors. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/* Demonstrates how to run an audio recognition model in Android.
This example loads a simple speech recognition model trained by the tutorial at
https://www.tensorflow.org/tutorials/audio_training
The model files should be downloaded automatically from the TensorFlow website,
but if you have a custom model you can update the LABEL_FILENAME and
MODEL_FILENAME constants to point to your own files.
The example application displays a list view with all of the known audio labels,
and highlights each one when it thinks it has detected one through the
microphone. The averaging of results to give a more reliable signal happens in
the RecognizeCommands helper class.
*/
package org.tensorflow.demo;
import android.animation.ValueAnimator;
import android.app.Activity;
import android.content.pm.PackageManager;
import android.content.res.AssetFileDescriptor;
import android.content.res.AssetManager;
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaRecorder;
import android.os.Build;
import android.os.Bundle;
import android.util.Log;
import android.view.View;
import android.widget.ArrayAdapter;
import android.widget.Button;
import android.widget.ListView;
import java.io.BufferedReader;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.MappedByteBuffer;
import java.nio.channels.FileChannel;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.locks.ReentrantLock;
import org.tensorflow.lite.Interpreter;
import org.tensorflow.lite.demo.R; // Explicit import needed for internal Google builds.
/**
* An activity that listens for audio and then uses a TensorFlow model to detect particular classes,
* by default a small set of action words.
*/
public class SpeechActivity extends Activity {
// Constants that control the behavior of the recognition code and model
// settings. See the audio recognition tutorial for a detailed explanation of
// all these, but you should customize them to match your training settings if
// you are running your own model.
private static final int SAMPLE_RATE = 16000;
private static final int SAMPLE_DURATION_MS = 1000;
private static final int RECORDING_LENGTH = (int) (SAMPLE_RATE * SAMPLE_DURATION_MS / 1000);
private static final long AVERAGE_WINDOW_DURATION_MS = 500;
private static final float DETECTION_THRESHOLD = 0.70f;
private static final int SUPPRESSION_MS = 1500;
private static final int MINIMUM_COUNT = 3;
private static final long MINIMUM_TIME_BETWEEN_SAMPLES_MS = 30;
private static final String LABEL_FILENAME = "file:///android_asset/conv_actions_labels.txt";
private static final String MODEL_FILENAME = "file:///android_asset/conv_actions_frozen.tflite";
// UI elements.
private static final int REQUEST_RECORD_AUDIO = 13;
private Button quitButton;
private ListView labelsListView;
private static final String LOG_TAG = SpeechActivity.class.getSimpleName();
// Working variables.
short[] recordingBuffer = new short[RECORDING_LENGTH];
int recordingOffset = 0;
boolean shouldContinue = true;
private Thread recordingThread;
boolean shouldContinueRecognition = true;
private Thread recognitionThread;
private final ReentrantLock recordingBufferLock = new ReentrantLock();
private List<String> labels = new ArrayList<String>();
private List<String> displayedLabels = new ArrayList<>();
private RecognizeCommands recognizeCommands = null;
private Interpreter tfLite;
/** Memory-map the model file in Assets. */
private static MappedByteBuffer loadModelFile(AssetManager assets, String modelFilename)
throws IOException {
AssetFileDescriptor fileDescriptor = assets.openFd(modelFilename);
FileInputStream inputStream = new FileInputStream(fileDescriptor.getFileDescriptor());
FileChannel fileChannel = inputStream.getChannel();
long startOffset = fileDescriptor.getStartOffset();
long declaredLength = fileDescriptor.getDeclaredLength();
return fileChannel.map(FileChannel.MapMode.READ_ONLY, startOffset, declaredLength);
}
@Override
protected void onCreate(Bundle savedInstanceState) {
// Set up the UI.
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_speech);
quitButton = (Button) findViewById(R.id.quit);
quitButton.setOnClickListener(
new View.OnClickListener() {
@Override
public void onClick(View view) {
moveTaskToBack(true);
android.os.Process.killProcess(android.os.Process.myPid());
System.exit(1);
}
});
labelsListView = (ListView) findViewById(R.id.list_view);
// Load the labels for the model, but only display those that don't start
// with an underscore.
String actualLabelFilename = LABEL_FILENAME.split("file:///android_asset/", -1)[1];
Log.i(LOG_TAG, "Reading labels from: " + actualLabelFilename);
BufferedReader br = null;
try {
br = new BufferedReader(new InputStreamReader(getAssets().open(actualLabelFilename)));
String line;
while ((line = br.readLine()) != null) {
labels.add(line);
if (line.charAt(0) != '_') {
displayedLabels.add(line.substring(0, 1).toUpperCase() + line.substring(1));
}
}
br.close();
} catch (IOException e) {
throw new RuntimeException("Problem reading label file!", e);
}
// Build a list view based on these labels.
ArrayAdapter<String> arrayAdapter =
new ArrayAdapter<String>(this, R.layout.list_text_item, displayedLabels);
labelsListView.setAdapter(arrayAdapter);
// Set up an object to smooth recognition results to increase accuracy.
recognizeCommands =
new RecognizeCommands(
labels,
AVERAGE_WINDOW_DURATION_MS,
DETECTION_THRESHOLD,
SUPPRESSION_MS,
MINIMUM_COUNT,
MINIMUM_TIME_BETWEEN_SAMPLES_MS);
String actualModelFilename = MODEL_FILENAME.split("file:///android_asset/", -1)[1];
try {
tfLite = new Interpreter(loadModelFile(getAssets(), actualModelFilename));
} catch (Exception e) {
throw new RuntimeException(e);
}
tfLite.resizeInput(0, new int[] {RECORDING_LENGTH, 1});
tfLite.resizeInput(1, new int[] {1});
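// The model takes two inputs: input 0 is the RECORDING_LENGTH x 1 float waveform and
// input 1 is a single-element sample-rate tensor, matching the buffers fed to
// runForMultipleInputsOutputs() in recognize() below.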
// Start the recording and recognition threads.
requestMicrophonePermission();
startRecording();
startRecognition();
}
private void requestMicrophonePermission() {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
requestPermissions(
new String[]{android.Manifest.permission.RECORD_AUDIO}, REQUEST_RECORD_AUDIO);
}
}
@Override
public void onRequestPermissionsResult(
int requestCode, String[] permissions, int[] grantResults) {
if (requestCode == REQUEST_RECORD_AUDIO
&& grantResults.length > 0
&& grantResults[0] == PackageManager.PERMISSION_GRANTED) {
startRecording();
startRecognition();
}
}
public synchronized void startRecording() {
if (recordingThread != null) {
return;
}
shouldContinue = true;
recordingThread =
new Thread(
new Runnable() {
@Override
public void run() {
record();
}
});
recordingThread.start();
}
public synchronized void stopRecording() {
if (recordingThread == null) {
return;
}
shouldContinue = false;
recordingThread = null;
}
private void record() {
android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_AUDIO);
// Estimate the buffer size we'll need for this device.
int bufferSize =
AudioRecord.getMinBufferSize(
SAMPLE_RATE, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
if (bufferSize == AudioRecord.ERROR || bufferSize == AudioRecord.ERROR_BAD_VALUE) {
bufferSize = SAMPLE_RATE * 2;
}
short[] audioBuffer = new short[bufferSize / 2];
AudioRecord record =
new AudioRecord(
MediaRecorder.AudioSource.DEFAULT,
SAMPLE_RATE,
AudioFormat.CHANNEL_IN_MONO,
AudioFormat.ENCODING_PCM_16BIT,
bufferSize);
if (record.getState() != AudioRecord.STATE_INITIALIZED) {
Log.e(LOG_TAG, "Audio Record can't initialize!");
return;
}
record.startRecording();
Log.v(LOG_TAG, "Start recording");
// Loop, gathering audio data and copying it to a round-robin buffer.
while (shouldContinue) {
int numberRead = record.read(audioBuffer, 0, audioBuffer.length);
int maxLength = recordingBuffer.length;
int newRecordingOffset = recordingOffset + numberRead;
int secondCopyLength = Math.max(0, newRecordingOffset - maxLength);
int firstCopyLength = numberRead - secondCopyLength;
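// Illustrative example: with RECORDING_LENGTH = 16000, recordingOffset = 15000 and
// numberRead = 2000, newRecordingOffset is 17000, so secondCopyLength = 1000 and
// firstCopyLength = 1000. The first 1000 samples fill the tail of recordingBuffer, the
// remaining 1000 wrap around to index 0, and recordingOffset becomes 17000 % 16000 = 1000.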
// We store off all the data for the recognition thread to access. The ML
// thread will copy out of this buffer into its own, while holding the
// lock, so this should be thread safe.
recordingBufferLock.lock();
try {
System.arraycopy(audioBuffer, 0, recordingBuffer, recordingOffset, firstCopyLength);
System.arraycopy(audioBuffer, firstCopyLength, recordingBuffer, 0, secondCopyLength);
recordingOffset = newRecordingOffset % maxLength;
} finally {
recordingBufferLock.unlock();
}
}
record.stop();
record.release();
}
public synchronized void startRecognition() {
if (recognitionThread != null) {
return;
}
shouldContinueRecognition = true;
recognitionThread =
new Thread(
new Runnable() {
@Override
public void run() {
recognize();
}
});
recognitionThread.start();
}
public synchronized void stopRecognition() {
if (recognitionThread == null) {
return;
}
shouldContinueRecognition = false;
recognitionThread = null;
}
private void recognize() {
Log.v(LOG_TAG, "Start recognition");
short[] inputBuffer = new short[RECORDING_LENGTH];
float[][] floatInputBuffer = new float[RECORDING_LENGTH][1];
float[][] outputScores = new float[1][labels.size()];
int[] sampleRateList = new int[] {SAMPLE_RATE};
// Loop, grabbing recorded data and running the recognition model on it.
while (shouldContinueRecognition) {
// The recording thread places data in this round-robin buffer, so lock to
// make sure there's no writing happening and then copy it to our own
// local version.
recordingBufferLock.lock();
try {
int maxLength = recordingBuffer.length;
int firstCopyLength = maxLength - recordingOffset;
int secondCopyLength = recordingOffset;
System.arraycopy(recordingBuffer, recordingOffset, inputBuffer, 0, firstCopyLength);
System.arraycopy(recordingBuffer, 0, inputBuffer, firstCopyLength, secondCopyLength);
} finally {
recordingBufferLock.unlock();
}
// We need to feed in float values between -1.0f and 1.0f, so divide the
// signed 16-bit inputs.
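// For example, a raw sample of 16384 maps to roughly 0.5f and 32767 maps to 1.0f.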
for (int i = 0; i < RECORDING_LENGTH; ++i) {
floatInputBuffer[i][0] = inputBuffer[i] / 32767.0f;
}
Object[] inputArray = {floatInputBuffer, sampleRateList};
Map<Integer, Object> outputMap = new HashMap<>();
outputMap.put(0, outputScores);
// Run the model.
tfLite.runForMultipleInputsOutputs(inputArray, outputMap);
// Use the smoother to figure out if we've had a real recognition event.
long currentTime = System.currentTimeMillis();
final RecognizeCommands.RecognitionResult result =
recognizeCommands.processLatestResults(outputScores[0], currentTime);
runOnUiThread(
new Runnable() {
@Override
public void run() {
// If we do have a new command, highlight the right list entry.
if (!result.foundCommand.startsWith("_") && result.isNewCommand) {
int labelIndex = -1;
for (int i = 0; i < labels.size(); ++i) {
if (labels.get(i).equals(result.foundCommand)) {
labelIndex = i;
}
}
final View labelView = (View) labelsListView.getChildAt(labelIndex - 2);
ValueAnimator colorAnimation =
ValueAnimator.ofArgb(0x00b3ccff, 0xffb3ccff, 0x00b3ccff);
colorAnimation.setDuration(750);
colorAnimation.addUpdateListener(
new ValueAnimator.AnimatorUpdateListener() {
@Override
public void onAnimationUpdate(ValueAnimator animator) {
labelView.setBackgroundColor((int) animator.getAnimatedValue());
}
});
colorAnimation.start();
}
}
});
try {
// We don't need to run too frequently, so snooze for a bit.
Thread.sleep(MINIMUM_TIME_BETWEEN_SAMPLES_MS);
} catch (InterruptedException e) {
// Ignore
}
}
Log.v(LOG_TAG, "End recognition");
}
}

View File

@ -1,209 +0,0 @@
/* Copyright 2016 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
package org.tensorflow.demo;
import android.content.res.AssetFileDescriptor;
import android.content.res.AssetManager;
import android.graphics.Bitmap;
import android.os.SystemClock;
import android.os.Trace;
import android.util.Log;
import java.io.BufferedReader;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.MappedByteBuffer;
import java.nio.channels.FileChannel;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;
import java.util.PriorityQueue;
import java.util.Vector;
import org.tensorflow.lite.Interpreter;
/** A classifier specialized to label images using TensorFlow. */
public class TFLiteImageClassifier implements Classifier {
private static final String TAG = "TFLiteImageClassifier";
// Only return this many results with at least this confidence.
private static final int MAX_RESULTS = 3;
private Interpreter tfLite;
/** Dimensions of inputs. */
private static final int DIM_BATCH_SIZE = 1;
private static final int DIM_PIXEL_SIZE = 3;
private static final int DIM_IMG_SIZE_X = 224;
private static final int DIM_IMG_SIZE_Y = 224;
byte[][] labelProb;
// Pre-allocated buffers.
private Vector<String> labels = new Vector<String>();
private int[] intValues;
private ByteBuffer imgData = null;
private TFLiteImageClassifier() {}
/** Memory-map the model file in Assets. */
private static MappedByteBuffer loadModelFile(AssetManager assets, String modelFilename)
throws IOException {
AssetFileDescriptor fileDescriptor = assets.openFd(modelFilename);
FileInputStream inputStream = new FileInputStream(fileDescriptor.getFileDescriptor());
FileChannel fileChannel = inputStream.getChannel();
long startOffset = fileDescriptor.getStartOffset();
long declaredLength = fileDescriptor.getDeclaredLength();
return fileChannel.map(FileChannel.MapMode.READ_ONLY, startOffset, declaredLength);
}
/**
* Initializes a TensorFlow Lite interpreter for classifying images.
*
* @param assetManager The asset manager to be used to load assets.
* @param modelFilename The filepath of the TensorFlow Lite model in assets.
* @param labelFilename The filepath of the label file for classes.
* @param inputSize The input size. A square image of inputSize x inputSize is assumed.
*/
public static Classifier create(
AssetManager assetManager, String modelFilename, String labelFilename, int inputSize) {
TFLiteImageClassifier c = new TFLiteImageClassifier();
// Read the label names into memory.
// TODO(andrewharp): make this handle non-assets.
Log.i(TAG, "Reading labels from: " + labelFilename);
BufferedReader br = null;
try {
br = new BufferedReader(new InputStreamReader(assetManager.open(labelFilename)));
String line;
while ((line = br.readLine()) != null) {
c.labels.add(line);
}
br.close();
} catch (IOException e) {
throw new RuntimeException("Problem reading label file!" , e);
}
c.imgData =
ByteBuffer.allocateDirect(
DIM_BATCH_SIZE * DIM_IMG_SIZE_X * DIM_IMG_SIZE_Y * DIM_PIXEL_SIZE);
c.imgData.order(ByteOrder.nativeOrder());
try {
c.tfLite = new Interpreter(loadModelFile(assetManager, modelFilename));
} catch (Exception e) {
throw new RuntimeException(e);
}
// The shape of the output is [N, NUM_CLASSES], where N is the batch size.
Log.i(TAG, "Read " + c.labels.size() + " labels");
// Pre-allocate buffers.
c.intValues = new int[inputSize * inputSize];
c.labelProb = new byte[1][c.labels.size()];
return c;
}
/** Writes Image data into a {@code ByteBuffer}. */
private void convertBitmapToByteBuffer(Bitmap bitmap) {
if (imgData == null) {
return;
}
imgData.rewind();
bitmap.getPixels(intValues, 0, bitmap.getWidth(), 0, 0, bitmap.getWidth(), bitmap.getHeight());
// Copy each pixel's RGB channels into the quantized byte buffer, one byte per channel.
int pixel = 0;
long startTime = SystemClock.uptimeMillis();
for (int i = 0; i < DIM_IMG_SIZE_X; ++i) {
for (int j = 0; j < DIM_IMG_SIZE_Y; ++j) {
final int val = intValues[pixel++];
imgData.put((byte) ((val >> 16) & 0xFF));
imgData.put((byte) ((val >> 8) & 0xFF));
imgData.put((byte) (val & 0xFF));
}
}
long endTime = SystemClock.uptimeMillis();
Log.d(TAG, "Timecost to put values into ByteBuffer: " + Long.toString(endTime - startTime));
}
@Override
public List<Recognition> recognizeImage(final Bitmap bitmap) {
// Log this method so that it can be analyzed with systrace.
Trace.beginSection("recognizeImage");
Trace.beginSection("preprocessBitmap");
long startTime;
long endTime;
startTime = SystemClock.uptimeMillis();
convertBitmapToByteBuffer(bitmap);
Trace.endSection(); // preprocessBitmap
// Run the inference call.
Trace.beginSection("run");
startTime = SystemClock.uptimeMillis();
tfLite.run(imgData, labelProb);
endTime = SystemClock.uptimeMillis();
Log.i(TAG, "Inf time: " + (endTime - startTime));
Trace.endSection();
// Find the best classifications.
PriorityQueue<Recognition> pq =
new PriorityQueue<Recognition>(
3,
new Comparator<Recognition>() {
@Override
public int compare(Recognition lhs, Recognition rhs) {
// Intentionally reversed to put high confidence at the head of the queue.
return Float.compare(rhs.getConfidence(), lhs.getConfidence());
}
});
for (int i = 0; i < labels.size(); ++i) {
pq.add(
new Recognition(
"" + i,
labels.size() > i ? labels.get(i) : "unknown",
(float) labelProb[0][i],
null));
}
final ArrayList<Recognition> recognitions = new ArrayList<Recognition>();
int recognitionsSize = Math.min(pq.size(), MAX_RESULTS);
for (int i = 0; i < recognitionsSize; ++i) {
recognitions.add(pq.poll());
}
Trace.endSection(); // "recognizeImage"
return recognitions;
}
@Override
public void enableStatLogging(boolean logStats) {
}
@Override
public String getStatString() {
return "";
}
@Override
public void close() {
}
}

View File

@ -1,233 +0,0 @@
/* Copyright 2016 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
package org.tensorflow.demo;
import android.content.res.AssetFileDescriptor;
import android.content.res.AssetManager;
import android.graphics.Bitmap;
import android.graphics.RectF;
import android.os.Trace;
import java.io.BufferedReader;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.MappedByteBuffer;
import java.nio.channels.FileChannel;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Vector;
import org.tensorflow.demo.env.Logger;
import org.tensorflow.lite.Interpreter;
/**
* Wrapper for frozen detection models trained using the Tensorflow Object Detection API:
* github.com/tensorflow/models/tree/master/research/object_detection
*/
public class TFLiteObjectDetectionAPIModel implements Classifier {
private static final Logger LOGGER = new Logger();
// Only return this many results.
private static final int NUM_DETECTIONS = 10;
private boolean isModelQuantized;
// Float model
private static final float IMAGE_MEAN = 128.0f;
private static final float IMAGE_STD = 128.0f;
// Number of threads in the java app
private static final int NUM_THREADS = 4;
// Config values.
private int inputSize;
// Pre-allocated buffers.
private Vector<String> labels = new Vector<String>();
private int[] intValues;
// outputLocations: array of shape [Batchsize, NUM_DETECTIONS,4]
// contains the location of detected boxes
private float[][][] outputLocations;
// outputClasses: array of shape [Batchsize, NUM_DETECTIONS]
// contains the classes of detected boxes
private float[][] outputClasses;
// outputScores: array of shape [Batchsize, NUM_DETECTIONS]
// contains the scores of detected boxes
private float[][] outputScores;
// numDetections: array of shape [Batchsize]
// contains the number of detected boxes
private float[] numDetections;
private ByteBuffer imgData;
private Interpreter tfLite;
/** Memory-map the model file in Assets. */
private static MappedByteBuffer loadModelFile(AssetManager assets, String modelFilename)
throws IOException {
AssetFileDescriptor fileDescriptor = assets.openFd(modelFilename);
FileInputStream inputStream = new FileInputStream(fileDescriptor.getFileDescriptor());
FileChannel fileChannel = inputStream.getChannel();
long startOffset = fileDescriptor.getStartOffset();
long declaredLength = fileDescriptor.getDeclaredLength();
return fileChannel.map(FileChannel.MapMode.READ_ONLY, startOffset, declaredLength);
}
/**
* Initializes a TensorFlow Lite interpreter for detecting objects in images.
*
* @param assetManager The asset manager to be used to load assets.
* @param modelFilename The filepath of the TensorFlow Lite detection model in assets.
* @param labelFilename The filepath of the label file for classes.
* @param inputSize The size of the square image input.
* @param isQuantized Boolean representing whether the model is quantized or not.
*/
public static Classifier create(
final AssetManager assetManager,
final String modelFilename,
final String labelFilename,
final int inputSize,
final boolean isQuantized)
throws IOException {
final TFLiteObjectDetectionAPIModel d = new TFLiteObjectDetectionAPIModel();
InputStream labelsInput = assetManager.open(labelFilename);
BufferedReader br = new BufferedReader(new InputStreamReader(labelsInput));
String line;
while ((line = br.readLine()) != null) {
LOGGER.w(line);
d.labels.add(line);
}
br.close();
d.inputSize = inputSize;
try {
d.tfLite = new Interpreter(loadModelFile(assetManager, modelFilename));
} catch (Exception e) {
throw new RuntimeException(e);
}
d.isModelQuantized = isQuantized;
// Pre-allocate buffers.
int numBytesPerChannel;
if (isQuantized) {
numBytesPerChannel = 1; // Quantized
} else {
numBytesPerChannel = 4; // Floating point
}
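// Illustrative sizing for the default 300x300 quantized SSD model: the direct buffer below
// is 1 * 300 * 300 * 3 * 1 = 270,000 bytes; a float model of the same input size would use
// 4 bytes per channel (1,080,000 bytes).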
d.imgData = ByteBuffer.allocateDirect(1 * d.inputSize * d.inputSize * 3 * numBytesPerChannel);
d.imgData.order(ByteOrder.nativeOrder());
d.intValues = new int[d.inputSize * d.inputSize];
d.tfLite.setNumThreads(NUM_THREADS);
d.outputLocations = new float[1][NUM_DETECTIONS][4];
d.outputClasses = new float[1][NUM_DETECTIONS];
d.outputScores = new float[1][NUM_DETECTIONS];
d.numDetections = new float[1];
return d;
}
private TFLiteObjectDetectionAPIModel() {}
@Override
public List<Recognition> recognizeImage(final Bitmap bitmap) {
// Log this method so that it can be analyzed with systrace.
Trace.beginSection("recognizeImage");
Trace.beginSection("preprocessBitmap");
// Preprocess the image data from 0-255 int to normalized float based
// on the provided parameters.
bitmap.getPixels(intValues, 0, bitmap.getWidth(), 0, 0, bitmap.getWidth(), bitmap.getHeight());
imgData.rewind();
for (int i = 0; i < inputSize; ++i) {
for (int j = 0; j < inputSize; ++j) {
int pixelValue = intValues[i * inputSize + j];
if (isModelQuantized) {
// Quantized model
imgData.put((byte) ((pixelValue >> 16) & 0xFF));
imgData.put((byte) ((pixelValue >> 8) & 0xFF));
imgData.put((byte) (pixelValue & 0xFF));
} else { // Float model
imgData.putFloat((((pixelValue >> 16) & 0xFF) - IMAGE_MEAN) / IMAGE_STD);
imgData.putFloat((((pixelValue >> 8) & 0xFF) - IMAGE_MEAN) / IMAGE_STD);
imgData.putFloat(((pixelValue & 0xFF) - IMAGE_MEAN) / IMAGE_STD);
}
}
}
Trace.endSection(); // preprocessBitmap
// Copy the input data into TensorFlow.
Trace.beginSection("feed");
outputLocations = new float[1][NUM_DETECTIONS][4];
outputClasses = new float[1][NUM_DETECTIONS];
outputScores = new float[1][NUM_DETECTIONS];
numDetections = new float[1];
Object[] inputArray = {imgData};
Map<Integer, Object> outputMap = new HashMap<>();
outputMap.put(0, outputLocations);
outputMap.put(1, outputClasses);
outputMap.put(2, outputScores);
outputMap.put(3, numDetections);
Trace.endSection();
// Run the inference call.
Trace.beginSection("run");
tfLite.runForMultipleInputsOutputs(inputArray, outputMap);
Trace.endSection();
// Show the best detections, after scaling them back to the input size.
final ArrayList<Recognition> recognitions = new ArrayList<>(NUM_DETECTIONS);
for (int i = 0; i < NUM_DETECTIONS; ++i) {
final RectF detection =
new RectF(
outputLocations[0][i][1] * inputSize,
outputLocations[0][i][0] * inputSize,
outputLocations[0][i][3] * inputSize,
outputLocations[0][i][2] * inputSize);
// The SSD MobileNet V1 model assumes class 0 in the label file is the background class,
// so label indices run from 1 to number_of_classes, while outputClasses indices run
// from 0 to number_of_classes - 1; labelOffset bridges the two.
int labelOffset = 1;
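// For example, a raw class index of 0 from outputClasses maps to labels.get(1), the first
// real class after the background entry in the label file.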
recognitions.add(
new Recognition(
"" + i,
labels.get((int) outputClasses[0][i] + labelOffset),
outputScores[0][i],
detection));
}
Trace.endSection(); // "recognizeImage"
return recognitions;
}
@Override
public void enableStatLogging(final boolean logStats) {
}
@Override
public String getStatString() {
return "";
}
@Override
public void close() {
}
}

View File

@ -1,78 +0,0 @@
/* Copyright 2017 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
package org.tensorflow.demo.env;
import android.content.Context;
import android.content.res.AssetManager;
import android.util.Log;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
/** Utilities for dealing with assets. */
public class AssetUtils {
private static final String TAG = AssetUtils.class.getSimpleName();
private static final int BYTE_BUF_SIZE = 2048;
/**
* Copies a file from assets.
*
* @param context application context used to discover assets.
* @param assetName the relative file name within assets.
* @param targetName the target file name, always over write the existing file.
* @throws IOException if operation fails.
*/
public static void copy(Context context, String assetName, String targetName) throws IOException {
Log.d(TAG, "creating file " + targetName + " from " + assetName);
File targetFile = null;
InputStream inputStream = null;
FileOutputStream outputStream = null;
try {
AssetManager assets = context.getAssets();
targetFile = new File(targetName);
inputStream = assets.open(assetName);
// TODO(kanlig): refactor log messages to make them more useful.
Log.d(TAG, "Creating outputstream");
outputStream = new FileOutputStream(targetFile, false /* append */);
copy(inputStream, outputStream);
} finally {
if (outputStream != null) {
outputStream.close();
}
if (inputStream != null) {
inputStream.close();
}
}
}
private static void copy(InputStream from, OutputStream to) throws IOException {
byte[] buf = new byte[BYTE_BUF_SIZE];
while (true) {
int r = from.read(buf);
if (r == -1) {
break;
}
to.write(buf, 0, r);
}
}
}

View File

@ -1,117 +0,0 @@
/* Copyright 2016 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
package org.tensorflow.demo.env;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.Paint.Align;
import android.graphics.Paint.Style;
import android.graphics.Rect;
import android.graphics.Typeface;
import java.util.Vector;
/**
* A class that encapsulates the tedious bits of rendering legible, bordered text onto a canvas.
*/
public class BorderedText {
private final Paint interiorPaint;
private final Paint exteriorPaint;
private final float textSize;
/**
* Creates a left-aligned bordered text object with a white interior, and a black exterior with
* the specified text size.
*
* @param textSize text size in pixels
*/
public BorderedText(final float textSize) {
this(Color.WHITE, Color.BLACK, textSize);
}
/**
* Create a bordered text object with the specified interior and exterior colors, text size and
* alignment.
*
* @param interiorColor the interior text color
* @param exteriorColor the exterior text color
* @param textSize text size in pixels
*/
public BorderedText(final int interiorColor, final int exteriorColor, final float textSize) {
interiorPaint = new Paint();
interiorPaint.setTextSize(textSize);
interiorPaint.setColor(interiorColor);
interiorPaint.setStyle(Style.FILL);
interiorPaint.setAntiAlias(false);
interiorPaint.setAlpha(255);
exteriorPaint = new Paint();
exteriorPaint.setTextSize(textSize);
exteriorPaint.setColor(exteriorColor);
exteriorPaint.setStyle(Style.FILL_AND_STROKE);
exteriorPaint.setStrokeWidth(textSize / 8);
exteriorPaint.setAntiAlias(false);
exteriorPaint.setAlpha(255);
this.textSize = textSize;
}
public void setTypeface(Typeface typeface) {
interiorPaint.setTypeface(typeface);
exteriorPaint.setTypeface(typeface);
}
public void drawText(final Canvas canvas, final float posX, final float posY, final String text) {
canvas.drawText(text, posX, posY, exteriorPaint);
canvas.drawText(text, posX, posY, interiorPaint);
}
public void drawLines(Canvas canvas, final float posX, final float posY, Vector<String> lines) {
int lineNum = 0;
for (final String line : lines) {
drawText(canvas, posX, posY - getTextSize() * (lines.size() - lineNum - 1), line);
++lineNum;
}
}
public void setInteriorColor(final int color) {
interiorPaint.setColor(color);
}
public void setExteriorColor(final int color) {
exteriorPaint.setColor(color);
}
public float getTextSize() {
return textSize;
}
public void setAlpha(final int alpha) {
interiorPaint.setAlpha(alpha);
exteriorPaint.setAlpha(alpha);
}
public void getTextBounds(
final String line, final int index, final int count, final Rect lineBounds) {
interiorPaint.getTextBounds(line, index, count, lineBounds);
}
public void setTextAlign(final Align align) {
interiorPaint.setTextAlign(align);
exteriorPaint.setTextAlign(align);
}
}

View File

@ -1,344 +0,0 @@
/* Copyright 2015 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
package org.tensorflow.demo.env;
import android.graphics.Bitmap;
import android.graphics.Matrix;
import android.os.Environment;
import java.io.File;
import java.io.FileOutputStream;
/**
* Utility class for manipulating images.
**/
public class ImageUtils {
@SuppressWarnings("unused")
private static final Logger LOGGER = new Logger();
static {
try {
System.loadLibrary("tensorflow_demo");
} catch (UnsatisfiedLinkError e) {
LOGGER.w("Native library not found, native RGB -> YUV conversion may be unavailable.");
}
}
/**
* Utility method to compute the allocated size in bytes of a YUV420SP image
* of the given dimensions.
*/
public static int getYUVByteSize(final int width, final int height) {
// The luminance plane requires 1 byte per pixel.
final int ySize = width * height;
// The UV plane works on 2x2 blocks, so dimensions with odd size must be rounded up.
// Each 2x2 block takes 2 bytes to encode, one each for U and V.
final int uvSize = ((width + 1) / 2) * ((height + 1) / 2) * 2;
return ySize + uvSize;
}
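// Worked example: for a 640x480 frame, ySize = 640 * 480 = 307200 bytes and
// uvSize = 320 * 240 * 2 = 153600 bytes, so getYUVByteSize(640, 480) returns 460800.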
/**
* Saves a Bitmap object to disk for analysis.
*
* @param bitmap The bitmap to save.
*/
public static void saveBitmap(final Bitmap bitmap) {
saveBitmap(bitmap, "preview.png");
}
/**
* Saves a Bitmap object to disk for analysis.
*
* @param bitmap The bitmap to save.
* @param filename The location to save the bitmap to.
*/
public static void saveBitmap(final Bitmap bitmap, final String filename) {
final String root =
Environment.getExternalStorageDirectory().getAbsolutePath() + File.separator + "tensorflow";
LOGGER.i("Saving %dx%d bitmap to %s.", bitmap.getWidth(), bitmap.getHeight(), root);
final File myDir = new File(root);
if (!myDir.mkdirs()) {
LOGGER.i("Make dir failed");
}
final String fname = filename;
final File file = new File(myDir, fname);
if (file.exists()) {
file.delete();
}
try {
final FileOutputStream out = new FileOutputStream(file);
bitmap.compress(Bitmap.CompressFormat.PNG, 99, out);
out.flush();
out.close();
} catch (final Exception e) {
LOGGER.e(e, "Exception!");
}
}
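// Illustrative usage (hypothetical call site; assumes WRITE_EXTERNAL_STORAGE is granted):
// ImageUtils.saveBitmap(rgbFrameBitmap, "detect_0001.png");
// writes the bitmap to <external storage>/tensorflow/detect_0001.png for offline inspection.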
// This value is 2 ^ 18 - 1, and is used to clamp the RGB values before their ranges
// are normalized to eight bits.
static final int kMaxChannelValue = 262143;
// Attempt the native conversion first when this flag is enabled; it is disabled by default,
// so the pure-Java fallbacks below are used.
private static boolean useNativeConversion = false;
public static void convertYUV420SPToARGB8888(
byte[] input,
int width,
int height,
int[] output) {
if (useNativeConversion) {
try {
ImageUtils.convertYUV420SPToARGB8888(input, output, width, height, false);
return;
} catch (UnsatisfiedLinkError e) {
LOGGER.w(
"Native YUV420SP -> RGB implementation not found, falling back to Java implementation");
useNativeConversion = false;
}
}
// Java implementation of YUV420SP to ARGB8888 conversion.
final int frameSize = width * height;
for (int j = 0, yp = 0; j < height; j++) {
int uvp = frameSize + (j >> 1) * width;
int u = 0;
int v = 0;
for (int i = 0; i < width; i++, yp++) {
int y = 0xff & input[yp];
if ((i & 1) == 0) {
v = 0xff & input[uvp++];
u = 0xff & input[uvp++];
}
output[yp] = YUV2RGB(y, u, v);
}
}
}
private static int YUV2RGB(int y, int u, int v) {
// Adjust and check YUV values
y = (y - 16) < 0 ? 0 : (y - 16);
u -= 128;
v -= 128;
// This is the floating point equivalent. We do the conversion in integer
// because some Android devices do not have floating point in hardware.
// nR = (int)(1.164 * nY + 2.018 * nU);
// nG = (int)(1.164 * nY - 0.813 * nV - 0.391 * nU);
// nB = (int)(1.164 * nY + 1.596 * nV);
int y1192 = 1192 * y;
int r = (y1192 + 1634 * v);
int g = (y1192 - 833 * v - 400 * u);
int b = (y1192 + 2066 * u);
// Clamp the RGB values to the range [0, kMaxChannelValue].
r = r > kMaxChannelValue ? kMaxChannelValue : (r < 0 ? 0 : r);
g = g > kMaxChannelValue ? kMaxChannelValue : (g < 0 ? 0 : g);
b = b > kMaxChannelValue ? kMaxChannelValue : (b < 0 ? 0 : b);
return 0xff000000 | ((r << 6) & 0xff0000) | ((g >> 2) & 0xff00) | ((b >> 10) & 0xff);
}
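// Illustrative sanity checks (not part of the original code): YUV2RGB(16, 128, 128) clamps to
// y = 0 and returns 0xFF000000 (black), while YUV2RGB(255, 128, 128) saturates all three
// channels at kMaxChannelValue and returns 0xFFFFFFFF (white).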
public static void convertYUV420ToARGB8888(
byte[] yData,
byte[] uData,
byte[] vData,
int width,
int height,
int yRowStride,
int uvRowStride,
int uvPixelStride,
int[] out) {
if (useNativeConversion) {
try {
convertYUV420ToARGB8888(
yData, uData, vData, out, width, height, yRowStride, uvRowStride, uvPixelStride, false);
return;
} catch (UnsatisfiedLinkError e) {
LOGGER.w(
"Native YUV420 -> RGB implementation not found, falling back to Java implementation");
useNativeConversion = false;
}
}
int yp = 0;
for (int j = 0; j < height; j++) {
int pY = yRowStride * j;
int pUV = uvRowStride * (j >> 1);
for (int i = 0; i < width; i++) {
int uv_offset = pUV + (i >> 1) * uvPixelStride;
out[yp++] = YUV2RGB(
0xff & yData[pY + i],
0xff & uData[uv_offset],
0xff & vData[uv_offset]);
}
}
}
/**
* Converts YUV420 semi-planar data to ARGB 8888 data using the supplied width and height. The
* input and output must already be allocated and non-null. For efficiency, no error checking is
* performed.
*
* @param input The array of YUV 4:2:0 input data.
* @param output A pre-allocated array for the ARGB 8:8:8:8 output data.
* @param width The width of the input image.
* @param height The height of the input image.
* @param halfSize If true, downsample to 50% in each dimension, otherwise not.
*/
private static native void convertYUV420SPToARGB8888(
byte[] input, int[] output, int width, int height, boolean halfSize);
/**
* Converts YUV420 data with separate Y, U and V planes and configurable strides to ARGB 8888
* data using the supplied width and height. The input and output must already be allocated and
* non-null. For efficiency, no error checking is performed.
*
* @param y The Y (luminance) plane data.
* @param u The U chroma plane data.
* @param v The V chroma plane data.
* @param output A pre-allocated array for the ARGB 8:8:8:8 output data.
* @param width The width of the input image.
* @param height The height of the input image.
* @param yRowStride The row stride of the Y plane, in bytes.
* @param uvRowStride The row stride of the U and V planes, in bytes.
* @param uvPixelStride The pixel stride within the U and V planes.
* @param halfSize If true, downsample to 50% in each dimension, otherwise not.
*/
private static native void convertYUV420ToARGB8888(
byte[] y,
byte[] u,
byte[] v,
int[] output,
int width,
int height,
int yRowStride,
int uvRowStride,
int uvPixelStride,
boolean halfSize);
/**
* Converts YUV420 semi-planar data to RGB 565 data using the supplied width
* and height. The input and output must already be allocated and non-null.
* For efficiency, no error checking is performed.
*
* @param input The array of YUV 4:2:0 input data.
* @param output A pre-allocated array for the RGB 5:6:5 output data.
* @param width The width of the input image.
* @param height The height of the input image.
*/
private static native void convertYUV420SPToRGB565(
byte[] input, byte[] output, int width, int height);
/**
* Converts 32-bit ARGB8888 image data to YUV420SP data. This is useful, for
* instance, in creating data to feed the classes that rely on raw camera
* preview frames.
*
* @param input An array of input pixels in ARGB8888 format.
* @param output A pre-allocated array for the YUV420SP output data.
* @param width The width of the input image.
* @param height The height of the input image.
*/
private static native void convertARGB8888ToYUV420SP(
int[] input, byte[] output, int width, int height);
/**
* Converts 16-bit RGB565 image data to YUV420SP data. This is useful, for
* instance, in creating data to feed the classes that rely on raw camera
* preview frames.
*
* @param input An array of input pixels in RGB565 format.
* @param output A pre-allocated array for the YUV420SP output data.
* @param width The width of the input image.
* @param height The height of the input image.
*/
private static native void convertRGB565ToYUV420SP(
byte[] input, byte[] output, int width, int height);
/**
* Returns a transformation matrix from one reference frame into another.
* Handles cropping (if maintaining aspect ratio is desired) and rotation.
*
* @param srcWidth Width of source frame.
* @param srcHeight Height of source frame.
* @param dstWidth Width of destination frame.
* @param dstHeight Height of destination frame.
* @param applyRotation Amount of rotation to apply from one frame to another.
* Must be a multiple of 90.
* @param maintainAspectRatio If true, will ensure that scaling in x and y remains constant,
* cropping the image if necessary.
* @return The transformation fulfilling the desired requirements.
*/
public static Matrix getTransformationMatrix(
final int srcWidth,
final int srcHeight,
final int dstWidth,
final int dstHeight,
final int applyRotation,
final boolean maintainAspectRatio) {
final Matrix matrix = new Matrix();
if (applyRotation != 0) {
if (applyRotation % 90 != 0) {
LOGGER.w("Rotation of %d % 90 != 0", applyRotation);
}
// Translate so center of image is at origin.
matrix.postTranslate(-srcWidth / 2.0f, -srcHeight / 2.0f);
// Rotate around origin.
matrix.postRotate(applyRotation);
}
// Account for the already applied rotation, if any, and then determine how
// much scaling is needed for each axis.
final boolean transpose = (Math.abs(applyRotation) + 90) % 180 == 0;
final int inWidth = transpose ? srcHeight : srcWidth;
final int inHeight = transpose ? srcWidth : srcHeight;
// Apply scaling if necessary.
if (inWidth != dstWidth || inHeight != dstHeight) {
final float scaleFactorX = dstWidth / (float) inWidth;
final float scaleFactorY = dstHeight / (float) inHeight;
if (maintainAspectRatio) {
// Scale by minimum factor so that dst is filled completely while
// maintaining the aspect ratio. Some image may fall off the edge.
final float scaleFactor = Math.max(scaleFactorX, scaleFactorY);
matrix.postScale(scaleFactor, scaleFactor);
} else {
// Scale exactly to fill dst from src.
matrix.postScale(scaleFactorX, scaleFactorY);
}
}
if (applyRotation != 0) {
// Translate back from origin centered reference to destination frame.
matrix.postTranslate(dstWidth / 2.0f, dstHeight / 2.0f);
}
return matrix;
}
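// Illustrative usage (a sketch; the sizes, frameBitmap and canvas are made up): mapping a
// 640x480 preview frame onto a 480x640 portrait canvas with the sensor rotated 90 degrees,
// while preserving aspect ratio:
// Matrix frameToCanvas = ImageUtils.getTransformationMatrix(640, 480, 480, 640, 90, true);
// canvas.drawBitmap(frameBitmap, frameToCanvas, null);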
}

View File

@ -1,190 +0,0 @@
/* Copyright 2015 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
package org.tensorflow.demo.env;
import android.util.Log;
import java.util.HashSet;
import java.util.Set;
/**
* Wrapper for the platform log function, allows convenient message prefixing and log disabling.
*/
public final class Logger {
private static final String DEFAULT_TAG = "tensorflow";
private static final int DEFAULT_MIN_LOG_LEVEL = Log.DEBUG;
// Classes to be ignored when examining the stack trace
private static final Set<String> IGNORED_CLASS_NAMES;
static {
IGNORED_CLASS_NAMES = new HashSet<String>(3);
IGNORED_CLASS_NAMES.add("dalvik.system.VMStack");
IGNORED_CLASS_NAMES.add("java.lang.Thread");
IGNORED_CLASS_NAMES.add(Logger.class.getCanonicalName());
}
private final String tag;
private final String messagePrefix;
private int minLogLevel = DEFAULT_MIN_LOG_LEVEL;
/**
* Creates a Logger using the class name as the message prefix.
*
* @param clazz the simple name of this class is used as the message prefix.
*/
public Logger(final Class<?> clazz) {
this(clazz.getSimpleName());
}
/**
* Creates a Logger using the specified message prefix.
*
* @param messagePrefix is prepended to the text of every message.
*/
public Logger(final String messagePrefix) {
this(DEFAULT_TAG, messagePrefix);
}
/**
* Creates a Logger with a custom tag and a custom message prefix. If the message prefix
* is {@code null}, the caller's class name is used as the prefix.
*
* @param tag identifies the source of a log message.
* @param messagePrefix prepended to every message if non-null. If null, the caller's class
* name is used instead.
*/
public Logger(final String tag, final String messagePrefix) {
this.tag = tag;
final String prefix = messagePrefix == null ? getCallerSimpleName() : messagePrefix;
this.messagePrefix = (prefix.length() > 0) ? prefix + ": " : prefix;
}
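// Illustrative usage (not part of the original class): new Logger("MyDetector").i("Frame %dx%d", 640, 480)
// logs "MyDetector: Frame 640x480" under the default "tensorflow" tag at INFO level.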
/**
* Creates a Logger using the caller's class name as the message prefix.
*/
public Logger() {
this(DEFAULT_TAG, null);
}
/**
* Creates a Logger using the caller's class name as the message prefix and the given minimum log level.
*/
public Logger(final int minLogLevel) {
this(DEFAULT_TAG, null);
this.minLogLevel = minLogLevel;
}
public void setMinLogLevel(final int minLogLevel) {
this.minLogLevel = minLogLevel;
}
public boolean isLoggable(final int logLevel) {
return logLevel >= minLogLevel || Log.isLoggable(tag, logLevel);
}
/**
* Return caller's simple name.
*
* Android getStackTrace() returns an array that looks like this:
* stackTrace[0]: dalvik.system.VMStack
* stackTrace[1]: java.lang.Thread
* stackTrace[2]: com.google.android.apps.unveil.env.UnveilLogger
* stackTrace[3]: com.google.android.apps.unveil.BaseApplication
*
* This function returns the simple version of the first non-filtered name.
*
* @return caller's simple name
*/
private static String getCallerSimpleName() {
// Get the current callstack so we can pull the class of the caller off of it.
final StackTraceElement[] stackTrace = Thread.currentThread().getStackTrace();
for (final StackTraceElement elem : stackTrace) {
final String className = elem.getClassName();
if (!IGNORED_CLASS_NAMES.contains(className)) {
// We're only interested in the simple name of the class, not the complete package.
final String[] classParts = className.split("\\.");
return classParts[classParts.length - 1];
}
}
return Logger.class.getSimpleName();
}
private String toMessage(final String format, final Object... args) {
return messagePrefix + (args.length > 0 ? String.format(format, args) : format);
}
public void v(final String format, final Object... args) {
if (isLoggable(Log.VERBOSE)) {
Log.v(tag, toMessage(format, args));
}
}
public void v(final Throwable t, final String format, final Object... args) {
if (isLoggable(Log.VERBOSE)) {
Log.v(tag, toMessage(format, args), t);
}
}
public void d(final String format, final Object... args) {
if (isLoggable(Log.DEBUG)) {
Log.d(tag, toMessage(format, args));
}
}
public void d(final Throwable t, final String format, final Object... args) {
if (isLoggable(Log.DEBUG)) {
Log.d(tag, toMessage(format, args), t);
}
}
public void i(final String format, final Object... args) {
if (isLoggable(Log.INFO)) {
Log.i(tag, toMessage(format, args));
}
}
public void i(final Throwable t, final String format, final Object... args) {
if (isLoggable(Log.INFO)) {
Log.i(tag, toMessage(format, args), t);
}
}
public void w(final String format, final Object... args) {
if (isLoggable(Log.WARN)) {
Log.w(tag, toMessage(format, args));
}
}
public void w(final Throwable t, final String format, final Object... args) {
if (isLoggable(Log.WARN)) {
Log.w(tag, toMessage(format, args), t);
}
}
public void e(final String format, final Object... args) {
if (isLoggable(Log.ERROR)) {
Log.e(tag, toMessage(format, args));
}
}
public void e(final Throwable t, final String format, final Object... args) {
if (isLoggable(Log.ERROR)) {
Log.e(tag, toMessage(format, args), t);
}
}
}

View File

@ -1,143 +0,0 @@
/* Copyright 2016 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
package org.tensorflow.demo.env;
import android.graphics.Bitmap;
import android.text.TextUtils;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;
/**
* Size class independent of a Camera object.
*/
public class Size implements Comparable<Size>, Serializable {
// 1.4 went out with this UID so we'll need to maintain it to preserve pending queries when
// upgrading.
public static final long serialVersionUID = 7689808733290872361L;
public final int width;
public final int height;
public Size(final int width, final int height) {
this.width = width;
this.height = height;
}
public Size(final Bitmap bmp) {
this.width = bmp.getWidth();
this.height = bmp.getHeight();
}
/**
* Rotate a size by the given number of degrees.
* @param size Size to rotate.
* @param rotation Degrees {0, 90, 180, 270} to rotate the size.
* @return Rotated size.
*/
public static Size getRotatedSize(final Size size, final int rotation) {
if (rotation % 180 != 0) {
// The phone is portrait, therefore the camera is sideways and frame should be rotated.
return new Size(size.height, size.width);
}
return size;
}
public static Size parseFromString(String sizeString) {
if (TextUtils.isEmpty(sizeString)) {
return null;
}
sizeString = sizeString.trim();
// The expected format is "<width>x<height>".
final String[] components = sizeString.split("x");
if (components.length == 2) {
try {
final int width = Integer.parseInt(components[0]);
final int height = Integer.parseInt(components[1]);
return new Size(width, height);
} catch (final NumberFormatException e) {
return null;
}
} else {
return null;
}
}
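// Illustrative examples (not part of the original code): parseFromString("640x480") yields a
// Size with width 640 and height 480, while parseFromString("640*480") and parseFromString("")
// both return null.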
public static List<Size> sizeStringToList(final String sizes) {
final List<Size> sizeList = new ArrayList<Size>();
if (sizes != null) {
final String[] pairs = sizes.split(",");
for (final String pair : pairs) {
final Size size = Size.parseFromString(pair);
if (size != null) {
sizeList.add(size);
}
}
}
return sizeList;
}
public static String sizeListToString(final List<Size> sizes) {
String sizesString = "";
if (sizes != null && sizes.size() > 0) {
sizesString = sizes.get(0).toString();
for (int i = 1; i < sizes.size(); i++) {
sizesString += "," + sizes.get(i).toString();
}
}
return sizesString;
}
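// Illustrative round trip (not part of the original code):
// sizeListToString(sizeStringToList("640x480,1280x720")) returns "640x480,1280x720".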
public final float aspectRatio() {
return (float) width / (float) height;
}
@Override
public int compareTo(final Size other) {
return width * height - other.width * other.height;
}
@Override
public boolean equals(final Object other) {
if (other == null) {
return false;
}
if (!(other instanceof Size)) {
return false;
}
final Size otherSize = (Size) other;
return (width == otherSize.width && height == otherSize.height);
}
@Override
public int hashCode() {
return width * 32713 + height;
}
@Override
public String toString() {
return dimensionsAsString(width, height);
}
public static final String dimensionsAsString(final int width, final int height) {
return width + "x" + height;
}
}

View File

@ -1,50 +0,0 @@
/* Copyright 2016 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
package org.tensorflow.demo.env;
import android.os.SystemClock;
/**
* A simple utility timer for measuring CPU time and wall-clock splits.
*/
public class SplitTimer {
private final Logger logger;
private long lastWallTime;
private long lastCpuTime;
public SplitTimer(final String name) {
logger = new Logger(name);
newSplit();
}
public void newSplit() {
lastWallTime = SystemClock.uptimeMillis();
lastCpuTime = SystemClock.currentThreadTimeMillis();
}
public void endSplit(final String splitName) {
final long currWallTime = SystemClock.uptimeMillis();
final long currCpuTime = SystemClock.currentThreadTimeMillis();
logger.i(
"%s: cpu=%dms wall=%dms",
splitName, currCpuTime - lastCpuTime, currWallTime - lastWallTime);
lastWallTime = currWallTime;
lastCpuTime = currCpuTime;
}
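// Illustrative usage (hypothetical call site):
// final SplitTimer timer = new SplitTimer("detector");
// ... run inference ...
// timer.endSplit("inference");  // logs "detector: inference: cpu=XXms wall=XXms" and resets the split.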
}

View File

@ -1,421 +0,0 @@
/* Copyright 2016 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
package org.tensorflow.demo.tracking;
import android.content.Context;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Matrix;
import android.graphics.Paint;
import android.graphics.Paint.Cap;
import android.graphics.Paint.Join;
import android.graphics.Paint.Style;
import android.graphics.RectF;
import android.text.TextUtils;
import android.util.Pair;
import android.util.TypedValue;
import android.widget.Toast;
import java.util.LinkedList;
import java.util.List;
import java.util.Queue;
import org.tensorflow.demo.Classifier.Recognition;
import org.tensorflow.demo.env.BorderedText;
import org.tensorflow.demo.env.ImageUtils;
import org.tensorflow.demo.env.Logger;
/**
* A tracker wrapping ObjectTracker that also handles non-max suppression and matching existing
* objects to new detections.
*/
public class MultiBoxTracker {
private final Logger logger = new Logger();
private static final float TEXT_SIZE_DIP = 18;
// Maximum intersection-over-union overlap allowed between a new detection and an existing box
// at detection time. Above this threshold, the lower-scored box (new or old) will be removed.
private static final float MAX_OVERLAP = 0.2f;
private static final float MIN_SIZE = 16.0f;
// Allow replacement of the tracked box with new results if
// correlation has dropped below this level.
private static final float MARGINAL_CORRELATION = 0.75f;
// Consider object to be lost if correlation falls below this threshold.
private static final float MIN_CORRELATION = 0.3f;
private static final int[] COLORS = {
Color.BLUE, Color.RED, Color.GREEN, Color.YELLOW, Color.CYAN, Color.MAGENTA, Color.WHITE,
Color.parseColor("#55FF55"), Color.parseColor("#FFA500"), Color.parseColor("#FF8888"),
Color.parseColor("#AAAAFF"), Color.parseColor("#FFFFAA"), Color.parseColor("#55AAAA"),
Color.parseColor("#AA33AA"), Color.parseColor("#0D0068")
};
private final Queue<Integer> availableColors = new LinkedList<Integer>();
public ObjectTracker objectTracker;
final List<Pair<Float, RectF>> screenRects = new LinkedList<Pair<Float, RectF>>();
private static class TrackedRecognition {
ObjectTracker.TrackedObject trackedObject;
RectF location;
float detectionConfidence;
int color;
String title;
}
private final List<TrackedRecognition> trackedObjects = new LinkedList<TrackedRecognition>();
private final Paint boxPaint = new Paint();
private final float textSizePx;
private final BorderedText borderedText;
private Matrix frameToCanvasMatrix;
private int frameWidth;
private int frameHeight;
private int sensorOrientation;
private Context context;
public MultiBoxTracker(final Context context) {
this.context = context;
for (final int color : COLORS) {
availableColors.add(color);
}
boxPaint.setColor(Color.RED);
boxPaint.setStyle(Style.STROKE);
boxPaint.setStrokeWidth(12.0f);
boxPaint.setStrokeCap(Cap.ROUND);
boxPaint.setStrokeJoin(Join.ROUND);
boxPaint.setStrokeMiter(100);
textSizePx =
TypedValue.applyDimension(
TypedValue.COMPLEX_UNIT_DIP, TEXT_SIZE_DIP, context.getResources().getDisplayMetrics());
borderedText = new BorderedText(textSizePx);
}
private Matrix getFrameToCanvasMatrix() {
return frameToCanvasMatrix;
}
public synchronized void drawDebug(final Canvas canvas) {
final Paint textPaint = new Paint();
textPaint.setColor(Color.WHITE);
textPaint.setTextSize(60.0f);
final Paint boxPaint = new Paint();
boxPaint.setColor(Color.RED);
boxPaint.setAlpha(200);
boxPaint.setStyle(Style.STROKE);
for (final Pair<Float, RectF> detection : screenRects) {
final RectF rect = detection.second;
canvas.drawRect(rect, boxPaint);
canvas.drawText("" + detection.first, rect.left, rect.top, textPaint);
borderedText.drawText(canvas, rect.centerX(), rect.centerY(), "" + detection.first);
}
if (objectTracker == null) {
return;
}
// Draw correlations.
for (final TrackedRecognition recognition : trackedObjects) {
final ObjectTracker.TrackedObject trackedObject = recognition.trackedObject;
final RectF trackedPos = trackedObject.getTrackedPositionInPreviewFrame();
if (getFrameToCanvasMatrix().mapRect(trackedPos)) {
final String labelString = String.format("%.2f", trackedObject.getCurrentCorrelation());
borderedText.drawText(canvas, trackedPos.right, trackedPos.bottom, labelString);
}
}
final Matrix matrix = getFrameToCanvasMatrix();
objectTracker.drawDebug(canvas, matrix);
}
public synchronized void trackResults(
final List<Recognition> results, final byte[] frame, final long timestamp) {
logger.i("Processing %d results from %d", results.size(), timestamp);
processResults(timestamp, results, frame);
}
public synchronized void draw(final Canvas canvas) {
final boolean rotated = sensorOrientation % 180 == 90;
final float multiplier =
Math.min(canvas.getHeight() / (float) (rotated ? frameWidth : frameHeight),
canvas.getWidth() / (float) (rotated ? frameHeight : frameWidth));
frameToCanvasMatrix =
ImageUtils.getTransformationMatrix(
frameWidth,
frameHeight,
(int) (multiplier * (rotated ? frameHeight : frameWidth)),
(int) (multiplier * (rotated ? frameWidth : frameHeight)),
sensorOrientation,
false);
for (final TrackedRecognition recognition : trackedObjects) {
final RectF trackedPos =
(objectTracker != null)
? recognition.trackedObject.getTrackedPositionInPreviewFrame()
: new RectF(recognition.location);
getFrameToCanvasMatrix().mapRect(trackedPos);
boxPaint.setColor(recognition.color);
final float cornerSize = Math.min(trackedPos.width(), trackedPos.height()) / 8.0f;
canvas.drawRoundRect(trackedPos, cornerSize, cornerSize, boxPaint);
final String labelString =
!TextUtils.isEmpty(recognition.title)
? String.format("%s %.2f", recognition.title, recognition.detectionConfidence)
: String.format("%.2f", recognition.detectionConfidence);
borderedText.drawText(canvas, trackedPos.left + cornerSize, trackedPos.bottom, labelString);
}
}
private boolean initialized = false;
public synchronized void onFrame(
final int w,
final int h,
final int rowStride,
final int sensorOrientation,
final byte[] frame,
final long timestamp) {
if (objectTracker == null && !initialized) {
ObjectTracker.clearInstance();
logger.i("Initializing ObjectTracker: %dx%d", w, h);
objectTracker = ObjectTracker.getInstance(w, h, rowStride, true);
frameWidth = w;
frameHeight = h;
this.sensorOrientation = sensorOrientation;
initialized = true;
if (objectTracker == null) {
String message =
"Object tracking support not found. "
+ "See tensorflow/examples/android/README.md for details.";
Toast.makeText(context, message, Toast.LENGTH_LONG).show();
logger.e(message);
}
}
if (objectTracker == null) {
return;
}
objectTracker.nextFrame(frame, null, timestamp, null, true);
// Clean up any objects not worth tracking any more.
final LinkedList<TrackedRecognition> copyList =
new LinkedList<TrackedRecognition>(trackedObjects);
for (final TrackedRecognition recognition : copyList) {
final ObjectTracker.TrackedObject trackedObject = recognition.trackedObject;
final float correlation = trackedObject.getCurrentCorrelation();
if (correlation < MIN_CORRELATION) {
logger.v("Removing tracked object %s because NCC is %.2f", trackedObject, correlation);
trackedObject.stopTracking();
trackedObjects.remove(recognition);
availableColors.add(recognition.color);
}
}
}
private void processResults(
final long timestamp, final List<Recognition> results, final byte[] originalFrame) {
final List<Pair<Float, Recognition>> rectsToTrack = new LinkedList<Pair<Float, Recognition>>();
screenRects.clear();
final Matrix rgbFrameToScreen = new Matrix(getFrameToCanvasMatrix());
for (final Recognition result : results) {
if (result.getLocation() == null) {
continue;
}
final RectF detectionFrameRect = new RectF(result.getLocation());
final RectF detectionScreenRect = new RectF();
rgbFrameToScreen.mapRect(detectionScreenRect, detectionFrameRect);
logger.v(
"Result! Frame: " + result.getLocation() + " mapped to screen:" + detectionScreenRect);
screenRects.add(new Pair<Float, RectF>(result.getConfidence(), detectionScreenRect));
if (detectionFrameRect.width() < MIN_SIZE || detectionFrameRect.height() < MIN_SIZE) {
logger.w("Degenerate rectangle! " + detectionFrameRect);
continue;
}
rectsToTrack.add(new Pair<Float, Recognition>(result.getConfidence(), result));
}
if (rectsToTrack.isEmpty()) {
logger.v("Nothing to track, aborting.");
return;
}
if (objectTracker == null) {
trackedObjects.clear();
for (final Pair<Float, Recognition> potential : rectsToTrack) {
final TrackedRecognition trackedRecognition = new TrackedRecognition();
trackedRecognition.detectionConfidence = potential.first;
trackedRecognition.location = new RectF(potential.second.getLocation());
trackedRecognition.trackedObject = null;
trackedRecognition.title = potential.second.getTitle();
trackedRecognition.color = COLORS[trackedObjects.size()];
trackedObjects.add(trackedRecognition);
if (trackedObjects.size() >= COLORS.length) {
break;
}
}
return;
}
logger.i("%d rects to track", rectsToTrack.size());
for (final Pair<Float, Recognition> potential : rectsToTrack) {
handleDetection(originalFrame, timestamp, potential);
}
}
private void handleDetection(
final byte[] frameCopy, final long timestamp, final Pair<Float, Recognition> potential) {
final ObjectTracker.TrackedObject potentialObject =
objectTracker.trackObject(potential.second.getLocation(), timestamp, frameCopy);
final float potentialCorrelation = potentialObject.getCurrentCorrelation();
logger.v(
"Tracked object went from %s to %s with correlation %.2f",
potential.second, potentialObject.getTrackedPositionInPreviewFrame(), potentialCorrelation);
if (potentialCorrelation < MARGINAL_CORRELATION) {
logger.v("Correlation too low to begin tracking %s.", potentialObject);
potentialObject.stopTracking();
return;
}
final List<TrackedRecognition> removeList = new LinkedList<TrackedRecognition>();
float maxIntersect = 0.0f;
// This is the current tracked object whose color we will take. If left null we'll take the
// first one from the color queue.
TrackedRecognition recogToReplace = null;
// Look for intersections that will be overridden by this object or an intersection that would
// prevent this one from being placed.
for (final TrackedRecognition trackedRecognition : trackedObjects) {
final RectF a = trackedRecognition.trackedObject.getTrackedPositionInPreviewFrame();
final RectF b = potentialObject.getTrackedPositionInPreviewFrame();
final RectF intersection = new RectF();
final boolean intersects = intersection.setIntersect(a, b);
final float intersectArea = intersection.width() * intersection.height();
final float totalArea = a.width() * a.height() + b.width() * b.height() - intersectArea;
final float intersectOverUnion = intersectArea / totalArea;
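// Illustrative numbers (not in the original comment): two 100x100 boxes that overlap on a
// 50x100 strip give intersectArea = 5000 and totalArea = 15000, so intersectOverUnion is
// roughly 0.33, which exceeds MAX_OVERLAP (0.2) and triggers the replacement logic below.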
// If there is an intersection with this currently tracked box above the maximum overlap
// percentage allowed, either the new recognition needs to be dismissed or the old
// recognition needs to be removed and possibly replaced with the new one.
if (intersects && intersectOverUnion > MAX_OVERLAP) {
if (potential.first < trackedRecognition.detectionConfidence
&& trackedRecognition.trackedObject.getCurrentCorrelation() > MARGINAL_CORRELATION) {
// If track for the existing object is still going strong and the detection score was
// good, reject this new object.
potentialObject.stopTracking();
return;
} else {
removeList.add(trackedRecognition);
// Let the previously tracked object with max intersection amount donate its color to
// the new object.
if (intersectOverUnion > maxIntersect) {
maxIntersect = intersectOverUnion;
recogToReplace = trackedRecognition;
}
}
}
}
// If we're already tracking the max object and no intersections were found to bump off,
// pick the worst current tracked object to remove, if it's also worse than this candidate
// object.
if (availableColors.isEmpty() && removeList.isEmpty()) {
for (final TrackedRecognition candidate : trackedObjects) {
if (candidate.detectionConfidence < potential.first) {
if (recogToReplace == null
|| candidate.detectionConfidence < recogToReplace.detectionConfidence) {
// Save it so that we use this color for the new object.
recogToReplace = candidate;
}
}
}
if (recogToReplace != null) {
logger.v("Found non-intersecting object to remove.");
removeList.add(recogToReplace);
} else {
logger.v("No non-intersecting object found to remove");
}
}
// Remove everything that got intersected.
for (final TrackedRecognition trackedRecognition : removeList) {
logger.v(
"Removing tracked object %s with detection confidence %.2f, correlation %.2f",
trackedRecognition.trackedObject,
trackedRecognition.detectionConfidence,
trackedRecognition.trackedObject.getCurrentCorrelation());
trackedRecognition.trackedObject.stopTracking();
trackedObjects.remove(trackedRecognition);
if (trackedRecognition != recogToReplace) {
availableColors.add(trackedRecognition.color);
}
}
if (recogToReplace == null && availableColors.isEmpty()) {
logger.e("No room to track this object, aborting.");
potentialObject.stopTracking();
return;
}
// Finally safe to say we can track this object.
logger.v(
"Tracking object %s (%s) with detection confidence %.2f at position %s",
potentialObject,
potential.second.getTitle(),
potential.first,
potential.second.getLocation());
final TrackedRecognition trackedRecognition = new TrackedRecognition();
trackedRecognition.detectionConfidence = potential.first;
trackedRecognition.trackedObject = potentialObject;
trackedRecognition.title = potential.second.getTitle();
// Use the color from a replaced object before taking one from the color queue.
trackedRecognition.color =
recogToReplace != null ? recogToReplace.color : availableColors.poll();
trackedObjects.add(trackedRecognition);
}
}

View File

@ -1,661 +0,0 @@
/* Copyright 2016 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
package org.tensorflow.demo.tracking;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Matrix;
import android.graphics.Paint;
import android.graphics.PointF;
import android.graphics.RectF;
import android.graphics.Typeface;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Vector;
import javax.microedition.khronos.opengles.GL10;
import org.tensorflow.demo.env.Logger;
import org.tensorflow.demo.env.Size;
/**
* True object detector/tracker class that tracks objects across consecutive preview frames.
* It provides a simplified Java interface to the analogous native object defined by
* jni/client_vision/tracking/object_tracker.*.
*
* Currently, the ObjectTracker is a singleton due to native code restrictions, and so must
* be allocated by ObjectTracker.getInstance(). In addition, release() should be called
* as soon as the ObjectTracker is no longer needed, and before a new one is created.
*
* nextFrame() should be called as new frames become available, preferably as often as possible.
*
* After allocation, new TrackedObjects may be instantiated via trackObject(). TrackedObjects
* are associated with the ObjectTracker that created them, and are only valid while that
* ObjectTracker still exists.
*/
public class ObjectTracker {
private static final Logger LOGGER = new Logger();
private static boolean libraryFound = false;
static {
try {
System.loadLibrary("tensorflow_demo");
libraryFound = true;
} catch (UnsatisfiedLinkError e) {
LOGGER.e("libtensorflow_demo.so not found, tracking unavailable");
}
}
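// Illustrative lifecycle (a sketch; the sizes, frameBytes, initialBoxRectF and timestampNs are
// made up, the method names match this class):
// ObjectTracker tracker = ObjectTracker.getInstance(640, 480, 640, true);
// tracker.nextFrame(frameBytes, null, timestampNs, null, false);
// ObjectTracker.TrackedObject obj = tracker.trackObject(initialBoxRectF, timestampNs, frameBytes);
// RectF currentBox = obj.getTrackedPositionInPreviewFrame();
// obj.stopTracking();
// tracker.release();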
private static final boolean DRAW_TEXT = false;
/**
* How many history points to keep track of and draw in the red history line.
*/
private static final int MAX_DEBUG_HISTORY_SIZE = 30;
/**
* How many frames of optical flow deltas to record.
* TODO(andrewharp): Push this down to the native level so it can be polled
* efficiently into an array for upload, instead of keeping a duplicate
* copy in Java.
*/
private static final int MAX_FRAME_HISTORY_SIZE = 200;
private static final int DOWNSAMPLE_FACTOR = 2;
private final byte[] downsampledFrame;
protected static ObjectTracker instance;
private final Map<String, TrackedObject> trackedObjects;
private long lastTimestamp;
private FrameChange lastKeypoints;
private final Vector<PointF> debugHistory;
private final LinkedList<TimestampedDeltas> timestampedDeltas;
protected final int frameWidth;
protected final int frameHeight;
private final int rowStride;
protected final boolean alwaysTrack;
private static class TimestampedDeltas {
final long timestamp;
final byte[] deltas;
public TimestampedDeltas(final long timestamp, final byte[] deltas) {
this.timestamp = timestamp;
this.deltas = deltas;
}
}
/**
* A simple class that records keypoint information, including the
* local location, score and type. It is used when computing a
* FrameChange.
*/
public static class Keypoint {
public final float x;
public final float y;
public final float score;
public final int type;
public Keypoint(final float x, final float y) {
this.x = x;
this.y = y;
this.score = 0;
this.type = -1;
}
public Keypoint(final float x, final float y, final float score, final int type) {
this.x = x;
this.y = y;
this.score = score;
this.type = type;
}
Keypoint delta(final Keypoint other) {
return new Keypoint(this.x - other.x, this.y - other.y);
}
}
/**
* A simple class that computes the delta between two Keypoints.
* It is used when calculating the frame translation delta
* for optical flow.
*/
public static class PointChange {
public final Keypoint keypointA;
public final Keypoint keypointB;
Keypoint pointDelta;
private final boolean wasFound;
public PointChange(final float x1, final float y1,
final float x2, final float y2,
final float score, final int type,
final boolean wasFound) {
this.wasFound = wasFound;
keypointA = new Keypoint(x1, y1, score, type);
keypointB = new Keypoint(x2, y2);
}
public Keypoint getDelta() {
if (pointDelta == null) {
pointDelta = keypointB.delta(keypointA);
}
return pointDelta;
}
}
/** A class that records a timestamped frame translation delta for optical flow. */
public static class FrameChange {
public static final int KEYPOINT_STEP = 7;
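// Packed layout per keypoint, as consumed by the constructor below (an inference from that
// loop, not original documentation): [x1, y1, foundFlag, x2, y2, score, type], with the
// coordinates scaled back up by DOWNSAMPLE_FACTOR.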
public final Vector<PointChange> pointDeltas;
private final float minScore;
private final float maxScore;
public FrameChange(final float[] framePoints) {
float minScore = 100.0f;
float maxScore = -100.0f;
pointDeltas = new Vector<PointChange>(framePoints.length / KEYPOINT_STEP);
for (int i = 0; i < framePoints.length; i += KEYPOINT_STEP) {
final float x1 = framePoints[i + 0] * DOWNSAMPLE_FACTOR;
final float y1 = framePoints[i + 1] * DOWNSAMPLE_FACTOR;
final boolean wasFound = framePoints[i + 2] > 0.0f;
final float x2 = framePoints[i + 3] * DOWNSAMPLE_FACTOR;
final float y2 = framePoints[i + 4] * DOWNSAMPLE_FACTOR;
final float score = framePoints[i + 5];
final int type = (int) framePoints[i + 6];
minScore = Math.min(minScore, score);
maxScore = Math.max(maxScore, score);
pointDeltas.add(new PointChange(x1, y1, x2, y2, score, type, wasFound));
}
this.minScore = minScore;
this.maxScore = maxScore;
}
}
public static synchronized ObjectTracker getInstance(
final int frameWidth, final int frameHeight, final int rowStride, final boolean alwaysTrack) {
if (!libraryFound) {
LOGGER.e(
"Native object tracking support not found. "
+ "See tensorflow/examples/android/README.md for details.");
return null;
}
if (instance == null) {
instance = new ObjectTracker(frameWidth, frameHeight, rowStride, alwaysTrack);
instance.init();
} else {
throw new RuntimeException(
"Tried to create a new objectracker before releasing the old one!");
}
return instance;
}
public static synchronized void clearInstance() {
if (instance != null) {
instance.release();
}
}
protected ObjectTracker(
final int frameWidth, final int frameHeight, final int rowStride, final boolean alwaysTrack) {
this.frameWidth = frameWidth;
this.frameHeight = frameHeight;
this.rowStride = rowStride;
this.alwaysTrack = alwaysTrack;
this.timestampedDeltas = new LinkedList<TimestampedDeltas>();
trackedObjects = new HashMap<String, TrackedObject>();
debugHistory = new Vector<PointF>(MAX_DEBUG_HISTORY_SIZE);
downsampledFrame =
new byte
[(frameWidth + DOWNSAMPLE_FACTOR - 1)
/ DOWNSAMPLE_FACTOR
* (frameHeight + DOWNSAMPLE_FACTOR - 1)
/ DOWNSAMPLE_FACTOR];
}
protected void init() {
// The native tracker never sees the full frame, so pre-scale dimensions
// by the downsample factor.
initNative(frameWidth / DOWNSAMPLE_FACTOR, frameHeight / DOWNSAMPLE_FACTOR, alwaysTrack);
}
private final float[] matrixValues = new float[9];
private long downsampledTimestamp;
@SuppressWarnings("unused")
public synchronized void drawOverlay(final GL10 gl,
final Size cameraViewSize, final Matrix matrix) {
final Matrix tempMatrix = new Matrix(matrix);
tempMatrix.preScale(DOWNSAMPLE_FACTOR, DOWNSAMPLE_FACTOR);
tempMatrix.getValues(matrixValues);
drawNative(cameraViewSize.width, cameraViewSize.height, matrixValues);
}
public synchronized void nextFrame(
final byte[] frameData, final byte[] uvData,
final long timestamp, final float[] transformationMatrix,
final boolean updateDebugInfo) {
if (downsampledTimestamp != timestamp) {
ObjectTracker.downsampleImageNative(
frameWidth, frameHeight, rowStride, frameData, DOWNSAMPLE_FACTOR, downsampledFrame);
downsampledTimestamp = timestamp;
}
// Do Lucas Kanade using the fullframe initializer.
nextFrameNative(downsampledFrame, uvData, timestamp, transformationMatrix);
timestampedDeltas.add(new TimestampedDeltas(timestamp, getKeypointsPacked(DOWNSAMPLE_FACTOR)));
while (timestampedDeltas.size() > MAX_FRAME_HISTORY_SIZE) {
timestampedDeltas.removeFirst();
}
for (final TrackedObject trackedObject : trackedObjects.values()) {
trackedObject.updateTrackedPosition();
}
if (updateDebugInfo) {
updateDebugHistory();
}
lastTimestamp = timestamp;
}
public synchronized void release() {
releaseMemoryNative();
synchronized (ObjectTracker.class) {
instance = null;
}
}
private void drawHistoryDebug(final Canvas canvas) {
drawHistoryPoint(
canvas, frameWidth * DOWNSAMPLE_FACTOR / 2, frameHeight * DOWNSAMPLE_FACTOR / 2);
}
private void drawHistoryPoint(final Canvas canvas, final float startX, final float startY) {
final Paint p = new Paint();
p.setAntiAlias(false);
p.setTypeface(Typeface.SERIF);
p.setColor(Color.RED);
p.setStrokeWidth(2.0f);
// Draw the center circle.
p.setColor(Color.GREEN);
canvas.drawCircle(startX, startY, 3.0f, p);
p.setColor(Color.RED);
// Iterate through in backwards order.
synchronized (debugHistory) {
final int numPoints = debugHistory.size();
float lastX = startX;
float lastY = startY;
for (int keypointNum = 0; keypointNum < numPoints; ++keypointNum) {
final PointF delta = debugHistory.get(numPoints - keypointNum - 1);
final float newX = lastX + delta.x;
final float newY = lastY + delta.y;
canvas.drawLine(lastX, lastY, newX, newY, p);
lastX = newX;
lastY = newY;
}
}
}
private static int floatToChar(final float value) {
return Math.max(0, Math.min((int) (value * 255.999f), 255));
}
private void drawKeypointsDebug(final Canvas canvas) {
final Paint p = new Paint();
if (lastKeypoints == null) {
return;
}
final int keypointSize = 3;
final float minScore = lastKeypoints.minScore;
final float maxScore = lastKeypoints.maxScore;
for (final PointChange keypoint : lastKeypoints.pointDeltas) {
if (keypoint.wasFound) {
final int r =
floatToChar((keypoint.keypointA.score - minScore) / (maxScore - minScore));
final int b =
floatToChar(1.0f - (keypoint.keypointA.score - minScore) / (maxScore - minScore));
final int color = 0xFF000000 | (r << 16) | b;
p.setColor(color);
final float[] screenPoints = {keypoint.keypointA.x, keypoint.keypointA.y,
keypoint.keypointB.x, keypoint.keypointB.y};
canvas.drawRect(screenPoints[2] - keypointSize,
screenPoints[3] - keypointSize,
screenPoints[2] + keypointSize,
screenPoints[3] + keypointSize, p);
p.setColor(Color.CYAN);
canvas.drawLine(screenPoints[2], screenPoints[3],
screenPoints[0], screenPoints[1], p);
if (DRAW_TEXT) {
p.setColor(Color.WHITE);
canvas.drawText(keypoint.keypointA.type + ": " + keypoint.keypointA.score,
keypoint.keypointA.x, keypoint.keypointA.y, p);
}
} else {
p.setColor(Color.YELLOW);
final float[] screenPoint = {keypoint.keypointA.x, keypoint.keypointA.y};
canvas.drawCircle(screenPoint[0], screenPoint[1], 5.0f, p);
}
}
}
private synchronized PointF getAccumulatedDelta(final long timestamp, final float positionX,
final float positionY, final float radius) {
final RectF currPosition = getCurrentPosition(timestamp,
new RectF(positionX - radius, positionY - radius, positionX + radius, positionY + radius));
return new PointF(currPosition.centerX() - positionX, currPosition.centerY() - positionY);
}
private synchronized RectF getCurrentPosition(final long timestamp, final RectF
oldPosition) {
final RectF downscaledFrameRect = downscaleRect(oldPosition);
final float[] delta = new float[4];
getCurrentPositionNative(timestamp, downscaledFrameRect.left, downscaledFrameRect.top,
downscaledFrameRect.right, downscaledFrameRect.bottom, delta);
final RectF newPosition = new RectF(delta[0], delta[1], delta[2], delta[3]);
return upscaleRect(newPosition);
}
private void updateDebugHistory() {
lastKeypoints = new FrameChange(getKeypointsNative(false));
if (lastTimestamp == 0) {
return;
}
final PointF delta =
getAccumulatedDelta(
lastTimestamp, frameWidth / DOWNSAMPLE_FACTOR, frameHeight / DOWNSAMPLE_FACTOR, 100);
synchronized (debugHistory) {
debugHistory.add(delta);
while (debugHistory.size() > MAX_DEBUG_HISTORY_SIZE) {
debugHistory.remove(0);
}
}
}
public synchronized void drawDebug(final Canvas canvas, final Matrix frameToCanvas) {
canvas.save();
canvas.setMatrix(frameToCanvas);
drawHistoryDebug(canvas);
drawKeypointsDebug(canvas);
canvas.restore();
}
public Vector<String> getDebugText() {
final Vector<String> lines = new Vector<String>();
if (lastKeypoints != null) {
lines.add("Num keypoints " + lastKeypoints.pointDeltas.size());
lines.add("Min score: " + lastKeypoints.minScore);
lines.add("Max score: " + lastKeypoints.maxScore);
}
return lines;
}
public synchronized List<byte[]> pollAccumulatedFlowData(final long endFrameTime) {
final List<byte[]> frameDeltas = new ArrayList<byte[]>();
while (timestampedDeltas.size() > 0) {
final TimestampedDeltas currentDeltas = timestampedDeltas.peek();
if (currentDeltas.timestamp <= endFrameTime) {
frameDeltas.add(currentDeltas.deltas);
timestampedDeltas.removeFirst();
} else {
break;
}
}
return frameDeltas;
}
private RectF downscaleRect(final RectF fullFrameRect) {
return new RectF(
fullFrameRect.left / DOWNSAMPLE_FACTOR,
fullFrameRect.top / DOWNSAMPLE_FACTOR,
fullFrameRect.right / DOWNSAMPLE_FACTOR,
fullFrameRect.bottom / DOWNSAMPLE_FACTOR);
}
private RectF upscaleRect(final RectF downsampledFrameRect) {
return new RectF(
downsampledFrameRect.left * DOWNSAMPLE_FACTOR,
downsampledFrameRect.top * DOWNSAMPLE_FACTOR,
downsampledFrameRect.right * DOWNSAMPLE_FACTOR,
downsampledFrameRect.bottom * DOWNSAMPLE_FACTOR);
}
/**
* A TrackedObject represents a native TrackedObject, and provides access to the
* relevant native tracking information available after every frame update. They may
* be safely passed around and accessed externally, but will become invalid after
* stopTracking() is called or the ObjectTracker that created them is deactivated.
*
* @author andrewharp@google.com (Andrew Harp)
*/
public class TrackedObject {
private final String id;
private long lastExternalPositionTime;
private RectF lastTrackedPosition;
private boolean visibleInLastFrame;
private boolean isDead;
TrackedObject(final RectF position, final long timestamp, final byte[] data) {
isDead = false;
id = Integer.toString(this.hashCode());
lastExternalPositionTime = timestamp;
synchronized (ObjectTracker.this) {
registerInitialAppearance(position, data);
setPreviousPosition(position, timestamp);
trackedObjects.put(id, this);
}
}
public void stopTracking() {
checkValidObject();
synchronized (ObjectTracker.this) {
isDead = true;
forgetNative(id);
trackedObjects.remove(id);
}
}
public float getCurrentCorrelation() {
checkValidObject();
return ObjectTracker.this.getCurrentCorrelation(id);
}
void registerInitialAppearance(final RectF position, final byte[] data) {
final RectF externalPosition = downscaleRect(position);
registerNewObjectWithAppearanceNative(id,
externalPosition.left, externalPosition.top,
externalPosition.right, externalPosition.bottom,
data);
}
synchronized void setPreviousPosition(final RectF position, final long timestamp) {
checkValidObject();
synchronized (ObjectTracker.this) {
if (lastExternalPositionTime > timestamp) {
LOGGER.w("Tried to use older position time!");
return;
}
final RectF externalPosition = downscaleRect(position);
lastExternalPositionTime = timestamp;
setPreviousPositionNative(id,
externalPosition.left, externalPosition.top,
externalPosition.right, externalPosition.bottom,
lastExternalPositionTime);
updateTrackedPosition();
}
}
void setCurrentPosition(final RectF position) {
checkValidObject();
final RectF downsampledPosition = downscaleRect(position);
synchronized (ObjectTracker.this) {
setCurrentPositionNative(id,
downsampledPosition.left, downsampledPosition.top,
downsampledPosition.right, downsampledPosition.bottom);
}
}
private synchronized void updateTrackedPosition() {
checkValidObject();
final float[] delta = new float[4];
getTrackedPositionNative(id, delta);
lastTrackedPosition = new RectF(delta[0], delta[1], delta[2], delta[3]);
visibleInLastFrame = isObjectVisible(id);
}
public synchronized RectF getTrackedPositionInPreviewFrame() {
checkValidObject();
if (lastTrackedPosition == null) {
return null;
}
return upscaleRect(lastTrackedPosition);
}
synchronized long getLastExternalPositionTime() {
return lastExternalPositionTime;
}
public synchronized boolean visibleInLastPreviewFrame() {
return visibleInLastFrame;
}
private void checkValidObject() {
if (isDead) {
throw new RuntimeException("TrackedObject already removed from tracking!");
} else if (ObjectTracker.this != instance) {
throw new RuntimeException("TrackedObject created with another ObjectTracker!");
}
}
}
public synchronized TrackedObject trackObject(
final RectF position, final long timestamp, final byte[] frameData) {
if (downsampledTimestamp != timestamp) {
ObjectTracker.downsampleImageNative(
frameWidth, frameHeight, rowStride, frameData, DOWNSAMPLE_FACTOR, downsampledFrame);
downsampledTimestamp = timestamp;
}
return new TrackedObject(position, timestamp, downsampledFrame);
}
public synchronized TrackedObject trackObject(final RectF position, final byte[] frameData) {
return new TrackedObject(position, lastTimestamp, frameData);
}
/** ********************* NATIVE CODE ************************************ */
/** This will contain an opaque pointer to the native ObjectTracker */
private long nativeObjectTracker;
private native void initNative(int imageWidth, int imageHeight, boolean alwaysTrack);
protected native void registerNewObjectWithAppearanceNative(
String objectId, float x1, float y1, float x2, float y2, byte[] data);
protected native void setPreviousPositionNative(
String objectId, float x1, float y1, float x2, float y2, long timestamp);
protected native void setCurrentPositionNative(
String objectId, float x1, float y1, float x2, float y2);
protected native void forgetNative(String key);
protected native String getModelIdNative(String key);
protected native boolean haveObject(String key);
protected native boolean isObjectVisible(String key);
protected native float getCurrentCorrelation(String key);
protected native float getMatchScore(String key);
protected native void getTrackedPositionNative(String key, float[] points);
protected native void nextFrameNative(
byte[] frameData, byte[] uvData, long timestamp, float[] frameAlignMatrix);
protected native void releaseMemoryNative();
protected native void getCurrentPositionNative(long timestamp,
final float positionX1, final float positionY1,
final float positionX2, final float positionY2,
final float[] delta);
protected native byte[] getKeypointsPacked(float scaleFactor);
protected native float[] getKeypointsNative(boolean onlyReturnCorrespondingKeypoints);
protected native void drawNative(int viewWidth, int viewHeight, float[] frameToCanvas);
protected static native void downsampleImageNative(
int width, int height, int rowStride, byte[] input, int factor, byte[] output);
}

View File

@ -1,30 +0,0 @@
<?xml version="1.0" encoding="utf-8"?><!--
Copyright 2017 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<set xmlns:android="http://schemas.android.com/apk/res/android"
android:ordering="sequentially">
<objectAnimator
android:propertyName="backgroundColor"
android:duration="375"
android:valueFrom="0x00b3ccff"
android:valueTo="0xffb3ccff"
android:valueType="colorType"/>
<objectAnimator
android:propertyName="backgroundColor"
android:duration="375"
android:valueFrom="0xffb3ccff"
android:valueTo="0x00b3ccff"
android:valueType="colorType"/>
</set>

Binary file not shown (196 B).

View File

@ -1,19 +0,0 @@
<?xml version="1.0" encoding="utf-8"?><!--
Copyright 2017 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<shape xmlns:android="http://schemas.android.com/apk/res/android" android:shape="rectangle" >
<solid android:color="#00000000" />
<stroke android:width="1dip" android:color="#cccccc" />
</shape>

View File

@ -1,22 +0,0 @@
<?xml version="1.0" encoding="utf-8"?><!--
Copyright 2016 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<FrameLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:tools="http://schemas.android.com/tools"
android:id="@+id/container"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:background="#000"
tools:context="org.tensorflow.demo.CameraActivity" />

View File

@ -1,55 +0,0 @@
<?xml version="1.0" encoding="utf-8"?><!--
Copyright 2017 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<FrameLayout
xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:app="http://schemas.android.com/apk/res-auto"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent"
tools:context="org.tensorflow.demo.SpeechActivity">
<TextView
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="Say one of the words below!"
android:id="@+id/textView"
android:textAlignment="center"
android:layout_gravity="top"
android:textSize="24dp"
android:layout_marginTop="10dp"
android:layout_marginLeft="10dp"
/>
<ListView
android:id="@+id/list_view"
android:layout_width="240dp"
android:layout_height="wrap_content"
android:background="@drawable/border"
android:layout_gravity="top|center_horizontal"
android:textAlignment="center"
android:layout_marginTop="100dp"
/>
<Button
android:id="@+id/quit"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="Quit"
android:layout_gravity="bottom|center_horizontal"
android:layout_marginBottom="10dp"
/>
</FrameLayout>


@ -1,38 +0,0 @@
<?xml version="1.0" encoding="utf-8"?><!--
Copyright 2016 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
android:layout_width="match_parent"
android:layout_height="match_parent">
<org.tensorflow.demo.AutoFitTextureView
android:id="@+id/texture"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_alignParentBottom="true" />
<org.tensorflow.demo.RecognitionScoreView
android:id="@+id/results"
android:layout_width="match_parent"
android:layout_height="112dp"
android:layout_alignParentTop="true" />
<org.tensorflow.demo.OverlayView
android:id="@+id/debug_overlay"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:layout_alignParentBottom="true" />
</RelativeLayout>


@ -1,51 +0,0 @@
<?xml version="1.0" encoding="utf-8"?><!--
Copyright 2016 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
android:orientation="vertical"
android:layout_width="match_parent"
android:layout_height="match_parent">
<org.tensorflow.demo.AutoFitTextureView
android:id="@+id/texture"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_alignParentTop="true" />
<RelativeLayout
android:id="@+id/black"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:background="#FF000000" />
<GridView
android:id="@+id/grid_layout"
android:numColumns="7"
android:stretchMode="columnWidth"
android:layout_alignParentBottom="true"
android:layout_width="match_parent"
android:layout_height="wrap_content" />
<org.tensorflow.demo.OverlayView
android:id="@+id/overlay"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:layout_alignParentTop="true" />
<org.tensorflow.demo.OverlayView
android:id="@+id/debug_overlay"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:layout_alignParentTop="true" />
</RelativeLayout>


@ -1,34 +0,0 @@
<?xml version="1.0" encoding="utf-8"?><!--
Copyright 2016 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<FrameLayout xmlns:android="http://schemas.android.com/apk/res/android"
android:layout_width="match_parent"
android:layout_height="match_parent">
<org.tensorflow.demo.AutoFitTextureView
android:id="@+id/texture"
android:layout_width="wrap_content"
android:layout_height="wrap_content"/>
<org.tensorflow.demo.OverlayView
android:id="@+id/tracking_overlay"
android:layout_width="match_parent"
android:layout_height="match_parent"/>
<org.tensorflow.demo.OverlayView
android:id="@+id/debug_overlay"
android:layout_width="match_parent"
android:layout_height="match_parent"/>
</FrameLayout>


@ -1,25 +0,0 @@
<?xml version="1.0" encoding="utf-8"?><!--
Copyright 2017 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<TextView
xmlns:android="http://schemas.android.com/apk/res/android"
android:id="@+id/list_text_item"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:text="TextView"
android:textSize="24dp"
android:textAlignment="center"
android:gravity="center_horizontal"
/>


@ -1,24 +0,0 @@
<!--
Copyright 2013 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<resources>
<!-- Semantic definitions -->
<dimen name="horizontal_page_margin">@dimen/margin_huge</dimen>
<dimen name="vertical_page_margin">@dimen/margin_medium</dimen>
</resources>


@ -1,25 +0,0 @@
<!--
Copyright 2013 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<resources>
<style name="Widget.SampleMessage">
<item name="android:textAppearance">?android:textAppearanceLarge</item>
<item name="android:lineSpacingMultiplier">1.2</item>
<item name="android:shadowDy">-6.5</item>
</style>
</resources>


@ -1,24 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<resources>
<!--
Base application theme for API 11+. This theme completely replaces
AppBaseTheme from res/values/styles.xml on API 11+ devices.
-->
<style name="AppBaseTheme" parent="android:Theme.Holo.Light">
<!-- API 11 theme customizations can go here. -->
</style>
<style name="FullscreenTheme" parent="android:Theme.Holo">
<item name="android:actionBarStyle">@style/FullscreenActionBarStyle</item>
<item name="android:windowActionBarOverlay">true</item>
<item name="android:windowBackground">@null</item>
<item name="metaButtonBarStyle">?android:attr/buttonBarStyle</item>
<item name="metaButtonBarButtonStyle">?android:attr/buttonBarButtonStyle</item>
</style>
<style name="FullscreenActionBarStyle" parent="android:Widget.Holo.ActionBar">
<!-- <item name="android:background">@color/black_overlay</item> -->
</style>
</resources>


@ -1,22 +0,0 @@
<!--
Copyright 2013 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<resources>
<!-- Activity themes -->
<style name="Theme.Base" parent="android:Theme.Holo.Light" />
</resources>


@ -1,12 +0,0 @@
<resources>
<!--
Base application theme for API 14+. This theme completely replaces
AppBaseTheme from BOTH res/values/styles.xml and
res/values-v11/styles.xml on API 14+ devices.
-->
<style name="AppBaseTheme" parent="android:Theme.Holo.Light.DarkActionBar">
<!-- API 14 theme customizations can go here. -->
</style>
</resources>


@ -1,21 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<!--
Copyright 2013 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<resources>
</resources>


@ -1,24 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<!--
Copyright 2016 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<resources>
<!-- Activity themes -->
<style name="Theme.Base" parent="android:Theme.Material.Light">
</style>
</resources>


@ -1,14 +0,0 @@
<resources>
<!--
Declare custom theme attributes that allow changing which styles are
used for button bars depending on the API level.
?android:attr/buttonBarStyle is new as of API 11 so this is
necessary to support previous API levels.
-->
<declare-styleable name="ButtonBarContainerTheme">
<attr name="metaButtonBarStyle" format="reference" />
<attr name="metaButtonBarButtonStyle" format="reference" />
</declare-styleable>
</resources>


@ -1,23 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<!--
Copyright 2016 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<resources>
<string name="app_name">TFLite Demo</string>
<string name="activity_name_classification">TFL Classify</string>
<string name="activity_name_detection">TFL Detect</string>
<string name="activity_name_speech">TFL Speech</string>
</resources>


@ -1,19 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<!--
Copyright 2015 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<resources>
<color name="control_background">#cc4285f4</color>
</resources>


@ -1,20 +0,0 @@
<?xml version="1.0" encoding="utf-8"?><!--
Copyright 2016 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<resources>
<string name="description_info">Info</string>
<string name="request_permission">This sample needs camera permission.</string>
<string name="camera_error">This device doesn\'t support Camera2 API.</string>
</resources>


@ -1,18 +0,0 @@
<?xml version="1.0" encoding="utf-8"?><!--
Copyright 2016 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<resources>
<style name="MaterialTheme" parent="android:Theme.Material.Light.NoActionBar.Fullscreen" />
</resources>


@ -1,32 +0,0 @@
<!--
Copyright 2013 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<resources>
<!-- Define standard dimensions to comply with Holo-style grids and rhythm. -->
<dimen name="margin_tiny">4dp</dimen>
<dimen name="margin_small">8dp</dimen>
<dimen name="margin_medium">16dp</dimen>
<dimen name="margin_large">32dp</dimen>
<dimen name="margin_huge">64dp</dimen>
<!-- Semantic definitions -->
<dimen name="horizontal_page_margin">@dimen/margin_medium</dimen>
<dimen name="vertical_page_margin">@dimen/margin_medium</dimen>
</resources>


@ -1,42 +0,0 @@
<!--
Copyright 2013 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<resources>
<!-- Activity themes -->
<style name="Theme.Base" parent="android:Theme.Light" />
<style name="Theme.Sample" parent="Theme.Base" />
<style name="AppTheme" parent="Theme.Sample" />
<!-- Widget styling -->
<style name="Widget" />
<style name="Widget.SampleMessage">
<item name="android:textAppearance">?android:textAppearanceMedium</item>
<item name="android:lineSpacingMultiplier">1.1</item>
</style>
<style name="Widget.SampleMessageTile">
<item name="android:background">@drawable/tile</item>
<item name="android:shadowColor">#7F000000</item>
<item name="android:shadowDy">-3.5</item>
<item name="android:shadowRadius">2</item>
</style>
</resources>


@ -1,26 +0,0 @@
// Top-level build file where you can add configuration options common to all sub-projects/modules.
buildscript {
repositories {
google()
jcenter()
}
dependencies {
classpath 'com.android.tools.build:gradle:3.2.1'
}
}
allprojects {
repositories {
google()
jcenter()
}
}
task clean(type: Delete) {
delete rootProject.buildDir
}
// Changed since default name 'build' conflicts with
// bazel BUILD file name.
buildDir = "gradle-build"


@ -1 +0,0 @@
include ':app'


@ -440,7 +440,6 @@ cmd_status(){
do_bazel_nobuild() {
BUILD_TARGET="//tensorflow/..."
BUILD_TARGET="${BUILD_TARGET} -//tensorflow/lite/delegates/gpu/..."
BUILD_TARGET="${BUILD_TARGET} -//tensorflow/lite/examples/android/..."
BUILD_TARGET="${BUILD_TARGET} -//tensorflow/lite/java/demo/app/..."
BUILD_TARGET="${BUILD_TARGET} -//tensorflow/lite/schema/..."
BUILD_CMD="bazel build --nobuild ${BAZEL_FLAGS} -- ${BUILD_TARGET}"


@ -36,7 +36,6 @@ BUILD_BLACKLIST = [
"tensorflow/lite/delegates/gpu",
"tensorflow/lite/delegates/gpu/metal",
"tensorflow/lite/delegates/gpu/metal/kernels",
"tensorflow/lite/examples/android",
"tensorflow/lite/experimental/objc",
"tensorflow/lite/experimental/swift",
]