Rename gesture_recognition example to magic_wand, and micro_vision example to person_detection

PiperOrigin-RevId: 273603061
This commit is contained in:
Daniel Situnayake 2019-10-08 14:16:19 -07:00 committed by TensorFlower Gardener
parent e7bb382f4d
commit ac960b6b5a
75 changed files with 349 additions and 345 deletions

View File

@ -54,15 +54,19 @@ The following examples are available:
* Colab walkthrough of model training and conversion
- [micro_speech](https://github.com/tensorflow/tensorflow/tree/master/tensorflow/lite/experimental/micro/examples/micro_speech)
* Uses a 20kb model to recognize keywords in spoken audio
* Uses a 20 KB model to recognize keywords in spoken audio
* Application code for Arduino, SparkFun Edge, and STM32F746
* Python scripts for model training and conversion
- [micro_vision](https://github.com/tensorflow/tensorflow/tree/master/tensorflow/lite/experimental/micro/examples/micro_vision)
* Uses a 250kb model to recognize presence or absence of a person in images
- [person_detection](https://github.com/tensorflow/tensorflow/tree/master/tensorflow/lite/experimental/micro/examples/person_detection)
* Uses a 250 KB model to recognize presence or absence of a person in images
captured by a camera
* Application code for SparkFun Edge
- [magic_wand](https://github.com/tensorflow/tensorflow/tree/master/tensorflow/lite/experimental/micro/examples/magic_wand)
* Uses a 20 KB model to recognize gestures using accelerometer data
* Application code for Arduino and SparkFun Edge
## Pre-generated Project Files
One of the challenges of embedded software development is that there are a lot

View File

@ -1,89 +0,0 @@
ifeq ($(TARGET), sparkfun_edge)
INCLUDES += \
-I$(APOLLO3_SDK)/boards/SparkFun_TensorFlow_Apollo3_BSP/examples/example1_edge_test/src/tf_accelerometer/ \
-I$(APOLLO3_SDK)/boards/SparkFun_TensorFlow_Apollo3_BSP/examples/example1_edge_test/src/tf_adc/
THIRD_PARTY_CC_SRCS += \
$(APOLLO3_SDK)/boards/SparkFun_TensorFlow_Apollo3_BSP/examples/example1_edge_test/src/tf_accelerometer/tf_accelerometer.c \
$(APOLLO3_SDK)/boards/SparkFun_TensorFlow_Apollo3_BSP/examples/example1_edge_test/src/tf_accelerometer/lis2dh12_reg.c \
$(APOLLO3_SDK)/boards/SparkFun_TensorFlow_Apollo3_BSP/examples/example1_edge_test/src/tf_adc/tf_adc.c
THIRD_PARTY_CC_HDRS += \
$(APOLLO3_SDK)/boards/SparkFun_TensorFlow_Apollo3_BSP/examples/example1_edge_test/src/tf_accelerometer/tf_accelerometer.h \
$(APOLLO3_SDK)/boards/SparkFun_TensorFlow_Apollo3_BSP/examples/example1_edge_test/src/tf_accelerometer/lis2dh12_reg.h \
$(APOLLO3_SDK)/boards/SparkFun_TensorFlow_Apollo3_BSP/examples/example1_edge_test/src/tf_adc/tf_adc.h
endif
ACCELEROMETER_HANDLER_TEST_SRCS := \
tensorflow/lite/experimental/micro/examples/gesture_recognition/accelerometer_handler.cc \
tensorflow/lite/experimental/micro/examples/gesture_recognition/accelerometer_handler_test.cc
ACCELEROMETER_HANDLER_TEST_HDRS := \
tensorflow/lite/experimental/micro/examples/gesture_recognition/accelerometer_handler.h
OUTPUT_HANDLER_TEST_SRCS := \
tensorflow/lite/experimental/micro/examples/gesture_recognition/output_handler.cc \
tensorflow/lite/experimental/micro/examples/gesture_recognition/output_handler_test.cc
OUTPUT_HANDLER_TEST_HDRS := \
tensorflow/lite/experimental/micro/examples/gesture_recognition/output_handler.h
GESTURE_PREDICTOR_TEST_SRCS := \
tensorflow/lite/experimental/micro/examples/gesture_recognition/constants.cc \
tensorflow/lite/experimental/micro/examples/gesture_recognition/gesture_predictor.cc \
tensorflow/lite/experimental/micro/examples/gesture_recognition/gesture_predictor_test.cc
GESTURE_PREDICTOR_TEST_HDRS := \
tensorflow/lite/experimental/micro/examples/gesture_recognition/constants.h \
tensorflow/lite/experimental/micro/examples/gesture_recognition/gesture_predictor.h \
GESTURE_RECOGNITION_TEST_SRCS := \
tensorflow/lite/experimental/micro/examples/gesture_recognition/gesture_recognition_test.cc \
tensorflow/lite/experimental/micro/examples/gesture_recognition/gesture_recognition_model_data.cc \
tensorflow/lite/experimental/micro/examples/gesture_recognition/angle_micro_features_data.cc \
tensorflow/lite/experimental/micro/examples/gesture_recognition/circle_micro_features_data.cc
GESTURE_RECOGNITION_TEST_HDRS := \
tensorflow/lite/experimental/micro/examples/gesture_recognition/gesture_recognition_model_data.h \
tensorflow/lite/experimental/micro/examples/gesture_recognition/angle_micro_features_data.h \
tensorflow/lite/experimental/micro/examples/gesture_recognition/circle_micro_features_data.h
GESTURE_RECOGNITION_SRCS := \
tensorflow/lite/experimental/micro/examples/gesture_recognition/main.cc \
tensorflow/lite/experimental/micro/examples/gesture_recognition/main_functions.cc \
tensorflow/lite/experimental/micro/examples/gesture_recognition/constants.cc \
tensorflow/lite/experimental/micro/examples/gesture_recognition/gesture_recognition_model_data.cc \
tensorflow/lite/experimental/micro/examples/gesture_recognition/accelerometer_handler.cc \
tensorflow/lite/experimental/micro/examples/gesture_recognition/gesture_predictor.cc \
tensorflow/lite/experimental/micro/examples/gesture_recognition/output_handler.cc
GESTURE_RECOGNITION_HDRS := \
tensorflow/lite/experimental/micro/examples/gesture_recognition/main_functions.h \
tensorflow/lite/experimental/micro/examples/gesture_recognition/constants.h \
tensorflow/lite/experimental/micro/examples/gesture_recognition/gesture_recognition_model_data.h \
tensorflow/lite/experimental/micro/examples/gesture_recognition/accelerometer_handler.h \
tensorflow/lite/experimental/micro/examples/gesture_recognition/gesture_predictor.h \
tensorflow/lite/experimental/micro/examples/gesture_recognition/output_handler.h
# Find any platform-specific rules for this example.
include $(wildcard tensorflow/lite/experimental/micro/examples/gesture_recognition/*/Makefile.inc)
# Tests the accelerometer handler
$(eval $(call microlite_test,gesture_accelerometer_handler_test,\
$(ACCELEROMETER_HANDLER_TEST_SRCS),$(ACCELEROMETER_HANDLER_TEST_HDRS)))
# Tests the output handler
$(eval $(call microlite_test,gesture_output_handler_test,\
$(OUTPUT_HANDLER_TEST_SRCS),$(OUTPUT_HANDLER_TEST_HDRS)))
# Tests the gesture predictor
$(eval $(call microlite_test,gesture_predictor_test,\
$(GESTURE_PREDICTOR_TEST_SRCS),$(GESTURE_PREDICTOR_TEST_HDRS)))
# Tests loading and running the gesture recognition model
$(eval $(call microlite_test,gesture_recognition_test,\
$(GESTURE_RECOGNITION_TEST_SRCS),$(GESTURE_RECOGNITION_TEST_HDRS)))
# Builds a standalone binary
$(eval $(call microlite_test,gesture_recognition,\
$(GESTURE_RECOGNITION_SRCS),$(GESTURE_RECOGNITION_HDRS)))

View File

@ -11,12 +11,12 @@ load(
)
cc_library(
name = "gesture_recognition_model_data",
name = "magic_wand_model_data",
srcs = [
"gesture_recognition_model_data.cc",
"magic_wand_model_data.cc",
],
hdrs = [
"gesture_recognition_model_data.h",
"magic_wand_model_data.h",
],
)
@ -33,12 +33,12 @@ cc_library(
)
tflite_micro_cc_test(
name = "gesture_recognition_test",
name = "magic_wand_test",
srcs = [
"gesture_recognition_test.cc",
"magic_wand_test.cc",
],
deps = [
":gesture_recognition_model_data",
":magic_wand_model_data",
":sample_feature_data",
"//tensorflow/lite:schema_fbs_version",
"//tensorflow/lite/experimental/micro:micro_framework",
@ -140,7 +140,7 @@ tflite_micro_cc_test(
)
cc_binary(
name = "gesture_recognition",
name = "magic_wand",
srcs = [
"main.cc",
"main_functions.cc",
@ -154,11 +154,11 @@ cc_binary(
deps = [
"//tensorflow/lite:schema_fbs_version",
"//tensorflow/lite/experimental/micro:micro_framework",
"//tensorflow/lite/experimental/micro/examples/gesture_recognition:accelerometer_handler",
"//tensorflow/lite/experimental/micro/examples/gesture_recognition:constants",
"//tensorflow/lite/experimental/micro/examples/gesture_recognition:gesture_predictor",
"//tensorflow/lite/experimental/micro/examples/gesture_recognition:gesture_recognition_model_data",
"//tensorflow/lite/experimental/micro/examples/gesture_recognition:output_handler",
"//tensorflow/lite/experimental/micro/examples/magic_wand:accelerometer_handler",
"//tensorflow/lite/experimental/micro/examples/magic_wand:constants",
"//tensorflow/lite/experimental/micro/examples/magic_wand:gesture_predictor",
"//tensorflow/lite/experimental/micro/examples/magic_wand:magic_wand_model_data",
"//tensorflow/lite/experimental/micro/examples/magic_wand:output_handler",
"//tensorflow/lite/experimental/micro/kernels:all_ops_resolver",
"//tensorflow/lite/schema:schema_fbs",
],

View File

@ -0,0 +1,89 @@
# Build rules for the magic_wand example (accelerometer gesture recognition).
# Included by the top-level micro Makefile; defines the example's sources,
# headers, unit tests, and the standalone binary target.

# On SparkFun Edge, pull in the board-support accelerometer and ADC drivers
# from the Ambiq Apollo3 SDK so the example can read real sensor data.
ifeq ($(TARGET), sparkfun_edge)
INCLUDES += \
-I$(APOLLO3_SDK)/boards/SparkFun_TensorFlow_Apollo3_BSP/examples/example1_edge_test/src/tf_accelerometer/ \
-I$(APOLLO3_SDK)/boards/SparkFun_TensorFlow_Apollo3_BSP/examples/example1_edge_test/src/tf_adc/

THIRD_PARTY_CC_SRCS += \
$(APOLLO3_SDK)/boards/SparkFun_TensorFlow_Apollo3_BSP/examples/example1_edge_test/src/tf_accelerometer/tf_accelerometer.c \
$(APOLLO3_SDK)/boards/SparkFun_TensorFlow_Apollo3_BSP/examples/example1_edge_test/src/tf_accelerometer/lis2dh12_reg.c \
$(APOLLO3_SDK)/boards/SparkFun_TensorFlow_Apollo3_BSP/examples/example1_edge_test/src/tf_adc/tf_adc.c

THIRD_PARTY_CC_HDRS += \
$(APOLLO3_SDK)/boards/SparkFun_TensorFlow_Apollo3_BSP/examples/example1_edge_test/src/tf_accelerometer/tf_accelerometer.h \
$(APOLLO3_SDK)/boards/SparkFun_TensorFlow_Apollo3_BSP/examples/example1_edge_test/src/tf_accelerometer/lis2dh12_reg.h \
$(APOLLO3_SDK)/boards/SparkFun_TensorFlow_Apollo3_BSP/examples/example1_edge_test/src/tf_adc/tf_adc.h
endif

# Unit test for the accelerometer input handler.
ACCELEROMETER_HANDLER_TEST_SRCS := \
tensorflow/lite/experimental/micro/examples/magic_wand/accelerometer_handler.cc \
tensorflow/lite/experimental/micro/examples/magic_wand/accelerometer_handler_test.cc

ACCELEROMETER_HANDLER_TEST_HDRS := \
tensorflow/lite/experimental/micro/examples/magic_wand/accelerometer_handler.h

# Unit test for the gesture output handler.
OUTPUT_HANDLER_TEST_SRCS := \
tensorflow/lite/experimental/micro/examples/magic_wand/output_handler.cc \
tensorflow/lite/experimental/micro/examples/magic_wand/output_handler_test.cc

OUTPUT_HANDLER_TEST_HDRS := \
tensorflow/lite/experimental/micro/examples/magic_wand/output_handler.h

# Unit test for the gesture prediction logic.
GESTURE_PREDICTOR_TEST_SRCS := \
tensorflow/lite/experimental/micro/examples/magic_wand/constants.cc \
tensorflow/lite/experimental/micro/examples/magic_wand/gesture_predictor.cc \
tensorflow/lite/experimental/micro/examples/magic_wand/gesture_predictor_test.cc

# NOTE: the stray trailing backslash that previously followed
# gesture_predictor.h has been removed; it risked silently merging the next
# variable assignment into this list.
GESTURE_PREDICTOR_TEST_HDRS := \
tensorflow/lite/experimental/micro/examples/magic_wand/constants.h \
tensorflow/lite/experimental/micro/examples/magic_wand/gesture_predictor.h

# End-to-end test that loads the model and runs inference on sample data.
# Variable names use UPPER_SNAKE for consistency with the other lists above.
MAGIC_WAND_TEST_SRCS := \
tensorflow/lite/experimental/micro/examples/magic_wand/magic_wand_test.cc \
tensorflow/lite/experimental/micro/examples/magic_wand/magic_wand_model_data.cc \
tensorflow/lite/experimental/micro/examples/magic_wand/angle_micro_features_data.cc \
tensorflow/lite/experimental/micro/examples/magic_wand/circle_micro_features_data.cc

MAGIC_WAND_TEST_HDRS := \
tensorflow/lite/experimental/micro/examples/magic_wand/magic_wand_model_data.h \
tensorflow/lite/experimental/micro/examples/magic_wand/angle_micro_features_data.h \
tensorflow/lite/experimental/micro/examples/magic_wand/circle_micro_features_data.h

# Sources and headers for the standalone application binary.
MAGIC_WAND_SRCS := \
tensorflow/lite/experimental/micro/examples/magic_wand/main.cc \
tensorflow/lite/experimental/micro/examples/magic_wand/main_functions.cc \
tensorflow/lite/experimental/micro/examples/magic_wand/constants.cc \
tensorflow/lite/experimental/micro/examples/magic_wand/magic_wand_model_data.cc \
tensorflow/lite/experimental/micro/examples/magic_wand/accelerometer_handler.cc \
tensorflow/lite/experimental/micro/examples/magic_wand/gesture_predictor.cc \
tensorflow/lite/experimental/micro/examples/magic_wand/output_handler.cc

MAGIC_WAND_HDRS := \
tensorflow/lite/experimental/micro/examples/magic_wand/main_functions.h \
tensorflow/lite/experimental/micro/examples/magic_wand/constants.h \
tensorflow/lite/experimental/micro/examples/magic_wand/magic_wand_model_data.h \
tensorflow/lite/experimental/micro/examples/magic_wand/accelerometer_handler.h \
tensorflow/lite/experimental/micro/examples/magic_wand/gesture_predictor.h \
tensorflow/lite/experimental/micro/examples/magic_wand/output_handler.h

# Find any platform-specific rules for this example.
include $(wildcard tensorflow/lite/experimental/micro/examples/magic_wand/*/Makefile.inc)

# Tests the accelerometer handler
$(eval $(call microlite_test,gesture_accelerometer_handler_test,\
$(ACCELEROMETER_HANDLER_TEST_SRCS),$(ACCELEROMETER_HANDLER_TEST_HDRS)))

# Tests the output handler
$(eval $(call microlite_test,gesture_output_handler_test,\
$(OUTPUT_HANDLER_TEST_SRCS),$(OUTPUT_HANDLER_TEST_HDRS)))

# Tests the gesture predictor
$(eval $(call microlite_test,gesture_predictor_test,\
$(GESTURE_PREDICTOR_TEST_SRCS),$(GESTURE_PREDICTOR_TEST_HDRS)))

# Tests loading and running the magic_wand gesture recognition model
$(eval $(call microlite_test,magic_wand_test,\
$(MAGIC_WAND_TEST_SRCS),$(MAGIC_WAND_TEST_HDRS)))

# Builds a standalone binary
$(eval $(call microlite_test,magic_wand,\
$(MAGIC_WAND_SRCS),$(MAGIC_WAND_HDRS)))

View File

@ -1,12 +1,12 @@
# Gesture Recognition example
# Magic wand example
This example shows how you can use TensorFlow Lite to run a 20 kilobyte neural
network model to recognize magic gestures. It's designed to run on systems with
very small amounts of memory such as microcontrollers.
network model to recognize gestures with an accelerometer. It's designed to run
on systems with very small amounts of memory, such as microcontrollers.
The example application reads data from the accelerometer on the SparkFun Edge
and indicates when it has detected a gesture, then outputs the gesture on
screen.
The example application reads data from the accelerometer on an Arduino Nano 33
BLE Sense or SparkFun Edge board and indicates when it has detected a gesture,
then outputs the gesture to the serial port.
## Table of contents
@ -30,7 +30,7 @@ tensorflow/tensorflow/lite/experimental/micro/examples/ folder, then `cd` into
the source directory from a terminal and run the following command:
```bash
make -f tensorflow/lite/experimental/micro/tools/make/Makefile test_gesture_recognition_test
make -f tensorflow/lite/experimental/micro/tools/make/Makefile test_magic_wand_test
```
This will take a few minutes, and downloads frameworks the code uses like
@ -63,7 +63,7 @@ The sample has been tested with the following devices:
To use this sample application with Arduino, we've created an Arduino library
that includes it as an example that you can open in the Arduino Desktop IDE.
Download the current nightly build of the library: [hello_world.zip](https://storage.googleapis.com/tensorflow-nightly/github/tensorflow/tensorflow/lite/experimental/micro/tools/make/gen/arduino_x86_64/prj/gesture_recognition/gesture_recognition.zip)
Download the current nightly build of the library: [magic_wand.zip](https://storage.googleapis.com/tensorflow-nightly/github/tensorflow/tensorflow/lite/experimental/micro/tools/make/gen/arduino_x86_64/prj/magic_wand/magic_wand.zip)
Next, import this zip file into the Arduino Desktop IDE by going to `Sketch ->
Include Library -> Add .ZIP Library...`.
@ -127,7 +127,7 @@ Next, save the file. Patching is now complete.
Once the library has been added, go to `File -> Examples`. You should see an
example near the bottom of the list named `TensorFlowLite`. Select
it and click `gesture_recognition` to load the example.
it and click `magic_wand` to load the example.
Use the Arduino Desktop IDE to build and upload the example. Once it is running,
you should see the built-in LED on your device flashing.
@ -191,13 +191,13 @@ codelab to get an understanding of the workflow.
Run the following command to build a binary for SparkFun Edge.
```
make -f tensorflow/lite/experimental/micro/tools/make/Makefile TARGET=sparkfun_edge gesture_recognition_bin
make -f tensorflow/lite/experimental/micro/tools/make/Makefile TARGET=sparkfun_edge magic_wand_bin
```
The binary will be created in the following location:
```
tensorflow/lite/experimental/micro/tools/make/gen/sparkfun_edge_cortex-m4/bin/gesture_recognition.bin
tensorflow/lite/experimental/micro/tools/make/gen/sparkfun_edge_cortex-m4/bin/magic_wand.bin
```
### Sign the binary
@ -219,7 +219,7 @@ Next, run the following command to create a signed binary:
```
python3 tensorflow/lite/experimental/micro/tools/make/downloads/AmbiqSuite-Rel2.0.0/tools/apollo3_scripts/create_cust_image_blob.py \
--bin tensorflow/lite/experimental/micro/tools/make/gen/sparkfun_edge_cortex-m4/bin/gesture_recognition.bin \
--bin tensorflow/lite/experimental/micro/tools/make/gen/sparkfun_edge_cortex-m4/bin/magic_wand.bin \
--load-address 0xC000 \
--magic-num 0xCB \
-o main_nonsecure_ota \

View File

@ -13,7 +13,7 @@ See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#include "tensorflow/lite/experimental/micro/examples/gesture_recognition/accelerometer_handler.h"
#include "tensorflow/lite/experimental/micro/examples/magic_wand/accelerometer_handler.h"
int begin_index = 0;

View File

@ -13,8 +13,8 @@ See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#ifndef TENSORFLOW_LITE_EXPERIMENTAL_MICRO_EXAMPLES_GESTURE_RECOGNITION_ACCELEROMETER_HANDLER_H_
#define TENSORFLOW_LITE_EXPERIMENTAL_MICRO_EXAMPLES_GESTURE_RECOGNITION_ACCELEROMETER_HANDLER_H_
#ifndef TENSORFLOW_LITE_EXPERIMENTAL_MICRO_EXAMPLES_MAGIC_WAND_ACCELEROMETER_HANDLER_H_
#define TENSORFLOW_LITE_EXPERIMENTAL_MICRO_EXAMPLES_MAGIC_WAND_ACCELEROMETER_HANDLER_H_
#define kChannelNumber 3
@ -26,4 +26,4 @@ extern TfLiteStatus SetupAccelerometer(tflite::ErrorReporter* error_reporter);
extern bool ReadAccelerometer(tflite::ErrorReporter* error_reporter,
float* input, int length, bool reset_buffer);
#endif // TENSORFLOW_LITE_EXPERIMENTAL_MICRO_EXAMPLES_GESTURE_RECOGNITION_ACCELEROMETER_HANDLER_H_
#endif // TENSORFLOW_LITE_EXPERIMENTAL_MICRO_EXAMPLES_MAGIC_WAND_ACCELEROMETER_HANDLER_H_

View File

@ -13,7 +13,7 @@ See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#include "tensorflow/lite/experimental/micro/examples/gesture_recognition/accelerometer_handler.h"
#include "tensorflow/lite/experimental/micro/examples/magic_wand/accelerometer_handler.h"
#include <string.h>

View File

@ -13,7 +13,7 @@ See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#include "tensorflow/lite/experimental/micro/examples/gesture_recognition/angle_micro_features_data.h"
#include "tensorflow/lite/experimental/micro/examples/magic_wand/angle_micro_features_data.h"
const int g_angle_micro_f2e59fea_nohash_1_length = 128;
const int g_angle_micro_f2e59fea_nohash_1_dim = 3;

View File

@ -13,11 +13,11 @@ See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#ifndef TENSORFLOW_LITE_EXPERIMENTAL_MICRO_EXAMPLES_GESTURE_RECOGNITION_ANGLE_MICRO_FEATURES_DATA_H_
#define TENSORFLOW_LITE_EXPERIMENTAL_MICRO_EXAMPLES_GESTURE_RECOGNITION_ANGLE_MICRO_FEATURES_DATA_H_
#ifndef TENSORFLOW_LITE_EXPERIMENTAL_MICRO_EXAMPLES_MAGIC_WAND_ANGLE_MICRO_FEATURES_DATA_H_
#define TENSORFLOW_LITE_EXPERIMENTAL_MICRO_EXAMPLES_MAGIC_WAND_ANGLE_MICRO_FEATURES_DATA_H_
extern const int g_angle_micro_f2e59fea_nohash_1_length;
extern const int g_angle_micro_f2e59fea_nohash_1_dim;
extern const float g_angle_micro_f2e59fea_nohash_1_data[];
#endif // TENSORFLOW_LITE_EXPERIMENTAL_MICRO_EXAMPLES_GESTURE_RECOGNITION_ANGLE_MICRO_FEATURES_DATA_H_
#endif // TENSORFLOW_LITE_EXPERIMENTAL_MICRO_EXAMPLES_MAGIC_WAND_ANGLE_MICRO_FEATURES_DATA_H_

View File

@ -13,12 +13,12 @@ See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#include "tensorflow/lite/experimental/micro/examples/gesture_recognition/accelerometer_handler.h"
#include "tensorflow/lite/experimental/micro/examples/magic_wand/accelerometer_handler.h"
#include <Arduino.h>
#include <Arduino_LSM9DS1.h>
#include "tensorflow/lite/experimental/micro/examples/gesture_recognition/constants.h"
#include "tensorflow/lite/experimental/micro/examples/magic_wand/constants.h"
// A buffer holding the last 200 sets of 3-channel values
float save_data[600] = {0.0};

View File

@ -13,7 +13,7 @@ See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#include "tensorflow/lite/experimental/micro/examples/gesture_recognition/constants.h"
#include "tensorflow/lite/experimental/micro/examples/magic_wand/constants.h"
// The number of expected consecutive inferences for each gesture type.
// Established with the Arduino Nano 33 BLE Sense.

View File

@ -13,7 +13,7 @@ See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#include "tensorflow/lite/experimental/micro/examples/gesture_recognition/output_handler.h"
#include "tensorflow/lite/experimental/micro/examples/magic_wand/output_handler.h"
#include "Arduino.h"

View File

@ -13,7 +13,7 @@ See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#include "tensorflow/lite/experimental/micro/examples/gesture_recognition/circle_micro_features_data.h"
#include "tensorflow/lite/experimental/micro/examples/magic_wand/circle_micro_features_data.h"
const int g_circle_micro_f9643d42_nohash_4_length = 128;
const int g_circle_micro_f9643d42_nohash_4_dim = 3;

View File

@ -13,11 +13,11 @@ See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#ifndef TENSORFLOW_LITE_EXPERIMENTAL_MICRO_EXAMPLES_GESTURE_RECOGNITION_CIRCLE_MICRO_FEATURES_DATA_H_
#define TENSORFLOW_LITE_EXPERIMENTAL_MICRO_EXAMPLES_GESTURE_RECOGNITION_CIRCLE_MICRO_FEATURES_DATA_H_
#ifndef TENSORFLOW_LITE_EXPERIMENTAL_MICRO_EXAMPLES_MAGIC_WAND_CIRCLE_MICRO_FEATURES_DATA_H_
#define TENSORFLOW_LITE_EXPERIMENTAL_MICRO_EXAMPLES_MAGIC_WAND_CIRCLE_MICRO_FEATURES_DATA_H_
extern const int g_circle_micro_f9643d42_nohash_4_length;
extern const int g_circle_micro_f9643d42_nohash_4_dim;
extern const float g_circle_micro_f9643d42_nohash_4_data[];
#endif // TENSORFLOW_LITE_EXPERIMENTAL_MICRO_EXAMPLES_GESTURE_RECOGNITION_CIRCLE_MICRO_FEATURES_DATA_H_
#endif // TENSORFLOW_LITE_EXPERIMENTAL_MICRO_EXAMPLES_MAGIC_WAND_CIRCLE_MICRO_FEATURES_DATA_H_

View File

@ -13,7 +13,7 @@ See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#include "tensorflow/lite/experimental/micro/examples/gesture_recognition/constants.h"
#include "tensorflow/lite/experimental/micro/examples/magic_wand/constants.h"
// The number of expected consecutive inferences for each gesture type.
// These defaults were established with the SparkFun Edge board.

View File

@ -13,12 +13,12 @@ See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#ifndef TENSORFLOW_LITE_EXPERIMENTAL_MICRO_EXAMPLES_GESTURE_RECOGNITION_CONSTANTS_H_
#define TENSORFLOW_LITE_EXPERIMENTAL_MICRO_EXAMPLES_GESTURE_RECOGNITION_CONSTANTS_H_
#ifndef TENSORFLOW_LITE_EXPERIMENTAL_MICRO_EXAMPLES_MAGIC_WAND_CONSTANTS_H_
#define TENSORFLOW_LITE_EXPERIMENTAL_MICRO_EXAMPLES_MAGIC_WAND_CONSTANTS_H_
// The expected accelerometer data sample frequency
const float kTargetHz = 25;
// The number of expected consecutive inferences for each gesture type
extern const int kConsecutiveInferenceThresholds[3];
#endif // TENSORFLOW_LITE_EXPERIMENTAL_MICRO_EXAMPLES_GESTURE_RECOGNITION_CONSTANTS_H_
#endif // TENSORFLOW_LITE_EXPERIMENTAL_MICRO_EXAMPLES_MAGIC_WAND_CONSTANTS_H_

View File

@ -13,9 +13,9 @@ See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#include "tensorflow/lite/experimental/micro/examples/gesture_recognition/gesture_predictor.h"
#include "tensorflow/lite/experimental/micro/examples/magic_wand/gesture_predictor.h"
#include "tensorflow/lite/experimental/micro/examples/gesture_recognition/constants.h"
#include "tensorflow/lite/experimental/micro/examples/magic_wand/constants.h"
// How many times the most recent gesture has been matched in a row
int continuous_count = 0;

View File

@ -13,9 +13,9 @@ See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#ifndef TENSORFLOW_LITE_EXPERIMENTAL_MICRO_EXAMPLES_GESTURE_RECOGNITION_GESTURE_PREDICTOR_H_
#define TENSORFLOW_LITE_EXPERIMENTAL_MICRO_EXAMPLES_GESTURE_RECOGNITION_GESTURE_PREDICTOR_H_
#ifndef TENSORFLOW_LITE_EXPERIMENTAL_MICRO_EXAMPLES_MAGIC_WAND_GESTURE_PREDICTOR_H_
#define TENSORFLOW_LITE_EXPERIMENTAL_MICRO_EXAMPLES_MAGIC_WAND_GESTURE_PREDICTOR_H_
extern int PredictGesture(float* output);
#endif // TENSORFLOW_LITE_EXPERIMENTAL_MICRO_EXAMPLES_GESTURE_RECOGNITION_GESTURE_PREDICTOR_H_
#endif // TENSORFLOW_LITE_EXPERIMENTAL_MICRO_EXAMPLES_MAGIC_WAND_GESTURE_PREDICTOR_H_

View File

@ -13,9 +13,9 @@ See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#include "tensorflow/lite/experimental/micro/examples/gesture_recognition/gesture_predictor.h"
#include "tensorflow/lite/experimental/micro/examples/magic_wand/gesture_predictor.h"
#include "tensorflow/lite/experimental/micro/examples/gesture_recognition/constants.h"
#include "tensorflow/lite/experimental/micro/examples/magic_wand/constants.h"
#include "tensorflow/lite/experimental/micro/testing/micro_test.h"
TF_LITE_MICRO_TESTS_BEGIN

View File

@ -14,10 +14,10 @@ limitations under the License.
==============================================================================*/
// Automatically created from a TensorFlow Lite flatbuffer using the command:
// xxd -i gesture_recognition_model.tflite > gesture_recognition_model_data.cc
// xxd -i magic_wand_model.tflite > magic_wand_model_data.cc
// See the README for a full description of the creation process.
#include "tensorflow/lite/experimental/micro/examples/gesture_recognition/gesture_recognition_model_data.h"
#include "tensorflow/lite/experimental/micro/examples/magic_wand/magic_wand_model_data.h"
// We need to keep the data array aligned on some architectures.
#ifdef __has_attribute
@ -31,7 +31,7 @@ limitations under the License.
#define DATA_ALIGN_ATTRIBUTE
#endif
const unsigned char g_gesture_recognition_model_data[] DATA_ALIGN_ATTRIBUTE = {
const unsigned char g_magic_wand_model_data[] DATA_ALIGN_ATTRIBUTE = {
0x18, 0x00, 0x00, 0x00, 0x54, 0x46, 0x4c, 0x33, 0x00, 0x00, 0x0e, 0x00,
0x18, 0x00, 0x04, 0x00, 0x08, 0x00, 0x0c, 0x00, 0x10, 0x00, 0x14, 0x00,
0x0e, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x18, 0x4c, 0x00, 0x00,
@ -1666,4 +1666,4 @@ const unsigned char g_gesture_recognition_model_data[] DATA_ALIGN_ATTRIBUTE = {
0x00, 0x03, 0x06, 0x00, 0x06, 0x00, 0x05, 0x00, 0x06, 0x00, 0x00, 0x00,
0x00, 0x11, 0x06, 0x00, 0x08, 0x00, 0x07, 0x00, 0x06, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x04};
const int g_gesture_recognition_model_data_len = 19600;
const int g_magic_wand_model_data_len = 19600;

View File

@ -16,12 +16,12 @@ limitations under the License.
// This is a standard TensorFlow Lite model file that has been converted into a
// C data array, so it can be easily compiled into a binary for devices that
// don't have a file system. It was created using the command:
// xxd -i gesture_recognition_model.tflite > gesture_recognition_model_data.cc
// xxd -i magic_wand_model.tflite > magic_wand_model_data.cc
#ifndef TENSORFLOW_LITE_EXPERIMENTAL_MICRO_EXAMPLES_GESTURE_RECOGNITION_GESTURE_RECOGNITION_MODEL_DATA_H_
#define TENSORFLOW_LITE_EXPERIMENTAL_MICRO_EXAMPLES_GESTURE_RECOGNITION_GESTURE_RECOGNITION_MODEL_DATA_H_
#ifndef TENSORFLOW_LITE_EXPERIMENTAL_MICRO_EXAMPLES_MAGIC_WAND_MAGIC_WAND_MODEL_DATA_H_
#define TENSORFLOW_LITE_EXPERIMENTAL_MICRO_EXAMPLES_MAGIC_WAND_MAGIC_WAND_MODEL_DATA_H_
extern const unsigned char g_gesture_recognition_model_data[];
extern const int g_gesture_recognition_model_data_len;
extern const unsigned char g_magic_wand_model_data[];
extern const int g_magic_wand_model_data_len;
#endif // TENSORFLOW_LITE_EXPERIMENTAL_MICRO_EXAMPLES_GESTURE_RECOGNITION_GESTURE_RECOGNITION_MODEL_DATA_H_
#endif // TENSORFLOW_LITE_EXPERIMENTAL_MICRO_EXAMPLES_MAGIC_WAND_MAGIC_WAND_MODEL_DATA_H_

View File

@ -13,9 +13,9 @@ See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#include "tensorflow/lite/experimental/micro/examples/gesture_recognition/angle_micro_features_data.h"
#include "tensorflow/lite/experimental/micro/examples/gesture_recognition/circle_micro_features_data.h"
#include "tensorflow/lite/experimental/micro/examples/gesture_recognition/gesture_recognition_model_data.h"
#include "tensorflow/lite/experimental/micro/examples/magic_wand/angle_micro_features_data.h"
#include "tensorflow/lite/experimental/micro/examples/magic_wand/circle_micro_features_data.h"
#include "tensorflow/lite/experimental/micro/examples/magic_wand/magic_wand_model_data.h"
#include "tensorflow/lite/experimental/micro/micro_mutable_op_resolver.h"
#include "tensorflow/lite/experimental/micro/micro_error_reporter.h"
#include "tensorflow/lite/experimental/micro/micro_interpreter.h"
@ -45,7 +45,7 @@ TF_LITE_MICRO_TEST(LoadModelAndPerformInference) {
// Map the model into a usable data structure. This doesn't involve any
// copying or parsing, it's a very lightweight operation.
const tflite::Model* model =
::tflite::GetModel(g_gesture_recognition_model_data);
::tflite::GetModel(g_magic_wand_model_data);
if (model->version() != TFLITE_SCHEMA_VERSION) {
error_reporter->Report(
"Model provided is schema version %d not equal "

View File

@ -13,7 +13,7 @@ See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#include "tensorflow/lite/experimental/micro/examples/micro_vision/main_functions.h"
#include "tensorflow/lite/experimental/micro/examples/magic_wand/main_functions.h"
// This is the default main used on systems that have the standard C entry
// point. Other devices (for example FreeRTOS or ESP32) that have different

View File

@ -13,12 +13,12 @@ See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#include "tensorflow/lite/experimental/micro/examples/gesture_recognition/main_functions.h"
#include "tensorflow/lite/experimental/micro/examples/magic_wand/main_functions.h"
#include "tensorflow/lite/experimental/micro/examples/gesture_recognition/accelerometer_handler.h"
#include "tensorflow/lite/experimental/micro/examples/gesture_recognition/gesture_predictor.h"
#include "tensorflow/lite/experimental/micro/examples/gesture_recognition/gesture_recognition_model_data.h"
#include "tensorflow/lite/experimental/micro/examples/gesture_recognition/output_handler.h"
#include "tensorflow/lite/experimental/micro/examples/magic_wand/accelerometer_handler.h"
#include "tensorflow/lite/experimental/micro/examples/magic_wand/gesture_predictor.h"
#include "tensorflow/lite/experimental/micro/examples/magic_wand/magic_wand_model_data.h"
#include "tensorflow/lite/experimental/micro/examples/magic_wand/output_handler.h"
#include "tensorflow/lite/experimental/micro/micro_error_reporter.h"
#include "tensorflow/lite/experimental/micro/micro_interpreter.h"
#include "tensorflow/lite/experimental/micro/micro_mutable_op_resolver.h"
@ -64,7 +64,7 @@ void setup() {
// Map the model into a usable data structure. This doesn't involve any
// copying or parsing, it's a very lightweight operation.
model = tflite::GetModel(g_gesture_recognition_model_data);
model = tflite::GetModel(g_magic_wand_model_data);
if (model->version() != TFLITE_SCHEMA_VERSION) {
error_reporter->Report(
"Model provided is schema version %d not equal "

View File

@ -13,8 +13,8 @@ See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#ifndef TENSORFLOW_LITE_EXPERIMENTAL_MICRO_EXAMPLES_MICRO_VISION_MAIN_FUNCTIONS_H_
#define TENSORFLOW_LITE_EXPERIMENTAL_MICRO_EXAMPLES_MICRO_VISION_MAIN_FUNCTIONS_H_
#ifndef TENSORFLOW_LITE_EXPERIMENTAL_MICRO_EXAMPLES_MAGIC_WAND_MAIN_FUNCTIONS_H_
#define TENSORFLOW_LITE_EXPERIMENTAL_MICRO_EXAMPLES_MAGIC_WAND_MAIN_FUNCTIONS_H_
// Initializes all data needed for the example. The name is important, and needs
// to be setup() for Arduino compatibility.
@ -25,4 +25,4 @@ void setup();
// compatibility.
void loop();
#endif // TENSORFLOW_LITE_EXPERIMENTAL_MICRO_EXAMPLES_MICRO_VISION_MAIN_FUNCTIONS_H_
#endif // TENSORFLOW_LITE_EXPERIMENTAL_MICRO_EXAMPLES_MAGIC_WAND_MAIN_FUNCTIONS_H_

View File

@ -13,7 +13,7 @@ See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#include "tensorflow/lite/experimental/micro/examples/gesture_recognition/output_handler.h"
#include "tensorflow/lite/experimental/micro/examples/magic_wand/output_handler.h"
void HandleOutput(tflite::ErrorReporter* error_reporter, int kind) {
// light (red: wing, blue: ring, green: slope)

View File

@ -13,12 +13,12 @@ See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#ifndef TENSORFLOW_LITE_EXPERIMENTAL_MICRO_EXAMPLES_GESTURE_RECOGNITION_OUTPUT_HANDLER_H_
#define TENSORFLOW_LITE_EXPERIMENTAL_MICRO_EXAMPLES_GESTURE_RECOGNITION_OUTPUT_HANDLER_H_
#ifndef TENSORFLOW_LITE_EXPERIMENTAL_MICRO_EXAMPLES_MAGIC_WAND_OUTPUT_HANDLER_H_
#define TENSORFLOW_LITE_EXPERIMENTAL_MICRO_EXAMPLES_MAGIC_WAND_OUTPUT_HANDLER_H_
#include "tensorflow/lite/c/c_api_internal.h"
#include "tensorflow/lite/experimental/micro/micro_error_reporter.h"
void HandleOutput(tflite::ErrorReporter* error_reporter, int kind);
#endif // TENSORFLOW_LITE_EXPERIMENTAL_MICRO_EXAMPLES_GESTURE_RECOGNITION_OUTPUT_HANDLER_H_
#endif // TENSORFLOW_LITE_EXPERIMENTAL_MICRO_EXAMPLES_MAGIC_WAND_OUTPUT_HANDLER_H_

View File

@ -13,7 +13,7 @@ See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#include "tensorflow/lite/experimental/micro/examples/gesture_recognition/output_handler.h"
#include "tensorflow/lite/experimental/micro/examples/magic_wand/output_handler.h"
#include "tensorflow/lite/experimental/micro/testing/micro_test.h"
#include "tensorflow/lite/experimental/micro/testing/test_utils.h"

View File

@ -13,7 +13,7 @@ See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#include "tensorflow/lite/experimental/micro/examples/gesture_recognition/accelerometer_handler.h"
#include "tensorflow/lite/experimental/micro/examples/magic_wand/accelerometer_handler.h"
// These are headers from Ambiq's Apollo3 SDK.
#include "am_bsp.h" // NOLINT

View File

@ -13,7 +13,7 @@ See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#include "tensorflow/lite/experimental/micro/examples/gesture_recognition/output_handler.h"
#include "tensorflow/lite/experimental/micro/examples/magic_wand/output_handler.h"
#include "tensorflow/lite/experimental/micro/tools/make/downloads/AmbiqSuite-Rel2.0.0/boards/SparkFun_TensorFlow_Apollo3_BSP/bsp/am_bsp.h"
#include "tensorflow/lite/experimental/micro/tools/make/downloads/AmbiqSuite-Rel2.0.0/boards/SparkFun_TensorFlow_Apollo3_BSP/examples/example1_edge_test/src/tf_accelerometer/tf_accelerometer.h"

View File

@ -1,68 +0,0 @@
$(eval $(call add_third_party_download,$(PERSON_MODEL_URL),$(PERSON_MODEL_MD5),person_model_grayscale,))
MICRO_VISION_MODEL_SRCS := \
tensorflow/lite/experimental/micro/examples/micro_vision/model_settings.cc \
$(MAKEFILE_DIR)/downloads/person_model_grayscale/person_detect_model_data.cc
MICRO_VISION_MODEL_HDRS := \
tensorflow/lite/experimental/micro/examples/micro_vision/model_settings.h \
tensorflow/lite/experimental/micro/examples/micro_vision/person_detect_model_data.h
MICRO_VISION_TEST_SRCS := \
tensorflow/lite/experimental/micro/examples/micro_vision/micro_vision_test.cc \
$(MAKEFILE_DIR)/downloads/person_model_grayscale/no_person_image_data.cc \
$(MAKEFILE_DIR)/downloads/person_model_grayscale/person_image_data.cc \
$(MICRO_VISION_MODEL_SRCS)
MICRO_VISION_TEST_HDRS := \
tensorflow/lite/experimental/micro/examples/micro_vision/no_person_image_data.h \
tensorflow/lite/experimental/micro/examples/micro_vision/person_image_data.h \
$(MICRO_VISION_MODEL_HDRS)
IMAGE_PROVIDER_TEST_SRCS := \
tensorflow/lite/experimental/micro/examples/micro_vision/image_provider.cc \
tensorflow/lite/experimental/micro/examples/micro_vision/image_provider_test.cc \
tensorflow/lite/experimental/micro/examples/micro_vision/model_settings.cc
IMAGE_PROVIDER_TEST_HDRS := \
tensorflow/lite/experimental/micro/examples/micro_vision/image_provider.h \
tensorflow/lite/experimental/micro/examples/micro_vision/model_settings.h
DETECTION_RESPONDER_TEST_SRCS := \
tensorflow/lite/experimental/micro/examples/micro_vision/detection_responder.cc \
tensorflow/lite/experimental/micro/examples/micro_vision/detection_responder_test.cc
DETECTION_RESPONDER_TEST_HDRS := \
tensorflow/lite/experimental/micro/examples/micro_vision/detection_responder.h
MICRO_VISION_SRCS := \
tensorflow/lite/experimental/micro/examples/micro_vision/detection_responder.cc \
tensorflow/lite/experimental/micro/examples/micro_vision/image_provider.cc \
tensorflow/lite/experimental/micro/examples/micro_vision/main.cc \
tensorflow/lite/experimental/micro/examples/micro_vision/main_functions.cc \
$(MICRO_VISION_MODEL_SRCS)
MICRO_VISION_HDRS := \
tensorflow/lite/experimental/micro/examples/micro_vision/detection_responder.h \
tensorflow/lite/experimental/micro/examples/micro_vision/image_provider.h \
tensorflow/lite/experimental/micro/examples/micro_vision/main_functions.h \
$(MICRO_VISION_MODEL_HDRS)
# Find any platform-specific rules for this example.
include $(wildcard tensorflow/lite/experimental/micro/examples/micro_vision/*/Makefile.inc)
# Tests loading and running a vision model.
$(eval $(call microlite_test,micro_vision_test,\
$(MICRO_VISION_TEST_SRCS),$(MICRO_VISION_TEST_HDRS)))
# Tests the image provider module.
$(eval $(call microlite_test,image_provider_test,\
$(IMAGE_PROVIDER_TEST_SRCS),$(IMAGE_PROVIDER_TEST_HDRS)))
# Tests the detection responder module.
$(eval $(call microlite_test,detection_responder_test,\
$(DETECTION_RESPONDER_TEST_SRCS),$(DETECTION_RESPONDER_TEST_HDRS)))
# Builds a standalone object recognition binary.
$(eval $(call microlite_test,micro_vision,\
$(MICRO_VISION_SRCS),$(MICRO_VISION_HDRS)))

View File

@ -1,14 +0,0 @@
ifeq ($(TARGET),$(filter $(TARGET),apollo3evb sparkfun_edge))
MICRO_VISION_SRCS += \
tensorflow/lite/experimental/micro/examples/micro_vision/himax_driver/HM01B0.c \
tensorflow/lite/experimental/micro/examples/micro_vision/himax_driver/HM01B0_debug.c \
tensorflow/lite/experimental/micro/examples/micro_vision/himax_driver/HM01B0_optimized.c
MICRO_VISION_HDRS += \
tensorflow/lite/experimental/micro/examples/micro_vision/himax_driver/HM01B0.h \
tensorflow/lite/experimental/micro/examples/micro_vision/himax_driver/HM01B0_debug.h \
tensorflow/lite/experimental/micro/examples/micro_vision/himax_driver/HM01B0_optimized.h \
tensorflow/lite/experimental/micro/examples/micro_vision/himax_driver/HM01B0_RAW8_QVGA_8bits_lsb_5fps.h \
tensorflow/lite/experimental/micro/examples/micro_vision/himax_driver/HM01B0_Walking1s_01.h \
tensorflow/lite/experimental/micro/examples/micro_vision/himax_driver/platform_Sparkfun_Edge.h
endif

View File

@ -100,7 +100,7 @@ tflite_micro_cc_test(
)
cc_binary(
name = "micro_vision",
name = "person_detection",
srcs = [
"main.cc",
"main_functions.cc",

View File

@ -0,0 +1,68 @@
$(eval $(call add_third_party_download,$(PERSON_MODEL_URL),$(PERSON_MODEL_MD5),person_model_grayscale,))
person_detection_MODEL_SRCS := \
tensorflow/lite/experimental/micro/examples/person_detection/model_settings.cc \
$(MAKEFILE_DIR)/downloads/person_model_grayscale/person_detect_model_data.cc
person_detection_MODEL_HDRS := \
tensorflow/lite/experimental/micro/examples/person_detection/model_settings.h \
tensorflow/lite/experimental/micro/examples/person_detection/person_detect_model_data.h
person_detection_TEST_SRCS := \
tensorflow/lite/experimental/micro/examples/person_detection/person_detection_test.cc \
$(MAKEFILE_DIR)/downloads/person_model_grayscale/no_person_image_data.cc \
$(MAKEFILE_DIR)/downloads/person_model_grayscale/person_image_data.cc \
$(person_detection_MODEL_SRCS)
person_detection_TEST_HDRS := \
tensorflow/lite/experimental/micro/examples/person_detection/no_person_image_data.h \
tensorflow/lite/experimental/micro/examples/person_detection/person_image_data.h \
$(person_detection_MODEL_HDRS)
IMAGE_PROVIDER_TEST_SRCS := \
tensorflow/lite/experimental/micro/examples/person_detection/image_provider.cc \
tensorflow/lite/experimental/micro/examples/person_detection/image_provider_test.cc \
tensorflow/lite/experimental/micro/examples/person_detection/model_settings.cc
IMAGE_PROVIDER_TEST_HDRS := \
tensorflow/lite/experimental/micro/examples/person_detection/image_provider.h \
tensorflow/lite/experimental/micro/examples/person_detection/model_settings.h
DETECTION_RESPONDER_TEST_SRCS := \
tensorflow/lite/experimental/micro/examples/person_detection/detection_responder.cc \
tensorflow/lite/experimental/micro/examples/person_detection/detection_responder_test.cc
DETECTION_RESPONDER_TEST_HDRS := \
tensorflow/lite/experimental/micro/examples/person_detection/detection_responder.h
person_detection_SRCS := \
tensorflow/lite/experimental/micro/examples/person_detection/detection_responder.cc \
tensorflow/lite/experimental/micro/examples/person_detection/image_provider.cc \
tensorflow/lite/experimental/micro/examples/person_detection/main.cc \
tensorflow/lite/experimental/micro/examples/person_detection/main_functions.cc \
$(person_detection_MODEL_SRCS)
person_detection_HDRS := \
tensorflow/lite/experimental/micro/examples/person_detection/detection_responder.h \
tensorflow/lite/experimental/micro/examples/person_detection/image_provider.h \
tensorflow/lite/experimental/micro/examples/person_detection/main_functions.h \
$(person_detection_MODEL_HDRS)
# Find any platform-specific rules for this example.
include $(wildcard tensorflow/lite/experimental/micro/examples/person_detection/*/Makefile.inc)
# Tests loading and running a vision model.
$(eval $(call microlite_test,person_detection_test,\
$(person_detection_TEST_SRCS),$(person_detection_TEST_HDRS)))
# Tests the image provider module.
$(eval $(call microlite_test,image_provider_test,\
$(IMAGE_PROVIDER_TEST_SRCS),$(IMAGE_PROVIDER_TEST_HDRS)))
# Tests the detection responder module.
$(eval $(call microlite_test,detection_responder_test,\
$(DETECTION_RESPONDER_TEST_SRCS),$(DETECTION_RESPONDER_TEST_HDRS)))
# Builds a standalone object recognition binary.
$(eval $(call microlite_test,person_detection,\
$(person_detection_SRCS),$(person_detection_HDRS)))

View File

@ -25,7 +25,7 @@ This will take a few minutes, and downloads frameworks the code uses like
finished, run:
```
make -f tensorflow/lite/experimental/micro/tools/make/Makefile test_micro_vision_test
make -f tensorflow/lite/experimental/micro/tools/make/Makefile test_person_detection_test
```
You should see a series of files get compiled, followed by some logging output
@ -37,7 +37,7 @@ and checks that the network correctly identifies them.
To understand how TensorFlow Lite does this, you can look at the `TestInvoke()`
function in
[micro_vision_test.cc](https://github.com/tensorflow/tensorflow/tree/master/tensorflow/lite/experimental/micro/examples/micro_vision/micro_vision_test.cc).
[person_detection_test.cc](https://github.com/tensorflow/tensorflow/tree/master/tensorflow/lite/experimental/micro/examples/person_detection/person_detection_test.cc).
It's a fairly small amount of code, creating an interpreter, getting a handle to
a model that's been compiled into the program, and then invoking the interpreter
with the model and sample inputs.
@ -59,13 +59,13 @@ The following command will download the required dependencies and then compile a
binary for the SparkFun Edge:
```
make -f tensorflow/lite/experimental/micro/tools/make/Makefile TARGET=sparkfun_edge micro_vision_bin
make -f tensorflow/lite/experimental/micro/tools/make/Makefile TARGET=sparkfun_edge person_detection_bin
```
The binary will be created in the following location:
```
tensorflow/lite/experimental/micro/tools/make/gen/sparkfun_edge_cortex-m4/bin/micro_vision.bin
tensorflow/lite/experimental/micro/tools/make/gen/sparkfun_edge_cortex-m4/bin/person_detection.bin
```
### Sign the binary
@ -87,7 +87,7 @@ Next, run the following command to create a signed binary:
```
python3 tensorflow/lite/experimental/micro/tools/make/downloads/AmbiqSuite-Rel2.0.0/tools/apollo3_scripts/create_cust_image_blob.py \
--bin tensorflow/lite/experimental/micro/tools/make/gen/sparkfun_edge_cortex-m4/bin/micro_vision.bin \
--bin tensorflow/lite/experimental/micro/tools/make/gen/sparkfun_edge_cortex-m4/bin/person_detection.bin \
--load-address 0xC000 \
--magic-num 0xCB \
-o main_nonsecure_ota \

View File

@ -13,13 +13,13 @@ See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#include "tensorflow/lite/experimental/micro/examples/micro_vision/image_provider.h"
#include "tensorflow/lite/experimental/micro/examples/person_detection/image_provider.h"
#include "tensorflow/lite/experimental/micro/examples/micro_vision/himax_driver/HM01B0.h"
#include "tensorflow/lite/experimental/micro/examples/micro_vision/himax_driver/HM01B0_RAW8_QVGA_8bits_lsb_5fps.h"
#include "tensorflow/lite/experimental/micro/examples/micro_vision/himax_driver/HM01B0_debug.h"
#include "tensorflow/lite/experimental/micro/examples/micro_vision/himax_driver/HM01B0_optimized.h"
#include "tensorflow/lite/experimental/micro/examples/micro_vision/himax_driver/platform_Sparkfun_Edge.h"
#include "tensorflow/lite/experimental/micro/examples/person_detection/himax_driver/HM01B0.h"
#include "tensorflow/lite/experimental/micro/examples/person_detection/himax_driver/HM01B0_RAW8_QVGA_8bits_lsb_5fps.h"
#include "tensorflow/lite/experimental/micro/examples/person_detection/himax_driver/HM01B0_debug.h"
#include "tensorflow/lite/experimental/micro/examples/person_detection/himax_driver/HM01B0_optimized.h"
#include "tensorflow/lite/experimental/micro/examples/person_detection/himax_driver/platform_Sparkfun_Edge.h"
// These are headers from Ambiq's Apollo3 SDK.
#include "am_bsp.h" // NOLINT

View File

@ -13,7 +13,7 @@ See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#include "tensorflow/lite/experimental/micro/examples/micro_vision/main_functions.h"
#include "tensorflow/lite/experimental/micro/examples/person_detection/main_functions.h"
// Arduino automatically calls the setup() and loop() functions in a sketch, so
// where other systems need their own main routine in this file, it can be left

View File

@ -13,7 +13,7 @@ See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#include "tensorflow/lite/experimental/micro/examples/micro_vision/detection_responder.h"
#include "tensorflow/lite/experimental/micro/examples/person_detection/detection_responder.h"
// This dummy implementation writes person and no person scores to the error
// console. Real applications will want to take some custom action instead, and

View File

@ -16,8 +16,8 @@ limitations under the License.
// Provides an interface to take an action based on the output from the person
// detection model.
#ifndef TENSORFLOW_LITE_EXPERIMENTAL_MICRO_EXAMPLES_MICRO_VISION_DETECTION_RESPONDER_H_
#define TENSORFLOW_LITE_EXPERIMENTAL_MICRO_EXAMPLES_MICRO_VISION_DETECTION_RESPONDER_H_
#ifndef TENSORFLOW_LITE_EXPERIMENTAL_MICRO_EXAMPLES_PERSON_DETECTION_DETECTION_RESPONDER_H_
#define TENSORFLOW_LITE_EXPERIMENTAL_MICRO_EXAMPLES_PERSON_DETECTION_DETECTION_RESPONDER_H_
#include "tensorflow/lite/c/c_api_internal.h"
#include "tensorflow/lite/experimental/micro/micro_error_reporter.h"
@ -31,4 +31,4 @@ limitations under the License.
void RespondToDetection(tflite::ErrorReporter* error_reporter,
uint8_t person_score, uint8_t no_person_score);
#endif // TENSORFLOW_LITE_EXPERIMENTAL_MICRO_EXAMPLES_MICRO_VISION_DETECTION_RESPONDER_H_
#endif // TENSORFLOW_LITE_EXPERIMENTAL_MICRO_EXAMPLES_PERSON_DETECTION_DETECTION_RESPONDER_H_

View File

@ -13,7 +13,7 @@ See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#include "tensorflow/lite/experimental/micro/examples/micro_vision/detection_responder.h"
#include "tensorflow/lite/experimental/micro/examples/person_detection/detection_responder.h"
#include "tensorflow/lite/experimental/micro/testing/micro_test.h"
#include "tensorflow/lite/experimental/micro/testing/test_utils.h"

View File

@ -13,8 +13,8 @@ See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#ifndef TENSORFLOW_LITE_EXPERIMENTAL_MICRO_EXAMPLES_MICRO_VISION_HIMAX_DRIVER_HM01B0_H_
#define TENSORFLOW_LITE_EXPERIMENTAL_MICRO_EXAMPLES_MICRO_VISION_HIMAX_DRIVER_HM01B0_H_
#ifndef TENSORFLOW_LITE_EXPERIMENTAL_MICRO_EXAMPLES_PERSON_DETECTION_HIMAX_DRIVER_HM01B0_H_
#define TENSORFLOW_LITE_EXPERIMENTAL_MICRO_EXAMPLES_PERSON_DETECTION_HIMAX_DRIVER_HM01B0_H_
#ifdef __cplusplus
extern "C" {
@ -402,4 +402,4 @@ uint32_t hm01b0_blocking_read_oneframe(hm01b0_cfg_t *psCfg, uint8_t *pui8Buffer,
}
#endif
#endif // TENSORFLOW_LITE_EXPERIMENTAL_MICRO_EXAMPLES_MICRO_VISION_HIMAX_DRIVER_HM01B0_H_
#endif // TENSORFLOW_LITE_EXPERIMENTAL_MICRO_EXAMPLES_PERSON_DETECTION_HIMAX_DRIVER_HM01B0_H_

View File

@ -13,8 +13,8 @@ See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#ifndef TENSORFLOW_LITE_EXPERIMENTAL_MICRO_EXAMPLES_MICRO_VISION_HIMAX_DRIVER_HM01B0_RAW8_QVGA_8BITS_LSB_5FPS_H_
#define TENSORFLOW_LITE_EXPERIMENTAL_MICRO_EXAMPLES_MICRO_VISION_HIMAX_DRIVER_HM01B0_RAW8_QVGA_8BITS_LSB_5FPS_H_
#ifndef TENSORFLOW_LITE_EXPERIMENTAL_MICRO_EXAMPLES_PERSON_DETECTION_HIMAX_DRIVER_HM01B0_RAW8_QVGA_8BITS_LSB_5FPS_H_
#define TENSORFLOW_LITE_EXPERIMENTAL_MICRO_EXAMPLES_PERSON_DETECTION_HIMAX_DRIVER_HM01B0_RAW8_QVGA_8BITS_LSB_5FPS_H_
#include "HM01B0.h"
@ -477,4 +477,4 @@ const hm_script_t sHM01B0InitScript[] = {
// ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
};
#endif // TENSORFLOW_LITE_EXPERIMENTAL_MICRO_EXAMPLES_MICRO_VISION_HIMAX_DRIVER_HM01B0_RAW8_QVGA_8BITS_LSB_5FPS_H_
#endif // TENSORFLOW_LITE_EXPERIMENTAL_MICRO_EXAMPLES_PERSON_DETECTION_HIMAX_DRIVER_HM01B0_RAW8_QVGA_8BITS_LSB_5FPS_H_

View File

@ -13,8 +13,8 @@ See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#ifndef TENSORFLOW_LITE_EXPERIMENTAL_MICRO_EXAMPLES_MICRO_VISION_HIMAX_DRIVER_HM01B0_WALKING1S_01_H_
#define TENSORFLOW_LITE_EXPERIMENTAL_MICRO_EXAMPLES_MICRO_VISION_HIMAX_DRIVER_HM01B0_WALKING1S_01_H_
#ifndef TENSORFLOW_LITE_EXPERIMENTAL_MICRO_EXAMPLES_PERSON_DETECTION_HIMAX_DRIVER_HM01B0_WALKING1S_01_H_
#define TENSORFLOW_LITE_EXPERIMENTAL_MICRO_EXAMPLES_PERSON_DETECTION_HIMAX_DRIVER_HM01B0_WALKING1S_01_H_
#include "HM01B0.h"
@ -53,4 +53,4 @@ const hm_script_t sHM01b0TestModeScript_Walking1s[] = {
}, // W 24 0104 01 2 1 ;
};
#endif // TENSORFLOW_LITE_EXPERIMENTAL_MICRO_EXAMPLES_MICRO_VISION_HIMAX_DRIVER_HM01B0_WALKING1S_01_H_
#endif // TENSORFLOW_LITE_EXPERIMENTAL_MICRO_EXAMPLES_PERSON_DETECTION_HIMAX_DRIVER_HM01B0_WALKING1S_01_H_

View File

@ -13,8 +13,8 @@ See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#ifndef TENSORFLOW_LITE_EXPERIMENTAL_MICRO_EXAMPLES_MICRO_VISION_HIMAX_DRIVER_HM01B0_DEBUG_H_
#define TENSORFLOW_LITE_EXPERIMENTAL_MICRO_EXAMPLES_MICRO_VISION_HIMAX_DRIVER_HM01B0_DEBUG_H_
#ifndef TENSORFLOW_LITE_EXPERIMENTAL_MICRO_EXAMPLES_PERSON_DETECTION_HIMAX_DRIVER_HM01B0_DEBUG_H_
#define TENSORFLOW_LITE_EXPERIMENTAL_MICRO_EXAMPLES_PERSON_DETECTION_HIMAX_DRIVER_HM01B0_DEBUG_H_
#ifdef __cplusplus
extern "C" {
@ -46,4 +46,4 @@ void hm01b0_framebuffer_dump(uint8_t* frame, uint32_t len);
}
#endif
#endif // TENSORFLOW_LITE_EXPERIMENTAL_MICRO_EXAMPLES_MICRO_VISION_HIMAX_DRIVER_HM01B0_DEBUG_H_
#endif // TENSORFLOW_LITE_EXPERIMENTAL_MICRO_EXAMPLES_PERSON_DETECTION_HIMAX_DRIVER_HM01B0_DEBUG_H_

View File

@ -13,8 +13,8 @@ See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#ifndef TENSORFLOW_LITE_EXPERIMENTAL_MICRO_EXAMPLES_MICRO_VISION_HIMAX_DRIVER_HM01B0_OPTIMIZED_H_
#define TENSORFLOW_LITE_EXPERIMENTAL_MICRO_EXAMPLES_MICRO_VISION_HIMAX_DRIVER_HM01B0_OPTIMIZED_H_
#ifndef TENSORFLOW_LITE_EXPERIMENTAL_MICRO_EXAMPLES_PERSON_DETECTION_HIMAX_DRIVER_HM01B0_OPTIMIZED_H_
#define TENSORFLOW_LITE_EXPERIMENTAL_MICRO_EXAMPLES_PERSON_DETECTION_HIMAX_DRIVER_HM01B0_OPTIMIZED_H_
#ifdef __cplusplus
extern "C" {
@ -46,4 +46,4 @@ uint32_t hm01b0_blocking_read_oneframe_scaled(uint8_t* buffer, int w, int h,
}
#endif
#endif // TENSORFLOW_LITE_EXPERIMENTAL_MICRO_EXAMPLES_MICRO_VISION_HIMAX_DRIVER_HM01B0_OPTIMIZED_H_
#endif // TENSORFLOW_LITE_EXPERIMENTAL_MICRO_EXAMPLES_PERSON_DETECTION_HIMAX_DRIVER_HM01B0_OPTIMIZED_H_

View File

@ -0,0 +1,14 @@
ifeq ($(TARGET),$(filter $(TARGET),apollo3evb sparkfun_edge))
person_detection_SRCS += \
tensorflow/lite/experimental/micro/examples/person_detection/himax_driver/HM01B0.c \
tensorflow/lite/experimental/micro/examples/person_detection/himax_driver/HM01B0_debug.c \
tensorflow/lite/experimental/micro/examples/person_detection/himax_driver/HM01B0_optimized.c
person_detection_HDRS += \
tensorflow/lite/experimental/micro/examples/person_detection/himax_driver/HM01B0.h \
tensorflow/lite/experimental/micro/examples/person_detection/himax_driver/HM01B0_debug.h \
tensorflow/lite/experimental/micro/examples/person_detection/himax_driver/HM01B0_optimized.h \
tensorflow/lite/experimental/micro/examples/person_detection/himax_driver/HM01B0_RAW8_QVGA_8bits_lsb_5fps.h \
tensorflow/lite/experimental/micro/examples/person_detection/himax_driver/HM01B0_Walking1s_01.h \
tensorflow/lite/experimental/micro/examples/person_detection/himax_driver/platform_Sparkfun_Edge.h
endif

View File

@ -13,8 +13,8 @@ See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#ifndef TENSORFLOW_LITE_EXPERIMENTAL_MICRO_EXAMPLES_MICRO_VISION_HIMAX_DRIVER_PLATFORM_SPARKFUN_EDGE_H_
#define TENSORFLOW_LITE_EXPERIMENTAL_MICRO_EXAMPLES_MICRO_VISION_HIMAX_DRIVER_PLATFORM_SPARKFUN_EDGE_H_
#ifndef TENSORFLOW_LITE_EXPERIMENTAL_MICRO_EXAMPLES_PERSON_DETECTION_HIMAX_DRIVER_PLATFORM_SPARKFUN_EDGE_H_
#define TENSORFLOW_LITE_EXPERIMENTAL_MICRO_EXAMPLES_PERSON_DETECTION_HIMAX_DRIVER_PLATFORM_SPARKFUN_EDGE_H_
#ifdef __cplusplus
extern "C" {
@ -51,4 +51,4 @@ extern "C" {
}
#endif
#endif // TENSORFLOW_LITE_EXPERIMENTAL_MICRO_EXAMPLES_MICRO_VISION_HIMAX_DRIVER_PLATFORM_SPARKFUN_EDGE_H_
#endif // TENSORFLOW_LITE_EXPERIMENTAL_MICRO_EXAMPLES_PERSON_DETECTION_HIMAX_DRIVER_PLATFORM_SPARKFUN_EDGE_H_

View File

@ -13,8 +13,8 @@ See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#include "tensorflow/lite/experimental/micro/examples/micro_vision/image_provider.h"
#include "tensorflow/lite/experimental/micro/examples/micro_vision/model_settings.h"
#include "tensorflow/lite/experimental/micro/examples/person_detection/image_provider.h"
#include "tensorflow/lite/experimental/micro/examples/person_detection/model_settings.h"
TfLiteStatus GetImage(tflite::ErrorReporter* error_reporter, int image_width,
int image_height, int channels, uint8_t* image_data) {

View File

@ -13,8 +13,8 @@ See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#ifndef TENSORFLOW_LITE_EXPERIMENTAL_MICRO_EXAMPLES_MICRO_VISION_IMAGE_PROVIDER_H_
#define TENSORFLOW_LITE_EXPERIMENTAL_MICRO_EXAMPLES_MICRO_VISION_IMAGE_PROVIDER_H_
#ifndef TENSORFLOW_LITE_EXPERIMENTAL_MICRO_EXAMPLES_PERSON_DETECTION_IMAGE_PROVIDER_H_
#define TENSORFLOW_LITE_EXPERIMENTAL_MICRO_EXAMPLES_PERSON_DETECTION_IMAGE_PROVIDER_H_
#include "tensorflow/lite/c/c_api_internal.h"
#include "tensorflow/lite/experimental/micro/micro_error_reporter.h"
@ -36,4 +36,4 @@ limitations under the License.
TfLiteStatus GetImage(tflite::ErrorReporter* error_reporter, int image_width,
int image_height, int channels, uint8_t* image_data);
#endif // TENSORFLOW_LITE_EXPERIMENTAL_MICRO_EXAMPLES_MICRO_VISION_IMAGE_PROVIDER_H_
#endif // TENSORFLOW_LITE_EXPERIMENTAL_MICRO_EXAMPLES_PERSON_DETECTION_IMAGE_PROVIDER_H_

View File

@ -13,12 +13,12 @@ See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#include "tensorflow/lite/experimental/micro/examples/micro_vision/image_provider.h"
#include "tensorflow/lite/experimental/micro/examples/person_detection/image_provider.h"
#include <limits>
#include "tensorflow/lite/c/c_api_internal.h"
#include "tensorflow/lite/experimental/micro/examples/micro_vision/model_settings.h"
#include "tensorflow/lite/experimental/micro/examples/person_detection/model_settings.h"
#include "tensorflow/lite/experimental/micro/micro_error_reporter.h"
#include "tensorflow/lite/experimental/micro/testing/micro_test.h"

View File

@ -13,7 +13,7 @@ See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#include "tensorflow/lite/experimental/micro/examples/gesture_recognition/main_functions.h"
#include "tensorflow/lite/experimental/micro/examples/person_detection/main_functions.h"
// This is the default main used on systems that have the standard C entry
// point. Other devices (for example FreeRTOS or ESP32) that have different

View File

@ -13,12 +13,12 @@ See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#include "tensorflow/lite/experimental/micro/examples/micro_vision/main_functions.h"
#include "tensorflow/lite/experimental/micro/examples/person_detection/main_functions.h"
#include "tensorflow/lite/experimental/micro/examples/micro_vision/detection_responder.h"
#include "tensorflow/lite/experimental/micro/examples/micro_vision/image_provider.h"
#include "tensorflow/lite/experimental/micro/examples/micro_vision/model_settings.h"
#include "tensorflow/lite/experimental/micro/examples/micro_vision/person_detect_model_data.h"
#include "tensorflow/lite/experimental/micro/examples/person_detection/detection_responder.h"
#include "tensorflow/lite/experimental/micro/examples/person_detection/image_provider.h"
#include "tensorflow/lite/experimental/micro/examples/person_detection/model_settings.h"
#include "tensorflow/lite/experimental/micro/examples/person_detection/person_detect_model_data.h"
#include "tensorflow/lite/experimental/micro/micro_error_reporter.h"
#include "tensorflow/lite/experimental/micro/micro_interpreter.h"
#include "tensorflow/lite/experimental/micro/micro_mutable_op_resolver.h"

View File

@ -13,8 +13,8 @@ See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#ifndef TENSORFLOW_LITE_EXPERIMENTAL_MICRO_EXAMPLES_GESTURE_RECOGNITION_MAIN_FUNCTIONS_H_
#define TENSORFLOW_LITE_EXPERIMENTAL_MICRO_EXAMPLES_GESTURE_RECOGNITION_MAIN_FUNCTIONS_H_
#ifndef TENSORFLOW_LITE_EXPERIMENTAL_MICRO_EXAMPLES_PERSON_DETECTION_MAIN_FUNCTIONS_H_
#define TENSORFLOW_LITE_EXPERIMENTAL_MICRO_EXAMPLES_PERSON_DETECTION_MAIN_FUNCTIONS_H_
// Initializes all data needed for the example. The name is important, and needs
// to be setup() for Arduino compatibility.
@ -25,4 +25,4 @@ void setup();
// compatibility.
void loop();
#endif // TENSORFLOW_LITE_EXPERIMENTAL_MICRO_EXAMPLES_GESTURE_RECOGNITION_MAIN_FUNCTIONS_H_
#endif // TENSORFLOW_LITE_EXPERIMENTAL_MICRO_EXAMPLES_PERSON_DETECTION_MAIN_FUNCTIONS_H_

View File

@ -13,7 +13,7 @@ See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#include "tensorflow/lite/experimental/micro/examples/micro_vision/model_settings.h"
#include "tensorflow/lite/experimental/micro/examples/person_detection/model_settings.h"
const char* kCategoryLabels[kCategoryCount] = {
"unused",

View File

@ -13,8 +13,8 @@ See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#ifndef TENSORFLOW_LITE_EXPERIMENTAL_MICRO_EXAMPLES_MICRO_VISION_MODEL_SETTINGS_H_
#define TENSORFLOW_LITE_EXPERIMENTAL_MICRO_EXAMPLES_MICRO_VISION_MODEL_SETTINGS_H_
#ifndef TENSORFLOW_LITE_EXPERIMENTAL_MICRO_EXAMPLES_PERSON_DETECTION_MODEL_SETTINGS_H_
#define TENSORFLOW_LITE_EXPERIMENTAL_MICRO_EXAMPLES_PERSON_DETECTION_MODEL_SETTINGS_H_
// Keeping these as constant expressions allow us to allocate fixed-sized arrays
// on the stack for our working memory.
@ -32,4 +32,4 @@ constexpr int kPersonIndex = 1;
constexpr int kNotAPersonIndex = 2;
extern const char* kCategoryLabels[kCategoryCount];
#endif // TENSORFLOW_LITE_EXPERIMENTAL_MICRO_EXAMPLES_MICRO_VISION_MODEL_SETTINGS_H_
#endif // TENSORFLOW_LITE_EXPERIMENTAL_MICRO_EXAMPLES_PERSON_DETECTION_MODEL_SETTINGS_H_

View File

@ -19,12 +19,12 @@ limitations under the License.
// Skip the 54 byte bmp3 header and add the rest of the bytes to a C array:
// xxd -s 54 -i /tmp/noperson.bmp3 > /tmp/noperson.cc
#ifndef TENSORFLOW_LITE_EXPERIMENTAL_MICRO_EXAMPLES_MICRO_VISION_NO_PERSON_IMAGE_DATA_H_
#define TENSORFLOW_LITE_EXPERIMENTAL_MICRO_EXAMPLES_MICRO_VISION_NO_PERSON_IMAGE_DATA_H_
#ifndef TENSORFLOW_LITE_EXPERIMENTAL_MICRO_EXAMPLES_PERSON_DETECTION_NO_PERSON_IMAGE_DATA_H_
#define TENSORFLOW_LITE_EXPERIMENTAL_MICRO_EXAMPLES_PERSON_DETECTION_NO_PERSON_IMAGE_DATA_H_
#include <cstdint>
extern const int g_no_person_data_size;
extern const uint8_t g_no_person_data[];
#endif // TENSORFLOW_LITE_EXPERIMENTAL_MICRO_EXAMPLES_MICRO_VISION_NO_PERSON_IMAGE_DATA_H_
#endif // TENSORFLOW_LITE_EXPERIMENTAL_MICRO_EXAMPLES_PERSON_DETECTION_NO_PERSON_IMAGE_DATA_H_

View File

@ -18,10 +18,10 @@ limitations under the License.
// don't have a file system. It was created using the command:
// xxd -i person_detect.tflite > person_detect_model_data.cc
#ifndef TENSORFLOW_LITE_EXPERIMENTAL_MICRO_EXAMPLES_MICRO_VISION_PERSON_DETECT_MODEL_DATA_H_
#define TENSORFLOW_LITE_EXPERIMENTAL_MICRO_EXAMPLES_MICRO_VISION_PERSON_DETECT_MODEL_DATA_H_
#ifndef TENSORFLOW_LITE_EXPERIMENTAL_MICRO_EXAMPLES_PERSON_DETECTION_PERSON_DETECT_MODEL_DATA_H_
#define TENSORFLOW_LITE_EXPERIMENTAL_MICRO_EXAMPLES_PERSON_DETECTION_PERSON_DETECT_MODEL_DATA_H_
extern const unsigned char g_person_detect_model_data[];
extern const int g_person_detect_model_data_len;
#endif // TENSORFLOW_LITE_EXPERIMENTAL_MICRO_EXAMPLES_MICRO_VISION_PERSON_DETECT_MODEL_DATA_H_
#endif // TENSORFLOW_LITE_EXPERIMENTAL_MICRO_EXAMPLES_PERSON_DETECTION_PERSON_DETECT_MODEL_DATA_H_

View File

@ -14,10 +14,10 @@ limitations under the License.
==============================================================================*/
#include "tensorflow/lite/c/c_api_internal.h"
#include "tensorflow/lite/experimental/micro/examples/micro_vision/model_settings.h"
#include "tensorflow/lite/experimental/micro/examples/micro_vision/no_person_image_data.h"
#include "tensorflow/lite/experimental/micro/examples/micro_vision/person_detect_model_data.h"
#include "tensorflow/lite/experimental/micro/examples/micro_vision/person_image_data.h"
#include "tensorflow/lite/experimental/micro/examples/person_detection/model_settings.h"
#include "tensorflow/lite/experimental/micro/examples/person_detection/no_person_image_data.h"
#include "tensorflow/lite/experimental/micro/examples/person_detection/person_detect_model_data.h"
#include "tensorflow/lite/experimental/micro/examples/person_detection/person_image_data.h"
#include "tensorflow/lite/experimental/micro/micro_error_reporter.h"
#include "tensorflow/lite/experimental/micro/micro_interpreter.h"
#include "tensorflow/lite/experimental/micro/micro_mutable_op_resolver.h"

View File

@ -19,12 +19,12 @@ limitations under the License.
// Skip the 54 byte bmp3 header and add the rest of the bytes to a C array:
// xxd -s 54 -i /tmp/person.bmp3 > /tmp/person.cc
#ifndef TENSORFLOW_LITE_EXPERIMENTAL_MICRO_EXAMPLES_MICRO_VISION_PERSON_IMAGE_DATA_H_
#define TENSORFLOW_LITE_EXPERIMENTAL_MICRO_EXAMPLES_MICRO_VISION_PERSON_IMAGE_DATA_H_
#ifndef TENSORFLOW_LITE_EXPERIMENTAL_MICRO_EXAMPLES_PERSON_DETECTION_PERSON_IMAGE_DATA_H_
#define TENSORFLOW_LITE_EXPERIMENTAL_MICRO_EXAMPLES_PERSON_DETECTION_PERSON_IMAGE_DATA_H_
#include <cstdint>
extern const int g_person_data_size;
extern const uint8_t g_person_data[];
#endif // TENSORFLOW_LITE_EXPERIMENTAL_MICRO_EXAMPLES_MICRO_VISION_PERSON_IMAGE_DATA_H_
#endif // TENSORFLOW_LITE_EXPERIMENTAL_MICRO_EXAMPLES_PERSON_DETECTION_PERSON_IMAGE_DATA_H_

View File

@ -13,7 +13,7 @@ See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#include "tensorflow/lite/experimental/micro/examples/micro_vision/detection_responder.h"
#include "tensorflow/lite/experimental/micro/examples/person_detection/detection_responder.h"
#include "am_bsp.h" // NOLINT

View File

@ -13,13 +13,13 @@ See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#include "tensorflow/lite/experimental/micro/examples/micro_vision/image_provider.h"
#include "tensorflow/lite/experimental/micro/examples/person_detection/image_provider.h"
#include "tensorflow/lite/experimental/micro/examples/micro_vision/himax_driver/HM01B0.h"
#include "tensorflow/lite/experimental/micro/examples/micro_vision/himax_driver/HM01B0_RAW8_QVGA_8bits_lsb_5fps.h"
#include "tensorflow/lite/experimental/micro/examples/micro_vision/himax_driver/HM01B0_debug.h"
#include "tensorflow/lite/experimental/micro/examples/micro_vision/himax_driver/HM01B0_optimized.h"
#include "tensorflow/lite/experimental/micro/examples/micro_vision/himax_driver/platform_Sparkfun_Edge.h"
#include "tensorflow/lite/experimental/micro/examples/person_detection/himax_driver/HM01B0.h"
#include "tensorflow/lite/experimental/micro/examples/person_detection/himax_driver/HM01B0_RAW8_QVGA_8bits_lsb_5fps.h"
#include "tensorflow/lite/experimental/micro/examples/person_detection/himax_driver/HM01B0_debug.h"
#include "tensorflow/lite/experimental/micro/examples/person_detection/himax_driver/HM01B0_optimized.h"
#include "tensorflow/lite/experimental/micro/examples/person_detection/himax_driver/platform_Sparkfun_Edge.h"
// These are headers from Ambiq's Apollo3 SDK.
#include "am_bsp.h" // NOLINT

View File

@ -22,8 +22,8 @@ import io
import numpy as np
from tensorflow.lite.experimental.micro.examples.micro_vision.utils.raw_to_bitmap import parse_file
from tensorflow.lite.experimental.micro.examples.micro_vision.utils.raw_to_bitmap import reshape_bitmaps
from tensorflow.lite.experimental.micro.examples.person_detection.utils.raw_to_bitmap import parse_file
from tensorflow.lite.experimental.micro.examples.person_detection.utils.raw_to_bitmap import reshape_bitmaps
from tensorflow.python.platform import test
_RGB_RAW = u"""

View File

@ -31,5 +31,5 @@ tar xzf arduino-cli_0.4.0_Linux_64bit.tar.gz
/tmp/arduino-cli core update-index
/tmp/arduino-cli core install arduino:mbed
# Required by gesture_recognition
# Required by magic_wand
/tmp/arduino-cli lib install Arduino_LSM9DS1@1.0.0

View File

@ -41,7 +41,7 @@ def move_person_data(library_dir):
'person_model_grayscale/person_detect_model_data.cpp'
)
new_person_data_path = os.path.join(
library_dir, 'examples/micro_vision/person_detect_model_data.cpp')
library_dir, 'examples/person_detection/person_detect_model_data.cpp')
if os.path.exists(old_person_data_path):
os.rename(old_person_data_path, new_person_data_path)
# Update include.
@ -49,7 +49,7 @@ def move_person_data(library_dir):
file_contents = source_file.read()
file_contents = file_contents.replace(
'#include "tensorflow/lite/experimental/micro/examples/' +
'micro_vision/person_detect_model_data.h"',
'person_detection/person_detect_model_data.h"',
'#include "person_detect_model_data.h"')
with open(new_person_data_path, 'w') as source_file:
source_file.write(file_contents)

View File

@ -28,8 +28,8 @@ touch ${EXAMPLES_SUBDIR_FILE}
TENSORFLOW_SRC_DIR=${LIBRARY_DIR}/src/
PERSON_DATA_FILE=${TENSORFLOW_SRC_DIR}tensorflow/lite/experimental/micro/tools/make/downloads/person_model_grayscale/person_detect_model_data.cpp
mkdir -p `dirname ${PERSON_DATA_FILE}`
echo '#include "tensorflow/lite/experimental/micro/examples/micro_vision/person_detect_model_data.h"' > ${PERSON_DATA_FILE}
mkdir -p ${LIBRARY_DIR}/examples/micro_vision
echo '#include "tensorflow/lite/experimental/micro/examples/person_detection/person_detect_model_data.h"' > ${PERSON_DATA_FILE}
mkdir -p ${LIBRARY_DIR}/examples/person_detection
EXAMPLE_INO_FILE=${LIBRARY_DIR}/examples/something/main.ino
mkdir -p `dirname ${EXAMPLE_INO_FILE}`
@ -44,7 +44,7 @@ if [[ ! -f ${EXPECTED_EXAMPLES_SUBDIR_FILE} ]]; then
exit 1
fi
EXPECTED_PERSON_DATA_FILE=${LIBRARY_DIR}/examples/micro_vision/person_detect_model_data.cpp
EXPECTED_PERSON_DATA_FILE=${LIBRARY_DIR}/examples/person_detection/person_detect_model_data.cpp
if [[ ! -f ${EXPECTED_PERSON_DATA_FILE} ]]; then
echo "${EXPECTED_PERSON_DATA_FILE} wasn't created."
exit 1

View File

@ -52,5 +52,5 @@ SIFIVE_FE310_LIB_MD5 := "06ee24c4956f8e21670ab3395861fe64"
KISSFFT_URL="https://github.com/mborgerding/kissfft/archive/v130.zip"
KISSFFT_MD5="438ba1fef5783cc5f5f201395cc477ca"
PERSON_MODEL_URL := "https://storage.googleapis.com/download.tensorflow.org/data/tf_lite_micro_person_data_grayscale.zip"
PERSON_MODEL_MD5 := "cd1059dd1c94afadd59608202732ad63"
PERSON_MODEL_URL := "https://storage.googleapis.com/download.tensorflow.org/data/tf_lite_micro_person_data_grayscale_2019_11_07.zip"
PERSON_MODEL_MD5 := "e6430de25aa92bcb807d07278a1b5b90"

View File

@ -81,8 +81,8 @@ This example shows how you can use TensorFlow Lite to run a 250 kilobyte neural
network to recognize people in images captured by a camera. It is designed to
run on systems with small amounts of memory such as microcontrollers and DSPs.
<a class="button button-primary" href="https://github.com/tensorflow/tensorflow/tree/master/tensorflow/lite/experimental/micro/examples/micro_vision">Micro
Vision example</a>
<a class="button button-primary" href="https://github.com/tensorflow/tensorflow/tree/master/tensorflow/lite/experimental/micro/examples/person_detection">Person
detection example</a>
The example is tested on the following platforms: