diff --git a/tensorflow/lite/micro/examples/person_detection_experimental/himax_we1_evb/detection_responder.cc b/tensorflow/lite/micro/examples/person_detection_experimental/himax_we1_evb/detection_responder.cc
index a353dc8a9b8..ae5de962fd3 100644
--- a/tensorflow/lite/micro/examples/person_detection_experimental/himax_we1_evb/detection_responder.cc
+++ b/tensorflow/lite/micro/examples/person_detection_experimental/himax_we1_evb/detection_responder.cc
@@ -22,7 +22,6 @@ limitations under the License.
 // should implement their own versions of this function.
 void RespondToDetection(tflite::ErrorReporter* error_reporter,
                         int8_t person_score, int8_t no_person_score) {
-
   if (person_score > no_person_score) {
     hx_drv_led_on(HX_DRV_LED_GREEN);
   } else {
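
A minimal sketch of an alternative responder (not part of this patch), assuming an
application that only wants the scores on the debug console rather than on the WE1
EVB LEDs; it follows the same shape as the stock responder in the parent
person_detection_experimental example:

    #include "tensorflow/lite/micro/examples/person_detection_experimental/detection_responder.h"

    // Report the raw int8 scores over the debug output instead of driving LEDs.
    void RespondToDetection(tflite::ErrorReporter* error_reporter,
                            int8_t person_score, int8_t no_person_score) {
      TF_LITE_REPORT_ERROR(error_reporter, "person score:%d no person score %d",
                           person_score, no_person_score);
    }
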
diff --git a/tensorflow/lite/micro/examples/person_detection_experimental/himax_we1_evb/image_provider.cc b/tensorflow/lite/micro/examples/person_detection_experimental/himax_we1_evb/image_provider.cc
index 727d93c61d1..4a3ab5775be 100644
--- a/tensorflow/lite/micro/examples/person_detection_experimental/himax_we1_evb/image_provider.cc
+++ b/tensorflow/lite/micro/examples/person_detection_experimental/himax_we1_evb/image_provider.cc
@@ -21,14 +21,12 @@ limitations under the License.
 
 hx_drv_sensor_image_config_t g_pimg_config;
 
-
 TfLiteStatus GetImage(tflite::ErrorReporter* error_reporter, int image_width,
                       int image_height, int channels, int8_t* image_data) {
   static bool is_initialized = false;
 
   if (!is_initialized) {
-    if(hx_drv_sensor_initial(&g_pimg_config)!= HX_DRV_LIB_PASS)
-    {
+    if (hx_drv_sensor_initial(&g_pimg_config) != HX_DRV_LIB_PASS) {
       return kTfLiteError;
     }
     is_initialized = true;
@@ -36,9 +34,9 @@ TfLiteStatus GetImage(tflite::ErrorReporter* error_reporter, int image_width,
 
   hx_drv_sensor_capture(&g_pimg_config);
 
-  hx_drv_image_rescale((uint8_t*)g_pimg_config.raw_address, g_pimg_config.img_width, g_pimg_config.img_height,
-                     image_data, image_width, image_height);
-
+  hx_drv_image_rescale((uint8_t*)g_pimg_config.raw_address,
+                       g_pimg_config.img_width, g_pimg_config.img_height,
+                       image_data, image_width, image_height);
 
   return kTfLiteOk;
 }
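
For reference, GetImage() is called once per inference with the model's input
geometry and the interpreter's signed int8 input buffer, as in the loop() of the
main_functions.cc removed later in this patch:

    if (kTfLiteOk != GetImage(error_reporter, kNumCols, kNumRows, kNumChannels,
                              input->data.int8)) {
      TF_LITE_REPORT_ERROR(error_reporter, "Image capture failed.");
    }
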
diff --git a/tensorflow/lite/micro/examples/person_detection_experimental/himax_we1_evb/main_functions.cc b/tensorflow/lite/micro/examples/person_detection_experimental/himax_we1_evb/main_functions.cc
deleted file mode 100644
index f0c7a405974..00000000000
--- a/tensorflow/lite/micro/examples/person_detection_experimental/himax_we1_evb/main_functions.cc
+++ /dev/null
@@ -1,126 +0,0 @@
-/* Copyright 2019 The TensorFlow Authors. All Rights Reserved.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-==============================================================================*/
-
-#include "tensorflow/lite/micro/examples/person_detection_experimental/main_functions.h"
-
-#include "tensorflow/lite/micro/examples/person_detection_experimental/detection_responder.h"
-#include "tensorflow/lite/micro/examples/person_detection_experimental/image_provider.h"
-#include "tensorflow/lite/micro/examples/person_detection_experimental/model_settings.h"
-#include "tensorflow/lite/micro/examples/person_detection_experimental/person_detect_model_data.h"
-#include "tensorflow/lite/micro/kernels/micro_ops.h"
-#include "tensorflow/lite/micro/micro_error_reporter.h"
-#include "tensorflow/lite/micro/micro_interpreter.h"
-#include "tensorflow/lite/micro/micro_mutable_op_resolver.h"
-#include "tensorflow/lite/schema/schema_generated.h"
-#include "tensorflow/lite/version.h"
-
-// Globals, used for compatibility with Arduino-style sketches.
-namespace {
-tflite::ErrorReporter* error_reporter = nullptr;
-const tflite::Model* model = nullptr;
-tflite::MicroInterpreter* interpreter = nullptr;
-TfLiteTensor* input = nullptr;
-
-// In order to use optimized tensorflow lite kernels, a signed int8 quantized
-// model is preferred over the legacy unsigned model format. This means that
-// throughout this project, input images must be converted from unsigned to
-// signed format. The easiest and quickest way to convert from unsigned to
-// signed 8-bit integers is to subtract 128 from the unsigned value to get a
-// signed value.
-
-// An area of memory to use for input, output, and intermediate arrays.
-constexpr int kTensorArenaSize = 125 * 1024;
-#pragma Bss(".tensor_arena")
-static uint8_t tensor_arena[kTensorArenaSize];
-#pragma Bss()
-}  // namespace
-
-// The name of this function is important for Arduino compatibility.
-void setup() {
-  // Set up logging. Google style is to avoid globals or statics because of
-  // lifetime uncertainty, but since this has a trivial destructor it's okay.
-  // NOLINTNEXTLINE(runtime-global-variables)
-  static tflite::MicroErrorReporter micro_error_reporter;
-  error_reporter = &micro_error_reporter;
-
-  // Map the model into a usable data structure. This doesn't involve any
-  // copying or parsing, it's a very lightweight operation.
-  model = tflite::GetModel(g_person_detect_model_data);
-  if (model->version() != TFLITE_SCHEMA_VERSION) {
-    TF_LITE_REPORT_ERROR(error_reporter,
-                         "Model provided is schema version %d not equal "
-                         "to supported version %d.",
-                         model->version(), TFLITE_SCHEMA_VERSION);
-    return;
-  }
-
-  // Pull in only the operation implementations we need.
-  // This relies on a complete list of all the ops needed by this graph.
-  // An easier approach is to just use the AllOpsResolver, but this will
-  // incur some penalty in code space for op implementations that are not
-  // needed by this graph.
-  //
-  // tflite::AllOpsResolver resolver;
-  // NOLINTNEXTLINE(runtime-global-variables)
-  static tflite::MicroMutableOpResolver<5> micro_op_resolver;
-  micro_op_resolver.AddBuiltin(
-      tflite::BuiltinOperator_DEPTHWISE_CONV_2D,
-      tflite::ops::micro::Register_DEPTHWISE_CONV_2D());
-  micro_op_resolver.AddBuiltin(tflite::BuiltinOperator_CONV_2D,
-                               tflite::ops::micro::Register_CONV_2D());
-  micro_op_resolver.AddBuiltin(tflite::BuiltinOperator_AVERAGE_POOL_2D,
-                               tflite::ops::micro::Register_AVERAGE_POOL_2D());
-  micro_op_resolver.AddBuiltin(tflite::BuiltinOperator_RESHAPE,
-                               tflite::ops::micro::Register_RESHAPE());
-  micro_op_resolver.AddBuiltin(tflite::BuiltinOperator_SOFTMAX,
-                               tflite::ops::micro::Register_SOFTMAX());
-
-  // Build an interpreter to run the model with.
-  // NOLINTNEXTLINE(runtime-global-variables)
-  static tflite::MicroInterpreter static_interpreter(
-      model, micro_op_resolver, tensor_arena, kTensorArenaSize, error_reporter);
-  interpreter = &static_interpreter;
-
-  // Allocate memory from the tensor_arena for the model's tensors.
-  TfLiteStatus allocate_status = interpreter->AllocateTensors();
-  if (allocate_status != kTfLiteOk) {
-    TF_LITE_REPORT_ERROR(error_reporter, "AllocateTensors() failed");
-    return;
-  }
-
-  // Get information about the memory area to use for the model's input.
-  input = interpreter->input(0);
-}
-
-// The name of this function is important for Arduino compatibility.
-void loop() {
-  // Get image from provider.
-  if (kTfLiteOk != GetImage(error_reporter, kNumCols, kNumRows, kNumChannels,
-                            input->data.int8)) {
-    TF_LITE_REPORT_ERROR(error_reporter, "Image capture failed.");
-  }
-
-  // Run the model on this input and make sure it succeeds.
-  if (kTfLiteOk != interpreter->Invoke()) {
-    TF_LITE_REPORT_ERROR(error_reporter, "Invoke failed.");
-  }
-
-  TfLiteTensor* output = interpreter->output(0);
-
-  // Process the inference results.
-  int8_t person_score = output->data.uint8[kPersonIndex];
-  int8_t no_person_score = output->data.uint8[kNotAPersonIndex];
-  RespondToDetection(error_reporter, person_score, no_person_score);
-}
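
The comments in the deleted main_functions.cc above describe converting image data
from the legacy unsigned format to the signed int8 format expected by the quantized
model by subtracting 128. A minimal sketch of that conversion (illustrative only;
the helper name is not from this patch):

    #include <stdint.h>

    // Map the uint8 range [0, 255] onto the int8 range [-128, 127],
    // i.e. 0 -> -128, 128 -> 0, 255 -> 127.
    void ConvertUint8ImageToInt8(const uint8_t* src, int8_t* dst, int len) {
      for (int i = 0; i < len; ++i) {
        dst[i] = static_cast<int8_t>(src[i] - 128);
      }
    }
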
diff --git a/tensorflow/lite/micro/himax_we1_evb/debug_log.cc b/tensorflow/lite/micro/himax_we1_evb/debug_log.cc
index 32af2625630..36ac3f3fa03 100644
--- a/tensorflow/lite/micro/himax_we1_evb/debug_log.cc
+++ b/tensorflow/lite/micro/himax_we1_evb/debug_log.cc
@@ -20,12 +20,11 @@ limitations under the License.
 #include "tensorflow/lite/micro/debug_log.h"
 #include "hx_drv_tflm.h"
 
-
 extern "C" void DebugLog(const char* s) {
   static bool is_initialized = false;
   if (!is_initialized) {
-	  hx_drv_uart_initial();
-	  is_initialized = true;
+    hx_drv_uart_initial();
+    is_initialized = true;
   }
 
   hx_drv_uart_print("%s", s);