/*
www.aifes.ai
https://github.com/Fraunhofer-IMS/AIfES_for_Arduino
Copyright (C) 2020-2022 Fraunhofer Institute for Microelectronic Circuits and Systems.
All rights reserved.
AIfES is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <https://www.gnu.org/licenses/>.
AIfES-Express gesture inference
--------------------
Versions:
1.0.0 Initial version
You can find more AIfES tutorials here:
https://create.arduino.cc/projecthub/aifes_team
*/
//#include <aifes.h>
#include <math.h>             // fabsf() used in motionDetected()
#include <Wire.h>
#include <Adafruit_MPU6050.h>
#include <Adafruit_Sensor.h>
#include "AIfES_model.h"
Adafruit_MPU6050 mpu;  // MPU-6050 accelerometer/gyro driver (I2C)

#define NUM_SAMPLES 10      // IMU samples per gesture window (15 and 30 were also tried)
#define NUM_AXES 3          // accelerometer axes: x, y, z
#define TRUNCATE 20         // clamp per-axis acceleration to [-TRUNCATE, TRUNCATE]
#define ACCEL_THRESHOLD 20  // minimum summed |accel| that counts as motion (5 was also tried)
#define INTERVAL 30         // ms between consecutive samples while recording
// Parenthesized so the macro expands safely inside larger expressions
// (e.g. "INPUTS + 1" or division); the unparenthesized form is a
// classic operator-precedence hazard.
#define INPUTS (NUM_SAMPLES * NUM_AXES)
// Baseline for calibration (resting-orientation offset per axis)
float baseline[NUM_AXES];
// Features (inputs) of a single gesture, laid out [sample][axis] flattened
float features[INPUTS];
// FNN output: one score per gesture class
float output_data[3];
// Scratch event structs reused by every mpu.getEvent() call
sensors_event_t a, g, temp;
// One-time initialization: open the serial link, detect the MPU-6050 on
// the I2C bus (halting forever if absent), and configure its ranges and
// low-pass filter.
void setup() {
  Serial.begin(115200);
  while (!Serial);  // wait for the USB serial port (needed on native-USB boards)
  Serial.println("Gesture inference:");
  Serial.print(INPUTS);
  Serial.println(" features");
  // Fixed typo in the user-facing message: "gestrures" -> "gestures".
  Serial.println("Repeat your gestures");
  delay(10);

  // Try to initialize! Without the IMU the sketch cannot do anything,
  // so stop here rather than run with garbage input.
  if (!mpu.begin()) {
    Serial.println("Failed to find MPU6050 chip");
    while (1) {
      delay(10);
    }
  }
  Serial.println("MPU6050 Found!");
  Serial.println("");
  Serial.println("");

  // NOTE(review): these ranges/filter presumably must match the settings
  // used when the training data was recorded — confirm against the
  // training sketch.
  mpu.setAccelerometerRange(MPU6050_RANGE_2_G);
  mpu.setGyroRange(MPU6050_RANGE_250_DEG);
  mpu.setFilterBandwidth(MPU6050_BAND_21_HZ);
}
void loop() {
float ax, ay, az;
// Reset Output data array
output_data[0] = 0.0f;
output_data[1] = 0.0f;
output_data[2] = 0.0f;
mpu.getEvent(&a, &g, &temp);
ax = a.acceleration.x;
ay = a.acceleration.y;
az = a.acceleration.z;
ax = constrain(ax - baseline[0], -TRUNCATE, TRUNCATE);
ay = constrain(ay - baseline[1], -TRUNCATE, TRUNCATE);
az = constrain(az - baseline[2], -TRUNCATE, TRUNCATE);
if (!motionDetected(ax, ay, az)) {
delay(10);
return;
}
recordIMU();
// Do the Inference
aifes_e_f32_fnn_inference((float*)features,(float*)output_data);
// Print it
Serial.print("Gesture 1: ");
Serial.print(output_data[0]*100,5);
Serial.println(" %");
Serial.print("Gesture 2: ");
Serial.print(output_data[1]*100,5);
Serial.println(" %");
Serial.print("Gesture 3: ");
Serial.print(output_data[2]*100,5);
Serial.println(" %");
Serial.println("");
//print_features();
delay(500);
}
void print_features() {
const uint16_t numFeatures = sizeof(features) / sizeof(float);
for (int i = 0; i < numFeatures; i++) {
Serial.print(features[i],5);
}
}
// Returns true when the summed absolute acceleration across the three axes
// exceeds ACCEL_THRESHOLD, i.e. the device is being moved.
// Uses fabsf() instead of abs(): outside the Arduino core's abs() macro,
// abs(float) resolves to the integer overload and silently truncates.
bool motionDetected(float ax, float ay, float az) {
  return (fabsf(ax) + fabsf(ay) + fabsf(az)) > ACCEL_THRESHOLD;
}
void recordIMU() {
float ax, ay, az;
for (int i = 0; i < NUM_SAMPLES; i++) {
mpu.getEvent(&a, &g, &temp);
ax = a.acceleration.x;
ay = a.acceleration.y;
az = a.acceleration.z;
ax = constrain(ax - baseline[0], -TRUNCATE, TRUNCATE);
ay = constrain(ay - baseline[1], -TRUNCATE, TRUNCATE);
az = constrain(az - baseline[2], -TRUNCATE, TRUNCATE);
features[i * NUM_AXES + 0] = ax;
features[i * NUM_AXES + 1] = ay;
features[i * NUM_AXES + 2] = az;
delay(INTERVAL);
}
}
void calibrate() {
float ax, ay, az;
for (int i = 0; i < 10; i++) {
mpu.getEvent(&a, &g, &temp);
ax = a.acceleration.x;
ay = a.acceleration.y;
az = a.acceleration.z;
delay(100);
}
baseline[0] = ax;
baseline[1] = ay;
baseline[2] = az;
}