Coding Repository

Explore Arduino and Python code for motor control, gyroscopic calibration, object detection, and machine learning.

static void motorSetup()
{
//pinMode(Motor_EN_Pin, OUTPUT);
//digitalWrite(Motor_EN_Pin, HIGH);
ledcSetup(1 /* LEDChannel */, 5000 /* freq */, 8 /* resolution */);
ledcAttachPin(MotorL_A_Pin, 1 /* LEDChannel */);
ledcWrite(1 /* LEDChannel */, 0); /* 0-255 */

ledcSetup(2 /* LEDChannel */, 5000 /* freq */, 8 /* resolution */);
ledcAttachPin(MotorL_B_Pin, 2 /* LEDChannel */);
ledcWrite(2 /* LEDChannel */, 0); /* 0-255 */

ledcSetup(3 /* LEDChannel */, 5000 /* freq */, 8 /* resolution */);
ledcAttachPin(MotorR_A_Pin, 3 /* LEDChannel */);
ledcWrite(3 /* LEDChannel */, 0); /* 0-255 */

ledcSetup(4 /* LEDChannel */, 5000 /* freq */, 8 /* resolution */);
ledcAttachPin(MotorR_B_Pin, 4 /* LEDChannel */);
ledcWrite(4 /* LEDChannel */, 0); /* 0-255 */

ledcSetup(5 /* LEDChannel */, 5000 /* freq */, 8 /* resolution */);
ledcAttachPin(Motor3_A_Pin, 5 /* LEDChannel */);
ledcWrite(5 /* LEDChannel */, 0); /* 0-255 */

ledcSetup(6 /* LEDChannel */, 5000 /* freq */, 8 /* resolution */);
ledcAttachPin(Motor3_B_Pin, 6 /* LEDChannel */);
ledcWrite(6 /* LEDChannel */, 0); /* 0-255 */

ledcSetup(7 /* LEDChannel */, 5000 /* freq */, 8 /* resolution */);
ledcAttachPin(Motor4_A_Pin, 7 /* LEDChannel */);
ledcWrite(7 /* LEDChannel */, 0); /* 0-255 */

ledcSetup(8 /* LEDChannel */, 5000 /* freq */, 8 /* resolution */);
ledcAttachPin(Motor4_B_Pin, 8 /* LEDChannel */);
ledcWrite(8 /* LEDChannel */, 0); /* 0-255 */
}

static void setMotor(uint8_t la, uint8_t lb, uint8_t ra, uint8_t rb, uint8_t m3a, uint8_t m3b, uint8_t m4a, uint8_t m4b){
ledcWrite(1 /* LEDChannel */, la); /* 0-255 */
ledcWrite(2 /* LEDChannel */, lb); /* 0-255 */
ledcWrite(3 /* LEDChannel */, ra); /* 0-255 */
ledcWrite(4 /* LEDChannel */, rb); /* 0-255 */
ledcWrite(5 /* LEDChannel */, m3a); /* 0-255 */
ledcWrite(6 /* LEDChannel */, m3b); /* 0-255 */
ledcWrite(7 /* LEDChannel */, m4a); /* 0-255 */
ledcWrite(8 /* LEDChannel */, m4b); /* 0-255 */
}

Motor API

Basic Motor Control

#include <Arduino.h>

// Define motor driver pins
#define MotorL_A_Pin 1 // Motor A input Left
#define MotorR_A_Pin 2 // Motor A input Right
#define MotorL_B_Pin 3 // Motor B input Left
#define MotorR_B_Pin 4 // Motor B input Right
#define Motor3_A_Pin 9 // Motor A input 3
#define Motor4_A_Pin 8 // Motor A input 4
#define Motor3_B_Pin 7 // Motor B input 3
#define Motor4_B_Pin 10 // Motor B input 4


// Include the motorAPI.h file after defining the pins
#include "motorAPI.h"

void setup() {
// Set motor driver pins as outputs
motorSetup();
}

void loop() {
//setMotor(0, 0, 0, 50, 0, 0, 0, 0);
//setMotor(50, 0, 0, 50, 50, 50, 0, 0);
setMotor(0, 0, 0, 0, 0, 0, 0, 0);
setMotor(0, 50, 50, 0, 0, 50, 50, 0);
//setMotor(200, 0, 0, 200, 200, 0, 0, 0);
//setMotor(255, 0, 0, 255, 255, 0, 0, 0);
//setMotor(0, 255, 0, 255, 0, 255, 0, 255);
//setMotor(255, 0, 255, 0, 255, 0, 255, 0);
//setMotor(0, 25, 0, 25, 0, 25, 0, 25);
//setMotor(0, 0, 0, 0, 0, 0, 0, 0);
}
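The eight arguments of setMotor() map one-to-one onto the PWM channels configured in motorSetup(), so higher-level movement commands can be layered on top of it. The helpers below are a minimal sketch that simply wraps the two call patterns used in the example above; the direction each pattern actually produces depends on how the motor driver is wired, so treat the names as assumptions to verify on the robot.

// Hypothetical convenience wrappers around setMotor(); the argument
// patterns mirror the calls used in the Basic Motor Control example above.
static void driveStop() {
  setMotor(0, 0, 0, 0, 0, 0, 0, 0);   // all PWM channels off
}

static void driveForward(uint8_t speed) {
  // same channel pattern as setMotor(0, 50, 50, 0, 0, 50, 50, 0) above
  setMotor(0, speed, speed, 0, 0, speed, speed, 0);
}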

MPU6050 Gyroscope and Accelerometer Calibration

/* ========== LICENSE ==================================
I2Cdev device library code is placed under the MIT license
Copyright (c) 2011 Jeff Rowberg

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
=========================================================
*/


// I2Cdev and MPU6050 must be installed as libraries
#include "I2Cdev.h"
#include "MPU6050.h"
#include "Wire.h"

/////////////////////////////////// CONFIGURATION /////////////////////////////
//Change these 3 variables if you want to fine-tune the sketch to your needs.
int buffersize=1000; //Amount of readings used to average; raise it for more precision, but the sketch will run slower (default:1000)
int acel_deadzone=8; //Accelerometer error allowed; lower it for more precision, but the sketch may not converge (default:8)
int giro_deadzone=1; //Gyro error allowed; lower it for more precision, but the sketch may not converge (default:1)

// default I2C address is 0x68
// specific I2C addresses may be passed as a parameter here
// AD0 low = 0x68 (default for InvenSense evaluation board)
// AD0 high = 0x69
//MPU6050 accelgyro;
MPU6050 accelgyro(0x68); // <-- AD0 low (the default address); use 0x69 for AD0 high

int16_t ax, ay, az,gx, gy, gz;

int mean_ax,mean_ay,mean_az,mean_gx,mean_gy,mean_gz,state=0;
int ax_offset,ay_offset,az_offset,gx_offset,gy_offset,gz_offset;

/////////////////////////////////// SETUP ////////////////////////////////////
void setup() {
// join I2C bus (I2Cdev library doesn't do this automatically)
Wire.begin();
// COMMENT NEXT LINE IF YOU ARE USING ARDUINO DUE
//TWBR = 24; // 400kHz I2C clock (200kHz if CPU is 8MHz). Leonardo measured 250kHz.

// initialize serial communication
Serial.begin(9600);

// initialize device
accelgyro.initialize();

// wait for ready
while (Serial.available() && Serial.read()); // empty buffer
while (!Serial.available()){
Serial.println(F("Send any character to start sketch.\n"));
delay(1500);
}
while (Serial.available() && Serial.read()); // empty buffer again

// start message
Serial.println("\nMPU6050 Calibration Sketch");
delay(2000);
Serial.println("\nYour MPU6050 should be placed in horizontal position, with package letters facing up. \nDon't touch it until you see a finish message.\n");
delay(3000);
// verify connection
Serial.println(accelgyro.testConnection() ? "MPU6050 connection successful" : "MPU6050 connection failed");
delay(1000);
// reset offsets
accelgyro.setXAccelOffset(0);
accelgyro.setYAccelOffset(0);
accelgyro.setZAccelOffset(0);
accelgyro.setXGyroOffset(0);
accelgyro.setYGyroOffset(0);
accelgyro.setZGyroOffset(0);
}

/////////////////////////////////// LOOP ////////////////////////////////////
void loop() {
if (state==0){
Serial.println("\nReading sensors for first time...");
meansensors();
state++;
delay(1000);
}

if (state==1) {
Serial.println("\nCalculating offsets...");
calibration();
state++;
delay(1000);
}

if (state==2) {
meansensors();
Serial.println("\nFINISHED!");
Serial.print("\nSensor readings with offsets:\t");
Serial.print(mean_ax);
Serial.print("\t");
Serial.print(mean_ay);
Serial.print("\t");
Serial.print(mean_az);
Serial.print("\t");
Serial.print(mean_gx);
Serial.print("\t");
Serial.print(mean_gy);
Serial.print("\t");
Serial.println(mean_gz);
Serial.print("Your offsets:\t");
Serial.print(ax_offset);
Serial.print("\t");
Serial.print(ay_offset);
Serial.print("\t");
Serial.print(az_offset);
Serial.print("\t");
Serial.print(gx_offset);
Serial.print("\t");
Serial.print(gy_offset);
Serial.print("\t");
Serial.println(gz_offset);
Serial.println("\nData is printed as: acelX acelY acelZ giroX giroY giroZ");
Serial.println("Check that your sensor readings are close to 0 0 16384 0 0 0");
Serial.println("If calibration was succesful write down your offsets so you can set them in your projects using something similar to mpu.setXAccelOffset(youroffset)");
while (1);
}
}

/////////////////////////////////// FUNCTIONS ////////////////////////////////////
void meansensors(){
long i=0,buff_ax=0,buff_ay=0,buff_az=0,buff_gx=0,buff_gy=0,buff_gz=0;

while (i<(buffersize+101)){
// read raw accel/gyro measurements from device
accelgyro.getMotion6(&ax, &ay, &az, &gx, &gy, &gz);

if (i>100 && i<=(buffersize+100)){ //First 100 measures are discarded
buff_ax=buff_ax+ax;
buff_ay=buff_ay+ay;
buff_az=buff_az+az;
buff_gx=buff_gx+gx;
buff_gy=buff_gy+gy;
buff_gz=buff_gz+gz;
}
if (i==(buffersize+100)){
mean_ax=buff_ax/buffersize;
mean_ay=buff_ay/buffersize;
mean_az=buff_az/buffersize;
mean_gx=buff_gx/buffersize;
mean_gy=buff_gy/buffersize;
mean_gz=buff_gz/buffersize;
}
i++;
delay(2); //Needed so we don't get repeated measures
}
}

void calibration(){
ax_offset=-mean_ax/8;
ay_offset=-mean_ay/8;
az_offset=(16384-mean_az)/8;

gx_offset=-mean_gx/4;
gy_offset=-mean_gy/4;
gz_offset=-mean_gz/4;
while (1){
int ready=0;
accelgyro.setXAccelOffset(ax_offset);
accelgyro.setYAccelOffset(ay_offset);
accelgyro.setZAccelOffset(az_offset);

accelgyro.setXGyroOffset(gx_offset);
accelgyro.setYGyroOffset(gy_offset);
accelgyro.setZGyroOffset(gz_offset);

meansensors();
Serial.println("...");

if (abs(mean_ax)<=acel_deadzone) ready++;
else ax_offset=ax_offset-mean_ax/acel_deadzone;

if (abs(mean_ay)<=acel_deadzone) ready++;
else ay_offset=ay_offset-mean_ay/acel_deadzone;

if (abs(16384-mean_az)<=acel_deadzone) ready++;
else az_offset=az_offset+(16384-mean_az)/acel_deadzone;

if (abs(mean_gx)<=giro_deadzone) ready++;
else gx_offset=gx_offset-mean_gx/(giro_deadzone+1);

if (abs(mean_gy)<=giro_deadzone) ready++;
else gy_offset=gy_offset-mean_gy/(giro_deadzone+1);

if (abs(mean_gz)<=giro_deadzone) ready++;
else gz_offset=gz_offset-mean_gz/(giro_deadzone+1);

if (ready==6) break;
}
}
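Once the sketch finishes, it prints six offsets; as its final message suggests, they are applied in a project by calling the corresponding set...Offset() functions after initializing the sensor. A minimal sketch of doing so follows; the offset values here are placeholders (the same numbers hard-coded in the motor-control sketches further down) and should be replaced with your own calibration results.

#include "I2Cdev.h"
#include "MPU6050.h"
#include "Wire.h"

MPU6050 mpu(0x68);

void setup() {
  Wire.begin();
  mpu.initialize();

  // Placeholder offsets: substitute the values printed by the calibration sketch.
  mpu.setXAccelOffset(-8078);
  mpu.setYAccelOffset(-64);
  mpu.setZAccelOffset(1528);
  mpu.setXGyroOffset(317);
  mpu.setYGyroOffset(-95);
  mpu.setZGyroOffset(11);
}

void loop() {
}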

Gyroscope Motor Control

#include <Arduino.h>
#include "I2Cdev.h"
#include "MPU6050.h"
#include "Wire.h"

// Define motor driver pins
#define MotorL_A_Pin 1 // Motor A input Left
#define MotorR_A_Pin 2 // Motor A input Right
#define MotorL_B_Pin 3 // Motor B input Left
#define MotorR_B_Pin 4 // Motor B input Right
#define Motor3_A_Pin 9 // Motor A input 3
#define Motor4_A_Pin 8 // Motor A input 4
#define Motor3_B_Pin 7 // Motor B input 3
#define Motor4_B_Pin 10 // Motor B input 4

#include "motorAPI.h"

#define FREQUENCY_HZ 50
#define INTERVAL_MS (1000 / (FREQUENCY_HZ + 1))

#define KP_GYRO 0.01
#define KI_GYRO 0.0
#define KD_GYRO 0.0

static unsigned long last_interval_ms = 0;

MPU6050 imu;
int16_t gx, gy, gz;

float previous_error_gyro = 0;
float integral_gyro = 0;

void setup() {

Serial.begin(115200);

// initialize device
Serial.println("Initializing I2C devices...");
Wire.begin();
imu.initialize();
delay(10);

// Set MPU6050 offset calibration
imu.setXGyroOffset(317);
imu.setYGyroOffset(-95);
imu.setZGyroOffset(11);

// Set motor driver pins as outputs
motorSetup();
}

void loop() {
if (millis() > last_interval_ms + INTERVAL_MS) {
last_interval_ms = millis();

// read raw gyro measurements from device
imu.getRotation(&gx, &gy, &gz);

// calculate error for gyroscope
float desired_value_gyro = 0;
float error_gyro = desired_value_gyro - gx;

// calculate integral and derivative for gyroscope
integral_gyro += error_gyro * INTERVAL_MS;
float derivative_gyro = (error_gyro - previous_error_gyro) / INTERVAL_MS;

// calculate PID output for gyroscope
float output_gyro = KP_GYRO * error_gyro + KI_GYRO * integral_gyro + KD_GYRO * derivative_gyro;

// update previous error for gyroscope
previous_error_gyro = error_gyro;

// adjust motor speeds based on PID outputs
int motor1Speed = 50 + output_gyro;
int motor4Speed = 50 + output_gyro;
int motor5Speed = 50 + output_gyro;
int motor6Speed = 50 + output_gyro;

// set motor speeds
setMotor(0, motor1Speed, motor4Speed, 0, 0, motor5Speed, motor6Speed, 0);
}
}

#include <Arduino.h>
#include "I2Cdev.h"
#include "MPU6050.h"
#include "Wire.h"

// Define motor driver pins
#define MotorL_A_Pin 1 // Motor A input Left
#define MotorR_A_Pin 2 // Motor A input Right
#define MotorL_B_Pin 3 // Motor B input Left
#define MotorR_B_Pin 4 // Motor B input Right
#define Motor3_A_Pin 9 // Motor A input 3
#define Motor4_A_Pin 8 // Motor A input 4
#define Motor3_B_Pin 7 // Motor B input 3
#define Motor4_B_Pin 10 // Motor B input 4

#include "motorAPI.h"

#define FREQUENCY_HZ 50
#define INTERVAL_MS (1000 / (FREQUENCY_HZ + 1))
#define ACC_RANGE 1 // 0: -/+2G; 1: +/-4G

// convert factor g to m/s2 ==> [-32768, +32767] ==> [-2g, +2g]
#define CONVERT_G_TO_MS2 (9.81/(16384.0/(1.+ACC_RANGE)))

#define KP 0.01
#define KI 0.0
#define KD 0.0
#define KP_GYRO 0.01
#define KI_GYRO 0.0
#define KD_GYRO 0.0

static unsigned long last_interval_ms = 0;

MPU6050 imu;
int16_t ax, ay, az, gx, gy, gz;

float previous_error = 0;
float integral = 0;
float previous_error_gyro = 0;
float integral_gyro = 0;

void setup() {

Serial.begin(115200);

// initialize device
Serial.println("Initializing I2C devices...");
Wire.begin();
imu.initialize();
delay(10);

// Set MPU6050 offset calibration
imu.setXAccelOffset(-8078);
imu.setYAccelOffset(-64);
imu.setZAccelOffset(1528);
imu.setXGyroOffset(317);
imu.setYGyroOffset(-95);
imu.setZGyroOffset(11);

/* Set full-scale accelerometer range.
0 = +/- 2g
1 = +/- 4g
2 = +/- 8g
3 = +/- 16g
*/

imu.setFullScaleAccelRange(ACC_RANGE);

// Set motor driver pins as outputs
motorSetup();
}

void loop() {
if (millis() > last_interval_ms + INTERVAL_MS) {
last_interval_ms = millis();

// read raw accel/gyro measurements from device
imu.getMotion6(&ax, &ay, &az, &gx, &gy, &gz);

// converting to m/s2
float ax_m_s2 = ax * CONVERT_G_TO_MS2;
float ay_m_s2 = ay * CONVERT_G_TO_MS2;
float az_m_s2 = az * CONVERT_G_TO_MS2;

// calculate error for accelerometer
float desired_value_accel = 0;
float error_accel = desired_value_accel - ax_m_s2;

// calculate integral and derivative for accelerometer
integral += error_accel * INTERVAL_MS;
float derivative_accel = (error_accel - previous_error) / INTERVAL_MS;

// calculate PID output for accelerometer
float output_accel = KP * error_accel + KI * integral + KD * derivative_accel;

// update previous error for accelerometer
previous_error = error_accel;

// calculate error for gyroscope (you'll need to replace this with your actual desired value)
float desired_value_gyro = 0;
float error_gyro = desired_value_gyro - gx;

// calculate integral and derivative for gyroscope
integral_gyro += error_gyro * INTERVAL_MS;
float derivative_gyro = (error_gyro - previous_error_gyro) / INTERVAL_MS;

// calculate PID output for gyroscope
float output_gyro = KP_GYRO * error_gyro + KI_GYRO * integral_gyro + KD_GYRO * derivative_gyro;

// update previous error for gyroscope
previous_error_gyro = error_gyro;

// adjust motor speeds based on PID outputs
int motor1Speed = 50 + output_accel + output_gyro;
int motor4Speed = 50 + output_accel + output_gyro;
int motor5Speed = 50 + output_accel + output_gyro;
int motor6Speed = 50 + output_accel + output_gyro;

// set motor speeds
setMotor(motor1Speed, 0, 0, motor4Speed, motor5Speed, motor6Speed, 0, 0);
}
}

Gyroscope and Accelerometer Motor Control
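One caveat with the two PID sketches above: the outputs are added to a base speed of 50 and passed straight to setMotor(), whose parameters are uint8_t, so a large correction can overflow the 0-255 PWM range. A small helper along the following lines (a sketch assuming the same variables as the loops above, using Arduino's constrain()) keeps the speeds in range:

// Clamp a PID-adjusted speed to the 0-255 range expected by setMotor().
static uint8_t clampSpeed(float base, float correction) {
  return (uint8_t)constrain((int)(base + correction), 0, 255);
}

// Example use inside the loop above:
//   setMotor(clampSpeed(50, output_accel + output_gyro), 0, 0,
//            clampSpeed(50, output_accel + output_gyro),
//            clampSpeed(50, output_accel + output_gyro),
//            clampSpeed(50, output_accel + output_gyro), 0, 0);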
Object Detection Code

/* Edge Impulse Arduino examples
Copyright (c) 2022 EdgeImpulse Inc.

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
*/


/* Includes ---------------------------------------------------------------- */
#include <XIAO-ESP32S3-RADISH-FLOWER-RECOGNITION_inferencing.h>
#include "edge-impulse-sdk/dsp/image/image.hpp"

#include "esp_camera.h"

// Select camera model - find more camera models in camera_pins.h file here
// https://github.com/espressif/arduino-esp32/blob/master/libraries/ESP32/examples/Camera/CameraWebServer/camera_pins.h

#define CAMERA_MODEL_XIAO_ESP32S3 // Has PSRAM
//#define CAMERA_MODEL_AI_THINKER // Has PSRAM

#if defined(CAMERA_MODEL_XIAO_ESP32S3)
#define PWDN_GPIO_NUM -1
#define RESET_GPIO_NUM -1
#define XCLK_GPIO_NUM 4
#define SIOD_GPIO_NUM 18
#define SIOC_GPIO_NUM 23

#define Y9_GPIO_NUM 36
#define Y8_GPIO_NUM 37
#define Y7_GPIO_NUM 38
#define Y6_GPIO_NUM 39
#define Y5_GPIO_NUM 35
#define Y4_GPIO_NUM 14
#define Y3_GPIO_NUM 13
#define Y2_GPIO_NUM 34
#define VSYNC_GPIO_NUM 5
#define HREF_GPIO_NUM 27
#define PCLK_GPIO_NUM 25

#elif defined(CAMERA_MODEL_AI_THINKER)
#define PWDN_GPIO_NUM 32
#define RESET_GPIO_NUM -1
#define XCLK_GPIO_NUM 0
#define SIOD_GPIO_NUM 26
#define SIOC_GPIO_NUM 27

#define Y9_GPIO_NUM 35
#define Y8_GPIO_NUM 34
#define Y7_GPIO_NUM 39
#define Y6_GPIO_NUM 36
#define Y5_GPIO_NUM 21
#define Y4_GPIO_NUM 19
#define Y3_GPIO_NUM 18
#define Y2_GPIO_NUM 5
#define VSYNC_GPIO_NUM 25
#define HREF_GPIO_NUM 23
#define PCLK_GPIO_NUM 22

#else
#error "Camera model not selected"
#endif

/* Constant defines -------------------------------------------------------- */
#define EI_CAMERA_RAW_FRAME_BUFFER_COLS 320
#define EI_CAMERA_RAW_FRAME_BUFFER_ROWS 240
#define EI_CAMERA_FRAME_BYTE_SIZE 3

/* Private variables ------------------------------------------------------- */
static bool debug_nn = false; // Set this to true to see e.g. features generated from the raw signal
static bool is_initialised = false;
uint8_t *snapshot_buf; //points to the output of the capture

static camera_config_t camera_config = {
.pin_pwdn = PWDN_GPIO_NUM,
.pin_reset = RESET_GPIO_NUM,
.pin_xclk = XCLK_GPIO_NUM,
.pin_sscb_sda = SIOD_GPIO_NUM,
.pin_sscb_scl = SIOC_GPIO_NUM,

.pin_d7 = Y9_GPIO_NUM,
.pin_d6 = Y8_GPIO_NUM,
.pin_d5 = Y7_GPIO_NUM,
.pin_d4 = Y6_GPIO_NUM,
.pin_d3 = Y5_GPIO_NUM,
.pin_d2 = Y4_GPIO_NUM,
.pin_d1 = Y3_GPIO_NUM,
.pin_d0 = Y2_GPIO_NUM,
.pin_vsync = VSYNC_GPIO_NUM,
.pin_href = HREF_GPIO_NUM,
.pin_pclk = PCLK_GPIO_NUM,

//XCLK 20MHz or 10MHz for OV2640 double FPS (Experimental)
.xclk_freq_hz = 20000000,
.ledc_timer = LEDC_TIMER_0,
.ledc_channel = LEDC_CHANNEL_0,

.pixel_format = PIXFORMAT_JPEG,
//YUV422,GRAYSCALE,RGB565,JPEG
.frame_size = FRAMESIZE_QVGA,
//QQVGA-UXGA Do not use sizes above QVGA when not JPEG

.jpeg_quality = 12, //0-63 lower number means higher quality
.fb_count = 1, //if more than one, i2s runs in continuous mode. Use only with JPEG
.fb_location = CAMERA_FB_IN_PSRAM,
.grab_mode = CAMERA_GRAB_WHEN_EMPTY,
};

/* Function definitions ------------------------------------------------------- */
bool ei_camera_init(void);
void ei_camera_deinit(void);
bool ei_camera_capture(uint32_t img_width, uint32_t img_height, uint8_t *out_buf);

/**
* @brief Arduino setup function
*/

void setup()
{
// put your setup code here, to run once:
Serial.begin(115200);
//comment out the below line to start inference immediately after upload
while (!Serial);
Serial.println("Edge Impulse Inferencing Demo");
if (ei_camera_init() == false) {
ei_printf("Failed to initialize Camera!\r\n");
}
else {
ei_printf("Camera initialized\r\n");
}

ei_printf("\nStarting continuous inference in 2 seconds...\n");
ei_sleep(2000);
}

/**
* @brief Get data and run inferencing
*
* @param[in] debug Get debug info if true
*/

void loop()
{

// instead of wait_ms, we'll wait on the signal, this allows threads to cancel us...
if (ei_sleep(5) != EI_IMPULSE_OK) {
return;
}

snapshot_buf = (uint8_t*)malloc(EI_CAMERA_RAW_FRAME_BUFFER_COLS * EI_CAMERA_RAW_FRAME_BUFFER_ROWS * EI_CAMERA_FRAME_BYTE_SIZE);

// check if allocation was successful
if(snapshot_buf == nullptr) {
ei_printf("ERR: Failed to allocate snapshot buffer!\n");
return;
}

ei::signal_t signal;
signal.total_length = EI_CLASSIFIER_INPUT_WIDTH * EI_CLASSIFIER_INPUT_HEIGHT;
signal.get_data = &ei_camera_get_data;

if (ei_camera_capture((size_t)EI_CLASSIFIER_INPUT_WIDTH, (size_t)EI_CLASSIFIER_INPUT_HEIGHT, snapshot_buf) == false) {
ei_printf("Failed to capture image\r\n");
free(snapshot_buf);
return;
}

// Run the classifier
ei_impulse_result_t result = { 0 };

EI_IMPULSE_ERROR err = run_classifier(&signal, &result, debug_nn);
if (err != EI_IMPULSE_OK) {
ei_printf("ERR: Failed to run classifier (%d)\n", err);
return;
}

// print the predictions
ei_printf("Predictions (DSP: %d ms., Classification: %d ms., Anomaly: %d ms.): \n",
result.timing.dsp, result.timing.classification, result.timing.anomaly);

#if EI_CLASSIFIER_OBJECT_DETECTION == 1
bool bb_found = result.bounding_boxes[0].value > 0;
for (size_t ix = 0; ix < result.bounding_boxes_count; ix++) {
auto bb = result.bounding_boxes[ix];
if (bb.value == 0) {
continue;
}
ei_printf(" %s (%f) [ x: %u, y: %u, width: %u, height: %u ]\n", bb.label, bb.value, bb.x, bb.y, bb.width, bb.height);
}
if (!bb_found) {
ei_printf(" No objects found\n");
}
#else
for (size_t ix = 0; ix < EI_CLASSIFIER_LABEL_COUNT; ix++) {
ei_printf(" %s: %.5f\n", result.classification[ix].label, result.classification[ix].value);
}
#endif

#if EI_CLASSIFIER_HAS_ANOMALY == 1
ei_printf(" anomaly score: %.3f\n", result.anomaly);
#endif


free(snapshot_buf);

}

/**
 * @brief   Setup image sensor & start streaming
 *
 * @retval  false if initialisation failed
 */

bool ei_camera_init(void) {

if (is_initialised) return true;

#if defined(CAMERA_MODEL_ESP_EYE)
pinMode(13, INPUT_PULLUP);
pinMode(14, INPUT_PULLUP);
#endif

//initialize the camera
esp_err_t err = esp_camera_init(&camera_config);
if (err != ESP_OK) {
Serial.printf("Camera init failed with error 0x%x\n", err);
return false;
}

sensor_t *s = esp_camera_sensor_get();
// initial sensors are flipped vertically and colors are a bit saturated
if (s->id.PID == OV3660_PID) {
s->set_vflip(s, 1); // flip it back
s->set_brightness(s, 1); // up the brightness just a bit
s->set_saturation(s, 0); // lower the saturation
}

#if defined(CAMERA_MODEL_M5STACK_WIDE)
s->set_vflip(s, 1);
s->set_hmirror(s, 1);
#elif defined(CAMERA_MODEL_ESP_EYE)
s->set_vflip(s, 1);
s->set_hmirror(s, 1);
s->set_awb_gain(s, 1);
#endif

is_initialised = true;
return true;
}

/**
 * @brief      Stop streaming of sensor data
 */

void ei_camera_deinit(void) {

//deinitialize the camera
esp_err_t err = esp_camera_deinit();

if (err != ESP_OK)
{
ei_printf("Camera deinit failed\n");
return;
}

is_initialised = false;
return;
}


/**
 * @brief      Capture, rescale and crop image
 *
 * @param[in]  img_width   width of output image
 * @param[in]  img_height  height of output image
 * @param[in]  out_buf     pointer to store output image, NULL may be used
 *                         if ei_camera_frame_buffer is to be used for capture and resize/cropping.
 *
 * @retval     false if not initialised, image captured, rescaled or cropped failed
 */

bool ei_camera_capture(uint32_t img_width, uint32_t img_height, uint8_t *out_buf) {
bool do_resize = false;

if (!is_initialised) {
ei_printf("ERR: Camera is not initialized\r\n");
return false;
}

camera_fb_t *fb = esp_camera_fb_get();

if (!fb) {
ei_printf("Camera capture failed\n");
return false;
}

bool converted = fmt2rgb888(fb->buf, fb->len, PIXFORMAT_JPEG, snapshot_buf);

esp_camera_fb_return(fb);

if(!converted){
ei_printf("Conversion failed\n");
return false;
}

if ((img_width != EI_CAMERA_RAW_FRAME_BUFFER_COLS)
|| (img_height != EI_CAMERA_RAW_FRAME_BUFFER_ROWS)) {
do_resize = true;
}

if (do_resize) {
ei::image::processing::crop_and_interpolate_rgb888(
out_buf,
EI_CAMERA_RAW_FRAME_BUFFER_COLS,
EI_CAMERA_RAW_FRAME_BUFFER_ROWS,
out_buf,
img_width,
img_height);
}

return true;
}

static int ei_camera_get_data(size_t offset, size_t length, float *out_ptr)
{
// we already have a RGB888 buffer, so recalculate offset into pixel index
size_t pixel_ix = offset * 3;
size_t pixels_left = length;
size_t out_ptr_ix = 0;

while (pixels_left != 0) {
out_ptr[out_ptr_ix] = (snapshot_buf[pixel_ix] << 16) + (snapshot_buf[pixel_ix + 1] << 8) + snapshot_buf[pixel_ix + 2];

// go to the next pixel
out_ptr_ix++;
pixel_ix += 3;
pixels_left--;
}
// and done!
return 0;
}

#if !defined(EI_CLASSIFIER_SENSOR) || EI_CLASSIFIER_SENSOR != EI_CLASSIFIER_SENSOR_CAMERA
#error "Invalid model for current sensor"
#endif

Object Detection and Motor Control

/* Edge Impulse Arduino examples
Copyright (c) 2022 EdgeImpulse Inc.

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
*/


/* Includes ---------------------------------------------------------------- */
#include <Arduino.h>
#include <XIAO-ESP32S3-RADISH-FLOWER-RECOGNITION_inferencing.h>
#include "edge-impulse-sdk/dsp/image/image.hpp"

#include "esp_camera.h"

// Select camera model - find more camera models in camera_pins.h file here
// https://github.com/espressif/arduino-esp32/blob/master/libraries/ESP32/examples/Camera/CameraWebServer/camera_pins.h

#define CAMERA_MODEL_XIAO_ESP32S3 // Has PSRAM

#define PWDN_GPIO_NUM -1
#define RESET_GPIO_NUM -1
#define XCLK_GPIO_NUM 10
#define SIOD_GPIO_NUM 40
#define SIOC_GPIO_NUM 39

#define Y9_GPIO_NUM 48
#define Y8_GPIO_NUM 11
#define Y7_GPIO_NUM 12
#define Y6_GPIO_NUM 14
#define Y5_GPIO_NUM 16
#define Y4_GPIO_NUM 18
#define Y3_GPIO_NUM 17
#define Y2_GPIO_NUM 15
#define VSYNC_GPIO_NUM 38
#define HREF_GPIO_NUM 47
#define PCLK_GPIO_NUM 13

#define LED_GPIO_NUM 21


/* Constant defines -------------------------------------------------------- */
#define EI_CAMERA_RAW_FRAME_BUFFER_COLS 320
#define EI_CAMERA_RAW_FRAME_BUFFER_ROWS 240
#define EI_CAMERA_FRAME_BYTE_SIZE 3

/* Private variables ------------------------------------------------------- */
static bool debug_nn = false; // Set this to true to see e.g. features generated from the raw signal
static bool is_initialised = false;
uint8_t *snapshot_buf; //points to the output of the capture

static camera_config_t camera_config = {
.pin_pwdn = PWDN_GPIO_NUM,
.pin_reset = RESET_GPIO_NUM,
.pin_xclk = XCLK_GPIO_NUM,
.pin_sscb_sda = SIOD_GPIO_NUM,
.pin_sscb_scl = SIOC_GPIO_NUM,

.pin_d7 = Y9_GPIO_NUM,
.pin_d6 = Y8_GPIO_NUM,
.pin_d5 = Y7_GPIO_NUM,
.pin_d4 = Y6_GPIO_NUM,
.pin_d3 = Y5_GPIO_NUM,
.pin_d2 = Y4_GPIO_NUM,
.pin_d1 = Y3_GPIO_NUM,
.pin_d0 = Y2_GPIO_NUM,
.pin_vsync = VSYNC_GPIO_NUM,
.pin_href = HREF_GPIO_NUM,
.pin_pclk = PCLK_GPIO_NUM,

//XCLK 20MHz or 10MHz for OV2640 double FPS (Experimental)
.xclk_freq_hz = 20000000,
.ledc_timer = LEDC_TIMER_0,
.ledc_channel = LEDC_CHANNEL_0,

.pixel_format = PIXFORMAT_JPEG,
//YUV422,GRAYSCALE,RGB565,JPEG
.frame_size = FRAMESIZE_QVGA,
//QQVGA-UXGA Do not use sizes above QVGA when not JPEG

.jpeg_quality = 12, //0-63 lower number means higher quality
.fb_count = 1, //if more than one, i2s runs in continuous mode. Use only with JPEG
.fb_location = CAMERA_FB_IN_PSRAM,
.grab_mode = CAMERA_GRAB_WHEN_EMPTY,
};

/* Function definitions ------------------------------------------------------- */
bool ei_camera_init(void);
void ei_camera_deinit(void);
bool ei_camera_capture(uint32_t img_width, uint32_t img_height, uint8_t *out_buf);

// Define motor driver pins
#define MotorL_A_Pin 0 // Motor A input Left
#define MotorL_B_Pin 1 // Motor B input Left
#define MotorR_A_Pin 2 // Motor A input Right
#define MotorR_B_Pin 3 // Motor B input Right
#define Motor3_A_Pin 10 // Motor A input 3
#define Motor3_B_Pin 9 // Motor B input 3
#define Motor4_A_Pin 8 // Motor A input 4
#define Motor4_B_Pin 7 // Motor B input 4


// Include the motorAPI.h file after defining the pins
#include "motorAPI.h"

/**
* @brief Arduino setup function
*/

void setup()
{
motorSetup();
// put your setup code here, to run once:
Serial.begin(115200);
//comment out the below line to start inference immediately after upload
while (!Serial);
Serial.println("Edge Impulse Inferencing Demo");
if (ei_camera_init() == false) {
ei_printf("Failed to initialize Camera!\r\n");
}
else {
ei_printf("Camera initialized\r\n");
}

ei_printf("\nStarting continuous inference in 2 seconds...\n");
ei_sleep(2000);
}

/**
* @brief Get data and run inferencing
*
* @param[in] debug Get debug info if true
*/

void loop()
{

// instead of wait_ms, we'll wait on the signal, this allows threads to cancel us...
if (ei_sleep(5) != EI_IMPULSE_OK) {
return;
}

snapshot_buf = (uint8_t*)malloc(EI_CAMERA_RAW_FRAME_BUFFER_COLS * EI_CAMERA_RAW_FRAME_BUFFER_ROWS * EI_CAMERA_FRAME_BYTE_SIZE);

// check if allocation was successful
if(snapshot_buf == nullptr) {
ei_printf("ERR: Failed to allocate snapshot buffer!\n");
return;
}

ei::signal_t signal;
signal.total_length = EI_CLASSIFIER_INPUT_WIDTH * EI_CLASSIFIER_INPUT_HEIGHT;
signal.get_data = &ei_camera_get_data;

if (ei_camera_capture((size_t)EI_CLASSIFIER_INPUT_WIDTH, (size_t)EI_CLASSIFIER_INPUT_HEIGHT, snapshot_buf) == false) {
ei_printf("Failed to capture image\r\n");
free(snapshot_buf);
return;
}

// Run the classifier
ei_impulse_result_t result = { 0 };

EI_IMPULSE_ERROR err = run_classifier(&signal, &result, debug_nn);
if (err != EI_IMPULSE_OK) {
ei_printf("ERR: Failed to run classifier (%d)\n", err);
return;
}

// print the predictions
ei_printf("Predictions (DSP: %d ms., Classification: %d ms., Anomaly: %d ms.): \n",
result.timing.dsp, result.timing.classification, result.timing.anomaly);

#if EI_CLASSIFIER_OBJECT_DETECTION == 1
bool bb_found = result.bounding_boxes[0].value > 0;
for (size_t ix = 0; ix < result.bounding_boxes_count; ix++) {
auto bb = result.bounding_boxes[ix];
if (bb.value > 0.9) { // Check if the prediction is greater than 0.9
// If the object is detected and is close enough, slow down the motors
if (bb.x || bb.y > 20) {
setMotor(0, 25, 0, 25, 0, 25, 0, 25);
}
else {
setMotor(0, 50, 0, 50, 0, 50, 0, 50);
}
}
if (bb.value == 0) {
continue;
}
ei_printf(" %s (%f) [ x: %u, y: %u, width: %u, height: %u ]\n", bb.label, bb.value, bb.x, bb.y, bb.width, bb.height);
}
if (!bb_found) {
ei_printf(" No objects found\n");
}
#else
for (size_t ix = 0; ix < EI_CLASSIFIER_LABEL_COUNT; ix++) {
ei_printf(" %s: %.5f\n", result.classification[ix].label, result.classification[ix].value);
}
#endif

#if EI_CLASSIFIER_HAS_ANOMALY == 1
ei_printf(" anomaly score: %.3f\n", result.anomaly);
#endif


free(snapshot_buf);
}

/**
 * @brief   Setup image sensor & start streaming
 *
 * @retval  false if initialisation failed
 */

bool ei_camera_init(void) {

if (is_initialised) return true;

#if defined(CAMERA_MODEL_ESP_EYE)
pinMode(13, INPUT_PULLUP);
pinMode(14, INPUT_PULLUP);
#endif

//initialize the camera
esp_err_t err = esp_camera_init(&camera_config);
if (err != ESP_OK) {
Serial.printf("Camera init failed with error 0x%x\n", err);
return false;
}

sensor_t *s = esp_camera_sensor_get();
// initial sensors are flipped vertically and colors are a bit saturated
if (s->id.PID == OV3660_PID) {
s->set_vflip(s, 1); // flip it back
s->set_brightness(s, 1); // up the brightness just a bit
s->set_saturation(s, 0); // lower the saturation
}

#if defined(CAMERA_MODEL_M5STACK_WIDE)
s->set_vflip(s, 1);
s->set_hmirror(s, 1);
#elif defined(CAMERA_MODEL_ESP_EYE)
s->set_vflip(s, 1);
s->set_hmirror(s, 1);
s->set_awb_gain(s, 1);
#endif

is_initialised = true;
return true;
}

/**
 * @brief      Stop streaming of sensor data
 */

void ei_camera_deinit(void) {

//deinitialize the camera
esp_err_t err = esp_camera_deinit();

if (err != ESP_OK)
{
ei_printf("Camera deinit failed\n");
return;
}

is_initialised = false;
return;
}


/**
 * @brief      Capture, rescale and crop image
 *
 * @param[in]  img_width   width of output image
 * @param[in]  img_height  height of output image
 * @param[in]  out_buf     pointer to store output image, NULL may be used
 *                         if ei_camera_frame_buffer is to be used for capture and resize/cropping.
 *
 * @retval     false if not initialised, image captured, rescaled or cropped failed
 */

bool ei_camera_capture(uint32_t img_width, uint32_t img_height, uint8_t *out_buf) {
bool do_resize = false;

if (!is_initialised) {
ei_printf("ERR: Camera is not initialized\r\n");
return false;
}

camera_fb_t *fb = esp_camera_fb_get();

if (!fb) {
ei_printf("Camera capture failed\n");
return false;
}

bool converted = fmt2rgb888(fb->buf, fb->len, PIXFORMAT_JPEG, snapshot_buf);

esp_camera_fb_return(fb);

if(!converted){
ei_printf("Conversion failed\n");
return false;
}

if ((img_width != EI_CAMERA_RAW_FRAME_BUFFER_COLS)
|| (img_height != EI_CAMERA_RAW_FRAME_BUFFER_ROWS)) {
do_resize = true;
}

if (do_resize) {
ei::image::processing::crop_and_interpolate_rgb888(
out_buf,
EI_CAMERA_RAW_FRAME_BUFFER_COLS,
EI_CAMERA_RAW_FRAME_BUFFER_ROWS,
out_buf,
img_width,
img_height);
}

return true;
}

static int ei_camera_get_data(size_t offset, size_t length, float *out_ptr)
{
// we already have a RGB888 buffer, so recalculate offset into pixel index
size_t pixel_ix = offset * 3;
size_t pixels_left = length;
size_t out_ptr_ix = 0;

while (pixels_left != 0) {
out_ptr[out_ptr_ix] = (snapshot_buf[pixel_ix] << 16) + (snapshot_buf[pixel_ix + 1] << 8) + snapshot_buf[pixel_ix + 2];

// go to the next pixel
out_ptr_ix++;
pixel_ix += 3;
pixels_left--;
}
// and done!
return 0;
}

#if !defined(EI_CLASSIFIER_SENSOR) || EI_CLASSIFIER_SENSOR != EI_CLASSIFIER_SENSOR_CAMERA
#error "Invalid model for current sensor"
#endif
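The loop above only slows or speeds all motors when a confident detection appears; the same bounding-box data can also be used to steer. The helper below is a hypothetical illustration, not code from the repository: it compares the box's x position with the centre of the model's input frame and biases the drive pattern toward one side. The dead band and the left/right channel mapping are assumptions to adapt to the actual wiring.

// Hypothetical steering helper built on the drive pattern used above.
static void steerToward(uint32_t bb_x) {
  const uint32_t centre = EI_CLASSIFIER_INPUT_WIDTH / 2;
  const uint32_t band = 10; // assumed dead band around the centre, in pixels

  if (bb_x + band < centre) {
    setMotor(0, 25, 0, 50, 0, 25, 0, 50); // object to the left: slow one side
  } else if (bb_x > centre + band) {
    setMotor(0, 50, 0, 25, 0, 50, 0, 25); // object to the right: slow the other side
  } else {
    setMotor(0, 50, 0, 50, 0, 50, 0, 50); // roughly centred: drive straight
  }
}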

Bing Images Web Scraping

from bing_images import bing

bing.download_images(
"radish flowers",
90,
output_dir="/ImageCollectorDataset/Images",
pool_size=1000,
file_type="",
force_replace=True)

Keras Data Augmentation

import numpy as np
from keras.preprocessing.image import ImageDataGenerator
import os

# create a data generator
datagen = ImageDataGenerator(
rotation_range=20,
width_shift_range=0.2,
height_shift_range=0.2,
shear_range=0.2,
zoom_range=0.2,
horizontal_flip=True,
fill_mode='nearest'
)

# specify the directory of your images
image_dir = 'C:\\ImageCollectorDataset\\RadishFlowerWBoxes'
output_dir = 'C:\\ImageCollectorDataset\\DataaugmentedRadish'

# ensure output directory exists
if not os.path.exists(output_dir):
    os.makedirs(output_dir)

# automatically retrieve images and their classes for train and validation sets
image_generator = datagen.flow_from_directory(
image_dir,
target_size=(1200, 1200),
batch_size=1,
save_to_dir=output_dir,
save_prefix='aug',
save_format='png'
)

# generate and save augmented images
num_aug_images_wanted = 3000 # total number of images we want to have in each class

# calculate the number of batches to run
num_files = len(os.listdir(image_dir))
num_batches = int(np.ceil(num_aug_images_wanted / num_files))

# run the generator and save the augmented images
for i in range(0, num_batches):
    imgs, labels = next(image_generator)

Epoch Limiter (Early Stopping)

from tensorflow.keras.callbacks import EarlyStopping
# apply early stopping (appends to the training script's existing callbacks list)
callbacks.append(EarlyStopping(
monitor='val_accuracy', # Monitor validation accuracy
min_delta=0.005, # Minimum change to qualify as an improvement
patience=15, # Stop after 15 epochs without improvement
verbose=1, # Print messages
restore_best_weights=True # Restore model weights from the epoch with the best value of the monitored quantity.
))

Running Detection