Merge remote-tracking branch 'origin/main'

Luca Warmenhoven
2024-05-29 12:17:48 +02:00
23 changed files with 475 additions and 928 deletions

View File

@@ -20,4 +20,21 @@ void Connectivity::websocketSetup(char* ip, uint16_t port, char* address){
void Connectivity::sendData(float roll, float pitch, float yaw){
String message = "{\"Sensor\": 1, \"roll\":\"" + String(roll) + "\",\"pitch\":\"" + String(pitch) + "\",\"yaw\":\"" + String(yaw) + "\"}";
webSocket.sendTXT(message);
}
/** Send a POST request to a server with provided data */
int Connectivity::httpPost(const char *serverAddress, const char *serverSubPath, const unsigned short serverPort,
const char *data, const size_t dataLength, const char *contentType)
{
if ( wifi_client.connect(serverAddress, serverPort)) {
wifi_client.printf("POST %s HTTP/1.1\r\n", serverSubPath);
wifi_client.printf("Content-Type: %s\r\n", contentType);
wifi_client.printf("Content-Length: %d\r\n", dataLength);
wifi_client.printf("Host: %s\r\n\n", serverAddress);
wifi_client.println(data);
wifi_client.stop();
return 0;
}
return 1;
}
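For reference, a minimal sketch of what this function puts on the wire, assuming a seven-byte body {"a":1} sent to "/" on the host used later in this commit; the blank line after the Host header terminates the header block, which is what the receiving server keys on:
POST / HTTP/1.1
Content-Type: application/json
Content-Length: 7
Host: 192.168.137.146

{"a":1}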

View File

@@ -6,25 +6,25 @@
#include <ArduinoWiFiServer.h>
#include <ESP8266WiFi.h>
#include <ESP8266WiFiGeneric.h>
#include <ESP8266WiFiGratuitous.h>
#include <ESP8266WiFiMulti.h>
#include <ESP8266WiFiSTA.h>
#include <ESP8266WiFiScan.h>
#include <ESP8266WiFiType.h>
#include <WiFiClient.h>
#include <WiFiServer.h>
#include <WiFiServerSecure.h>
#include <WiFiUdp.h>
#include <WiFiClientSecure.h>
class Connectivity {
public:
void connectWiFi(char* ssid, char* pass);
void websocketSetup(char* ip, uint16_t port, char* address);
void sendData(float roll, float pitch, float yaw);
int httpPost(const char *serverAddress, const char *serverSubPath, const unsigned short serverPort, const char *data, const size_t dataLength, const char *contentType);
private:
ESP8266WiFiMulti wifi;
WiFiClient wifi_client;
WebSocketsClient webSocket;
};
#endif

View File

@@ -5,56 +5,33 @@
void setup() {
Serial.begin(9600);
Serial.println("startup");
//connect to Wi-Fi and start the sensor
connectivity.connectWiFi(ssid, pass);
sensorManager.sensorSetup();
//ws server address, port and URL
webSocket.begin("145.28.160.108", 8001, "");
// retry every 500 ms if the connection has failed
webSocket.setReconnectInterval(500);
}
void loop() {
SensorManager::eulerAngles eulerRotation = sensorManager.getEulerAngles();
SensorManager::acceleration rotationAcceleration = sensorManager.getAcceleration();
static unsigned long lastTime = 0; // stores the last time the code was run; static so it persists across loop() iterations
// Subtract offset
// rotation.i -= offset.i;
// rotation.j -= offset.j;
// rotation.k -= offset.k;
// rotation.w -= offset.w;
Serial.print(eulerRotation.roll);
Serial.print(" ");
Serial.print(eulerRotation.yaw);
Serial.print(" ");
Serial.print(eulerRotation.pitch);
Serial.println();
// Convert quaternion to Euler angles in radians
// Convert to degrees
// float rollDegrees = roll * 180.0f / PI;
// float pitchDegrees = pitch * 180.0f / PI;
// float yawDegrees = yaw * 180.0f / PI;
Serial.print(eulerRotation.roll);
Serial.print(" ");
Serial.print(eulerRotation.pitch);
Serial.print(" ");
Serial.print(eulerRotation.yaw);
sendData(eulerRotation.roll, eulerRotation.pitch, eulerRotation.yaw);
Serial.println();
webSocket.loop();
unsigned long currentTime = millis();
if (currentTime - lastTime >= 100) { // 100 ms has passed
String message = "{\"deviceId\": 1, \"rotationX\":\"" + String(eulerRotation.roll) + "\",\"rotationY\":\"" + String(eulerRotation.pitch) + "\",\"rotationZ\":\"" + String(eulerRotation.yaw) + "\",\"accelerationX\":\"" + String(rotationAcceleration.x) + "\",\"accelerationY\":\"" + String(rotationAcceleration.y) + "\",\"accelerationZ\":\"" + String(rotationAcceleration.z) + "\",\"type\":\"data\"}";
Serial.println(connectivity.httpPost("192.168.137.146", "/", 3445, message.c_str(), message.length(), "application/json"));
Serial.println(message);
lastTime = currentTime;
}
// if (Serial.available()) {
// String command = Serial.readStringUntil('\n');
// command.trim(); // remove any trailing whitespace
// if (command == "setZeroPoint") {
// setZeroPoint();
// }
// }
// }
// void setZeroPoint() {
// offset = sensorManager.readLoop();
// }
}
//acceleration.X
//acceleration.Y
//acceleration.Z
void sendData(float roll, float pitch, float yaw){
String message = "{\"Sensor\": 1, \"roll\":\"" + String(roll) + "\",\"pitch\":\"" + String(pitch) + "\",\"yaw\":\"" + String(yaw) + "\"}";
webSocket.sendTXT(message);
}

View File

@@ -5,7 +5,6 @@
SensorManager::SensorManager() {}
void SensorManager::sensorSetup() {
Wire.setClockStretchLimit(150000L); // Default stretch limit 150mS
Wire.begin();
//wait for the sensor to start before continuing
if (myIMU.begin() == false) {
@@ -15,20 +14,12 @@ void SensorManager::sensorSetup() {
//start the sensor functions and enable auto-calibration
//once calibration is enabled it re-runs every 5 minutes
Wire.setClock(400000); //Increase I2C data rate to 400kHz
myIMU.calibrateAll(); //Turn on cal for Accel, Gyro, and Mag
Wire.setClock(400000);
myIMU.enableGyroIntegratedRotationVector(100); //send data every 100ms
myIMU.enableMagnetometer(100); //Send data update every 100ms
myIMU.saveCalibration(); //Saves the current dynamic calibration data (DCD) to memory
myIMU.requestCalibrationStatus(); //Sends command to get the latest calibration status
if (myIMU.calibrationComplete() == true) {
Serial.println("Calibration data successfully stored");
}
myIMU.enableAccelerometer(100); //Send data update every 100ms
Serial.println(F("magnetometer rotation enabled"));
}
//get sensor data
SensorManager::RotationQuaternion SensorManager::getQuaternion() {
if (myIMU.dataAvailable() == true) {
float i = myIMU.getQuatI();
@@ -48,7 +39,7 @@ SensorManager::RotationQuaternion SensorManager::getQuaternion() {
return rotation;
}
}
//convert the quaternion to Euler angles in radians, in the range -π to +π
SensorManager::eulerAngles SensorManager::getEulerAngles() {
SensorManager::RotationQuaternion rotation = getQuaternion();
float roll = atan2(2.0f * (rotation.w * rotation.i + rotation.j * rotation.k), 1.0f - 2.0f * (rotation.i * rotation.i + rotation.j * rotation.j));
@@ -56,4 +47,12 @@ SensorManager::eulerAngles SensorManager::getEulerAngles() {
float yaw = atan2(2.0f * (rotation.w * rotation.k + rotation.i * rotation.j), 1.0f - 2.0f * (rotation.j * rotation.j + rotation.k * rotation.k));
eulerAngles EulerAngles = { roll, pitch, yaw };
return EulerAngles;
}
SensorManager::acceleration SensorManager::getAcceleration(){
float x = myIMU.getAccelX();
float y = myIMU.getAccelY();
float z = myIMU.getAccelZ();
acceleration Acceleration = { x, y, z };
return Acceleration;
}

View File

@@ -13,8 +13,14 @@ public:
float pitch;
float yaw;
};
eulerAngles getEulerAngles();
struct acceleration {
float x;
float y;
float z;
};
eulerAngles getEulerAngles();
acceleration getAcceleration();
private:
struct RotationQuaternion {

View File

@@ -3,21 +3,24 @@ package com.example.fitbot.exercise;
import android.util.Log;
import com.example.fitbot.util.path.GesturePath;
import com.example.fitbot.util.server.IWebSocketHandler;
import com.example.fitbot.util.server.WebSocket;
import com.example.fitbot.util.processing.IMotionDataConsumer;
import com.example.fitbot.util.server.IWebServerHandler;
import com.example.fitbot.util.server.WebServer;
import java.util.Objects;
import java.util.function.Consumer;
public class Exercise implements IWebSocketHandler {
public class Exercise implements IWebServerHandler {
private EMuscleGroup muscleGroup;
private GesturePath path;
private GesturePath leftPath;
private GesturePath rightPath;
private String title;
private String description;
private float segmentsPerSecond;
// Static fields.
private static WebSocket webSocket;
private static WebServer webSocket;
private static Exercise currentExercise = null;
@@ -25,18 +28,19 @@ public class Exercise implements IWebSocketHandler {
* Constructor for the Exercise class.
*
* @param muscleGroup The muscle group of the exercise.
* @param path The path of the exercise.
* @param leftPath The path of the left hand.
* @param rightPath The path of the right hand.
* @param title The title of the exercise.
* @param description The description of the exercise.
* @param segmentsPerSecond The number of segments per second.
* This determines how fast the exercise should be performed.
*/
public Exercise(EMuscleGroup muscleGroup, String title, String description, GesturePath path, float segmentsPerSecond) {
public Exercise(EMuscleGroup muscleGroup, String title, String description, GesturePath leftPath, GesturePath rightPath) {
this.muscleGroup = muscleGroup;
this.title = title;
this.description = description;
this.path = path;
this.segmentsPerSecond = segmentsPerSecond;
this.leftPath = leftPath;
this.rightPath = rightPath;
}
/**
@@ -57,10 +61,9 @@ public class Exercise implements IWebSocketHandler {
}
try {
webSocket = WebSocket.createServer();
webSocket = WebServer.createServer();
Objects.requireNonNull(webSocket, "WebSocket server could not be created.");
webSocket.startListening();
webSocket.setEventHandler(this);
currentExercise = this;
} catch (Exception e) {
@@ -108,8 +111,8 @@ public class Exercise implements IWebSocketHandler {
/**
* Get the path of the exercise.
*/
public GesturePath getPath() {
return path;
public GesturePath[] getPath() {
return new GesturePath[]{leftPath, rightPath};
}
public String getTitle() {
@@ -126,4 +129,9 @@ public class Exercise implements IWebSocketHandler {
public double getSegmentsPerSecond() {
return segmentsPerSecond;
}
@Override
public void onReceive(String message) {
}
}

View File

@@ -1,41 +0,0 @@
package com.example.fitbot.exercise;
import com.example.fitbot.util.processing.IMotionDataConsumer;
import com.example.fitbot.util.processing.MotionData;
import com.example.fitbot.util.processing.MotionProcessor;
import com.example.fitbot.util.server.IWebSocketHandler;
import com.example.fitbot.util.server.WebSocket;
import org.joml.Vector3f;
import java.net.Socket;
public class ExerciseBuilder implements IWebSocketHandler, IMotionDataConsumer {
private MotionProcessor processor;
public ExerciseBuilder() {
this.processor = new MotionProcessor();
this.processor.setMotionDataEventHandler(this);
}
@Override
public void onDisconnected(Socket socket) {
IWebSocketHandler.super.onDisconnected(socket);
}
@Override
public void onMessageReceived(WebSocket.Message message, WebSocket.MessageReply replier) {
IWebSocketHandler.super.onMessageReceived(message, replier);
}
@Override
public void onError(Socket socket, String error) {
IWebSocketHandler.super.onError(socket, error);
}
@Override
public void accept(Vector3f transformedVector, MotionData motionData, int sampleIndex, double sampleRate, int sensorId) {
}
}

View File

@@ -76,7 +76,7 @@ public class ExerciseManager {
content.get(PROPERTY_NAME).getAsString(),
content.get(PROPERTY_DESC).getAsString(),
gesturePathFromString(content.get(PROPERTY_VECTORS).getAsString()),
DEFAULT_SEGMENT_SPEED
gesturePathFromString(content.get(PROPERTY_SEGMENT_SPEED).getAsString())
);
} catch (Exception e) {
e.printStackTrace();

View File

@@ -1,43 +0,0 @@
package com.example.fitbot.speech;
import com.aldebaran.qi.sdk.QiContext;
import com.aldebaran.qi.sdk.builder.SayBuilder;
import com.aldebaran.qi.sdk.object.locale.Language;
import com.aldebaran.qi.sdk.object.locale.Locale;
import com.aldebaran.qi.sdk.object.locale.Region;
/**
* SpeechGenerator class for generating speech for the robot
*/
public class SpeechGenerator {
private static final Locale DUTCH_LOCALE = new Locale(Language.DUTCH, Region.NETHERLANDS);
private SayBuilder builder;
/**
* Function for making the robot say something with DUTCH_LOCALE as locale
* @param phrase The phrase to make the robot say
* @param ctx The QiContext to use
*/
public static void say(String phrase, QiContext ctx)
{
say(phrase, ctx, DUTCH_LOCALE);
}
/**
* Function for making the robot say something with a specific locale
* @param phrase The phrase to make the robot say
* @param ctx The QiContext to use
* @param locale The locale to use
*/
public static void say(String phrase, QiContext ctx, Locale locale)
{
SayBuilder
.with(ctx)
.withLocale(locale)
.withText(phrase)
.build()
.run();
}
}

View File

@@ -1,7 +1,6 @@
package com.example.fitbot.ui.activities;
import android.os.Bundle;
import android.os.Handler;
import android.util.Log;
import android.widget.VideoView;
@@ -11,16 +10,17 @@ import com.aldebaran.qi.sdk.RobotLifecycleCallbacks;
import com.aldebaran.qi.sdk.design.activity.RobotActivity;
import com.aldebaran.qi.sdk.design.activity.conversationstatus.SpeechBarDisplayStrategy;
import com.example.fitbot.R;
import com.example.fitbot.sports.FitnessCycle;
import com.example.fitbot.exercise.EMuscleGroup;
import com.example.fitbot.exercise.Exercise;
import com.example.fitbot.ui.components.PersonalMotionPreviewElement;
import com.example.fitbot.util.Animations;
import com.example.fitbot.util.ButtonNavigation;
import com.example.fitbot.util.FitnessCycle;
import com.example.fitbot.util.path.GesturePath;
import org.joml.Vector3f;
import java.util.concurrent.CompletableFuture;
public class FitnessActivity extends RobotActivity implements RobotLifecycleCallbacks {
PersonalMotionPreviewElement personalMotionPreviewElement;
@@ -45,23 +45,22 @@ public class FitnessActivity extends RobotActivity implements RobotLifecycleCall
GesturePath.Builder gesturePathBuilder = new GesturePath.Builder();
gesturePathBuilder.addVector(new Vector3f(-.5f, -.5f, -.5f));
gesturePathBuilder.addVector(new Vector3f(.5f, -.5f, -.5f));
gesturePathBuilder.addVector(new Vector3f(.5f, -.5f, .5f));
gesturePathBuilder.addVector(new Vector3f(-.5f, -.5f, .5f));
gesturePathBuilder.addVector(new Vector3f(-.5f, -.5f, -.5f));
gesturePathBuilder.addVector(new Vector3f(-.5f, .5f, -.5f));
gesturePathBuilder.addVector(new Vector3f(.5f, .5f, -.5f));
gesturePathBuilder.addVector(new Vector3f(.5f, .5f, .5f));
gesturePathBuilder.addVector(new Vector3f(-.5f, .5f, .5f));
gesturePathBuilder.addVector(new Vector3f(-.5f, .5f, -.5f));
for ( int i = 0; i < 40; i++)
{
gesturePathBuilder.addVector(
new Vector3f(
(float)Math.cos(Math.PI + (Math.PI / 40.0f) * i),
(float)Math.sin(Math.PI + (Math.PI / 40.0f) * i),
0
)
);
}
personalMotionPreviewElement = findViewById(R.id.personalMotionPreviewElement);
personalMotionPreviewElement.post(() -> {
Log.i("FitnessActivity", "PersonalMotionPreviewElement.post()");
Exercise exercise = new Exercise(EMuscleGroup.ARMS, "Bicep Curls", "Oefening voor de biceps.", gesturePathBuilder.build(), 1);
Exercise exercise = new Exercise(EMuscleGroup.ARMS, "Bicep Curls", "Oefening voor de biceps.", gesturePathBuilder.build(), gesturePathBuilder.build());
personalMotionPreviewElement.initialize(exercise);
});
@@ -71,11 +70,9 @@ public class FitnessActivity extends RobotActivity implements RobotLifecycleCall
public void onRobotFocusGained(QiContext qiContext) {
// Find the VideoView by its ID
FitnessCycle.RobotMovement("bicepcurl", 10, qiContext);
// FitnessCycle.playVideo(qiContext, videoView, this);
CompletableFuture.runAsync(() -> FitnessCycle.executeMovement("bicepcurl", 10, qiContext));
personalMotionPreviewElement.provideQiContext(qiContext);
// FitnessCycle.playVideo(qiContext, videoView, this);
}
@Override
@@ -88,14 +85,8 @@ public class FitnessActivity extends RobotActivity implements RobotLifecycleCall
// Implement your logic when the robot focus is refused
}
private Handler handler;
private Runnable runnable;
@Override
protected void onDestroy() {
super.onDestroy();
if (handler != null && runnable != null) {
handler.removeCallbacks(runnable);
}
}
}

View File

@@ -2,6 +2,7 @@ package com.example.fitbot.ui.components;
import android.content.Context;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.Path;
import android.util.AttributeSet;
@@ -10,7 +11,7 @@ import android.view.View;
import com.aldebaran.qi.sdk.QiContext;
import com.example.fitbot.exercise.Exercise;
import com.example.fitbot.speech.SpeechGenerator;
import com.example.fitbot.util.FitnessCycle;
import com.example.fitbot.util.path.GesturePath;
import com.example.fitbot.util.path.PathSegment;
import com.example.fitbot.util.processing.MotionData;
@@ -23,21 +24,23 @@ import org.joml.Vector4f;
public class PersonalMotionPreviewElement extends View {
private GesturePath path;
private double pathTime = 0.0D; // The timestamp the path is currently at.
private GesturePath[] paths;
private MotionProcessor motionProcessor;
private double pathTime = 0.0D; // The timestamp the path is currently at.
private double exerciseProgress = 0.0D; // The progress of the exercise. Ranges from 0 to 1.
private QiContext qiContext;
private Exercise exercise;
private Path referencePath; // The path the user is supposed to follow.
private Path performingPath; // The path the user is currently following.
private Path stickmanPath; // The path of the stickman that is drawn on the screen.
private Path targetPath; // The path the user is supposed to follow.
private Path actualPath; // The path the user is currently following.
private final Paint referencePaint = new Paint();
private final Paint targetPaint = new Paint();
private final Paint backgroundColor = new Paint();
private Paint referencePaint;
private Paint performingPaint;
private Paint textPaint;
private static final String[] USER_PHRASES = {
"Veel success met de oefening!",
@@ -45,76 +48,24 @@ public class PersonalMotionPreviewElement extends View {
"Veel plezier!"
};
// Matrices for the projection of the path segments onto the screen.
// Depth buffering is not supported yet.
private Matrix4f modelViewMatrix = new Matrix4f();
private Matrix4f projectionMatrix = new Matrix4f();
private double timePassed = 0.0D; // The time that has passed since the start of the exercise, in seconds.
private long startingTime = 0L;
private Paint backgroundColor = new Paint();
/**
* Constants for the preview path projection.
*/
private final float FOV = 80.0f; // The field of view of the preview path
private final float Z_NEAR = 0.1f; // The near clipping plane
private final float Z_FAR = 1000.0f; // The far clipping plane
private Vector3f objectPosition = new Vector3f(0.0f, 0.0f, -4.0f); // The position of the camera
private Vector2f screenDimensions = new Vector2f(); // Width and height dimensions of the screen
private Vector2f rotation = new Vector2f(); // Rotation vector (yaw, pitch)
public PersonalMotionPreviewElement(Context context, AttributeSet attrs) {
super(context, attrs);
this.referencePaint = new Paint();
this.referencePaint.setColor(0xFFFF0000); // Red
this.referencePaint.setStyle(Paint.Style.STROKE);
this.referencePaint.setStyle(Paint.Style.FILL);
this.referencePaint.setStrokeWidth(5.0f);
this.referencePaint.setAntiAlias(true);
this.performingPaint = new Paint();
this.performingPaint.setColor(0xFF0000FF); // Blue
this.performingPaint.setStyle(Paint.Style.STROKE);
this.performingPaint.setStrokeWidth(5.0f);
this.textPaint = new Paint();
this.textPaint.setColor(-1);
this.textPaint.setStyle(Paint.Style.FILL);
this.textPaint.setTextSize(50.0f);
}
/**
* Method for updating the stickman gestures.
*
* This method will update the stickman gestures based on the current
* motion data that is being processed.
*/
private void updateStickmanGestures() {
// Reset previous path
stickmanPath.reset();
// TODO: Define all arm segments:
// - Upper left and right arm
// - Lower left and right arm
// - Upper left and right leg
// - Lower left and right leg
// Update all segments based on the perceived motion data.
PathSegment upperLeftArm = new PathSegment(
new Vector3f(),
new Vector3f()
);
PathSegment[] bodySegments = new PathSegment[] {
new PathSegment(new Vector3f(0.0f, -.5f, -.5f), new Vector3f(0, 0, 0)), // Left leg
new PathSegment(new Vector3f(0.0f, -.5f, .5f), new Vector3f(0, 0, 0)), // Right leg
new PathSegment(new Vector3f(0.0f, .5f, 0.0f), new Vector3f(0, 0, 0)), // Body
new PathSegment(new Vector3f(-.25f, .25f, 0f), new Vector3f(0, 0, 0)), // Left arm
new PathSegment(new Vector3f(.25f, .25f, 0f), new Vector3f(0, 0, 0)) // Right arm
};
// TODO: Generate new path for stickman
// Target paint is the filling of the target path.
this.targetPaint.setColor(-1);
this.targetPaint.setStyle(Paint.Style.STROKE);
this.targetPaint.setStrokeWidth(5.0f);
this.targetPaint.setAntiAlias(true);
}
/**
@@ -127,31 +78,41 @@ public class PersonalMotionPreviewElement extends View {
*/
public void initialize(Exercise exercise) {
Log.i("PersonalMotionPreviewElement", "Creating new PersonalMotionPreviewElement.");
this.backgroundColor = new Paint();
this.backgroundColor.setColor(0xFF000000); // Black
this.screenDimensions.x = this.getWidth();
this.screenDimensions.y = this.getHeight();
this.performingPath = new Path();
this.referencePath = new Path();
this.actualPath = new Path();
this.targetPath = new Path();
this.startingTime = System.nanoTime(); // Set the last time to the current time
this.exerciseProgress = 0.0d;
this.exercise = exercise;
this.path = exercise.getPath();
this.motionProcessor = new MotionProcessor();
this.motionProcessor.startListening();
this.motionProcessor.setMotionDataEventHandler((processed, preprocessed, sampleIndex, sampleRate, deviceId) -> {
// TODO: Implement the calculation of the `performingPath` based on the motion data
});
this.paths = exercise.getPath();
}
/**
* Function for providing a QiContext to the PersonalMotionPreviewElement.
* This function will be called by the parent activity when the QiContext is available.
* Also say something nice to the user :)
*
* @param context The QiContext to provide.
*/
public void provideQiContext(QiContext context) {
this.qiContext = context;
if ( this.motionProcessor != null )
this.motionProcessor.stopListening();
this.motionProcessor = new MotionProcessor();
this.motionProcessor.startListening();
// Handler that is called every time the motion processor receives new data.
this.motionProcessor.setMotionDataEventHandler((processed, preprocessed, sampleIndex, sampleRate, deviceId) -> {
double progress = this.motionProcessor.getAverageError(this.paths[0], 0);
this.exerciseProgress = Math.min(1, Math.max(0, progress));
this.invalidate();
Log.i("MotionProcessor", "Processed data: " + progress + " (" + preprocessed + ")");
});
saySomethingNice();
}
@@ -163,90 +124,16 @@ public class PersonalMotionPreviewElement extends View {
if (this.qiContext == null)
return;
SpeechGenerator.say(USER_PHRASES[(int) Math.floor(Math.random() * USER_PHRASES.length)], this.qiContext);
}
/**
* Method that calculates the path that will be drawn on the
* canvas. This method will be called every time new motion data is received.
*/
private void calculateDrawingPath(Vector3f transformedVector, MotionData motionData, int sampleIndex, double sampleRate) {
// Recalculate the personal path based on the new motion data
// TODO: Implement
FitnessCycle.say(USER_PHRASES[(int) Math.floor(Math.random() * USER_PHRASES.length)], this.qiContext);
}
/**
* Method for setting the gesture path that will be drawn on the canvas.
*
* @param path The gesture path to draw.
* @param exercise The exercise that the user is currently performing.
*/
public void setGesturePath(GesturePath path) {
this.path = path;
this.referencePath = getDrawablePath(path.getSegments());
}
/**
* Method for projecting a 3D point onto the screen.
* This method converts the 3D point to 2D space using a Model-View-Projection matrix transformation.
*
* @param point The point to cast to the screen.
* @param virtualWidth The width of the virtual screen.
* This is used to normalize the screen coordinates.
* @param virtualHeight The height of the virtual screen.
* @return The transformed vector in screen coordinates ranging from (0, 0) to (virtualWidth, virtualHeight).
*/
private Vector2f projectVertex(Vector3f point, int virtualWidth, int virtualHeight) {
modelViewMatrix
.identity()
.translate(-objectPosition.x, -objectPosition.y, -objectPosition.z)
.rotateX((float) Math.toRadians(rotation.y))
.rotateY((float) Math.toRadians(rotation.x));
// Transform the projection matrix to a perspective projection matrix
// Perspective transformation conserves the depth of the object
projectionMatrix
.identity()
.perspective((float) Math.toRadians(FOV), (float) virtualWidth / virtualHeight, Z_NEAR, Z_FAR);
// Convert world coordinates to screen-space using MVP matrix
Vector4f screenCoordinates = new Vector4f(point, 1.0f)
.mul(this.modelViewMatrix)
.mul(this.projectionMatrix);
// Normalize screen coordinates from (-1, 1) to (0, virtualWidth) and (0, virtualHeight)
float normalizedX = (screenCoordinates.x / screenCoordinates.w + 1.0f) * 0.5f * virtualWidth;
float normalizedY = (1.0f - screenCoordinates.y / screenCoordinates.w) * 0.5f * virtualHeight;
Log.i("VertexProjection", "Projected vertex to screen coordinates: (" + normalizedX + ", " + normalizedY + ").");
return new Vector2f(normalizedX, normalizedY);
}
/**
* Method that converts a sequence of vectors to a Path object.
* This path is a set of bezier curves that will be drawn on the canvas.
*
* @param segments The path segments in the path.
* These segments will be connected by bezier curves, which
* all have unique curvature values.
* @return The generated path object.
*/
private Path getDrawablePath(PathSegment... segments) {
Path calculatedPath = new Path();
// Starting point
Vector2f origin = projectVertex(segments[0].getStart(), getWidth(), getHeight());
calculatedPath.moveTo(origin.x, origin.y);
// Draw the path segments
for (PathSegment segment : segments) {
Vector2f startProjected = projectVertex(segment.getStart(), getWidth(), getHeight());
Vector2f endProjected = projectVertex(segment.getEnd(), getWidth(), getHeight());
calculatedPath.lineTo(startProjected.x, startProjected.y);
calculatedPath.lineTo(endProjected.x, endProjected.y);
}
return calculatedPath;
public void setExercise(Exercise exercise) {
this.exercise = exercise;
}
@@ -256,14 +143,20 @@ public class PersonalMotionPreviewElement extends View {
this.setBackgroundColor(0xFF000000); // Black
if (this.exercise == null)
return;
// Draw the sport preview canvas
canvas.drawPath(referencePath, referencePaint);
canvas.drawPath(performingPath, performingPaint);
// Draw target circle
float targetRadius = (this.screenDimensions.x + this.screenDimensions.y) / 5.0f;
canvas.drawCircle(this.screenDimensions.x / 2, this.screenDimensions.y / 2, targetRadius, this.targetPaint);
canvas.drawCircle(this.screenDimensions.x / 2, this.screenDimensions.y / 2, (float)(targetRadius * exerciseProgress), this.referencePaint);
referencePaint.setColor(
Color.argb(
255,
(int)(255 * (1.0 - exerciseProgress)),
(int)(255 * exerciseProgress),
0
)
);
timePassed = (System.nanoTime() - startingTime) / 1E9D;
this.rotation.x = (float) (Math.sin(timePassed) * 45);
this.referencePath = getDrawablePath(this.path.getSegments());
this.invalidate(); // Causes a redraw.
}
}

View File

@@ -1,13 +1,15 @@
package com.example.fitbot.sports;
import static com.example.fitbot.sports.Animations.PlayAnimation;
package com.example.fitbot.util;
import android.content.Context;
import android.net.Uri;
import android.os.Handler;
import android.support.v7.app.AppCompatActivity;
import android.util.Log;
import android.widget.VideoView;
import com.aldebaran.qi.sdk.builder.SayBuilder;
import com.aldebaran.qi.sdk.object.locale.Language;
import com.aldebaran.qi.sdk.object.locale.Locale;
import com.aldebaran.qi.sdk.object.locale.Region;
import com.example.fitbot.R;
import com.aldebaran.qi.sdk.QiContext;
import com.aldebaran.qi.sdk.builder.AnimateBuilder;
@@ -19,7 +21,18 @@ import java.util.concurrent.atomic.AtomicInteger;
public class FitnessCycle extends AppCompatActivity {
public static void RobotMovement(String Exercise, int Reps, QiContext qiContext) {
private static final Locale DUTCH_LOCALE = new Locale(Language.DUTCH, Region.NETHERLANDS);
/**
* Function for executing a movement animation a certain number of times
* on the robot
*
* @param Exercise The name of the exercise to perform
* @param Reps The number of repetitions to perform
* @param qiContext The QiContext to use
*/
public static void executeMovement(String Exercise, int Reps, QiContext qiContext) {
AtomicInteger repCount = new AtomicInteger(0);
Animation animation = AnimationBuilder.with(qiContext)
@@ -44,6 +57,38 @@ public class FitnessCycle extends AppCompatActivity {
}
}
/**
* Function for making the robot say something with DUTCH_LOCALE as locale
* @param phrase The phrase to make the robot say
* @param ctx The QiContext to use
*/
public static void say(String phrase, QiContext ctx)
{
say(phrase, ctx, DUTCH_LOCALE);
}
/**
* Function for making the robot say something with a specific locale
* @param phrase The phrase to make the robot say
* @param ctx The QiContext to use
* @param locale The locale to use
*/
public static void say(String phrase, QiContext ctx, Locale locale)
{
SayBuilder
.with(ctx)
.withLocale(locale)
.withText(phrase)
.build()
.run();
}
/**
* Function for playing a video in a VideoView
*
* @param videoView The VideoView to play the video in
* @param context The context to use
*/
public static void playVideo(VideoView videoView, Context context) {
// Set up the video player
if (videoView != null) {

View File

@@ -54,9 +54,11 @@ public class GesturePath {
if ( segments.length == 1)
return segments[0];
return Arrays
.stream(segments)
.reduce(segments[0], (a, b) -> PathSegment.closer(a, b, reference));
PathSegment closest = segments[0];
for ( int i = 1; i < segments.length; i++)
closest = PathSegment.closer(closest, segments[i], reference);
return closest;
}
/**

View File

@@ -3,8 +3,10 @@ package com.example.fitbot.util.processing;
import android.util.Log;
import com.example.fitbot.util.path.GesturePath;
import com.example.fitbot.util.server.IWebSocketHandler;
import com.example.fitbot.util.server.WebSocket;
import com.example.fitbot.util.server.WebServer;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import com.google.gson.JsonParser;
import org.jetbrains.annotations.NotNull;
import org.joml.Vector3f;
@@ -17,14 +19,16 @@ public class MotionProcessor {
public static final String DELIMITER = ";";
private final List<MotionData> preprocessedData = new ArrayList<>(); // Preprocessed motion data
private final List<Vector3f> relativePath = new ArrayList<>(); // Relative path of the motion data
private final List<Vector3f> relativeLeftPath = new ArrayList<>(); // Relative path of the left motion data
private final List<Vector3f> relativeRightPath = new ArrayList<>(); // Relative path of the right motion data
private Vector3f ZERO = new Vector3f(0, 0, 0);
private float sampleRate = 1.0F; // samples/second
private IMotionDataConsumer motionDataConsumer = (p1, p2, p3, p4, p5) -> {};
private GesturePath path;
private WebSocket socket;
private final float sampleRate = 10.0F; // samples/second
private IMotionDataConsumer motionDataConsumer = (p1, p2, p3, p4, p5) -> { };
private WebServer server;
public MotionProcessor() {}
@@ -37,20 +41,14 @@ public class MotionProcessor {
*/
public void startListening() {
// Create socket server
this.socket = WebSocket.createServer();
this.server = WebServer.createServer();
Log.i("MotionProcessor", "Listening for incoming connections.");
// Check if the server was created successfully
if (socket != null) {
if (server != null) {
// Update event handler to match our functionality.
socket.setEventHandler(new IWebSocketHandler() {
@Override
public void onMessageReceived(WebSocket.Message message, WebSocket.MessageReply replier) {
parsePacket(message.message);
}
});
socket.startListening();
server.setEventHandler(this::parsePacket);
}
}
@@ -60,44 +58,55 @@ public class MotionProcessor {
* the WebSocket server.
*/
public void stopListening() {
if (socket != null) {
socket.stop();
if (server != null) {
server.stop();
}
}
/**
* Function for parsing arbitrary packet data.
*
* @param data The data to parse.
*/
public void parsePacket(@NotNull String data) {
// If the message starts with 'data', it's a data packet.
if ( data.startsWith("data")) {
Log.i("MotionProcessor", "Received data packet: " + data.split(" ")[1]);
MotionData parsedData = MotionData.decode(data.split(" ")[1]);
if (parsedData != null) {
addMotionData(parsedData);
}
// Otherwise check if it starts with 'calibrate', this is the ZERO point.
} else if ( data.startsWith("zero")) { // message to calibrate device
String[] vectorData = data.split(" ")[1].split(DELIMITER);
ZERO = new Vector3f(
Float.parseFloat(vectorData[0]),
Float.parseFloat(vectorData[1]),
Float.parseFloat(vectorData[2])
);
Log.i("MotionProcessor", "Device calibrated at " + ZERO.toString());
} else if ( data.startsWith("sampleRate")) {
this.sampleRate = Float.parseFloat(data.split(" ")[1]);
}
}
/**
* Function for setting the gesture path of the processor.
*
* @param path The path to set.
*/
public void setGesturePath(GesturePath path) {
this.path = path;
try {
JsonElement json = JsonParser.parseString(data);
if (!json.isJsonObject())
return;
JsonObject object = json.getAsJsonObject();
String[] required = {
"rotationX", "rotationY", "rotationZ",
"accelerationX", "accelerationY", "accelerationZ",
"type",
"deviceId"
};
// Ensure all properties are present in the received JSON object
for (String s : required) {
if (!object.has(s))
return;
}
// Parse the data
Vector3f rotation = new Vector3f(object.get("rotationX").getAsFloat(), object.get("rotationY").getAsFloat(), object.get("rotationZ").getAsFloat());
Vector3f acceleration = new Vector3f(object.get("accelerationX").getAsFloat(), object.get("accelerationY").getAsFloat(), object.get("accelerationZ").getAsFloat());
int deviceId = object.get("deviceId").getAsInt();
String type = object.get("type").getAsString();
MotionData motionData = new MotionData(rotation, acceleration, deviceId);
if (type.equals("calibrate")) {
ZERO = getRelativeVector(motionData);
return;
}
addMotionData(motionData);
} catch (Exception e) {
// Malformed packet; ignore it.
}
}
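For context, a packet that passes the required-fields check above has the same shape as the body the ESP firmware posts earlier in this commit (the values here are illustrative):
{"deviceId": 1, "rotationX": "0.12", "rotationY": "-0.03", "rotationZ": "1.57", "accelerationX": "0.00", "accelerationY": "0.98", "accelerationZ": "0.05", "type": "data"}
A packet with "type": "calibrate" sets the ZERO point instead of appending motion data.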
/**
@@ -106,29 +115,35 @@ public class MotionProcessor {
* @param data The motion data to add.
*/
public void addMotionData(MotionData data) {
preprocessedData.add(data);
Vector3f previous = this.relativePath.isEmpty() ? ZERO : this.relativePath.get(this.relativePath.size() - 1);
List<Vector3f> target;
if (data.sensorId == 0)
target = relativeLeftPath;
else target = relativeRightPath;
Vector3f previous = target.isEmpty() ? ZERO : target.get(target.size() - 1);
Vector3f relativeVector = getRelativeVector(data).add(previous);
this.relativePath.add(relativeVector);
motionDataConsumer.accept(relativeVector, data, this.relativePath.size(), this.sampleRate, data.sensorId);
target.add(relativeVector);
motionDataConsumer.accept(relativeVector, data, target.size(), this.sampleRate, data.sensorId);
}
/**
* Function for updating the relative path.
*
* @param relativePath The new relative path.
* @param relativeLeftPath The new relative path for the left sensor.
* @param relativeRightPath The new relative path for the right sensor.
*/
public void setRelativePath(List<Vector3f> relativePath) {
this.relativePath.clear();
this.relativePath.addAll(relativePath);
public void setRelativePaths(List<Vector3f> relativeLeftPath, List<Vector3f> relativeRightPath) {
this.relativeRightPath.clear();
this.relativeLeftPath.clear();
this.relativeLeftPath.addAll(relativeLeftPath);
this.relativeRightPath.addAll(relativeRightPath);
}
/**
* Function for setting the motion data receiver.
*
* @param consumer The consumer to set.
*/
public void setMotionDataEventHandler(IMotionDataConsumer consumer) {
if ( consumer != null)
if (consumer != null)
this.motionDataConsumer = consumer;
}
@@ -163,48 +178,21 @@ public class MotionProcessor {
*/
public List<Double> getErrors(GesturePath referencePath) {
// Return the errors of the relative path compared to the reference path.
return relativePath
.stream()
.map(referencePath::getError)
.collect(Collectors.toList());
}
/**
* Function for getting the error offsets of the motion data compared to the
* reference path.
*
* @return A list of error offsets of the motion data compared to the reference path.
* If no path is set, an empty list will be returned.
*/
public List<Double> getErrors() {
if ( path == null)
return new ArrayList<>();
return getErrors(path);
List<Double> errors = new ArrayList<>();
for (Vector3f vector : relativeRightPath) {
errors.add(referencePath.getError(vector));
}
return errors;
}
/**
* Function for getting the error of the motion data compared to the reference path.
*
* @param path The path to compare the motion data to.
* @param path The path to compare the motion data to.
* @param referencePoint The reference point to compare the motion data to.
* @return The error of the motion data compared to the reference path.
*/
public double getError(GesturePath path, Vector3f referencePoint)
{
return path.getError(referencePoint);
}
/**
* Function for getting the error of the provided vector and the set path.
* If no path is set, the error will be 0.
*
* @param referencePoint The reference point to compare the path data to.
* @return The error of the motion data compared to the reference path.
*/
public double getError(Vector3f referencePoint) {
if ( path == null)
return 0;
public double getError(GesturePath path, Vector3f referencePoint) {
return path.getError(referencePoint);
}
@@ -215,24 +203,12 @@ public class MotionProcessor {
* @param referencePath The reference path to compare the motion data to.
* @return The average error of the motion data compared to the reference path.
*/
public double getAverageError(GesturePath referencePath) {
return getErrors(referencePath)
.stream()
.mapToDouble(Double::doubleValue)
.average()
.orElse(0.0D);
}
/**
* Function for calculating the average error of the motion data
* compared to the reference path.
*
* @return The average error of the motion data compared to the reference path.
*/
public double getAverageError() {
if ( path == null)
return 0;
return getAverageError(path);
public double getAverageError(GesturePath referencePath, int sensorId) {
// Average the error of the selected sensor's recorded path against the reference path.
List<Vector3f> source = (sensorId == 0 ? relativeLeftPath : relativeRightPath);
double error = 0;
for (Vector3f point : source) {
error += referencePath.getError(point);
}
return error / Math.max(1, source.size());
}
/**
@@ -241,8 +217,7 @@ public class MotionProcessor {
* @param referencePath The reference path to compare the motion data to.
*/
public void logStatistics(GesturePath referencePath) {
Log.i("MotionProcessor", "Average path error: " + getAverageError(referencePath));
Log.i("MotionProcessor", "Path length: " + relativePath.size());
Log.i("MotionProcessor", "Path length: " + relativeRightPath.size());
Log.i("MotionProcessor", "Sample rate: " + sampleRate);
Log.i("MotionProcessor", "Calibration point: " + ZERO.toString());
}

View File

@@ -0,0 +1,11 @@
package com.example.fitbot.util.server;
/**
* Interface for handling web server events.
*/
public interface IWebServerHandler {
void onReceive(String body);
}

View File

@@ -1,18 +0,0 @@
package com.example.fitbot.util.server;
import java.net.Socket;
/**
* Interface for handling WebSocket events.
*/
public interface IWebSocketHandler {
// Function for handling the connection of the WebSocket.
default void onConnected(Socket socket) {}
default void onDisconnected(Socket socket) {}
default void onMessageReceived(WebSocket.Message message, WebSocket.MessageReply replier) {}
default void onError(Socket socket, String error) {}
}

View File

@@ -0,0 +1,134 @@
package com.example.fitbot.util.server;
import android.util.Log;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.net.InetSocketAddress;
import java.net.ServerSocket;
import java.net.Socket;
import java.util.concurrent.atomic.AtomicBoolean;
public class WebServer implements Runnable {
private ServerSocket serverSocket;
protected IWebServerHandler eventHandler = (input) -> {}; // No-op.
private Thread thread;
private AtomicBoolean forceClose = new AtomicBoolean(false);
/**
* Constructor for creating a new web server.
*/
private WebServer() {
}
/**
* Function for creating a new web server listening on port 3445.
*
* @return A WebServer instance, or null if something went wrong.
*/
public static WebServer createServer() {
try {
WebServer server = new WebServer();
server.serverSocket = new ServerSocket();
server.serverSocket.bind(new InetSocketAddress(3445));
server.serverSocket.setSoTimeout(0);
Log.i("WebServer", "Server created: " + server.serverSocket.getLocalSocketAddress() + ", " + server.serverSocket.getLocalPort());
server.thread = new Thread(server);
server.thread.start();
return server;
} catch (IOException error) {
String cause = error.getMessage() == null ? "Unknown reason" : error.getMessage();
Log.e("WebServer", cause);
return null;
}
}
@Override
public void run() {
// Listen for new connections until the socket closes.
while (this.isConnected() && !this.forceClose.get()) {
try {
// Find a new connection
Socket newSocket = this.serverSocket.accept();
InputStream streamIn = newSocket.getInputStream();
// Read the incoming data
BufferedReader reader = new BufferedReader(new InputStreamReader(streamIn));
StringBuilder builder = new StringBuilder();
String line;
while ((line = reader.readLine()) != null)
builder.append(line).append("\n");
streamIn.close(); // Closes the reader, stream and socket connection.
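// BufferedReader.readLine() strips the CR/LF terminators, so after rejoining
// with '\n' the blank line between the HTTP headers and the body becomes "\n\n".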
String[] data = builder.toString().split("\n\n");
if ( data.length > 1) { // Check if the data is valid.
this.eventHandler.onReceive(data[1]);
}
} catch (IOException error) {
String reason = error.getMessage() == null ? "Unknown reason" : error.getMessage();
Log.e("WebServerConnectionHandler", "Error listening to Socket connections: " + reason);
break;
}
}
}
/**
* Method for stopping the web server.
*/
public void stop() {
this.forceClose.set(true);
try {
this.serverSocket.close();
} catch (IOException error) {
String cause = error.getMessage() == null ? "Unknown reason" : error.getMessage();
Log.e("WebServer", cause);
}
}
/**
* Method for setting the event handler for this web server.
*
* @param handler The handler to use. Its onReceive(String) method is called
* with the body of every request this server receives.
*/
public void setEventHandler(IWebServerHandler handler) {
this.eventHandler = handler;
}
/**
* Method for getting the ServerSocket connection
*
* @return The ServerSocket connection.
*/
public ServerSocket getSocket() {
return this.serverSocket;
}
/**
* Method for checking whether this web server is still accepting connections.
*
* @return Whether the underlying server socket is open.
*/
public boolean isConnected() {
return !this.serverSocket.isClosed();
}
}
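A minimal usage sketch of this class, assuming only the members defined above (the log tag is illustrative):
WebServer server = WebServer.createServer(); // Binds port 3445 and starts the listener thread.
if (server != null) {
server.setEventHandler(body -> Log.i("Example", "Received body: " + body));
// ... later, once no more data is expected:
server.stop();
}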

View File

@@ -1,150 +0,0 @@
package com.example.fitbot.util.server;
import android.support.annotation.Nullable;
import android.util.Log;
import java.io.IOException;
import java.net.ServerSocket;
import java.net.Socket;
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;
public class WebSocket {
private ServerSocket serverSocket;
private WebSocketConnectionHandler connectionHandler;
private final Set<Socket> clients = Collections.synchronizedSet(new HashSet<>());
protected IWebSocketHandler eventHandler = new IWebSocketHandler() {}; // NO-OP event handler.
/**
* Constructor for creating a new WebSocket server.
*/
private WebSocket() {}
/**
* Function for creating a new WebSocket server given the provided port.
* @return A WebSocket connection, or null if something went wrong.
*/
public static @Nullable WebSocket createServer() {
try {
WebSocket webSocket = new WebSocket();
webSocket.serverSocket = new ServerSocket();
webSocket.serverSocket.bind(webSocket.serverSocket.getLocalSocketAddress());
Log.i("WebSocket -- Creating new WebSocket server", "Server created: " + webSocket.serverSocket.getLocalSocketAddress() + ", " + webSocket.serverSocket.getLocalPort());
return webSocket;
} catch (IOException error)
{
String cause = error.getMessage() == null ? "Unknown reason" : error.getMessage();
Log.e("WebSocket -- Creating new WebSocket server", cause);
return null;
}
}
/**
* Method for listening for incoming connections.
*/
public void startListening() {
this.connectionHandler = new WebSocketConnectionHandler(this);
this.connectionHandler.listen();
}
/**
* Method for stopping the WebSocket server.
*/
public void stop() {
try {
this.serverSocket.close();
this.connectionHandler.stop();
} catch (IOException error) {
String cause = error.getMessage() == null ? "Unknown reason" : error.getMessage();
Log.e("WebSocket -- Closing server connection", cause);
}
}
/**
* Method for setting the event handler for this WebSocket server.
* @param handler The handler to use. This handler will parse all events
* that occur in this WebSocket connection. The events are the following:
* - onMessageReceived(Socket, String)
* - onConnected(Socket)
* - onDisconnected(Socket)
* - onError(Socket, String)
*/
public void setEventHandler(IWebSocketHandler handler) {
this.eventHandler = handler;
}
/**
* Method for getting the ServerSocket connection
* @return The ServerSocket connection.
*/
public ServerSocket getSocket() {
return this.serverSocket;
}
/**
* Method for checking whether this WebSocket connection is connected.
* @return The connection status of the WebSocket.
*/
public boolean isConnected() {
return !this.serverSocket.isClosed();
}
/**
* Class representing a message received from a WebSocket connection.
*/
public static class Message {
// Enumerable representing message type (opcode).
public enum Opcode {
CONTINUING((byte) 0x0),
TEXT((byte) 0x1),
BINARY((byte) 0x2),
RES0((byte) 0x3), RES1((byte) 0x4), RES2((byte) 0x5), RES3((byte) 0x6), RES4((byte) 0x7),
CLOSE_CONNECTION((byte) 0x8),
PING((byte) 0x9),
PONG((byte) 0xA),
RES5((byte) 0xB), RES6((byte) 0xC), RES7((byte) 0xD), RES8((byte) 0xE), RES9((byte) 0xF);
byte opcode;
Opcode(final byte opcode) {
this.opcode = opcode;
}
/**
* Method for decoding the opcode of a message.
* @param opcode The opcode to decode.
* @return The message type.
*/
public static Opcode decode(byte opcode) {
return Opcode.values()[opcode & 0xF];
}
// Returns the opcode of this message type.
public byte getOpcode() { return this.opcode; }
}
public String message;
public WebSocketConnection connection;
/**
* Constructor for a WebSocket message.
* @param message The message that was sent
* @param connection The connection where the message came from.
*/
public Message(WebSocketConnection connection, String message) {
this.message = message;
this.connection = connection;
}
}
/**
* Interface for a message reply.
* This can be used for when a message has been received from a client
* to reply back to the client.
*/
public interface MessageReply {
void reply(String message);
}
}

View File

@@ -1,35 +0,0 @@
package com.example.fitbot.util.server;
import java.net.Socket;
public class WebSocketConnection {
private final WebSocket origin;
private final Socket socket;
/**
* Constructor for creating an arbitrary WebSocket connection (Client)
* @param connection The server connection
* @param socket The client socket
*/
public WebSocketConnection(WebSocket connection, Socket socket) {
this.origin = connection;
this.socket = socket;
}
/**
* Getter method for retrieving the WebSocket connection
* @return The WebSocket instance.
*/
public WebSocket getOrigin() {
return origin;
}
/**
* Getter method for retrieving the Client Socket connection.
* @return The Socket connection.
*/
public Socket getSocket() {
return socket;
}
}

View File

@@ -1,217 +0,0 @@
package com.example.fitbot.util.server;
import android.util.Log;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.Socket;
import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.ArrayList;
import java.util.Base64;
import java.util.Collections;
import java.util.List;
import java.util.Scanner;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
public class WebSocketConnectionHandler implements Runnable {
private final WebSocket theSocket;
private List<Socket> clients = Collections.synchronizedList(new ArrayList<>());
private Thread thread;
private boolean forceClose = false;
/**
* Constructor for WebSocketConnectionHandler.
* This class handles all new incoming Socket connections.
*
* @param webSocket The socket to check for new connections.
*/
protected WebSocketConnectionHandler(WebSocket webSocket) {
this.theSocket = webSocket;
}
@Override
public void run() {
// Listen for new connections until the socket closes.
while (theSocket.isConnected()) {
try {
// Find a new connection
Socket newSocket = this.theSocket.getSocket().accept();
this.theSocket.eventHandler.onConnected(newSocket);
clients.add(newSocket);
InputStream streamIn = newSocket.getInputStream();
OutputStream streamOut = newSocket.getOutputStream();
// Check if the connection was successfully upgraded to WebSocket
if (upgradeConnection(streamIn, streamOut)) {
applyMessageDecoder(streamIn);
}
} catch (IOException error) {
String reason = error.getMessage() == null ? "Unknown reason" : error.getMessage();
Log.e("WebSocketConnectionHandler", "Error listening to Socket connections: " + reason);
break;
}
}
}
/**
* Method for upgrading an HTTP connection to a WebSocket connection.
* This checks whether the client sent a GET header and sends back
* the required headers to upgrade the connection.
* @param streamIn The InputStream of the client socket connection.
* @param streamOut The OutputStream of the client socket connection.
* @return Whether or not the connection was successfully upgraded.
*/
private boolean upgradeConnection(InputStream streamIn, OutputStream streamOut) {
Scanner scanner = new Scanner(streamIn, "UTF-8");
String data = scanner.useDelimiter("\\r\\n\\r\\n").next();
Matcher header = Pattern.compile("^GET").matcher(data);
// Check if the header contains the GET keyword
// If this is the case, upgrade the HTTP connection to WebSocket.
if (!header.find())
return false;
Matcher match = Pattern.compile("Sec-WebSocket-Key: (.*)").matcher(data);
match.find(); // Get next match
try {
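// Sec-WebSocket-Accept = Base64(SHA-1(client key + the fixed RFC 6455 GUID below)).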
String SECAccept = Base64.getEncoder().encodeToString(
MessageDigest.getInstance("SHA-1").digest((match.group(1) + "258EAFA5-E914-47DA-95CA-C5AB0DC85B11").getBytes(StandardCharsets.UTF_8)));
byte[] response = (
"HTTP/1.1 101 Switching Protocols\r\n" +
"Connection: Upgrade\r\n" +
"Upgrade: websocket\r\n" +
"Sec-WebSocket-Accept: " +
SECAccept + "\r\n\r\n").getBytes(StandardCharsets.UTF_8);
streamOut.write(response, 0, response.length);
} catch (IOException | NoSuchAlgorithmException error) {
Log.e("WebSocketConnectionHandler", "Failed upgrading HTTP to WebSocket connection" + error.getMessage());
return false;
}
return true;
}
/**
* Method for applying a message decoder for whenever a socket receives data.
* This method attempts to decode a message received from a WebSocket connection.
* The message is in the WebSocket frame format (see decodeWebSocketMessage below).
* @param streamIn The message stream to decode.
*/
private void applyMessageDecoder(InputStream streamIn) {
// TODO: Implement
}
/**
* Method for decoding an encoded WebSocket message
* @param bytes The message to decode, in UTF-8 format.
* @return The decoded message.
* @throws IllegalArgumentException When the `frame` content is in an incorrect format.
*/
public static String decodeWebSocketMessage(byte[] bytes) {
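// WebSocket frame layout (RFC 6455): byte 0 holds FIN (bit 7), RSV1-3 (bits 6-4) and the
// opcode (bits 3-0); byte 1 holds MASK (bit 7) and the 7-bit payload length, where 126/127
// signal a 16/64-bit extended length; then an optional 4-byte masking key and the payload.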
// Check if the packet isn't corrupted (the RSV1-3 bits must be zero)
if (bytes.length <= 2 || (bytes[0] & 0b01110000) != 0)
throw new IllegalArgumentException("Attempted to decode corrupted WebSocket frame data");
WebSocket.Message.Opcode opcode = WebSocket.Message.Opcode.decode((byte) (bytes[0] & 0b00001111));
byte payloadLength = (byte) (bytes[1] & 0b01111111); // Payload size (7 bits)
boolean fin = (bytes[0] & 0b10000000) != 0; // Whether this is the whole message
boolean masked = (bytes[1] & 0b10000000) != 0; // Whether the payload is masked
long extendedPayloadLength = 0;
int byteOffset = 2;
// Check whether the payload length is 16-bit
if (payloadLength == 126) {
// 16-bit extended payload length (byte 2 and 3)
extendedPayloadLength = ((bytes[2] & 0xFF) << 8) | (bytes[3] & 0xFF);
byteOffset += 2;
// Check whether payload length is 64-bit
} else if (payloadLength == 127) {
// 64-bit extended payload length
for (int i = 0; i < 8; i++)
extendedPayloadLength |= (long) (bytes[2 + i] & 0xFF) << ((7 - i) * 8);
byteOffset += 8;
} else {
extendedPayloadLength = payloadLength;
}
byte[] maskingKey = null;
byte[] payloadData = new byte[(int) extendedPayloadLength];
// Check if the MASK bit was set, if so, copy the key to the `maskingKey` array.
if (masked) {
maskingKey = new byte[4];
System.arraycopy(bytes, byteOffset, maskingKey, 0, 4); // move mask bytes
byteOffset += 4;
}
// Copy payload bytes into `payloadData` array.
System.arraycopy(bytes, byteOffset, payloadData, 0, payloadData.length);
// If mask is present, decode the payload data with the mask.
if (masked)
for (int i = 0; i < payloadData.length; i++)
payloadData[i] ^= maskingKey[i % 4];
// Convert payload data to string
return new String(payloadData, StandardCharsets.UTF_8);
}
/**
* Method for checking whether the connection handler is actively listening.
* @return Whether it's listening.
*/
public boolean isActive() {
return this.thread.isAlive();
}
/**
* Method for listening to all new incoming socket connections.
*/
public void listen() {
this.thread = new Thread(this);
this.thread.start();
Log.i("WebSocketConnectionHandler", "Listening started.");
}
/**
* Method for stopping the connection handler.
*/
public void stop() {
// Close the socket connection if not already closed
if (!this.theSocket.getSocket().isClosed()) {
try {
this.theSocket.getSocket().close();
} catch (IOException error) {
Log.e("WebSocketConnectionHandler", "Failed to close the socket connection: " + error.getMessage());
}
}
// Interrupt the thread
this.thread.interrupt();
// Close all connections
this.clients.forEach(client -> {
try {
client.close();
} catch (IOException error) {
Log.e("WebSocketConnectionHandler", "Failed to close client: " + error.getMessage());
}
});
this.clients.clear();
Log.i("WebSocketConnectionHandler", "Listening stopped.");
}
}

View File

@@ -1,36 +0,0 @@
package com.example.fitbot;
import static org.junit.Assert.assertEquals;
import com.example.fitbot.util.server.WebSocketConnectionHandler;
import org.junit.Test;
/**
* Created on 07/05/2024 at 18:27
* by Luca Warmenhoven.
*/
public class WebSocketMessageParsingTest {
@Test
public void parseWebSocketMessage() {
String reference = "abcdef";
final byte[] encoded = {
(byte) 129, (byte) 134, (byte) 167,
(byte) 225, (byte) 225, (byte) 210,
(byte) 198, (byte) 131, (byte) 130,
(byte) 182, (byte) 194, (byte) 135
};
String decoded = "";
try {
decoded = WebSocketConnectionHandler.decodeWebSocketMessage(encoded);
} catch (Exception e) {
System.err.println("Error occurred whilst attempting to parse input" + e.getMessage());
}
assertEquals(reference, decoded);
}
}

View File

@@ -0,0 +1,4 @@
# Issues with hardware
## Issues with libraries
The WebSocket library doesn't work well on the ESP8266 D1 Mini. It lags out the entire ESP and makes it unresponsive.

View File

@@ -67,25 +67,50 @@ https://www.ncbi.nlm.nih.gov/pmc/articles/PMC8936033/
# inleiding
De vergrijzing van de samenleving zet druk op de ouderenzorg. Er is een tekort aan zorgverleners, terwijl de vraag naar zorg toeneemt. Robotica wordt gezien als een mogelijke oplossing om dit probleem optelossen. Robots kunnen taken overnemen van verpleegsters, zoals medicatie toedienen, lichaamsverzorging en gezelschap bieden. Maar de inzet van robots in de ouderenzorg roept ook ethische dilemma's op. Ik ga onderzoeken welke etische dillemas er kunnen ontstaan in de zorg. Om dit doel te bereiken, zullen we de volgende deelvragen beantwoorden:
De vergrijzing van de samenleving zet druk op de ouderenzorg. Er is een tekort aan zorgverleners, terwijl de vraag naar zorg toeneemt. Robotica wordt gezien als een mogelijke oplossing om dit probleem op te lossen. Robots kunnen taken overnemen van verpleegsters, zoals medicatie toedienen, lichaamsverzorging en gezelschap bieden. Maar de inzet van robots in de ouderenzorg roept ook ethische dilemma's op. In dit onderzoek wordt onderzocht welke ethische Dillema's er kunnen ontstaan in de zorg. Om dit doel te bereiken, zullen de volgende deelvragen beantwoord worden:
- What are the possible effects of the use of robots on the emotional and social well-being of the elderly?
- Does robotisation in elderly care lead to impersonal and mechanical care?
- What are the benefits of using robots in elderly care?
- What risks to the safety and privacy of the elderly arise when robots are used in care?
- What are the drawbacks of using robots in elderly care?
These questions cover different aspects of robotics in elderly care. After answering the sub-questions, we can draw a conclusion and examine which dilemmas can arise.
## What are the possible effects of the use of robots on the emotional and social well-being of the elderly?
The social well-being of the elderly is an important topic. Elderly people can often feel lonely or cut off from society. This is a major problem in elderly care, because you do not want the elderly to feel even lonelier after being cared for by a robot. It is therefore important to know what the effects of using robotics in elderly care are, so that we can determine what kinds of dilemmas it may bring.
A great deal of research has already been done on this topic. It shows that robots can have both positive and negative effects. One positive effect is that the elderly feel less lonely. A review of 17 studies covering 4 different types of robots found that, in most studies, companion robots had a positive influence on (socio)psychological variables (e.g. mood, loneliness, social connections and communication) and physiological variables (e.g. stress reduction), although the methodological quality of the studies was generally low [(Socially Assistive Robots in Elderly Care: A Systematic Review into Effects and Effectiveness, 2010)](https://www.sciencedirect.com/science/article/abs/pii/S1525861010003476). This shows that robots help to counter loneliness among the elderly. They can also be used to help the elderly stay in touch with friends and family, which is useful because being closer to family is, for most elderly people, very important in combating loneliness. Furthermore, robots can be used to motivate the elderly to stay active and engaged in their lives: fitness is not only a way to stay fit, it also brings social aspects with it.
Overall, the possible effects of robots on the emotional and social well-being of the elderly are complex. There are both potential advantages and disadvantages, and the impact of robots will probably differ from person to person. It is important to weigh these factors carefully when deciding whether or not to implement robots in elderly care.
## What are the benefits of using robots in elderly care?
The use of robots in elderly care can bring several benefits. Robots can outperform humans in some respects. It is important to find out what those benefits are, in order to see the positive side of using robots; moreover, clear benefits rarely have a dilemma attached to them, which narrows down the dilemmas to examine.
Robots can take over tasks from care providers, such as administering medication, attending to patients and moving patients. This allows care providers to focus on the more complex care tasks that robots cannot take over, and to spend more time with the elderly, which has a positive effect on combating loneliness. Research shows that 70% of care providers in nursing homes believe that robots can help them improve the quality of care [(Impacts of robot implementation on care personnel and clients in elderly-care institutions, 2019)](https://www.sciencedirect.com/science/article/pii/S1386505619300498). Using robots thus means that the simple, frequently recurring tasks can be automated. In addition, robots can provide care around the clock when needed, so that fewer staff are required for night shifts.
This is a major advantage of using robots. The reduced workload in care benefits both the care providers and the elderly: when robots take over the light, simple work, care providers have more time for the elderly and for the heavier tasks.
## What are the drawbacks of using robots in elderly care?
Besides the benefits, there are also drawbacks to using robots in elderly care. These drawbacks are essential to investigate, because dilemmas often attach to them. Robots can bring several drawbacks, which can fall within the ethical domain but also outside it.
One possible drawback concerns privacy. The elderly may suffer from memory loss, which can lead to moments when they no longer know what they are saying or what they are consenting to, and a robot cannot always understand the situation of a person with memory loss. This could be a problem according to [(The ethical issues of social assistive robotics: A critical literature review, 2021)](https://www.sciencedirect.com/science/article/pii/S0160791X21002013#bib40:~:text=3.1.1.%20Well,requiring%20ethical%20ponderation). In addition, robots constantly store data, such as what the user does, where they go and whom they interact with. With this data, a robot could build a profile of each person. That is a privacy problem, because it can involve personal information: such personal profiles could be sold for commercial purposes or leaked, making the personal information public.
The drawbacks of using robots in elderly care are thus mainly tied to privacy. If robots can create personal profiles, this endangers privacy, because those profiles can be used for multiple purposes that may harm the elderly.
# Conclusion
In this study we discussed the social well-being of the elderly and the advantages and disadvantages of robots in elderly care. It is important to be careful when implementing robots in elderly care, because needs differ from person to person. We must keep investigating the needs of the elderly, because it is important that they do not end up feeling worse. The advantage of robots is that they can take pressure off care providers: because robots can take over the simple tasks, extra time becomes available to spend personally with the elderly. There are also drawbacks, mainly concerning privacy, since robots can build a profile of the elderly that can be misused for commercial purposes.
One dilemma that can arise concerns privacy and data. Robots constantly collect data about the elderly and the people they interact with, and this information can be sensitive, touching on that person's health and well-being. There are also dilemmas about the needs of the elderly: is it ethically responsible to use robots against their will? If robots are deployed at scale, not everyone may be comfortable with them, and some people will still be looking for human contact. For further research, I recommend examining how data can be protected and which data the elderly are comfortable sharing with a robot. This is essential for protecting their privacy; because it is a dilemma, we must look for solutions.
[Socially Assistive Robots in Elderly Care: A Systematic Review into Effects and Effectiveness (2010)](https://www.sciencedirect.com/science/article/abs/pii/S1525861010003476)
[Impacts of robot implementation on care personnel and clients in elderly-care institutions (2019)](https://www.sciencedirect.com/science/article/pii/S1386505619300498)
[The ethical issues of social assistive robotics: A critical literature review (2021)](https://www.sciencedirect.com/science/article/pii/S0160791X21002013)