Merge branch 'main' of ssh://gitlab.fdmci.hva.nl/propedeuse-hbo-ict/onderwijs/2023-2024/out-a-se-ti/blok-4/muupooviixee66

This commit is contained in:
Niels
2024-05-28 20:16:59 +02:00
29 changed files with 572 additions and 993 deletions

73
.idea/workspace.xml generated
View File

@@ -14,12 +14,9 @@
</configurations>
</component>
<component name="ChangeListManager">
<list default="true" id="00599d5b-7eb5-44da-ad7f-98bf42384c16" name="Changes" comment="Added NodeJS Server files">
<change afterPath="$PROJECT_DIR$/docs/personal-documentation/Luca/literatuuronderzoek/feedback.md" afterDir="false" />
<list default="true" id="00599d5b-7eb5-44da-ad7f-98bf42384c16" name="Changes" comment="Commit war crimes in formal Yugoslavia">
<change beforePath="$PROJECT_DIR$/.idea/workspace.xml" beforeDir="false" afterPath="$PROJECT_DIR$/.idea/workspace.xml" afterDir="false" />
<change beforePath="$PROJECT_DIR$/code/web/incoming_request_handlers.js" beforeDir="false" afterPath="$PROJECT_DIR$/code/web/incoming_request_handlers.js" afterDir="false" />
<change beforePath="$PROJECT_DIR$/docs/personal-documentation/Luca/literatuuronderzoek/onderzoek-voorbeeld.md" beforeDir="false" afterPath="$PROJECT_DIR$/docs/personal-documentation/Luca/literatuuronderzoek/literatuur-onderzoek.md" afterDir="false" />
<change beforePath="$PROJECT_DIR$/docs/personal-documentation/Luca/literatuuronderzoek/onderzoek.md" beforeDir="false" />
</list>
<option name="SHOW_DIALOG" value="false" />
<option name="HIGHLIGHT_CONFLICTS" value="true" />
@@ -88,19 +85,19 @@
<option name="hideEmptyMiddlePackages" value="true" />
<option name="showLibraryContents" value="true" />
</component>
<component name="PropertiesComponent"><![CDATA[{
"keyToString": {
"git-widget-placeholder": "main",
"node.js.detected.package.eslint": "true",
"node.js.detected.package.tslint": "true",
"node.js.selected.package.eslint": "(autodetect)",
"node.js.selected.package.tslint": "(autodetect)",
"nodejs_package_manager_path": "npm",
"settings.editor.selected.configurable": "preferences.lookFeel",
"ts.external.directory.path": "/Applications/WebStorm.app/Contents/plugins/javascript-plugin/jsLanguageServicesImpl/external",
"vue.rearranger.settings.migration": "true"
<component name="PropertiesComponent">{
&quot;keyToString&quot;: {
&quot;git-widget-placeholder&quot;: &quot;main&quot;,
&quot;node.js.detected.package.eslint&quot;: &quot;true&quot;,
&quot;node.js.detected.package.tslint&quot;: &quot;true&quot;,
&quot;node.js.selected.package.eslint&quot;: &quot;(autodetect)&quot;,
&quot;node.js.selected.package.tslint&quot;: &quot;(autodetect)&quot;,
&quot;nodejs_package_manager_path&quot;: &quot;npm&quot;,
&quot;settings.editor.selected.configurable&quot;: &quot;preferences.lookFeel&quot;,
&quot;ts.external.directory.path&quot;: &quot;/Applications/WebStorm.app/Contents/plugins/javascript-plugin/jsLanguageServicesImpl/external&quot;,
&quot;vue.rearranger.settings.migration&quot;: &quot;true&quot;
}
}]]></component>
}</component>
<component name="RecentsManager">
<key name="MoveFile.RECENT_KEYS">
<recent name="$PROJECT_DIR$/code/src/app/src/main/java/com/fitbot" />
@@ -198,7 +195,9 @@
<workItem from="1716363591734" duration="658000" />
<workItem from="1716538507910" duration="2194000" />
<workItem from="1716547378856" duration="243000" />
<workItem from="1716648462646" duration="8054000" />
<workItem from="1716648462646" duration="8555000" />
<workItem from="1716674767699" duration="21000" />
<workItem from="1716889548355" duration="1275000" />
</task>
<task id="LOCAL-00001" summary="Changes">
<created>1713528225837</created>
@@ -324,7 +323,39 @@
<option name="project" value="LOCAL" />
<updated>1716540796495</updated>
</task>
<option name="localTasksCounter" value="17" />
<task id="LOCAL-00017" summary="killed my brain doing literature">
<option name="closed" value="true" />
<created>1716658275320</created>
<option name="number" value="00017" />
<option name="presentableId" value="LOCAL-00017" />
<option name="project" value="LOCAL" />
<updated>1716658275320</updated>
</task>
<task id="LOCAL-00018" summary="updated gay">
<option name="closed" value="true" />
<created>1716658499589</created>
<option name="number" value="00018" />
<option name="presentableId" value="LOCAL-00018" />
<option name="project" value="LOCAL" />
<updated>1716658499589</updated>
</task>
<task id="LOCAL-00019" summary="Commit crack cocaine">
<option name="closed" value="true" />
<created>1716889664199</created>
<option name="number" value="00019" />
<option name="presentableId" value="LOCAL-00019" />
<option name="project" value="LOCAL" />
<updated>1716889664199</updated>
</task>
<task id="LOCAL-00020" summary="Commit war crimes in formal Yugoslavia">
<option name="closed" value="true" />
<created>1716890009616</created>
<option name="number" value="00020" />
<option name="presentableId" value="LOCAL-00020" />
<option name="project" value="LOCAL" />
<updated>1716890009616</updated>
</task>
<option name="localTasksCounter" value="21" />
<servers />
</component>
<component name="TypeScriptGeneratedFilesManager">
@@ -359,6 +390,10 @@
<MESSAGE value="nonsense" />
<MESSAGE value="Added Skills Ontwikkelings Plan.docx" />
<MESSAGE value="Added NodeJS Server files" />
<option name="LAST_COMMIT_MESSAGE" value="Added NodeJS Server files" />
<MESSAGE value="killed my brain doing literature" />
<MESSAGE value="updated gay" />
<MESSAGE value="Commit crack cocaine" />
<MESSAGE value="Commit war crimes in formal Yugoslavia" />
<option name="LAST_COMMIT_MESSAGE" value="Commit war crimes in formal Yugoslavia" />
</component>
</project>

View File

@@ -20,4 +20,21 @@ void Connectivity::websocketSetup(char* ip, uint16_t port, char* adress){
void Connectivity::sendData(float roll, float pitch, float yaw){
String message = "{\"Sensor\": 1, \"roll\":\"" + String(roll) + "\",\"pitch\":\"" + String(pitch) + "\",\"yaw\":\"" + String(yaw) + "\"}";
webSocket.sendTXT(message);
}
/**
 * Send an HTTP/1.1 POST request with the given body to a server.
 *
 * @param serverAddress Host name or IP of the server (also used for the Host header).
 * @param serverSubPath Request path, e.g. "/".
 * @param serverPort    TCP port to connect to.
 * @param data          Request body (NUL-terminated).
 * @param dataLength    Length of the body in bytes (must match what is sent).
 * @param contentType   Value for the Content-Type header.
 * @return 0 on success, 1 when the TCP connection could not be opened.
 *
 * NOTE(review): the connection is closed without reading the server's
 * response, so HTTP status codes are never observed — confirm this
 * fire-and-forget behaviour is intended.
 */
int Connectivity::httpPost(const char *serverAddress, const char *serverSubPath, const unsigned short serverPort,
const char *data, const size_t dataLength, const char *contentType)
{
if ( wifi_client.connect(serverAddress, serverPort)) {
wifi_client.printf("POST %s HTTP/1.1\r\n", serverSubPath);
wifi_client.printf("Host: %s\r\n", serverAddress);
wifi_client.printf("Content-Type: %s\r\n", contentType);
// %d is the wrong specifier for size_t; cast to unsigned for a portable format.
wifi_client.printf("Content-Length: %u\r\n", (unsigned) dataLength);
// HTTP requires a blank line terminated by CRLF CRLF between headers and
// body; the previous "\r\n\n" terminator was malformed and strict servers
// reject such requests.
wifi_client.print("\r\n");
// Use print (not println): Content-Length covers exactly dataLength bytes,
// so a trailing CRLF after the body would be stray data on the wire.
wifi_client.print(data);
wifi_client.stop();
return 0;
}
return 1;
}

View File

@@ -6,25 +6,25 @@
#include <ArduinoWiFiServer.h>
#include <ESP8266WiFi.h>
#include <ESP8266WiFiGeneric.h>
#include <ESP8266WiFiGratuitous.h>
#include <ESP8266WiFiMulti.h>
#include <ESP8266WiFiSTA.h>
#include <ESP8266WiFiScan.h>
#include <ESP8266WiFiType.h>
#include <WiFiClient.h>
#include <WiFiServer.h>
#include <WiFiServerSecure.h>
#include <WiFiUdp.h>
#include <WiFiClientSecure.h>
// Network I/O facade for the device: WiFi association, a WebSocket data
// channel, and one-shot HTTP POST requests.
class Connectivity {
public:
// Join the WiFi network with the given credentials.
void connectWiFi(char* ssid, char* pass);
// Open a WebSocket connection to ip:port at the given path.
// NOTE(review): "adress" is a typo for "address"; renaming would touch callers.
void websocketSetup(char* ip, uint16_t port, char* adress);
// Send one JSON-encoded orientation sample (roll/pitch/yaw) over the WebSocket.
void sendData(float roll, float pitch, float yaw);
// POST `data` (dataLength bytes, labelled with contentType) to
// serverAddress:serverPort at serverSubPath.
// Returns 0 on success, 1 if the TCP connection could not be opened.
int httpPost(const char *serverAddress, const char *serverSubPath, const unsigned short serverPort, const char *data, const size_t dataLength, const char *contentType);
private:
ESP8266WiFiMulti wifi;          // WiFi association helper (multi-AP capable).
WiFiClient wifi_client;         // Raw TCP client used by httpPost().
WebSocketsClient webSocket;     // WebSocket channel used by sendData().
};
#endif

View File

@@ -5,56 +5,33 @@
// Arduino entry point: bring up serial logging, WiFi, the IMU sensor and
// the WebSocket connection. Runs once at boot.
void setup() {
Serial.begin(9600);
Serial.println("startup");
//connect to internet and start sensor
connectivity.connectWiFi(ssid, pass);
sensorManager.sensorSetup();
//ws server address, port and URL
// NOTE(review): server IP/port are hard-coded here — consider moving them
// next to ssid/pass so deployment targets live in one place.
webSocket.begin("145.28.160.108", 8001, "");
// try every 500 again if connection has failed
webSocket.setReconnectInterval(500);
}
// Arduino main loop: read orientation + acceleration from the IMU, log it,
// push it over the WebSocket, and (intended) POST a JSON sample every 100 ms.
void loop() {
SensorManager::eulerAngles eulerRotation = sensorManager.getEulerAngles();
SensorManager::acceleration rotationAcceleration = sensorManager.getAcelleration();
// BUG(review): `lastTime` is a non-static local, so it is reset to 0 on
// every loop() iteration — `currentTime - lastTime >= 100` is therefore
// always true and the 100 ms throttle below never actually throttles.
// Declaring it `static` (or file-scope) would fix this; left unchanged here.
unsigned long lastTime = 0; // will store the last time the code was run
// Subtract offset
// rotation.i -= offset.i;
// rotation.j -= offset.j;
// rotation.k -= offset.k;
// rotation.w -= offset.w;
Serial.print(eulerRotation.roll);
Serial.print(" ");
Serial.print(eulerRotation.yaw);
Serial.print(" ");
Serial.print(eulerRotation.pitch);
Serial.println();
// Convert quaternion to Euler angles in radians
// Convert to degrees
// float rollDegrees = roll * 180.0f / PI;
// float pitchDegrees = pitch * 180.0f / PI;
// float yawDegrees = yaw * 180.0f / PI;
// NOTE(review): roll/pitch/yaw are printed a second time here (above they
// were printed as roll/yaw/pitch) — one of the two print groups is likely
// redundant; confirm which ordering the consumer expects.
Serial.print(eulerRotation.roll);
Serial.print(" ");
Serial.print(eulerRotation.pitch);
Serial.print(" ");
Serial.print(eulerRotation.yaw);
sendData(eulerRotation.roll, eulerRotation.pitch, eulerRotation.yaw);
Serial.println();
// Service the WebSocket state machine (reconnects, keep-alives, RX).
webSocket.loop();
unsigned long currentTime = millis();
if (currentTime - lastTime >= 100) { // 100 ms has passed
String message = "{\"deviceId\": 1, \"rotationX\":\"" + String(eulerRotation.roll) + "\",\"rotationY\":\"" + String(eulerRotation.pitch) + "\",\"rotationZ\":\"" + String(eulerRotation.yaw) + "\",\"accelerationX\":\"" + String(rotationAcceleration.x) + "\",\"accelerationY\":\"" + String(rotationAcceleration.y) + "\",\"accelerationZ\":\"" + String(rotationAcceleration.z) + "\",\"type\":\"data\"}";
// NOTE(review): content type "json" is non-standard — servers generally
// expect "application/json"; confirm what the receiving endpoint parses.
Serial.println(connectivity.httpPost("192.168.137.146", "/", 3445, message.c_str(), message.length(), "json"));
Serial.println(message);
lastTime = currentTime;
}
// if (Serial.available()) {
// String command = Serial.readStringUntil('\n');
// command.trim(); // remove any trailing whitespace
// if (command == "setZeroPoint") {
// setZeroPoint();
// }
// }
// }
// void setZeroPoint() {
// offset = sensorManager.readLoop();
// }
}
//acceleration.X
//acceleration.Y
//acceleration.Z
void sendData(float roll, float pitch, float yaw){
String message = "{\"Sensor\": 1, \"roll\":\"" + String(roll) + "\",\"pitch\":\"" + String(pitch) + "\",\"yaw\":\"" + String(yaw) + "\"}";
webSocket.sendTXT(message);
}

View File

@@ -5,7 +5,6 @@
SensorManager::SensorManager() {}
void SensorManager::sensorSetup() {
Wire.setClockStretchLimit(150000L); // Default stretch limit 150mS
Wire.begin();
//wait for the sensor to start before continue
if (myIMU.begin() == false) {
@@ -15,20 +14,12 @@ void SensorManager::sensorSetup() {
//start sensorfunction and start autocalibration
//once calibration is enabled it attempts to every 5 min
Wire.setClock(400000); //Increase I2C data rate to 400kHz
myIMU.calibrateAll(); //Turn on cal for Accel, Gyro, and Mag
Wire.setClock(400000);
myIMU.enableGyroIntegratedRotationVector(100); //send data every 100ms
myIMU.enableMagnetometer(100); //Send data update every 100ms
myIMU.saveCalibration(); //Saves the current dynamic calibration data (DCD) to memory
myIMU.requestCalibrationStatus(); //Sends command to get the latest calibration status
if (myIMU.calibrationComplete() == true) {
Serial.println("Calibration data successfully stored");
}
myIMU.enableAccelerometer(100); //Send data update every 100ms
Serial.println(F("magnetometer rotation enabled"));
}
//get sensordata
SensorManager::RotationQuintillions SensorManager::getQuintillions() {
if (myIMU.dataAvailable() == true) {
float i = myIMU.getQuatI();
@@ -48,7 +39,7 @@ SensorManager::RotationQuintillions SensorManager::getQuintillions() {
return rotation;
}
}
//calculate Quintillions to Euler angles from -1π to +1π
SensorManager::eulerAngles SensorManager::getEulerAngles() {
SensorManager::RotationQuintillions rotation = getQuintillions();
float roll = atan2(2.0f * (rotation.w * rotation.i + rotation.j * rotation.k), 1.0f - 2.0f * (rotation.i * rotation.i + rotation.j * rotation.j));
@@ -56,4 +47,12 @@ SensorManager::eulerAngles SensorManager::getEulerAngles() {
float yaw = atan2(2.0f * (rotation.w * rotation.k + rotation.i * rotation.j), 1.0f - 2.0f * (rotation.j * rotation.j + rotation.k * rotation.k));
eulerAngles EulerAngles = { roll, pitch, yaw };
return EulerAngles;
}
// Read the current accelerometer sample (x/y/z) from the IMU.
// NOTE(review): unlike getQuintillions(), this does not gate on
// myIMU.dataAvailable() — confirm getAccelX/Y/Z return the last cached
// sample rather than undefined data when no fresh reading exists.
// NOTE(review): "Acelleration" is a misspelling of "Acceleration", but it is
// part of the public interface (called from loop()), so it is left as-is.
SensorManager::acceleration SensorManager::getAcelleration(){
float x = myIMU.getAccelX();
float y = myIMU.getAccelY();
float z = myIMU.getAccelZ();
acceleration Acceleration = { x, y, z };
return Acceleration;
}

View File

@@ -13,8 +13,14 @@ public:
float pitch;
float yaw;
};
eulerAngles getEulerAngles();
struct acceleration {
float x;
float y;
float z;
};
eulerAngles getEulerAngles();
acceleration getAcelleration();
private:
struct RotationQuintillions {

View File

@@ -3,21 +3,24 @@ package com.example.fitbot.exercise;
import android.util.Log;
import com.example.fitbot.util.path.GesturePath;
import com.example.fitbot.util.server.IWebSocketHandler;
import com.example.fitbot.util.server.WebSocket;
import com.example.fitbot.util.processing.IMotionDataConsumer;
import com.example.fitbot.util.server.IWebServerHandler;
import com.example.fitbot.util.server.WebServer;
import java.util.Objects;
import java.util.function.Consumer;
public class Exercise implements IWebSocketHandler {
public class Exercise implements IWebServerHandler {
private EMuscleGroup muscleGroup;
private GesturePath path;
private GesturePath leftPath;
private GesturePath rightPath;
private String title;
private String description;
private float segmentsPerSecond;
// Static fields.
private static WebSocket webSocket;
private static WebServer webSocket;
private static Exercise currentExercise = null;
@@ -25,18 +28,19 @@ public class Exercise implements IWebSocketHandler {
* Constructor for the AbstractExercise class.
*
* @param muscleGroup The muscle group of the exercise.
* @param path The path of the exercise.
* @param leftPath The path of the left hand.
* @param rightPath The path of the right hand.
* @param title The title of the exercise.
* @param description The description of the exercise.
* @param segmentsPerSecond The number of segments per second.
* This determines how fast the exercise should be performed.
*/
public Exercise(EMuscleGroup muscleGroup, String title, String description, GesturePath path, float segmentsPerSecond) {
public Exercise(EMuscleGroup muscleGroup, String title, String description, GesturePath leftPath, GesturePath rightPath) {
this.muscleGroup = muscleGroup;
this.title = title;
this.description = description;
this.path = path;
this.segmentsPerSecond = segmentsPerSecond;
this.leftPath = leftPath;
this.rightPath = rightPath;
}
/**
@@ -57,10 +61,9 @@ public class Exercise implements IWebSocketHandler {
}
try {
webSocket = WebSocket.createServer();
webSocket = WebServer.createServer();
Objects.requireNonNull(webSocket, "WebSocket server could not be created.");
webSocket.startListening();
webSocket.setEventHandler(this);
currentExercise = this;
} catch (Exception e) {
@@ -108,8 +111,8 @@ public class Exercise implements IWebSocketHandler {
/**
* Get the path of the exercise.
*/
public GesturePath getPath() {
return path;
public GesturePath[] getPath() {
return new GesturePath[]{leftPath, rightPath};
}
public String getTitle() {
@@ -126,4 +129,9 @@ public class Exercise implements IWebSocketHandler {
public double getSegmentsPerSecond() {
return segmentsPerSecond;
}
@Override
public void onReceive(String message) {
}
}

View File

@@ -1,41 +0,0 @@
package com.example.fitbot.exercise;
import com.example.fitbot.util.processing.IMotionDataConsumer;
import com.example.fitbot.util.processing.MotionData;
import com.example.fitbot.util.processing.MotionProcessor;
import com.example.fitbot.util.server.IWebSocketHandler;
import com.example.fitbot.util.server.WebSocket;
import org.joml.Vector3f;
import java.net.Socket;
public class ExerciseBuilder implements IWebSocketHandler, IMotionDataConsumer {
private MotionProcessor processor;
public ExerciseBuilder() {
this.processor = new MotionProcessor();
this.processor.setMotionDataEventHandler(this);
}
@Override
public void onDisconnected(Socket socket) {
IWebSocketHandler.super.onDisconnected(socket);
}
@Override
public void onMessageReceived(WebSocket.Message message, WebSocket.MessageReply replier) {
IWebSocketHandler.super.onMessageReceived(message, replier);
}
@Override
public void onError(Socket socket, String error) {
IWebSocketHandler.super.onError(socket, error);
}
@Override
public void accept(Vector3f transformedVector, MotionData motionData, int sampleIndex, double sampleRate, int sensorId) {
}
}

View File

@@ -76,7 +76,7 @@ public class ExerciseManager {
content.get(PROPERTY_NAME).getAsString(),
content.get(PROPERTY_DESC).getAsString(),
gesturePathFromString(content.get(PROPERTY_VECTORS).getAsString()),
DEFAULT_SEGMENT_SPEED
gesturePathFromString(content.get(PROPERTY_SEGMENT_SPEED).getAsString())
);
} catch (Exception e) {
e.printStackTrace();

View File

@@ -1,43 +0,0 @@
package com.example.fitbot.speech;
import com.aldebaran.qi.sdk.QiContext;
import com.aldebaran.qi.sdk.builder.SayBuilder;
import com.aldebaran.qi.sdk.object.locale.Language;
import com.aldebaran.qi.sdk.object.locale.Locale;
import com.aldebaran.qi.sdk.object.locale.Region;
/**
* SpeechGenerator class for generating speech for the robot
*/
public class SpeechGenerator {
private static final Locale DUTCH_LOCALE = new Locale(Language.DUTCH, Region.NETHERLANDS);
private SayBuilder builder;
/**
* Function for making the robot say something with DUTCH_LOCALE as locale
* @param phrase The phrase to make the robot say
* @param ctx The QiContext to use
*/
public static void say(String phrase, QiContext ctx)
{
say(phrase, ctx, DUTCH_LOCALE);
}
/**
* Function for making the robot say something with a specific locale
* @param phrase The phrase to make the robot say
* @param ctx The QiContext to use
* @param locale The locale to use
*/
public static void say(String phrase, QiContext ctx, Locale locale)
{
SayBuilder
.with(ctx)
.withLocale(locale)
.withText(phrase)
.build()
.run();
}
}

View File

@@ -1,7 +1,6 @@
package com.example.fitbot.ui.activities;
import android.os.Bundle;
import android.os.Handler;
import android.util.Log;
import android.widget.VideoView;
@@ -11,16 +10,17 @@ import com.aldebaran.qi.sdk.RobotLifecycleCallbacks;
import com.aldebaran.qi.sdk.design.activity.RobotActivity;
import com.aldebaran.qi.sdk.design.activity.conversationstatus.SpeechBarDisplayStrategy;
import com.example.fitbot.R;
import com.example.fitbot.sports.FitnessCycle;
import com.example.fitbot.exercise.EMuscleGroup;
import com.example.fitbot.exercise.Exercise;
import com.example.fitbot.ui.components.PersonalMotionPreviewElement;
import com.example.fitbot.util.Animations;
import com.example.fitbot.util.ButtonNavigation;
import com.example.fitbot.util.FitnessCycle;
import com.example.fitbot.util.path.GesturePath;
import org.joml.Vector3f;
import java.util.concurrent.CompletableFuture;
public class FitnessActivity extends RobotActivity implements RobotLifecycleCallbacks {
PersonalMotionPreviewElement personalMotionPreviewElement;
@@ -45,23 +45,22 @@ public class FitnessActivity extends RobotActivity implements RobotLifecycleCall
GesturePath.Builder gesturePathBuilder = new GesturePath.Builder();
gesturePathBuilder.addVector(new Vector3f(-.5f, -.5f, -.5f));
gesturePathBuilder.addVector(new Vector3f(.5f, -.5f, -.5f));
gesturePathBuilder.addVector(new Vector3f(.5f, -.5f, .5f));
gesturePathBuilder.addVector(new Vector3f(-.5f, -.5f, .5f));
gesturePathBuilder.addVector(new Vector3f(-.5f, -.5f, -.5f));
gesturePathBuilder.addVector(new Vector3f(-.5f, .5f, -.5f));
gesturePathBuilder.addVector(new Vector3f(.5f, .5f, -.5f));
gesturePathBuilder.addVector(new Vector3f(.5f, .5f, .5f));
gesturePathBuilder.addVector(new Vector3f(-.5f, .5f, .5f));
gesturePathBuilder.addVector(new Vector3f(-.5f, .5f, -.5f));
for ( int i = 0; i < 40; i++)
{
gesturePathBuilder.addVector(
new Vector3f(
(float)Math.cos(Math.PI + (Math.PI / 40.0f) * i),
(float)Math.sin(Math.PI + (Math.PI / 40.0f) * i),
0
)
);
}
personalMotionPreviewElement = findViewById(R.id.personalMotionPreviewElement);
personalMotionPreviewElement.post(() -> {
Log.i("FitnessActivity", "PersonalMotionPreviewElement.post()");
Exercise exercise = new Exercise(EMuscleGroup.ARMS, "Bicep Curls", "Oefening voor de biceps.", gesturePathBuilder.build(), 1);
Exercise exercise = new Exercise(EMuscleGroup.ARMS, "Bicep Curls", "Oefening voor de biceps.", gesturePathBuilder.build(), gesturePathBuilder.build());
personalMotionPreviewElement.initialize(exercise);
});
@@ -71,11 +70,9 @@ public class FitnessActivity extends RobotActivity implements RobotLifecycleCall
public void onRobotFocusGained(QiContext qiContext) {
// Find the VideoView by its ID
FitnessCycle.RobotMovement("bicepcurl", 10, qiContext);
// FitnessCycle.playVideo(qiContext, videoView, this);
CompletableFuture.runAsync(() -> FitnessCycle.executeMovement("bicepcurl", 10, qiContext));
personalMotionPreviewElement.provideQiContext(qiContext);
// FitnessCycle.playVideo(qiContext, videoView, this);
}
@Override
@@ -88,14 +85,8 @@ public class FitnessActivity extends RobotActivity implements RobotLifecycleCall
// Implement your logic when the robot focus is refused
}
private Handler handler;
private Runnable runnable;
@Override
protected void onDestroy() {
super.onDestroy();
if (handler != null && runnable != null) {
handler.removeCallbacks(runnable);
}
}
}

View File

@@ -2,6 +2,7 @@ package com.example.fitbot.ui.components;
import android.content.Context;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.Path;
import android.util.AttributeSet;
@@ -10,7 +11,7 @@ import android.view.View;
import com.aldebaran.qi.sdk.QiContext;
import com.example.fitbot.exercise.Exercise;
import com.example.fitbot.speech.SpeechGenerator;
import com.example.fitbot.util.FitnessCycle;
import com.example.fitbot.util.path.GesturePath;
import com.example.fitbot.util.path.PathSegment;
import com.example.fitbot.util.processing.MotionData;
@@ -23,21 +24,23 @@ import org.joml.Vector4f;
public class PersonalMotionPreviewElement extends View {
private GesturePath path;
private double pathTime = 0.0D; // The timestamp at which the path is currently at.
private GesturePath[] paths;
private MotionProcessor motionProcessor;
private double pathTime = 0.0D; // The timestamp at which the path is currently at.
private double exerciseProgress = 0.0D; // The progress of the exercise. Ranges from 0 to 1.
private QiContext qiContext;
private Exercise exercise;
private Path referencePath; // The path the user is supposed to follow.
private Path performingPath; // The path the user is currently following.
private Path stickmanPath; // The path of the stickman that is drawn on the screen.
private Path targetPath; // The path the user is supposed to follow.
private Path actualPath; // The path the user is currently following.
private final Paint referencePaint = new Paint();
private final Paint targetPaint = new Paint();
private final Paint backgroundColor = new Paint();
private Paint referencePaint;
private Paint performingPaint;
private Paint textPaint;
private static final String[] USER_PHRASES = {
"Veel success met de oefening!",
@@ -45,76 +48,24 @@ public class PersonalMotionPreviewElement extends View {
"Veel plezier!"
};
// Matrices for the projection of the path segments onto the screen.
// Depth buffering sadly is not supported yet due to brain dysfunction
private Matrix4f modelViewMatrix = new Matrix4f();
private Matrix4f projectionMatrix = new Matrix4f();
private double timePassed = 0.0D; // The time that has passed since the start of the exercise, in seconds.
private long startingTime = 0L;
private Paint backgroundColor = new Paint();
/**
* Constants for the preview path projection.
*/
private final float FOV = 80.0f; // The field of view of the preview path
private final float Z_NEAR = 0.1f; // The near clipping plane
private final float Z_FAR = 1000.0f; // The far clipping plane
private Vector3f objectPosition = new Vector3f(0.0f, 0.0f, -4.0f); // The position of the camera
private Vector2f screenDimensions = new Vector2f(); // Width and height dimensions of the screen
private Vector2f rotation = new Vector2f(); // Rotation vector (yaw, pitch)
public PersonalMotionPreviewElement(Context context, AttributeSet attrs) {
super(context, attrs);
this.referencePaint = new Paint();
this.referencePaint.setColor(0xFFFF0000); // Red
this.referencePaint.setStyle(Paint.Style.STROKE);
this.referencePaint.setStyle(Paint.Style.FILL);
this.referencePaint.setStrokeWidth(5.0f);
this.referencePaint.setAntiAlias(true);
this.performingPaint = new Paint();
this.performingPaint.setColor(0xFF0000FF); // Blue
this.performingPaint.setStyle(Paint.Style.STROKE);
this.performingPaint.setStrokeWidth(5.0f);
this.textPaint = new Paint();
this.textPaint.setColor(-1);
this.textPaint.setStyle(Paint.Style.FILL);
this.textPaint.setTextSize(50.0f);
}
/**
* Method for updating the stickman gestures.
*
* This method will update the stickman gestures based on the current
* motion data that is being processed.
*/
private void updateStickmanGestures() {
// Reset previous path
stickmanPath.reset();
// TODO: Define all arm segments:
// - Upper left and right arm
// - Lower left and right arm
// - Upper left and right leg
// - Lower left and right leg
// Update all segments based on the perceived motion data.
PathSegment upperLeftArm = new PathSegment(
new Vector3f(),
new Vector3f()
);
PathSegment[] bodySegments = new PathSegment[] {
new PathSegment(new Vector3f(0.0f, -.5f, -.5f), new Vector3f(0, 0, 0)), // Left leg
new PathSegment(new Vector3f(0.0f, -.5f, .5f), new Vector3f(0, 0, 0)), // Right leg
new PathSegment(new Vector3f(0.0f, .5f, 0.0f), new Vector3f(0, 0, 0)), // Body
new PathSegment(new Vector3f(-.25f, .25f, 0f), new Vector3f(0, 0, 0)), // Left arm
new PathSegment(new Vector3f(.25f, .25f, 0f), new Vector3f(0, 0, 0)) // Right arm
};
// TODO: Generate new path for stickman
// Target paint is the filling of the target path.
this.targetPaint.setColor(-1);
this.targetPaint.setStyle(Paint.Style.STROKE);
this.targetPaint.setStrokeWidth(5.0f);
this.targetPaint.setAntiAlias(true);
}
/**
@@ -127,31 +78,41 @@ public class PersonalMotionPreviewElement extends View {
*/
public void initialize(Exercise exercise) {
Log.i("PersonalMotionPreviewElement", "Creating new PersonalMotionPreviewElement.");
this.backgroundColor = new Paint();
this.backgroundColor.setColor(0xFF000000); // Black
this.screenDimensions.x = this.getWidth();
this.screenDimensions.y = this.getHeight();
this.performingPath = new Path();
this.referencePath = new Path();
this.actualPath = new Path();
this.targetPath = new Path();
this.startingTime = System.nanoTime(); // Set the last time to the current time
this.exerciseProgress = 0.0d;
this.exercise = exercise;
this.path = exercise.getPath();
this.motionProcessor = new MotionProcessor();
this.motionProcessor.startListening();
this.motionProcessor.setMotionDataEventHandler((processed, preprocessed, sampleIndex, sampleRate, deviceId) -> {
// TODO: Implement the calculation of the `performingPath` based on the motion data
});
this.paths = exercise.getPath();
}
/**
* Function for providing a QiContext to the PersonalMotionPreviewElement.
* This function will be called by the parent activity when the QiContext is available.
* Also say something nice to the user :)
*
* @param context The QiContext to provide.
*/
public void provideQiContext(QiContext context) {
this.qiContext = context;
if ( this.motionProcessor != null )
this.motionProcessor.stopListening();
this.motionProcessor = new MotionProcessor();
this.motionProcessor.startListening();
// Handler that is called every time the motion processor receives new data.
this.motionProcessor.setMotionDataEventHandler((processed, preprocessed, sampleIndex, sampleRate, deviceId) -> {
double progress = this.motionProcessor.getAverageError(this.paths[0], 0);
this.exerciseProgress = Math.min(1, Math.max(0, progress));
this.invalidate();
Log.i("MotionProcessor", "Processed data: " + progress + " (" + preprocessed + ")");
});
saySomethingNice();
}
@@ -163,90 +124,16 @@ public class PersonalMotionPreviewElement extends View {
if (this.qiContext == null)
return;
SpeechGenerator.say(USER_PHRASES[(int) Math.floor(Math.random() * USER_PHRASES.length)], this.qiContext);
}
/**
* Method that calculates the path that will be drawn on the
* canvas. This method will be called every time new motion data is received.
*/
private void calculateDrawingPath(Vector3f transformedVector, MotionData motionData, int sampleIndex, double sampleRate) {
// Recalculate the personal path based on the new motion data
// TODO: Implement
FitnessCycle.say(USER_PHRASES[(int) Math.floor(Math.random() * USER_PHRASES.length)], this.qiContext);
}
/**
* Method for setting the gesture path that will be drawn on the canvas.
*
* @param path The gesture path to draw.
* @param exercise The exercise that the user is currently performing.
*/
public void setGesturePath(GesturePath path) {
this.path = path;
this.referencePath = getDrawablePath(path.getSegments());
}
/**
* Method for projecting a 3D point onto the screen.
* This method converts the 3D point to 2D space using a Model-View-Projection matrix transformation.
*
* @param point The point to cast to the screen.
* @param virtualWidth The width of the virtual screen.
* This is used to normalize the screen coordinates.
* @param virtualHeight The height of the virtual screen.
* @return The transformed vector in screen coordinates ranging from (0, 0) to (virtualWidth, virtualHeight).
*/
private Vector2f projectVertex(Vector3f point, int virtualWidth, int virtualHeight) {
modelViewMatrix
.identity()
.translate(-objectPosition.x, -objectPosition.y, -objectPosition.z)
.rotateX((float) Math.toRadians(rotation.y))
.rotateY((float) Math.toRadians(rotation.x));
// Transform the projection matrix to a perspective projection matrix
// Perspective transformation conserves the depth of the object
projectionMatrix
.identity()
.perspective((float) Math.toRadians(FOV), (float) virtualWidth / virtualHeight, Z_NEAR, Z_FAR);
// Convert world coordinates to screen-space using MVP matrix
Vector4f screenCoordinates = new Vector4f(point, 1.0f)
.mul(this.modelViewMatrix)
.mul(this.projectionMatrix);
// Normalize screen coordinates from (-1, 1) to (0, virtualWidth) and (0, virtualHeight)
float normalizedX = (screenCoordinates.x / screenCoordinates.w + 1.0f) * 0.5f * virtualWidth;
float normalizedY = (1.0f - screenCoordinates.y / screenCoordinates.w) * 0.5f * virtualHeight;
Log.i("VertexProjection", "Projected vertex to screen coordinates: (" + normalizedX + ", " + normalizedY + ").");
return new Vector2f(normalizedX, normalizedY);
}
/**
* Method that converts a sequence of vectors to a Path object.
* This path is a set of bezier curves that will be drawn on the canvas.
*
* @param segments The path segments in the path.
* These segments will be connected by bezier curves, which
* all have unique curvature values.
* @return The generated path object.
*/
private Path getDrawablePath(PathSegment... segments) {
Path calculatedPath = new Path();
// Starting point
Vector2f origin = projectVertex(segments[0].getStart(), getWidth(), getHeight());
calculatedPath.moveTo(origin.x, origin.y);
// Draw the path segments
for (PathSegment segment : segments) {
Vector2f startProjected = projectVertex(segment.getStart(), getWidth(), getHeight());
Vector2f endProjected = projectVertex(segment.getEnd(), getWidth(), getHeight());
calculatedPath.lineTo(startProjected.x, startProjected.y);
calculatedPath.lineTo(endProjected.x, endProjected.y);
}
return calculatedPath;
public void setExercise(Exercise exercise) {
this.exercise = exercise;
}
@@ -256,14 +143,20 @@ public class PersonalMotionPreviewElement extends View {
this.setBackgroundColor(0xFF000000); // Black
if (this.exercise == null)
return;
// Draw the sport preview canvas
canvas.drawPath(referencePath, referencePaint);
canvas.drawPath(performingPath, performingPaint);
// Draw target circle
float targetRadius = (this.screenDimensions.x + this.screenDimensions.y) / 5.0f;
canvas.drawCircle(this.screenDimensions.x / 2, this.screenDimensions.y / 2, targetRadius, this.targetPaint);
canvas.drawCircle(this.screenDimensions.x / 2, this.screenDimensions.y / 2, (float)(targetRadius * exerciseProgress), this.referencePaint);
referencePaint.setColor(
Color.argb(
255,
(int)(255 * (1.0 - exerciseProgress)),
(int)(255 * exerciseProgress),
0
)
);
timePassed = (System.nanoTime() - startingTime) / 1E9D;
this.rotation.x = (float) (Math.sin(timePassed) * 45);
this.referencePath = getDrawablePath(this.path.getSegments());
this.invalidate(); // Causes a redraw.
}
}

View File

@@ -1,13 +1,15 @@
package com.example.fitbot.sports;
import static com.example.fitbot.sports.Animations.PlayAnimation;
package com.example.fitbot.util;
import android.content.Context;
import android.net.Uri;
import android.os.Handler;
import android.support.v7.app.AppCompatActivity;
import android.util.Log;
import android.widget.VideoView;
import com.aldebaran.qi.sdk.builder.SayBuilder;
import com.aldebaran.qi.sdk.object.locale.Language;
import com.aldebaran.qi.sdk.object.locale.Locale;
import com.aldebaran.qi.sdk.object.locale.Region;
import com.example.fitbot.R;
import com.aldebaran.qi.sdk.QiContext;
import com.aldebaran.qi.sdk.builder.AnimateBuilder;
@@ -19,7 +21,18 @@ import java.util.concurrent.atomic.AtomicInteger;
public class FitnessCycle extends AppCompatActivity {
public static void RobotMovement(String Exercise, int Reps, QiContext qiContext) {
private static final Locale DUTCH_LOCALE = new Locale(Language.DUTCH, Region.NETHERLANDS);
/**
* Function for executing a movement animation a certain number of times
* on the robot
*
* @param Exercise The name of the exercise to perform
* @param Reps The number of repetitions to perform
* @param qiContext The QiContext to use
*/
public static void executeMovement(String Exercise, int Reps, QiContext qiContext) {
AtomicInteger repCount = new AtomicInteger(0);
Animation animation = AnimationBuilder.with(qiContext)
@@ -44,6 +57,38 @@ public class FitnessCycle extends AppCompatActivity {
}
}
/**
 * Makes the robot say the given phrase using the default Dutch locale
 * (DUTCH_LOCALE). Delegates to the locale-taking overload of say.
 *
 * @param phrase The phrase to make the robot say
 * @param ctx    The QiContext to use
 */
public static void say(String phrase, QiContext ctx)
{
say(phrase, ctx, DUTCH_LOCALE);
}
/**
 * Makes the robot say the given phrase using a specific locale.
 * Builds a Say action with the Qi SDK and runs it.
 *
 * @param phrase The phrase to make the robot say
 * @param ctx    The QiContext to use
 * @param locale The locale to use
 */
public static void say(String phrase, QiContext ctx, Locale locale)
{
SayBuilder
.with(ctx)
.withLocale(locale)
.withText(phrase)
.build()
.run();
}
/**
* Function for playing a video in a VideoView
*
* @param videoView The VideoView to play the video in
* @param context The context to use
*/
public static void playVideo(VideoView videoView, Context context) {
// Set up the video player
if (videoView != null) {

View File

@@ -54,9 +54,11 @@ public class GesturePath {
if ( segments.length == 1)
return segments[0];
return Arrays
.stream(segments)
.reduce(segments[0], (a, b) -> PathSegment.closer(a, b, reference));
PathSegment closest = segments[0];
for ( int i = 1; i < segments.length; i++)
closest = PathSegment.closer(closest, segments[i], reference);
return closest;
}
/**

View File

@@ -3,8 +3,10 @@ package com.example.fitbot.util.processing;
import android.util.Log;
import com.example.fitbot.util.path.GesturePath;
import com.example.fitbot.util.server.IWebSocketHandler;
import com.example.fitbot.util.server.WebSocket;
import com.example.fitbot.util.server.WebServer;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import com.google.gson.JsonParser;
import org.jetbrains.annotations.NotNull;
import org.joml.Vector3f;
@@ -17,14 +19,16 @@ public class MotionProcessor {
public static final String DELIMITER = ";";
private final List<MotionData> preprocessedData = new ArrayList<>(); // Preprocessed motion data
private final List<Vector3f> relativePath = new ArrayList<>(); // Relative path of the motion data
private final List<Vector3f> relativeLeftPath = new ArrayList<>(); // Relative path of the left motion data
private final List<Vector3f> relativeRightPath = new ArrayList<>(); // Relative path of the motion data
private Vector3f ZERO = new Vector3f(0, 0, 0);
private float sampleRate = 1.0F; // samples/second
private IMotionDataConsumer motionDataConsumer = (p1, p2, p3, p4, p5) -> {};
private GesturePath path;
private WebSocket socket;
private final float sampleRate = 10.0F; // samples/second
private IMotionDataConsumer motionDataConsumer = (p1, p2, p3, p4, p5) -> { };
private WebServer server;
public MotionProcessor() {}
@@ -37,20 +41,14 @@ public class MotionProcessor {
*/
public void startListening() {
// Create socket server
this.socket = WebSocket.createServer();
this.server = WebServer.createServer();
Log.i("MotionProcessor", "Listening for incoming connections.");
// Check if the socket
if (socket != null) {
if (server != null) {
// Update event handler to match our functionality.
socket.setEventHandler(new IWebSocketHandler() {
@Override
public void onMessageReceived(WebSocket.Message message, WebSocket.MessageReply replier) {
parsePacket(message.message);
}
});
socket.startListening();
server.setEventHandler(this::parsePacket);
}
}
@@ -60,44 +58,55 @@ public class MotionProcessor {
* the WebSocket server.
*/
public void stopListening() {
if (socket != null) {
socket.stop();
if (server != null) {
server.stop();
}
}
/**
* Function for parsing arbitrary packet data.
*
* @param data The data to parse.
*/
public void parsePacket(@NotNull String data) {
// If the message starts with 'data', it's a data packet.
if ( data.startsWith("data")) {
Log.i("MotionProcessor", "Received data packet: " + data.split(" ")[1]);
MotionData parsedData = MotionData.decode(data.split(" ")[1]);
if (parsedData != null) {
addMotionData(parsedData);
}
// Otherwise check if it starts with 'calibrate', this is the ZERO point.
} else if ( data.startsWith("zero")) { // message to calibrate device
String[] vectorData = data.split(" ")[1].split(DELIMITER);
ZERO = new Vector3f(
Float.parseFloat(vectorData[0]),
Float.parseFloat(vectorData[1]),
Float.parseFloat(vectorData[2])
);
Log.i("MotionProcessor", "Device calibrated at " + ZERO.toString());
} else if ( data.startsWith("sampleRate")) {
this.sampleRate = Float.parseFloat(data.split(" ")[1]);
}
}
/**
* Function for setting the gesture path of the processor.
*
* @param path The path to set.
*/
public void setGesturePath(GesturePath path) {
this.path = path;
try {
JsonElement json = JsonParser.parseString(data);
if (!json.isJsonObject())
return;
JsonObject object = json.getAsJsonObject();
String[] required = {
"rotationX", "rotationY", "rotationZ",
"accelerationX", "accelerationY", "accelerationZ",
"type",
"deviceId"
};
// Ensure all properties are present in the received JSON object
for (String s : required) {
if (!object.has(s))
return;
}
// Parse the data
Vector3f rotation = new Vector3f(object.get("rotationX").getAsFloat(), object.get("rotationY").getAsFloat(), object.get("rotationZ").getAsFloat());
Vector3f acceleration = new Vector3f(object.get("accelerationX").getAsFloat(), object.get("accelerationY").getAsFloat(), object.get("accelerationZ").getAsFloat());
int deviceId = object.get("deviceId").getAsInt();
String type = object.get("type").getAsString();
MotionData motionData = new MotionData(rotation, acceleration, deviceId);
if (type.equals("calibrate")) {
ZERO = getRelativeVector(motionData);
return;
}
addMotionData(motionData);
} catch (Exception e) {
// Don't do anything ... just ignore the exception
}
}
/**
@@ -106,29 +115,35 @@ public class MotionProcessor {
* @param data The motion data to add.
*/
public void addMotionData(MotionData data) {
preprocessedData.add(data);
Vector3f previous = this.relativePath.isEmpty() ? ZERO : this.relativePath.get(this.relativePath.size() - 1);
List<Vector3f> target;
if (data.sensorId == 0)
target = relativeLeftPath;
else target = relativeRightPath;
Vector3f previous = target.isEmpty() ? ZERO : target.get(target.size() - 1);
Vector3f relativeVector = getRelativeVector(data).add(previous);
this.relativePath.add(relativeVector);
motionDataConsumer.accept(relativeVector, data, this.relativePath.size(), this.sampleRate, data.sensorId);
target.add(relativeVector);
motionDataConsumer.accept(relativeVector, data, target.size(), this.sampleRate, data.sensorId);
}
/**
* Function for updating the relative path.
*
* @param relativePath The new relative path.
* @param relativeRightPath The new relative path.
*/
public void setRelativePath(List<Vector3f> relativePath) {
this.relativePath.clear();
this.relativePath.addAll(relativePath);
public void setRelativePaths(List<Vector3f> relativeLeftPath, List<Vector3f> relativeRightPath) {
this.relativeRightPath.clear();
this.relativeLeftPath.clear();
this.relativeLeftPath.addAll(relativeLeftPath);
this.relativeRightPath.addAll(relativeRightPath);
}
/**
* Function for setting the motion data receiver.
*
* @param consumer The consumer to set.
*/
public void setMotionDataEventHandler(IMotionDataConsumer consumer) {
if ( consumer != null)
if (consumer != null)
this.motionDataConsumer = consumer;
}
@@ -163,48 +178,21 @@ public class MotionProcessor {
*/
public List<Double> getErrors(GesturePath referencePath) {
// Return the errors of the relative path compared to the reference path.
return relativePath
.stream()
.map(referencePath::getError)
.collect(Collectors.toList());
}
/**
* Function for getting the error offsets of the motion data compared to the
* reference path.
*
* @return A list of error offsets of the motion data compared to the reference path.
* If no path is set, an empty list will be returned.
*/
public List<Double> getErrors() {
if ( path == null)
return new ArrayList<>();
return getErrors(path);
List<Double> errors = new ArrayList<>();
for (Vector3f vector : relativeRightPath) {
errors.add(referencePath.getError(vector));
}
return errors;
}
/**
* Function for getting the error of the motion data compared to the reference path.
*
* @param path The path to compare the motion data to.
* @param path The path to compare the motion data to.
* @param referencePoint The reference point to compare the motion data to.
* @return The error of the motion data compared to the reference path.
*/
public double getError(GesturePath path, Vector3f referencePoint)
{
return path.getError(referencePoint);
}
/**
* Function for getting the error of the provided vector and the set path.
* If no path is set, the error will be 0.
*
* @param referencePoint The reference point to compare the path data to.
* @return The error of the motion data compared to the reference path.
*/
public double getError(Vector3f referencePoint) {
if ( path == null)
return 0;
public double getError(GesturePath path, Vector3f referencePoint) {
return path.getError(referencePoint);
}
@@ -215,24 +203,12 @@ public class MotionProcessor {
* @param referencePath The reference path to compare the motion data to.
* @return The average error of the motion data compared to the reference path.
*/
public double getAverageError(GesturePath referencePath) {
return getErrors(referencePath)
.stream()
.mapToDouble(Double::doubleValue)
.average()
.orElse(0.0D);
}
/**
* Function for calculating the average error of the motion data
* compared to the reference path.
*
* @return The average error of the motion data compared to the reference path.
*/
public double getAverageError() {
if ( path == null)
return 0;
return getAverageError(path);
public double getAverageError(GesturePath referencePath, int sensorId) {
double error = 0;
for (Double e : getErrors(referencePath)) {
error += e;
}
return error / Math.max(1, (sensorId == 0 ? relativeLeftPath : relativeRightPath).size());
}
/**
@@ -241,8 +217,7 @@ public class MotionProcessor {
* @param referencePath The reference path to compare the motion data to.
*/
public void logStatistics(GesturePath referencePath) {
Log.i("MotionProcessor", "Average path error: " + getAverageError(referencePath));
Log.i("MotionProcessor", "Path length: " + relativePath.size());
Log.i("MotionProcessor", "Path length: " + relativeRightPath.size());
Log.i("MotionProcessor", "Sample rate: " + sampleRate);
Log.i("MotionProcessor", "Calibration point: " + ZERO.toString());
}

View File

@@ -0,0 +1,11 @@
package com.example.fitbot.util.server;
import java.net.Socket;
/**
 * Callback interface for handling requests received by the WebServer.
 * (Despite the package name, this handler is invoked for plain HTTP
 * requests, not WebSocket frames.)
 */
public interface IWebServerHandler {
// Invoked with the body of an incoming HTTP request.
void onReceive(String body);
}

View File

@@ -1,18 +0,0 @@
package com.example.fitbot.util.server;
import java.net.Socket;
/**
 * Interface for handling WebSocket events.
 * All methods have no-op defaults, so implementers override only the
 * events they care about.
 */
public interface IWebSocketHandler {
// Function for handling the connection of the WebSocket.
default void onConnected(Socket socket) {}
// Called when a client socket disconnects.
default void onDisconnected(Socket socket) {}
// Called when a decoded message arrives; `replier` can send a response.
default void onMessageReceived(WebSocket.Message message, WebSocket.MessageReply replier) {}
// Called when an error occurs on the given client socket.
default void onError(Socket socket, String error) {}
}

View File

@@ -0,0 +1,134 @@
package com.example.fitbot.util.server;
import android.util.Log;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.net.InetSocketAddress;
import java.net.ServerSocket;
import java.net.Socket;
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.function.Consumer;
public class WebServer implements Runnable {

    /** Default TCP port the server binds to when none is specified. */
    private static final int DEFAULT_PORT = 3445;

    private ServerSocket serverSocket;

    // No-op handler by default so `run` never needs a null check.
    protected IWebServerHandler eventHandler = (input) -> {};

    private Thread thread;

    // Set by stop() so the accept loop terminates cleanly.
    private final AtomicBoolean forceClose = new AtomicBoolean(false);

    /**
     * Private constructor; instances are created through {@link #createServer()}.
     */
    private WebServer() {
    }

    /**
     * Creates a new HTTP server listening on the default port (3445)
     * and starts its accept loop on a background thread.
     *
     * @return The running server, or null if binding the socket failed.
     */
    public static WebServer createServer() {
        return createServer(DEFAULT_PORT);
    }

    /**
     * Creates a new HTTP server listening on the given port and starts
     * its accept loop on a background thread.
     *
     * @param port The TCP port to bind the server socket to.
     * @return The running server, or null if binding the socket failed.
     */
    public static WebServer createServer(int port) {
        try {
            WebServer server = new WebServer();
            server.serverSocket = new ServerSocket();
            server.serverSocket.bind(new InetSocketAddress(port));
            server.serverSocket.setSoTimeout(0); // 0 = block indefinitely in accept().
            Log.i("WebServer", "Server created: " + server.serverSocket.getLocalSocketAddress() + ", " + server.serverSocket.getLocalPort());
            server.thread = new Thread(server);
            server.thread.start();
            return server;
        } catch (IOException error) {
            String cause = error.getMessage() == null ? "Unknown reason" : error.getMessage();
            Log.e("WebServer", cause);
            return null;
        }
    }

    /**
     * Accept loop. Handles one connection at a time: reads the full
     * request, then forwards the request body to the event handler.
     */
    @Override
    public void run() {
        // Listen for new connections until the socket closes or stop() is called.
        while (this.isConnected() && !this.forceClose.get()) {
            // try-with-resources guarantees the reader, stream and socket
            // are closed even if the handler throws.
            try (Socket newSocket = this.serverSocket.accept();
                 BufferedReader reader = new BufferedReader(new InputStreamReader(newSocket.getInputStream()))) {
                // Read the incoming data until the client closes its side.
                StringBuilder builder = new StringBuilder();
                String line;
                while ((line = reader.readLine()) != null)
                    builder.append(line).append("\n");
                // An HTTP request separates headers from body with a blank line;
                // after readLine() normalization that boundary is "\n\n".
                String[] data = builder.toString().split("\n\n");
                if (data.length > 1) { // A body is present.
                    this.eventHandler.onReceive(data[1]);
                }
            } catch (IOException error) {
                String reason = error.getMessage() == null ? "Unknown reason" : error.getMessage();
                Log.e("WebServerConnectionHandler", "Error listening to Socket connections: " + reason);
                break;
            }
        }
    }

    /**
     * Stops the server: marks the accept loop for termination, then closes
     * the listening socket (which also unblocks a pending accept()).
     */
    public void stop() {
        // Set the flag first so the loop exits even if close() throws.
        this.forceClose.set(true);
        try {
            this.serverSocket.close();
        } catch (IOException error) {
            String cause = error.getMessage() == null ? "Unknown reason" : error.getMessage();
            Log.e("WebServer", cause);
        }
    }

    /**
     * Sets the event handler for this server.
     *
     * @param handler The handler invoked with the body of every
     *                incoming HTTP request (see IWebServerHandler.onReceive).
     */
    public void setEventHandler(IWebServerHandler handler) {
        this.eventHandler = handler;
    }

    /**
     * Method for getting the ServerSocket connection
     *
     * @return The ServerSocket connection.
     */
    public ServerSocket getSocket() {
        return this.serverSocket;
    }

    /**
     * Method for checking whether this server socket is still open.
     *
     * @return True while the listening socket has not been closed.
     */
    public boolean isConnected() {
        return !this.serverSocket.isClosed();
    }
}

View File

@@ -1,150 +0,0 @@
package com.example.fitbot.util.server;
import android.support.annotation.Nullable;
import android.util.Log;
import java.io.IOException;
import java.net.ServerSocket;
import java.net.Socket;
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;
public class WebSocket {
// The listening server socket; assigned in createServer().
private ServerSocket serverSocket;
// Accept-loop worker created by startListening().
private WebSocketConnectionHandler connectionHandler;
// NOTE(review): `clients` is never referenced inside this class — confirm it is needed.
private final Set<Socket> clients = Collections.synchronizedSet(new HashSet<>());
protected IWebSocketHandler eventHandler = new IWebSocketHandler() {}; // NO-OP event handler.
/**
 * Constructor for creating a new WebSocket server.
 */
private WebSocket() {}
/**
 * Function for creating a new WebSocket server given the provided port.
 * @return A WebSocket connection, or null if something went wrong.
 */
public static @Nullable WebSocket createServer() {
try {
WebSocket webSocket = new WebSocket();
webSocket.serverSocket = new ServerSocket();
// NOTE(review): getLocalSocketAddress() of an unbound ServerSocket is null,
// so this binds to an ephemeral port rather than a fixed one — verify intent.
webSocket.serverSocket.bind(webSocket.serverSocket.getLocalSocketAddress());
Log.i("WebSocket -- Creating new WebSocket server", "Server created: " + webSocket.serverSocket.getLocalSocketAddress() + ", " + webSocket.serverSocket.getLocalPort());
return webSocket;
} catch (IOException error)
{
String cause = error.getMessage() == null ? "Unknown reason" : error.getMessage();
Log.e("WebSocket -- Creating new WebSocket server", cause);
return null;
}
}
/**
 * Method for listening for incoming connections.
 * Spawns a WebSocketConnectionHandler thread that accepts clients.
 */
public void startListening() {
this.connectionHandler = new WebSocketConnectionHandler(this);
this.connectionHandler.listen();
}
/**
 * Method for stopping the WebSocket server.
 * Closes the server socket and stops the connection handler.
 */
public void stop() {
try {
this.serverSocket.close();
this.connectionHandler.stop();
} catch (IOException error) {
String cause = error.getMessage() == null ? "Unknown reason" : error.getMessage();
Log.e("WebSocket -- Closing server connection", cause);
}
}
/**
 * Method for setting the event handler for this WebSocket server.
 * @param handler The handler to use. This handler will parse all events
 *                that occur in this WebSocket connection. The events are the followed:
 *                - onMessageReceived(Socket, String)
 *                - onConnected(Socket)
 *                - onDisconnected(Socket)
 *                - onError(Socket, String)
 */
public void setEventHandler(IWebSocketHandler handler) {
this.eventHandler = handler;
}
/**
 * Method for getting the ServerSocket connection
 * @return The ServerSocket connection.
 */
public ServerSocket getSocket() {
return this.serverSocket;
}
/**
 * Method for checking whether this WebSocket connection is connected.
 * @return The connection status of the WebSocket.
 */
public boolean isConnected() {
return !this.serverSocket.isClosed();
}
/**
 * Class representing a message received from a WebSocket connection.
 */
public static class Message {
// Enumerable representing message type (opcode).
public enum Opcode {
CONTINUING((byte) 0x0),
TEXT((byte) 0x1),
BINARY((byte) 0x2),
RES0((byte) 0x3), RES1((byte) 0x4), RES2((byte) 0x5), RES3((byte) 0x6), RES4((byte) 0x7),
CLOSE_CONNECTION((byte) 0x8),
PING((byte) 0x9),
PONG((byte) 0xA),
RES5((byte) 0xB), RES6((byte) 0xC), RES7((byte) 0xD), RES8((byte) 0xE), RES9((byte) 0xF);
// Raw 4-bit opcode value for this message type.
byte opcode;
Opcode(final byte opcode) {
this.opcode = opcode;
}
/**
 * Method for decoding the opcode of a message.
 * Masks to the low nibble and indexes into the enum constants,
 * which are declared in opcode order (0x0 .. 0xF).
 * @param opcode The opcode to decode.
 * @return The message type.
 */
public static Opcode decode(byte opcode) {
return Opcode.values()[opcode & 0xF];
}
// Returns the opcode of this message type.
public byte getOpcode() { return this.opcode; }
}
// The decoded message payload.
public String message;
// The client connection the message came from.
public WebSocketConnection connection;
/**
 * Constructor for a WebSocket message.
 * @param message The message that was sent
 * @param connection The connection where the message came from.
 */
public Message(WebSocketConnection connection, String message) {
this.message = message;
this.connection = connection;
}
}
/**
 * Interface for a message reply.
 * This can be used for when a message has been received from a client
 * to reply back to the client.
 */
public interface MessageReply {
void reply(String message);
}
}

View File

@@ -1,35 +0,0 @@
package com.example.fitbot.util.server;
import java.net.Socket;
public class WebSocketConnection {

// The server this client connection belongs to.
private final WebSocket parentServer;
// The underlying client socket.
private final Socket clientSocket;

/**
 * Creates a wrapper around a client socket accepted by a WebSocket server.
 *
 * @param connection The server connection
 * @param socket The client socket
 */
public WebSocketConnection(WebSocket connection, Socket socket) {
this.parentServer = connection;
this.clientSocket = socket;
}

/**
 * Getter method for retrieving the WebSocket server this connection
 * originated from.
 *
 * @return The WebSocket instance.
 */
public WebSocket getOrigin() {
return parentServer;
}

/**
 * Getter method for retrieving the underlying client Socket.
 *
 * @return The Socket connection.
 */
public Socket getSocket() {
return clientSocket;
}
}

View File

@@ -1,217 +0,0 @@
package com.example.fitbot.util.server;
import android.util.Log;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.Socket;
import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.ArrayList;
import java.util.Base64;
import java.util.Collections;
import java.util.List;
import java.util.Scanner;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
public class WebSocketConnectionHandler implements Runnable {
// The server whose ServerSocket this handler accepts connections on.
private final WebSocket theSocket;
// All client sockets accepted so far; closed in stop().
private List<Socket> clients = Collections.synchronizedList(new ArrayList<>());
// Worker thread created by listen(); null until listen() is called.
private Thread thread;
private boolean forceClose = false;
/**
 * Constructor for WebSocketConnectionHandler.
 * This class handles all new incoming Socket connections.
 *
 * @param webSocket The socket to check for new connections.
 */
protected WebSocketConnectionHandler(WebSocket webSocket) {
this.theSocket = webSocket;
}
@Override
public void run() {
// Listen for new connections until the socket closes.
while (theSocket.isConnected()) {
try {
// Find a new connection
Socket newSocket = this.theSocket.getSocket().accept();
this.theSocket.eventHandler.onConnected(newSocket);
clients.add(newSocket);
InputStream streamIn = newSocket.getInputStream();
OutputStream streamOut = newSocket.getOutputStream();
// Check if the connection was successfully upgraded to WebSocket
if (upgradeConnection(streamIn, streamOut)) {
applyMessageDecoder(streamIn);
}
} catch (IOException error) {
String reason = error.getMessage() == null ? "Unknown reason" : error.getMessage();
Log.e("WebSocketConnectionHandler", "Error listening to Socket connections: " + reason);
break;
}
}
}
/**
 * Method for upgrading a HTTP connection to a WebSocket connection.
 * This checks whether the client sent a GET header and sends back
 * the required headers to upgrade the connection.
 * Computes Sec-WebSocket-Accept as base64(SHA-1(key + RFC 6455 GUID)).
 * @param streamIn The InputStream of the client socket connection.
 * @param streamOut The OutputStream of the client socket connection.
 * @return Whether or not the connection was successfully upgraded.
 */
private boolean upgradeConnection(InputStream streamIn, OutputStream streamOut) {
Scanner scanner = new Scanner(streamIn, "UTF-8");
// Read up to the end of the HTTP header block (\r\n\r\n).
String data = scanner.useDelimiter("\\r\\n\\r\\n").next();
Matcher header = Pattern.compile("^GET").matcher(data);
// Check if the header contains the GET keyword
// If this is the case, upgrade the HTTP connection to WebSocket.
if (!header.find())
return false;
// NOTE(review): if the Sec-WebSocket-Key header is absent, match.find() returns
// false and match.group(1) below throws — confirm malformed requests are handled upstream.
Matcher match = Pattern.compile("Sec-WebSocket-Key: (.*)").matcher(data);
match.find(); // Get next match
try {
String SECAccept = Base64.getEncoder().encodeToString(
MessageDigest.getInstance("SHA-1").digest((match.group(1) + "258EAFA5-E914-47DA-95CA-C5AB0DC85B11").getBytes(StandardCharsets.UTF_8)));
byte[] response = (
"HTTP/1.1 101 Switching Protocols\r\n" +
"Connection: Upgrade\r\n" +
"Upgrade: websocket\r\n" +
"Sec-WebSocket-Accept: " +
SECAccept + "\r\n\r\n").getBytes(StandardCharsets.UTF_8);
streamOut.write(response, 0, response.length);
} catch (IOException | NoSuchAlgorithmException error) {
Log.e("WebSocketConnectionHandler", "Failed upgrading HTTP to WebSocket connection" + error.getMessage());
return false;
}
return true;
}
/**
 * Method for applying a message decoder for whenever a socket receives data.
 * This method attemps to decode a message received from a WebSocket connection.
 * This message is in the
 * NOTE(review): this is an unimplemented stub — upgraded connections are
 * currently never read from.
 * @param streamIn The message stream to decode.
 */
private void applyMessageDecoder(InputStream streamIn) {
// TODO: Implement
}
/**
 * Method for decoding an encoded WebSocket message
 * NOTE(review): the FIN flag is read from bit 0 and the opcode from the high
 * nibble of byte 0, which is the reverse of the usual RFC 6455 layout
 * (FIN = 0x80, opcode = low nibble) — confirm against the client encoder.
 * @param bytes The message to decode, in UTF-8 format.
 * @return The decoded message.
 * @throws IllegalArgumentException When the `frame` content is in an incorrect format.
 */
public static String decodeWebSocketMessage(byte[] bytes) {
// Check if the packet isn't corrupted
if (bytes.length <= 2 || (bytes[0] & 0b1110) != 0)
throw new IllegalArgumentException("Attempted to decode corrupted WebSocket frame data");
WebSocket.Message.Opcode opcode = WebSocket.Message.Opcode.decode((byte) (bytes[0] & 0b11110000));
byte payloadLength = (byte) (bytes[1] & 0b01111111); // Payload size (7 bits)
boolean fin = (bytes[0] & 0b1) != 0; // Whether this is the whole message
boolean masked = (bytes[1] & 0b10000000) != 0; // Whether the 9th bit is masked
long extendedPayloadLength = 0;
int byteOffset = 2;
// Check whether the payload length is 16-bit
if (payloadLength == 126) {
// 16-bit extended payload length (byte 2 and 3)
extendedPayloadLength = ((bytes[2] & 0xFF) << 8) | (bytes[3] & 0xFF);
byteOffset += 2;
// Check whether payload length is 64-bit
} else if (payloadLength == 127) {
// 64-bit extended payload length
for (int i = 0; i < 8; i++)
extendedPayloadLength |= (long) (bytes[2 + i] & 0xFF) << ((7 - i) * 8);
byteOffset += 8;
} else {
extendedPayloadLength = payloadLength;
}
byte[] maskingKey = null;
byte[] payloadData = new byte[(int) extendedPayloadLength];
// Check if the MASK bit was set, if so, copy the key to the `maskingKey` array.
if (masked) {
maskingKey = new byte[4];
System.arraycopy(bytes, byteOffset, maskingKey, 0, 4); // move mask bytes
byteOffset += 4;
}
// Copy payload bytes into `payloadData` array.
System.arraycopy(bytes, byteOffset, payloadData, 0, payloadData.length);
// If mask is present, decode the payload data with the mask.
if (masked)
for (int i = 0; i < payloadData.length; i++)
payloadData[i] ^= maskingKey[i % 4];
// Convert payload data to string
return new String(payloadData, StandardCharsets.UTF_8);
}
/**
 * Method for checking whether the connection handler is actively listening.
 * NOTE(review): `thread` is only assigned in listen(); calling this before
 * listen() dereferences null — confirm call order.
 * @return Whether it's listening.
 */
public boolean isActive() {
return this.thread.isAlive();
}
/**
 * Method for listening to all new incoming socket connections.
 * Starts the accept loop (run()) on a new background thread.
 */
public void listen() {
this.thread = new Thread(this);
this.thread.start();
Log.i("WebSocketConnectionHandler", "Listening started.");
}
/**
 * Method for stopping the connection handler.
 * Closes the server socket, interrupts the worker thread, and closes
 * every accepted client socket.
 */
public void stop() {
// Close the socket connection if not already closed
if (!this.theSocket.getSocket().isClosed()) {
try {
this.theSocket.getSocket().close();
} catch (IOException error) {
Log.e("WebSocketConnectionHandler", "Failed to close the socket connection: " + error.getMessage());
}
}
// Interrupt the thread
this.thread.interrupt();
// Close all connections
this.clients.forEach(client -> {
try {
client.close();
} catch (IOException error) {
Log.e("WebSocketConnectionHandler", "Failed to close client: " + error.getMessage());
}
});
this.clients.clear();
Log.i("WebSocketConnectionHandler", "Listening stopped.");
}
}

View File

@@ -1,36 +0,0 @@
package com.example.fitbot;
import static org.junit.Assert.assertEquals;
import com.example.fitbot.util.server.WebSocketConnectionHandler;
import org.junit.Test;
/**
* Created on 07/05/2024 at 18:27
* by Luca Warmenhoven.
*/
public class WebSocketMessageParsingTest {

/**
 * Verifies that a masked, single-frame WebSocket text message decodes
 * back to its original payload ("abcdef").
 *
 * @throws Exception if decoding fails; letting it propagate makes JUnit
 *                   report the real cause instead of a misleading
 *                   assertEquals("abcdef", "") failure from a swallowed
 *                   exception.
 */
@Test
public void parseWebSocketMessage() throws Exception {
String reference = "abcdef";
// Frame bytes: 129 = FIN + text opcode, 134 = MASK bit + payload length 6,
// then the 4 masking-key bytes followed by the 6 masked payload bytes.
final byte[] encoded = {
(byte) 129, (byte) 134, (byte) 167,
(byte) 225, (byte) 225, (byte) 210,
(byte) 198, (byte) 131, (byte) 130,
(byte) 182, (byte) 194, (byte) 135
};
String decoded = WebSocketConnectionHandler.decodeWebSocketMessage(encoded);
assertEquals(reference, decoded);
}
}

View File

@@ -8,18 +8,19 @@
*/
function handleIncoming(request, response, app, pool)
{
let query = 'SELECT * FROM Exercise WHERE ExerciseID = ?';
let parameters = [];
if (!request.hasOwnProperty('uid') || typeof request.uid !== 'number')
{
response
.status(400)
.send(JSON.stringify({error: 'Missing valid UID in request'}));
return;
}
query = 'SELECT * FROM Exercise';
} else parameters.push(request.uid);
// Acquire database connection
pool.getConnection()
.then(conn => {
conn.query('SELECT * FROM Exercise WHERE ExerciseID = ?', [request.uid])
conn.query(query, parameters)
.then(rows => {
if (rows.length === 0)
{
@@ -42,19 +43,21 @@ function handleIncoming(request, response, app, pool)
}));
}
})
.catch(_ => {
.catch(error => {
console.log(error);
response
.status(500)
.send(JSON.stringify({error: 'Internal server error'}));
.send(JSON.stringify({error: 'Internal server error (Querying)'}));
})
.finally(() => {
conn.end();
});
})
.catch(_ => {
.catch(error => {
console.log(error);
response
.status(500)
.send(JSON.stringify({error: 'Internal server error'}));
.send(JSON.stringify({error: 'Internal server error (Connection)'}));
});
}

View File

@@ -19,7 +19,7 @@ const databaseCredentials = {
const pool = mariadb.createPool(databaseCredentials);
// Register incoming HTTP request handlers
require('incoming_request_handlers')(app, pool);
require('./incoming_request_handlers')(app, pool);
// Start server
app.listen(serverPort, () => {

View File

@@ -0,0 +1,4 @@
# Issues with hardware
## Issues with libraries
The websocket library doesn't work well on the ESP8266 D1 Mini: it lags out the entire ESP and makes it unresponsive.

View File

@@ -94,12 +94,6 @@ In blok 3 heb ik veel kunnen leren over het samenwerken in een team waar niveau
![Beoordeling Blok 3](../assets/beoordelingBlok3.png)
## Sterke en zwakke punten (Professional skills)
## Smart leerdoelen
## Conclusie
## Professional skills
https://www.hva.nl/binaries/content/assets/serviceplein-a-z/media-creatie-en-informatie/hbo-ict/competenties/professional-skills-hbo-ict-2023-2024.pdf
@@ -109,4 +103,30 @@ De vier professional skills:
* Toekomstgericht organiseren
* Onderzoekend probleemoplossen
* Persoonlijk leiderschap
* Doelgericht interacteren
* Doelgericht interacteren
STARTT
situatie
taak
actie
resultaat
tijd
toekomst
### Toekomstgericht organiseren
### Onderzoekend probleemoplossen
### Persoonlijk leiderschap
### Doelgericht interacteren
## Smart leerdoelen
## Conclusie

View File

@@ -3,21 +3,19 @@
## Inhoudsopgave
- [Inleiding](#inleiding)
- [Probleemstelling](#probleemstelling)
- [Onderzoeksvraag](#onderzoeksvraag)
* [Deelvragen](#deelvragen)
+ [Hoe ervaren ouderen het om geholpen te worden door robots bij fitness?](#hoe-ervaren-ouderen-het-om-geholpen-te-worden-door-robots-bij-fitness)
+ [Wat kunnen nadelen zijn van het inzetten van robots in de ouderenzorg wat fitness betreft?](#wat-kunnen-nadelen-zijn-van-het-inzetten-van-robots-in-de-ouderenzorg-wat-fitness-betreft)
+ [Wat kunnen voordelen zijn van het inzetten van robots in de ouderenzorg wat fitness betreft?](#wat-kunnen-voordelen-zijn-van-het-inzetten-van-robots-in-de-ouderenzorg-wat-fitness-betreft)
+ [Wat kunnen nadelen zijn van het inzetten van robots in de ouderenzorg wat fitness betreft?](#wat-kunnen-nadelen-zijn-van-het-inzetten-van-robots-in-de-ouderenzorg-wat-fitness-betreft)
+ [Wat is het limiet van de inzet van robots in de zorg voor ouderen wat fitness betreft?](#wat-is-het-limiet-van-de-inzet-van-robots-in-de-zorg-voor-ouderen-wat-fitness-betreft)
- [Conclusie](#conclusie)
- [Bronnen](#bronnen)
## Inleiding
Ouderzorg is in deze tijd een belangrijk onderwerp. Door de vergrijzing van de bevolking neemt de vraag naar zorg toe. Tegelijkertijd is er een tekort aan zorgpersoneel. De zorg die ouderen nodig hebben kan variëren van lichte zorg tot zware zorg. Een manier om ouderen te helpen met zorg is het inzetten van robots. Robots kunnen ouderen helpen met verschillende taken, zoals bijvoorbeeld helpen met fitness. De hulp bij het fitnessen kan ervoor zorgen dat ouderen langer fit blijven en langer zelfstandig kunnen blijven wonen.
Ouderenzorg is in deze tijd een belangrijk onderwerp. Door de vergrijzing van de bevolking neemt de vraag naar zorg toe. Tegelijkertijd is er een tekort aan zorgpersoneel. De zorg die ouderen nodig hebben kan variëren van lichte zorg tot zware zorg. Een manier om ouderen te helpen met zorg is het inzetten van robots. Robots kunnen ouderen helpen met verschillende taken, zoals bijvoorbeeld helpen met fitness.
Het inzetten van robots in de ouderenzorg kan echter ook ethische vragen oproepen. Hoe ervaren ouderen het om geholpen te worden door robots? Wat zijn de nadelen van het inzetten van robots in de ouderenzorg? Wat zijn de voordelen? En wat is het limiet van de inzet van robots in de zorg voor ouderen? In dit onderzoek wordt gekeken naar de ethische aspecten van het inzetten van robots in de ouderenzorg om ouderen te helpen met fitness.
In dit onderzoek wordt gekeken naar de ethische aspecten van het inzetten van robots in de ouderenzorg om ouderen te helpen met fitness. Wat zijn de voordelen van het inzetten van robots in de ouderenzorg? Wat zijn de nadelen? En wat is het limiet van de inzet van robots in de zorg voor ouderen?
## Onderzoeksvraag
@@ -27,55 +25,54 @@ In hoeverre is het etisch verantwoord om robots in te zetten in de ouderenzorg o
### Deelvragen
1. Hoe ervaren ouderen het om geholpen te worden door robots bij fitness?
1. Wat kunnen voordelen zijn van het inzetten van robots in de ouderenzorg wat fitness betreft?
2. Wat kunnen nadelen zijn van het inzetten van robots in de ouderenzorg wat fitness betreft?
3. Wat kunnen voordelen zijn van het inzetten van robots in de ouderenzorg wat fitness betreft?
4. Wat is het limiet van de inzet van robots in de zorg voor ouderen wat fitness betreft?
#### Hoe ervaren ouderen het om geholpen te worden door robots bij fitness?
De ervaring van ouderen die geholpen worden door robots bij fitness is een belangrijk aspect van het inzetten van robots in de ouderenzorg wat fitness betreft. Het is belangrijk dat ouderen zich op hun gemak voelen bij het gebruik van robots. Uit onderzoek blijkt dat ouderen in het algemeen positief staan tegenover het gebruik van robots in de ouderenzorg. `The interviews provide a number of conclusions, such as: the elderly had positive attitudes toward humanoid robots and electronic devices whose shapes resembled things they knew (humans, animals, smartphones, healthcare equipment)` en `The 16 older adults in our study were generally accepting of robots for partner dance-based exercise, tending to perceive it as useful, easy to use, and enjoyable. Notably, participants perceived the robot as being easier to use after dancing with it. These results suggest that older adults are open to partner dancing with a robot to improve their health.`.
Niet alle ouderen zijn even positief over het gebruik van robots in de ouderenzorg. Sommige ouderen vinden het gebruik van robots in de ouderenzorg niet prettig. `However, some participants were not positive about the robot in terms of exercise and health, finding the activity to be too easy, boring, or lacking in physical exertion.`. Hieruit blijkt wel dat het mogelijk zou kunnen zijn voor deze ouderen om gebruik te maken van robots in de ouderenzorg, maar dat er nog wel verbeteringen nodig zijn. Een aantal verbeteringen die genoemd worden zijn `Participants suggested that a robot could actively encourage them to exercise and teach them dances.` en `Future robots for partner dance-based exercise could potentially be more engaging by increasing the complexity and variety of dances.`. Ook wordt er aangegeven dat er voorkeur is voor een robot die `voice-activated, a motion-activated and touch responsive robot with nice voices/sounds.`.
#### Wat kunnen nadelen zijn van het inzetten van robots in de ouderenzorg wat fitness betreft?
3. Wat is het limiet van de inzet van robots in de zorg voor ouderen wat fitness betreft?
#### Wat kunnen voordelen zijn van het inzetten van robots in de ouderenzorg wat fitness betreft?
Het inzetten van robots in de ouderenzorg heeft zo zijn voordelen. Uit onderzoek blijkt dat het mogelijk is om robots ouderen te laten ondersteunen bij het doen van dagelijkse oefeningen. Dit kan helpen om de leefkwaliteit van ouderen te verbeteren. `A good practice is to do physical exercise that preserves cognition while improving persons overall health enhancing their quality of life. In this sense, socially assistive robots could assist older people in their daily physical routines. (A Socially Assistive Robot For Elderly Exercise Promotion, 2019)`
Zo zijn er ook robots die met ouderen samen kunnen lopen. Dit kan helpen met de stimulatie van beweging. Veel ouderen ervaren meer plezier in het lopen met een robot dan alleen. `The results of the analysis of enjoyment (the facial scales and smiling/laughing) suggested that the participants in the experiment experienced more enjoyment from walking with the robot than from walking alone. Moreover, the results of the time and steps analysis revealed no differences in physical burden between walking styles in the experiment, although the effect size suggested the possibility of an influence based on the time. (Nomura et al., 2021)`
De inzet van robots kan op verschillende manieren worden toegepast in de ouderenzorg. Zo kan een robot ouderen helpen met bewegen door middel van dansen. Het is belangrijk dat ouderen plezier hebben in het bewegen en uit onderzoek blijkt dat ouderen het leuk vinden om met een robot te dansen. `Many participants noted that the robot was easy to use, enjoyable, and performed the task well. (Chen et al., 2017)` Het kan mogelijk verwarrend zijn om te dansen maar door de robot vaker te gebruiken wordt het makkelijker. `Notably, participants perceived the robot as being easier to use after dancing with it. (Chen et al., 2017)`
Een andere manier van de inzet van robots in de ouderenzorg is bij het helpen van herstellen. Dit kan gedaan worden door een robot die de herstel bewegingen van de ouderen overneemt. `If movement practice is the dominant stimulus for movement recovery, then robotic actuators may turn out to be technological ornamentation. (Kahn et al., 2006)`
#### Wat kunnen nadelen zijn van het inzetten van robots in de ouderenzorg wat fitness betreft?
Toch kunnen er ook nadelen komen kijken bij de inzet van robots in ouderenzorg. Ouderen raken verward wanneer wordt gevraagd om bewegingen in verschillende richtingen te doen. `Apart from design issues, the experimental results highlight the confusion of physical exercise when different directions are considered. (A Socially Assistive Robot For Elderly Exercise Promotion, 2019)` Zelfs wanneer de lichaamsdelen gekleurd worden weergegeven kunnen ouderen niet altijd de bewegingen goed onderscheiden. `Despite colouring the human limbs in different tones precisely to overcome this issue, most of the implemented deep learning techniques were not able to properly distinguish them. (A Socially Assistive Robot For Elderly Exercise Promotion, 2019)`
Ook bij het dansen met robots komen de nadelen naar boven. Niet elke oudere danst even energiek, hierdoor ervaren sommige ouderen de activiteit als te makkelijk of saai. `However, some participants were not positive about the robot in terms of exercise and health, finding the activity to be too easy, boring, or lacking in physical exertion. (Chen et al., 2017)`
Bij het lopen met een robot kan het zijn dat de robot niet altijd de communicatie met de ouderen stimuleert. Hierdoor kan het zijn dat ouderen minder snel communiceren met de robot. `On the other hand, utterances from the robot did not encourage communication with the participants during walking. Talking while walking is considered a dual task that places a cognitive burden on older persons [18, 19], and these burdens may have inhibited communication between the participants and the robot. (Nomura et al., 2021)` Dit kan maken dat ouderen minder snel geneigd zijn om de robot te gebruiken.
#### Wat is het limiet van de inzet van robots in de zorg voor ouderen wat fitness betreft?
Door de voordelen en nadelen te vergelijken kan er gekeken worden naar het limiet van de inzet van robots in de zorg voor ouderen. Uit onderzoek blijkt dat ouderen positief staan tegenover het gebruik humanoïde van robots. `the elderly had positive attitudes toward humanoid robots and electronic devices whose shapes resembled things they knew (humans, animals, smartphones, healthcare equipment). (Betlej, 2022)` Ook blijkt dat ouderen de voorkeur geven aan een robot die reageert op een stem. `Furthermore, they preferred a voice-activated, a motion-activated and touch responsive robot with nice voices/sounds. (Betlej, 2022)`
Toch blijkt uit onderzoek dat het niet altijd nodig is om complexe robots in te zetten. `The question remains whether complex, and potentially expensive, devices are essential for maximizing the learning and recovery capabilities of the injured CNS, or if less complex—and likely less expensive— machines without actuation [4748] that facilitate optimal forms and amounts of practice will be the most viable solution. (Kahn et al., 2006)` Ook is de effectiviteit van robots niet altijd even duidelijk. `Our conclusion is that robotically finishing a movement for a chronic stroke subject did not add value beyond the concurrent movement practice. (Kahn et al., 2006)`
Het blijkt ook dat de resultaten van onderzoek niet altijd even duidelijk zijn. Er zijn veel factoren die invloed kunnen hebben op de resultaten. Hierdoor is het niet altijd duidelijk of de robot de ouderen heeft geholpen of dat er andere factoren zijn die invloed hebben gehad. `Although the results of the present field experiment showed that robots may help encourage older people to participate in walking exercises, it is difficult to generalize the results because of the use of a single type of robot, the physical and mental states of the participants, the fact that only Japanese participants were analyzed, and the fact that the study was conducted in a single facility. Moreover, it remains unclear whether the robot alone influenced the participants feelings, or whether these were the results of group dynamics involving mutual interaction effects between the robot, staff, participants, and experimenters. In addition, the experiment lacked a part of strict procedures to be conducted such as randomization of the conditions, more objective measures, and gender balance due to several schedules other than our experiment in the facility and avoidance of the participants burden. In this sense, this study remains preliminary. These problems should be investigated in future studies. In particular, the effects of walking with robots on older persons who feel as though physical exercise is bothersome should be examined. (Nomura et al., 2021)`
## Conclusie
Op basis van het onderzoek naar de inzet van robots in de ouderenzorg om ouderen te helpen met fitness, kan worden bepaald dat het ethisch verantwoord is om robots te gebruiken in ouderenzorg. Wel is het belangrijk om rekening te houden met de voordelen voor de ouderen en de mogelijke nadelen die kunnen spelen.
Er zijn veel mogelijke voordelen en nadelen van het gebruik van robots in de ouderenzorg wat fitness betreft. Het is belangrijk om bij deze voor- en nadelen ouderen op de eerste plek te zetten. Overwegingen die genomen worden kunnen het beste in het belang van de ouderen zijn.
Daarnaast is er een limiet aan de effectiviteit van robots, vooral bij hersteloefeningen. Robots kunnen ouderen helpen met het doen van hersteloefeningen maar vaak is de overweging of de robot wel nodig is. Vaak zijn het complexe en dure robots die geen significant verschil maken in de effectiviteit van de oefeningen. Wel kan een robot helpen met het doen van oefeningen die ouderen niet alleen kunnen doen.
Het is belangrijk om in gedachten te houden dat de resultaten van onderzoek niet altijd even duidelijk zijn. Er zijn veel factoren die invloed kunnen hebben op de resultaten. Hierdoor is het belangrijk om in de resultaten van onderzoek rekening te houden met de factoren die invloed kunnen hebben op deze resultaten.
## Bronnen
Bronnen:
A Socially Assistive Robot for Elderly Exercise Promotion. (2019). IEEE Journals & Magazine | IEEE Xplore. https://ieeexplore.ieee.org/document/8731893 Betlej, A. (2022).
Designing Robots for Elderly from the Perspective of Potential End-Users: A Sociological Approach (Vraag 1):
Designing Robots for Elderly from the Perspective of Potential End-Users: A Sociological Approach. International Journal Of Environmental  Research And Public Health/International Journal Of Environmental Research And Public Health, 19(6), 3630. https://doi.org/10.3390/ijerph19063630 Chen, T. L., Bhattacharjee, T., Beer, J. M., Ting, L. H., Hackney, M. E., Rogers, W. A., & Kemp, C. C. (2017).
https://lib.hva.nl/discovery/fulldisplay?docid=cdi_pubmedcentral_primary_oai_pubmedcentral_nih_gov_8948980&context=PC&vid=31UKB_UAM2_INST:HVA&lang=nl&search_scope=DN_and_CI_and_EXT&adaptor=Primo%20Central&tab=Everything&query=any,contains,fitness%20robots%20elderly&offset=0
Older adults acceptance of a robot for partner dance-based exercise. PloS One, 12(10), e0182736. https://doi.org/10.1371/journal.pone.0182736 Kahn, L. E., Lum, P. S., Rymer, W. Z., & Reinkensmeyer, D. J. (2006).
Older adults' acceptance of a robot for partner dance-based exercise (Vraag 1):
Robot-assisted movement training for the stroke-impaired arm: Does it matter what the robot does? Journal Of Rehabilitation Research And Development, 43(5), 619. https://doi.org/10.1682/jrrd.2005.03.0056 Nomura, T., Kanda, T., Yamada, S., & Suzuki, T. (2021).
https://lib.hva.nl/discovery/fulldisplay?docid=cdi_gale_healthsolutions_A510237592&context=PC&vid=31UKB_UAM2_INST:HVA&lang=nl&search_scope=DN_and_CI_and_EXT&adaptor=Primo%20Central&tab=Everything&query=any,contains,fitness%20robots%20elderly&offset=0
A Socially Assistive Robot for Elderly Exercise Promotion (Vraag 2-3):
https://lib.hva.nl/discovery/fulldisplay?docid=cdi_proquest_journals_2455606148&context=PC&vid=31UKB_UAM2_INST:HVA&lang=nl&search_scope=DN_and_CI_and_EXT&adaptor=Primo%20Central&tab=Everything&query=any,contains,fitness%20robots%20elderly&offset=0
Robot-assisted movement training for the stroke-impaired arm: Does it matter what the robot does (Vraag 4)?
https://lib.hva.nl/discovery/fulldisplay?docid=cdi_proquest_miscellaneous_68177256&context=PC&vid=31UKB_UAM2_INST:HVA&lang=nl&search_scope=DN_and_CI_and_EXT&adaptor=Primo%20Central&tab=Everything&query=any,contains,limit%20robot%20fitness&offset=0
The Image Processing Using Soft Robot Technology in Fitness Motion Detection under the Internet of Things (Vraag 4):
https://lib.hva.nl/discovery/fulldisplay?docid=cdi_proquest_journals_2734392799&context=PC&vid=31UKB_UAM2_INST:HVA&lang=nl&search_scope=DN_and_CI_and_EXT&adaptor=Primo%20Central&tab=Everything&query=any,contains,robots%20in%20fitness&offset=0
The effects of assistive walking robots for health care support on older persons: a preliminary field experiment in an elder care facility. Intelligent Service Robotics, 14(1), 2532. https://doi.org/10.1007/s11370-020-00345-4

1
package-lock.json generated
View File

@@ -8,6 +8,7 @@
"name": "muupooviixee66",
"version": "1.0.0",
"dependencies": {
"body-parser": "^1.20.2",
"express": "^4.19.2",
"mariadb": "^3.3.0"
}

View File

@@ -12,6 +12,7 @@
},
"private": true,
"dependencies": {
"body-parser": "^1.20.2",
"express": "^4.19.2",
"mariadb": "^3.3.0"
}