Merge branch 'main' of ssh://gitlab.fdmci.hva.nl/propedeuse-hbo-ict/onderwijs/2023-2024/out-a-se-ti/blok-4/muupooviixee66

This commit is contained in:
SebasKoedam
2024-06-05 13:26:40 +02:00
8 changed files with 148 additions and 271 deletions

View File

@@ -40,6 +40,12 @@ public class ExerciseManager {
public static final float EXERCISE_ERROR_MARGIN = 1.0f;
public static final float EXERCISE_TIME_SCALING_FACTOR = 1.0f;
// Fields representing the statistics of the user
public static int TOTAL_REPETITIONS_REQUIRED = 0;
public static int TOTAL_REPETITIONS_PERFORMED = 0;
public static int TOTAL_EXERCISES_PREFORMED = 0;
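These counters make it easy to summarize a session afterwards. As a hedged illustration (the helper below is an assumption for this write-up, not part of the diff), a completion rate could be derived as:
public static float getRepetitionCompletionRate() {
// Hypothetical helper: fraction of the required repetitions the user actually performed.
if (TOTAL_REPETITIONS_REQUIRED == 0)
return 0.0f; // avoid division by zero before any exercise has started
return (float) TOTAL_REPETITIONS_PERFORMED / TOTAL_REPETITIONS_REQUIRED;
}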
/**
* Function for sending an HTTP request to the server.
*
@@ -60,6 +66,7 @@ public class ExerciseManager {
// Send a body if it is present
if (body != null)
connection.getOutputStream().write(body.getBytes());
InputStream stream = connection.getInputStream();
BufferedReader reader = new BufferedReader(new InputStreamReader(stream));
StringBuilder builder = new StringBuilder();
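For context, a rough sketch of what the request helper around these lines might look like (the method name and parameters are assumptions for illustration, not the project's actual signature; it assumes java.net.HttpURLConnection and the java.io reader classes are imported):
private static String sendRequest(String urlString, String method, String body) throws IOException {
HttpURLConnection connection = (HttpURLConnection) new URL(urlString).openConnection();
connection.setRequestMethod(method);
// Send a body if it is present (setDoOutput must be enabled before writing).
if (body != null) {
connection.setDoOutput(true);
connection.getOutputStream().write(body.getBytes());
}
// Read the response line by line, as in the snippet above.
StringBuilder builder = new StringBuilder();
try (BufferedReader reader = new BufferedReader(new InputStreamReader(connection.getInputStream()))) {
String line;
while ((line = reader.readLine()) != null)
builder.append(line);
}
return builder.toString();
}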

View File

@@ -12,31 +12,32 @@ import android.media.MediaPlayer;
import android.os.Bundle;
import android.support.v4.content.ContextCompat;
import android.util.Log;
import android.widget.ProgressBar;
import android.widget.TextView;
import android.view.View;
import android.widget.Button;
import android.widget.ProgressBar;
import android.widget.TextView;
import android.widget.VideoView;
import com.aldebaran.qi.Future;
import com.aldebaran.qi.sdk.QiContext;
import com.aldebaran.qi.sdk.QiSDK;
import com.aldebaran.qi.sdk.RobotLifecycleCallbacks;
import com.aldebaran.qi.sdk.builder.AnimateBuilder;
import com.aldebaran.qi.sdk.builder.AnimationBuilder;
import com.aldebaran.qi.sdk.design.activity.RobotActivity;
import com.aldebaran.qi.sdk.design.activity.conversationstatus.SpeechBarDisplayStrategy;
import com.aldebaran.qi.sdk.object.actuation.Animate;
import com.aldebaran.qi.sdk.object.actuation.Animation;
import com.example.fitbot.R;
import com.example.fitbot.exercise.Exercise;
import com.example.fitbot.exercise.ExerciseManager;
import com.example.fitbot.pepper.Pepper;
import com.example.fitbot.ui.components.ExerciseStatusElement;
import com.example.fitbot.util.NavigationManager;
import com.example.fitbot.util.processing.InputProcessor;
import org.joml.Vector3f;
public class FitnessActivity extends RobotActivity implements RobotLifecycleCallbacks {
// Private fields for the FitnessActivity class.
private ExerciseStatusElement exerciseStatusElement;
private InputProcessor motionProcessor;
private Exercise currentExercise;
@@ -52,6 +53,8 @@ public class FitnessActivity extends RobotActivity implements RobotLifecycleCall
private TextView exerciseShortDescriptionTextView;
//private TextView exerciseDescriptionTextView;
private static String exerciseVideoUrl;
private Animate animate;
private VideoView videoView;
private final Object lock = new Object();
@@ -75,6 +78,7 @@ public class FitnessActivity extends RobotActivity implements RobotLifecycleCall
super.onCreate(savedInstanceState);
QiSDK.register(this, this);
setContentView(R.layout.activity_fitness);
videoView = findViewById(R.id.videoView);
// Fill empty objects with exercise data
this.exerciseNameTextView = findViewById(R.id.textViewFitnessTitle);
@@ -118,31 +122,40 @@ public class FitnessActivity extends RobotActivity implements RobotLifecycleCall
// Provide the context so that all queued actions can be performed.
Pepper.provideContext(qiContext, this.getClass());
exerciseStatusElement = findViewById(R.id.personalMotionPreviewElement);
// Initialize the element whenever it has been added to the screen.
// This will provide the element with the appropriate dimensions for drawing
// the canvas properly.
exerciseStatusElement.post(() -> {
this.fetchExerciseAsync((exercise) -> {
// Acquire paths from the exercise and provide them to the motion processor
Vector3f[][] vectors = new Vector3f[][]{exercise.leftPath.getAngleVectors(), exercise.rightPath.getAngleVectors()};
motionProcessor = new InputProcessor(vectors, exercise.exerciseTimeInSeconds, SENSOR_SAMPLE_RATE);
exerciseStatusElement.initialize(exercise, motionProcessor, EXERCISE_COUNT);
motionProcessor = new InputProcessor(SENSOR_SAMPLE_RATE, this);
motionProcessor.useExercise(exercise);
/* TODO: Remove if not needed */motionProcessor.setRecording(true, 10);
motionProcessor.setInputHandler(exerciseStatusElement);
/* TODO: Remove if not needed */
motionProcessor.setRecording(true, 10);
motionProcessor.startListening();
if (videoView.isPlaying()) {
Animation animationArmRaise = AnimationBuilder.with(qiContext) // Create the builder with the context.
.withResources(R.raw.armraise) // Set the animation resource.
.build(); // Build the animation.
animate = AnimateBuilder.with(qiContext) // Create the builder with the context.
.withAnimation(animationArmRaise) // Set the animation.
.build(); // Build the animate action.
Future<Void> animateFuture = animate.async().run();
} else {
Log.e("FitnessActivity", "Video is not playing; skipping the exercise animation.");
}
}, (n) -> {
int randomMessageIndex = (int) Math.floor(Math.random() * EXERCISE_NOT_FOUND_MESSAGES.length);
Pepper.say(EXERCISE_NOT_FOUND_MESSAGES[randomMessageIndex]);
Pepper.say(EXERCISE_NOT_FOUND_SEEK_HELP_MESSAGE);
NavigationManager.navigateToActivity(this, EndScreenActivity.class);
});
});
}
/**
@@ -156,17 +169,16 @@ public class FitnessActivity extends RobotActivity implements RobotLifecycleCall
new Thread(() -> {
Exercise exercise = ExerciseManager.fetchExerciseFromDatabase();
if (exercise == null) {
onFailedFetch.handle(null);
runOnUiThread(() -> onFailedFetch.handle(null));
} else {
runOnUiThread(() -> {
onSuccessfulFetch.handle(exercise);
this.runOnUiThread(() -> {
exerciseNameTextView.setText(exercise.name);
exerciseShortDescriptionTextView.setText(exercise.shortDescription);
// exerciseDescriptionTextView.setText(exercise.description);
exerciseVideoUrl = exercise.videoUrl;
// Play the video
VideoView videoView = findViewById(R.id.videoView);
playVideo(videoView, this);
// When the video has started playing remove the loading circle
@@ -179,14 +191,11 @@ public class FitnessActivity extends RobotActivity implements RobotLifecycleCall
return false;
});
videoView.setOnCompletionListener(new MediaPlayer.OnCompletionListener() {
@Override
public void onCompletion(MediaPlayer mp) {
videoView.setOnCompletionListener(mp -> {
if (EXERCISE_REP < EXERCISE_COUNT) {
videoView.start(); // start the video again
EXERCISE_REP++;
}
}
});
});
}
@@ -290,5 +299,4 @@ public class FitnessActivity extends RobotActivity implements RobotLifecycleCall
});
animator.start();
}
}

View File

@@ -1,159 +0,0 @@
package com.example.fitbot.ui.components;
import android.app.Activity;
import android.content.Context;
import android.graphics.Canvas;
import android.graphics.Paint;
import android.support.annotation.Nullable;
import android.util.AttributeSet;
import android.util.Log;
import android.view.View;
import com.example.fitbot.exercise.Exercise;
import com.example.fitbot.pepper.Pepper;
import com.example.fitbot.ui.activities.EndScreenActivity;
import com.example.fitbot.ui.activities.FitnessActivity;
import com.example.fitbot.ui.activities.MainActivity;
import com.example.fitbot.util.NavigationManager;
import com.example.fitbot.util.processing.IInputHandler;
import com.example.fitbot.util.processing.InputProcessor;
import org.joml.Vector3f;
public class ExerciseStatusElement extends View implements IInputHandler {
// Fields regarding Exercise and speech handling.
private InputProcessor motionProcessor;
private Exercise exercise;
private int exerciseCount;
private FitnessActivity parentActivity;
private final Paint userProgressPaint = new Paint();
private final Paint borderPaint = new Paint();
private final Paint backgroundPaint = new Paint();
private static final String[] STARTING_PHRASES = {
"Veel success met de oefening!",
"Je kan het!",
"Veel plezier!"
};
public ExerciseStatusElement(Context context, AttributeSet attrs) {
super(context, attrs);
if (context instanceof Activity) {
this.parentActivity = (FitnessActivity) context;
}
this.userProgressPaint.setColor(0xFFFF0000); // Red
this.userProgressPaint.setStyle(Paint.Style.FILL);
this.userProgressPaint.setStrokeWidth(5.0f);
this.userProgressPaint.setAntiAlias(true);
// Target paint is the filling of the target path.
this.borderPaint.setColor(0xFFFFFFFF); // White
this.borderPaint.setStyle(Paint.Style.STROKE);
this.borderPaint.setStrokeWidth(5.0f);
this.borderPaint.setAntiAlias(true);
this.backgroundPaint.setColor(0xFF000000); // Black
}
/**
* Method for initializing the PersonalMotionPreviewElement.
* This method has to be called with a "post" function when the element has been
* created, otherwise the dimensions of the element aren't initialized yet, which
* will cause the vertex projections to fail (0 width and height).
*
* @param exercise The exercise that the user is currently performing.
* @param motionProcessor The motion processor that will be used to process the user's motion.
* @param exerciseCount The total number of exercises that the user has to perform.
*/
public void initialize(@Nullable Exercise exercise, InputProcessor motionProcessor, int exerciseCount) {
Log.i("PersonalMotionPreviewElement", "Creating new PersonalMotionPreviewElement.");
this.motionProcessor = motionProcessor;
this.exercise = exercise;
this.exerciseCount = exerciseCount;
Pepper.say(STARTING_PHRASES[(int) Math.floor(Math.random() * STARTING_PHRASES.length)]);
// Handler that is called every time the motion processor receives new data.
}
/**
* Method for setting the gesture path that will be drawn on the canvas.
*
* @param exercise The exercise that the user is currently performing.
*/
public void setExercise(Exercise exercise) {
this.motionProcessor.useExercise(exercise);
this.exercise = exercise;
Log.i("MotionProcessor", "Updating exercise in ExerciseStatusElement");
}
@Override
public void onDraw(Canvas canvas) {
canvas.drawRect(0, 0, getWidth(), getHeight(), backgroundPaint);
this.setBackgroundColor(0xFF000000); // Black
/*if (this.exercise == null)
return;*/
/*
// Draw target circle
float targetRadius = (this.screenDimensions.x + this.screenDimensions.y) / 5.0f;
canvas.drawCircle(this.screenDimensions.x / 2, this.screenDimensions.y / 2, targetRadius, this.targetPaint);
canvas.drawCircle(this.screenDimensions.x / 2, this.screenDimensions.y / 2, (targetRadius * exerciseProgress.get()/1000.0f), this.referencePaint);
referencePaint.setColor(
Color.argb(
255,
(int)(255 * (1.0 - exerciseProgress.get()/1000.0f)),
(int)(255 * exerciseProgress.get()/1000.0f),
0
)
);*/
this.invalidate();
}
@Override
public void accept(Vector3f rotationVector, int sensorId) {
Log.i("MotionProcessor", "Rotation vector received: " + rotationVector);
Log.i("MotionProcessor", "Last error offset:" + this.motionProcessor.getError(sensorId, this.motionProcessor.secondsPassed()));
// Check whether the current exercise has been completed.
// This is determined by the duration of the exercise, and the amount of time that has passed.
// The duration of the exercise originates from the database, and is stored in seconds.
// Whenever 'useExercise' is called, the timer resets and this method will be called again.
if (this.motionProcessor.hasFinished() && !this.motionProcessor.isRecording()) {
// If for some reason the parent activity is not defined,
// move back to the main screen.
if (this.parentActivity == null) {
// Move to main screen
Log.i("MotionProcessor", "Parent activity was null.");
NavigationManager.navigateToActivity(getContext(), MainActivity.class);
return;
}
// Move on to the next exercise, or finish.
if (this.exerciseCount > 0) {
this.exerciseCount--;
this.parentActivity.fetchExerciseAsync((newExercise) -> {
this.motionProcessor.useExercise(newExercise);
// Whenever the database retrieval failed, we return to the main screen.
}, (failed) -> {
// Move to main screen
Log.i("MotionProcessor", "Failed to fetch exercise from database");
NavigationManager.navigateToActivity(parentActivity, MainActivity.class);
});
} else {
// Finish the exercise.
Log.i("MotionProcessor", "Exercise has finished");
NavigationManager.navigateToActivity(parentActivity, EndScreenActivity.class);
}
}
}
}
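The completion check used above, motionProcessor.hasFinished(), is not part of this diff. A minimal sketch of what it could look like, assuming the secondsPassed and exerciseDurationInSeconds fields that InputProcessor declares below:
public boolean hasFinished() {
// Assumed implementation: the exercise is finished once the elapsed time
// reaches the duration loaded from the database for this exercise.
return this.secondsPassed >= this.exerciseDurationInSeconds;
}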

View File

@@ -53,13 +53,15 @@ public class AnglePath {
throw new IllegalArgumentException("Input string must contain 2 elements");
Vector3f[][] angles = new Vector3f[ExerciseManager.SENSOR_COUNT][];
for ( int dataArrayIdx = 0; dataArrayIdx < parsed.getAsJsonArray().size(); dataArrayIdx++)
{
JsonArray array = parsed.getAsJsonArray().get(dataArrayIdx).getAsJsonObject().get("data").getAsJsonArray();
angles[dataArrayIdx] = new Vector3f[array.size()];
int deviceIdx = parsed.getAsJsonArray().get(dataArrayIdx).getAsJsonObject().get("deviceId").getAsInt();
angles[deviceIdx] = new Vector3f[array.size()];
for (int i = 0; i < array.size(); i++) {
JsonArray vec = array.get(i).getAsJsonArray();
angles[dataArrayIdx][i] = new Vector3f(vec.get(0).getAsFloat(), vec.get(1).getAsFloat(), vec.get(2).getAsFloat());
angles[deviceIdx][i] = new Vector3f(vec.get(0).getAsFloat(), vec.get(1).getAsFloat(), vec.get(2).getAsFloat());
}
}
return new AnglePath[] {new AnglePath(angles[0]), new AnglePath(angles[1])};
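For reference, this parser expects the payload to be a JSON array of per-device objects, each with a deviceId and a data array of [x, y, z] angle triples — the same shape that InputProcessor.convertRecordedDataToString produces further down. A small sketch with made-up values:
String example = "[{\"deviceId\":0,\"data\":[[0.1,0.2,0.3]]},{\"deviceId\":1,\"data\":[[0.4,0.5,0.6]]}]";
JsonArray devices = new JsonParser().parse(example).getAsJsonArray();
for (JsonElement deviceElement : devices) {
JsonObject device = deviceElement.getAsJsonObject();
int deviceId = device.get("deviceId").getAsInt(); // index into the angles array
JsonArray data = device.get("data").getAsJsonArray(); // list of [x, y, z] samples
Log.i("AnglePath", "Device " + deviceId + " has " + data.size() + " samples");
}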

View File

@@ -2,15 +2,18 @@ package com.example.fitbot.util.processing;
import android.util.Log;
import com.aldebaran.qi.sdk.object.geometry.Vector3;
import com.example.fitbot.exercise.Exercise;
import com.example.fitbot.exercise.ExerciseManager;
import com.example.fitbot.pepper.Pepper;
import com.example.fitbot.ui.activities.EndScreenActivity;
import com.example.fitbot.ui.activities.FitnessActivity;
import com.example.fitbot.ui.activities.MainActivity;
import com.example.fitbot.util.NavigationManager;
import com.example.fitbot.util.server.WebServer;
import com.google.gson.JsonArray;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import com.google.gson.JsonParser;
import com.google.gson.JsonPrimitive;
import org.jetbrains.annotations.NotNull;
import org.joml.Vector3f;
@@ -26,9 +29,21 @@ public class InputProcessor {
private final float sampleRate; // The sample rate of the motion sensor
private float exerciseDurationInSeconds;
private int repetitionsRemaining = 0;
private int exercisesRemaining = 0;
private float exerciseScore = 0.0F;
private final FitnessActivity parentActivity;
/**
* The phrases that are said by the robot whenever the exercise starts.
*/
private static final String[] STARTING_PHRASES = {
"Veel success met de oefening!",
"Je kan het!",
"Veel plezier!"
};
/**
* This field is used to determine if the motion data is being recorded.
* If this is the case, instead of functioning normally, the element
@@ -48,8 +63,6 @@ public class InputProcessor {
private double secondsPassed = 0.0D;
private long lastTime;
private IInputHandler motionDataConsumer;
private static final String[] REQUIRED_SENSOR_JSON_PROPERTIES =
{"rotationX", "rotationY", "rotationZ", "deviceId"};
@@ -60,34 +73,50 @@ public class InputProcessor {
/**
* Constructor for the motion processor.
*
* @param paths The target paths of the motion data.
* The length of this array must be equal to the
* amount of sensors available.
* @param inputSampleRate The sample rate of the motion sensor.
*/
public InputProcessor(Vector3f[][] paths, float exerciseTime, float inputSampleRate) {
this.selfRotationVectorPaths = new ArrayList[2];
this.selfRotationVectorPaths[0] = new ArrayList<>();
this.selfRotationVectorPaths[1] = new ArrayList<>();
targetRotationVectorPaths = paths;
public InputProcessor(float inputSampleRate, FitnessActivity parentActivity) {
this.sampleRate = inputSampleRate;
this.exerciseDurationInSeconds = exerciseTime;
this.parentActivity = parentActivity;
}
/**
* Function for setting the exercise to use.
* This updates the user and target path and the
* duration of the exercise.
* <p>
* This function is only initially used to select the starting exercise;
* the exercises that follow are determined by a private method 'nextExercise'
*
* @param exercise The exercise to use the paths for.
*/
public void useExercise(Exercise exercise) {
if ( this.recordingMovement )
if (this.recordingMovement)
throw new IllegalStateException("Cannot change exercise while recording movement.");
this.exercisesRemaining = 1;
this.nextExercise(exercise);
Pepper.say(STARTING_PHRASES[(int) Math.floor(Math.random() * STARTING_PHRASES.length)]);
}
/**
* Moves on to the next exercise without changing the remaining exercises.
*
* @param exercise The exercise to move on to.
*/
private void nextExercise(Exercise exercise) {
if (this.exercisesRemaining-- <= 0) {
NavigationManager.navigateToActivity(this.parentActivity, EndScreenActivity.class);
}
ExerciseManager.TOTAL_REPETITIONS_REQUIRED += ExerciseManager.DEFAULT_EXERCISE_REPETITIONS;
ExerciseManager.TOTAL_EXERCISES_PREFORMED++;
this.selfRotationVectorPaths[0] = new ArrayList<>();
this.selfRotationVectorPaths[1] = new ArrayList<>();
this.repetitionsRemaining = ExerciseManager.DEFAULT_EXERCISE_REPETITIONS;
this.targetRotationVectorPaths = new Vector3f[2][exercise.rightPath.getAngleVectors().length];
this.targetRotationVectorPaths[0] = exercise.leftPath.getAngleVectors();
this.targetRotationVectorPaths[1] = exercise.rightPath.getAngleVectors();
@@ -96,6 +125,22 @@ public class InputProcessor {
this.lastTime = System.currentTimeMillis();
}
/**
* Method that is called whenever the user performs a good repetition.
*/
public void onAdequateRepetition() {
ExerciseManager.TOTAL_REPETITIONS_PERFORMED++;
// TODO: Add animation for correct repetition
}
/**
* Method that is called whenever the user performs a bad repetition.
*/
public void onInadequateRepetition() {
// TODO: Add animation for wrong repetition
}
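One way these TODOs could eventually be filled in is by reusing the AnimateBuilder pattern from FitnessActivity earlier in this commit. A hedged sketch (it assumes a QiContext is available here and reuses R.raw.armraise purely as a placeholder resource):
private void playFeedbackAnimation(QiContext qiContext) {
// Hypothetical helper: play a short Pepper animation as feedback on a repetition.
Animation animation = AnimationBuilder.with(qiContext)
.withResources(R.raw.armraise) // placeholder resource, reused from FitnessActivity
.build();
Animate animate = AnimateBuilder.with(qiContext)
.withAnimation(animation)
.build();
animate.async().run(); // run asynchronously so the sensor thread is not blocked
}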
/**
* Function for setting whether the motion data
* should be recorded or not.
@@ -171,7 +216,7 @@ public class InputProcessor {
try {
Log.i("MotionProcessor", "Time passed: " + this.secondsPassed + "s");
if ( this.recordingMovement)
if (this.recordingMovement)
Log.i("MotionProcessor", this.secondsPassed + " / " + this.recordingDurationInSeconds);
Log.i("MotionProcessor", "Received packet data: " + data);
@@ -218,7 +263,7 @@ public class InputProcessor {
// Supposed index of the current rotation vector in the `rotationVectorPaths` array
this.selfRotationVectorPaths[deviceId].add(rotation);
if ( this.recordingMovement && this.secondsPassed >= this.recordingDurationInSeconds) {
if (this.recordingMovement && this.secondsPassed >= this.recordingDurationInSeconds) {
// Do something with the recorded data.
this.recordingMovement = false;
// Convert recorded data from `selfRotationVectorPaths` to string, and
@@ -231,7 +276,21 @@ public class InputProcessor {
Log.i("MotionProcessor", converted);
}
motionDataConsumer.accept(rotation, deviceId);
// Do something else with the vector
// TODO: Implement !!
Log.i("MotionProcessor", "Rotation vector: " + rotation.toString() + " from device: " + deviceId);
// Whenever the exercise has finished and it's not recording,
// attempt to move to the next exercise.
// If this fails, navigate back to the main activity.
if (this.hasFinished() && !this.recordingMovement) {
this.parentActivity.fetchExerciseAsync(this::nextExercise, (nil) -> {
Log.i("MotionProcessor", "Failed to fetch exercise data.");
NavigationManager.navigateToActivity(this.parentActivity, MainActivity.class);
});
}
}
}
@@ -242,8 +301,7 @@ public class InputProcessor {
*
* @return The converted string.
*/
private String convertRecordedDataToString()
{
private String convertRecordedDataToString() {
int[] intBits = new int[3];
char[] vectorChars = new char[12]; // 4 bytes per scalar, 12 chars per vector
JsonArray jsonArray = new JsonArray();
@@ -255,14 +313,14 @@ public class InputProcessor {
*/
// Iterate over all devices. In the current instance, it's 2.
for ( int deviceId = 0; deviceId < selfRotationVectorPaths.length; deviceId++) {
for (int deviceId = 0; deviceId < selfRotationVectorPaths.length; deviceId++) {
JsonObject jsonDeviceObject = new JsonObject();
jsonDeviceObject.addProperty("deviceId", deviceId);
// Data array
JsonArray jsonDeviceDataArray = new JsonArray();
for ( Vector3f vector : selfRotationVectorPaths[deviceId]) {
for (Vector3f vector : selfRotationVectorPaths[deviceId]) {
JsonArray jsonScalarArray = new JsonArray();
jsonScalarArray.add(vector.x);
jsonScalarArray.add(vector.y);
@@ -276,26 +334,6 @@ public class InputProcessor {
return jsonArray.toString();
}
/**
* Method for getting the current progress of the exercise.
* The return value will range between 0.0 and 1.0.
*
* @return The current progress of the exercise.
*/
public double getCurrentProgress() {
return secondsPassed / exerciseDurationInSeconds;
}
/**
* Function for setting the motion data receiver.
*
* @param consumer The consumer to set.
*/
public void setInputHandler(IInputHandler consumer) {
if (consumer != null)
this.motionDataConsumer = consumer;
}
/**
* Function for getting the combined (average) error value of both sensors.
public double getCombinedError()
@@ -315,9 +353,10 @@ public class InputProcessor {
*/
public double getError(int sensorId, float time) {
/*// Ensure the sensor ID is within the bounds of the array
// Ensure the sensor ID is within the bounds of the array
if (sensorId < 0 || sensorId >= selfRotationVectorPaths.length)
return 0.0d;
/*
// Index of the current rotation vector
int targetIndex = (int) ((this.exerciseDurationInSeconds / this.targetRotationVectorPaths[sensorId].length) * time);

View File

@@ -136,11 +136,6 @@
</RelativeLayout>
<com.example.fitbot.ui.components.ExerciseStatusElement
android:id="@+id/personalMotionPreviewElement"
android:layout_width="match_parent"
android:layout_height="match_parent" />
</LinearLayout>
<Button

View File

@@ -32,3 +32,5 @@ Due to its limited support we are not using it in our project. We were already
# Conclusion
We still used HTTP for its simplicity and because we did not need a fancy data protocol. If I were to make this project again, I would still use HTTP, because it keeps the project easy to build. If we were to expand the project into something more game-like, I would switch to UDP; then we might be able to make our own Wii Sports with Pepper, since the loss of some data would not really matter.

View File

@@ -1,17 +0,0 @@
# Ideas for hardware
## Making a balance board
Since we are not able to connect the Wii Fit board, we have to come up with a solution. We thought for some time about what we want to do with it. Originally we wanted to use the balance board for exercises such as standing on one leg, a simple leg exercise we wanted to include. We thought of multiple solutions to keep this exercise, but we still needed to think of a design for the frame.
## The frame
We wanted it to have a similar style to the balance board. However, since we can make our own, we wanted to make it a bit taller, which makes it easier to implement other exercises such as the step-up — an exercise that benefits from a taller box than the Wii Fit board.
## LDR
We can use an LDR (light-dependent resistor) to determine if someone is standing on the board.
## Knock sensor
![Knock sensor](../assets/knocksensor.png)