Updated PersonalMotionPreviewElement and FitnessActivity to match functionality

Luca Warmenhoven
2024-05-22 11:48:02 +03:00
parent 53d5e48b9b
commit 7064fa3b97
2 changed files with 64 additions and 24 deletions

View File

@@ -1,7 +1,10 @@
package com.example.fitbot.ui.activities;
import android.content.Context;
import android.os.Bundle;
import android.util.AttributeSet;
import android.util.Log;
import android.view.View;
import com.aldebaran.qi.sdk.QiContext;
import com.aldebaran.qi.sdk.QiSDK;
@@ -42,7 +45,6 @@ public class FitnessActivity extends RobotActivity implements RobotLifecycleCall
gesturePathBuilder.addVector(new Vector3f(-.5f, .5f, -.5f));
personalMotionPreviewElement = findViewById(R.id.personalMotionPreviewElement);
personalMotionPreviewElement.post(() -> {
Log.i("FitnessActivity", "PersonalMotionPreviewElement.post()");
@@ -56,7 +58,7 @@ public class FitnessActivity extends RobotActivity implements RobotLifecycleCall
@Override
public void onRobotFocusGained(QiContext qiContext) {
// Implement your logic when the robot focus is gained
Animate("bicepcurl", qiContext);
Animations.Animate("bicepcurl", qiContext);
}
@@ -76,10 +78,4 @@ public class FitnessActivity extends RobotActivity implements RobotLifecycleCall
super.onDestroy();
}
public static class PersonalMotionPreviewElement extends View {
public PersonalMotionPreviewElement(Context context, AttributeSet attrs) {
super(context, attrs);
// Initialize your custom view here (optional)
}
}
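The call site changes from a bare Animate("bicepcurl", qiContext) to Animations.Animate("bicepcurl", qiContext), but the Animations helper itself is not part of this commit. A minimal sketch of what such a helper could look like, assuming the standard QiSDK AnimationBuilder/AnimateBuilder API; the resource lookup and the R.raw.bicepcurl resource name are assumptions, not taken from this diff:

// Hypothetical sketch of the Animations helper; the name-to-resource mapping is assumed.
import com.aldebaran.qi.sdk.QiContext;
import com.aldebaran.qi.sdk.builder.AnimateBuilder;
import com.aldebaran.qi.sdk.builder.AnimationBuilder;
import com.aldebaran.qi.sdk.object.actuation.Animate;
import com.aldebaran.qi.sdk.object.actuation.Animation;

public final class Animations {

    private Animations() { }

    // Plays the named animation on the robot. onRobotFocusGained already runs off the
    // UI thread, so the blocking build() calls are acceptable when called from there.
    public static void Animate(String name, QiContext qiContext) {
        Animation animation = AnimationBuilder.with(qiContext)
                .withResources(resourceFor(name))
                .build();
        Animate animate = AnimateBuilder.with(qiContext)
                .withAnimation(animation)
                .build();
        animate.async().run(); // run asynchronously so the lifecycle callback is not blocked
    }

    private static int resourceFor(String name) {
        // Hypothetical mapping from exercise name to an Android raw animation resource.
        switch (name) {
            case "bicepcurl":
                return R.raw.bicepcurl;
            default:
                throw new IllegalArgumentException("Unknown animation: " + name);
        }
    }
}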

View File

@@ -25,12 +25,23 @@ public class PersonalMotionPreviewElement extends View {
private double pathTime = 0.0D; // The timestamp the path playback is currently at.
private MotionProcessor motionProcessor;
private Exercise exercise;
private Path referencePath; // The path the user is supposed to follow.
private Path performingPath; // The path the user is currently following.
private Path stickmanPath; // The path of the stickman that is drawn on the screen.
private Paint referencePaint;
private Paint performingPaint;
private Paint textPaint;
// Matrices for the projection of the path segments onto the screen.
// Depth buffering is not supported yet.
private Matrix4f modelViewMatrix = new Matrix4f();
private Matrix4f projectionMatrix = new Matrix4f();
private double timePassed = 0.0D; // The time that has passed since the start of the exercise, in seconds.
private long startingTime = 0L;
private Paint backgroundColor = new Paint();
@@ -40,7 +51,7 @@ public class PersonalMotionPreviewElement extends View {
private final float FOV = 80.0f; // The field of view of the preview path
private final float Z_NEAR = 0.1f; // The near clipping plane
private final float Z_FAR = 1000.0f; // The far clipping plane
private Vector3f cameraPosition = new Vector3f(0.0f, 0.0f, -1.5f); // The position of the camera
private Vector3f objectPosition = new Vector3f(0.0f, 0.0f, 0.0f); // The position of the object being previewed
private Vector2f screenDimensions = new Vector2f(); // Width and height dimensions of the screen
private Vector2f rotation = new Vector2f(); // Rotation vector (yaw, pitch)
@@ -56,12 +67,34 @@ public class PersonalMotionPreviewElement extends View {
this.performingPaint.setColor(0xFF0000FF); // Blue
this.performingPaint.setStyle(Paint.Style.STROKE);
this.performingPaint.setStrokeWidth(5.0f);
this.textPaint = new Paint();
this.textPaint.setColor(-1); // -1 == 0xFFFFFFFF (white)
this.textPaint.setStyle(Paint.Style.FILL);
this.textPaint.setTextSize(50.0f);
}
/**
* Method for updating the stickman gestures.
*
* This method will update the stickman gestures based on the current
* motion data that is being processed.
*/
private void updateStickmanGestures() {
// Reset previous path
stickmanPath.reset();
// TODO: Define all arm segments:
// - Upper left and right arm
// - Lower left and right arm
// - Upper left and right leg
// - Lower left and right leg
// Update all segments based on the perceived motion data.
// Placeholder segment; the endpoints still have to be derived from the motion data.
PathSegment upperLeftArm = new PathSegment(
new Vector3f(),
new Vector3f()
);
PathSegment[] bodySegments = new PathSegment[] {
new PathSegment(new Vector3f(0.0f, -.5f, -.5f), new Vector3f(0, 0, 0)), // Left leg
new PathSegment(new Vector3f(0.0f, -.5f, .5f), new Vector3f(0, 0, 0)), // Right leg
@@ -70,12 +103,15 @@ public class PersonalMotionPreviewElement extends View {
new PathSegment(new Vector3f(.25f, .25f, 0f), new Vector3f(0, 0, 0)) // Right arm
};
// Generate new path for stickman
// TODO: Generate new path for stickman
}
/**
* Method for initializing the PersonalMotionPreviewElement.
* This method must be called from a View.post(...) callback once the element has been
* created; otherwise the element's dimensions are not initialized yet, which
* causes the vertex projections to fail (0 width and height).
*
* @param exercise The exercise that the user is currently performing.
*/
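For reference, a minimal usage sketch of the pattern this javadoc describes; the initializer name below, initialize, is an assumption, since the method signature is not visible in this hunk (FitnessActivity's diff above uses the same post() pattern):

// Usage sketch; "initialize" is an assumed name for the method documented above.
PersonalMotionPreviewElement preview = findViewById(R.id.personalMotionPreviewElement);
preview.post(() -> {
    // Inside post() the layout pass has completed, so getWidth()/getHeight()
    // are non-zero and the vertex projection has valid screen dimensions.
    preview.initialize(exercise);
});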
@@ -89,7 +125,10 @@ public class PersonalMotionPreviewElement extends View {
this.performingPath = new Path();
this.referencePath = new Path();
this.path = path;
this.startingTime = System.nanoTime(); // Record the starting time of the exercise
this.exercise = exercise;
this.path = exercise.getPath();
this.motionProcessor = new MotionProcessor();
this.motionProcessor.startListening();
this.motionProcessor.setMotionDataEventHandler((processed, preprocessed, sampleIndex, sampleRate, deviceId) -> {
@@ -137,22 +176,23 @@ public class PersonalMotionPreviewElement extends View {
* @return The transformed vector in screen coordinates ranging from (0, 0) to (virtualWidth, virtualHeight).
*/
private Vector2f projectVertex(Vector3f point, int virtualWidth, int virtualHeight) {
Log.i("VertexProjection", "Projecting vertex to screen coordinates: " + point.toString() + " with virtual width " + virtualWidth + " and virtual height " + virtualHeight + ".");
Matrix4f modelViewMatrix = new Matrix4f()
.translate(-cameraPosition.x, -cameraPosition.y, -cameraPosition.z)
modelViewMatrix
.identity()
.translate(-objectPosition.x, -objectPosition.y, -objectPosition.z)
.rotateX((float) Math.toRadians(rotation.y))
.rotateY((float) Math.toRadians(rotation.x));
Matrix4f projectionMatrix = new Matrix4f()
// Transform the projection matrix into a perspective projection matrix.
// A perspective projection preserves the object's depth information.
projectionMatrix
.identity()
.perspective((float) Math.toRadians(FOV), (float) virtualWidth / virtualHeight, Z_NEAR, Z_FAR);
// Calculate Model-View-Projection matrix
Matrix4f MVP = new Matrix4f(projectionMatrix)
.mul(modelViewMatrix);
// Convert to screen coordinates
// Transform world coordinates into clip space using the model-view and projection matrices
Vector4f screenCoordinates = new Vector4f(point, 1.0f)
.mul(MVP);
.mul(this.modelViewMatrix)
.mul(this.projectionMatrix);
// Perspective divide to NDC (-1, 1), then map to screen coordinates (0, virtualWidth) and (0, virtualHeight)
float normalizedX = (screenCoordinates.x / screenCoordinates.w + 1.0f) * 0.5f * virtualWidth;
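The Matrix4f/Vector4f calls above follow JOML's fluent API; below is a self-contained sketch of the same pipeline (model-view transform, perspective projection, perspective divide, viewport mapping) with illustrative values, assuming JOML is the library in use. The Y flip shown is a common Canvas convention and is my choice here, not taken from this diff:

// Standalone sketch of the projection pipeline, assuming JOML (org.joml).
import org.joml.Matrix4f;
import org.joml.Vector4f;

public class ProjectionSketch {
    public static void main(String[] args) {
        int virtualWidth = 800, virtualHeight = 480;

        // Model-view: place the point 1.5 units in front of the camera (camera looks down -Z).
        Matrix4f modelView = new Matrix4f().translate(0.0f, 0.0f, -1.5f);
        Matrix4f projection = new Matrix4f()
                .perspective((float) Math.toRadians(80.0), (float) virtualWidth / virtualHeight, 0.1f, 1000.0f);

        // JOML's Vector4f.mul(m) computes m * v, so this yields projection * (modelView * v).
        Vector4f clip = new Vector4f(0.0f, 0.0f, 0.0f, 1.0f).mul(modelView).mul(projection);

        // Perspective divide maps clip space to NDC in (-1, 1); the viewport mapping then
        // rescales NDC to pixel coordinates (Canvas Y grows downwards, hence the flip).
        float screenX = (clip.x / clip.w + 1.0f) * 0.5f * virtualWidth;
        float screenY = (1.0f - clip.y / clip.w) * 0.5f * virtualHeight;
        System.out.printf("(%.1f, %.1f)%n", screenX, screenY); // the origin projects to the screen centre
    }
}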
@@ -194,14 +234,18 @@ public class PersonalMotionPreviewElement extends View {
public void onDraw(Canvas canvas) {
canvas.drawRect(0, 0, getWidth(), getHeight(), backgroundColor);
this.setBackgroundColor(0xFF000000); // Black
if (path == null)
if (this.exercise == null)
return;
// Draw the exercise preview paths and title
canvas.drawPath(referencePath, referencePaint);
canvas.drawPath(performingPath, performingPaint);
canvas.drawText(this.exercise.getTitle(), 10, 40, textPaint);
timePassed = (System.nanoTime() - startingTime) / 1E9D;
this.rotation.add(1f, 0);
this.referencePath = getDrawablePath(this.path.getSegments());
this.invalidate();
this.invalidate(); // Schedules the next redraw, keeping the preview animating.
}
}