mirror of
https://gitlab.fdmci.hva.nl/technische-informatica-sm3/ti-projectten/rooziinuubii79.git
synced 2025-08-05 12:54:57 +00:00
Compare commits
29 Commits
072b54af04
...
OpenCV
Author | SHA1 | Date
---|---|---
| c9d3b0f795 | |
| 85af15d7a3 | |
| a1b50a3780 | |
| b86528595e | |
| eef4f9c79c | |
| 3c23d37be1 | |
| c2886d32c9 | |
| 8158c85d6e | |
| e682969ec8 | |
| 0dfc3b5c13 | |
| 7f786d5197 | |
| 60ba177dc2 | |
| e9f998b3e7 | |
| 7eeaba482e | |
| e8db00120f | |
| c65f310e81 | |
| ec3e83ef7f | |
| 480d36393a | |
| fea0f19857 | |
| e1135dac0f | |
| 2f4e5ae096 | |
| 9e07a243ea | |
| b93a5f2dca | |
| 911b870786 | |
| dd39bd3021 | |
| 8aa54805ac | |
| d26d277c3c | |
| 508d2ed4e2 | |
| 3e202acc8d | |
.gitignore (vendored): 3 lines changed
@@ -13,7 +13,7 @@ src/Socket/a.out
src/C++/Driver/cmake_install.cmake
src/C++/Socket/a.out
src/C++/Driver/Makefile
src/C++/Driver/vgcore*
vgcore*
src/C++/Driver/cmake_install.cmake
src/C++/Driver/Makefile
src/C++/Driver/log
@@ -31,3 +31,4 @@ CMakeFiles/
Makefile
CMakeCache.txt
cmake_install.cmake
src/C++/OpenCV/main
docs/code/OpenCV.md (new file): 20 lines
@@ -0,0 +1,20 @@
# OpenCV

## Requirements

The camera should analyse the live video feed, detect what is happening in it, and identify what it sees so that potential dangers can be recognised.

## Issues

* OpenCL does not pick up the GPU (see the sketch after this section)
  * Solution: https://github.com/Smorodov/Multitarget-tracker/issues/93

## Installation

### Dependencies

* glew
* opencv

## Sources

* https://github.com/UnaNancyOwen/OpenCVDNNSample/tree/master
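The OpenCL issue noted above usually comes down to requesting the OpenCL DNN target before checking whether an OpenCL device is visible at all. A minimal sketch of that check, assuming OpenCV 4 built with OpenCL support; the helper name selectDnnTarget is hypothetical and not part of this repository:

#include <opencv2/core/ocl.hpp>
#include <opencv2/dnn.hpp>
#include <iostream>

// Prefer the OpenCL target only when an OpenCL device is actually visible,
// otherwise fall back to the CPU so detection keeps running.
void selectDnnTarget(cv::dnn::Net &net)
{
    net.setPreferableBackend(cv::dnn::DNN_BACKEND_OPENCV);
    if (cv::ocl::haveOpenCL())
    {
        cv::ocl::setUseOpenCL(true);
        net.setPreferableTarget(cv::dnn::DNN_TARGET_OPENCL);
        std::cout << "Using OpenCL DNN target" << std::endl;
    }
    else
    {
        net.setPreferableTarget(cv::dnn::DNN_TARGET_CPU);
        std::cout << "No OpenCL device found, using CPU target" << std::endl;
    }
}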
@@ -1 +0,0 @@
# retro sprint 4
@@ -6,7 +6,10 @@ set(CMAKE_CXX_STANDARD 23)
find_library(PAHO_MQTTPP_LIBRARY paho-mqttpp3 PATHS /usr/local/lib)
find_library(PAHO_MQTT_LIBRARY paho-mqtt3a PATHS /usr/local/lib)

include_directories(/usr/local/include)
# Find OpenCV package
find_package(OpenCV REQUIRED)
find_package(OpenEXR REQUIRED)
include_directories(${OpenCV_INCLUDE_DIRS})

set(SOURCE_FILES
src/KobukiDriver/KobukiParser.cpp
@@ -20,4 +23,4 @@ set(SOURCE_FILES
add_executable(kobuki_control ${SOURCE_FILES})

# Link the static libraries
target_link_libraries(kobuki_control ${PAHO_MQTTPP_LIBRARY} ${PAHO_MQTT_LIBRARY} pthread)
target_link_libraries(kobuki_control ${PAHO_MQTTPP_LIBRARY} ${PAHO_MQTT_LIBRARY} ${OpenCV_LIBS} pthread OpenEXR::OpenEXR)
@@ -509,32 +509,6 @@ void CKobuki::doRotation(long double th) {
    usleep(25 * 1000);
}

// Combines rotation by an angle with navigation to a coordinate: performs
// the movement to the selected coordinate in the robot's coordinate system.
void CKobuki::goToXy(long double xx, long double yy) {
    long double th;

    yy = yy * -1;

    th = atan2(yy, xx);
    doRotation(th);

    long double s = sqrt(pow(xx, 2) + pow(yy, 2));

    // reset the robot's coordinate system
    x = 0;
    y = 0;
    iterationCount = 0;
    theta = 0;

    // std::cout << "distance to travel: " << s << "[m]" << std::endl;

    goStraight(s);

    usleep(25 * 1000);
    return;
}

/// @brief Makes the robot move forward for 3 seconds
/// @param speedvalue How fast it will drive forward, from 0 to 1024
void CKobuki::forward(int speedvalue) {
@@ -31,7 +31,6 @@
#include <chrono>
#include <sstream>
#include "KobukiParser.h"
#include "graph.h"

using namespace std;

@@ -1,71 +0,0 @@
#ifndef GRAPH1010
#define GRAPH1010
#include <stdio.h>
#include <stdlib.h>
#include <vector>

using namespace std;
#define GRAPH_ENABLED true

class plot {
public:
    FILE *gp;
    bool enabled, persist;
    plot(bool _persist = false, bool _enabled = GRAPH_ENABLED) {
        enabled = _enabled;
        persist = _persist;
        if (enabled) {
            if (persist)
                gp = popen("gnuplot -persist", "w");
            else
                gp = popen("gnuplot", "w");
        }
    }

    void plot_data(vector<float> x, const char* style = "points", const char* title = "Data") {
        if (!enabled)
            return;
        fprintf(gp, "set title '%s' \n", title);
        fprintf(gp, "plot '-' w %s \n", style);
        for (int k = 0; k < x.size(); k++) {
            fprintf(gp, "%f\n", x[k]);
        }
        fprintf(gp, "e\n");
        fflush(gp);
    }

    void plot_data(vector<float> x, vector<float> y, const char* style = "points", const char* title = "Data") {
        if (!enabled)
            return;
        fprintf(gp, "set title '%s' \n", title);
        fprintf(gp, "plot '-' w %s \n", style);
        for (int k = 0; k < x.size(); k++) {
            fprintf(gp, "%f %f \n", x[k], y[k]);
        }
        fprintf(gp, "e\n");
        fflush(gp);
    }

    ~plot() {
        if (enabled)
            pclose(gp);
    }

};

/*
int main(int argc, char **argv) {
    plot p;
    for (int a = 0; a < 100; a++) {
        vector<float> x, y;
        for (int k = a; k < a + 200; k++) {
            x.push_back(k);
            y.push_back(k * k);
        }
        p.plot_data(x, y);
    }
    return 0;
}
*/

#endif
@@ -5,6 +5,7 @@ MqttClient::MqttClient(const std::string& address, const std::string& clientId,
// here all the constructor parameters are set for the connection
: client_(address, clientId), username_(username), password_(password), callback_(*this) {
    client_.set_callback(callback_);

    options.set_clean_session(true);
    options.set_mqtt_version(MQTTVERSION_3_1_1); // For MQTT 3.1.1
    if (!username_.empty() && !password_.empty()) {
@@ -3,11 +3,15 @@
#include <thread>
#include "MQTT/MqttClient.h"
#include "KobukiDriver/CKobuki.h"
#include <opencv4/opencv2/opencv.hpp>
#include <opencv4/opencv2/core.hpp>

using namespace std;
using namespace cv;
CKobuki robot;
std::string readMQTT();
void parseMQTT(std::string message);
void CapnSend();
// ip, clientID, username, password
MqttClient client("ws://145.92.224.21/ws/", "KobukiRPI", "rpi", "rpiwachtwoordofzo"); // create a client object
std::string message = "stop";
@@ -17,7 +21,7 @@ void sendKobukiData(TKobukiData &data);
void setup()
{
    unsigned char *null_ptr(0);
    // robot.startCommunication("/dev/ttyUSB0", true, null_ptr);
    robot.startCommunication("/dev/ttyUSB0", true, null_ptr);
    // connect to the MQTT server and subscribe to commands

    client.connect();
@@ -26,18 +30,18 @@ void setup()

int main()
{
    // Unset the http_proxy environment variable

    setup();
    std::thread image (CapnSend);
    std::thread safety([&]() { robot.robotSafety(&message); });
    std::thread sendMqtt([&]() { sendKobukiData(robot.parser.data); });

    while(true){
        parseMQTT(readMQTT());
    }

    sendMqtt.join();
    safety.join();
    image.join();
}

std::string readMQTT()
@@ -57,7 +61,7 @@ void parseMQTT(std::string message)
{
    if (message == "up")
    {
        robot.forward(1024);
        robot.forward(350);
    }
    else if (message == "left")
    {
@@ -69,7 +73,7 @@ void parseMQTT(std::string message)
    }
    else if (message == "down")
    {
        robot.forward(-800);
        robot.forward(-350);
    }
    else if (message == "stop")
    {
@@ -276,3 +280,31 @@ void sendKobukiData(TKobukiData &data) {
        std::this_thread::sleep_for(std::chrono::milliseconds(1000));
    }
}

void CapnSend() {
    VideoCapture cap(0);
    if (!cap.isOpened()) {
        cerr << "Error: Could not open camera" << endl;
        return;
    }

    Mat frame;
    while (true) {
        cap >> frame; // Capture a new image frame
        if (frame.empty()) {
            cerr << "Error: Could not capture image" << endl;
            continue;
        }

        // Convert the image to a byte array
        vector<uchar> buf;
        imencode(".jpg", frame, buf);
        auto* enc_msg = reinterpret_cast<unsigned char*>(buf.data());

        // Publish the image data
        client.publishMessage("kobuki/cam", string(enc_msg, enc_msg + buf.size()));
        cout << "Sent image" << endl;

        std::this_thread::sleep_for(std::chrono::milliseconds(300)); // Send an image every 300 ms
    }
}
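For reference, the JPEG payload that CapnSend above publishes on kobuki/cam can be turned back into an image with cv::imdecode on the receiving side. A minimal sketch, not part of this diff; the helper name decodeFramePayload is hypothetical and the payload string is assumed to hold the bytes exactly as published:

#include <opencv2/opencv.hpp>
#include <iostream>
#include <string>
#include <vector>

// Decode an MQTT payload produced by CapnSend() back into a cv::Mat.
cv::Mat decodeFramePayload(const std::string &payload)
{
    // Copy the raw JPEG bytes into a buffer that cv::imdecode accepts.
    std::vector<uchar> buf(payload.begin(), payload.end());
    cv::Mat frame = cv::imdecode(buf, cv::IMREAD_COLOR);
    if (frame.empty())
    {
        std::cerr << "Error: Could not decode image payload" << std::endl;
    }
    return frame;
}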
@@ -1,15 +0,0 @@
cmake_minimum_required(VERSION 3.10)
set(CMAKE_CXX_STANDARD 23)

# Find the Paho MQTT C++ library
find_library(PAHO_MQTTPP_LIBRARY paho-mqttpp3 PATHS /usr/local/lib)
find_library(PAHO_MQTT_LIBRARY paho-mqtt3a PATHS /usr/local/lib)

# Include the headers
include_directories(/usr/local/include)

# Add the executable
add_executable(my_program main.cpp)

# Link the libraries
target_link_libraries(my_program ${PAHO_MQTTPP_LIBRARY} ${PAHO_MQTT_LIBRARY})
@@ -1,64 +0,0 @@
#include <iostream>
#include <mqtt/async_client.h>
#include <thread> // For std::this_thread::sleep_for
#include <chrono> // For std::chrono::seconds

// Define the address of the MQTT broker, the client ID, and the topic to subscribe to.
const std::string ADDRESS("mqtt://localhost:1883"); // Broker address (Raspberry Pi)
const std::string CLIENT_ID("raspberry_pi_client");
const std::string TOPIC("home/commands");

// Define a callback class that handles incoming messages and connection events.
class callback : public virtual mqtt::callback {
    // Called when a message arrives on a subscribed topic.
    void message_arrived(mqtt::const_message_ptr msg) override {
        std::cout << "Received message: '" << msg->get_topic() << "' : " << msg->to_string() << std::endl;
    }

    // Called when the connection to the broker is lost.
    void connection_lost(const std::string& cause) override {
        std::cerr << "Connection lost. Reason: " << cause << std::endl;
    }

    // Called when a message delivery is complete.
    void delivery_complete(mqtt::delivery_token_ptr token) override {
        std::cout << "Message delivered!" << std::endl;
    }
};

int main() {
    // Create an MQTT async client and set up the callback class.
    mqtt::async_client client(ADDRESS, CLIENT_ID);
    callback cb;
    client.set_callback(cb);

    // Set up the connection options (such as username and password).
    mqtt::connect_options connOpts;
    connOpts.set_clean_session(true);
    connOpts.set_user_name("ishak");
    connOpts.set_password("kobuki");
    connOpts.set_mqtt_version(MQTTVERSION_3_1_1);

    try {
        // Try to connect to the broker and wait until successful.
        std::cout << "Connecting to broker..." << std::endl;
        client.connect(connOpts)->wait(); // Connect with the provided options
        std::cout << "Connected!" << std::endl;

        // Subscribe to the specified topic and wait for confirmation.
        std::cout << "Subscribing to topic: " << TOPIC << std::endl;
        client.subscribe(TOPIC, 1)->wait(); // Subscribe with QoS level 1

        // Keep the program running to continue receiving messages from the broker.
        while (true) {
            std::this_thread::sleep_for(std::chrono::seconds(1)); // Sleep to reduce CPU usage
        }

    } catch (const mqtt::exception &exc) {
        // Catch any MQTT exceptions and display the error message.
        std::cerr << "Error: " << exc.what() << std::endl;
        return 1;
    }

    return 0; // Return 0 to indicate successful execution
}
src/C++/OpenCV/CMakeLists.txt (new file): 44 lines
@@ -0,0 +1,44 @@
cmake_minimum_required( VERSION 3.6 )

# Require C++23
set( CMAKE_CXX_STANDARD 23 )
set( CMAKE_CXX_STANDARD_REQUIRED ON )
set( CMAKE_CXX_EXTENSIONS OFF )
set(BUILD_MODE Debug)
# Create Project
project( Sample )
add_executable( YOLOv4 util.h main.cpp )

# Set StartUp Project
set_property( DIRECTORY PROPERTY VS_STARTUP_PROJECT "YOLOv4" )

# Find Package
# OpenCV
find_package( OpenCV REQUIRED )

if( OpenCV_FOUND )
  # Additional Include Directories
  include_directories( ${OpenCV_INCLUDE_DIRS} )

  # Additional Dependencies
  target_link_libraries( YOLOv4 ${OpenCV_LIBS} )
endif()

# Download Model
set( MODEL https://github.com/AlexeyAB/darknet/releases/download/darknet_yolo_v3_optimal/yolov4.weights )
file( DOWNLOAD
  "${MODEL}"
  "${CMAKE_CURRENT_LIST_DIR}/yolov4.weights"
  EXPECTED_HASH SHA256=e8a4f6c62188738d86dc6898d82724ec0964d0eb9d2ae0f0a9d53d65d108d562
  SHOW_PROGRESS
)

# Download Config
set( CONFIG https://raw.githubusercontent.com/AlexeyAB/darknet/master/cfg/yolov4.cfg )
file( DOWNLOAD
  "${CONFIG}"
  "${CMAKE_CURRENT_LIST_DIR}/yolov4.cfg"
  EXPECTED_HASH SHA256=a6d0f8e5c62cc8378384f75a8159b95fa2964d4162e33351b00ac82e0fc46a34
  SHOW_PROGRESS
)
src/C++/OpenCV/YOLOv4 (new executable file): binary file not shown
src/C++/OpenCV/coco.names (new file): 80 lines
@@ -0,0 +1,80 @@
person
bicycle
car
motorbike
aeroplane
bus
train
truck
boat
traffic light
fire hydrant
stop sign
parking meter
bench
bird
cat
dog
horse
sheep
cow
elephant
bear
zebra
giraffe
backpack
umbrella
handbag
tie
suitcase
frisbee
skis
snowboard
sports ball
kite
baseball bat
baseball glove
skateboard
surfboard
tennis racket
bottle
wine glass
cup
fork
knife
spoon
bowl
banana
apple
sandwich
orange
broccoli
carrot
hot dog
pizza
donut
cake
chair
sofa
pottedplant
bed
diningtable
toilet
tvmonitor
laptop
mouse
remote
keyboard
cell phone
microwave
oven
toaster
sink
refrigerator
book
clock
vase
scissors
teddy bear
hair drier
toothbrush
src/C++/OpenCV/main.cpp (new file): 209 lines
@@ -0,0 +1,209 @@
#include <iostream>
#include <string>
#include <vector>
#include <opencv2/opencv.hpp>
#include <opencv2/dnn.hpp>
#include <filesystem>
#include <fstream>

#include "util.h"

// Helper function to check if a file exists
bool fileExists(const std::string &path)
{
    return std::filesystem::exists(path);
}

// Function to read class names from a file
std::vector<std::string> _readClassNameList(const std::string &path)
{
    std::vector<std::string> classes;

    // Check if file exists
    if (!fileExists(path))
    {
        throw std::runtime_error("Class names file not found: " + path);
    }

    // Try to open and read file
    std::ifstream file(path);
    if (!file.is_open())
    {
        throw std::runtime_error("Unable to open class names file: " + path);
    }

    std::string line;
    while (std::getline(file, line))
    {
        if (!line.empty())
        {
            classes.push_back(line);
        }
    }

    if (classes.empty())
    {
        throw std::runtime_error("No classes found in file: " + path);
    }

    return classes;
}

int main(int argc, char *argv[])
{
    try
    {
        // Open Video Capture
        cv::VideoCapture capture = cv::VideoCapture(0);
        if (!capture.isOpened())
        {
            std::cerr << "Failed to open camera device" << std::endl;
            return -1;
        }

        // Read Class Name List and Color Table
        const std::string list = "coco.names";
        const std::vector<std::string> classes = _readClassNameList(list);
        const std::vector<cv::Scalar> colors = getClassColors(classes.size());

        // Debug: Print the size of the colors vector
        std::cout << "Number of colors: " << colors.size() << std::endl;

        // Read Darknet
        const std::string model = "yolov4.weights";
        const std::string config = "yolov4.cfg";
        cv::dnn::Net net = cv::dnn::readNet(model, config);
        if (net.empty())
        {
            std::cerr << "Failed to load network" << std::endl;
            return -1;
        }

        // Set Preferable Backend
        net.setPreferableBackend(cv::dnn::DNN_BACKEND_OPENCV);

        // Set Preferable Target
        net.setPreferableTarget(cv::dnn::DNN_TARGET_OPENCL);

        while (true)
        {
            // Read Frame
            cv::Mat frame;
            capture >> frame;
            if (frame.empty())
            {
                cv::waitKey(0);
                break;
            }
            if (frame.channels() == 4)
            {
                cv::cvtColor(frame, frame, cv::COLOR_BGRA2BGR);
            }

            // Create Blob from Input Image
            cv::Mat blob = cv::dnn::blobFromImage(frame, 1 / 255.f, cv::Size(416, 416), cv::Scalar(), true, false);

            // Set Input Blob
            net.setInput(blob);

            // Run Forward Network
            std::vector<cv::Mat> detections;
            net.forward(detections, getOutputsNames(net));

            // Draw Region
            std::vector<int32_t> class_ids;
            std::vector<float> confidences;
            std::vector<cv::Rect> rectangles;
            for (cv::Mat &detection : detections)
            {
                if (detection.empty())
                {
                    std::cerr << "Detection matrix is empty!" << std::endl;
                    continue;
                }
                for (int32_t i = 0; i < detection.rows; i++)
                {
                    cv::Mat region = detection.row(i);

                    // Retrieve Max Confidence and Class Index
                    cv::Mat scores = region.colRange(5, detection.cols);
                    cv::Point class_id;
                    double confidence;
                    cv::minMaxLoc(scores, 0, &confidence, 0, &class_id);

                    // Check Confidence
                    constexpr float threshold = 0.2;
                    if (threshold > confidence)
                    {
                        continue;
                    }

                    // Retrieve Object Position
                    const int32_t x_center = static_cast<int32_t>(region.at<float>(0) * frame.cols);
                    const int32_t y_center = static_cast<int32_t>(region.at<float>(1) * frame.rows);
                    const int32_t width = static_cast<int32_t>(region.at<float>(2) * frame.cols);
                    const int32_t height = static_cast<int32_t>(region.at<float>(3) * frame.rows);
                    const cv::Rect rectangle = cv::Rect(x_center - (width / 2), y_center - (height / 2), width, height);

                    // Add Class ID, Confidence, Rectangle
                    class_ids.push_back(class_id.x);
                    confidences.push_back(confidence);
                    rectangles.push_back(rectangle);
                }
            }

            // Remove Overlap Rectangles using Non-Maximum Suppression
            constexpr float confidence_threshold = 0.5; // Confidence
            constexpr float nms_threshold = 0.5;        // IoU (Intersection over Union)
            std::vector<int32_t> indices;
            cv::dnn::NMSBoxes(rectangles, confidences, confidence_threshold, nms_threshold, indices);

            // Draw Rectangle
            for (const int32_t &index : indices)
            {
                // Bounds checking
                if (class_ids[index] >= colors.size())
                {
                    std::cerr << "Color index out of bounds: " << class_ids[index] << " (max: " << colors.size() - 1 << ")" << std::endl;
                    continue;
                }

                const cv::Rect rectangle = rectangles[index];
                const cv::Scalar color = colors[class_ids[index]];

                // Debug: Print the index and color
                std::cout << "Drawing rectangle with color index: " << class_ids[index] << std::endl;

                constexpr int32_t thickness = 3;
                cv::rectangle(frame, rectangle, color, thickness);

                std::string label = classes[class_ids[index]] + ": " + std::to_string(static_cast<int>(confidences[index] * 100)) + "%";

                int baseLine;
                cv::Size labelSize = cv::getTextSize(label, cv::FONT_HERSHEY_SIMPLEX, 0.5, 1, &baseLine);
                int top = std::max(rectangle.y, labelSize.height);
                cv::rectangle(frame, cv::Point(rectangle.x, top - labelSize.height),
                              cv::Point(rectangle.x + labelSize.width, top + baseLine), color, cv::FILLED);
                cv::putText(frame, label, cv::Point(rectangle.x, top), cv::FONT_HERSHEY_SIMPLEX, 0.5, cv::Scalar(255, 255, 255), 1);
            }

            // Show Image
            cv::imshow("Object Detection", frame);
            const int32_t key = cv::waitKey(1);
            if (key == 'q')
            {
                break;
            }
        }

        cv::destroyAllWindows();
        return 0;
    }
    catch (const std::exception &e)
    {
        std::cerr << "Error: " << e.what() << std::endl;
        return -1;
    }
}

// cloned and fixed from https://github.com/UnaNancyOwen/OpenCVDNNSample/tree/master
src/C++/OpenCV/util.h (new file): 61 lines
@@ -0,0 +1,61 @@
#ifndef __UTIL__
#define __UTIL__

#include <vector>
#include <string>
#include <fstream>
#include <opencv2/dnn.hpp>
#include <opencv2/core.hpp>
#include <opencv2/highgui.hpp>

// Get Output Layers Name
std::vector<std::string> getOutputsNames( const cv::dnn::Net& net )
{
    static std::vector<std::string> names;
    if( names.empty() ){
        std::vector<int32_t> out_layers = net.getUnconnectedOutLayers();
        std::vector<std::string> layers_names = net.getLayerNames();
        names.resize( out_layers.size() );
        for( size_t i = 0; i < out_layers.size(); ++i ){
            names[i] = layers_names[out_layers[i] - 1];
        }
    }
    return names;
}

// Get Output Layer Type
std::string getOutputLayerType( cv::dnn::Net& net )
{
    const std::vector<int32_t> out_layers = net.getUnconnectedOutLayers();
    const std::string output_layer_type = net.getLayer( out_layers[0] )->type;
    return output_layer_type;
}

// Read Class Name List
std::vector<std::string> readClassNameList( const std::string list_path )
{
    std::vector<std::string> classes;
    std::ifstream ifs( list_path );
    if( !ifs.is_open() ){
        return classes;
    }
    std::string class_name = "";
    while( std::getline( ifs, class_name ) ){
        classes.push_back( class_name );
    }
    return classes;
}

// Get Class Color Table for Visualize
std::vector<cv::Scalar> getClassColors( const int32_t number_of_colors )
{
    cv::RNG random;
    std::vector<cv::Scalar> colors;
    for( int32_t i = 0; i < number_of_colors; i++ ){
        cv::Scalar color( random.uniform( 0, 255 ), random.uniform( 0, 255 ), random.uniform( 0, 255 ) );
        colors.push_back( color );
    }
    return colors;
}

#endif // __UTIL__
src/C++/OpenCV/yolov4.cfg (new file): 1158 lines, file diff suppressed because it is too large
src/C++/OpenCV/yolov4.weights (new file): binary file not shown
@@ -1,49 +1,31 @@
from flask import Flask, request, render_template, jsonify, g
from flask import Flask, Response, request, render_template, jsonify
import paho.mqtt.client as mqtt
import mysql.connector

app = Flask(__name__)

# Global MQTT setup
kobuki_message = "empty"
def on_message(client, message):
def on_message(client, userdata, message):
    global kobuki_message, latest_image
    if message.topic == "kobuki/data":
        kobuki_message = str(message.payload.decode("utf-8"))
    elif message.topic == "kobuki/cam":
        latest_image = message.payload


# Create an MQTT client instance
mqtt_client = mqtt.Client()
mqtt_client.username_pw_set("server", "serverwachtwoordofzo")
mqtt_client.connect("localhost", 1884, 60)
mqtt_client.connect("localhost", 80, 60)
mqtt_client.loop_start()
mqtt_client.subscribe("kobuki/data")
mqtt_client.subscribe("kobuki/cam")

# Database connection function
def get_db():
    if 'db' not in g:  # 'g' is specific to a request and lives only for the duration of that request
        g.db = mysql.connector.connect(
            host="127.0.0.1",
            port=3306,
            user="admin",
            password="kobuki",
            database="kobuki"
        )
    return g.db

# Close the database after every request
@app.teardown_appcontext
def close_db(error):
    db = g.pop('db', None)
    if db is not None:
        db.close()
mqtt_client.on_message = on_message  # this line needs to come after the function definition, otherwise the function it refers to cannot be found

@app.route('/')
def index():
    return render_template('index.html')


@app.route('/control', methods=["GET","POST"])
def control():
    if request.authorization and request.authorization.username == 'ishak' and request.authorization.password == 'kobuki':
@@ -58,31 +40,32 @@ def move():

    # Send the direction via MQTT
    if direction:
        mqtt_client.publish("home/commands", direction)

        db_connection = get_db()
        cursor = db_connection.cursor()
        sql = "INSERT INTO command (command) VALUES (%s)"
        value = direction
        cursor.execute(sql, (value,))
        db_connection.commit()
        cursor.close()
        db_connection.close()
        mqtt_client.publish("home/commands", direction)  # The topic can be changed

    return jsonify({"status": "success", "direction": direction})


@app.route('/data', methods=['GET'])
def data():
    return kobuki_message

@app.route("/database")
def database():
    db = get_db()
    cursor = db.cursor()
    cursor.execute("SELECT * FROM kobuki_data")
    rows = cursor.fetchall()
    cursor.close()
    return str(rows)

@app.route('/image')
def image():
    global latest_image
    if latest_image is not None:
        return Response(latest_image, mimetype='image/jpeg')
    else:
        return "No image available", 404


@app.route('/phpmyadmin/<path:path>')
def phpmyadmin_passthrough(path):
    # Let Apache handle this route directly
    return "", 404


if __name__ == '__main__':
    app.run(debug=True, port=5000)
@@ -1,9 +1,9 @@
// Select all buttons and add an event listener to each button
document.addEventListener("DOMContentLoaded", function() {
  document.querySelectorAll(".btn").forEach(button => {
    button.addEventListener("click", function(event) {
      event.preventDefault(); // voorkomt pagina-verversing
      event.preventDefault(); // prevents page refresh

      // Haal de waarde van de knop op
      // Get the value of the button
      const direction = event.target.value;

      fetch("/move", {
@@ -14,13 +14,14 @@ document.querySelectorAll(".btn").forEach(button => {
        body: JSON.stringify({ direction: direction })
      })
      .then(response => response.json())
      .then(data => {
        console.log("Success:", data);
      })
      .catch(error => {
        console.error("Error:", error);
      });
    });
  });

  // Fetch data from the server
  async function fetchData() {
@@ -34,7 +35,7 @@ document.querySelectorAll(".btn").forEach(button => {
    const data = await fetchData();
    const sensorDataContainer = document.getElementById("sensor-data");
    sensorDataContainer.innerHTML = ""; // Clear previous data
    //for each object in json array create a new paragraph element and append it to the sensorDataContainer
    // For each object in JSON array, create a new paragraph element and append it to the sensorDataContainer
    for (const [key, value] of Object.entries(data)) {
      const dataElement = document.createElement("p");
      dataElement.textContent = `${key}: ${value}`;
@@ -42,6 +43,15 @@ document.querySelectorAll(".btn").forEach(button => {
    }
  }

  // Update the image
  function updateImage() {
    var img = document.getElementById("robot-image");
    img.src = "/image?" + new Date().getTime(); // Add timestamp to avoid caching
  }

  // Fetch and display sensor data every 5 seconds
  setInterval(parseData, 5000);
  setInterval(parseData, 1000);

  // Update the image every 200 ms
  setInterval(updateImage, 200);
});
@@ -1,6 +1,8 @@
{% extends 'base.html' %} {% block head %}
{% extends 'base.html' %}
{% block head %}
<link rel="stylesheet" href="../static/style.css" />
{% endblock %} {% block content %}
{% endblock %}
{% block content %}
<!DOCTYPE html>
<html lang="en">
<head>
@@ -11,8 +13,8 @@
</head>
<body>
  <div class="container">
    <div class="image-section">
      <img src="kobuki.jpg" alt="Kobuki Robot" id="robot-image" />
    <div class="robot-image">
      <img src="/image" alt="Kobuki Camera Feed" id="robot-image" />
    </div>
    <div class="button-section">
      <form id="form" action="/move" method="post">
@@ -42,6 +44,7 @@
      </table>
    </div>
  </div>

  <script src="../static/script.js"></script>
</body>
</html>
src/config/mosquitto.conf (new file): 7 lines
@@ -0,0 +1,7 @@
allow_anonymous false
password_file /etc/mosquitto/passwordfile
listener 8080
protocol websockets

listener 1884
protocol mqtt
src/config/nginx-sites.conf (new file): 22 lines
@@ -0,0 +1,22 @@
server {
    listen 80;
    server_name 145.92.224.21;

    # Proxy WebSocket connections for MQTT
    location /ws/ {
        proxy_pass http://localhost:9001;
        proxy_http_version 1.1;
        proxy_set_header Upgrade $http_upgrade;
        proxy_set_header Connection "upgrade";
        proxy_set_header Host $host;
    }

    # Proxy HTTP connections for Flask
    location / {
        proxy_pass http://localhost:5000;
        proxy_set_header Host $host;
        proxy_set_header X-Real-IP $remote_addr;
        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
        proxy_set_header X-Forwarded-Proto $scheme;
    }
}
src/config/nginx.conf (new file): 7 lines
@@ -0,0 +1,7 @@
stream {
    server {
        listen 9001;
        proxy_pass localhost:8080;
    }
}
@@ -1,25 +0,0 @@
# Ethical aspects of the project

## View on the ethical aspects of the Kobuki project

Ethical aspects are very important in this project, although I do believe that you cannot prevent, or guarantee, everything.

## Privacy

Looking at the privacy side, for example, it is very difficult to decide what to do with the collected data. I am going to use a camera on the robot to see
where the robot is and what it observes. If the robot enters a burning house and then sees a person, it is important to be able to see that person clearly. You could not require that person to be blurred, for instance, because you need to be able to assess that person's condition.
It is also important to consider what you do with that data: do you store it for possible later use, or delete it immediately? It is very hard to determine at which point you are violating someone's privacy in such a situation.

## Reliability

I believe the reliability of the robot does have to be guaranteed,
because if I send the robot into a burning house and it fails, that can be very dangerous for the person inside. I think that deserves more attention than the privacy of that person. The robot is meant to help emergency workers, not to make their work harder.

## Impact on emergency workers & society

If many emergency workers start using the robot and it becomes a kind of standard, it is important that the robot is reliable and can be trusted. After all, human lives are at stake, and that is what matters most. The ultimate goal is for the robot to help emergency workers, not to make their work more difficult. If the robot becomes a standard tool, it must be user-friendly and genuinely helpful. The robot must also work as well as possible in order to keep people's trust. If the robot keeps making mistakes and is unreliable, fewer people will use it. I therefore also think that the use of the robot has to be very transparent: how the robot is controlled, how it assesses situations, and how it deals with them.
Being very clear about that helps you build people's trust more quickly.

I think the ethical aspects are very important in this project and that they must be taken into account.
Reliability and the impact the robot can have on society and on emergency workers require careful thought.
After all, you are working with human lives, and that is the most important thing. Privacy is also very important, but I think you can be somewhat more flexible about it.
BIN (removed image file): binary file not shown. Before: 10 KiB
@@ -1,21 +0,0 @@
Motivation Letter

16/12/2024

Cognizant Digital Studio, Attn. Hayo Rubingh

Subject: Internship Application Cognizant Digital Studio

Dear Mr. Rubingh,

With great enthusiasm, I am applying for the internship position at Cognizant Digital Studio in Amsterdam. As a second-year bachelor's student in Technische Informatica (Computer Science) at Hogeschool van Amsterdam, I am seeking a challenging internship where I can combine my technical skills with my passion for innovation. Cognizant's focus on IoT and technology prototypes fits perfectly with my interests.

Throughout my studies, I have gained experience in software development, including Python and JavaScript, and have worked with IoT devices such as Arduino/ESP. What drives me is the opportunity to create and develop new solutions that can make life easier and more efficient. I am particularly interested in the field of IoT and the possibilities it offers for creating smart solutions. I am eager to learn more about the latest technologies and how they can be applied in real-world projects.

I am available to start in February 2025 and look forward to contributing to innovative projects.

I would be delighted to discuss my motivation and experience further in a personal interview. You can find my contact details in my CV. Thank you for considering my application. I am looking forward to hearing from you.

Yours sincerely,

Ishak Jmilou
@@ -1,54 +0,0 @@


# Which communication protocol makes it possible to communicate securely and reliably between IoT devices?

Author: Ishak Jmilou

Date: 17-12-2024

---

## Introduction

This report examines the different communication protocols that can be used to communicate securely and reliably between IoT devices. The various protocols are compared, along with the advantages and disadvantages of each protocol.

---

## 1. What does secure and reliable communication between devices entail?

When working with IoT devices, it is important that the communication between these devices is secure and reliable. IoT devices collect data about their environment and exchange that data with other devices over the internet. If this communication is not secure, hackers can intercept and misuse the data (Ministerie van Algemene Zaken, 2022). You want to prevent hackers from gaining access to sensitive information such as personal data or trade secrets. That is why the communication between devices must be secure and reliable; a minimal sketch of what this can look like for the MQTT stack used in this project follows below.
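One way to make the MQTT link used in this project confidential as well as authenticated is to run it over TLS. The sketch below uses the Paho MQTT C++ client that this repository already links against; it assumes a Paho build with SSL enabled, and the broker port 8883 and the ca.crt trust-store file are illustrative assumptions rather than values from this project:

#include <iostream>
#include <mqtt/async_client.h>

int main() {
    // Illustrative values: the ssl:// address, port and CA file are assumptions.
    mqtt::async_client client("ssl://145.92.224.21:8883", "secure_example_client");

    // TLS settings: verify the broker against a CA certificate.
    mqtt::ssl_options sslopts;
    sslopts.set_trust_store("ca.crt");

    mqtt::connect_options connopts;
    connopts.set_ssl(sslopts);
    connopts.set_user_name("rpi");            // authenticate the client
    connopts.set_password("rpiwachtwoordofzo");

    try {
        client.connect(connopts)->wait();     // encrypted and authenticated session
        std::cout << "Connected over TLS" << std::endl;
        client.disconnect()->wait();
    } catch (const mqtt::exception &exc) {
        std::cerr << "Error: " << exc.what() << std::endl;
        return 1;
    }
    return 0;
}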
## 2. Which protocols are available to communicate securely and reliably between devices?

There are several kinds of protocols that

## 3. What are the advantages and disadvantages of the different protocols?

## References

- Singh, S., & Jyoti. (2024, June 7). Secure communications protocols for IoT networks: a survey. https://journal.ijprse.com/index.php/ijprse/article/view/1082
- Nguyen, K. T., Laurent, M., Oualha, N., CEA, & Institut Mines-Telecom. (2015). Survey on secure communication protocols for the Internet of Things. Ad Hoc Networks, 32, 17–31. http://dx.doi.org/10.1016/j.adhoc.2015.01.006
- Miorandi, D., Sicari, S., De Pellegrini, F., & Chlamtac, I. (2012). Internet of things: Vision, applications and research challenges. Ad Hoc Networks, 10, 1497–1516. Elsevier B.V. http://dx.doi.org/10.1016/j.adhoc.2012.02.016
- Christiano, P. (2023, November 5). Top 9 IoT communication protocols & their features in 2024: An in-depth guide. ExpertBeacon. https://expertbeacon.com/iot-communication-protocol/
- Yugha, R., & Chithra, S. (2020). A survey on technologies and security protocols: Reference for future generation IoT. Journal of Network and Computer Applications, 169, 102763. https://doi.org/10.1016/j.jnca.2020.102763
- De Mendizábal, I. (2022, June 16). IoT communication protocols: IoT data protocols. All About Circuits, Technical Articles. https://www.allaboutcircuits.com/technical-articles/internet-of-things-communication-protocols-iot-data-protocols/
- IoT-technologieën en -protocollen | Microsoft Azure. (n.d.). https://azure.microsoft.com/nl-nl/solutions/iot/iot-technology-protocols
- Het IoT verbinden: wat is MQTT en waarin verschilt het van CoAP? (n.d.). OnLogic. https://www.onlogic.com/nl/blog/het-iot-verbinden-wat-is-mqtt-en-waarin-verschilt-het-van-coap/
- Nader, K. (2023, October 30). Wat zijn de voordelen van het gebruik van WebSocket voor IoT-communicatie? AppMaster. https://appmaster.io/nl/blog/websocket-voor-iot-communicatie
- Sidna, J., Amine, B., Abdallah, N., & Alami, H. E. (2020). Analysis and evaluation of communication protocols for IoT applications. Karbala International Journal of Modern Science. https://doi.org/10.1145/3419604.3419754
- Ministerie van Algemene Zaken. (2022, February 8). Hoe kan ik slimme apparaten veilig gebruiken? Rijksoverheid.nl. https://www.rijksoverheid.nl/onderwerpen/bescherming-van-consumenten/vraag-en-antwoord/hoe-kan-ik-slimme-apparaten-veilig-gebruiken