diff --git a/db.sqlite3 b/db.sqlite3
index f37ee0ba0..876dd98f8 100644
Binary files a/db.sqlite3 and b/db.sqlite3 differ
diff --git a/exercises/static/exercises/follow_road_newmanager/python_template/ros2_humble/GUI.py b/exercises/static/exercises/follow_road_newmanager/python_template/ros2_humble/GUI.py
new file mode 100644
index 000000000..bcbe995a9
--- /dev/null
+++ b/exercises/static/exercises/follow_road_newmanager/python_template/ros2_humble/GUI.py
@@ -0,0 +1,129 @@
+import json
+import cv2
+import base64
+import threading
+import time
+import websocket
+from src.manager.ram_logging.log_manager import LogManager
+from console import start_console
+import numpy as np
+
+
+class ThreadingGUI:
+
+    def __init__(self, host="ws://127.0.0.1:2303", freq=30.0):
+
+        # Execution control vars
+        self.out_period = 1.0 / freq
+        self.right_image = None
+        self.left_image = None
+        self.image_lock = threading.Lock()
+        self.ack = True
+        self.ack_lock = threading.Lock()
+        self.running = True
+
+        self.host = host
+        self.msg = {"image_right": "", "image_left": ""}
+
+        # Initialize and start the WebSocket client thread
+        threading.Thread(target=self.run_websocket, daemon=True).start()
+
+        # Initialize and start the image sending thread (GUI out thread)
+        threading.Thread(
+            target=self.gui_out_thread, name="gui_out_thread", daemon=True
+        ).start()
+
+    # Init websocket client
+    def run_websocket(self):
+        self.client = websocket.WebSocketApp(self.host, on_message=self.gui_in_thread)
+        self.client.run_forever(ping_timeout=None, ping_interval=0)
+
+    # Process incoming messages to the GUI
+    def gui_in_thread(self, ws, message):
+
+        # In this case, the only expected incoming messages are acks
+        if "ack" in message:
+            with self.ack_lock:
+                self.ack = True
+        else:
+            LogManager.logger.error("Unsupported msg")
+
+    # Process outgoing messages from the GUI
+    def gui_out_thread(self):
+        while self.running:
+            start_time = time.time()
+
+            # Check if a new image should be sent
+            with self.ack_lock:
+                with self.image_lock:
+                    if self.ack:
+                        if np.any(self.left_image) or np.any(self.right_image):
+                            self.send_images()
+                            self.ack = False
+
+            # Maintain desired frequency
+            elapsed = time.time() - start_time
+            sleep_time = max(0, self.out_period - elapsed)
+            time.sleep(sleep_time)
+
+    # Prepare and send the images to the websocket server
+    def send_images(self):
+
+        if np.any(self.left_image):
+            _, encoded_left_image = cv2.imencode(".JPEG", self.left_image)
+            b64_left = base64.b64encode(encoded_left_image).decode("utf-8")
+            shape_left = self.left_image.shape
+        else:
+            b64_left = None
+            shape_left = 0
+
+        if np.any(self.right_image):
+            _, encoded_right_image = cv2.imencode(".JPEG", self.right_image)
+            b64_right = base64.b64encode(encoded_right_image).decode("utf-8")
+            shape_right = self.right_image.shape
+        else:
+            b64_right = None
+            shape_right = 0
+
+        payload_left = {
+            "image_left": b64_left,
+            "shape_left": shape_left,
+        }
+        payload_right = {
+            "image_right": b64_right,
+            "shape_right": shape_right,
+        }
+        self.msg["image_left"] = json.dumps(payload_left)
+        self.msg["image_right"] = json.dumps(payload_right)
+        message = json.dumps(self.msg)
+        try:
+            if self.client:
+                self.client.send(message)
+        except Exception as e:
+            LogManager.logger.info(f"Error sending message: {e}")
+
+    # Functions to set the next image to be sent
+    def setLeftImage(self, image):
+        with self.image_lock:
+            self.left_image = image
+
+    def setRightImage(self, image):
+        with self.image_lock:
+            self.right_image = image
+
+
+host = "ws://127.0.0.1:2303"
+gui = ThreadingGUI(host)
+
+
+# Redirect the console
+start_console()
+
+
+# Expose the user functions
+def showImage(image):
+    gui.setRightImage(image)
+
+
+def showLeftImage(image):
+    gui.setLeftImage(image)
diff --git a/exercises/static/exercises/follow_road_newmanager/python_template/ros2_humble/HAL.py b/exercises/static/exercises/follow_road_newmanager/python_template/ros2_humble/HAL.py
new file mode 100644
index 000000000..88fd85826
--- /dev/null
+++ b/exercises/static/exercises/follow_road_newmanager/python_template/ros2_humble/HAL.py
@@ -0,0 +1,115 @@
+import numpy as np
+import rclpy
+import threading
+import time
+
+from hal_interfaces.general.camera import CameraNode
+from jderobot_drones.drone_wrapper import DroneWrapper
+
+IMG_WIDTH = 320
+IMG_HEIGHT = 240
+freq = 15.0
+
+### HAL INIT ###
+
+print("HAL initializing", flush=True)
+if not rclpy.ok():
+    rclpy.init()
+
+
+    CAM_FRONTAL_TOPIC = "/" + "drone0" + "/sensor_measurements/frontal_camera/image_raw"
+    CAM_VENTRAL_TOPIC = "/" + "drone0" + "/sensor_measurements/ventral_camera/image_raw"
+
+    drone = DroneWrapper()
+    frontal_camera_node = CameraNode(CAM_FRONTAL_TOPIC)
+    ventral_camera_node = CameraNode(CAM_VENTRAL_TOPIC)
+
+    # Spin nodes so that subscription callbacks load topic data
+    executor = rclpy.executors.MultiThreadedExecutor()
+    executor.add_node(frontal_camera_node)
+    executor.add_node(ventral_camera_node)
+    def __auto_spin() -> None:
+        while rclpy.ok():
+            executor.spin_once(timeout_sec=0)
+            time.sleep(1/freq)
+    executor_thread = threading.Thread(target=__auto_spin, daemon=True)
+    executor_thread.start()
+
+### GETTERS ###
+
+
+def get_frontal_image():
+    image = frontal_camera_node.getImage()
+    while image is None:
+        image = frontal_camera_node.getImage()
+    return image.data
+
+
+def get_ventral_image():
+    image = ventral_camera_node.getImage()
+    while image is None:
+        image = ventral_camera_node.getImage()
+    return image.data
+
+
+def get_position():
+    pos = drone.get_position()
+    return pos
+
+
+def get_velocity():
+    vel = drone.get_velocity()
+    return vel
+
+
+def get_yaw_rate():
+    yaw_rate = drone.get_yaw_rate()
+    return yaw_rate
+
+
+def get_orientation():
+    orientation = drone.get_orientation()
+    return orientation
+
+
+def get_roll():
+    roll = drone.get_roll()
+    return roll
+
+
+def get_pitch():
+    pitch = drone.get_pitch()
+    return pitch
+
+
+def get_yaw():
+    yaw = drone.get_yaw()
+    return yaw
+
+
+def get_landed_state():
+    state = drone.get_landed_state()
+    return state
+
+
+### SETTERS ###
+
+
+def set_cmd_pos(x, y, z, az):
+    drone.set_cmd_pos(x, y, z, az)
+
+
+def set_cmd_vel(vx, vy, vz, az):
+    drone.set_cmd_vel(vx, vy, vz, az)
+
+
+def set_cmd_mix(vx, vy, z, az):
+    drone.set_cmd_mix(vx, vy, z, az)
+
+
+def takeoff(h=3):
+    drone.takeoff(h)
+
+
+def land():
+    drone.land()
diff --git a/exercises/static/exercises/follow_road_newmanager/python_template/ros2_humble/console.py b/exercises/static/exercises/follow_road_newmanager/python_template/ros2_humble/console.py
new file mode 100644
index 000000000..3c88c9d71
--- /dev/null
+++ b/exercises/static/exercises/follow_road_newmanager/python_template/ros2_humble/console.py
@@ -0,0 +1,19 @@
+# Functions to start and close console
+import os
+import sys
+
+
+def start_console():
+    # List the pseudo-terminal devices and choose the highest-numbered one
+    fds = os.listdir("/dev/pts/")
+    console_fd = str(max(int(fd) for fd in fds if fd.isdigit()))
+
+    sys.stderr = open('/dev/pts/' + console_fd, 'w')
+    sys.stdout = open('/dev/pts/' + console_fd, 'w')
+    sys.stdin = open('/dev/pts/' + console_fd, 'r')
+
+
+def close_console():
+    sys.stderr.close()
+    sys.stdout.close()
+    sys.stdin.close()
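
For context, a user solution on top of this template would combine the two new modules roughly as in the sketch below. It is an illustration only, not part of the patch: the import names HAL and GUI follow the convention implied by the module-level helper functions, HAL.get_frontal_image() is assumed to return a BGR NumPy frame, and the HSV range and control gains are placeholder values.

# Illustration only, not part of the patch. Assumes the exercise manager exposes
# the new files as importable modules named HAL and GUI, that
# HAL.get_frontal_image() returns a BGR NumPy frame, and that the HSV range and
# gains below are placeholders to be tuned for the actual road texture.
import time

import cv2
import GUI
import HAL

HAL.takeoff(3)  # climb to ~3 m before starting to follow the road

while True:
    frame = HAL.get_frontal_image()                       # frontal camera frame
    hsv = cv2.cvtColor(frame, cv2.COLOR_BGR2HSV)
    mask = cv2.inRange(hsv, (0, 0, 180), (180, 60, 255))  # assumed road colour range

    m = cv2.moments(mask)
    if m["m00"] > 0:
        cx = m["m10"] / m["m00"]                          # centroid column of the mask
        err = cx - frame.shape[1] / 2                     # horizontal offset in pixels
        HAL.set_cmd_vel(1.0, 0.0, 0.0, -0.005 * err)      # move forward, yaw toward the road
    else:
        HAL.set_cmd_vel(0.0, 0.0, 0.0, 0.3)               # road lost: rotate in place to search

    GUI.showImage(frame)                                  # right canvas: raw camera view
    GUI.showLeftImage(mask)                               # left canvas: segmentation mask
    time.sleep(0.05)

Because ThreadingGUI only sends a new frame after the previous one is acknowledged (the ack flag, at most freq = 30 Hz), calling showImage()/showLeftImage() on every loop iteration is safe: extra frames simply overwrite the pending one.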