Mirror of https://github.com/OSURoboticsClub/Rover_2017_2018.git (synced 2025-11-09 02:31:14 +00:00)
Updated layout of the main launcher, as well as ui files.
50  ground_station/Framework/DriveSystems/RoverDriveSender.py  Normal file
@@ -0,0 +1,50 @@
import sys
from PyQt5 import QtWidgets, QtCore, QtGui, uic
import signal
import rospy
import time
from cv_bridge import CvBridge, CvBridgeError
import cv2
import qimage2ndarray
import numpy as np

from geometry_msgs.msg import Twist
from sensor_msgs.msg import CompressedImage
#from sensor_msgs.msg import Image, CompressedImage


class DriveTest(QtCore.QThread):
    publish_message_signal = QtCore.pyqtSignal()

    def __init__(self):
        super(DriveTest, self).__init__()

        self.not_abort = True

        self.message = None
        self.publisher = rospy.Publisher("/cmd_vel", Twist, queue_size=10)

        rospy.init_node("test")

    def run(self):
        # TODO: Thread starting message here
        while self.not_abort:
            self.message = Twist()

            self.message.linear.x = 1.0
            self.message.angular.z = 1.0

            self.publisher.publish(self.message)

            self.msleep(100)
        # TODO: Thread ending message here

    def __publish_message(self):
        pass

    def setup_start_and_kill_signals(self, start_signal, signals_and_slots_signal, kill_signal):
        start_signal.connect(self.start)
        kill_signal.connect(self.on_kill_threads_requested__slot)

    def on_kill_threads_requested__slot(self):
        self.not_abort = False
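For a quick sanity check of the /cmd_vel stream DriveTest produces, a minimal listener along these lines could run beside it (node and callback names are illustrative, not part of the commit):

# Minimal sketch: echo the Twist messages DriveTest publishes on /cmd_vel.
import rospy
from geometry_msgs.msg import Twist


def echo_cmd_vel(message):
    # DriveTest sends linear.x = 1.0 and angular.z = 1.0 roughly every 100 ms
    print("linear.x=%.2f angular.z=%.2f" % (message.linear.x, message.angular.z))


if __name__ == "__main__":
    rospy.init_node("cmd_vel_echo")
    rospy.Subscriber("/cmd_vel", Twist, echo_cmd_vel)
    rospy.spin()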
0  ground_station/Framework/DriveSystems/__init__.py  Normal file
@@ -0,0 +1,65 @@
#####################################
# Imports
#####################################
# Python native imports
from PyQt5 import QtCore, QtGui, QtWidgets
import logging
import cv2
import numpy as np
import qimage2ndarray
import pprint

import rospy
from cv_bridge import CvBridge
from sensor_msgs.msg import CompressedImage

# Custom Imports

#####################################
# Global Variables
#####################################
FONT = cv2.FONT_HERSHEY_TRIPLEX


#####################################
# RoverVideoReceiver Class Definition
#####################################
class RoverVideoReceiver(QtCore.QThread):
    def __init__(self, shared_objects):
        super(RoverVideoReceiver, self).__init__()

        # ########## Reference to class init variables ##########
        self.shared_objects = shared_objects
        self.right_screen = self.shared_objects["screens"]["right_screen"]
        self.primary_video_display_label = self.right_screen.primary_video_label  # type:QtWidgets.QLabel
        self.secondary_video_display_label = self.right_screen.secondary_video_label  # type:QtWidgets.QLabel
        self.tertiary_video_display_label = self.right_screen.tertiary_video_label  # type:QtWidgets.QLabel

        # ########## Get the settings instance ##########
        self.settings = QtCore.QSettings()

        # ########## Get the Pick And Plate instance of the logger ##########
        self.logger = logging.getLogger("groundstation")

        # ########## Thread Flags ##########
        self.run_thread_flag = True

    def run(self):
        self.logger.debug("Starting Video Coordinator Thread")

        while self.run_thread_flag:
            self.msleep(100)

        self.logger.debug("Stopping Video Coordinator Thread")

    def connect_signals_and_slots(self):
        pass

    def setup_signals(self, start_signal, signals_and_slots_signal, kill_signal):
        start_signal.connect(self.start)
        signals_and_slots_signal.connect(self.connect_signals_and_slots)
        kill_signal.connect(self.on_kill_threads_requested__slot)

    def on_kill_threads_requested__slot(self):
        self.run_thread_flag = False
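The constructor above indexes into a shared_objects dictionary that the launcher builds later in this commit. A minimal sketch of the shape it expects (the stand-in screen class and QApplication setup are illustrative, not from the commit):

# Sketch of the dictionary shape RoverVideoReceiver(shared_objects) dereferences.
import sys
from PyQt5 import QtWidgets

app = QtWidgets.QApplication(sys.argv)


class StandInRightScreen(object):
    # The real object is the loaded right_screen.ui window; only the three
    # QLabel attributes matter to the receiver.
    def __init__(self):
        self.primary_video_label = QtWidgets.QLabel()
        self.secondary_video_label = QtWidgets.QLabel()
        self.tertiary_video_label = QtWidgets.QLabel()


shared_objects = {
    "screens": {"right_screen": StandInRightScreen()},
    "regular_classes": {},
    "threaded_classes": {}
}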
136  ground_station/Framework/VideoSystems/RoverVideoReceiver.py  Normal file
@@ -0,0 +1,136 @@
#####################################
# Imports
#####################################
# Python native imports
from PyQt5 import QtCore, QtGui, QtWidgets
import logging
import cv2
import numpy as np
import qimage2ndarray
import pprint

import rospy
from cv_bridge import CvBridge
from sensor_msgs.msg import CompressedImage

# Custom Imports

#####################################
# Global Variables
#####################################
FONT = cv2.FONT_HERSHEY_TRIPLEX


#####################################
# RoverVideoReceiver Class Definition
#####################################
class RoverVideoReceiver(QtCore.QThread):
    publish_message_signal = QtCore.pyqtSignal()
    image_ready_signal = QtCore.pyqtSignal()

    def __init__(self, shared_objects, video_display_label, topic_path):
        super(RoverVideoReceiver, self).__init__()

        # ########## Reference to class init variables ##########
        self.shared_objects = shared_objects
        self.video_display_label = video_display_label  # type:QtWidgets.QLabel
        self.topic_path = topic_path

        # ########## Get the settings instance ##########
        self.settings = QtCore.QSettings()

        # ########## Get the Pick And Plate instance of the logger ##########
        self.logger = logging.getLogger("groundstation")

        # ########## Thread Flags ##########
        self.run_thread_flag = True

        # ########## Class Variables ##########
        # Subscription variables
        # self.video_subscriber = rospy.Subscriber(self.topic_path, CompressedImage,
        #                                          self.__image_data_received_callback)  # type: rospy.Subscriber

        topics = rospy.get_published_topics(self.topic_path)

        for group in topics:
            if "image_" in group[0]:
                print group[0]

        self.subscription_queue_size = 10

        # Stream name variable
        self.video_name = self.topic_path.split("/")[2].replace("_", " ").title()

        # Image variables
        self.raw_image = None
        self.opencv_image = None
        self.pixmap = None

        # Processing variables
        self.bridge = CvBridge()  # OpenCV ROS Video Data Processor
        self.video_enabled = False
        self.new_frame = False

        # Assign local callbacks
        self.__set_local_callbacks()

    def run(self):
        self.logger.debug("Starting \"%s\" Thread" % self.video_name)

        while self.run_thread_flag:
            if self.video_enabled:
                self.__show_video_enabled()
            else:
                self.__show_video_disabled()

            self.msleep(18)

        self.logger.debug("Stopping \"%s\" Thread" % self.video_name)

    def __show_video_enabled(self):
        if self.new_frame:
            self.opencv_image = self.bridge.compressed_imgmsg_to_cv2(self.raw_image, "rgb8")
            self.opencv_image = cv2.resize(self.opencv_image, (1280, 720))
            self.pixmap = QtGui.QPixmap.fromImage(qimage2ndarray.array2qimage(self.opencv_image))
            self.image_ready_signal.emit()
            self.new_frame = False

    def __show_video_disabled(self):
        if self.new_frame:
            fps_image = np.zeros((720, 1280, 3), np.uint8)

            self.pixmap = QtGui.QPixmap.fromImage(qimage2ndarray.array2qimage(fps_image))
            self.image_ready_signal.emit()
            self.new_frame = False

    def __on_image_update_ready(self):
        self.video_display_label.setPixmap(self.pixmap)

    def __image_data_received_callback(self, raw_image):
        self.raw_image = raw_image
        self.new_frame = True

    def __set_local_callbacks(self):
        self.video_display_label.mouseDoubleClickEvent = self.toggle_video_display

    def toggle_video_display(self, _):
        if self.video_enabled:
            self.video_subscriber.unregister()
            self.new_frame = True
            self.video_enabled = False
        else:
            self.video_subscriber = rospy.Subscriber(self.topic_path, CompressedImage,
                                                     self.__image_data_received_callback)
            self.video_enabled = True

    def connect_signals_and_slots(self):
        self.image_ready_signal.connect(self.__on_image_update_ready)

    def setup_signals(self, start_signal, signals_and_slots_signal, kill_signal):
        start_signal.connect(self.start)
        signals_and_slots_signal.connect(self.connect_signals_and_slots)
        kill_signal.connect(self.on_kill_threads_requested__slot)

    def on_kill_threads_requested__slot(self):
        self.run_thread_flag = False
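Once CvBridge has decoded the CompressedImage, the display path above is three conversions in a row: numpy array, QImage, QPixmap. The same chain on a synthetic frame, outside ROS (the array contents and sizes below are placeholders):

# Standalone sketch of the array -> QImage -> QPixmap chain used by the receiver.
import sys
import numpy as np
import cv2
import qimage2ndarray
from PyQt5 import QtGui, QtWidgets

app = QtWidgets.QApplication(sys.argv)

frame = np.zeros((360, 640, 3), np.uint8)   # stand-in for a decoded camera frame
frame = cv2.resize(frame, (1280, 720))      # scale to the display resolution
pixmap = QtGui.QPixmap.fromImage(qimage2ndarray.array2qimage(frame))

label = QtWidgets.QLabel()
label.setPixmap(pixmap)                     # what __on_image_update_ready does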
134  ground_station/Framework/VideoSystems/RoverVideoReceiverOld.py  Normal file
@@ -0,0 +1,134 @@
import sys
from PyQt5 import QtWidgets, QtCore, QtGui, uic
import signal
import rospy
import time
from cv_bridge import CvBridge, CvBridgeError
import cv2
import qimage2ndarray
import numpy as np

from geometry_msgs.msg import Twist
from sensor_msgs.msg import CompressedImage
#from sensor_msgs.msg import Image, CompressedImage


class VideoTest(QtCore.QThread):
    publish_message_signal = QtCore.pyqtSignal()
    image_ready_signal = QtCore.pyqtSignal()

    def __init__(self, shared_objects, screen_label, video_size=None, sub_path=None):
        super(VideoTest, self).__init__()

        self.not_abort = True

        self.shared_objects = shared_objects

        self.right_screen_label = screen_label  # type: QtGui.QPixmap
        self.video_size = video_size

        self.message = None

        self.publisher = rospy.Subscriber(sub_path, CompressedImage, self.__receive_message)

        self.raw_image = None
        self.cv_image = None
        self.pixmap = None
        self.bridge = CvBridge()
        # self.bridge.com

        self.new_frame = False
        self.frame_count = 0
        self.last_frame_time = time.time()
        self.fps = 0

        self.name = sub_path.split("/")[2].replace("_", " ").title()

        self.font = cv2.FONT_HERSHEY_TRIPLEX

        thickness = 1
        baseline = 0

        text_size = cv2.getTextSize(self.name, self.font, thickness, baseline)
        print text_size

        text_width, text_height = text_size[0]

        width = text_width + 10
        height = text_height + 20

        self.blank_image = np.zeros((height, width, 3), np.uint8)
        cv2.putText(self.blank_image, self.name, ((width - text_width) / 2, int((height * 2) / 3)), self.font, 1, (255, 255, 255), 1, cv2.LINE_AA)
        self.blank_image = cv2.resize(self.blank_image, (width / 2, height / 2))

    def run(self):
        # TODO: Thread starting message here
        y_offset = 0
        x_offset = 0

        while self.not_abort:
            if self.raw_image and self.new_frame:
                self.cv_image = self.bridge.compressed_imgmsg_to_cv2(self.raw_image, "rgb8")

                self.cv_image = self.__show_fps(self.cv_image)

                self.cv_image[y_offset:y_offset + self.blank_image.shape[0], x_offset:x_offset + self.blank_image.shape[1]] = self.blank_image

                if self.video_size:
                    self.cv_image = cv2.resize(self.cv_image, self.video_size)
                self.pixmap = QtGui.QPixmap.fromImage(qimage2ndarray.array2qimage(self.cv_image))
                self.image_ready_signal.emit()
                self.new_frame = False

            if (time.time() - self.last_frame_time) >= 0.5:
                self.fps = int(self.frame_count / (time.time() - self.last_frame_time))

                self.last_frame_time = time.time()
                self.frame_count = 0

            self.msleep(18)
        # TODO: Thread ending message here

    def __show_fps(self, image):
        thickness = 1
        baseline = 0

        fps_string = str(self.fps)

        text_size = cv2.getTextSize(fps_string, self.font, thickness, baseline)

        text_width, text_height = text_size[0]

        width = text_width + 10
        height = text_height + 20

        fps_image = np.zeros((height, width, 3), np.uint8)

        cv2.putText(fps_image, fps_string, ((width - text_width) / 2, int((height * 2) / 3)), self.font, 1, (255, 255, 255), 1, cv2.LINE_AA)
        fps_image = cv2.resize(fps_image, (width / 2, height / 2))

        y_offset = 0
        x_offset = (image.shape[1] - fps_image.shape[1]) / 2

        image[y_offset:y_offset + fps_image.shape[0], x_offset:x_offset + fps_image.shape[1]] = fps_image

        return image

    def __on_image_update_ready(self):
        self.right_screen_label.setPixmap(self.pixmap)

    def __receive_message(self, message):
        self.raw_image = message
        self.new_frame = True
        self.frame_count += 1

    def connect_signals_and_slots(self):
        self.image_ready_signal.connect(self.__on_image_update_ready)

    def setup_signals(self, start_signal, signals_and_slots_signal, kill_signal):
        start_signal.connect(self.start)
        signals_and_slots_signal.connect(self.connect_signals_and_slots)
        kill_signal.connect(self.on_kill_threads_requested__slot)

    def on_kill_threads_requested__slot(self):
        self.not_abort = False
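The FPS overlay in __show_fps is driven by simple bookkeeping in run(): frames are counted in the subscriber callback and, once at least 0.5 s has passed, divided by the elapsed wall time (for example, 15 frames over 0.5 s gives int(15 / 0.5) = 30). A stripped-down sketch of that bookkeeping (function names here are illustrative):

import time

frame_count = 0
last_frame_time = time.time()
fps = 0


def count_frame():
    # Mirrors __receive_message: one increment per frame that arrives.
    global frame_count
    frame_count += 1


def update_fps():
    # Mirrors the check in VideoTest.run: recompute once >= 0.5 s has elapsed.
    global frame_count, last_frame_time, fps
    elapsed = time.time() - last_frame_time
    if elapsed >= 0.5:
        fps = int(frame_count / elapsed)
        last_frame_time = time.time()
        frame_count = 0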
0  ground_station/Framework/VideoSystems/__init__.py  Normal file
0  ground_station/Framework/__init__.py  Normal file
@@ -1,10 +1,4 @@
 #!/usr/bin/env python
-
-"""
-Main file used to launch the Rover Base Station
-No other files should be used for launching this application.
-"""
-
 #####################################
 # Imports
 #####################################
@@ -13,190 +7,27 @@ import sys
 from PyQt5 import QtWidgets, QtCore, QtGui, uic
 import signal
 import rospy
-import time
-from cv_bridge import CvBridge, CvBridgeError
-import cv2
-import qimage2ndarray
-import numpy as np
-
-from geometry_msgs.msg import Twist
-from sensor_msgs.msg import CompressedImage
-#from sensor_msgs.msg import Image, CompressedImage
 # Custom Imports
+import Framework.VideoSystems.RoverVideoCoordinator as RoverVideoCoordinator
 
 #####################################
 # Global Variables
 #####################################
-UI_FILE_LEFT = "resources/ui/left_screen.ui"
-UI_FILE_RIGHT = "resources/ui/right_screen.ui"
+UI_FILE_LEFT = "Resources/Ui/left_screen.ui"
+UI_FILE_RIGHT = "Resources/Ui/right_screen.ui"
 
 #####################################
 # Class Organization
 #####################################
 # Class Name:
 # "init"
+# "run (if there)" - personal pref
 # "private methods"
 # "public methods, minus slots"
 # "slot methods"
 # "static methods"
-# "run (if there)" - personal pref
-
-
-class VideoTest(QtCore.QThread):
-    ROS_CORE_COMMAND = ["roscore"]
-
-    publish_message_signal = QtCore.pyqtSignal()
-    image_ready_signal = QtCore.pyqtSignal()
-
-    def __init__(self, screen_label, video_size=None, sub_path=None):
-        super(VideoTest, self).__init__()
-
-        self.not_abort = True
-
-        self.right_screen_label = screen_label  # type: QtGui.QPixmap
-        self.video_size = video_size
-
-        #rospy.init_node("video_test")
-
-        self.message = None
-
-        self.publisher = rospy.Subscriber(sub_path, CompressedImage, self.__receive_message)
-
-        self.raw_image = None
-        self.cv_image = None
-        self.pixmap = None
-        self.bridge = CvBridge()
-        # self.bridge.com
-
-        self.image_ready_signal.connect(self.__on_image_update_ready)
-
-        self.new_frame = False
-        self.frame_count = 0
-        self.last_frame_time = time.time()
-        self.fps = 0
-
-        self.name = sub_path.split("/")[2].replace("_", " ").title()
-
-        self.font = cv2.FONT_HERSHEY_TRIPLEX
-
-        thickness = 1
-        baseline = 0
-
-        text_size = cv2.getTextSize(self.name, self.font, thickness, baseline)
-        print text_size
-
-        text_width, text_height = text_size[0]
-
-        width = text_width + 10
-        height = text_height + 20
-
-        self.blank_image = np.zeros((height, width, 3), np.uint8)
-        cv2.putText(self.blank_image, self.name, ((width - text_width) / 2, int((height * 2) / 3)), self.font, 1, (255, 255, 255), 1, cv2.LINE_AA)
-        self.blank_image = cv2.resize(self.blank_image, (width / 2, height / 2))
-
-    def run(self):
-        # TODO: Thread starting message here
-        y_offset = 0
-        x_offset = 0
-
-        while self.not_abort:
-            if self.raw_image and self.new_frame:
-                self.cv_image = self.bridge.compressed_imgmsg_to_cv2(self.raw_image, "rgb8")
-
-                self.cv_image = self.__show_fps(self.cv_image)
-
-                self.cv_image[y_offset:y_offset + self.blank_image.shape[0], x_offset:x_offset + self.blank_image.shape[1]] = self.blank_image
-
-                if self.video_size:
-                    self.cv_image = cv2.resize(self.cv_image, self.video_size)
-                self.pixmap = QtGui.QPixmap.fromImage(qimage2ndarray.array2qimage(self.cv_image))
-                self.image_ready_signal.emit()
-                self.new_frame = False
-
-            if (time.time() - self.last_frame_time) >= 0.5:
-                self.fps = int(self.frame_count / (time.time() - self.last_frame_time))
-
-                self.last_frame_time = time.time()
-                self.frame_count = 0
-
-            self.msleep(18)
-        # TODO: Thread ending message here
-
-    def __show_fps(self, image):
-        thickness = 1
-        baseline = 0
-
-        fps_string = str(self.fps)
-
-        text_size = cv2.getTextSize(fps_string, self.font, thickness, baseline)
-
-        text_width, text_height = text_size[0]
-
-        width = text_width + 10
-        height = text_height + 20
-
-        fps_image = np.zeros((height, width, 3), np.uint8)
-
-        cv2.putText(fps_image, fps_string, ((width - text_width) / 2, int((height * 2) / 3)), self.font, 1, (255, 255, 255), 1, cv2.LINE_AA)
-        fps_image = cv2.resize(fps_image, (width / 2, height / 2))
-
-        y_offset = 0
-        x_offset = (image.shape[1] - fps_image.shape[1]) / 2
-
-        image[y_offset:y_offset + fps_image.shape[0], x_offset:x_offset + fps_image.shape[1]] = fps_image
-
-        return image
-
-    def __on_image_update_ready(self):
-        self.right_screen_label.setPixmap(self.pixmap)
-
-    def __receive_message(self, message):
-        self.raw_image = message
-        self.new_frame = True
-        self.frame_count += 1
-
-    def setup_start_and_kill_signals(self, start_signal, kill_signal):
-        start_signal.connect(self.start)
-        kill_signal.connect(self.on_kill_threads_requested__slot)
-
-    def on_kill_threads_requested__slot(self):
-        self.not_abort = False
-
-
-class DriveTest(QtCore.QThread):
-    publish_message_signal = QtCore.pyqtSignal()
-
-    def __init__(self):
-        super(DriveTest, self).__init__()
-
-        self.not_abort = True
-
-        self.message = None
-        self.publisher = rospy.Publisher("/cmd_vel", Twist, queue_size=10)
-
-        rospy.init_node("test")
-
-    def run(self):
-        # TODO: Thread starting message here
-        while self.not_abort:
-            self.message = Twist()
-
-            self.message.linear.x = 1.0
-            self.message.angular.z = 1.0
-
-            self.publisher.publish(self.message)
-
-            self.msleep(100)
-        # TODO: Thread ending message here
-
-    def __publish_message(self):
-        pass
-
-    def setup_start_and_kill_signals(self, start_signal, kill_signal):
-        start_signal.connect(self.start)
-        kill_signal.connect(self.on_kill_threads_requested__slot)
-
-    def on_kill_threads_requested__slot(self):
-        self.not_abort = False
-
-
 #####################################
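The "# Class Organization" comment block kept above documents the method ordering used across these classes; a bare skeleton of that ordering, with illustrative names only:

from PyQt5 import QtCore


class ExampleWorker(QtCore.QThread):
    def __init__(self):                              # "init"
        super(ExampleWorker, self).__init__()
        self.run_thread_flag = True

    def run(self):                                   # "run (if there)"
        while self.run_thread_flag:
            self.msleep(100)

    def __internal_helper(self):                     # "private methods"
        pass

    def request_something(self):                     # "public methods, minus slots"
        pass

    def on_kill_threads_requested__slot(self):       # "slot methods"
        self.run_thread_flag = False

    @staticmethod
    def helper_utility():                            # "static methods"
        pass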
@@ -223,47 +54,70 @@ class GroundStation(QtCore.QObject):
     RIGHT_SCREEN_ID = 1
 
     start_threads_signal = QtCore.pyqtSignal()
+    connect_signals_and_slots_signal = QtCore.pyqtSignal()
     kill_threads_signal = QtCore.pyqtSignal()
 
     def __init__(self, parent=None,):
         # noinspection PyArgumentList
         super(GroundStation, self).__init__(parent)
 
-        # self.left_screen = self.create_application_window(UI_FILE_LEFT, "Rover Ground Station Left Screen",
-        #                                                   self.LEFT_SCREEN_ID)  # type: ApplicationWindow
-        self.right_screen = self.create_application_window(UI_FILE_RIGHT, "Rover Ground Station Right Screen",
+        rospy.init_node("ground_station")
+
+        self.shared_objects = {
+            "screens": {},
+            "regular_classes": {},
+            "threaded_classes": {}
+        }
+
+        # ###### Instantiate Left And Right Screens #####
+        self.shared_objects["screens"]["left_screen"] = \
+            self.create_application_window(UI_FILE_LEFT, "Rover Ground Station Left Screen",
                                            self.LEFT_SCREEN_ID)  # type: ApplicationWindow
 
-        # Start ROSCORE
-        self.video_test = VideoTest(self.right_screen.primary_video_label, (1280, 720), sub_path="/cameras/main_navigation/image_640x360/compressed")
-        self.video_test_1 = VideoTest(self.right_screen.secondary_video_label, (640, 360), sub_path="/cameras/chassis/image_640x360/compressed")
-        self.video_test_2 = VideoTest(self.right_screen.tertiary_video_label, (640, 360), sub_path="/cameras/undercarriage/image_640x360/compressed")
-        self.drive_test = DriveTest()
+        self.shared_objects["screens"]["right_screen"] = \
+            self.create_application_window(UI_FILE_RIGHT, "Rover Ground Station Right Screen",
+                                           self.RIGHT_SCREEN_ID)  # type: ApplicationWindow
 
-        # Keep track of all threads
-        self.threads = []
-        self.threads.append(self.drive_test)
-        self.threads.append(self.video_test)
-        self.threads.append(self.video_test_1)
-        self.threads.append(self.video_test_2)
+        # ##### Instantiate Simple Classes #####
 
-        # Connect signals
-        for thread in self.threads:
-            thread.setup_start_and_kill_signals(self.start_threads_signal, self.kill_threads_signal)
+        # ##### Instantiate Threaded Classes #####
+        self
+        # self.__add_thread("Primary Video",
+        #                   RoverVideoReceiver.RoverVideoReceiver(
+        #                       self.shared_objects,
+        #                       self.shared_objects["screens"]["right_screen"].primary_video_label,
+        #                       "/cameras/main_navigation/"))
+        # self.__add_thread("Secondary Video",
+        #                   RoverVideoReceiverOld.VideoTest(
+        #                       self.shared_objects,
+        #                       self.shared_objects["screens"]["right_screen"].secondary_video_label,
+        #                       (640, 360),
+        #                       sub_path="/cameras/chassis/image_640x360/compressed"))
+        # self.__add_thread("Tertiary Video",
+        #                   RoverVideoReceiverOld.VideoTest(
+        #                       self.shared_objects,
+        #                       self.shared_objects["screens"]["right_screen"].tertiary_video_label,
+        #                       (640, 360),
+        #                       sub_path="/cameras/undercarriage/image_640x360/compressed"))
+
+        self.connect_signals_and_slots_signal.emit()
         self.__connect_signals_to_slots()
         self.start_threads_signal.emit()
 
+    def __add_thread(self, thread_name, instance):
+        self.shared_objects["threaded_classes"][thread_name] = instance
+        instance.setup_signals(self.start_threads_signal, self.connect_signals_and_slots_signal, self.kill_threads_signal)
+
     def __connect_signals_to_slots(self):
-        # self.left_screen.exit_requested_signal.connect(self.on_exit_requested__slot)
-        self.right_screen.exit_requested_signal.connect(self.on_exit_requested__slot)
+        self.shared_objects["screens"]["left_screen"].exit_requested_signal.connect(self.on_exit_requested__slot)
+        self.shared_objects["screens"]["right_screen"].exit_requested_signal.connect(self.on_exit_requested__slot)
 
     def on_exit_requested__slot(self):
         self.kill_threads_signal.emit()
 
         # Wait for Threads
-        for thread in self.threads:
-            thread.wait()
+        for thread in self.threads["threaded_classes"]:
+            self.threads["threaded_classes"][thread].wait()
 
         QtGui.QGuiApplication.exit()
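The launcher's threading model reduces to three class-level signals (start, connect-signals-and-slots, kill) that every registered QThread hooks into via setup_signals. A self-contained sketch of that lifecycle, with an illustrative Worker and Coordinator standing in for the real classes:

# Sketch of the start / connect-signals / kill lifecycle the launcher drives.
# Worker and Coordinator are stand-ins, not classes from this commit.
import sys
from PyQt5 import QtCore, QtWidgets


class Worker(QtCore.QThread):
    def __init__(self):
        super(Worker, self).__init__()
        self.run_thread_flag = True

    def setup_signals(self, start_signal, signals_and_slots_signal, kill_signal):
        start_signal.connect(self.start)
        signals_and_slots_signal.connect(self.connect_signals_and_slots)
        kill_signal.connect(self.on_kill_threads_requested__slot)

    def connect_signals_and_slots(self):
        pass  # cross-thread signal hookups would go here

    def run(self):
        while self.run_thread_flag:
            self.msleep(100)

    def on_kill_threads_requested__slot(self):
        self.run_thread_flag = False


class Coordinator(QtCore.QObject):
    start_threads_signal = QtCore.pyqtSignal()
    connect_signals_and_slots_signal = QtCore.pyqtSignal()
    kill_threads_signal = QtCore.pyqtSignal()


if __name__ == "__main__":
    app = QtWidgets.QApplication(sys.argv)
    coordinator = Coordinator()
    worker = Worker()
    worker.setup_signals(coordinator.start_threads_signal,
                         coordinator.connect_signals_and_slots_signal,
                         coordinator.kill_threads_signal)
    coordinator.connect_signals_and_slots_signal.emit()
    coordinator.start_threads_signal.emit()
    QtCore.QTimer.singleShot(500, coordinator.kill_threads_signal.emit)
    QtCore.QTimer.singleShot(700, app.quit)
    sys.exit(app.exec_())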