diff --git a/common/helpers/cv2_pcl/src/cv2_pcl/__init__.py b/common/helpers/cv2_pcl/src/cv2_pcl/__init__.py
index 0c299f09a..05f3ef901 100644
--- a/common/helpers/cv2_pcl/src/cv2_pcl/__init__.py
+++ b/common/helpers/cv2_pcl/src/cv2_pcl/__init__.py
@@ -3,17 +3,24 @@
import ros_numpy as rnp
import cv2
+from typing import Tuple, Union
+
Mat = np.ndarray
-def pcl_to_cv2(pcl: PointCloud2) -> Mat:
+def pcl_to_cv2(
+ pcl: PointCloud2, height: Union[int, None] = None, width: Union[int, None] = None
+) -> Mat:
"""
Convert a given PointCloud2 message to a cv2 image
"""
+ height = height or pcl.height
+ width = width or pcl.width
+
# Extract rgb image from pointcloud
frame = np.fromstring(pcl.data, dtype=np.uint8)
- frame = frame.reshape(pcl.height, pcl.width, 32)
+ frame = frame.reshape(height, width, -1)
frame = frame[:, :, 16:19]
# Ensure array is contiguous
@@ -22,16 +29,25 @@ def pcl_to_cv2(pcl: PointCloud2) -> Mat:
return frame
-def seg_to_centroid(pcl: PointCloud2, xyseg: np.ndarray) -> np.ndarray:
+def seg_to_centroid(
+ pcl: PointCloud2,
+ xyseg: np.ndarray,
+ height: Union[int, None] = None,
+ width: Union[int, None] = None,
+) -> np.ndarray:
"""
Computes the centroid of a given segment in a pointcloud
"""
+ height = height or pcl.height
+ width = width or pcl.width
+
# Convert xyseg to contours
contours = xyseg.reshape(-1, 2)
# cv2 image
- cv_im = pcl_to_cv2(pcl)
+ cv_im = pcl_to_cv2(pcl, height, width)
+
# Compute mask from contours
mask = np.zeros(shape=cv_im.shape[:2])
cv2.fillPoly(mask, pts=[contours], color=(255, 255, 255))
@@ -44,6 +60,7 @@ def seg_to_centroid(pcl: PointCloud2, xyseg: np.ndarray) -> np.ndarray:
# Unpack pointcloud into xyz array
pcl_xyz = rnp.point_cloud2.pointcloud2_to_xyz_array(pcl, remove_nans=False)
+ pcl_xyz = pcl_xyz.reshape(height, width, 3)
# Extract points of interest
xyz_points = [pcl_xyz[x][y] for x, y in indices]
@@ -51,16 +68,27 @@ def seg_to_centroid(pcl: PointCloud2, xyseg: np.ndarray) -> np.ndarray:
return np.nanmean(xyz_points, axis=0)
-def bb_to_centroid(pcl: PointCloud2, x: int, y: int, w: int, h: int) -> np.ndarray:
+def bb_to_centroid(
+ pcl: PointCloud2,
+ x: int,
+ y: int,
+ w: int,
+ h: int,
+ height: Union[int, None] = None,
+ width: Union[int, None] = None,
+) -> np.ndarray:
"""
Computes the centroid of a given bounding box in a pointcloud
"""
+ height = height or pcl.height
+ width = width or pcl.width
+
# Convert xywh to bounding box coordinates.
x1, y1, x2, y2 = x, y, x + w, y + h
# cv2 image
- frame = pcl_to_cv2(pcl)
+ frame = pcl_to_cv2(pcl, height, width)
# Compute mask from bounding box
mask = np.zeros(shape=frame.shape[:2])
mask[y1:y2, x1:x2] = 255 # bounding box dimensions
@@ -72,6 +100,7 @@ def bb_to_centroid(pcl: PointCloud2, x: int, y: int, w: int, h: int) -> np.ndarr
# Unpack pointcloud into xyz array
pcl_xyz = rnp.point_cloud2.pointcloud2_to_xyz_array(pcl, remove_nans=False)
+ pcl_xyz = pcl_xyz.reshape(height, width, 3)
# Extract points of interest
xyz_points = [pcl_xyz[x][y] for x, y in indices]
diff --git a/common/helpers/navigation_helpers/CMakeLists.txt b/common/helpers/navigation_helpers/CMakeLists.txt
new file mode 100644
index 000000000..457b6155a
--- /dev/null
+++ b/common/helpers/navigation_helpers/CMakeLists.txt
@@ -0,0 +1,202 @@
+cmake_minimum_required(VERSION 3.0.2)
+project(navigation_helpers)
+
+## Compile as C++11, supported in ROS Kinetic and newer
+# add_compile_options(-std=c++11)
+
+## Find catkin macros and libraries
+## if COMPONENTS list like find_package(catkin REQUIRED COMPONENTS xyz)
+## is used, also find other catkin packages
+find_package(catkin REQUIRED geometry_msgs)
+
+## System dependencies are found with CMake's conventions
+# find_package(Boost REQUIRED COMPONENTS system)
+
+
+## Uncomment this if the package has a setup.py. This macro ensures
+## modules and global scripts declared therein get installed
+## See http://ros.org/doc/api/catkin/html/user_guide/setup_dot_py.html
+catkin_python_setup()
+
+################################################
+## Declare ROS messages, services and actions ##
+################################################
+
+## To declare and build messages, services or actions from within this
+## package, follow these steps:
+## * Let MSG_DEP_SET be the set of packages whose message types you use in
+## your messages/services/actions (e.g. std_msgs, actionlib_msgs, ...).
+## * In the file package.xml:
+## * add a build_depend tag for "message_generation"
+## * add a build_depend and a exec_depend tag for each package in MSG_DEP_SET
+## * If MSG_DEP_SET isn't empty the following dependency has been pulled in
+## but can be declared for certainty nonetheless:
+## * add a exec_depend tag for "message_runtime"
+## * In this file (CMakeLists.txt):
+## * add "message_generation" and every package in MSG_DEP_SET to
+## find_package(catkin REQUIRED COMPONENTS ...)
+## * add "message_runtime" and every package in MSG_DEP_SET to
+## catkin_package(CATKIN_DEPENDS ...)
+## * uncomment the add_*_files sections below as needed
+## and list every .msg/.srv/.action file to be processed
+## * uncomment the generate_messages entry below
+## * add every package in MSG_DEP_SET to generate_messages(DEPENDENCIES ...)
+
+## Generate messages in the 'msg' folder
+# add_message_files(
+# FILES
+# Message1.msg
+# Message2.msg
+# )
+
+## Generate services in the 'srv' folder
+# add_service_files(
+# FILES
+# Service1.srv
+# Service2.srv
+# )
+
+## Generate actions in the 'action' folder
+# add_action_files(
+# FILES
+# Action1.action
+# Action2.action
+# )
+
+## Generate added messages and services with any dependencies listed here
+# generate_messages(
+# DEPENDENCIES
+# std_msgs # Or other packages containing msgs
+# )
+
+################################################
+## Declare ROS dynamic reconfigure parameters ##
+################################################
+
+## To declare and build dynamic reconfigure parameters within this
+## package, follow these steps:
+## * In the file package.xml:
+## * add a build_depend and a exec_depend tag for "dynamic_reconfigure"
+## * In this file (CMakeLists.txt):
+## * add "dynamic_reconfigure" to
+## find_package(catkin REQUIRED COMPONENTS ...)
+## * uncomment the "generate_dynamic_reconfigure_options" section below
+## and list every .cfg file to be processed
+
+## Generate dynamic reconfigure parameters in the 'cfg' folder
+# generate_dynamic_reconfigure_options(
+# cfg/DynReconf1.cfg
+# cfg/DynReconf2.cfg
+# )
+
+###################################
+## catkin specific configuration ##
+###################################
+## The catkin_package macro generates cmake config files for your package
+## Declare things to be passed to dependent projects
+## INCLUDE_DIRS: uncomment this if your package contains header files
+## LIBRARIES: libraries you create in this project that dependent projects also need
+## CATKIN_DEPENDS: catkin_packages dependent projects also need
+## DEPENDS: system dependencies of this project that dependent projects also need
+catkin_package(
+# INCLUDE_DIRS include
+# LIBRARIES navigation_helpers
+# CATKIN_DEPENDS other_catkin_pkg
+# DEPENDS system_lib
+)
+
+###########
+## Build ##
+###########
+
+## Specify additional locations of header files
+## Your package locations should be listed before other locations
+include_directories(
+# include
+# ${catkin_INCLUDE_DIRS}
+)
+
+## Declare a C++ library
+# add_library(${PROJECT_NAME}
+# src/${PROJECT_NAME}/navigation_helpers.cpp
+# )
+
+## Add cmake target dependencies of the library
+## as an example, code may need to be generated before libraries
+## either from message generation or dynamic reconfigure
+# add_dependencies(${PROJECT_NAME} ${${PROJECT_NAME}_EXPORTED_TARGETS} ${catkin_EXPORTED_TARGETS})
+
+## Declare a C++ executable
+## With catkin_make all packages are built within a single CMake context
+## The recommended prefix ensures that target names across packages don't collide
+# add_executable(${PROJECT_NAME}_node src/navigation_helpers_node.cpp)
+
+## Rename C++ executable without prefix
+## The above recommended prefix causes long target names, the following renames the
+## target back to the shorter version for ease of user use
+## e.g. "rosrun someones_pkg node" instead of "rosrun someones_pkg someones_pkg_node"
+# set_target_properties(${PROJECT_NAME}_node PROPERTIES OUTPUT_NAME node PREFIX "")
+
+## Add cmake target dependencies of the executable
+## same as for the library above
+# add_dependencies(${PROJECT_NAME}_node ${${PROJECT_NAME}_EXPORTED_TARGETS} ${catkin_EXPORTED_TARGETS})
+
+## Specify libraries to link a library or executable target against
+# target_link_libraries(${PROJECT_NAME}_node
+# ${catkin_LIBRARIES}
+# )
+
+#############
+## Install ##
+#############
+
+# all install targets should use catkin DESTINATION variables
+# See http://ros.org/doc/api/catkin/html/adv_user_guide/variables.html
+
+## Mark executable scripts (Python etc.) for installation
+## in contrast to setup.py, you can choose the destination
+# catkin_install_python(PROGRAMS
+# scripts/my_python_script
+# DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION}
+# )
+
+## Mark executables for installation
+## See http://docs.ros.org/melodic/api/catkin/html/howto/format1/building_executables.html
+# install(TARGETS ${PROJECT_NAME}_node
+# RUNTIME DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION}
+# )
+
+## Mark libraries for installation
+## See http://docs.ros.org/melodic/api/catkin/html/howto/format1/building_libraries.html
+# install(TARGETS ${PROJECT_NAME}
+# ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION}
+# LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION}
+# RUNTIME DESTINATION ${CATKIN_GLOBAL_BIN_DESTINATION}
+# )
+
+## Mark cpp header files for installation
+# install(DIRECTORY include/${PROJECT_NAME}/
+# DESTINATION ${CATKIN_PACKAGE_INCLUDE_DESTINATION}
+# FILES_MATCHING PATTERN "*.h"
+# PATTERN ".svn" EXCLUDE
+# )
+
+## Mark other files for installation (e.g. launch and bag files, etc.)
+# install(FILES
+# # myfile1
+# # myfile2
+# DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION}
+# )
+
+#############
+## Testing ##
+#############
+
+## Add gtest based cpp test target and link libraries
+# catkin_add_gtest(${PROJECT_NAME}-test test/test_navigation_helpers.cpp)
+# if(TARGET ${PROJECT_NAME}-test)
+# target_link_libraries(${PROJECT_NAME}-test ${PROJECT_NAME})
+# endif()
+
+## Add folders to be run by python nosetests
+# catkin_add_nosetests(test)
diff --git a/common/helpers/navigation_helpers/package.xml b/common/helpers/navigation_helpers/package.xml
new file mode 100644
index 000000000..d70a08178
--- /dev/null
+++ b/common/helpers/navigation_helpers/package.xml
@@ -0,0 +1,73 @@
+
+
+ navigation_helpers
+ 0.0.0
+ The navigation_helpers package
+
+
+
+
+ Jared Swift
+
+
+
+
+
+ TODO
+
+
+
+
+
+
+
+
+
+
+
+ Benteng Ma
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ catkin
+
+
+
+
+
+
+
+
diff --git a/common/helpers/navigation_helpers/setup.py b/common/helpers/navigation_helpers/setup.py
new file mode 100644
index 000000000..4cfc6694c
--- /dev/null
+++ b/common/helpers/navigation_helpers/setup.py
@@ -0,0 +1,10 @@
+#!/usr/bin/env python3
+
+from distutils.core import setup
+from catkin_pkg.python_setup import generate_distutils_setup
+
+setup_args = generate_distutils_setup(
+ packages=["navigation_helpers"], package_dir={"": "src"}
+)
+
+setup(**setup_args)
diff --git a/common/helpers/navigation_helpers/src/navigation_helpers/__init__.py b/common/helpers/navigation_helpers/src/navigation_helpers/__init__.py
new file mode 100644
index 000000000..969833b49
--- /dev/null
+++ b/common/helpers/navigation_helpers/src/navigation_helpers/__init__.py
@@ -0,0 +1,29 @@
+from geometry_msgs.msg import (
+ Point,
+ Pose,
+)
+
+import numpy as np
+from itertools import permutations
+
+from typing import Union, List
+
+
+def euclidian_distance(p1: Point, p2: Point) -> float:
+ return ((p1.x - p2.x) ** 2 + (p1.y - p2.y) ** 2) ** 0.5
+
+
+def min_hamiltonian_path(start: Pose, poses: List[Pose]) -> Union[None, List[Pose]]:
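+    """Return the ordering of `poses` that minimises the total 2-D
+    straight-line distance of a path starting at `start`, found by brute
+    force over all permutations (O(n!)) -- only viable for small numbers
+    of waypoints."""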
+ best_order = None
+ min_distance = np.inf
+
+ for perm in permutations(poses):
+ dist = euclidian_distance(start.position, perm[0].position)
+ for i in range(len(poses) - 1):
+ dist += euclidian_distance(perm[i].position, perm[i + 1].position)
+
+ if dist < min_distance:
+ min_distance = dist
+ best_order = list(perm)
+
+ return best_order
diff --git a/common/vector_databases/lasr_vector_databases_faiss/nodes/txt_index_service b/common/vector_databases/lasr_vector_databases_faiss/nodes/txt_index_service
index a9c5c85b6..2e460951e 100644
--- a/common/vector_databases/lasr_vector_databases_faiss/nodes/txt_index_service
+++ b/common/vector_databases/lasr_vector_databases_faiss/nodes/txt_index_service
@@ -6,8 +6,11 @@ from lasr_vector_databases_faiss import (
load_model,
parse_txt_file,
get_sentence_embeddings,
- create_vector_database,
+ construct_faiss_index,
+ add_vectors_to_index,
+ save_index_to_disk,
)
+from typing import List
class TxtIndexService:
@@ -18,14 +21,67 @@ class TxtIndexService:
rospy.loginfo("Text index service started")
def execute_cb(self, req: TxtIndexRequest):
- txt_fp: str = req.txt_path
- sentences_to_embed: list[str] = parse_txt_file(txt_fp)
- sentence_embeddings: np.ndarray = get_sentence_embeddings(
- sentences_to_embed, self._sentence_embedding_model
- )
- index_path: str = req.index_path
- create_vector_database(sentence_embeddings, index_path)
- return TxtIndexResponse()
+ txt_fps: List[str] = req.txt_paths
+ index_paths: List[str] = req.index_paths
+ factory_string: str = req.index_factory_string
+ vecs_per_txt_file: List[int] = []
+ n_train_vecs = 5000000
+ if len(index_paths) == 1 and len(txt_fps) > 1:
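+            # memory-map the full vector matrix on disk; its shape is
+            # hard-coded here to the expected total number of vectors
+            # across all txt files and the embedding dimension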
+ xn = np.memmap(
+ f"/tmp/xn.dat",
+ dtype="float32",
+ mode="w+",
+ shape=(11779430, 384),
+ )
+ for i, txt_fp in enumerate(txt_fps):
+ sentences_to_embed: List[str] = parse_txt_file(txt_fp)
+ sentence_embeddings: np.ndarray = get_sentence_embeddings(
+ sentences_to_embed, self._sentence_embedding_model
+ )
+ if i == 0:
+ index = construct_faiss_index(
+ index_factory_string=factory_string,
+ vector_dim=sentence_embeddings.shape[1],
+ )
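+                    # training matrix: the first 100k embeddings from each
+                    # txt file are stacked here to train the index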
+ xt = np.empty(
+ (n_train_vecs, sentence_embeddings.shape[1]), dtype=np.float32
+ )
+ sentences_for_training = sentence_embeddings[:100000]
+ xt[i * 100000 : (i + 1) * 100000] = sentences_for_training
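+                    # NOTE: the xn offsets below assume every txt file
+                    # yields the same number of vectors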
+ xn[
+ i
+ * sentence_embeddings.shape[0] : (i + 1)
+ * sentence_embeddings.shape[0],
+ ] = sentence_embeddings
+ vecs_per_txt_file.append(sentence_embeddings.shape[0])
+ rospy.loginfo("Training index")
+ index.train(xt)
+ rospy.loginfo("Adding vectors to index")
+ add_vectors_to_index(index, xn)
+ rospy.loginfo("Saving index to disk")
+ save_index_to_disk(index, index_paths[0])
+
+ elif len(index_paths) != len(txt_fps):
+ rospy.logerr(
+ "Number of txt files and index paths must be the same, or only one index "
+ "path must be provided."
+ f"Got {len(txt_fps)} txt files and {len(index_paths)} index paths."
+ )
+ else:
+ for txt_fp, index_path in zip(txt_fps, index_paths):
+                sentences_to_embed: List[str] = parse_txt_file(txt_fp)
+ sentence_embeddings: np.ndarray = get_sentence_embeddings(
+ sentences_to_embed, self._sentence_embedding_model
+ )
+ index = construct_faiss_index(
+ index_factory_string=factory_string,
+ vector_dim=sentence_embeddings.shape[1],
+ )
+ add_vectors_to_index(index, sentence_embeddings)
+ save_index_to_disk(index, index_path)
+ vecs_per_txt_file.append(sentence_embeddings.shape[0])
+
+ return TxtIndexResponse(vecs_per_txt_file=vecs_per_txt_file)
if __name__ == "__main__":
diff --git a/common/vector_databases/lasr_vector_databases_faiss/nodes/txt_query_service b/common/vector_databases/lasr_vector_databases_faiss/nodes/txt_query_service
index e45610fd3..f0740c158 100644
--- a/common/vector_databases/lasr_vector_databases_faiss/nodes/txt_query_service
+++ b/common/vector_databases/lasr_vector_databases_faiss/nodes/txt_query_service
@@ -15,6 +15,9 @@ from lasr_vector_databases_faiss import (
query_database,
)
+from typing import List
+from math import inf
+
class TxtQueryService:
def __init__(self):
@@ -24,19 +27,67 @@ class TxtQueryService:
rospy.loginfo("Text Query service started")
def execute_cb(self, req: TxtQueryRequest) -> TxtQueryResponse:
- txt_fp: str = req.txt_path
- index_path: str = req.index_path
+ txt_fps: List[str] = req.txt_paths
+ index_paths: List[str] = req.index_paths
query_sentence: str = req.query_sentence
- possible_matches: list[str] = parse_txt_file(txt_fp)
- query_embedding: np.ndarray = get_sentence_embeddings(
- [query_sentence], self._sentence_embedding_model # requires list of strings
- )
- distances, indices = query_database(index_path, query_embedding, k=req.k)
- nearest_matches = [possible_matches[i] for i in indices[0]]
+ vecs_per_txt_file: List[int] = req.vecs_per_txt_file
+
+ if len(index_paths) == 1 and len(txt_fps) > 1:
+ distances, indices = query_database(
+ index_paths[0],
+ get_sentence_embeddings(
+ [query_sentence],
+ self._sentence_embedding_model, # requires list of strings
+ ),
+ k=req.k,
+ )
+ closest_sentences: List[str] = []
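+            # map each global index back to its source txt file by walking
+            # the per-file vector counts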
+ for i, index in enumerate(indices[0]):
+ for j, n_vecs in enumerate(vecs_per_txt_file):
+ if index < n_vecs:
+ break
+ index -= n_vecs
+ closest_sentences.append(parse_txt_file(txt_fps[j])[index])
+
+ return TxtQueryResponse(
+ closest_sentences=closest_sentences,
+ cosine_similarities=distances[0],
+ )
+
+ elif len(index_paths) != len(txt_fps):
+ rospy.logerr(
+ "Number of txt files and index files must be equal or index files must be 1"
+ )
+ return TxtQueryResponse()
+
+ else:
+            best_distances: List[float] = [inf] * req.k
+            best_matches: List[str] = [""] * req.k
+ for txt_fp, index_path in zip(txt_fps, index_paths):
+ possible_matches: list[str] = parse_txt_file(txt_fp)
+ query_embedding: np.ndarray = get_sentence_embeddings(
+ [query_sentence],
+ self._sentence_embedding_model, # requires list of strings
+ )
+ distances, indices = query_database(
+ index_path, query_embedding, k=req.k
+ )
+ current_nearest_matches = [possible_matches[i] for i in indices[0]]
+
+                for i, match in enumerate(current_nearest_matches):
+                    if distances[0][i] < best_distances[-1]:
+                        best_distances[-1] = distances[0][i]
+                        best_matches[-1] = match
+                        # re-sort so best_distances[-1] is always the worst
+                        # of the k best matches seen so far
+                        best_distances, best_matches = (
+                            list(t)
+                            for t in zip(*sorted(zip(best_distances, best_matches)))
+                        )
return TxtQueryResponse(
- closest_sentences=nearest_matches,
- cosine_similarities=distances[0].tolist(),
+ closest_sentences=best_matches,
+ cosine_similarities=best_distances,
)
diff --git a/common/vector_databases/lasr_vector_databases_faiss/scripts/test_index_service.py b/common/vector_databases/lasr_vector_databases_faiss/scripts/test_index_service.py
index cc7f12f3c..885295f0d 100644
--- a/common/vector_databases/lasr_vector_databases_faiss/scripts/test_index_service.py
+++ b/common/vector_databases/lasr_vector_databases_faiss/scripts/test_index_service.py
@@ -4,11 +4,12 @@
request = TxtIndexRequest()
-request.txt_path = (
+request.txt_paths = [
"/home/mattbarker/LASR/lasr_ws/src/lasr-base/tasks/gpsr/data/questions.txt"
-)
+]
+request.index_factory_string = "Flat"
-request.index_path = (
+request.index_paths = [
"/home/mattbarker/LASR/lasr_ws/src/lasr-base/tasks/gpsr/data/questions.index"
-)
+]
rospy.ServiceProxy("lasr_faiss/txt_index", TxtIndex)(request)
diff --git a/common/vector_databases/lasr_vector_databases_faiss/scripts/test_query_service.py b/common/vector_databases/lasr_vector_databases_faiss/scripts/test_query_service.py
index 4ae89e530..b86ac7fe1 100644
--- a/common/vector_databases/lasr_vector_databases_faiss/scripts/test_query_service.py
+++ b/common/vector_databases/lasr_vector_databases_faiss/scripts/test_query_service.py
@@ -4,13 +4,13 @@
request = TxtQueryRequest()
-request.txt_path = (
+request.txt_paths = [
"/home/mattbarker/LASR/lasr_ws/src/lasr-base/tasks/gpsr/data/questions.txt"
-)
+]
-request.index_path = (
+request.index_paths = [
"/home/mattbarker/LASR/lasr_ws/src/lasr-base/tasks/gpsr/data/questions.index"
-)
+]
request.query_sentence = "Do French like snails?"
diff --git a/common/vector_databases/lasr_vector_databases_faiss/src/lasr_vector_databases_faiss/__init__.py b/common/vector_databases/lasr_vector_databases_faiss/src/lasr_vector_databases_faiss/__init__.py
index 698927d9c..370f8f64d 100644
--- a/common/vector_databases/lasr_vector_databases_faiss/src/lasr_vector_databases_faiss/__init__.py
+++ b/common/vector_databases/lasr_vector_databases_faiss/src/lasr_vector_databases_faiss/__init__.py
@@ -1,2 +1,8 @@
-from .database_utils import create_vector_database, load_vector_database, query_database
+from .database_utils import (
+ load_vector_database,
+ query_database,
+ save_index_to_disk,
+ add_vectors_to_index,
+ construct_faiss_index,
+)
from .get_sentence_embeddings import get_sentence_embeddings, load_model, parse_txt_file
diff --git a/common/vector_databases/lasr_vector_databases_faiss/src/lasr_vector_databases_faiss/database_utils.py b/common/vector_databases/lasr_vector_databases_faiss/src/lasr_vector_databases_faiss/database_utils.py
index 6d0139072..8175d3d6f 100644
--- a/common/vector_databases/lasr_vector_databases_faiss/src/lasr_vector_databases_faiss/database_utils.py
+++ b/common/vector_databases/lasr_vector_databases_faiss/src/lasr_vector_databases_faiss/database_utils.py
@@ -3,40 +3,80 @@
import numpy as np
import faiss
+from typing import Union
-def create_vector_database(
+
+def construct_faiss_index(
+ index_factory_string: str,
+ vector_dim: int,
+ normalise: bool = False,
+ use_gpu: bool = False,
+) -> faiss.Index:
+ """Constructs the faiss vector datbase object.
+
+ Args:
+ index_factory_string (str): Index factory string
+        vector_dim (int): dimensionality of each vector to be added to the index.
+        normalise (bool, optional): whether to use inner product (cosine similarity
+            on L2-normalised vectors) instead of Euclidean distance. Defaults to False.
+ use_gpu (bool, optional): whether to move the index to the GPU. Defaults to False.
+
+ Returns:
+ faiss.Index: constructed faiss index object.
+ """
+
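+    # inner product on L2-normalised vectors is equivalent to cosine similarity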
+ metric = faiss.METRIC_INNER_PRODUCT if normalise else faiss.METRIC_L2
+ index = faiss.index_factory(vector_dim, index_factory_string, metric)
+ if use_gpu:
+ index = faiss.index_cpu_to_all_gpus(index)
+ return index
+
+
+def add_vectors_to_index(
+ index: faiss.Index,
vectors: np.ndarray,
- index_path: str,
- overwrite: bool = False,
- index_type: str = "Flat",
- normalise_vecs: bool = True,
+ normalise: bool = False,
+ add_with_ids: bool = False,
+) -> Union[None, np.ndarray]:
+ """Adds a set of vectors to the index, optionally normalising vectors
+ or adding them with Ids.
+
+ Args:
+ index (faiss.Index): index to add the vectors to.
+ vectors (np.ndarray): vectors to add to the index of shape (n_vecs, vec_dim)
+ normalise (bool, optional): whether to normalise the vectors. Defaults to False.
+        add_with_ids (bool, optional): whether to add the vectors with IDs. Defaults to False.
+
+    Returns:
+        Union[None, np.ndarray]: the IDs of the added vectors, or None.
+ """
+
+ if normalise:
+ faiss.normalize_L2(vectors)
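+        # NOTE: faiss.normalize_L2 modifies `vectors` in place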
+ if add_with_ids:
+ ids = np.arange(index.ntotal, index.ntotal + vectors.shape[0])
+ index.add_with_ids(vectors, ids)
+ return ids
+ else:
+ index.add(vectors)
+ return None
+
+
+def save_index_to_disk(
+ index: faiss.Index, index_path: str, overwrite: bool = False
) -> None:
- """Creates a FAISS Index using the factory constructor and the given
- index type, and adds the given vector to the index, and then saves
- it to disk using the given path.
+ """Saves the index to disk.
Args:
- vectors (np.ndarray): vector of shape (n_vectors, vector_dim)
+ index (faiss.Index): index to save
index_path (str): path to save the index
- overwrite (bool, optional): Whether to replace an existing index
- at the same filepath if it exists. Defaults to False.
- index_type (str, optional): FAISS Index Factory string. Defaults to "IndexFlatIP".
- normalise_vecs (bool, optional): Whether to normalise the vectors before
- adding them to the Index. This converts the IP metric to Cosine Similarity.
- Defaults to True.
+ overwrite (bool, optional): whether to overwrite the index if it already exists.
+ Defaults to False.
"""
-
if os.path.exists(index_path) and not overwrite:
raise FileExistsError(
f"Index already exists at {index_path}. Set overwrite=True to replace it."
)
-
- index = faiss.index_factory(
- vectors.shape[1], index_type, faiss.METRIC_INNER_PRODUCT
- )
- if normalise_vecs:
- faiss.normalize_L2(vectors)
- index.add(vectors)
faiss.write_index(index, index_path)
@@ -62,7 +102,7 @@ def load_vector_database(index_path: str, use_gpu: bool = False) -> faiss.Index:
def query_database(
index_path: str,
query_vectors: np.ndarray,
- normalise: bool = True,
+ normalise: bool = False,
k: int = 1,
) -> tuple[np.ndarray, np.ndarray]:
"""Queries the given index with the given query vectors
diff --git a/common/vector_databases/lasr_vector_databases_faiss/src/lasr_vector_databases_faiss/split_txt_file.py b/common/vector_databases/lasr_vector_databases_faiss/src/lasr_vector_databases_faiss/split_txt_file.py
new file mode 100644
index 000000000..6f65d12d6
--- /dev/null
+++ b/common/vector_databases/lasr_vector_databases_faiss/src/lasr_vector_databases_faiss/split_txt_file.py
@@ -0,0 +1,27 @@
+import argparse
+from typing import Dict
+
+
+def split_txt_file(input_file, output_file, num_splits):
+    with open(input_file, "r", encoding="utf8") as src:
+        lines = src.readlines()
+    split_size = len(lines) // num_splits
+    for i in range(num_splits):
+        # the final chunk also takes any remainder lines, so nothing is
+        # dropped when the file doesn't divide evenly
+        end = (i + 1) * split_size if i < num_splits - 1 else len(lines)
+        with open(f"{output_file}_chunk_{i+1}.txt", "w", encoding="utf8") as dest:
+            dest.writelines(lines[i * split_size : end])
+
+
+def parse_args() -> Dict:
+ parser = argparse.ArgumentParser(description="Split a txt file into chunks")
+ parser.add_argument("input_file", type=str, help="Path to the input txt file")
+ parser.add_argument("output_file", type=str, help="Path to the output txt file")
+ parser.add_argument(
+ "num_splits", type=int, help="Number of chunks to split the file into"
+ )
+ known, _ = parser.parse_known_args()
+ return vars(known)
+
+
+if __name__ == "__main__":
+ args = parse_args()
+ split_txt_file(args["input_file"], args["output_file"], args["num_splits"])
diff --git a/common/vector_databases/lasr_vector_databases_msgs/srv/TxtIndex.srv b/common/vector_databases/lasr_vector_databases_msgs/srv/TxtIndex.srv
index 79ac01654..3ab18ec20 100644
--- a/common/vector_databases/lasr_vector_databases_msgs/srv/TxtIndex.srv
+++ b/common/vector_databases/lasr_vector_databases_msgs/srv/TxtIndex.srv
@@ -1,7 +1,14 @@
-# Path to input text file
-string txt_path
+# Path to input text files
+string[] txt_paths
-# Output path to save index
-string index_path
+# Output paths to save the created indices.
+# If multiple text files but only one index
+# path are provided, a single index containing
+# the vectors from all of the txt files is created.
+string[] index_paths
+# Specifies the type of index to create
+# see https://github.com/facebookresearch/faiss/wiki/The-index-factory
+string index_factory_string
---
+int32[] vecs_per_txt_file
\ No newline at end of file
diff --git a/common/vector_databases/lasr_vector_databases_msgs/srv/TxtQuery.srv b/common/vector_databases/lasr_vector_databases_msgs/srv/TxtQuery.srv
index bbcb04613..463a91a96 100644
--- a/common/vector_databases/lasr_vector_databases_msgs/srv/TxtQuery.srv
+++ b/common/vector_databases/lasr_vector_databases_msgs/srv/TxtQuery.srv
@@ -1,8 +1,8 @@
-# Path to input text file
-string txt_path
+# Path to input text files
+string[] txt_paths
-# Path to index file to load
-string index_path
+# Path to corresponding index files
+string[] index_paths
# Sentence to query index with
string query_sentence
@@ -10,6 +10,10 @@ string query_sentence
# Number of nearest sentences to return
uint8 k
+# Number of vectors per txt file -- used to speed
+# up lookup.
+int32[] vecs_per_txt_file
+
---
# Nearest sentence
string[] closest_sentences
diff --git a/common/vision/lasr_vision_msgs/msg/Detection3D.msg b/common/vision/lasr_vision_msgs/msg/Detection3D.msg
index de41b3451..1a5fdea6e 100644
--- a/common/vision/lasr_vision_msgs/msg/Detection3D.msg
+++ b/common/vision/lasr_vision_msgs/msg/Detection3D.msg
@@ -15,4 +15,5 @@ string target_frame
# This will be empty if a segmentation model was not used
int32[] xyseg
+# Point in map frame
geometry_msgs/Point point
\ No newline at end of file
diff --git a/common/vision/lasr_vision_yolov8/examples/relay_3d b/common/vision/lasr_vision_yolov8/examples/relay_3d
index 1ffc9f758..02a243bd5 100644
--- a/common/vision/lasr_vision_yolov8/examples/relay_3d
+++ b/common/vision/lasr_vision_yolov8/examples/relay_3d
@@ -53,4 +53,4 @@ def listener():
if __name__ == "__main__":
- listener()
\ No newline at end of file
+ listener()
diff --git a/common/vision/lasr_vision_yolov8/requirements.txt b/common/vision/lasr_vision_yolov8/requirements.txt
index e25cdeb1e..be2c4b632 100644
--- a/common/vision/lasr_vision_yolov8/requirements.txt
+++ b/common/vision/lasr_vision_yolov8/requirements.txt
@@ -4,10 +4,10 @@ contourpy==1.1.1 # via matplotlib
cycler==0.12.1 # via matplotlib
dill==0.3.7 # via -r requirements.in
filelock==3.13.1 # via torch, triton
-fonttools==4.49.0 # via matplotlib
-fsspec==2024.2.0 # via torch
+fonttools==4.50.0 # via matplotlib
+fsspec==2024.3.0 # via torch
idna==3.6 # via requests
-importlib-resources==6.1.2 # via matplotlib
+importlib-resources==6.3.1 # via matplotlib
jinja2==3.1.3 # via torch
kiwisolver==1.4.5 # via matplotlib
markupsafe==2.1.5 # via jinja2
@@ -28,7 +28,7 @@ nvidia-nccl-cu12==2.19.3 # via torch
nvidia-nvjitlink-cu12==12.4.99 # via nvidia-cusolver-cu12, nvidia-cusparse-cu12
nvidia-nvtx-cu12==12.1.105 # via torch
opencv-python==4.9.0.80 # via ultralytics
-packaging==23.2 # via matplotlib
+packaging==24.0 # via matplotlib
pandas==2.0.3 # via seaborn, ultralytics
pillow==10.2.0 # via matplotlib, torchvision, ultralytics
psutil==5.9.8 # via ultralytics
@@ -50,4 +50,4 @@ typing-extensions==4.10.0 # via torch
tzdata==2024.1 # via pandas
ultralytics==8.0.168 # via -r requirements.in
urllib3==2.2.1 # via requests
-zipp==3.17.0 # via importlib-resources
+zipp==3.18.1 # via importlib-resources
diff --git a/common/vision/lasr_vision_yolov8/src/lasr_vision_yolov8/yolo.py b/common/vision/lasr_vision_yolov8/src/lasr_vision_yolov8/yolo.py
index 342223979..4171de2de 100644
--- a/common/vision/lasr_vision_yolov8/src/lasr_vision_yolov8/yolo.py
+++ b/common/vision/lasr_vision_yolov8/src/lasr_vision_yolov8/yolo.py
@@ -18,7 +18,8 @@
from geometry_msgs.msg import Point, PointStamped
import tf2_ros as tf
-import tf2_geometry_msgs # noqa
+import tf2_sensor_msgs # noqa
+from tf2_sensor_msgs.tf2_sensor_msgs import do_transform_cloud
# global tf buffer
tf_buffer = tf.Buffer(cache_time=rospy.Duration(10))
@@ -108,6 +109,12 @@ def detect_3d(
rospy.loginfo("Decoding")
img = cv2_pcl.pcl_to_cv2(request.pcl)
+ # transform pcl to map frame
+ trans = tf_buffer.lookup_transform(
+ "map", request.pcl.header.frame_id, rospy.Time(0), rospy.Duration(1.0)
+ )
+ pcl_map = do_transform_cloud(request.pcl, trans)
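+    # the transformed cloud may be unorganised, so the original cloud's
+    # height/width are passed explicitly when computing centroids below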
+
# load model
rospy.loginfo("Loading model")
model = load_model(request.dataset)
@@ -132,26 +139,19 @@ def detect_3d(
if has_segment_masks:
detection.xyseg = result.masks.xy[i].flatten().astype(int).tolist()
- centroid = cv2_pcl.seg_to_centroid(request.pcl, np.array(detection.xyseg))
-
- point_stamped = PointStamped()
- point_stamped.point = Point(*centroid)
- point_stamped.header.frame_id = request.pcl.header.frame_id
- point_stamped.header.stamp = rospy.Time(0)
-
- # TODO: handle tf errors properly
- while not rospy.is_shutdown():
- try:
- point_stamped = tf_buffer.transform(point_stamped, "map")
- detection.point = point_stamped.point
- break
- except Exception as e:
- rospy.logerr(e)
- continue
-
- # publish to debug topic
- if debug_point_publisher is not None:
- markers.create_and_publish_marker(debug_point_publisher, point_stamped)
+ centroid = cv2_pcl.seg_to_centroid(
+ pcl_map,
+ np.array(detection.xyseg),
+ height=request.pcl.height,
+ width=request.pcl.width,
+ )
+ detection.point = Point(*centroid)
+
+ if debug_point_publisher is not None:
+ markers.create_and_publish_marker(
+ debug_point_publisher,
+ PointStamped(point=detection.point, header=pcl_map.header),
+ )
detected_objects.append(detection)
diff --git a/skills/src/lasr_skills/__init__.py b/skills/src/lasr_skills/__init__.py
index abb498c39..a1931c23d 100755
--- a/skills/src/lasr_skills/__init__.py
+++ b/skills/src/lasr_skills/__init__.py
@@ -2,15 +2,17 @@
from .detect_3d import Detect3D
from .detect_3d_in_area import Detect3DInArea
from .wait_for_person import WaitForPerson
+from .say import Say
from .wait_for_person_in_area import WaitForPersonInArea
from .describe_people import DescribePeople
from .look_to_point import LookToPoint
from .play_motion import PlayMotion
from .go_to_location import GoToLocation
-from .go_to_semantic_location import GoToSemanticLocation
-from .say import Say
from .listen import Listen
from .listen_for import ListenFor
+from .find_person import FindPerson
+from .find_named_person import FindNamedPerson
from .receive_object import ReceiveObject
from .handover_object import HandoverObject
from .ask_and_listen import AskAndListen
diff --git a/skills/src/lasr_skills/find_named_person.py b/skills/src/lasr_skills/find_named_person.py
new file mode 100755
index 000000000..f1055f79a
--- /dev/null
+++ b/skills/src/lasr_skills/find_named_person.py
@@ -0,0 +1,201 @@
+import smach
+import rospy
+
+from lasr_skills import Detect3D, GoToLocation, AskAndListen
+import navigation_helpers
+
+from geometry_msgs.msg import Pose, PoseWithCovarianceStamped, Point, Quaternion
+
+
+from typing import List, Union
+
+
+class FindNamedPerson(smach.StateMachine):
+
+ class GetLocation(smach.State):
+ def __init__(self):
+ smach.State.__init__(
+ self,
+ outcomes=["succeeded", "failed"],
+ input_keys=["location_index", "waypoints"],
+ output_keys=["location"],
+ )
+
+ def execute(self, userdata):
+ userdata.location = userdata.waypoints[userdata.location_index]
+ return "succeeded"
+
+ class GetPose(smach.State):
+ def __init__(self):
+ smach.State.__init__(
+ self,
+ outcomes=["succeeded", "failed"],
+ output_keys=["current_pose"],
+ )
+
+ def execute(self, userdata):
+ userdata.current_pose = rospy.wait_for_message(
+ "/amcl_pose", PoseWithCovarianceStamped
+ ).pose.pose
+ return "succeeded"
+
+ class ComputePath(smach.State):
+ def __init__(self, waypoints: List[Pose]):
+ smach.State.__init__(
+ self,
+ outcomes=["succeeded", "failed"],
+ input_keys=["current_pose"],
+ output_keys=["waypoints"],
+ )
+ self._waypoints = waypoints
+
+ def execute(self, userdata):
+ userdata.waypoints = navigation_helpers.min_hamiltonian_path(
+ userdata.current_pose, self._waypoints
+ )
+ return "succeeded"
+
+ class HandleDetections(smach.State):
+ def __init__(self):
+ smach.State.__init__(
+ self,
+ outcomes=["succeeded", "failed"],
+ input_keys=["detections_3d"],
+ output_keys=["person_point"],
+ )
+
+ def execute(self, userdata):
+ if len(userdata.detections_3d.detected_objects) == 0:
+ rospy.logwarn("No person detected, returning failed.")
+ return "failed"
+ userdata.person_point = userdata.detections_3d.detected_objects[0].point
+ return "succeeded"
+
+ class HandleResponse(smach.State):
+ def __init__(self):
+ smach.State.__init__(
+ self,
+ outcomes=["succeeded", "failed"],
+ input_keys=["transcribed_speech"],
+ )
+
+ def execute(self, userdata):
+            # TODO: make this smarter, e.g. Levenshtein distance
+ if "yes" in userdata.transcribed_speech.lower():
+ return "succeeded"
+ return "failed"
+
+ def __init__(
+ self,
+ name: str,
+ waypoints: Union[List[Pose], None] = None,
+ location_param: Union[str, None] = None,
+ ):
+ smach.StateMachine.__init__(
+ self, outcomes=["succeeded", "failed"], output_keys=["person_point"]
+ )
+
+ if waypoints is None and location_param is None:
+ raise ValueError("Either waypoints or location_param must be provided")
+
+ if waypoints is None:
+ waypoints_to_iterate: List[Pose] = []
+
+ room = rospy.get_param(location_param)
+ beacons = room["beacons"]
+ for beacon in beacons:
+ waypoint: Pose = Pose(
+ position=Point(**beacons[beacon]["near_pose"]["position"]),
+ orientation=Quaternion(
+ **beacons[beacon]["near_pose"]["orientation"]
+ ),
+ )
+ waypoints_to_iterate.append(waypoint)
+ else:
+ waypoints_to_iterate: List[Pose] = waypoints
+
+ with self:
+
+ smach.StateMachine.add(
+ "GET_POSE",
+ self.GetPose(),
+ transitions={"succeeded": "COMPUTE_PATH", "failed": "failed"},
+ )
+
+ smach.StateMachine.add(
+ "COMPUTE_PATH",
+ self.ComputePath(waypoints_to_iterate),
+ transitions={"succeeded": "WAYPOINT_ITERATOR", "failed": "failed"},
+ )
+
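+            # visit the waypoints in the computed order until the named
+            # person is found or the waypoints are exhausted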
+ waypoint_iterator = smach.Iterator(
+ outcomes=["succeeded", "failed"],
+ it=lambda: range(len(waypoints_to_iterate)),
+ it_label="location_index",
+ input_keys=["waypoints"],
+ output_keys=["person_point"],
+ exhausted_outcome="failed",
+ )
+
+ with waypoint_iterator:
+
+ container_sm = smach.StateMachine(
+ outcomes=["succeeded", "failed", "continue"],
+ input_keys=["location_index", "waypoints"],
+ output_keys=["person_point"],
+ )
+
+ with container_sm:
+
+ smach.StateMachine.add(
+ "GET_LOCATION",
+ self.GetLocation(),
+ transitions={"succeeded": "GO_TO_LOCATION", "failed": "failed"},
+ )
+
+ smach.StateMachine.add(
+ "GO_TO_LOCATION",
+ GoToLocation(),
+ transitions={
+ "succeeded": "DETECT3D",
+ "failed": "failed",
+ },
+ )
+ smach.StateMachine.add(
+ "DETECT3D",
+ Detect3D(filter=["person"]),
+ transitions={
+ "succeeded": "HANDLE_DETECTIONS",
+ "failed": "failed",
+ },
+ )
+ smach.StateMachine.add(
+ "HANDLE_DETECTIONS",
+ self.HandleDetections(),
+ transitions={
+ "succeeded": "CHECK_NAME",
+ "failed": "continue",
+ },
+ )
+ smach.StateMachine.add(
+ "CHECK_NAME",
+ AskAndListen(f"I'm looking for {name}. Are you {name}?"),
+ transitions={
+ "succeeded": "HANDLE_RESPONSE",
+ "failed": "failed",
+ },
+ )
+ smach.StateMachine.add(
+ "HANDLE_RESPONSE",
+ self.HandleResponse(),
+ transitions={"succeeded": "succeeded", "failed": "continue"},
+ )
+
+ waypoint_iterator.set_contained_state(
+ "CONTAINER_STATE", container_sm, loop_outcomes=["continue"]
+ )
+ smach.StateMachine.add(
+ "WAYPOINT_ITERATOR",
+ waypoint_iterator,
+ {"succeeded": "succeeded", "failed": "failed"},
+ )
diff --git a/skills/src/lasr_skills/find_person.py b/skills/src/lasr_skills/find_person.py
new file mode 100755
index 000000000..94dadccc1
--- /dev/null
+++ b/skills/src/lasr_skills/find_person.py
@@ -0,0 +1,149 @@
+import smach
+import rospy
+
+from lasr_skills import Detect3D, GoToLocation
+import navigation_helpers
+
+from geometry_msgs.msg import Pose, PoseWithCovarianceStamped
+
+from typing import List
+
+
+class FindPerson(smach.StateMachine):
+
+ class GetLocation(smach.State):
+ def __init__(self):
+ smach.State.__init__(
+ self,
+ outcomes=["succeeded", "failed"],
+ input_keys=["location_index", "waypoints"],
+ output_keys=["location"],
+ )
+
+ def execute(self, userdata):
+ userdata.location = userdata.waypoints[userdata.location_index]
+ return "succeeded"
+
+ class GetPose(smach.State):
+ def __init__(self):
+ smach.State.__init__(
+ self,
+ outcomes=["succeeded", "failed"],
+ output_keys=["current_pose"],
+ )
+
+ def execute(self, userdata):
+ userdata.current_pose = rospy.wait_for_message(
+ "/amcl_pose", PoseWithCovarianceStamped
+ ).pose.pose
+ return "succeeded"
+
+ class ComputePath(smach.State):
+ def __init__(self, waypoints: List[Pose]):
+ smach.State.__init__(
+ self,
+ outcomes=["succeeded", "failed"],
+ input_keys=["current_pose"],
+ output_keys=["waypoints"],
+ )
+ self._waypoints = waypoints
+
+ def execute(self, userdata):
+ userdata.waypoints = navigation_helpers.min_hamiltonian_path(
+ userdata.current_pose, self._waypoints
+ )
+ return "succeeded"
+
+ class HandleDetections(smach.State):
+ def __init__(self):
+ smach.State.__init__(
+ self,
+ outcomes=["succeeded", "failed"],
+ input_keys=["detections_3d"],
+ output_keys=["person_point"],
+ )
+
+ def execute(self, userdata):
+ if len(userdata.detections_3d.detected_objects) == 0:
+ rospy.logwarn("No person detected, returning failed.")
+ return "failed"
+ userdata.person_point = userdata.detections_3d.detected_objects[0].point
+ return "succeeded"
+
+ def __init__(self, waypoints: List[Pose]):
+ smach.StateMachine.__init__(
+ self, outcomes=["succeeded", "failed"], output_keys=["person_point"]
+ )
+
+ with self:
+
+ smach.StateMachine.add(
+ "GET_POSE",
+ self.GetPose(),
+ transitions={"succeeded": "COMPUTE_PATH", "failed": "failed"},
+ )
+
+ smach.StateMachine.add(
+ "COMPUTE_PATH",
+ self.ComputePath(waypoints),
+ transitions={"succeeded": "WAYPOINT_ITERATOR", "failed": "failed"},
+ )
+
+ waypoint_iterator = smach.Iterator(
+ outcomes=["succeeded", "failed"],
+ it=lambda: range(len(waypoints)),
+ it_label="location_index",
+ input_keys=["waypoints"],
+ output_keys=["person_point"],
+ exhausted_outcome="failed",
+ )
+
+ with waypoint_iterator:
+
+ container_sm = smach.StateMachine(
+ outcomes=["succeeded", "failed", "continue"],
+ input_keys=["location_index", "waypoints"],
+ output_keys=["person_point"],
+ )
+
+ with container_sm:
+
+ smach.StateMachine.add(
+ "GET_LOCATION",
+ self.GetLocation(),
+ transitions={"succeeded": "GO_TO_LOCATION", "failed": "failed"},
+ )
+
+ smach.StateMachine.add(
+ "GO_TO_LOCATION",
+ GoToLocation(),
+ transitions={
+ "succeeded": "DETECT3D",
+ "failed": "failed",
+ },
+ )
+ smach.StateMachine.add(
+ "DETECT3D",
+ Detect3D(filter=["person"]),
+ transitions={
+ "succeeded": "HANDLE_DETECTIONS",
+ "failed": "failed",
+ },
+ )
+ smach.StateMachine.add(
+ "HANDLE_DETECTIONS",
+ self.HandleDetections(),
+ transitions={
+ "succeeded": "succeeded",
+ "failed": "continue",
+ },
+ )
+
+ waypoint_iterator.set_contained_state(
+ "CONTAINER_STATE", container_sm, loop_outcomes=["continue"]
+ )
+ smach.StateMachine.add(
+ "WAYPOINT_ITERATOR",
+ waypoint_iterator,
+ {"succeeded": "succeeded", "failed": "failed"},
+ )
diff --git a/skills/src/lasr_skills/go_to_location.py b/skills/src/lasr_skills/go_to_location.py
index 562aeaf61..01553e30d 100755
--- a/skills/src/lasr_skills/go_to_location.py
+++ b/skills/src/lasr_skills/go_to_location.py
@@ -3,7 +3,7 @@
import rospy
import rosservice
from move_base_msgs.msg import MoveBaseAction, MoveBaseGoal
-from geometry_msgs.msg import Pose, PoseStamped
+from geometry_msgs.msg import Pose, PoseStamped, Point, Quaternion
from std_msgs.msg import Header
from typing import Union
@@ -25,8 +25,18 @@
class GoToLocation(smach.StateMachine):
- def __init__(self, location: Union[Pose, None] = None):
- super(GoToLocation, self).__init__(outcomes=["succeeded", "failed"])
+ def __init__(
+ self,
+ location: Union[Pose, None] = None,
+ location_param: Union[str, None] = None,
+ ):
+
+ if location is not None or location_param is not None:
+ super(GoToLocation, self).__init__(outcomes=["succeeded", "failed"])
+ else:
+ super(GoToLocation, self).__init__(
+ outcomes=["succeeded", "failed"], input_keys=["location"]
+ )
IS_SIMULATION = (
"/pal_startup_control/start" not in rosservice.get_service_list()
@@ -88,6 +98,38 @@ def __init__(self, location: Union[Pose, None] = None):
"preempted": "failed",
},
)
+ elif location_param is not None:
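+            # NOTE: the goal pose is read from the parameter server once,
+            # when the state machine is constructed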
+ smach.StateMachine.add(
+ "GO_TO_LOCATION",
+ smach_ros.SimpleActionState(
+ "move_base",
+ MoveBaseAction,
+ goal=MoveBaseGoal(
+ target_pose=PoseStamped(
+ pose=Pose(
+ position=Point(
+ **rospy.get_param(f"{location_param}/position")
+ ),
+ orientation=Quaternion(
+ **rospy.get_param(
+ f"{location_param}/orientation"
+ )
+ ),
+ ),
+ header=Header(frame_id="map"),
+ )
+ ),
+ ),
+ transitions={
+ "succeeded": (
+ "DISABLE_HEAD_MANAGER"
+ if not IS_SIMULATION
+ else "RAISE_BASE"
+ ),
+ "aborted": "failed",
+ "preempted": "failed",
+ },
+ )
else:
smach.StateMachine.add(
"GO_TO_LOCATION",
diff --git a/skills/src/lasr_skills/go_to_semantic_location.py b/skills/src/lasr_skills/go_to_semantic_location.py
deleted file mode 100755
index 007d801ca..000000000
--- a/skills/src/lasr_skills/go_to_semantic_location.py
+++ /dev/null
@@ -1,28 +0,0 @@
-import rospy
-import smach_ros
-
-from move_base_msgs.msg import MoveBaseAction, MoveBaseGoal
-from geometry_msgs.msg import PoseStamped, Pose, Point, Quaternion
-from std_msgs.msg import Header
-
-
-class GoToSemanticLocation(smach_ros.SimpleActionState):
-
- def __init__(self):
- super(GoToSemanticLocation, self).__init__(
- "move_base",
- MoveBaseAction,
- goal_cb=lambda ud, _: MoveBaseGoal(
- target_pose=PoseStamped(
- pose=self._to_pose(rospy.get_param(f"{ud.location}/location")),
- header=Header(frame_id="map"),
- )
- ),
- input_keys=["location"],
- )
-
- def _to_pose(self, location):
- return Pose(
- position=Point(**location["position"]),
- orientation=Quaternion(**location["orientation"]),
- )
diff --git a/tasks/carry_my_luggage/CMakeLists.txt b/tasks/carry_my_luggage/CMakeLists.txt
new file mode 100644
index 000000000..469501e16
--- /dev/null
+++ b/tasks/carry_my_luggage/CMakeLists.txt
@@ -0,0 +1,204 @@
+cmake_minimum_required(VERSION 3.0.2)
+project(carry_my_luggage)
+
+## Compile as C++11, supported in ROS Kinetic and newer
+# add_compile_options(-std=c++11)
+
+## Find catkin macros and libraries
+## if COMPONENTS list like find_package(catkin REQUIRED COMPONENTS xyz)
+## is used, also find other catkin packages
+find_package(catkin REQUIRED COMPONENTS
+ rospy
+)
+
+## System dependencies are found with CMake's conventions
+# find_package(Boost REQUIRED COMPONENTS system)
+
+
+## Uncomment this if the package has a setup.py. This macro ensures
+## modules and global scripts declared therein get installed
+## See http://ros.org/doc/api/catkin/html/user_guide/setup_dot_py.html
+catkin_python_setup()
+
+################################################
+## Declare ROS messages, services and actions ##
+################################################
+
+## To declare and build messages, services or actions from within this
+## package, follow these steps:
+## * Let MSG_DEP_SET be the set of packages whose message types you use in
+## your messages/services/actions (e.g. std_msgs, actionlib_msgs, ...).
+## * In the file package.xml:
+## * add a build_depend tag for "message_generation"
+## * add a build_depend and a exec_depend tag for each package in MSG_DEP_SET
+## * If MSG_DEP_SET isn't empty the following dependency has been pulled in
+## but can be declared for certainty nonetheless:
+## * add a exec_depend tag for "message_runtime"
+## * In this file (CMakeLists.txt):
+## * add "message_generation" and every package in MSG_DEP_SET to
+## find_package(catkin REQUIRED COMPONENTS ...)
+## * add "message_runtime" and every package in MSG_DEP_SET to
+## catkin_package(CATKIN_DEPENDS ...)
+## * uncomment the add_*_files sections below as needed
+## and list every .msg/.srv/.action file to be processed
+## * uncomment the generate_messages entry below
+## * add every package in MSG_DEP_SET to generate_messages(DEPENDENCIES ...)
+
+## Generate messages in the 'msg' folder
+# add_message_files(
+# FILES
+# Message1.msg
+# Message2.msg
+# )
+
+## Generate services in the 'srv' folder
+# add_service_files(
+# FILES
+# Service1.srv
+# Service2.srv
+# )
+
+## Generate actions in the 'action' folder
+# add_action_files(
+# FILES
+# Action1.action
+# Action2.action
+# )
+
+## Generate added messages and services with any dependencies listed here
+# generate_messages(
+# DEPENDENCIES
+# std_msgs # Or other packages containing msgs
+# )
+
+################################################
+## Declare ROS dynamic reconfigure parameters ##
+################################################
+
+## To declare and build dynamic reconfigure parameters within this
+## package, follow these steps:
+## * In the file package.xml:
+## * add a build_depend and a exec_depend tag for "dynamic_reconfigure"
+## * In this file (CMakeLists.txt):
+## * add "dynamic_reconfigure" to
+## find_package(catkin REQUIRED COMPONENTS ...)
+## * uncomment the "generate_dynamic_reconfigure_options" section below
+## and list every .cfg file to be processed
+
+## Generate dynamic reconfigure parameters in the 'cfg' folder
+# generate_dynamic_reconfigure_options(
+# cfg/DynReconf1.cfg
+# cfg/DynReconf2.cfg
+# )
+
+###################################
+## catkin specific configuration ##
+###################################
+## The catkin_package macro generates cmake config files for your package
+## Declare things to be passed to dependent projects
+## INCLUDE_DIRS: uncomment this if your package contains header files
+## LIBRARIES: libraries you create in this project that dependent projects also need
+## CATKIN_DEPENDS: catkin_packages dependent projects also need
+## DEPENDS: system dependencies of this project that dependent projects also need
+catkin_package(
+# INCLUDE_DIRS include
+# LIBRARIES carry_my_luggage
+# CATKIN_DEPENDS rospy
+# DEPENDS system_lib
+)
+
+###########
+## Build ##
+###########
+
+## Specify additional locations of header files
+## Your package locations should be listed before other locations
+include_directories(
+# include
+ ${catkin_INCLUDE_DIRS}
+)
+
+## Declare a C++ library
+# add_library(${PROJECT_NAME}
+# src/${PROJECT_NAME}/carry_my_luggage.cpp
+# )
+
+## Add cmake target dependencies of the library
+## as an example, code may need to be generated before libraries
+## either from message generation or dynamic reconfigure
+# add_dependencies(${PROJECT_NAME} ${${PROJECT_NAME}_EXPORTED_TARGETS} ${catkin_EXPORTED_TARGETS})
+
+## Declare a C++ executable
+## With catkin_make all packages are built within a single CMake context
+## The recommended prefix ensures that target names across packages don't collide
+# add_executable(${PROJECT_NAME}_node src/carry_my_luggage_node.cpp)
+
+## Rename C++ executable without prefix
+## The above recommended prefix causes long target names, the following renames the
+## target back to the shorter version for ease of user use
+## e.g. "rosrun someones_pkg node" instead of "rosrun someones_pkg someones_pkg_node"
+# set_target_properties(${PROJECT_NAME}_node PROPERTIES OUTPUT_NAME node PREFIX "")
+
+## Add cmake target dependencies of the executable
+## same as for the library above
+# add_dependencies(${PROJECT_NAME}_node ${${PROJECT_NAME}_EXPORTED_TARGETS} ${catkin_EXPORTED_TARGETS})
+
+## Specify libraries to link a library or executable target against
+# target_link_libraries(${PROJECT_NAME}_node
+# ${catkin_LIBRARIES}
+# )
+
+#############
+## Install ##
+#############
+
+# all install targets should use catkin DESTINATION variables
+# See http://ros.org/doc/api/catkin/html/adv_user_guide/variables.html
+
+## Mark executable scripts (Python etc.) for installation
+## in contrast to setup.py, you can choose the destination
+catkin_install_python(PROGRAMS
+ scripts/main.py
+ DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION}
+)
+
+## Mark executables for installation
+## See http://docs.ros.org/melodic/api/catkin/html/howto/format1/building_executables.html
+# install(TARGETS ${PROJECT_NAME}_node
+# RUNTIME DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION}
+# )
+
+## Mark libraries for installation
+## See http://docs.ros.org/melodic/api/catkin/html/howto/format1/building_libraries.html
+# install(TARGETS ${PROJECT_NAME}
+# ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION}
+# LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION}
+# RUNTIME DESTINATION ${CATKIN_GLOBAL_BIN_DESTINATION}
+# )
+
+## Mark cpp header files for installation
+# install(DIRECTORY include/${PROJECT_NAME}/
+# DESTINATION ${CATKIN_PACKAGE_INCLUDE_DESTINATION}
+# FILES_MATCHING PATTERN "*.h"
+# PATTERN ".svn" EXCLUDE
+# )
+
+## Mark other files for installation (e.g. launch and bag files, etc.)
+# install(FILES
+# # myfile1
+# # myfile2
+# DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION}
+# )
+
+#############
+## Testing ##
+#############
+
+## Add gtest based cpp test target and link libraries
+# catkin_add_gtest(${PROJECT_NAME}-test test/test_carry_my_luggage.cpp)
+# if(TARGET ${PROJECT_NAME}-test)
+# target_link_libraries(${PROJECT_NAME}-test ${PROJECT_NAME})
+# endif()
+
+## Add folders to be run by python nosetests
+# catkin_add_nosetests(test)
diff --git a/tasks/carry_my_luggage/config/lab.yaml b/tasks/carry_my_luggage/config/lab.yaml
new file mode 100644
index 000000000..e69de29bb
diff --git a/tasks/carry_my_luggage/launch/setup.launch b/tasks/carry_my_luggage/launch/setup.launch
new file mode 100644
index 000000000..b5b32e1c4
--- /dev/null
+++ b/tasks/carry_my_luggage/launch/setup.launch
@@ -0,0 +1,8 @@
+
+
+
+
+
+
+
+
diff --git a/tasks/carry_my_luggage/package.xml b/tasks/carry_my_luggage/package.xml
new file mode 100644
index 000000000..9c7499684
--- /dev/null
+++ b/tasks/carry_my_luggage/package.xml
@@ -0,0 +1,62 @@
+
+
+ carry_my_luggage
+ 0.0.0
+ The carry_my_luggage package
+
+
+
+
+ Jared Swift
+
+
+
+
+
+ MIT
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ catkin
+ rospy
+ rospy
+ rospy
+
+
+
+
+
+
+
+
diff --git a/tasks/carry_my_luggage/scripts/main.py b/tasks/carry_my_luggage/scripts/main.py
new file mode 100644
index 000000000..6cffb2d34
--- /dev/null
+++ b/tasks/carry_my_luggage/scripts/main.py
@@ -0,0 +1,11 @@
+#!/usr/bin/env python3
+
+import rospy
+from carry_my_luggage.state_machine import CarryMyLuggage
+
+if __name__ == "__main__":
+ rospy.init_node("carry_my_luggage_robocup")
+ carry_my_luggage = CarryMyLuggage()
+ outcome = carry_my_luggage.execute()
+ rospy.loginfo(f"Carry my luggage finished with outcome: {outcome}")
+ rospy.spin()
diff --git a/tasks/carry_my_luggage/setup.py b/tasks/carry_my_luggage/setup.py
new file mode 100644
index 000000000..50bcd448a
--- /dev/null
+++ b/tasks/carry_my_luggage/setup.py
@@ -0,0 +1,11 @@
+#!/usr/bin/env python3
+
+from distutils.core import setup
+from catkin_pkg.python_setup import generate_distutils_setup
+
+# fetch values from package.xml
+setup_args = generate_distutils_setup(
+ packages=["carry_my_luggage"], package_dir={"": "src"}
+)
+
+setup(**setup_args)
diff --git a/tasks/carry_my_luggage/src/carry_my_luggage/__init__.py b/tasks/carry_my_luggage/src/carry_my_luggage/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/tasks/carry_my_luggage/src/carry_my_luggage/state_machine.py b/tasks/carry_my_luggage/src/carry_my_luggage/state_machine.py
new file mode 100644
index 000000000..ae8765b5f
--- /dev/null
+++ b/tasks/carry_my_luggage/src/carry_my_luggage/state_machine.py
@@ -0,0 +1,8 @@
+import smach
+
+
+class CarryMyLuggage(smach.StateMachine):
+
+ def __init__(self):
+
+ smach.StateMachine.__init__(self, outcomes=["succeeded", "failed"])
diff --git a/tasks/carry_my_luggage/src/carry_my_luggage/states/__init__.py b/tasks/carry_my_luggage/src/carry_my_luggage/states/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/tasks/gpsr/CMakeLists.txt b/tasks/gpsr/CMakeLists.txt
index af1f6c908..a1c9b8bc6 100644
--- a/tasks/gpsr/CMakeLists.txt
+++ b/tasks/gpsr/CMakeLists.txt
@@ -155,6 +155,7 @@ include_directories(
## in contrast to setup.py, you can choose the destination
catkin_install_python(PROGRAMS
scripts/parse_gpsr_xmls.py
+ scripts/main.py
nodes/commands/question_answer
nodes/command_parser
DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION}
diff --git a/tasks/gpsr/config/bordeaux_example.yaml b/tasks/gpsr/config/bordeaux_example.yaml
new file mode 100644
index 000000000..88f87f64c
--- /dev/null
+++ b/tasks/gpsr/config/bordeaux_example.yaml
@@ -0,0 +1,38 @@
+arena:
+ polygon: [[1.0, 0.0], [1.0, 1.0], [0.0, 1.0], [0.0, 0.0]] # Vertices composing the polygon of the arena
+ rooms: # Rooms in the arena
+ bedroom: # Room name
+ pose: # Fixed pose in the room, will be used for navigating to the room
+ position: {x: 0.0, y: 0.0, z: 0.0}
+ orientation: {x: 0.0, y: 0.0, z: 0.0, w: 1.0}
+ polygon: [[0.0, 0.0], [0.0, 1.0], [1.0, 1.0], [1.0, 0.0]] # Vertices composing the polygon of the room
+ beacons: # Beacons in the room
+ bed: # Beacon name
+ near_pose: # Fixed pose in the room, will be used for navigating to the beacon
+ position: {x: 0.5, y: 0.5, z: 0.0}
+ orientation: {x: 0.0, y: 0.0, z: 0.0, w: 1.0}
+ near_polygon: [[0.4, 0.4], [0.4, 0.6], [0.6, 0.6], [0.6, 0.4]]
+ search_pose: # Pose to search for the beacon for objects
+ position: {x: 0.5, y: 0.5, z: 0.0}
+ orientation: {x: 0.0, y: 0.0, z: 0.0, w: 1.0}
+ search_polygon: [[0.4, 0.4], [0.4, 0.6], [0.6, 0.6], [0.6, 0.4]] # Vertices composing the polygon of the beacon, for search
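+    # The remaining rooms follow the same schema as bedroom; their values are
+    # left empty here as placeholders to be filled in for a given arena.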
+ kitchen:
+ pose:
+ orientation: {}
+ position: {}
+ polygon:
+ beacons:
+ office:
+ pose:
+ orientation: {}
+ position: {}
+ polygon:
+ beacons:
+ living_room:
+ pose:
+ orientation: {}
+ position: {}
+ polygon:
+ beacons:
diff --git a/tasks/gpsr/config/lab.yaml b/tasks/gpsr/config/lab.yaml
new file mode 100644
index 000000000..96cd8cd52
--- /dev/null
+++ b/tasks/gpsr/config/lab.yaml
@@ -0,0 +1,38 @@
+arena:
+ polygon: [[-1.76, 1.94], [1.87, 2.80], [1.43, 4.71], [0.88, 5.15], [3.05, 5.74], [4.96, 7.79], [7.49, 5.57], [4.62, 0.62], [5.28, -2.72], [3.75, -4.79], [2.36, -5.15], [-0.40, -3.94]]
+ rooms:
+ office:
+ pose:
+ position: {x: 2.49, y: 2.51, z: 0.0}
+ orientation: {x: 0.0, y: 0.0, z: -0.58, w: 0.81}
+ polygon: [[1.75, 3.65], [2.88, 3.91], [3.0, 3.55], [5.21, 1.49], [5.59, 0.64], [5.23, -2.72], [3.72, -4.79], [2.41, -5.21], [0.30, -4.45], [0.25, -4.47], [0.07, -3.84], [-0.41, -3.91], [-1.76, 1.93], [1.93, 2.79]]
+ beacons:
+ tv_desk:
+ near_pose:
+ position: {x: 0.96, y: 0.82, z: 0.0}
+ orientation: {x: 0.0, y: 0.0, z: 0.76, w: 0.65}
+ near_polygon: [[-0.11, 0.00], [-0.08, 2.36], [1.84, 2.75], [2.02, 1.31]]
+ search_pose:
+ position: {x: 0.97, y: 1.60, z: 0.0}
+ orientation: {x: 0.0, y: 0.0, z: 0.0, w: 0.0}
+ search_polygon: [[0.40, 1.92], [0.31, 2.45], [1.36, 2.70], [1.456, 2.17]]
+ window_desk:
+ near_pose:
+ position: {x: 2.38, y: -3.0, z: 0.0}
+ orientation: {x: 0.0, y: 0.0, z: -0.62, w: 0.79}
+ near_polygon: [[3.34, -3.27], [3.75, -4.80], [2.47, -5.20], [1.75, -3.58]]
+ search_pose:
+ position: {x: 2.47, y: -3.73, z: 0.0}
+ orientation: {x: 0.0, y: 0.0, z: -0.61, w: 0.79}
+ search_polygon: [[3.53, -3.98], [2.38, -5.18], [1.90, -4.36], [3.54, -4.03]]
+ kitchen:
+ pose:
+ position: {x: 2.26, y: 4.68, z: 0.0}
+ orientation: {x: 0.0, y: 0.0, z: 0.44, w: 0.90}
+ polygon: [[1.75, 3.68], [1.37, 5.23], [3.08, 5.69], [2.78, 7.16], [4.96, 7.67], [7.58, 5.42], [7.74, 4.70], [6.20, 2.69], [4.17, 4.28], [3.50, 4.24], [2.85, 4.26], [2.88, 4.04]]
+ beacons:
+ door:
+ near_pose:
+ position: {x: 4.04, y: 6.02, z: 0.0}
+ orientation: {x: 0.0, y: 0.0, z: 0.41, w: 0.91}
+ near_polygon: [[5.65, 7.34], [4.83, 7.72], [2.78, 7.11], [3.0, 6.11]]
diff --git a/tasks/gpsr/data/command_data/.gitignore b/tasks/gpsr/data/command_data/.gitignore
new file mode 100644
index 000000000..2211df63d
--- /dev/null
+++ b/tasks/gpsr/data/command_data/.gitignore
@@ -0,0 +1 @@
+*.txt
diff --git a/tasks/gpsr/data/command_data/.gitkeep b/tasks/gpsr/data/command_data/.gitkeep
new file mode 100644
index 000000000..8d1c8b69c
--- /dev/null
+++ b/tasks/gpsr/data/command_data/.gitkeep
@@ -0,0 +1 @@
+
diff --git a/tasks/gpsr/data/faiss_indices/.gitignore b/tasks/gpsr/data/faiss_indices/.gitignore
new file mode 100644
index 000000000..2e694fd45
--- /dev/null
+++ b/tasks/gpsr/data/faiss_indices/.gitignore
@@ -0,0 +1 @@
+*.index
diff --git a/tasks/gpsr/data/faiss_indices/.gitkeep b/tasks/gpsr/data/faiss_indices/.gitkeep
new file mode 100644
index 000000000..8d1c8b69c
--- /dev/null
+++ b/tasks/gpsr/data/faiss_indices/.gitkeep
@@ -0,0 +1 @@
+
diff --git a/tasks/gpsr/data/mock_data/names.json b/tasks/gpsr/data/mock_data/names.json
index 7dbbe563f..0882a43c4 100644
--- a/tasks/gpsr/data/mock_data/names.json
+++ b/tasks/gpsr/data/mock_data/names.json
@@ -4,7 +4,7 @@
"angel",
"axel",
"charlie",
- "janes",
+ "jane",
"jules",
"morgan",
"paris",
diff --git a/tasks/gpsr/scripts/main.py b/tasks/gpsr/scripts/main.py
new file mode 100644
index 000000000..4c0e15f39
--- /dev/null
+++ b/tasks/gpsr/scripts/main.py
@@ -0,0 +1,45 @@
+#!/usr/bin/env python3
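+# Usage (assumed): rosrun gpsr main.py <path to the GPSR known-data directory>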
+import smach
+import rospy
+import sys
+from typing import Dict
+from gpsr.load_known_data import GPSRDataLoader
+from gpsr.state_machine_factory import build_state_machine
+from gpsr.regex_command_parser import Configuration
+from gpsr.states import CommandParserStateMachine
+
+
+def load_gpsr_configuration() -> Configuration:
+    """Loads the configuration for the GPSR command parser."""
+    gpsr_data_dir = sys.argv[1]
+ data_loader = GPSRDataLoader(data_dir=gpsr_data_dir)
+ gpsr_known_data: Dict = data_loader.load_data()
+ config = Configuration(
+ {
+ "person_names": gpsr_known_data["names"],
+ "location_names": gpsr_known_data["non_placeable_locations"],
+ "placement_location_names": gpsr_known_data["placeable_locations"],
+ "room_names": gpsr_known_data["rooms"],
+ "object_names": gpsr_known_data["objects"],
+ "object_categories_plural": gpsr_known_data["categories_plural"],
+ "object_categories_singular": gpsr_known_data["categories_singular"],
+ }
+ )
+ return config
+
+
+def main():
+ config = load_gpsr_configuration()
+ command_parser_sm = CommandParserStateMachine(data_config=config)
+ command_parser_sm.execute()
+ parsed_command: Dict = command_parser_sm.userdata.parsed_command
+ rospy.loginfo(f"Parsed command: {parsed_command}")
+ sm = build_state_machine(parsed_command)
+ sm.execute()
+
+
+if __name__ == "__main__":
+ rospy.init_node("gpsr_main")
+ main()
+ rospy.spin()
diff --git a/tasks/gpsr/src/gpsr/create_command_indices.py b/tasks/gpsr/src/gpsr/create_command_indices.py
new file mode 100755
index 000000000..013d7aae4
--- /dev/null
+++ b/tasks/gpsr/src/gpsr/create_command_indices.py
@@ -0,0 +1,24 @@
+#!/usr/bin/env python3
+
+
+if __name__ == "__main__":
+ rospy.init_node("test_index_service")
+ rospy.wait_for_service("lasr_faiss/txt_index")
+ from lasr_vector_databases_msgs.srv import TxtIndex, TxtIndexRequest
+
+ request = TxtIndexRequest()
+ txt_paths = []
+ for i in range(10):
+ txt_paths.append(
+ f"/home/mattbarker/LASR/lasr_ws/src/lasr-base/tasks/gpsr/data/command_data/all_gpsr_commands_chunk_{i+1}.txt"
+ )
+
+ request.txt_paths = txt_paths
+ request.index_paths = [
+ "/home/mattbarker/LASR/lasr_ws/src/lasr-base/tasks/gpsr/data/command_data/all_gpsr_commands.index"
+ ]
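+    # "IVF4096,PQ32" builds an inverted-file FAISS index with 4096 coarse
+    # clusters and 32-byte product-quantised codes, which keeps the index
+    # compact for millions of command sentences.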
+ request.index_factory_string = "IVF4096,PQ32"
+
+ rospy.ServiceProxy("lasr_faiss/txt_index", TxtIndex)(request)
+ rospy.spin()
diff --git a/tasks/gpsr/src/gpsr/state_machine_factory.py b/tasks/gpsr/src/gpsr/state_machine_factory.py
new file mode 100644
index 000000000..d7bc33e4d
--- /dev/null
+++ b/tasks/gpsr/src/gpsr/state_machine_factory.py
@@ -0,0 +1,83 @@
+#!/usr/bin/env python3
+import rospy
+import smach
+from smach_ros import ServiceState
+from typing import Dict, List
+from lasr_skills import GoToLocation, FindNamedPerson
+from gpsr.states import Talk
+
+STATE_COUNT = 0
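+# States are named sequentially (STATE_1, STATE_2, ...). Transition targets are
+# computed when the machine is built, so "STATE_{STATE_COUNT + 1}" refers to
+# the state that the *next* call to sm.add() will register.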
+
+
+def increment_state_count() -> int:
+ global STATE_COUNT
+ STATE_COUNT += 1
+ return STATE_COUNT
+
+
+def build_state_machine(parsed_command: Dict) -> smach.StateMachine:
+ """Constructs the parameterized state machine for the GPSR task,
+ given the parsed command.
+
+ Args:
+ parsed_command (Dict): parsed command.
+
+ Returns:
+        smach.StateMachine: parameterized state machine ready to be executed.
+ """
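+    # A (hypothetical) parsed command might look like:
+    #   {"commands": ["greet"], "command_params": [{"name": "jane", "location": "office"}]}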
+ command_verbs: List[str] = parsed_command["commands"]
+ command_params: List[Dict] = parsed_command["command_params"]
+ sm = smach.StateMachine(outcomes=["succeeded", "failed"])
+ with sm:
+ for command_verb, command_param in zip(command_verbs, command_params):
+ if command_verb == "greet":
+ if "name" in command_param:
+ location_param_room = (
+ f"/gpsr/arena/rooms/{command_param['location']}"
+ )
+ location_param_pose = f"{location_param_room}/pose"
+ sm.add(
+ f"STATE_{increment_state_count()}",
+ GoToLocation(location_param=location_param_pose),
+ transitions={
+ "succeeded": f"STATE_{STATE_COUNT + 1}",
+ "failed": "failed",
+ },
+ )
+ sm.add(
+ f"STATE_{increment_state_count()}",
+ FindNamedPerson(
+ name=command_param["name"],
+ location_param=location_param_room,
+ ),
+ transitions={
+ "succeeded": f"STATE_{STATE_COUNT + 1}",
+ "failed": "failed",
+ },
+ )
+ elif "clothes" in command_param:
+ pass
+ else:
+ raise ValueError(
+ "Greet command received with no name or clothes in command parameters"
+ )
+ elif command_verb == "talk":
+ if "gesture" in command_param:
+ pass
+ elif "talk" in command_param:
+ sm.add(
+ f"STATE_{increment_state_count()}",
+ Talk(command_param["talk"]),
+ transitions={"succeeded": "succeeded", "failed": "failed"},
+ )
+ else:
+ raise ValueError(
+ "Talk command received with no gesture or talk in command parameters"
+ )
+
+ return sm
diff --git a/tasks/gpsr/src/gpsr/states/__init__.py b/tasks/gpsr/src/gpsr/states/__init__.py
new file mode 100644
index 000000000..84504f968
--- /dev/null
+++ b/tasks/gpsr/src/gpsr/states/__init__.py
@@ -0,0 +1,3 @@
+from .talk import Talk
+from .command_similarity_matcher import CommandSimilarityMatcher
+from .command_parser import ParseCommand, CommandParserStateMachine
diff --git a/tasks/gpsr/src/gpsr/states/command_parser.py b/tasks/gpsr/src/gpsr/states/command_parser.py
new file mode 100644
index 000000000..f0345bdcb
--- /dev/null
+++ b/tasks/gpsr/src/gpsr/states/command_parser.py
@@ -0,0 +1,85 @@
+#!/usr/bin/env python3
+import smach
+import rospy
+
+from gpsr.regex_command_parser import Configuration, gpsr_compile_and_parse
+from gpsr.states import CommandSimilarityMatcher
+from lasr_skills import AskAndListen
+
+
+class ParseCommand(smach.State):
+ def __init__(self, data_config: Configuration):
+ """Takes in a string containing the command and runs the command parser
+ that outputs a dictionary of parameters for the command.
+
+ Args:
+ data_config (Configuration): Configuration object containing the regex patterns
+ """
+ smach.State.__init__(
+ self,
+ outcomes=["succeeded", "failed"],
+ input_keys=["raw_command"],
+ output_keys=["parsed_command"],
+ )
+ self.data_config = data_config
+
+ def execute(self, userdata):
+        rospy.loginfo(f"Received command: {userdata.raw_command.lower()}")
+ try:
+ userdata.parsed_command = gpsr_compile_and_parse(
+ self.data_config, userdata.raw_command.lower()
+ )
+ except Exception as e:
+ rospy.logerr(e)
+ return "failed"
+ return "succeeded"
+
+
+class CommandParserStateMachine(smach.StateMachine):
+ def __init__(
+ self,
+ data_config: Configuration,
+ n_vecs_per_txt_file: int = 1177943,
+ total_txt_files: int = 10,
+ ):
+ """State machine that takes in a command, matches it to a known command, and
+ outputs the parsed command.
+
+ Args:
+ data_config (Configuration): Configuration object containing the regex patterns
+            n_vecs_per_txt_file (int, optional): number of vectors in each gpsr txt
+            file. Defaults to 1177943.
+ total_txt_files (int, optional): total number of gpsr txt files. Defaults to 10.
+ """
+ smach.StateMachine.__init__(self, outcomes=["succeeded", "failed"])
+
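+        # Flow: ask for a command, then try to parse it. If parsing fails, fall
+        # back to matching the transcription against the known command set and
+        # re-parse the matched command.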
+ with self:
+ smach.StateMachine.add(
+ "ASK_FOR_COMMAND",
+ AskAndListen(tts_phrase="Hello, please tell me your command."),
+ transitions={"succeeded": "PARSE_COMMAND", "failed": "failed"},
+ remapping={"transcribed_speech": "raw_command"},
+ )
+
+ smach.StateMachine.add(
+ "PARSE_COMMAND",
+ ParseCommand(data_config),
+ transitions={
+ "succeeded": "succeeded",
+ "failed": "COMMAND_SIMILARITY_MATCHER",
+ },
+ remapping={"parsed_command": "parsed_command"},
+ )
+
+ smach.StateMachine.add(
+ "COMMAND_SIMILARITY_MATCHER",
+ CommandSimilarityMatcher([n_vecs_per_txt_file] * total_txt_files),
+ transitions={"succeeded": "PARSE_COMMAND", "failed": "failed"},
+ remapping={
+ "command": "raw_command",
+ "matched_command": "raw_command",
+ },
+ )
diff --git a/tasks/gpsr/src/gpsr/states/command_similarity_matcher.py b/tasks/gpsr/src/gpsr/states/command_similarity_matcher.py
new file mode 100755
index 000000000..3982f9f67
--- /dev/null
+++ b/tasks/gpsr/src/gpsr/states/command_similarity_matcher.py
@@ -0,0 +1,91 @@
+#!/usr/bin/env python3
+import smach
+import rospy
+import rospkg
+import os
+from lasr_vector_databases_msgs.srv import TxtQuery, TxtQueryRequest
+
+
+class CommandSimilarityMatcher(smach.State):
+ def __init__(self, n_vecs_per_txt_file):
+ smach.State.__init__(
+ self,
+ outcomes=["succeeded", "failed"],
+ input_keys=["command", "n_vecs_per_txt_file"],
+ output_keys=["matched_command"],
+ )
+ self._n_vecs_per_txt_file = n_vecs_per_txt_file
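+        # The command set is split across 10 text chunks served by a single
+        # FAISS index; n_vecs_per_txt_file lists how many vectors each chunk
+        # contributes, so that hits can be mapped back to a sentence file.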
+
+ self._query_service = rospy.ServiceProxy("lasr_faiss/txt_query", TxtQuery)
+ self._text_directory = os.path.join(
+ rospkg.RosPack().get_path("gpsr"), "data", "command_data"
+ )
+ self._index_directory = os.path.join(
+ rospkg.RosPack().get_path("gpsr"), "data", "faiss_indices"
+ )
+ self._text_paths = [
+ os.path.join(self._text_directory, f"all_gpsr_commands_chunk_{i+1}.txt")
+ for i in range(10)
+ ]
+ self._index_paths = [
+            os.path.join(self._index_directory, "all_gpsr_commands.index")
+ ]
+
+ def execute(self, userdata):
+ rospy.loginfo(f"Received command: {userdata.command}")
+ request = TxtQueryRequest()
+ request.txt_paths = self._text_paths
+ request.index_paths = self._index_paths
+ request.query_sentence = userdata.command
+ request.k = 1
+ request.vecs_per_txt_file = self._n_vecs_per_txt_file
+ response = self._query_service(request)
+ userdata.matched_command = response.closest_sentences[0]
+ rospy.loginfo(f"Matched command: {response.closest_sentences[0]}")
+ return "succeeded"
+
+
+if __name__ == "__main__":
+ rospy.init_node("command_similarity_matcher")
+ from lasr_skills import AskAndListen, Say, Listen
+
+ sm = smach.StateMachine(outcomes=["succeeded", "failed"])
+ with sm:
+ sm.userdata.tts_phrase = "Please tell me your command."
+ # smach.StateMachine.add(
+ # "ASK_FOR_COMMAND",
+ # AskAndListen(),
+ # transitions={"succeeded": "COMMAND_SIMILARITY_MATCHER", "failed": "failed"},
+ # remapping={"transcribed_speech": "command"},
+ # )
+ sm.add(
+ "LISTEN",
+ Listen(),
+ transitions={
+ "succeeded": "COMMAND_SIMILARITY_MATCHER",
+ "preempted": "failed",
+ "aborted": "failed",
+ },
+ remapping={"sequence": "command"},
+ )
+ sm.add(
+ "COMMAND_SIMILARITY_MATCHER",
+ CommandSimilarityMatcher([1177943] * 10),
+        transitions={"succeeded": "LISTEN", "failed": "failed"},
+ )
+ # smach.StateMachine.add(
+ # "SAY_MATCHED_COMMAND",
+ # Say(),
+ # transitions={
+ # "succeeded": "ASK_FOR_COMMAND",
+ # "aborted": "failed",
+ # "preempted": "failed",
+ # },
+ # remapping={"text": "matched_command"},
+ # )
+
+ sm.execute()
+ rospy.spin()
diff --git a/tasks/gpsr/src/gpsr/states/talk.py b/tasks/gpsr/src/gpsr/states/talk.py
new file mode 100644
index 000000000..5be20ef83
--- /dev/null
+++ b/tasks/gpsr/src/gpsr/states/talk.py
@@ -0,0 +1,76 @@
+import smach
+from lasr_skills import Say
+import time
+from typing import Dict
+import rospy
+
+
+# In future we might want this state machine to also look at the person being spoken to.
+class Talk(smach.StateMachine):
+ class GenerateResponse(smach.State):
+ def __init__(self, talk_phrase: str):
+ smach.State.__init__(
+ self,
+ outcomes=["succeeded", "failed"],
+ output_keys=["response"],
+ )
+ self._talk_phrase = talk_phrase
+
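+        # Maps canonical question phrases (as produced by the command parser)
+        # to spoken responses; lookup requires an exact key match.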
+ def _create_responses(self) -> Dict[str, str]:
+ response = {}
+ response["something about yourself"] = (
+ "I am a Tiago -- a helpful assistive robot developed by PAL Robotics."
+ )
+ current_time = time.strftime("%H:%M")
+ response["the time"] = f"The current time is {current_time}."
+ current_day = time.strftime("%A")
+ response["what day is today"] = f"Today is {current_day}."
+ tomorrow = time.strftime("%A", time.localtime(time.time() + 86400))
+ response["what day is tomorrow"] = f"Tomorrow is {tomorrow}."
+ response["your teams name"] = "Our team is called LASR."
+ response["your teams country"] = "Our team is from the United Kingdom."
+ response["your teams affiliation"] = (
+ "Our team is affiliated with King's College London."
+ )
+ day_of_the_week = current_day
+ day_of_the_month = time.strftime("%d")
+ response["the day of the week"] = f"Today is {day_of_the_week}."
+
+ response["the day of the month"] = (
+ f"The day of the month is {day_of_the_month}."
+ )
+ return response
+
+ def execute(self, userdata):
+ try:
+ userdata.response = self._create_responses()[self._talk_phrase]
+ except KeyError:
+ rospy.loginfo(
+ f"Failed to generate response for {self._talk_phrase} as it is not in the list of possible questions."
+ )
+ return "failed"
+ return "succeeded"
+
+ def __init__(self, talk_phrase: str):
+ smach.StateMachine.__init__(self, outcomes=["succeeded", "failed"])
+
+ with self:
+ smach.StateMachine.add(
+ "GENERATE_RESPONSE",
+ self.GenerateResponse(talk_phrase),
+ transitions={"succeeded": "SAY_RESPONSE", "failed": "failed"},
+ remapping={"response": "response"},
+ )
+
+ smach.StateMachine.add(
+ "SAY_RESPONSE",
+ Say(),
+ transitions={
+ "succeeded": "succeeded",
+ "aborted": "failed",
+ "preempted": "failed",
+ },
+ remapping={"text": "response"},
+ )