diff --git a/.gitignore b/.gitignore new file mode 100644 index 00000000..d298be10 --- /dev/null +++ b/.gitignore @@ -0,0 +1 @@ +public/ \ No newline at end of file diff --git a/assets/images/sdk/first-moves/firewall.png b/assets/images/sdk/first-moves/firewall.png new file mode 100644 index 00000000..91fdbd2e Binary files /dev/null and b/assets/images/sdk/first-moves/firewall.png differ diff --git a/assets/images/sdk/first-moves/pykernel.png b/assets/images/sdk/first-moves/pykernel.png new file mode 100644 index 00000000..c22a3072 Binary files /dev/null and b/assets/images/sdk/first-moves/pykernel.png differ diff --git a/assets/images/sdk/first-moves/python_env.png b/assets/images/sdk/first-moves/python_env.png new file mode 100644 index 00000000..08963622 Binary files /dev/null and b/assets/images/sdk/first-moves/python_env.png differ diff --git a/assets/images/sdk/first-moves/reachy_env.png b/assets/images/sdk/first-moves/reachy_env.png new file mode 100644 index 00000000..0977f5ab Binary files /dev/null and b/assets/images/sdk/first-moves/reachy_env.png differ diff --git a/assets/images/sdk/first-moves/reachy_methods.png b/assets/images/sdk/first-moves/reachy_methods.png new file mode 100644 index 00000000..bcc4d68f Binary files /dev/null and b/assets/images/sdk/first-moves/reachy_methods.png differ diff --git a/assets/images/sdk/getting-started/IP_address.jpg b/assets/images/sdk/getting-started/IP_address.jpg new file mode 100644 index 00000000..71b5a8ea Binary files /dev/null and b/assets/images/sdk/getting-started/IP_address.jpg differ diff --git a/assets/images/sdk/getting-started/activate_env.png b/assets/images/sdk/getting-started/activate_env.png new file mode 100644 index 00000000..db76f8c7 Binary files /dev/null and b/assets/images/sdk/getting-started/activate_env.png differ diff --git a/assets/images/sdk/getting-started/conda_install.png b/assets/images/sdk/getting-started/conda_install.png new file mode 100644 index 00000000..0416e441 Binary files /dev/null and b/assets/images/sdk/getting-started/conda_install.png differ diff --git a/assets/images/sdk/getting-started/conda_install_2.png b/assets/images/sdk/getting-started/conda_install_2.png new file mode 100644 index 00000000..14db2931 Binary files /dev/null and b/assets/images/sdk/getting-started/conda_install_2.png differ diff --git a/assets/images/sdk/getting-started/create_env.png b/assets/images/sdk/getting-started/create_env.png new file mode 100644 index 00000000..c4b78731 Binary files /dev/null and b/assets/images/sdk/getting-started/create_env.png differ diff --git a/content/developing-with-reachy-2/_index.md b/content/developing-with-reachy-2/_index.md index 6c7a7ec5..9a39fc10 100644 --- a/content/developing-with-reachy-2/_index.md +++ b/content/developing-with-reachy-2/_index.md @@ -8,3 +8,11 @@ draft: false images: [] type: docs --- + +In this section, you will learn how to : + +- ["install the reachy2_sdk library"]({{< ref "developing-with-reachy-2/getting-started-sdk/installation" >}}) +- ["connect to your robot"]({{< ref "developing-with-reachy-2/getting-started-sdk/connect-reachy2" >}}) +- ["use a simulated robot"]({{< ref "developing-with-reachy-2/simulation/" >}}) +- ["use basic sdk functions"]({{< ref "developing-with-reachy-2/basics/" >}}) +- ["build complex behaviors"]({{< ref "developing-with-reachy-2/advanced-tutos/" >}}) \ No newline at end of file diff --git a/content/developing-with-reachy-2/advanced-tutos/1-reachy-awakening.md b/content/developing-with-reachy-2/advanced-tutos/1-reachy-awakening.md 
new file mode 100644 index 00000000..d9aa8851 --- /dev/null +++ b/content/developing-with-reachy-2/advanced-tutos/1-reachy-awakening.md @@ -0,0 +1,48 @@ +--- +title: "Reachy's Awakening" +description: "" +lead: "Your first tracking with head using arm kinematics" +date: 2023-07-26T08:05:23+02:00 +lastmod: 2023-07-26T08:05:23+02:00 +draft: false +images: [] +type: docs +menu: + developing-with-reachy-2: + parent: "Advanced tutorials" +weight: 300 +toc: true +--- +
+ +## Introduction + + +Now, you’ve learned the **basics behaviours** available with reachy2_sdk. But how to use them to **build your own program** may still seem a little abstract. + +That's why we've created a series of **practical tutorials** to guide you step-by-step through the process of thinking about and building different programs that will help you understand the mechanics involved in creating **your own behaviour**! + +For the moment, there are **three tutorials** available, ranging from the simplest to the most complex, to help you get to grips with different Reachy functions. + +The first two use only the **SDK Client**, and the last one adds object detection with AI model from **pollen-vision.** + +You will find the GitHub repository just [there](https://github.com/pollen-robotics/reachy2-tutorials), and to make the third tutorial with pollen-vision, you need to install the library on your virtual environment : + +```python +pip install --user --no-warn-script-location "pollen-vision[vision] @ git+https://github.com/pollen-robotics/pollen-vision.git@develop" +pip install depthai +``` + +Now that you're all set up, have fun ! + +## Reachy's Awakening + +In [this tutorial](https://github.com/pollen-robotics/reachy2-tutorials/blob/main/1_Reachy_awakening.ipynb), we will make Reachy do the awake sequence, which is a series of movements that makes it look like it is waking up. It involves moving its head and arms and can be used as a starting point for more complex sequences. + +What you will learn: + +- How to make it move its **head** +- How to make it move its **arms** +- How to **synchronize** head and arms movements + +Gif awake \ No newline at end of file diff --git a/content/developing-with-reachy-2/advanced-tutos/2-reachy-the-mime.md b/content/developing-with-reachy-2/advanced-tutos/2-reachy-the-mime.md new file mode 100644 index 00000000..0861fe24 --- /dev/null +++ b/content/developing-with-reachy-2/advanced-tutos/2-reachy-the-mime.md @@ -0,0 +1,29 @@ +--- +title: "Reachy the Mime" +description: "" +lead: "Synchronize head, arm and mobile base movements" +date: 2023-07-26T08:05:23+02:00 +lastmod: 2023-07-26T08:05:23+02:00 +draft: false +images: [] +type: docs +menu: + developing-with-reachy-2: + parent: "Advanced tutorials" +weight: 310 +toc: true +--- + +In [this tutorial](https://github.com/pollen-robotics/reachy2-tutorials/blob/main/2_Reachy_the_mime.ipynb), we will transform Reachy into a mime who pulls an invisible rope. + + + +What you will learn : + +- How to use the **mobile base**, +- How to **synchronize** arms and mobile base movements, +- How and when to use **blocking** gotos, +- How to follow a particular **trajectory** with the end effector. + + +Gif mime \ No newline at end of file diff --git a/content/developing-with-reachy-2/advanced-tutos/3-reachy-the-greengrocer.md b/content/developing-with-reachy-2/advanced-tutos/3-reachy-the-greengrocer.md new file mode 100644 index 00000000..810cde78 --- /dev/null +++ b/content/developing-with-reachy-2/advanced-tutos/3-reachy-the-greengrocer.md @@ -0,0 +1,29 @@ +--- +title: "Reachy the Greengrocer" +description: "" +lead: "Use pollen_vision to plug an vision model with the SDK for fruit detection and manipulation" +date: 2023-07-26T08:05:23+02:00 +lastmod: 2023-07-26T08:05:23+02:00 +draft: false +images: [] +type: docs +menu: + developing-with-reachy-2: + parent: "Advanced tutorials" +weight: 320 +toc: true +--- + +
+ + +In this [tutorial](https://github.com/pollen-robotics/reachy2-tutorials/blob/main/3_Reachy_the_greengrocer.ipynb), we will ask Reachy to sort fruits on a table and to drop them in user-defined places, according to which fruit it is. + +What you will learn : + +- How to do object detection +- How to switch from the image frame to the robot frame +- How to make Reachy move according to what it sees + + +Gif greengrocer \ No newline at end of file diff --git a/content/developing-with-reachy-2/advanced-tutos/_index.md b/content/developing-with-reachy-2/advanced-tutos/_index.md index bf8ad40e..fcf44f0b 100644 --- a/content/developing-with-reachy-2/advanced-tutos/_index.md +++ b/content/developing-with-reachy-2/advanced-tutos/_index.md @@ -11,3 +11,23 @@ menu: developing-with-reachy-2: weight: 30 --- + + + +Now, you’ve learned the **basics behaviours** available with reachy2_sdk. But how to use them to **build your own program** may still seem a little abstract. + +That's why we've created a series of **practical tutorials** to guide you step-by-step through the process of thinking about and building different programs that will help you understand the mechanics involved in creating **your own behaviour**! + +For the moment, there are **three tutorials** available, ranging from the simplest to the most complex, to help you get to grips with different Reachy functions. + +The first two use only the **SDK Client**, and the last one adds object detection with AI model from **pollen-vision.** + +You will find the GitHub repository just [there](https://github.com/pollen-robotics/reachy2-tutorials), and to make the third tutorial with pollen-vision, you need to install the library on your virtual environment : + +```python +pip install --user --no-warn-script-location "pollen-vision[vision] @ git+https://github.com/pollen-robotics/pollen-vision.git@develop" +pip install depthai +``` + +Now that you're all set up, have fun ! 
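+
+If you want to check that the install went through before starting the third tutorial, a quick sanity check from the same virtual environment is shown below (this assumes the two packages expose the module names `pollen_vision` and `depthai`):
+
+```python
+# Quick install check: run this inside the virtual environment used for the tutorials.
+import importlib.util
+
+for module in ("pollen_vision", "depthai"):
+    status = "installed" if importlib.util.find_spec(module) else "missing"
+    print(f"{module}: {status}")
+```
+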
+ diff --git a/content/developing-with-reachy-2/advanced-tutos/reachy-awakening.md b/content/developing-with-reachy-2/advanced-tutos/reachy-awakening.md deleted file mode 100644 index 47750c31..00000000 --- a/content/developing-with-reachy-2/advanced-tutos/reachy-awakening.md +++ /dev/null @@ -1,17 +0,0 @@ ---- -title: "Reachy's Awakening" -description: "" -lead: "Your first tracking with head using arm kinematics" -date: 2023-07-26T08:05:23+02:00 -lastmod: 2023-07-26T08:05:23+02:00 -draft: false -images: [] -type: docs -menu: - developing-with-reachy-2: - parent: "Advanced tutorials" -weight: 300 -toc: true ---- - -*Page in progress* \ No newline at end of file diff --git a/content/developing-with-reachy-2/advanced-tutos/reachy-the-greengrocer.md b/content/developing-with-reachy-2/advanced-tutos/reachy-the-greengrocer.md deleted file mode 100644 index 218b35ab..00000000 --- a/content/developing-with-reachy-2/advanced-tutos/reachy-the-greengrocer.md +++ /dev/null @@ -1,17 +0,0 @@ ---- -title: "Reachy the Greengrocer" -description: "" -lead: "Use pollen_vision to plug an vision model with the SDK for fruit detection and manipulation" -date: 2023-07-26T08:05:23+02:00 -lastmod: 2023-07-26T08:05:23+02:00 -draft: false -images: [] -type: docs -menu: - developing-with-reachy-2: - parent: "Advanced tutorials" -weight: 320 -toc: true ---- - -*Page in progress* \ No newline at end of file diff --git a/content/developing-with-reachy-2/advanced-tutos/reachy-the-mime.md b/content/developing-with-reachy-2/advanced-tutos/reachy-the-mime.md deleted file mode 100644 index bb114f8b..00000000 --- a/content/developing-with-reachy-2/advanced-tutos/reachy-the-mime.md +++ /dev/null @@ -1,17 +0,0 @@ ---- -title: "Reachy the Mime" -description: "" -lead: "Synchronize head, arm and mobile base movements" -date: 2023-07-26T08:05:23+02:00 -lastmod: 2023-07-26T08:05:23+02:00 -draft: false -images: [] -type: docs -menu: - developing-with-reachy-2: - parent: "Advanced tutorials" -weight: 310 -toc: true ---- - -*Page in progress* \ No newline at end of file diff --git a/content/developing-with-reachy-2/basics/1-hello-world.md b/content/developing-with-reachy-2/basics/1-hello-world.md index 92c2da71..e9250451 100644 --- a/content/developing-with-reachy-2/basics/1-hello-world.md +++ b/content/developing-with-reachy-2/basics/1-hello-world.md @@ -12,22 +12,110 @@ menu: parent: "SDK basics" weight: 200 toc: true +slug: "1-hello-world" +url: "/developing-with-reachy-2/basics/1-hello-world/" --- +## Materials + +To guide you through the SDK's functionalities, you can follow this written documentation (which is more complete), as well as the notebooks provided, so you can learn as you interact with your robot. + +#### Find the notebooks +
+ If you cloned the reachy2_sdk repository + +You can find the example notebooks in reachy2_sdk/src/examples. +
+ +
+If you installed the reachy2_sdk from Pypi +You haven't installed the examples on your computer. You can clone the folder in the directory that you want, by copying and pasting the instructions below into a terminal, in the desired folder. This will add an examples_sdk folder where you can find all the notebooks! +
+
+ + + + + ```python + git clone --no-checkout https://github.com/pollen-robotics/reachy2-sdk.git examples_sdk + cd examples_sdk + git sparse-checkout init --cone + git sparse-checkout set src/examples + git checkout develop + mv src/examples/* . + rm -rf src/ + ``` + +
+ +#### Follow them + +
+ +If you have any trouble making the notebooks work, please follow those steps : + +1. In a terminal, go to the folder containing the notebooks + (if the repo has been cloned: *reachy2_sdk/src/examples*, if downloaded: *examples_sdk*) : + > ```cd path/to/folder``` + +2. Open your code editor by writing the command + + > ```code .``` + +3. When you execute the first cell on your code editor, it may ask you if you want to install the jupyter and python extensions : click on “yes”. + +4. Then it will ask you to choose the kernel : choose a python environment then select your virtual environment. + + {{< img-center "images/sdk/first-moves/python_env.png" 600x "python env" >}} + {{< img-center "images/sdk/first-moves/reachy_env.png" 600x "reachy env" >}} + +5. A windows security popup can appear, click on “Allow” + {{< img-center "images/sdk/first-moves/firewall.png" 300x "firewall" >}} + +6. Install the ipykernel package to make the notebooks run : + {{< img-center "images/sdk/first-moves/pykernel.png" 600x "pykernel" >}} + +You are now ready ! + +
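+
+If you want to make sure that the kernel you selected can use the SDK, you can run a cell like this one at the top of a notebook (a minimal check, assuming the reachy2_sdk library is installed in that environment):
+
+```python
+# If this cell runs without error, the selected kernel sees the SDK.
+import sys
+import reachy2_sdk
+
+print(f"Python used by the kernel: {sys.executable}")
+print("reachy2_sdk imported successfully")
+```
+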
+ +
## Be ready to move ### 1. Connect to the robot -If you followed the instructions from ["Connect to Reachy 2"]({{< ref "developing-with-reachy-2/getting-started-sdk/" >}}), you know how to get Reachy's IP address and how to connect to the robot with the command: +If you followed the instructions from ["Connect to Reachy 2"]({{< ref "developing-with-reachy-2/getting-started-sdk/connect-reachy2" >}}), you know how to get Reachy's IP address and how to connect to the robot in a Python interface with the command : -```python +*(type `python` first in your terminal)* + +``` python from reachy2_sdk import ReachySDK -reachy = ReachySDK(host='192.168.0.42') # Replace with the actual IP +reachy = ReachySDK(host='10.0.0.201') # Replace with the actual IP +``` + +
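+
+The same lines also work from a Python file if you prefer running a script to typing in the interpreter. Here is a minimal sketch (the file name and the IP address are only examples, use your robot's IP):
+
+```python
+# hello_reachy.py -- run it with: python hello_reachy.py
+from reachy2_sdk import ReachySDK
+
+reachy = ReachySDK(host='10.0.0.201')  # Replace with the actual IP
+print(f"Connected: {reachy.is_connected()}")
+```
+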
+Check the connection + +You can check the connection with your robot with: +```python +reachy.is_connected() +>>> True +``` + +If the connection has been lost, and the problem has been resolved, you can reconnect to the robot with the `connect()` method: +```python +reachy.connect() ``` -### 2. Turn on motors +
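+
+Putting these two calls together, a small guard like this one can be placed at the beginning of a program (a sketch that only reuses the methods above):
+
+```python
+# Try to recover the connection before sending any command.
+if not reachy.is_connected():
+    reachy.connect()
+
+if not reachy.is_connected():
+    raise RuntimeError("Could not reach the robot: check the network and the IP address.")
+```
+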
+ + +### 2. Turn on / turn off motors + + +#### The whole robot When starting, your robot is in compliant mode, which means you can move its parts by manipulating manually the robot. In this mode, the robot won't respond to any command you send to it. @@ -42,6 +130,11 @@ reachy.turn_off() ``` This will act on all parts of your robot, including the mobile base. + +:warning: Turning off can be a bit brutal, especially if the arms are raised. You can use `reachy.turn_off_smoothly()` for torques to gradually decrease. + +#### Robot parts + If you want to turn on or off a single part, access directly the relevant part and turn it on or off, for example for the left arm: ```python @@ -49,7 +142,7 @@ reachy.l_arm.turn_on() reachy.l_arm.turn_off() ``` -All parts are detailed below in [ReachySDK attributes]({{< ref "developing-with-reachy-2/basics/1-hello-world#attributes" >}}). +All parts are detailed below in [ReachySDK attributes]({{< ref "developing-with-reachy-2/basics/1-hello-world#reachy-attributes" >}}). At any time, you can check the state of your robot using the `is_on()` or `is_off()` method. Note that it will return True only if **all parts** are in the requested state. This means both methods can return False if the right arm is on but not the left one for example. @@ -76,219 +169,240 @@ reachy.l_arm.is_off() # left arm is off >>> True ``` -### 3. Start from a standard position (optional) +### 3. Start from a standard posture (optional) -2 standard positions are accessible and can be called easily to setup your starting position: -- the **zero** pose, with all joints set at 0 degree -- the **elbow_90** pose, with the elbow pitch set at -90 degrees and all other joints at 0 degree +2 standard postures are accessible and can be called easily to setup your starting position: +- the **default** pose, with both arms outstretched on either side of the body (be careful that your robot is at a sufficient height so that the arms do not touch the mobile base). +- the **elbow_90** pose, with the arms bent at 90°. -To start at the zero position, use the `set_pose()` function: +To start at the default posture, use the `goto_posture()` function: ```python -reachy.set_pose('zero') +reachy.goto_posture('default') ``` -By default, this movement is made in 2 seconds. You can choose to specify a custom duration. For example, to reach the elbow_90 pose in 5 seconds: +By default, this movement is made in 2 seconds. You can choose to specify a custom duration. For example, to reach the elbow_90 posture in 5 seconds: ```python -reachy.set_pose('elbow_90', duration=5) +reachy.goto_posture('elbow_90', duration=5) ``` -## Check connection - -At any time, you can check the connection between your SDK and the robot is still open with: -```python -reachy.is_connected() ->>> True -``` - -If the connection has been lost, and the problem has been resolved, you can reconnect to the robot with the `connect()`method: -```python -reachy.connect() -``` - -{{< alert icon="💡" text="You cannot use this method to connect to another IP address. It will automatically reconnect to the initial instantiated robot." >}} ## ReachySDK object -The *reachy* object instanciated from the ReachySDK class above is the root access to get all incoming information from Reachy 2 (joints or cameras) and to control each part of the robot (left/right arm, head, mobile base). 
+The *reachy* object instanciated from the ReachySDK class above is the root access to get all incoming information from Reachy 2 (joints or sensors) and to control each part of the robot (left/right arm, head, mobile base). -The *reachy* object has 7 attributes and ?? methods that we will quickly present here, more detailed information are given in the dedicated pages after this one. +The *reachy* object has 7 attributes and numerous methods which you can find in the [documentation](https://pollen-robotics.github.io/reachy2-sdk/reachy2_sdk/reachy_sdk.html). -{{< alert icon="💡" text="Note that you can only instantiate one ReachySDK in a session." >}} +If you want to have an overview, you can browse the basic attributes and methods below. -{{< img-center "images/sdk/first-moves/reachy_attributes.png" 400x "" >}} +{{< img-center "images/sdk/first-moves/reachy_methods.png" 400x "" >}} -### Attributes +
+Reachy's attributes -The *reachy* attributes detailed give to access to info, parts and sensors of the robot. +The *reachy* detailed attributes give access to info, parts and sensors of the robot. #### List of attributes -[reachy.cameras]({{< ref "developing-with-reachy-2/basics/1-hello-world#reachycameras" >}}) -[reachy.head]({{< ref "developing-with-reachy-2/basics/1-hello-world#reachyhead" >}}) + [reachy.info]({{< ref "developing-with-reachy-2/basics/1-hello-world#reachyinfo" >}}) +[reachy.mobile_base]({{< ref "developing-with-reachy-2/basics/1-hello-world#reachymobile_base" >}}) [reachy.joints]({{< ref "developing-with-reachy-2/basics/1-hello-world#reachyjoints" >}}) +[reachy.head]({{< ref "developing-with-reachy-2/basics/1-hello-world#reachyhead" >}}) [reachy.l_arm]({{< ref "developing-with-reachy-2/basics/1-hello-world#reachyl_arm" >}}) -[reachy.mobile_base]({{< ref "developing-with-reachy-2/basics/1-hello-world#reachymobile_base" >}}) -[reachy.r_arm]({{< ref "developing-with-reachy-2/basics/1-hello-world#reachyr_arm" >}}) - -#### reachy.cameras +[reachy.r_arm]({{< ref "developing-with-reachy-2/basics/1-hello-world#reachyr_arm" >}}) +[reachy.cameras]({{< ref "developing-with-reachy-2/basics/1-hello-world#reachycameras" >}}) + +#### reachy.info -[Camera object](https://pollen-robotics.github.io/reachy-sdk/api/camera.html). It is used to recover the last image captured by the left camera and also to control the motorized zoom attached to the camera. +[Info object](https://pollen-robotics.github.io/reachy2-sdk/reachy2_sdk/config/reachy_info.html) containing Reachy's informations ```python -reachy.left_camera ->>> -``` +reachy.info +>>> + ``` -#### reachy.head +#### reachy.mobile_base -[Head object](https://pollen-robotics.github.io/reachy-sdk/api/head.html). -Contains the three joints composing the Orbita actuator along with methods for its kinematics or to control it. +[Mobile_base object](https://pollen-robotics.github.io/reachy2-sdk/reachy2_sdk/reachy_sdk.html#ReachySDK.mobile_base) containing the informations about the mobile base. -```python -reachy.head ->>> - - - - ->> -``` -#### reachy.info +#### reachy.joints -[Camera object](https://pollen-robotics.github.io/reachy-sdk/api/camera.html). It is used to recover the last image captured by the right camera and also to control the motorized zoom attached to the camera. +[Joint object](https://pollen-robotics.github.io/reachy2-sdk/reachy2_sdk/reachy_sdk.html#ReachySDK.joints) containing every joint of the robot, from its arms to its head. This is useful when you want to get information, like the position, from all joints at once. ```python -reachy.right_camera ->>> +reachy.joints +>>> {'r_arm.shoulder.pitch': , + 'r_arm.shoulder.roll': , + 'r_arm.elbow.yaw': , + 'r_arm.elbow.pitch': , + 'r_arm.wrist.roll': , + 'r_arm.wrist.pitch': , + 'r_arm.wrist.yaw': , + 'l_arm.shoulder.pitch': , + 'l_arm.shoulder.roll': , + 'l_arm.elbow.yaw': , + 'l_arm.elbow.pitch': , + 'l_arm.wrist.roll': , + 'l_arm.wrist.pitch': , + 'l_arm.wrist.yaw': , + 'head.neck.roll': , + 'head.neck.pitch': , + 'head.neck.yaw': } + ``` -#### reachy.joints +#### reachy.head -[Joint object](https://pollen-robotics.github.io/reachy-sdk/api/joint.html) containing every joint of the robot, from its arms to its head and antennas. This is useful when you want to get information, like the position, from all joints at once. 
+[Head object](https://pollen-robotics.github.io/reachy2-sdk/reachy2_sdk/reachy_sdk.html#ReachySDK.head) containing the three joints composing the Orbita actuator along with methods for its kinematics or to control it. ```python -reachy.joints ->>> - - - - - - - - - - - - - - - - - - - - -> +reachy.head +>>> + + + +>> ``` #### reachy.l_arm -[Arm object](https://pollen-robotics.github.io/reachy-sdk/api/arm.html) containing every joint in the left arm along with its kinematics methods. +[Arm object](https://pollen-robotics.github.io/reachy2-sdk/reachy2_sdk/reachy_sdk.html#ReachySDK.l_arm) containing every joint in the left arm along with its kinematics methods. ```python reachy.l_arm ->>> - - - - - - - ->> +>>> + +> + elbow: + +> + wrist: + + +> +> + ``` -#### reachy.mobile_base -[Arm object](https://pollen-robotics.github.io/reachy-sdk/api/arm.html) containing every joint in the right arm along with its kinematics methods. +#### reachy.r_arm + +[Arm object](https://pollen-robotics.github.io/reachy2-sdk/reachy2_sdk/reachy_sdk.html#ReachySDK.r_arm) containing every joint in the right arm along with its kinematics methods. ```python reachy.r_arm ->>> - - - - - - - ->> +>>> + +> + elbow: + +> + wrist: + + +> +> + ``` -#### reachy.r_arm +#### reachy.cameras -[Arm object](https://pollen-robotics.github.io/reachy-sdk/api/arm.html) containing every joint in the right arm along with its kinematics methods. +[Camera object](https://pollen-robotics.github.io/reachy2-sdk/reachy2_sdk/reachy_sdk.html#ReachySDK.cameras) containing both cameras of Reachy (teleop and depth one). ```python -reachy.r_arm ->>> - - - - - - - ->> +reachy.cameras +>>> + ``` -### Basic methods +
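+
+As a small example of what these attributes give you, you can list the current position of every joint at once through `reachy.joints` (a sketch, assuming it can be iterated like the dictionary shown above):
+
+```python
+# Print the present position (in degrees) of every joint of the robot.
+for name, joint in reachy.joints.items():
+    print(f"{name}: {joint.present_position:.1f}")
+```
+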
-The *reachy* object has ?? methods, 8 of them being basic methods useful to start using the robot. The other methods are related to robot movements, and will be detailed in a more advanced section. +
+Reachy's basic methods -#### List of basic methods +The *reachy* object has several methods, 8 of them being basic methods useful to start using the robot. The other methods are related to robot movements, and will be detailed in a more advanced section. -[reachy.connect()]({{< ref "developing-with-reachy-2/basics/1-hello-world#reachyconnect" >}}) -[reachy.disconnect()]({{< ref "developing-with-reachy-2/basics/1-hello-world#reachydisconnect" >}}) -[reachy.is_connected()]({{< ref "developing-with-reachy-2/basics/1-hello-world#reachyis_connected" >}}) -[reachy.is_off()]({{< ref "developing-with-reachy-2/basics/1-hello-world#reachyis_off" >}}) -[reachy.is_on()]({{< ref "developing-with-reachy-2/basics/1-hello-world#reachyis_on" >}}) -[reachy.turn_off()]({{< ref "developing-with-reachy-2/basics/1-hello-world#reachyturn_off" >}}) -[reachy.turn_on()]({{< ref "developing-with-reachy-2/basics/1-hello-world#reachyturn_on" >}}) -[reachy.set_pose()]({{< ref "developing-with-reachy-2/basics/1-hello-world#reachyset_pose" >}}) +#### List of basic methods +- [reachy.connect()](#reachyconnect) +- [reachy.disconnect()](#reachydisconnect) +- [reachy.is_connected()](#reachyis_connected) +- [reachy.turn_on()](#reachyturn_on) +- [reachy.turn_off()](#reachyturn_off) +- [reachy.turn_off_smoothly()](#reachyturn_off_smoothly) +- [reachy.is_on()](#reachyis_on) +- [reachy.is_off()](#reachyis_off) +- [reachy.goto_posture()](#reachygoto_posture) #### reachy.connect() +Method to establish a connection with the robot. +```python +reachy.connect() +``` #### reachy.disconnect() +Method to disconnect from the robot. +```python +reachy.disconnect() +``` #### reachy.is_connected() +Method to check if the robot is connected. -#### reachy.is_off() - -#### reachy.is_on() +```python +reachy.is_connected() +``` +#### reachy.turn_on() +Method to turn on the whole robot. Turning on the robot means putting all the parts of the robot in stiff mode, including the mobile base if there is one. See next section for more information on what the stiff mode is for a motor. +```python +reachy.turn_on() +``` #### reachy.turn_off() - Method to turn off the whole robot. Turning off the robot means putting all parts of the robot in compliant mode, including the mobile base if there is one. See next section for more information on what the compliant mode is for a motor. ```python reachy.turn_off() ``` +#### reachy.turn_off_smoothly() +Method to turn off the robot in a smooth way : the torques gradually decrease and the robot passes through an intermediate position to avoid brutal colliding with its vertical bars or with obstacles as tables. -#### reachy.turn_on() +```python +reachy.turn_off_smoothly() +``` +#### reachy.is_on() +Method to check if the robot is turned on. -Method to turn on the whole robot. Turning on the robot means putting all the parts of the robot in stiff mode, including the mobile base if there is one. See next section for more information on what the stiff mode is for a motor. +```python +reachy.is_on() +``` +#### reachy.is_off() +Method to check if the robot is turned off. ```python -reachy.turn_on() +reachy.is_off() +``` +#### reachy.goto_posture() +Method to make the robot go to a specific posture ("default" or "elbow_90") + +```python +reachy.goto_posture() ``` -#### reachy.set_pose() +
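+
+To see how these basic methods fit together, here is a minimal session that wakes the robot up, sends it to a posture and releases it (a sketch reusing only the calls above, with an example IP address):
+
+```python
+import time
+
+from reachy2_sdk import ReachySDK
+
+reachy = ReachySDK(host='10.0.0.201')  # Replace with the actual IP
+
+if reachy.is_connected():
+    reachy.turn_on()                             # stiff mode on every part
+    reachy.goto_posture('elbow_90', duration=3)  # go to a known posture
+    time.sleep(3)                                # leave time for the movement to end
+    reachy.turn_off_smoothly()                   # release the motors gently
+    reachy.disconnect()
+```
+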
\ No newline at end of file diff --git a/content/developing-with-reachy-2/basics/2-understand-moves.md b/content/developing-with-reachy-2/basics/2-understand-moves.md index cb4f5629..8abd6eef 100644 --- a/content/developing-with-reachy-2/basics/2-understand-moves.md +++ b/content/developing-with-reachy-2/basics/2-understand-moves.md @@ -14,233 +14,296 @@ weight: 210 toc: true --- +
+ +> You can choose to follow our online documentation or to make your Reachy move by following the [notebook n°2](https://github.com/pollen-robotics/reachy2-sdk/blob/develop/src/examples/2_goto_introduction.ipynb). ## Moves methods ReachySDK for Reachy 2 offers you methods to make movements with the arms and head, controlling the target position in several way, choosing the duration of the movement, or even the interpolation mode. -Those methods work the same way on the left and right arms and on the head, **but not on the mobile base**. +Those methods work the same way on the arms and the head, but **not on the mobile base**. The methods to use in order to control the robot are: -- for the arms: - - **`goto_joints()`**: you control directly the goal position of each joint of the arm, in degrees - - **`goto_from_matrix()`**: you control the target pose of the end effector in the robot's coordinate system, from a 4x4 homogeneous matrix -- for the head: +- **for the arms :** + - **`goto()`**: depending on the parameter entered, you can control either : + - the joint value of each joint in degrees : *list of 7 values (joint space)* + - the end-effector position in the robot frame of reference : *4x4 homogeneous matrix (cartesian space)* + + - **`translate_by()`** and **`rotate_by()`** : you can translate or rotate the position of the end-effector in space, in robot frame or gripper frame. +
+
+- **for the head :** + - **`goto()`**: depending on the parameter entered, you can control either : + - the joint value of each head joint in degrees : *list of 3 values (joint space)* + - the head orientation in the robot frame : *quaternion (cartesian space)* +
+ :warning: Between the joint and cartesian spaces, there is a 10-degree difference in pitch: to have the head looking forward, you have to set rpy = [0,10,0] in joint space, which is the equivalent of [0,0,0] in cartesian space. +
+
- **`look_at()`**: you control the head by giving a point in the robot coordinate system the head will look at - - **`rotate_to()`**: you control directly the roll, pitch and yaw goal positions of the neck, in degrees - - **`orient()`**: you control the head orientation with a quaternion -## Moves properties + - **`rotate_by()`**: you can rotate the head in relation to its current position, by setting roll, pitch and yaw values in degrees, either in relation to the robot's frame of reference or to the head. + + +## Goto commands + +A goto command can only be sent on parts: +- reachy.l_arm +- reachy.r_arm +- reachy.head -### Moves IDs +and is defined by 3 parameters : +- the **joint commands**, as a list of articular degree values (7 for the arms and 3 for the head) +- the **duration**, in seconds - *set to 2 by default* +- the **interpolation mode**, 'linear' or 'minimum_jerk' - *set to 'minimum_jerk' by default* -The previous methods all return an id, that you can use to get information on this movements or to cancel this movement. Store this id in a variable to be able to use it further. + +### Goto duration + +You can give a custom duration for the execution of the movements, as shown in the examples above : ```python -move_1 = reachy.r_arm.goto_joints([10, -10, 0, -90, 0, 0, 0]) +reachy.head.goto([20, 20, -10], duration = 3) +reachy.l_arm.goto([0, -10, 10, -90, 0, 0, 0], duration = 5) -print(move_1) ->>> ?? +# Doing: +reachy.l_arm.goto([0, -10, 0, 0, 0, 0, 0]) +# will lead to the same result as: +reachy.l_arm.goto([0, -10, 0, 0, 0, 0, 0], duration = 2) ``` -### Moves execution +> Default duration is **2 seconds**. -Move commands can only be sent on parts: -- reachy.l_arm -- reachy.r_arm -- reachy.head +You **cannot set a duration to 0 second**. This will raise an exception in your code! + + +```python +reachy.l_arm.goto([0, 0, 0, 0, 0, 0, 0], duration = 0) # raises an exception +``` + +### Goto interpolation mode + +The goto methods generates a trajectory between the present position and the goal position. This trajectory is then interpolated at a predefined frequency (100Hz) to compute all intermediary target positions that should be followed before reaching the final goal position. Depending on the interpolation mode chosen, you can have a better control over speed and acceleration. + +Two interpolation modes are available when sending a goto command: +- the **linear** interpolation mode +- the **minimum-jerk** interpolation mode + +Both trajectories start and finish at the same point but don't follow the same intermediate positions. The minimum jerk will slowly accelerate at the begining and slowly decelerate at the end. This makes the movements more natural. + +You can specify the interpolation mode by setting the **`interpolation_mode`** argument when calling the method: + + +```python +reachy.head.goto([20, 0, -10], interpolation_mode='linear') +reachy.l_arm.goto([0, -10, 10, -90, 0, 0, 0], interpolation_mode='linear') +``` + +> Default interpolation mode is **minimum_jerk**. -#### Moves are non-blocking for other parts -It means you can send a move command on different parts, it won't wait for the movement to be executed on the first part to execute the other one, but will follow the timing of your code. 
-Let's take an example with the following sequence: ```python -reachy.l_arm.goto_joints([0, 0, 0, -90, 0, 0, 0], duration = 3) +# Doing: +reachy.l_arm.goto([0, -10, 0, 0, 0, 0, 0]) +# will lead to the same result as: +reachy.l_arm.goto([0, -10, 0, 0, 0, 0, 0], interpolation_mode='minimum_jerk') + +``` + +## Goto execution + +There are two important concepts to be aware of : +- gotos are stacked for a part (i.e. they run one after another), +- but each part is independent (i.e. a goto for the left arm will run in parallel with a goto for the right arm). + +### Goto is non-blocking for other parts + +It means you can send a goto command on different parts, it won't wait for the movement to be executed on the first part to execute the other one, but will follow the timing of your code. + +```python +reachy.l_arm.goto([0, 0, 10, -90, 0, 0, 15], duration = 3) time.sleep(1) -reachy.r_arm.goto_joints([0, 0, 0, -90, 0, 0, 0], duration = 2) +reachy.r_arm.goto([0, 0, -10, -90, 0, 0, -15], duration = 2) ``` + This sequence will take 3 seconds to execute, as the right arm will start its movement 1 second after the left arm has started its own movement. They will finish at the same time. -#### Moves are blocking and stacked for a part -It means that you can send several move commands on a part one after another without any delay, they will be played in this order, but will wait for the previous move to be finished. +### Goto is blocking and stacked for a part + +It means that you can send several goto commands on a part one after another without any delay, they will be played in this order, but will wait for the previous goto to be finished. -Let's take an example with the following sequence: ```python -reachy.l_arm.goto_joints([0, 0, 0, -90, 0, 0, 0], duration = 3) -reachy.l_arm.goto_joints([0, 0, 0, 0, 0, 0, 0], duration = 2) -reachy.l_arm.goto_joints([0, 0, 0, -90, 0, 0, 0], duration = 3) +reachy.l_arm.goto([0, 0, 15, -90, 0, 0, 15], duration = 3) +reachy.l_arm.goto([0, -10, 0, 0, 0, 0, 0], duration = 2) +reachy.l_arm.goto([0, 0, 15, -90, 0, 0, 15], duration = 3) ``` + This sequence will take 8 seconds to execute, as each movement on the left arm will wait for the previous before starting. -Nevertheless, you can still send move commands to other parts. -For example: +Nevertheless, you can still send goto commands to other parts. + + ```python -reachy.l_arm.goto_joints([0, 0, 0, -90, 0, 0, 0], duration = 3) #1 + +reachy.l_arm.goto([0, 0, 15, -90, 0, 0, 15], duration = 3) #1 time.sleep(1) -reachy.l_arm.goto_joints([0, 0, 0, 0, 0, 0, 0], duration = 2) #2 -reachy.l_arm.goto_joints([0, 0, 0, -90, 0, 0, 0], duration = 3) #3 -reachy.r_arm.goto_joints([0, 0, 0, -90, 0, 0, 0], duration = 2) #4 +reachy.l_arm.goto([0, -10, 0, 0, 0, 0, 0], duration = 2) #2 +reachy.l_arm.goto([0, 0, 15, -90, 0, 0, 15], duration = 3) #3 +reachy.r_arm.goto([0, 0, -15, -90, 0, 0, -15], duration = 2) #4 ``` This sequence will still take 8 seconds to execute: - commands #1, #2 and #3 are sent to the left arm. They will be stacked on the left arm, and the `time.sleep(1)` won't have any effect . When received, command #2 will simply wait 2 seconds rather than 3 secondes in the previous example. -- commands #4 is sent on the right arm, where no movement is progress. It will then start 1 second after command #1 has started, and will then be over approximatively at the same time. +- commands #4 is sent on the right arm, where no movement is processed. 
It will then start 1 second after command #1 has started, and will then be over approximatively at the same time. -The sequence execution order is #1, #4, #2, #3 +The sequence execution order is #1, #4, #2, #3. + +So how can a left arm goto wait for a right arm move? That's simple using the parameter *wait* in goto functions ! + +### Wait parameter + +We can set the parameter *wait = True* in goto functions for the execution of the program to wait for the end of the movement before going on. -### Part execution state -As the sequence can become complex, you can get information for each part on its current status, to now which movementis being played and know which others are waiting to be played. -For each part, the following methods are available: -- **`get_move_playing()`**: will return the id of the currently playing move on the part -- **`get_moves_queue()`**: will return the ids of all stacked move commands waiting to be played on the part -Those methods are called at the part level, to get info on the state of the part. -For example: ```python -# Write a sequence for the left arm -reachy.l_arm.goto_joints([0, 0, 0, -90, 0, 0, 0], duration = 3) # id=1 -reachy.l_arm.goto_joints([0, 0, 0, 0, 0, 0, 0], duration = 2) # id=2 -reachy.l_arm.goto_joints([0, 0, 0, -90, 0, 0, 0], duration = 3) # id=3 - -# Move #1 is currently playing -current_move = reachy.l_arm.get_move_playing() -print(current_move) ->>> ?? - -# 2 move commands, #2 and #3, are waiting to be played -print(len(reachy.l_arm.get_moves_queue())) ->>> 2 +goto_1 = reachy.r_arm.goto([0, 5, -15, 0, 0, 0, -10], duration = 2, wait = True) +goto_2 = reachy.l_arm.goto([0, -5, 15, -90, 0, 0, 10], duration = 2, wait = True) ``` -### Moves state +The left arm move will start only at the end of the right arm one. + +### Goto state + +For a specific goto, you may want to know its current state. Each gotos returns an id, that you can use to get information or cancel this movement. Store this id in a variable to be able to use it further. + +You can get information on the goto given its id with 2 methods available at reachy's level: + +- **`is_goto_finished()`**: return True if the movement is over, but also if it won't be played because it has been cancelled for example +- **`get_goto_joints_request()`**: will return the joints goal positions sent to the part by the corresponding goto command -For a specific move, you may want to know its current state. 
You can get information on the moves given its id with 3 methods available at reachy's level: -- **`is_move_playing()`**: return True if the movement is currently being played -- **`is_move_finished()`**: return True if the movement is over, but also if it won't be played because it has been cancelled for example -- **`get_move_joints_request()`**: will return the joints goal positions sent to the part by the corresponding move command Let's take an example: + + ```python -move_1 = reachy.l_arm.goto_joints([0, 0, 0, -90, 0, 0, 0], duration = 3) +goto_1 = reachy.l_arm.goto([0, 0, 0, -60, 0, 0, 0], duration = 3) time.sleep(1) -# Move is currently being played -reachy.is_move_playing(move_1) ->>> True -reachy.is_move_finished(move_1) +# Goto is currently being played +goto1_is_finished = reachy.is_goto_finished(goto_1) +print(f'After 1 second, goto 1 is finished : {goto1_is_finished}\n') >>> False time.sleep(3) -# Move is now over -reachy.is_move_playing(move_1) ->>> False -reachy.is_move_finished(move_1) +# Goto is now over +goto1_is_finished = reachy.is_goto_finished(goto_1) +print(f'After 4 seconds, goto 1 is finished : {goto1_is_finished}') >>> True - -# Get joint goal position of the move -reachy.get_move_joints_request(move_1) ->>> ?? ``` -### Cancel moves - -If you want to modify the queue of move commands on a part, or interrupt the movement being played, you can cancel move commands at any time. - -#### Single move cancellation +Retrieve the joint requests of a goto command from its ID: -To cancel a single movement, currently playing or stacked in a part's queue, use its id and call `cancel_move_by_id()` from reachy. ```python -move_1 = reachy.l_arm.goto_joints([0, 0, 0, -90, 0, 0, 0], duration = 3) - -time.sleep(1) -reachy.cancel_move_by_id(move_1) +reachy.get_goto_joints_request(goto_1) ``` -#### Cancel all moves at once +You get information on the part involved, the target joint values, the duration of the movement, and the interpolation mode. -To cancel all moves at once, you can call the `cancel_all_moves()` methods. -This method can be called at the level you want to act, which can be either **reachy** or a **specific part**. -##### All robot moves +### Part execution state -For example, if you want to cancel all moves on all parts: -```python -# Send a sequence of moves -reachy.head.rotate_to(20, 30, -10, duration=3) -reachy.l_arm.goto_joints([0, 0, 0, -90, 0, 0, 0], duration = 3) -reachy.l_arm.goto_joints([0, 0, 0, 0, 0, 0, 0], duration = 2) +As the sequence can become complex, you can get information for each part on its current status, to know which movement is being played and know which others are waiting to be played. +For each part, the following methods are available: +- **`get_goto_playing()`**: will return the id of the currently played goto on the part +- **`get_goto_queue()`**: will return the ids of all stacked goto commands waiting to be played on the part -# Cancel all moves -reachy.cancel_all_moves() -``` +Those methods are called at the part level, to get info on the state of the part. -All movements are cancelled, even the movement stacked in the left arm queue which will never be played. +Let's take an example. 
-##### All part moves -If you only want to cancel movement on the left arm: ```python -# Send a sequence of moves -reachy.head.rotate_to(20, 30, -10, duration=3) -reachy.l_arm.goto_joints([0, 0, 0, -90, 0, 0, 0], duration = 3) -reachy.l_arm.goto_joints([0, 0, 0, 0, 0, 0, 0], duration = 2) - -# Cancel moves on left arm only -reachy.l_arm.cancel_all_moves() +# Write a sequence for the left arm +goto_1 = reachy.l_arm.goto([0, -15, 15, -90, 0, 0, 0], duration = 3) +goto_2 = reachy.l_arm.goto([0, -10, 0, 0, 0, 0, 0], duration = 2) +goto_3 = reachy.l_arm.goto([0, -15, 15, -90, 0, 0, 0], duration = 3) + +print(f'goto 1: {goto_1.id}, goto 2: {goto_2.id}, goto 3: {goto_3.id}') +>>> goto 1: 45, goto 2: 46, goto 3: 47 + +# Goto #1 is currently playing +current_goto = reachy.l_arm.get_goto_playing() +print(f'current goto : {current_goto.id}') +print(f'l_arm queue length: {len(reachy.l_arm.get_goto_queue())} gotos waiting to be played.') +>>> current goto : 45 +>>> l_arm queue length : 2 gotos waiting to be played. ``` -The movement on the head will continue, but all the movements of the left will be stopped and the left arm queue cleaned. +## Goto cancellation +If you want to modify the queue of goto commands on a part, or interrupt the movement being played, you can cancel goto commands at any time. -## Moves duration +### Single goto cancellation -For each methods mentioned in [Moves methods]({{< ref "developing-with-reachy-2/basics/2-understand-moves#moves-methods" >}}), you can give a custom duration for the execution of the movements. +To cancel a single movement, currently playing or stacked in a part's queue, use its id and call `cancel_goto_by_id()` from reachy. It will stop the robot at its current position. -Simply specify the **`duration`** argument **in seconds** when calling the method, as shown in the move examples above: -```python -reachy.head.rotate_to(20, 30, -10, duration=3) -reachy.l_arm.goto_joints([0, 0, 0, -90, 0, 0, 0], duration = 5) +```python +goto_1 = reachy.l_arm.goto([0, 15, 15, -90, 10, 0, 0], duration = 3) +goto_2 = reachy.head.goto([30, 0, 0], duration = 3) -# Doing: -reachy.l_arm.goto_joints([0, 0, 0, 0, 0, 0, 0]) -# will lead to the same result as: -reachy.l_arm.goto_joints([0, 0, 0, 0, 0, 0, 0], duration = 2) +time.sleep(1) +reachy.cancel_goto_by_id(goto_1) ``` -> Default duration is 2 seconds. +### Multiple gotos cancellation -You **cannot set a duration to 0 second**. This will raise an exception in your code: -?? +To cancel all gotos at once, you can call the `cancel_all_goto()` methods. +This method can be called at the level you want to act, which can be either **reachy** or a **specific part**. +#### All gotos -## Moves interpolation mode +For example, if you want to cancel all gotos on all parts: -The moves methods generates a trajectory between the present position and the goal position. This trajectory is then interpolated at a predefined frequency (100Hz) to compute all intermediary target positions that should be followed before reaching the final goal position. Depending on the interpolation mode chosen, you can have a better control over speed and acceleration. 
-Two interpolation modes are available when sending a move command: -- the **linear** interpolation mode -- the **minimum-jerk** interpolation mode +```python +# Send a sequence of gotos +reachy.head.goto([20, 30, -10], duration = 3) +reachy.l_arm.goto([0, 0, 0, -90, 0, 0, 0], duration = 3) +reachy.l_arm.goto([0, 0, 0, 0, 0, 0, 0], duration = 3) -{{< img-center "images/sdk/first-moves/interpolation.png" 400x "" >}} +time.sleep(1.5) + +# Cancel all gotos +reachy.cancel_all_goto() + +print(f"Length of l_arm goto queue : {len(reachy.l_arm.get_goto_queue())}") +>>> Length of l_arm goto queue : 0 +``` + +All movements are cancelled, even the movement stacked in the left arm queue which will never be played. + +#### All gotos for one part + +If you only want to cancel movement on the left arm: -Both trajectories start and finish at the same point but don't follow the same intermediate positions. The minimum jerk will slowly accelerate at the begining and slowly decelerate at the end. This makes the movements more natural. -You can specify the interpolation mode by setting the **`interpolation_mode`** argument when calling the method: ```python -reachy.head.rotate_to(20, 30, -10, interpolation_mode='linear') -reachy.l_arm.goto_joints([0, 0, 0, -90, 0, 0, 0], interpolation_mode='linear') +# Send a sequence of gotos +reachy.head.goto([20, 30, -10], duration=3) +reachy.l_arm.goto([0, 0, 0, -90, 0, 0, 0], duration = 3) +reachy.l_arm.goto([0, 0, 0, 0, 0, 0, 0], duration = 2) +time.sleep(1) -# Doing: -reachy.l_arm.goto_joints([0, 0, 0, 0, 0, 0, 0]) -# will lead to the same result as: -reachy.l_arm.goto_joints([0, 0, 0, 0, 0, 0, 0], interpolation_mode='minimum_jerk') +# Cancel gotos on left arm only +reachy.l_arm.cancel_all_goto() ``` -> Default interpolation mode is minimum-jerk. \ No newline at end of file +The movement on the head will continue, but all the movements of the left will be stopped and the left arm queue cleaned. \ No newline at end of file diff --git a/content/developing-with-reachy-2/basics/3-basic-arm-control.md b/content/developing-with-reachy-2/basics/3-basic-arm-control.md index 474114f9..8beed322 100644 --- a/content/developing-with-reachy-2/basics/3-basic-arm-control.md +++ b/content/developing-with-reachy-2/basics/3-basic-arm-control.md @@ -12,10 +12,17 @@ menu: parent: "SDK basics" weight: 220 toc: true +slug: "3-basic-arm-control" +url: "/developing-with-reachy-2/basics/3-basic-arm-control/" --- +
+ +> You can choose to follow our online documentation or to make your Reachy move by following the [notebook n°3](https://github.com/pollen-robotics/reachy2-sdk/blob/develop/src/examples/3_arm_and_gripper.ipynb). + ## Arm presentation + Reachy's arm offers 7 degrees of freedom. It also gives access to one joint for the gripper. The **arm** is divided as follow: - **shoulder**, composed of 2 joints (pitch and roll) @@ -23,7 +30,6 @@ The **arm** is divided as follow: - **wrist**, composed of 3 joints (roll, pitch and yaw) We refer to the shoulder, elbow and wrist as **actuators**. -For some actions, such as changing the compliance, is the the lowest level of control you will have. ### The actuators @@ -32,10 +38,18 @@ Each actuator has a unique name and uid. To access a specific actuator, you can ```python from reachy2_sdk import ReachySDK -reachy = ReachySDK(host='192.168.0.42') # Replace with the actual IP - -reachy.r_arm._actuators ->>> +reachy = ReachySDK(host='10.0.0.201') # Replace with the actual IP + reachy.r_arm._actuators +{'shoulder': + +>, 'elbow': + +>, 'wrist': + + } ``` Because they are parallel actuators, it often doesn't have sense to control one motor of an actuator without controlling the other motors of the same actuator. @@ -48,25 +62,17 @@ This is why actuators are for several cases the lowest degree of control we give ### The joints -Each joint has a unique name and uid. To access a specific joint, you can either use *reachy.joints* which has each joint of the robot as attribute or access it via the actuators it belongs to. For example, to access the right arm shoulder roll : *reachy.r_arm.shoulder.roll*. +To access a specific joint, you can either use *reachy.joints* which has each joint of the robot as attribute or access it via the actuators it belongs to. For example, to access the right arm shoulder roll : *reachy.r_arm.shoulder.roll*. First, connect to your Reachy. ```python from reachy_sdk import ReachySDK -reachy = ReachySDK(host='192.168.0.42') # Replace with the actual IP +reachy = ReachySDK(host='10.0.0.201') # Replace with the actual IP reachy.r_arm.shoulder.roll ->>> -``` -The name and the id are attributes of the returned Joint object. - -```python -reachy.r_arm.r_shoulder_pitch.name ->>> 'r_shoulder_pitch' -reachy.r_arm.r_shoulder_pitch.uid ->>> 8 + ``` Joints in Reachy are abstract elements that do not have a physical element. A joint is controlled by several motors of the actuators. The only thing you can do at joint level is reading the **present_position** and send **goal_position**. @@ -76,7 +82,7 @@ Joints in Reachy are abstract elements that do not have a physical element. A jo You can get the present position of each joint with this attribute. ```python -reachy.r_arm.r_shoulder_pitch.present_position +reachy.r_arm._shoulder.pitch.present_position >>> 22.4 ``` @@ -84,21 +90,45 @@ reachy.r_arm.r_shoulder_pitch.present_position #### goal_position -The *goal_position* attribute of a joint can be used to set a new joint's target position to make it move. However, we recommend using the [**goto_joints() method**]({{< ref "developing-with-reachy-2/basics/3-basic-arm-control#goto_joints" >}}) to move the motors which provides better control on the joint's trajectories. +The *goal_position* attribute of a joint can be used to set a new joint's target position to make it move. 
However, we recommend using the [**goto() method**]({{< ref "developing-with-reachy-2/basics/3-basic-arm-control#goto" >}}) to move the motors which provides better control on the joint's trajectories. Using goal_position will make the motor move **as fast as it can**, so be careful when using it. ```python -reachy.r_arm.r_elbow_pitch.goal_position = -90 +reachy.r_arm._elbow.pitch.goal_position = -90 +reachy.send_goal_positions() ``` > goal_position must be written in **degrees**. ### The gripper +Grippers are part of arms: this means that if the arm is switched on, so is the gripper. + +To get access to a gripper, you have to go through the corresponding arm : +```python +reachy.r_arm.gripper +>>> +``` +The opening corresponds to a percentage. + + ## Arm moves methods -### goto_joints() -The **`goto_joints()`** method takes a seven-elements-long list, with the angles in this order: +Arms can be controlled in two spaces: + +* the **joint space**, which allows to read and write directly the angle values of each joint of the arm +* the **cartesian space**, which consists in controlling the end effector position and orientation in Reachy's coordinate system + +> Both spaces are quite different, and **we advise not to mix them** if you are not familiar with the output. +In fact, values of the joint space are expressed in each actuator's coordinate system (respectively shoulder, elbow and wrist), whereas commands in cartesian space are expressed in Reachy's coordinate system + +### goto() + +The goto method can be used in joint and cartesian space, it depends on the input parameter, respectively a list of 7 joint values or a 4x4 matrix. + +#### In joint space + +The **`goto()`** method takes a seven-elements-long list, with the angles in this order: - r_arm.shoulder.pitch - r_arm.shoulder.roll - r_arm.elbow.yaw @@ -109,49 +139,38 @@ The **`goto_joints()`** method takes a seven-elements-long list, with the angles Let's see an example of how to use it. -You will use the `goto_joints()` methods to place the right arm at a right-angled position. First, make sure that the Reachy's right arm is placed on a cleared table and that there will not be obstacles during its movement. - -The setup should look like this: - -{{< img-center "images/sdk/first-moves/base_pos.jpg" 500x "" >}} - Let's define a list with **reachy.r_arm.elbow.pitch** at -90 degrees to the set a right-angled position for the right arm: ```python -right_angled_pose = [0, 0, 0, -90, 0, 0, 0] +right_angled_pose = [0, 10, -10, -90, 0, 0, 0] ``` -Then send the `goto_joints()` commands to the right arm: -Set the right arm motors in stiff mode. - +Then we can send the `goto` commands to the right arm, after setting the motors in stiff mode : ```python -reachy.r_arm.turn_on() # don't forget to turn the arm on +reachy.r_arm.turn_on() -reachy.r_arm.goto_joints(right_angled_pose) +reachy.r_arm.goto(right_angled_pose) ``` -You can use the +> To find out whether you have to send positive or negative angles, read next section on the arm kinematics. +#### In cartesian space -The result should look like this: +The **`goto()`** method takes a 4x4 matrix expressing the target pose of Reachy 2's end effector in Reachy 2 coordinate system. -

- {{< video "videos/sdk/goto.mp4" "80%" >}} -

+> Read next section on [Use arm kinematics]({{< ref "developing-with-reachy-2/basics/4-use-arm-kinematics" >}}) to better understand the use of the `goto` method. -Don't forget to put the right arm's joints back to the compliant mode. Place your hand below the right arm's gripper to prevent the arm from falling hard on the table. -```python -reachy.r_arm.turn_off() -``` - -> To find out whether you have to send positive or negative angles, read next section on the arm kinematics. +### translate_by() and rotate_by() -### goto_matrix() +To simplify your life, you have access to functions to easily compute translation or rotation, in the robot or gripper frame, in **cartesian space**. +For example you can use the `translate_by(...)` method to send the gripper up, asking for a translation 10cm up in Reachy's frame (+0.1m on Reachy's z axis), and a rotation of 20° around the z axis of the gripper : -The **`goto_matrix()`** method takes a 4x4 matrix expressing the target pose of Reachy 2's end effector in Reachy 2 coordinate system. +```python +reachy.r_arm.translate_by(x=0, y=0, z=0.1, frame="robot") +reachy.r_arm.rotate_by(roll=0, pitch=0, yaw=20, frame="gripper") +``` -> Read next section on [Use arm kinematics]({{< ref "developing-with-reachy-2/basics/4-use-arm-kinematics" >}}) to better understand the use of the `goto_matrix()` method. ## Gripper control @@ -177,9 +196,10 @@ The opening value corresponds to a **percentage of opening**, which means: - 0 is close - 100 is open -You can read the opening of the gripper through the opening attribute: +You can read the opening of the gripper : + ```python -reachy.r_arm.gripper.opening +reachy.r_arm.gripper.get_current_opening() >>> 20 # almost closed ``` @@ -190,14 +210,24 @@ Send your custom opening value, still between 0 and 100, to the gripper with: reachy.r_arm.gripper.set_opening(50) # half-opened ``` +Those actions are non-blocking, meaning that the rest of your program won't wait the end of the action to continue. +You can use the boolean `is_moving()` for that : + +```python +reachy.r_arm.gripper.close() +while reachy.r_arm.gripper.is_moving() : + time.sleep(0.1) +reachy.r_arm.gripper.open() +``` + > Note that there is an smart gripper control that will avoid the gripper from reaching the opening position if an object has been detected while closing the gripper. ## Read arm position -### get_joints_position() +### In joint space : get_current_positions() -You can retrieve the values from each **arm joints** using the **`get_joints_position()`** method. +You can retrieve the values from each **arm joints** using the **`get_current_positions()`** method. This method returns a seven-elements-long list, with the angles in this order: - r_arm.shoulder.pitch @@ -211,24 +241,23 @@ This method returns a seven-elements-long list, with the angles in this order: > Angles are returned in **degrees** by default. 
```python -reachy.l_arm.rotate_to(20, 30, -10) - -reachy.head.get_joints_position() ->>> [7, 10, 4, -50, 4, 5, 7] +reachy.goto_posture() +reachy.r_arm.get_current_positions() +>>> [0, 10, -10, 0, 0, 0, 0] -# r_arm.shoulder.pitch=7, +# r_arm.shoulder.pitch=0, # r_arm.shoulder.roll=10, -# r_arm.elbow.yaw=4, -# r_arm.elbow.pitch=-50, -# r_arm.wrist.roll=4, -# r_arm.wrist.pitch=5, -# r_arm.wrist.yaw=7, +# r_arm.elbow.yaw=-10, +# r_arm.elbow.pitch=0, +# r_arm.wrist.roll=0, +# r_arm.wrist.pitch=0, +# r_arm.wrist.yaw=0 ``` -### End effector position +### In cartesian space : forward_kinematics() -You can get the end effector position of Reachy 2 in Reachy 2 coordinate system using forward kinematics. +You can get the end effector position of Reachy in Reachy 2 coordinate system using forward kinematics. Call: ```python diff --git a/content/developing-with-reachy-2/basics/4-use-arm-kinematics.md b/content/developing-with-reachy-2/basics/4-use-arm-kinematics.md index 9e5c4383..107b64e2 100644 --- a/content/developing-with-reachy-2/basics/4-use-arm-kinematics.md +++ b/content/developing-with-reachy-2/basics/4-use-arm-kinematics.md @@ -18,18 +18,18 @@ toc: true ### Joint coordinates -If you remember the [`goto_joint()` function]({{< ref "developing-with-reachy-2/basics/3-basic-arm-control#goto_joints" >}}), to generate a trajectory for the arm, you need to pass a list of joints with the requested position as argument. +If you remember the [`goto()` function]({{< ref "developing-with-reachy-2/basics/3-basic-arm-control#goto" >}}), to generate a trajectory for the arm, you need to pass a list of joints with the requested position as argument. For example, to place the right arm in a right angled position, we defined the following list: ```python -right_angled_position = [0, 0, 0, -90, 0, 0, 0] +right_angled_position = [0, 10, -10, -90, 0, 0, 0] ``` and then call the function with is: ```python -reachy.r_arm.goto_joints(right_angled_position) +reachy.r_arm.goto(right_angled_position) ``` In this basic arm control, we used what is called **joint coordinates** to move Reachy. This means that we controlled each joint separately. @@ -40,7 +40,7 @@ Controlling a robot in joint coordinates can be hard and is often far from what The **kinematic model** describes the motion of a robot in mathematical form without considering the forces and torque affecting it. It only focuses on the geometric relationship between elements. -We have defined the whole kinematic model of the arm. This means the translation and rotation required to go from one joint to the next one. On a right arm equipped with a gripper this actually look like this: +We have defined the whole kinematic model of the arm. This means the translation and rotation required to go from one joint to the next one. On a right arm equipped with a gripper, this actually looks like this: |Motor|Translation|Rotation| |-----|-----------|--------| @@ -88,8 +88,8 @@ You can see the right and left end-effectors animated below. Forward and inverse kinematics are a way to go from one coordinates system to the other: -* **forward kinematics: joint coordinates –> cartesian coordinates**, -* **inverse kinematics: cartesian coordinates –> joint coordinates**. +* **forward kinematics**: joint coordinates –> cartesian coordinates , +* **inverse kinematics**: cartesian coordinates –> joint coordinates. ## Forward kinematics @@ -102,28 +102,28 @@ Each arm has a **`forward_kinematics()`** method. 
To use it, you first need to c ```python from reachy_sdk import ReachySDK -reachy = ReachySDK(host='192.168.0.42') # Replace with the actual IP +reachy = ReachySDK(host='10.0.0.201') # Replace with the actual IP reachy.r_arm.forward_kinematics() ->>> array([[ 0.04622308, -0.03799621, -0.99820825, 0.31144822], - [ 0.10976691, 0.99341829, -0.03273101, -0.19427524], - [ 0.99288199, -0.1080573 , 0.05008958, -0.4255104 ], - [ 0. , 0. , 0. , 1. ]]) +>>> array([[-0.015, 0.001, -1. , 0.384], + [ 0.086, 0.996, -0.001, -0.224], + [ 0.996, -0.086, -0.015, -0.273], + [ 0. , 0. , 0. , 1. ]]) ``` The method returns a 4x4 matrix indicating the position and orientation of the end effector in Reachy 2's coordinate system. > By specifying no argument, it will give the current 3D position and orientation of the end effector. -You can compute the forward kinematics of the arm for other joints positions, by giving as an argument a seven-element-long list, as for the `goto_joints()`method. The arm will not move, but you can get the target position and orientation of the arm in this configuration. +You can compute the forward kinematics of the arm for other joints positions, by giving as an argument a seven-element-long list, as for the `goto()`method. The arm will not move, but you can get the target position and orientation of the arm in this configuration. -For example, for the right arm right angled position: +For example, for the right arm right-angled-position: ```python reachy.r_arm.forward_kinematics([0, 0, 0, -90, 0, 0, 0]) ->>> array([[ 0.04622308, -0.03799621, -0.99820825, 0.31144822], - [ 0.10976691, 0.99341829, -0.03273101, -0.19427524], - [ 0.99288199, -0.1080573 , 0.05008958, -0.4255104 ], - [ 0. , 0. , 0. , 1. ]]) +>>> array([[-0.045, -0.168, -0.985, 0.387], + [ 0.255, 0.951, -0.174, -0.205], + [ 0.966, -0.259, -0. , -0.27 ], + [ 0. , 0. , 0. , 1. ]]) ``` ### Understand the result @@ -144,7 +144,7 @@ reachy.r_arm.forward_kinematics() returns the current pose of the right end-effector, based on the present position of every joint in the right arm. -You can also compute the pose for a given joints position, to do that just pass the list of position as argument of forward_kinematics. Be careful to respect the order of the position you give and to give all the joints in the arm kinematic chain (i.e. from *shoulder_pitch* to *wrist_roll*). +You can also compute the pose for a given joints position. To do that, just pass the list of position as argument of forward_kinematics. Be careful to respect the order of the position you give and to give all the joints in the arm kinematic chain (i.e. from *shoulder_pitch* to *wrist_roll*). For example, we can compute the forward kinematics for the right-angle position we defined earlier. @@ -168,7 +168,7 @@ $$\begin{bmatrix} 1 & 0 & 0 \end{bmatrix}$$ -We can use scipy to understand what this matrix represents. +We can use *scipy* to understand what this matrix represents. ```python from scipy.spatial.transform import Rotation as R @@ -179,30 +179,33 @@ R.from_matrix([ [0, 1, 0], [1, 0, 0], ]).as_euler('xyz', degrees=True) ->>> array([ 0. , -89.99999879, 0. ]) +>>> array([0., -90,0.]) ``` So scipy tells us that a rotation of -90° along the y axis has been made to get this matrix, which is coherent with the result because having the hand facing forward corresponds to this rotation according to Reachy's xyz axis that we saw above. ## Inverse kinematics -The inverse kinematics is the exact opposite of the forward kinematics. 
From a 4x4 pose in Reachy 2 coordinate system, it gives you a list of joints positions to reach this target. +The inverse kinematics is the exact opposite of the forward kinematics. From a 4x4 pose in Reachy 2 coordinate system, it gives a list of joints positions to reach this target. -Knowing where you arm is located in the 3D space can be useful but most of the time what you want is to move the arm in cartesian coordinates. You want to have the possibility to say: “move your hand to [x, y, z] with a 90° rotation around the Y axis”. This is what **`goto_matrix()`** +Knowing where you arm is located in the 3D space can be useful but most of the time what you want is to move the arm in cartesian coordinates. You want to have the possibility to say: +> “Move your hand to [x, y, z] with a 90° rotation around the Y axis”. + +This is what **`goto()`** does, if the input is a 4x4 matrix. ### inverse_kinematics() Each arm has an **`inverse_kinematics()`** method. To use it, you first need to connect to your Reachy. -You need to specify as an argument a target pose in Reachy coordinate system. +You need to specify a target pose in Reachy coordinate system as an argument. -Let's for example ask for the inverse kinematics of the current pose, using the forward kinematics. +Let's for example ask the inverse kinematics of the current pose, using the forward kinematics. ```python from reachy_sdk import ReachySDK -reachy = ReachySDK(host='192.168.0.42') # Replace with the actual IP +reachy = ReachySDK(host='10.0.0.201') # Replace with the actual IP reachy.r_arm.inverse_kinematics(reachy.r_arm.forward_kinematics()) ->>> [0, 0, 0, -90, 0, 0, 0] ?? +>>> [0, 10, -10, -90, 0, 0, 0] ``` The method returns a seven-element-long list indicating the position of each arm joint, in the usual order: @@ -214,20 +217,10 @@ The method returns a seven-element-long list indicating the position of each arm - r_arm.wrist.pitch - r_arm.wrist.yaw -Contrary to the forward kinematics which has a unique answer (giving all joints values will always put the end effector at the same target position), inverse kinematics can have an infinite number of answers (for a target position of the end effector, several combinations of joints angles are possible). - -#### Using a q0 value -The inverse kinematics returns one solution, but you may want to custom the position from which the computation is done to get another result. -To do so, specify a **q0** value when calling the `inverse_kinematics()` method. The **`q0`** argument must be a seven-element-long list as well: -```python -reachy.r_arm.inverse_kinematics( - reachy.r_arm.forward_kinematics(), - q0=[0, 0, 0, 0, 0, 0, 0]) ->>> [0, 0, 0, -90, 0, 0, 0] ?? -``` +Contrary to the forward kinematics which has a unique answer (giving all joints values will always put the end effector at the same target position), inverse kinematics can have an infinite number of answers (for a target position of the end effector, several combinations of joints angles are possible). -### Example: square movement with goto_matrix() +### Example: square movement in cartesian space #### Defining the poses @@ -241,72 +234,54 @@ For our starting corner A, let's imagine a point in front of the robot, on its r $$A = \begin{pmatrix}0.3 & -0.4 & -0.3\end{pmatrix}$$ -The coordinates of B should match A except the z component wich should be higher. Hence +The coordinates of B should match A except the z component which should be higher. 
Hence $$B = \begin{pmatrix}0.3 & -0.4 & 0.0\end{pmatrix}$$ -For the corner C, we want a point on the same z level as B in the inner space of Reachy and in the same plane as A and B so we only need to change the y component of B. We can take for example +For the corner C, we want a point on the same z level as B, in the inner space of Reachy and in the same plane as A and B. So we only need to change the y component of B. We can take for example : $$C = \begin{pmatrix}0.3 & -0.1 & 0.0\end{pmatrix}$$ -And to complete our corners we can deduce D from A and C. D coordinates should match C except its z component which must the same as A. Hence +And to complete our corners, we can deduce D from A and C. D coordinates should match C, except its z component, which must the same as A. Hence $$D = \begin{pmatrix}0.3 & -0.1 & -0.3\end{pmatrix}$$ -> **Remember that you always have to provide poses to the inverse kinematics that are actually reachable by the robot.** If you're not sure whether the 3D point that you defined is reachable by Reachy, you can move the arm with your hand in compliant mode, ask the forward kinematics and check the 3D translation component of the returned pose. +{{< warning icon="👉🏾" text="Remember that you always have to provide poses to the inverse kinematics that are actually reachable by the robot. If you're not sure whether the 3D point that you defined is reachable by Reachy, you can move the arm with your hand in compliant mode (meaning turned off), ask the forward kinematics and check the 3D translation component of the returned pose. " >}} -But having the 3D position is not enough to design a pose. You also need to provide the 3D orientation via a rotation matrix. The rotation matrix is often the tricky part when building a target pose matrix. +But having the 3D position is not enough to design a pose. You also need to provide the 3D orientation via a rotation matrix. It's often the tricky part when building a target pose matrix. Keep in mind that the identity rotation matrix corresponds to the zero position of the robot which is when the hand is facing toward the bottom. So if we want the hand facing forward when drawing our virtual square, we need to rotate it from -90° around the y axis, as we saw in the forward kinematics part. -We know from before which rotation matrix corresponds to this rotation, but we can use scipy again to generate the rotation matrix for given rotations. +We know from before which rotation matrix corresponds to this rotation, but we can use the SDK to get the 4x4 matrix from a position vector and the rotation given by the Euler angles : ```python -print(np.around(R.from_euler('y', np.deg2rad(-90)).as_matrix(), 3)) ->>> [[ 0. -0. -1.] - [ 0. 1. -0.] - [ 1. 0. 0.]] +from reachy2_sdk.utils.utils import get_pose_matrix +get_pose_matrix([0.3, -0.4, -0.3], [0,-90,0]) +>>> array([[ 0. , 0. , -1. , 0.3], + [ 0. , 1. , 0. , -0.4], + [ 1. , 0. , 0. , -0.3], + [ 0. , 0. , 0. , 1. ]]) + ``` -We got the rotation matrix that we expected! +We got the 4x4 matrix that we expected! -As mentionned, building the pose matrix can be hard, so don't hesitate to use scipy to build your rotation matrix. You can also move the arm with your hand where you want it to be and use the forward kinematics to get an approximation of the target pose matrix you would give to the inverse kinematics. 
+You can also move the arm with your hand where you want it to be and use the forward kinematics to get an approximation of the target pose matrix you would give to the inverse kinematics. Here, having the rotation matrix and the 3D positions for our points A and B, we can build both target pose matrices. -```python -A = np.array([ - [0, 0, -1, 0.3], - [0, 1, 0, -0.4], - [1, 0, 0, -0.3], - [0, 0, 0, 1], -]) - -B = np.array([ - [0, 0, -1, 0.3], - [0, 1, 0, -0.4], - [1, 0, 0, 0.0], - [0, 0, 0, 1], -]) - -C = np.array([ - [0, 0, -1, 0.3], - [0, 1, 0, -0.1], - [1, 0, 0, 0.0], - [0, 0, 0, 1], -]) - -D = np.array([ - [0, 0, -1, 0.3], - [0, 1, 0, -0.1], - [1, 0, 0, -0.3], - [0, 0, 0, 1], -]) -``` + +| **Matrix** | **Values** | +|------------|-------------------------------------------------| +| **A** | `np.array([[0, 0, -1, 0.3], [0, 1, 0, -0.4], [1, 0, 0, -0.3], [0, 0, 0, 1]])` | +| **B** | `np.array([[0, 0, -1, 0.3], [0, 1, 0, -0.4], [1, 0, 0, 0.0], [0, 0, 0, 1]])` | +| **C** | `np.array([[0, 0, -1, 0.3], [0, 1, 0, -0.1], [1, 0, 0, 0.0], [0, 0, 0, 1]])` | +| **D** | `np.array([[0, 0, -1, 0.3], [0, 1, 0, -0.1], [1, 0, 0, -0.3], [0, 0, 0, 1]])` | + #### Sending the movements commands -As before, we use the **`goto_matrix()`** to send moving instructions to the arm. +As before, we use the **`goto`** to send moving instructions to the arm. ```python @@ -314,19 +289,21 @@ import time # put the joints in stiff mode reachy.r_arm.turn_on() -# use the goto_matrix() method -reachy.r_arm.goto_matrix(A) -reachy.r_arm.goto_matrix(B) -reachy.r_arm.goto_matrix(C) -reachy.r_arm.goto_matrix(D) +# use the goto() method +reachy.r_arm.goto(A) +reachy.r_arm.goto(B) +reachy.r_arm.goto(C) +reachy.r_arm.goto(D) # put the joints back to compliant mode # use turn_off_smoothly to prevent the arm from falling hard -reachy.r_arm.turn_off() +reachy.r_arm.turn_off_smoothly() ``` The result should look like this:

{{< video "videos/sdk/goto_ik.mp4" "80%" >}} -

\ No newline at end of file +

+ +Now, we are going to move the head ! \ No newline at end of file diff --git a/content/developing-with-reachy-2/basics/5-control-head.md b/content/developing-with-reachy-2/basics/5-control-head.md index b83720a0..f73e66d4 100644 --- a/content/developing-with-reachy-2/basics/5-control-head.md +++ b/content/developing-with-reachy-2/basics/5-control-head.md @@ -13,12 +13,16 @@ menu: weight: 240 toc: true --- +
+ +> You can choose to follow our online documentation or to make your Reachy move by following the [notebook n°4](https://github.com/pollen-robotics/reachy2-sdk/blob/develop/src/examples/4_head_control.ipynb). ## Head presentation Reachy 2's head is mounted on an Orbita3D actuator, referred to as the **neck** actuator, giving 3 degrees of freedom to control the head orientation. -> Note : the antennas are not motorized for the moment + +> Note: antenna control will soon be integrated in the SDK. Stay tuned!

{{< video "videos/sdk/orbita.mp4" "80%" >}} @@ -30,10 +34,15 @@ Before starting to control it, connect to your Reachy and turn it on. As in the ```python from reachy2_sdk import ReachySDK -reachy = ReachySDK(host='192.168.0.42') # Replace with the actual IP +reachy = ReachySDK(host='10.0.0.201') # Replace with the actual IP reachy.head ->>> +>>> + + + reachy.head.turn_on() # we turn on only the head ``` @@ -41,7 +50,7 @@ reachy.head.turn_on() # we turn on only the head You could of course turn on the whole robot by calling `reachy.turn_on()` directly. There are several ways to control the head movements: -- using the `look_at()`, `rotate_to()` and `orient()` methods, called directly at the **head** level. These methods works as [move commands described previously]({{< ref "developing-with-reachy-2/basics/2-understand-moves" >}}). +- using the `look_at()`, `goto` and `rotate_by` methods, called directly at the **head** level. These methods works as [move commands described previously]({{< ref "developing-with-reachy-2/basics/2-understand-moves" >}}). - controlling the joints goal positions, namely **reachy.head.neck.roll**, **reachy.head.neck.pitch** and **reachy.head.neck.yaw**. ## Head moves methods @@ -50,7 +59,7 @@ There are several ways to control the head movements: You can use the `look_at()` function to make the head look at a specific point in space. This point must be given in Reachy 2's coordinate system in **meters**. The coordinate system is the one we have seen previously: -* the X axis corresponds to the foward arrow, +* the X axis corresponds to the forward arrow, * the Y axis corresponds to the right to left arrow, * the Z axis corresponds to the up arrow. @@ -58,14 +67,14 @@ The origin of this coordinate system is located in the upper part of the robot t {{< img-center "images/sdk/first-moves/reachy_frame.jpg" 400x "" >}} -If you want Reachy to look forward you can send it the following. +If you want Reachy to look forward, you can send it the following : ```python -reachy.head.turn_on() # Don't forget to put the hand in stiff mode +reachy.head.turn_on() # Don't forget to put the head in stiff mode reachy.head.look_at(x=0.5, y=0, z=0.2, duration=1.0) ``` -You can use multiple *look_at* to chain head movements, or even chain them with the `rotate_to()` and `orient()` functions described below. As seen in the [Understand moves in Reachy 2 section]({{< ref "developing-with-reachy-2/basics/2-understand-moves" >}}), the commands on the head will be stacked. +You can use multiple *look_at* to chain head movements, or even chain them with the `rotate_by()` and `goto()` functions described below. As seen in the [Understand moves in Reachy 2 section]({{< ref "developing-with-reachy-2/basics/2-understand-moves" >}}), the commands on the head will be stacked.

{{< video "videos/sdk/look.mp4" "80%" >}} @@ -84,93 +93,108 @@ look_front = reachy.head.look_at(x=0.5, y=0, z=0, duration=1.0) The best way to understand how to use the *look_at* is to play with it. Picture a position you would like Reachy's head to be in, guess a point which could match for the *look_at* and check if you got it right! -Another cool thing is that we can combine Reachy's kinematics with the *look_at* so that Reachy's head follows its hand! +Another cool thing is that we can combine Reachy's kinematics with the *look_at* so that Reachy's head follows its hand while you're moving it !

{{< video "videos/sdk/look_at_hand.mp4" "80%" >}}

```python -reachy.turn_on('head') - x, y, z = reachy.r_arm.forward_kinematics()[:3, -1] -reachy.head.look_at(x=x, y=y, z=z, duration=1.0) - -time.sleep(0.5) +reachy.head.look_at(x=x, y=y, z=z, duration=1.0, wait = True) while True: x, y, z = reachy.r_arm.forward_kinematics()[:3, -1] - reachy.head.look_at(x=x, y=y, z=z, duration=0.1) + reachy.head.look_at(x=x, y=y, z=z, duration=0.1, wait = True) ``` -What the code says is that we compute the [forward kinematics of Reachy's right arm]({{< ref "developing-with-reachy-2/basics/5-control-head#forward-kinematics" >}}), and the x, y, z of Reachy's right end-effector in the Reachy's coordinates system will be the coordinates of the point used by the *look_at*. +What the code says is that we compute the [forward kinematics of Reachy's right arm]({{< ref "developing-with-reachy-2/basics/5-control-head#forward-kinematics" >}}), and the x, y, z of Reachy's right end-effector in the Reachy's coordinates system will be the coordinates of the point used by the *look_at*. We use a loop with a blocking movement (*parameter wait=True*) to make the head follow the hand at a frequency of 10Hz. + + +### goto() + +The `goto()` function is another way to control the head. There is two ways to use it : +- from joints positions (in joint space) +- from the desired orientation as a quaternion (in cartesian space) -### rotate_to() +#### From joint positions -The `rotate_to()` function is another way to control the head. You directly control the joint of the neck, giving the roll, pitch and yaw angles in degrees. The rotation is made in the order: roll, pitch, yaw, in the Orbita3D coordinate system. + You directly control the joint of the neck, giving the roll, pitch and yaw angles in degrees. The rotation is made in the order: roll, pitch, yaw, in the Orbita3D coordinate system. {{< img-center "images/sdk/first-moves/orbita_rpy.png" 400x "" >}} To make the robot looks a little down: ```python -reachy.head.turn_on() # Don't forget to put the hand in stiff mode -reachy.head.rotate_to(roll=0, pitch=-10, yaw=0, duration=1.0) +reachy.head.turn_on() # Don't forget to put the head in stiff mode +reachy.head.goto([0, 10, 0], duration=1.0) ``` -### orient() +#### From quaternion -The last method to control the head is the `orient()` method. You can control the head with a quaternion. +You can control the head with a quaternion. You can use [pyquaternion library](https://kieranwynn.github.io/pyquaternion/) to create suitable quaternion for this method. + ```python from pyquaternion import Quaternion -q = Quaternion(axis=[1, 0, 0], angle=3.14159265) -reachy.head.turn_on() -reachy.head.orient(q) +q = Quaternion(axis=[1, 0, 0], angle=3.14159265 / 4) # tilt head about 45° to the right +reachy.head.goto(q) ``` -## Joint's goal_position +### Rotate_by() +You can also rotate the head from its current position, by using the *rotate_by* function and specifying angular degree values in roll, pitch, yaw, either in Reachy's or head's frame. -## Read head position -You can read the head orientation in two different ways: +```python +reachy.head.rotate_by(roll=0, pitch=0, yaw=20, frame='head') -- using the `get_orientation()` method, which returns a quaternion -- using the `get_joints_positions()` method, which the neck's roll, pitch and yaw present_position. +reachy.head.rotate_by(roll=-30, pitch=0, yaw=0, frame='robot') +``` -### get_orientation() -```python -q = reachy.head.get_orientation() -print(q) ->>> ?? 
-``` +## Joint's goal_position -### get_joints_positions() +The *goal_position* attribute of a joint can be used to set a new joint's target position to make it move. However, we recommend using the **goto()** method to move the motors which provides better control on the joint's trajectories. -In case you feel more comfortable using roll, pitch, yaw angles rather than working with quaternions, you can retrieve those values from the **neck joints**. +Using goal_position will make the motor move **as fast as it can**, so be careful when using it. ```python -reachy.head.rotate_to(20, 30, -10) -time.sleep(2) -reachy.head.get_joints_positions() ->>> [20, 30, -10] # roll=20, pitch=30, yaw=-10 +reachy.head.neck.roll.goal_position = 30 +reachy.send_goal_positions() ``` -Be careful that contrary to the quaternion that offers a unique representation of a rotation, it is not the case of the euler angles. Several angles combination can lead to the same orientation in space. For example: +:warning: goal_position must be written in **degrees**. + + +## Read head position + +You can read the head positions using : + +- Cartesian space : `get_current_orientation()` will give the head orientation as a quaternion according to the robot's frame + + +- Joint space : `get_current_positions()` will give the neck's roll, pitch and yaw present_position + +:warning: *Don't forget, there is a 10-degrees difference between the cartesian space and joint space, so we recommand you to not mix them.* + +### In cartesian space : ```python -reachy.head.rotate_to(70, -100, 80) # roll=70, pitch=-100, yaw=80 -time.sleep(2) -reachy.head.get_joints_positions() ->>> [-110, -80, -100] # roll=-110, pitch=-80, yaw=-100 +reachy.head.get_current_orientation() +>>> Quaternion(0.9794632485822068, 0.10189819035488734, -0.01081920496959773, 0.17364172391166605) ``` -The values are different, nevertheless it is the same final orientation. You can convince yourself doing: +### In joint space : + +In case you feel more comfortable using roll, pitch, yaw angles rather than working with quaternions, you can retrieve those values from the **neck joints**. + + ```python -reachy.head.rotate_to(-110, -80, -100) +reachy.head.get_current_positions() +>>> [11.881595589573665, -8.976164597791765, 22.07170507647743] ``` -The head won't move. \ No newline at end of file + +Now that we can move the head, let's focus on its cameras ! \ No newline at end of file diff --git a/content/developing-with-reachy-2/basics/6-get-images-from-cameras.md b/content/developing-with-reachy-2/basics/6-get-images-from-cameras.md index 85ae7030..fc7f1a51 100644 --- a/content/developing-with-reachy-2/basics/6-get-images-from-cameras.md +++ b/content/developing-with-reachy-2/basics/6-get-images-from-cameras.md @@ -13,36 +13,23 @@ menu: weight: 250 toc: true --- +
-This section assumes that you went through the [Hello World]({{< ref "developing-with-reachy-2/getting-started-sdk/connect-reachy2" >}}) so that you know how to connect to the robot. +> You can choose to follow our online documentation or to see directly the images from your Reachy by following the [notebook n°5](https://github.com/pollen-robotics/reachy2-sdk/blob/develop/src/examples/5_cameras_images.ipynb). -Reachy 2 has 2 types of camera: -- the **teleop** cameras, with a right and left cameras, located in Reachy 2's head and used for the teleoperation -- the **SR** camera, which is a depth camera, located in Reachy 2's torso and mainly useful for manipulation tasks -Each camera can be accessed separately through *reachy.cameras*. They both have a right and left view, with the left and right sides considered from Reachy point of view. To be able to specify the view you want to get a frame from, you will need to import CameraView: +This section assumes that you went through the [Hello World]({{< ref "developing-with-reachy-2/basics/1-hello-world" >}}) so that you know how to connect to the robot. + +Reachy2 has 2 types of camera: +- the **teleop** cameras, with a left and right cameras, located in Reachy's head and used for the teleoperation +- the **depth** camera, equipped with a depth sensor, located in Reachy 2’s torso and mainly useful for manipulation tasks + +Each camera can be accessed separately through *reachy.cameras*. Teleop cameras have a right and left view, with the left and right sides considered from Reachy point of view, while the depth camera has a left (i.e. mono RGB) and depth view. To be able to specify the view you want to get a frame from, you will need to import CameraView: ```python from reachy2_sdk.media.camera import CameraView ``` -## Enable teleop cameras for the SDK - -### SR camera -The SR camera is unplugged by default. -If you want to use it, plug the SR camera on the robot's computer remaining USB port (2). - -{{< img-center "images/sdk/first-moves/plugged-sr.png" 400x "" >}} - -> Make sure to unplug it if you want to use the teleoperation. - -### Teleop cameras -The teleop cameras are shared between the teleop service and the SDK server, and can only be used by one at the same time. -In order to be able to use the teleop cameras with the SDK: -1. Go to the dashboard -2. Stop webrtc service in the services tab of the dashboard - -{{< img-center "images/sdk/first-moves/stop-webrtc-service.png" 600x "" >}} ## Get images @@ -51,94 +38,127 @@ First, connect to your Reachy. ```python from reachy_sdk import ReachySDK -reachy = ReachySDK(host='192.168.0.42') # Replace with the actual IP +reachy = ReachySDK(host='10.0.0.201') # Replace with the actual IP reachy.cameras ->>> ?? +>>> + +> ``` -The list of initialized cameras should contain both the teleop and SR camera. +The list of initialized cameras should contain both the teleop and depth cameras. -For each camera, namely the teleop and the SR ones, you must call the `capture()`function each time you want to get an image. This captures an image from both view of the given camera at the same time. You can then access one of the image with the `get_frame()` method. +For each camera, namely the teleop and the deoth ones, you must call the `get_frame()`function each time you want to get an image. 
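For example, a minimal sketch (assuming you are already connected as `reachy`, as shown above) that grabs a single frame from the left teleop camera and checks its size could look like this:

```python
from reachy2_sdk.media.camera import CameraView

# get_frame() returns the image (a BGR numpy array) and its timestamp
frame, timestamp = reachy.cameras.teleop.get_frame(CameraView.LEFT)
print(frame.shape, timestamp)
```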
### Teleop camera -To get both views of the robot teleop cameras: +#### RGB images + +To get both views of the robot teleop cameras and the timestamp : ```python -from reachy2_sdk import ReachySDK from reachy2_sdk.media.camera import CameraView -reachy = ReachySDK(host='192.168.0.42') +l_frame, l_ts= reachy.cameras.teleop.get_frame(CameraView.LEFT) +r_frame, r_ts = reachy.cameras.teleop.get_frame(CameraView.RIGHT) +``` + +> By default, if you don't specify which camera you want, it will be the left one. + +Let's display the captured frame with PIL: -reachy.cameras.teleop.capture() -l_frame = reachy.cameras.teleop.get_frame(CameraView.LEFT) -r_frame = reachy.cameras.teleop.get_frame(CameraView.RIGHT) +```python +from PIL import Image +Image.fromarray(l_frame[:,:,::-1]) ``` -Let's display the captured frame with opencv: +#### Camera parameters +The intrinsic camera parameters, as defined [here](https://docs.ros.org/en/melodic/api/sensor_msgs/html/msg/CameraInfo.html), are available : + ```python -import cv2 +height, width, distortion_model, D, K, R, P = reachy.cameras.teleop.get_parameters(CameraView.LEFT) +``` -cv2.imshow("left", l_frame) -cv2.imshow("right", r_frame) -cv.waitKey(0) -cv.destroyAllWindows() +As well as the extrinsic parameters (meaning the transformation of Reachy's origin in the camera's frame) : +```python +T_cam_reachy = reachy.cameras.teleop.get_extrinsics() +>>> array([[-8.20152401e-04, -9.99999365e-01, -7.72635108e-04, + 3.25965794e-02], + [ 2.65685388e-03, 7.70453615e-04, -9.99996174e-01, + 1.80999522e-01], + [ 9.99996134e-01, -8.22202042e-04, 2.65622030e-03, + -3.56420275e-02], + [ 0.00000000e+00, 0.00000000e+00, 0.00000000e+00, + 1.00000000e+00]]) ``` -### SR camera -The SR camera works exactly the same as the teleop camera, but you have more elements captured. In fact, it a RGBD camera, so you have both access to the RGB images and depth information. -#### RGB images -Getting RGB images from the SR camera looks the same as from the teleop one: after having called `capture()`, use `get_frame()` specifying the CameraView you want to get. + +### Depth camera +The depth camera works exactly the same as the teleop camera, but you have more elements captured. In fact, it a RGBD camera, so you have both access to the RGB image and depth information. + +#### RGB image +You can use `get_frame()` to get the image : + ```python from reachy_sdk import ReachySDK from reachy2_sdk.media.camera import CameraView -reachy = ReachySDK(host='192.168.0.42') +reachy = ReachySDK(host='10.0.0.201') -reachy.cameras.SR.capture() -l_frame = reachy.cameras.SR.get_frame(CameraView.LEFT) -r_frame = reachy.cameras.SR.get_frame(CameraView.RIGHT) +rgb_frame, rgb_ts = reachy.cameras.depth.get_frame() ``` -Let's display them with opencv: +Let's display it with PIL: ```python -import cv2 +Image.fromarray(rgb_frame[:,:,::-1]) -cv2.imshow("left", l_frame) -cv2.imshow("right", r_frame) -cv.waitKey(0) -cv.destroyAllWindows() ``` -#### Depth information +#### Depth informations -The SR camera is a depth camera, you can then diplay a left or right **depth frame** using `get_depth_frame()`, but also the **depthmap** and the **disparity**. 
+You can use `get_depth_frame()` to get the depth information and display it : -You first have to capture all, then you can read the frame and get the information you want: ```python -from reachy_sdk import ReachySDK -from reachy2_sdk.media.camera import CameraView +depth_frame, depth_ts = reachy.cameras.depth.get_depth_frame() +Image.fromarray(depth_frame[:,:,::-1]) +``` -reachy = ReachySDK(host='192.168.0.42') +#### Camera parameters +The intrinsic camera parameters, as defined [here](https://docs.ros.org/en/melodic/api/sensor_msgs/html/msg/CameraInfo.html), are available : + +```python +height, width, distortion_model, D, K, R, P = reachy.cameras.depth.get_parameters() +>>> (720, + 1280, + 'rational_polynomial', + array([ 6.69530872e-03, -5.04049882e-02, 3.94786854e-04, 6.92563481e-05, + 3.36577334e-02, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00]), + array([[692.074646 , 0. , 637.12384033], + [ 0. , 691.86395264, 358.03106689], + [ 0. , 0. , 1. ]]), + array([[1., 0., 0.], + [0., 1., 0.], + [0., 0., 1.]]), + array([[692.074646 , 0. , 637.12384033, 0. ], + [ 0. , 691.86395264, 358.03106689, 0. ], + [ 0. , 0. , 1. , 0. ]])) -reachy.cameras.SR.capture() -l_depth_frame = reachy.cameras.SR.get_depth_frame(CameraView.LEFT) -r_depth_frame = reachy.cameras.SR.get_depth_frame(CameraView.RIGHT) -depth = reachy.cameras.SR.get_depthmap() -disparity = reachy.cameras.SR.get_disparity() ``` -Let's display them with opencv: +As well as the extrinsic parameters (meaning the transformation of Reachy's origin in the camera's frame) : ```python -import cv2 - -cv2.imshow("sr_depthNode_left", l_depth_frame) -cv2.imshow("sr_depthNode_right", r_depth_frame) -cv2.imshow("depth", depth) -cv2.imshow("disparity", disparity) -cv.waitKey(0) -cv.destroyAllWindows() +T_cam_reachy = reachy.cameras.depth.get_extrinsics() +>>> array([[ 1.11022302e-16, -1.00000000e+00, 5.55111512e-17, + 9.98900000e-03], + [-7.43144825e-01, -1.11022302e-16, -6.69130606e-01, + 6.23442696e-02], + [ 6.69130606e-01, 5.55111512e-17, -7.43144825e-01, + -4.88122743e-03], + [ 0.00000000e+00, 0.00000000e+00, 0.00000000e+00, + 1.00000000e+00]]) + ``` -> Note that when you call `capture()` on the SR camera, both RGB images and depth information are captured at the same time. \ No newline at end of file +That's it for the cameras ! +Now, we are going to record and replay movements. \ No newline at end of file diff --git a/content/developing-with-reachy-2/basics/7-record-replay-trajectories.md b/content/developing-with-reachy-2/basics/7-record-replay-trajectories.md index a6eb4572..2a5a9b9c 100644 --- a/content/developing-with-reachy-2/basics/7-record-replay-trajectories.md +++ b/content/developing-with-reachy-2/basics/7-record-replay-trajectories.md @@ -13,7 +13,7 @@ menu: weight: 260 toc: true --- - +
You can easily record joint trajectories directly on Reachy, store and replay them later. This page will show you how to implement such mechanisms. @@ -23,20 +23,20 @@ In the following examples, we will assume that you are already connected to your ## Recording a trajectory -To record a trajectory, we will simply get the current position of individual motors at a predefiend frequency. We will first define a list of motors that we want to record. In this example, we will only record the joints from the right arm, but you can similarly record a single motor, or all motors of the robot at once. +To record a trajectory, we will simply get the current positions of individual motors at a predefined frequency. We will first define a list of motors that we want to record. In this example, we will only record the joints from the right arm, but you can similarly record a single motor, or all motors of the robot at once. ```python # assuming we run something like this before: -# reachy = ReachySDK(host='192.168.0.42') +# reachy = ReachySDK(host='10.0.0.201') recorded_joints = [ - reachy.r_arm.r_shoulder_pitch, - reachy.r_arm.r_shoulder_roll, - reachy.r_arm.r_arm_yaw, - reachy.r_arm.r_elbow_pitch, - reachy.r_arm.r_forearm_yaw, - reachy.r_arm.r_wrist_pitch, - reachy.r_arm.r_wrist_roll, + reachy.r_arm._shoulder.pitch, + reachy.r_arm._shoulder.roll, + reachy.r_arm._elbow.yaw, + reachy.r_arm._elbow.pitch, + reachy.r_arm._wrist.roll, + reachy.r_arm._wrist.pitch, + reachy.r_arm._wrist.yaw, ] ``` @@ -65,7 +65,7 @@ while (time.time() - start) < record_duration: ``` If you want to record a demonstration on the robot, first make sure the robot is compliant. Then, put it in the starting position. Run the code, and start moving the robot. After 5 seconds, the loop will stop and the movements you have made on Reachy will be recorded. -Depending on your uses, you can define another duration. You can also choose not to use a specify duration but maybe use start and stop event to record. In such case, the easy way is probably to run the loop within a thread or an asynchronous fonction, so it can run in background. +Depending on your uses, you can define another duration. You can also choose not to use a specify duration but maybe use start and stop event to record. In such case, the easiest way is probably to run the loop within a thread or an asynchronous fonction, so it can run in background. ## Visualise your recordings @@ -98,20 +98,17 @@ But before actually replaying the trajectory, there are a few key points that yo To avoid that, you can use the goto function to first go to the first point of your trajectories: ```python -from reachy_sdk.trajectory import goto - # Set all used joint stiff -for joint in recorded_joints: - joint.compliant = False +reachy.r_arm.turn_on() # Create a dict associating a joint to its first recorded position first_point = dict(zip(recorded_joints, trajectories[0])) # Goes to the start of the trajectory in 3s -goto(first_point, duration=3.0) +reachy.r_arm.goto(first_point, duration=3.0) ``` -Now that we are in position, we can actually play the trajectory. To do that, we simply loop over our recordings and set the goal position of each joints at the same frequency: +Now that we are in position, we can actually play the trajectory. 
To do that, we simply loop over our recordings and set the goal position of each joints then send all the commands at the same frequency: ```python import time @@ -119,6 +116,11 @@ import time for joints_positions in trajectories: for joint, pos in zip(recorded_joints, joints_positions): joint.goal_position = pos - + reachy.send_goal_positions(check_positions=False) time.sleep(1 / sampling_frequency) -``` \ No newline at end of file +``` + +> The check_positions parameter is used to check that the goal positions have been reached after the command has been sent, and that there has been no problem with an unreachable position. That process can take time et slow your replaying. Since it's a recording, all poses are necessarily reachable, so there's no need to waste process time on this check. + + +Now all we have to do is move the mobile base! \ No newline at end of file diff --git a/content/developing-with-reachy-2/basics/8-use-mobile-base.md b/content/developing-with-reachy-2/basics/8-use-mobile-base.md index d4f0223f..a06c0ae0 100644 --- a/content/developing-with-reachy-2/basics/8-use-mobile-base.md +++ b/content/developing-with-reachy-2/basics/8-use-mobile-base.md @@ -13,15 +13,40 @@ menu: weight: 270 toc: true --- +
+> You can choose to follow our online documentation or to see directly the images from your Reachy by following the [notebook n°6](https://github.com/pollen-robotics/reachy2-sdk/blob/develop/src/examples/6_mobile_base.ipynb). ## What is accessible on the mobile base The following elements are accessible with *reachy.mobile_base*: -* mobile base version, * battery level, * odometry of the base, +* lidar * control and drive modes, -* goto and set_speed methods to make the mobile base move. +* goto, translate_by/rotate_by and set_speed methods to make the mobile base move. + + +## Informations + +You can find the infos by calling the attribute mobile_base directly : +```python + reachy.mobile_base + >>> +``` + +You can have the odometry by calling the ```get_current_odometry()``` function : +```python + reachy.mobile_base.get_current_odometry() + >>> {'x': 0.0018306385027244687, + 'y': 0.0533282645046711, + 'theta': -7.456543983885954, + 'vx': 0.0, + 'vy': 0.0, + 'vtheta': 0.0} +``` + ## Frames @@ -38,7 +63,7 @@ The odom frame is a **world-fixed frame**. The position (x, y, theta) of the rob {{< img-center "images/sdk/mobile-base/odom_frame.png" 400x "" >}} -The initial position of the odom frame matches the position of the robot when the HAL was started. The odom frame can also be reset to the current position of the robot using: +The initial position of the odom frame matches the position of the robot when it was started. The odom frame can also be reset to the current position of the robot using: ```python reachy_mobile.mobile_base.reset_odometry() ``` @@ -70,8 +95,9 @@ reachy_mobile.mobile_base.goto(x=0.0, y=0.0, theta=0.0) We recommend taking the time to play around with this concept. -> Note the **goto() method of the mobile base does not work like [moves methods explained previously]({{< ref "/developing-with-reachy-2/basics/8-use-mobile-base">}})** +> Note the **goto() method of the mobile base does not work like [moves methods explained previously]({{< ref "/developing-with-reachy-2/basics/3-basic-arm-control#goto">}})** +The mobile_base gotos are always blocking methods. Meaning that the rest of the code will not be executed until the goto is finished. By default, the robot will always try to reach the goal position, meaning that even if the robot did reach its position and you push it, it will try to come back to the goal position again. @@ -81,16 +107,48 @@ However, you can define two types of stop conditions through optional parameters - A spatial tolerance, expressed with 4 values: delta_x (the error in m along the X axis), delta_y (the error in m along the Y axis), delta_theta (the angle error in deg) and distance (the l2 distance between the current position and the goal position in m). The robot stops the goto when it is close enough to satisfy all 4 conditions simultaneously. +### Using the translate_by / rotate_by methods + +Unlike the goto method, which considers input parameters in relation to the odometry set when the robot is switched on, the rotate_by and translate_by methods configure translations and rotations in relation to the robot's current position. + +> They work the same way as gotos but use a different frame. + +To make the robot rotate by a quarter turn then go 30 cm forward : +```python +reachy.mobile_base.rotate_by(theta = 90) +time.sleep(2) +reachy.mobile_base.translate_by(x=0.3, y=0.0) +``` + +With this method, you don't have to reset the odometry to make a movement safely. 
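As a small illustration (a minimal sketch reusing only the two methods above and the pause pattern from the previous example), you could chain them to drive a small square:

```python
import time

for _ in range(4):
    reachy.mobile_base.translate_by(x=0.3, y=0.0)  # 30 cm forward, relative to the current position
    time.sleep(2)
    reachy.mobile_base.rotate_by(theta=90)         # quarter turn
    time.sleep(2)
```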
+ + + ### Using the set_speed method -Since the mobile base is holonomic, the `set_speed()` method expects 3 speed commands expressed in the robot frame: +Since the mobile base is holonomic, the `set_goal_speed()/send_speed_command()` method expects 3 speed commands expressed in the robot frame: - x_vel, in m/s. The instantaneous speed positive in front of the robot. - y_vel, in m/s. The instantaneous speed positive to the left of the robot. - rot_vel, in deg/s. The instantaneous rotational speed positive counterclockwise. -See the [joy_controller code](https://github.com/pollen-robotics/mobile-base-sdk/blob/main/mobile_base_sdk/examples/scripts/joy_controller.py) for a working example. +```python +# you start by setting the speed +reachy.mobile_base.set_goal_speed(x=1.0, y=1.0, theta=2) +# then you send the command +reachy.mobile_base.send_speed_command() +``` :bulb: As a safety measure, the HAL will stop the wheels if it didn't receive a new goal speed in the last 200ms. :bulb: The way this is implemented in the HAL is simply to listen to the /cmd_vel topic, apply some smoothing, perform the kinematic calculations and send the speed commands to the wheels. This makes it very easy to create control interfaces using ROS, see the [keyboard example](https://github.com/pollen-robotics/zuuu_hal/blob/main/examples/zuuu_teleop_keyboard.py) or the [joy controller example](https://github.com/pollen-robotics/zuuu_hal/blob/main/examples/zuuu_teleop_joy.py). *Note: the HAL has a drive mode to set speed commands for variable amounts of time. Instead of relying on a topic, it creates a service. The niche usage didn't warrant the added complexity, so the interface with the SDK was not made. But if needed, it exists!* + +## Lidar + +A safety measure prevents the robot from approaching obstacles detected by its lidar. If you ever need to get closer, you can always disable this safety feature via the SDK : + +```python +reachy.mobile_base.lidar.safety_enabled(False) +``` + +Well done, now you know all the basics about Reachy's SDK ! Now, let's learn how to implement complex behaviours ! \ No newline at end of file diff --git a/content/developing-with-reachy-2/basics/_index.md b/content/developing-with-reachy-2/basics/_index.md index a88285ce..dccee4e1 100644 --- a/content/developing-with-reachy-2/basics/_index.md +++ b/content/developing-with-reachy-2/basics/_index.md @@ -11,3 +11,4 @@ menu: developing-with-reachy-2: weight: 20 --- + diff --git a/content/developing-with-reachy-2/getting-started-sdk/connect-reachy2.md b/content/developing-with-reachy-2/getting-started-sdk/connect-reachy2.md index 843d112c..372fd42f 100644 --- a/content/developing-with-reachy-2/getting-started-sdk/connect-reachy2.md +++ b/content/developing-with-reachy-2/getting-started-sdk/connect-reachy2.md @@ -7,30 +7,42 @@ lastmod: 2023-07-26T08:05:23+02:00 draft: false images: [] type: docs +url: "/developing-with-reachy-2/getting-started-sdk/connect-reachy2/" menu: developing-with-reachy-2: parent: "Getting started with the SDK" weight: 110 toc: true +slug: "connect-reachy2" +url: "/developing-with-reachy-2/getting-started-sdk/connect-reachy2/" --- +
-The last required step before being able to use your Reachy 2 is to find its IP address. +To be able to connect to your Reachy 2, you first need to be on the same network (either via Ethernet or WiFi). + +Then you need to find your robot's IP address. Unfortunately, you can't use its .local address for the SDK. You have two ways to do that : via the dashboard (the easiest way) or via the LCD screen. + +## Using the dashboard + +You can use your robot name to access the dashboard : for that, you type on a browser `reachy_name.local:8000/`. + +Once you are in the dashboard, you can click on **Network** and you will find the IP addresses of your robot (WiFi and Ethernet). -> Note: if you haven't connected Reachy to a network yet, please first follow the instructions ??? ## Using the LCD screen -If you haven't unplugged it, the LCD screen connected in Reachy's back should be diplaying its IP address. +If you are not able to access the dashboard, you can turn off completely your robot. Then, you can plug the supplied LCD screen on the USB port of the mobile base. -{{< img-center "images/sdk/getting-started/lcd-display.png" 400x "" >}} +Turn on your robot again, and it should display display the robot's IP addresses (alternately Ethernet and WiFi) : -If the LCD screen is not working or is unplugged, check out the page Find my IP section to learn other ways to get the IP address. +{{< img-center "images/sdk/getting-started/IP_address.jpg" 400x "" >}} -You can check that everything is working as expected by running the following Python code: +You can check that everything is working as expected by running the following Python code in a terminal on your virtual environment : -```python +``` +$ python3 from reachy_sdk import ReachySDK # Replace with the actual IP you've found. diff --git a/content/developing-with-reachy-2/getting-started-sdk/installation.md b/content/developing-with-reachy-2/getting-started-sdk/installation.md index c63948d1..87e68365 100644 --- a/content/developing-with-reachy-2/getting-started-sdk/installation.md +++ b/content/developing-with-reachy-2/getting-started-sdk/installation.md @@ -17,12 +17,19 @@ toc: true ## How to install the Python SDK -The Python SDK is a pure Python library. The installation should thus be rather straightforward. +The Python SDK is a pure Python library. The installation should thus be rather straightforward. It supports Python >= 3.10 (older versions will not work because of typing syntax). -It supports Python >= 3.10 (older versions will not work because of typing syntax). It works on Windows/Mac/Linux. +> For now, the library [pollen_vision](pollen-robotics/pollen-vision) used to do ["perception"]({{< ref "ai-with-reachy-2/perception/" >}}), needs Python 3.10, so you may want to have that version. + +It works on Windows/Mac/Linux. + +
+On Linux We recommend to use [virtual environment](https://docs.python.org/3/tutorial/venv.html) for your development. They make the installation simple and avoid compatibility issues. They also come with their [pip](https://pip.pypa.io/en/stable/) command. +Inside your virtual environment, you can install the library either from PyPI or by cloning the repository: ### From PyPi ```bash pip install reachy2-sdk ``` ### From the source ```bash -git clone https://github.com/pollen-robotics/reachy2-sdk +git clone https://github.com/pollen-robotics/reachy2-sdk.git cd reachy2-sdk pip install -e reachy2-sdk ``` +
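Once the installation is done, a quick sanity check from Python should succeed (the `__version__` attribute is an assumption here; the `pip list` check mentioned below works as well):

```python
# If this import fails, the SDK is not installed in the active environment.
import reachy2_sdk
print(reachy2_sdk.__version__)  # assuming the package exposes a __version__ attribute
```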
+ +
+On Windows + +We recommend you to use a virtual environment, that will allow you to have all the needed packages to control Reachy without any conflict with already existing packages on your computer. + +### Create the virtual environment : + +1. You can use Miniconda, which is a minimal version of the Anaconda Python distribution. You need to download it [there](https://www.anaconda.com/download/success) : scroll down until you reach the Miniconda Installers and click on the Windows installer. + + {{< img "images/sdk/getting-started/conda_install.png" 500x "miniconda">}} + +2. Launch the .exe you just downloaded and follow the installation procedure. + + {{< img "images/sdk/getting-started/conda_install_2.png" 500x "miniconda installer">}} + +3. Open the Anaconda Powershell Prompt on your applications and type `conda create -n python=3.10 git`, for example `conda create -n reachy python=3.10 git` (to install the supported version of Python and Git with your new environment) + + {{< img "images/sdk/getting-started/create_env.png" 800x "create venv">}} + +4. Then activate your virtual environment : `conda activate ` + + {{< img "images/sdk/getting-started/activate_env.png" 800x "activate venv">}} + + +### Install the SDK Client : + +Inside your virtual environment, you can install the library either from Pypi, or by cloning all the repository : + +#### From Pypi +```bash +pip install reachy2-sdk +``` + +#### From source +1. Create a folder (for example “Dev”) + > `mkdir Dev` +2. Go in this folder + > `cd \Dev\` +3. Clone the SDK repository in this folder + > `git clone https://github.com/pollen-robotics/reachy2-sdk.git` +4. Go in this new subfolder + > `cd \reachy2-sdk\` +5. Install the library + > `pip install -e` . *that command will install all the needed packages and libraries to make the SDK work on your virtual environment* + + + +
+ +
+On Mac + +To be done. + +
+ +To be sure it worked, you can write `pip list` and check that you have reachy2-sdk. + + ## Dependencies The SDK relies on a few third-party Python packages, such as: @@ -46,3 +114,7 @@ The SDK relies on a few third-party Python packages, such as: * [grpc](https://grpc.io) - to connect to the robot They will be **installed automatically** when you install the SDK. + + + +{{< warning icon="👉🏾" text="Now that you have reachy2-sdk installed on your computer, you can connect to your robot and learn how to use it with the Getting Started Notebooks. So keep up !" >}} \ No newline at end of file diff --git a/content/developing-with-reachy-2/getting-started-sdk/visualize-fake-robot.md b/content/developing-with-reachy-2/getting-started-sdk/visualize-fake-robot.md index ffd38425..b40053a9 100644 --- a/content/developing-with-reachy-2/getting-started-sdk/visualize-fake-robot.md +++ b/content/developing-with-reachy-2/getting-started-sdk/visualize-fake-robot.md @@ -14,4 +14,14 @@ weight: 120 toc: true --- -*Page in progress* \ No newline at end of file +For now, Reachy doesn't have any collision avoidance restrictions (left arm against right arm, arm against torso, etc.). + +So before implementing any new behaviour on your Reachy, we recommand you to test your movements on a fake mode. That means that your physical robot won't move but the simulated one will. As so, you will be able to visualize what Reachy will do and to adapt your moves before testing it on the real one. + +For that, blablabla + + +Then you can go back to the dashboard, click on **Visualisation tools** and you will see Rviz with your fake Reachy. + + +Now, you are ready to make your robot move ! You can skip to the *Basics*. diff --git a/content/developing-with-reachy-2/simulation/simulation-installation.md b/content/developing-with-reachy-2/simulation/simulation-installation.md index 8f4f3b45..93a094e1 100644 --- a/content/developing-with-reachy-2/simulation/simulation-installation.md +++ b/content/developing-with-reachy-2/simulation/simulation-installation.md @@ -14,13 +14,15 @@ weight: 400 toc: true --- -If you want to try movements on the robot without using the real robot, you can install a simulated Reachy 2 on your computer, and run it the same way the real robot is run. The easiest way is using a docker image. We will thus assume that you already have docker installed and setup. +If you want to try movements on the robot without using the real robot, you can install a simulated Reachy 2 on your computer, and run it the same way the real robot is run. You'll have a rviz window to see the fake robot move. + +The easiest way is using a docker image. We will thus assume that you already have docker installed and setup. Clone the sources of our docker, and pull the sources: ```python git clone git@github.com:pollen-robotics/docker_reachy2_core.git cd docker_reachy2_core -./pull_sources.sh beta +./sources checkout stable ``` Then download the configuration files: @@ -31,12 +33,13 @@ cp -r reachy_config_example/.reachy_config ~/ In your docker_reachy2_core folder, compose a container with: ```python -docker compose -f dev.yaml up -d core +docker compose -f mode/dev.yaml up -d core ``` > This can take a few minutes to compose. Build: ```python +full_build cbuilds ``` diff --git a/content/help/faq/robot-faq.md b/content/help/faq/robot-faq.md index 5004ec56..fbe9bcbe 100644 --- a/content/help/faq/robot-faq.md +++ b/content/help/faq/robot-faq.md @@ -204,3 +204,18 @@ An `AVG SLOPE SCORE` below `0.1%` is OK. 
Ideally it could be under `0.05%`. The lower, the better. + +## 6. Change the sound volume + +If you want to change the volume, especially for the starting sound of your robot or the output sound when you teleoperate, you need to go on a terminal **when the webRTC service is running** : + +Run: +```console +$ ssh bedrock@your_robot_ip #password : root +$ docker exec -it webrtc_streaming_playback_ros bash +$ alsamixer -c 1 +``` + +Then, you can set the volume as you wish. + + diff --git a/content/help/faq/sdk-faq.md b/content/help/faq/sdk-faq.md index 0c38b61a..208a0e7a 100644 --- a/content/help/faq/sdk-faq.md +++ b/content/help/faq/sdk-faq.md @@ -23,3 +23,9 @@ Check all logs of the service with: ```bash journalctl -b -u reachy2-core ``` + +> *I execute a command in the SDK but nothing happens on my robot* + +Check that you are not on a fake mode (mode that only makes the virtual robot moves in the visualisation tools of the dashboard but not the real one) : for that, you can type `reachy.info` and check the mode is not 'FAKE'. If so, the simplest way to undo it is to reboot entirely your robot. + +Check on the dashboard services that everything is fine, especially in the reachy2-core logs. If you see errors, restart the core. diff --git a/content/help/faq/teleoperation-faq.md b/content/help/faq/teleoperation-faq.md index 784a1725..d0ac182a 100644 --- a/content/help/faq/teleoperation-faq.md +++ b/content/help/faq/teleoperation-faq.md @@ -14,10 +14,33 @@ toc: true weight: 230 --- +## Problems with the app + +> *The app is lagging a lot, what can I do ?* + +Check that your computer is plugged (note that for some laptops, they must be plugged in as soon as they are switched on). + +If that doesn’t resolve the lag, maybe your network is overloaded, you can try change your robot’s and your computer’s network (you don’t need to have internet for it to work, it can work on a isolated router). Check that there is no driver update available on your VR device (for Oculus Quest, you can see them on top of your MetaQuestLink app on your computer, if your device is plugged). + +Finally, it can be a GPU issue : FAQ GPU. + + +> *When I hit “play”, I have a loading page in my device and I never enter the app* + +Go to Edition > Project Settings > XR Plug-in Management. Check that Initialize XR on Start-Up and Oculus are selected. If so, try to unselect the first one and try again. + +> *Tunnelling appears only in one eye* + +Go to Edition > Project Settings > XR Plug-in Management > Oculus > Stereo rendering mode : select multi pass and try again. + + + ## Problem with the cameras or sound ### With teleoperation application +Check in your laptop settings that your device is selected in “Output”. Check also that you don’t have an audio device connected by Bluetooth that can interfere. + During teleoperation, the cameras and sound are managed by the webrtc service. This service is automatically launched when you start Reachy 2 computer. @@ -31,3 +54,5 @@ Check all logs of the service with: ```bash journalctl -b -u webrtc ``` + + diff --git a/layouts/index.html b/layouts/index.html index 77250363..82531432 100644 --- a/layouts/index.html +++ b/layouts/index.html @@ -45,9 +45,9 @@

Tutorials

diff --git a/layouts/partials/head/custom-head.html b/layouts/partials/head/custom-head.html index 0c59d7ff..1b7c81b4 100644 --- a/layouts/partials/head/custom-head.html +++ b/layouts/partials/head/custom-head.html @@ -1 +1,3 @@ + + diff --git a/layouts/shortcodes/img-center.html b/layouts/shortcodes/img-center.html index b7c41a24..29fb1272 100644 --- a/layouts/shortcodes/img-center.html +++ b/layouts/shortcodes/img-center.html @@ -1,6 +1,11 @@ {{ $image := resources.Get (.Get 0) }} -{{ $image := $image.Resize (.Get 1) }} - -

- {{(.Get 2)}} -

\ No newline at end of file +{{ if $image }} + {{ $resized := $image.Resize (.Get 1) }} +

+ {{ .Get 2 }} +

+{{ else }} +

+ Image not found: {{ .Get 0 }} +

+{{ end }} diff --git a/layouts/shortcodes/img.html b/layouts/shortcodes/img.html index 11b628aa..6abc7b8f 100644 --- a/layouts/shortcodes/img.html +++ b/layouts/shortcodes/img.html @@ -1,3 +1,7 @@ {{ $image := resources.Get (.Get 0) }} -{{ $image := $image.Resize (.Get 1) }} -{{(.Get 2)}} \ No newline at end of file +{{ if $image }} + {{ $resized := $image.Resize (.Get 1) }} + {{ .Get 2 }} +{{ else }} +

Image not found : {{ .Get 0 }}

+{{ end }} \ No newline at end of file diff --git a/static/gifs/tutorials/gif_awake.gif b/static/gifs/tutorials/gif_awake.gif new file mode 100644 index 00000000..4fff3ed8 Binary files /dev/null and b/static/gifs/tutorials/gif_awake.gif differ diff --git a/static/gifs/tutorials/gif_oranges.gif b/static/gifs/tutorials/gif_oranges.gif new file mode 100644 index 00000000..d80d131d Binary files /dev/null and b/static/gifs/tutorials/gif_oranges.gif differ diff --git a/static/gifs/tutorials/gif_rope.gif b/static/gifs/tutorials/gif_rope.gif new file mode 100644 index 00000000..9523c062 Binary files /dev/null and b/static/gifs/tutorials/gif_rope.gif differ