
Commit c28ab63

Merge pull request #15 from personalrobotics/egordon/062022_working_demo
Demo updates
2 parents 533efee + 97f5477

7 files changed: +30 −17 lines


README.md

Lines changed: 9 additions & 5 deletions
@@ -111,19 +111,23 @@ Run the following commands from your ROS workspace:
 3) Once the lights on the joystick go solid, home ADA by holding the orange button until the robot stops moving.
 4) `ssh nano` (you may need to add `nano` to your `.ssh/config`; this is the Jetson on the robot). Once there, set your ROS Master using `uselovelace`, `useweebo`, or `useweebowired` (or set your `ROS_MASTER_URI` manually), then execute `./run_camera.sh` to start streaming RGBD data.
    * You may have to adjust the camera exposure, depending on the lighting conditions. Either run `run_adjust_camera_daylight.sh` or `run_adjust_camera_all.sh` after running `run_camera.sh`. Check the image stream via RViz by adding the image topic `/camera/color/image_raw/color`. If some area is too bright and looks burnt or saturated, reduce the exposure.
-6) `roslaunch forque_sensor_hardware forque.launch` (Optionally add `forque_ip:=<IPv4>` if your Net-FT is on a non-default IP)
+5) `roslaunch forque_sensor_hardware forque.launch` (Optionally add `forque_ip:=<IPv4>` if your Net-FT is on a non-default IP)
 6) `rosrun face_detection face_detection`
-7) `roslaunch libada default.launch use_forque:=true perception:=true detector:=spanet use_apriltag_calib:=true`
+7) If you are also running the Alexa code, `cd` to the `ADA_Talk` directory and run:
+   a) `roslaunch rosbridge_server rosbridge_websocket.launch`
+   b) `bst proxy lambda index.js`
+8) `roslaunch libada default.launch use_forque:=true perception:=true detector:=spanet use_apriltag_calib:=true`
    * Optionally, run with `perception:=false` if you want to run perception manually in another terminal.
    * To run perception manually in another terminal: `rosrun food_detector run_perception_module.py --demo-type spanet`
-8) `roslaunch ada_feeding feeding.launch` (will quit after writing ROS parameters)
+9) `roslaunch ada_feeding feeding.launch` (will quit after writing ROS parameters)
    * Optionally, run `roslaunch ada_feeding data_collection.launch` after `feeding.launch` if you're doing data collection.
-9) `cd ~/Workspace/ada_ws/devel/bin/` and `./feeding -af`
+10) `cd ~/Workspace/ada_ws/devel/bin/` and `./feeding -af`
    * `-a`: specifies that this is the real robot, not a simulation
    * `-f`: enables the force/torque sensor on the real robot (**REQUIRED when picking up food for safety**)
    * `-c`: causes the demo to proceed without the user pressing \[ENTER\] between steps
    * `-d`: selects which demo to run
-10) Advance the demo by pressing Return, or terminate nicely by pressing `n` and then Return
+   * `-s`: whether to run with sound or not
+11) Advance the demo by pressing Return, or terminate nicely by pressing `n` and then Return
 
 ## Running with acquisition detection
 To run with acquisition detection (and not require manual success/failure input from the supervisor), run

config/gen2_feeding_demo.yaml

Lines changed: 3 additions & 3 deletions
@@ -56,7 +56,7 @@ wheelchair:
 feedingDemo:
   heightAbovePlate: 0.15
   heightAboveFood: 0.08
-  heightIntoFood: 0.085
+  heightIntoFood: 0.08
   moveOutofFood: 0.025
   #distanceToPerson: 0.25
   distanceToPerson: 0.05
@@ -120,15 +120,15 @@ foodItems:
   #forces: [15,3,3,7,22,15,7]
   #pickUpAngleModes: [0, 2, 2, 0, 0, 0, 0]
   names: ["apple", "banana", "bell_pepper", "broccoli", "cantaloupe", "carrot", "cauliflower", "celery", "cherry_tomato", "grape", "honeydew", "kiwi", "strawberry", "lettuce", "spinach", "kale"]
-  forces: [10, 10, 10, 10, 15, 22, 7, 22, 7, 10, 7, 7, 15, 7, 7, 7]
+  forces: [10, 10, 10, 10, 7, 15, 7, 15, 7, 7, 7, 7, 15, 7, 7, 7]
   pickUpAngleModes: [1, 5, 1, 2, 1, 1, 0, 1, 1, 1, 3, 3, 0, 1, 2, 0]
 
 humanStudy:
   autoAcquisition: true
   autoTiming: true
   autoTransfer: true
   createError: false
-  useAlexa: false
+  useAlexa: true
   foodTopic: /alexa_msgs
   actionTopic: /alexa_msgs
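The `names`, `forces`, and `pickUpAngleModes` arrays are parallel: entry i of each array describes the same food item, so this hunk lowers the skewering force thresholds for cantaloupe, carrot, celery, and grape. A minimal sketch of how such a per-food lookup could work, assuming the yaml has been loaded onto the ROS parameter server under a `foodItems` namespace; the key paths and fallback value are illustrative assumptions, not the demo's actual API:

    // Hypothetical lookup of a per-food force threshold from the parallel
    // arrays above (names[i] pairs with forces[i]). The "foodItems/..."
    // keys and the fallback value are assumptions for illustration.
    #include <ros/ros.h>
    #include <string>
    #include <vector>

    int forceThresholdFor(ros::NodeHandle &nh, const std::string &food) {
      std::vector<std::string> names;
      std::vector<int> forces;
      if (!nh.getParam("foodItems/names", names) ||
          !nh.getParam("foodItems/forces", forces))
        return 7;  // parameters missing: fall back to the most common value
      for (std::size_t i = 0; i < names.size() && i < forces.size(); ++i)
        if (names[i] == food)
          return forces[i];  // e.g. "cantaloupe" now yields 7 instead of 15
      return 7;  // unknown food: same conservative fallback
    }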

include/feeding/util.hpp

Lines changed: 1 addition & 1 deletion
@@ -61,7 +61,7 @@ void dumpSplinePhasePlot(const aikido::trajectory::Spline &spline,
 /// param[in] food_only If true, only food choices are valid
 /// param[in] nodeHandle Ros Node to set food name for detection.
 std::string getUserFoodInput(bool food_only, ros::NodeHandle &nodeHandle,
-                             bool useAlexa = true, double timeout = 10);
+                             bool useAlexa = true, double timeout = 20);
 
 int getUserInputWithOptions(const std::vector<std::string> &optionPrompts,
                             const std::string &prompt);

src/action/FeedFoodToPerson.cpp

Lines changed: 7 additions & 3 deletions
@@ -179,9 +179,9 @@ void feedFoodToPerson(const std::shared_ptr<Perception> &perception,
 
   nodeHandle->setParam("/feeding/facePerceptionOn", true);
 
-  if (overrideTiltOffset == nullptr) {
-    distanceToPerson = 0;
-  }
+  // if (overrideTiltOffset == nullptr) {
+  //   distanceToPerson = 0;
+  // }
 
   ROS_INFO_STREAM("Move towards person");
   moveSuccess =
@@ -277,6 +277,10 @@ void feedFoodToPerson(const std::shared_ptr<Perception> &perception,
   ada::util::waitForUser("Move backward", ada);
   if (useSound)
     talk("Let me get out of your way.", true);
+
+  if (feedingDemo->getFTThresholdHelper())
+    feedingDemo->getFTThresholdHelper()->setThresholds(STANDARD_FT_THRESHOLD, true);
+
   Eigen::Vector3d goalDirection(0, -1, 0);
   bool success = moveInFrontOfPerson(
       ada->getArm()->getWorldCollisionConstraint(

src/action/MoveInto.cpp

Lines changed: 1 addition & 1 deletion
@@ -92,7 +92,7 @@ bool moveInto(const std::shared_ptr<Perception> &perception, TargetItem item,
   // Collision constraint is not set because f/t sensor stops execution.
   auto trajectory =
       ada->getArm()->planToOffset(ada->getEndEffectorBodyNode()->getName(),
-                                  Eigen::Vector3d{0.0, 0.0, -heightIntoFood});
+                                  heightIntoFood * endEffectorDirection);
 
   bool success = true;
   try {

src/action/Skewer.cpp

Lines changed: 5 additions & 0 deletions
@@ -253,6 +253,11 @@ bool skewer(const std::shared_ptr<Perception> &perception,
   // ===== INTO FOOD =====
   // if (useSound)
   //   talk("Here we go!", true);
+
+  // Set end effector to move into food
+  endEffectorDirection = perception->getTrackedFoodItemPose().translation() - ada->getEndEffectorBodyNode()->getTransform().translation();
+  endEffectorDirection.normalize();
+
   auto moveIntoSuccess = moveInto(perception, TargetItem::FOOD,
                                   endEffectorDirection, feedingDemo);
 
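Together with the MoveInto.cpp change above, skewering now plunges along the fork-to-food vector instead of straight down: Skewer.cpp computes the normalized direction from the tracked food pose, and moveInto scales it by `heightIntoFood` to get the planning offset. A standalone Eigen sketch of that geometry, using invented positions (the real call site is the `planToOffset` line in the MoveInto.cpp hunk):

    // Standalone sketch of the new approach geometry; the positions below
    // are made up for illustration. The planner offset is now
    // heightIntoFood * endEffectorDirection rather than the old fixed
    // straight-down vector {0, 0, -heightIntoFood}.
    #include <Eigen/Dense>
    #include <iostream>

    int main() {
      const double heightIntoFood = 0.08;  // from config/gen2_feeding_demo.yaml

      // Hypothetical world positions of the tracked food item and fork tip.
      Eigen::Vector3d foodPosition(0.40, 0.05, 0.20);
      Eigen::Vector3d endEffectorPosition(0.38, 0.04, 0.28);

      // Unit vector from the fork tip toward the food (as in Skewer.cpp).
      Eigen::Vector3d endEffectorDirection = foodPosition - endEffectorPosition;
      endEffectorDirection.normalize();

      Eigen::Vector3d newOffset = heightIntoFood * endEffectorDirection;
      Eigen::Vector3d oldOffset(0.0, 0.0, -heightIntoFood);

      std::cout << "new offset: " << newOffset.transpose() << "\n"
                << "old offset: " << oldOffset.transpose() << "\n";
    }

The practical effect: when the detected food is not directly below the fork tip, the approach follows the line of sight to the item instead of undershooting it with a vertical plunge.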

src/util.cpp

Lines changed: 4 additions & 4 deletions
@@ -200,11 +200,11 @@ std::string getInputFromTopic(std::string topic,
   ROS_INFO_STREAM("Got Input " << foodWord);
   if (validateAsFood) {
     for (std::size_t i = 0; i < FOOD_NAMES.size(); ++i) {
-      if (FOOD_NAMES[i].compare(foodWord) == 0) {
+      if (foodWord.find(FOOD_NAMES[i]) != std::string::npos) {
         ROS_INFO_STREAM("Sucessfully returned");
-        nodeHandle.setParam("/deep_pose/forceFoodName", foodWord);
-        nodeHandle.setParam("/deep_pose/spnet_food_name", foodWord);
-        return foodWord;
+        nodeHandle.setParam("/deep_pose/forceFoodName", FOOD_NAMES[i]);
+        nodeHandle.setParam("/deep_pose/spnet_food_name", FOOD_NAMES[i]);
+        return FOOD_NAMES[i];
       }
     }
     return "";
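This hunk swaps an exact `compare` for substring matching and returns the canonical entry from `FOOD_NAMES`, so an utterance that merely contains a food name still selects it, and downstream detectors always receive a known label. A minimal standalone illustration of that behavior (with `FOOD_NAMES` abbreviated here):

    // Minimal illustration of the substring-matching change above.
    // FOOD_NAMES is abbreviated; the full list lives in the feeding code.
    #include <iostream>
    #include <string>
    #include <vector>

    static const std::vector<std::string> FOOD_NAMES = {
        "apple", "banana", "cherry_tomato"};

    std::string matchFood(const std::string &foodWord) {
      for (const auto &name : FOOD_NAMES)
        if (foodWord.find(name) != std::string::npos)
          return name;  // canonical name, not the raw utterance
      return "";        // no food recognized
    }

    int main() {
      std::cout << matchFood("a cherry_tomato please") << "\n";  // cherry_tomato
      std::cout << matchFood("banana") << "\n";                  // banana
      std::cout << matchFood("soup") << "\n";                    // (empty)
    }

With the old exact `compare`, the first phrase would have been rejected even though it names a valid food.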
