
Commit 7717beb

Merge pull request #403 from JdeRobot/issue-401
First approach for Carla Simulator support
2 parents 56a852d + 2e1a085 commit 7717beb


43 files changed: +2448 −78 lines
Lines changed: 94 additions & 0 deletions
@@ -0,0 +1,94 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
import csv
import cv2
import math
import numpy as np
import threading
import time
import carla
from albumentations import (
    Compose, Normalize, RandomRain, RandomBrightness, RandomShadow, RandomSnow, RandomFog, RandomSunFlare
)
from utils.constants import DATASETS_DIR, ROOT_PATH


GENERATED_DATASETS_DIR = ROOT_PATH + '/' + DATASETS_DIR


class Brain:

    def __init__(self, sensors, actuators, handler, config=None):
        self.camera = sensors.get_camera('camera_0')
        self.camera_1 = sensors.get_camera('camera_1')
        self.camera_2 = sensors.get_camera('camera_2')
        self.camera_3 = sensors.get_camera('camera_3')

        self.pose = sensors.get_pose3d('pose3d_0')

        self.bird_eye_view = sensors.get_bird_eye_view('bird_eye_view_0')

        self.motors = actuators.get_motor('motors_0')
        self.handler = handler
        self.config = config

        self.threshold_image = np.zeros((640, 360, 3), np.uint8)
        self.color_image = np.zeros((640, 360, 3), np.uint8)
        self.lock = threading.Lock()
        self.threshold_image_lock = threading.Lock()
        self.color_image_lock = threading.Lock()
        self.cont = 0
        self.iteration = 0

        # self.previous_timestamp = 0
        # self.previous_image = 0

        self.previous_v = None
        self.previous_w = None
        self.previous_w_normalized = None
        self.suddenness_distance = []

        client = carla.Client('localhost', 2000)
        client.set_timeout(10.0)  # seconds
        world = client.get_world()
        time.sleep(3)
        self.vehicle = world.get_actors().filter('vehicle.*')[0]

        time.sleep(2)

    def update_frame(self, frame_id, data):
        """Update the information to be shown in one of the GUI's frames.

        Arguments:
            frame_id {str} -- Id of the frame that will represent the data
            data {*} -- Data to be shown in the frame. Depending on the type of frame (rgbimage, laser, pose3d, etc)
        """
        self.handler.update_frame(frame_id, data)

    def update_pose(self, pose_data):
        self.handler.update_pose3d(pose_data)

    def execute(self):
        image = self.camera.getImage().data
        image_1 = self.camera_1.getImage().data
        image_2 = self.camera_2.getImage().data
        image_3 = self.camera_3.getImage().data

        bird_eye_view_1 = self.bird_eye_view.getImage(self.vehicle)

        # print(self.bird_eye_view.getImage(self.vehicle))

        # self.motors.sendW(w)
        self.motors.sendV(1)
        self.update_frame('frame_0', image)
        self.update_frame('frame_1', image_1)
        self.update_frame('frame_2', image_2)
        self.update_frame('frame_3', image_3)

        # self.update_frame('frame_0', bird_eye_view_1)

        self.update_pose(self.pose.getPose3d())
        # print(self.pose.getPose3d())
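This first brain only streams the four camera feeds to the GUI and commands a constant forward speed; the ego vehicle is taken as the first actor matching 'vehicle.*'. A minimal sketch of a more selective lookup, assuming the vehicle was spawned with role_name 'ego_vehicle' (as the launch file later in this commit does); this is illustrative and not part of the committed code:

import carla

# Sketch only: select the ego vehicle by its 'role_name' attribute instead of
# taking index [0], which assumes the ego vehicle happens to be the first actor.
client = carla.Client('localhost', 2000)
client.set_timeout(10.0)
world = client.get_world()

ego_vehicle = None
for actor in world.get_actors().filter('vehicle.*'):
    if actor.attributes.get('role_name') == 'ego_vehicle':
        ego_vehicle = actor
        break

if ego_vehicle is None:
    raise RuntimeError("No actor with role_name 'ego_vehicle' found")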
Lines changed: 151 additions & 0 deletions
@@ -0,0 +1,151 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
import csv
import cv2
import math
import numpy as np
import threading
import time
import carla
from albumentations import (
    Compose, Normalize, RandomRain, RandomBrightness, RandomShadow, RandomSnow, RandomFog, RandomSunFlare
)
from utils.constants import DATASETS_DIR, ROOT_PATH

import tensorflow as tf
gpus = tf.config.experimental.list_physical_devices('GPU')
for gpu in gpus:
    tf.config.experimental.set_memory_growth(gpu, True)

PRETRAINED_MODELS = "../models/"


class Brain:

    def __init__(self, sensors, actuators, handler, config=None):
        self.camera = sensors.get_camera('camera_0')
        self.camera_1 = sensors.get_camera('camera_1')
        self.camera_2 = sensors.get_camera('camera_2')
        self.camera_3 = sensors.get_camera('camera_3')

        self.pose = sensors.get_pose3d('pose3d_0')

        self.bird_eye_view = sensors.get_bird_eye_view('bird_eye_view_0')

        self.motors = actuators.get_motor('motors_0')
        self.handler = handler
        self.config = config

        self.threshold_image = np.zeros((640, 360, 3), np.uint8)
        self.color_image = np.zeros((640, 360, 3), np.uint8)
        '''
        self.lock = threading.Lock()
        self.threshold_image_lock = threading.Lock()
        self.color_image_lock = threading.Lock()
        '''
        self.cont = 0
        self.iteration = 0

        # self.previous_timestamp = 0
        # self.previous_image = 0

        self.previous_v = None
        self.previous_w = None
        self.previous_w_normalized = None
        self.suddenness_distance = []

        client = carla.Client('localhost', 2000)
        client.set_timeout(10.0)  # seconds
        world = client.get_world()
        time.sleep(10)
        # print(world.get_actors())
        self.vehicle = world.get_actors().filter('vehicle.*')[0]

        model = '/home/jderobot/Documents/Projects/BehaviorMetrics/PlayingWithCARLA/models/20221104-143528_pilotnet_CARLA_17_10_dataset_bird_eye_300_epochs_no_flip_3_output_velocity_all_towns_vel_30_cp.h5'
        self.net = tf.keras.models.load_model(model)
        self.previous_speed = 0


    def update_frame(self, frame_id, data):
        """Update the information to be shown in one of the GUI's frames.

        Arguments:
            frame_id {str} -- Id of the frame that will represent the data
            data {*} -- Data to be shown in the frame. Depending on the type of frame (rgbimage, laser, pose3d, etc)
        """
        if data.shape[0] != data.shape[1]:
            if data.shape[0] > data.shape[1]:
                difference = data.shape[0] - data.shape[1]
                extra_left, extra_right = int(difference/2), int(difference/2)
                extra_top, extra_bottom = 0, 0
            else:
                difference = data.shape[1] - data.shape[0]
                extra_left, extra_right = 0, 0
                extra_top, extra_bottom = int(difference/2), int(difference/2)

            data = np.pad(data, ((extra_top, extra_bottom), (extra_left, extra_right), (0, 0)), mode='constant', constant_values=0)

        self.handler.update_frame(frame_id, data)

    def update_pose(self, pose_data):
        self.handler.update_pose3d(pose_data)

    def execute(self):
        # print(self.vehicle.get_location())
        image = self.camera.getImage().data
        image_1 = self.camera_1.getImage().data
        image_2 = self.camera_2.getImage().data
        image_3 = self.camera_3.getImage().data

        bird_eye_view_1 = self.bird_eye_view.getImage(self.vehicle)

        # print(self.bird_eye_view.getImage(self.vehicle))

        # self.update_frame('frame_0', image)
        self.update_frame('frame_1', image_1)
        self.update_frame('frame_2', image_2)
        self.update_frame('frame_3', image_3)

        self.update_frame('frame_0', bird_eye_view_1)

        self.update_pose(self.pose.getPose3d())

        image_shape = (50, 150)
        img_base = cv2.resize(bird_eye_view_1, image_shape)

        AUGMENTATIONS_TEST = Compose([
            Normalize()
        ])
        image = AUGMENTATIONS_TEST(image=img_base)
        img = image["image"]

        # velocity_dim = np.full((150, 50), 0.5)
        velocity_dim = np.full((150, 50), self.previous_speed/30)
        new_img_vel = np.dstack((img, velocity_dim))
        img = new_img_vel

        img = np.expand_dims(img, axis=0)
        prediction = self.net.predict(img, verbose=0)
        throttle = prediction[0][0]
        steer = prediction[0][1] * (1 - (-1)) + (-1)
        break_command = prediction[0][2]
        speed = self.vehicle.get_velocity()
        vehicle_speed = 3.6 * math.sqrt(speed.x**2 + speed.y**2 + speed.z**2)
        self.previous_speed = vehicle_speed

        if vehicle_speed > 300:
            self.motors.sendThrottle(0)
            self.motors.sendSteer(steer)
            self.motors.sendBrake(0)
        else:
            if vehicle_speed < 2:
                self.motors.sendThrottle(1.0)
                self.motors.sendSteer(0.0)
                self.motors.sendBrake(0)
            else:
                self.motors.sendThrottle(throttle)
                self.motors.sendSteer(steer)
                self.motors.sendBrake(0)
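This brain drives the car from a PilotNet-style TensorFlow model: each bird's-eye-view frame is resized, normalized, and stacked with an extra channel encoding the previous speed, and the three network outputs are read as throttle, steer (rescaled from [0, 1] to [-1, 1]) and brake. A compact sketch of that preprocessing as a standalone function, mirroring the hard-coded values above ((50, 150) target size, speed divided by 30); the function name is illustrative and not part of the commit:

import cv2
import numpy as np
from albumentations import Compose, Normalize

def preprocess_bird_eye(bird_eye_view, previous_speed_kmh):
    # cv2.resize takes (width, height), so (50, 150) yields a 150x50x3 image
    img = cv2.resize(bird_eye_view, (50, 150))
    img = Compose([Normalize()])(image=img)["image"]
    # Fourth channel: previous vehicle speed normalized by roughly 30 km/h
    velocity_dim = np.full((150, 50), previous_speed_kmh / 30)
    img = np.dstack((img, velocity_dim))
    return np.expand_dims(img, axis=0)  # add the batch dimension for predict()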
Lines changed: 76 additions & 0 deletions
@@ -0,0 +1,76 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
import csv
import cv2
import math
import numpy as np
import threading
import time
from albumentations import (
    Compose, Normalize, RandomRain, RandomBrightness, RandomShadow, RandomSnow, RandomFog, RandomSunFlare
)
from utils.constants import DATASETS_DIR, ROOT_PATH


GENERATED_DATASETS_DIR = ROOT_PATH + '/' + DATASETS_DIR


class Brain:

    def __init__(self, sensors, actuators, handler, config=None):
        self.camera = sensors.get_camera('camera_0')
        self.camera_1 = sensors.get_camera('camera_1')
        self.camera_2 = sensors.get_camera('camera_2')
        self.camera_3 = sensors.get_camera('camera_3')

        self.pose = sensors.get_pose3d('pose3d_0')

        self.motors = actuators.get_motor('motors_0')
        self.handler = handler
        self.config = config

        self.threshold_image = np.zeros((640, 360, 3), np.uint8)
        self.color_image = np.zeros((640, 360, 3), np.uint8)
        self.lock = threading.Lock()
        self.threshold_image_lock = threading.Lock()
        self.color_image_lock = threading.Lock()
        self.cont = 0
        self.iteration = 0

        # self.previous_timestamp = 0
        # self.previous_image = 0

        self.previous_v = None
        self.previous_w = None
        self.previous_w_normalized = None
        self.suddenness_distance = []

        time.sleep(2)

    def update_frame(self, frame_id, data):
        """Update the information to be shown in one of the GUI's frames.

        Arguments:
            frame_id {str} -- Id of the frame that will represent the data
            data {*} -- Data to be shown in the frame. Depending on the type of frame (rgbimage, laser, pose3d, etc)
        """
        self.handler.update_frame(frame_id, data)

    def update_pose(self, pose_data):
        self.handler.update_pose3d(pose_data)

    def execute(self):
        image = self.camera.getImage().data
        image_1 = self.camera_1.getImage().data
        image_2 = self.camera_2.getImage().data
        image_3 = self.camera_3.getImage().data

        self.motors.sendW(1)
        self.motors.sendV(0.5)
        self.update_frame('frame_0', image)
        self.update_frame('frame_1', image_1)
        self.update_frame('frame_2', image_2)
        self.update_frame('frame_3', image_3)
        self.update_pose(self.pose.getPose3d())
        # print(self.pose.getPose3d())
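Note the two actuator interfaces in play: this brain and the first one use the generic sendV/sendW (linear and angular velocity) calls, while the PilotNet brain drives CARLA through sendThrottle/sendSteer/sendBrake. A hypothetical sketch of how such calls could map onto the CARLA Python API; the actual actuator wrapper is not part of this diff, so the function below is an assumption for illustration only:

import carla

def send_control(vehicle, throttle, steer, brake):
    # Hypothetical wrapper: forwards the three commands to CARLA's
    # VehicleControl and applies it to the vehicle; not taken from this commit.
    control = carla.VehicleControl(throttle=float(throttle),
                                   steer=float(steer),
                                   brake=float(brake))
    vehicle.apply_control(control)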

behavior_metrics/brains/brains_handler.py

Lines changed: 5 additions & 1 deletion
@@ -39,8 +39,12 @@ def load_brain(self, path, model=None):
         if robot_type == 'f1rl':
             from utils import environment
-            environment.close_gazebo()
+            environment.close_ros_and_simulators()
             exec(open(self.brain_path).read())
+        elif robot_type == 'CARLA':
+            module = importlib.import_module(import_name)
+            Brain = getattr(module, 'Brain')
+            self.active_brain = Brain(self.sensors, self.actuators, handler=self, config=self.config)
         else:
             if import_name in sys.modules: # for reloading sake
                 del sys.modules[import_name]
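The new 'CARLA' branch loads the selected brain class dynamically with the same importlib pattern used elsewhere in the handler. A minimal standalone sketch of that pattern (the module path in the comments is illustrative, not a path taken from the repository):

import importlib

def load_brain_class(import_name, class_name='Brain'):
    # import_name is the dotted module path of the brain file, e.g. something
    # like 'brains.CARLA.my_brain' (hypothetical example).
    module = importlib.import_module(import_name)
    return getattr(module, class_name)

# brain_cls = load_brain_class('brains.CARLA.my_brain')  # hypothetical module path
# active_brain = brain_cls(sensors, actuators, handler=handler, config=config)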
Lines changed: 49 additions & 0 deletions
@@ -0,0 +1,49 @@
<!-- -->
<launch>
  <!-- CARLA connection -->
  <arg name='host' default='localhost'/>
  <arg name='port' default='2000'/>
  <arg name='timeout' default='10'/>

  <!-- Ego vehicle -->
  <arg name='role_name' default='ego_vehicle'/>
  <arg name="vehicle_filter" default='vehicle.*'/>
  <!--<arg name="spawn_point" default=""/>--><!-- use comma separated format "x,y,z,roll,pitch,yaw" -->
  <arg name="spawn_point" default="40.0, 2.0, 1.37, 0.0, 0.0, 180.0"/>
  <!--<arg name="spawn_point" default="-2.0, -10.0, 1.37, 0.0, 0.0, -90.0"/>-->
  <!--<arg name="spawn_point" default="-2.0, -280.0, 1.37, 0.0, 0.0, -90.0"/>-->

  <!-- Map to load on startup (either a predefined CARLA town (e.g. 'Town01'), or a OpenDRIVE map file) -->
  <arg name="town" default='Town01'/>

  <!-- Enable/disable passive mode -->
  <arg name='passive' default=''/>

  <!-- Synchronous mode -->
  <arg name='synchronous_mode_wait_for_vehicle_control_command' default='False'/>
  <arg name='fixed_delta_seconds' default='0.05'/>


  <include file="$(find carla_ros_bridge)/launch/carla_ros_bridge.launch">
    <arg name='host' value='$(arg host)'/>
    <arg name='port' value='$(arg port)'/>
    <arg name='town' value='$(arg town)'/>
    <arg name='timeout' value='$(arg timeout)'/>
    <arg name='passive' value='$(arg passive)'/>
    <arg name='synchronous_mode_wait_for_vehicle_control_command' value='$(arg synchronous_mode_wait_for_vehicle_control_command)'/>
    <arg name='fixed_delta_seconds' value='$(arg fixed_delta_seconds)'/>
  </include>

  <!-- the ego vehicle, that will be controlled by an agent (e.g. carla_ad_agent) -->
  <include file="$(find carla_spawn_objects)/launch/carla_example_ego_vehicle.launch">
    <arg name="objects_definition_file" value='$(find carla_spawn_objects)/config/objects.json'/>
    <arg name='role_name' value='$(arg role_name)'/>
    <arg name="spawn_point_ego_vehicle" value="$(arg spawn_point)"/>
    <arg name="spawn_sensors_only" value="false"/>
  </include>

  <include file="$(find carla_manual_control)/launch/carla_manual_control.launch">
    <arg name='role_name' value='$(arg role_name)'/>
  </include>

</launch>
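This launch file brings up carla_ros_bridge against a CARLA server on localhost:2000, loads Town01, spawns an ego vehicle with role_name 'ego_vehicle' at the given spawn point, and starts carla_manual_control for that vehicle. A small sketch, assuming the server and bridge are already running, for verifying the world and the spawned vehicles from the CARLA Python API before starting one of the brains above:

import carla

client = carla.Client('localhost', 2000)  # matches the 'host' and 'port' launch args
client.set_timeout(10.0)                  # matches the 'timeout' launch arg
world = client.get_world()
print('Loaded map:', world.get_map().name)  # expected to reference Town01

vehicles = world.get_actors().filter('vehicle.*')
print('Vehicles currently in the world:', len(vehicles))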
