CRAIC Target Shooting Practice

Competition Overview

The China Robotics and Artificial Intelligence Competition (CRAIC) is a long-running national discipline competition with broad influence. It has trained a large number of versatile students who can build, innovate, and collaborate, and it is included in the national university discipline competition rankings and the competition analysis report published by the China Association of Higher Education. This project responds to the organizing committee's call to develop students' ability to learn, innovate, and create on their own initiative, to spark enthusiasm for exploring and applying new technologies, and to support the training of students in related majors.

Main Commands

Run from the main (workspace) directory:

Mapping: ./1-gmapping.sh
Save the map: roslaunch robot_slam save_map.launch
Shooting: roslaunch abot_bringup shoot.launch — turret (launcher) driver
rostopic pub /shoot std_msgs/String "data: '' " — publish an empty message on the /shoot topic; the turret then waits and fires
Recognition:
roslaunch usb_cam usb_cam_test.launch — open the camera
rosrun rqt_image_view rqt_image_view — view the camera image
Voice:
Connect the WI-C200 Bluetooth headset (via the system Bluetooth settings)
roscore
rosrun robot_voice tts_subscribe
rostopic pub /voiceWords std_msgs/String "data: '1234' " — a Python equivalent of the two rostopic pub commands is sketched after this list
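For reference, the two rostopic pub commands above can also be issued from Python. This is only a minimal sketch: the /shoot and /voiceWords topic names and the std_msgs/String type come from the commands above, while the node name and timing are arbitrary.

#!/usr/bin/env python
# Minimal sketch: trigger the turret and the TTS node from Python instead of rostopic pub.
import rospy
from std_msgs.msg import String

rospy.init_node('trigger_demo', anonymous=True)
shoot_pub = rospy.Publisher('/shoot', String, queue_size=1)
voice_pub = rospy.Publisher('/voiceWords', String, queue_size=1)
rospy.sleep(1.0)                        # give the publishers time to connect
shoot_pub.publish(String(data=''))      # empty message: trigger the shot
voice_pub.publish(String(data='1234'))  # text for the TTS node to speak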

Start navigation and recognition:
These commands are collected in 3-mission.sh:
roslaunch track_tag usb_cam_with_calibration.launch — start the camera node
roslaunch track_tag ar_track_camera.launch — start the AR tag (QR code) recognition node
rosrun robot_voice tts_subscribe; exec bash — voice announcement node
robot_slam/launch/multi_goal.launch — edit the navigation goal coordinates here (file path)
robot_slam/scripts/navigation_multi_goals.py — edit which goal each tag id drives to (file path); a sketch of this mapping follows the list
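The contents of navigation_multi_goals.py are not reproduced here; as a rough, hypothetical sketch of the id-to-goal mapping it is edited for (the names below are made up, the coordinates are the ones used in the logic file further down):

# Hypothetical sketch of an AR-tag-id -> navigation-goal lookup.
goals = [
    [1.1, -0.37, 0.0],   # goal 0: x (m), y (m), yaw (deg)
    [1.1, -1.45, 0.0],   # goal 1
    [1.0, -2.72, 0.0],   # goal 2
]
id_to_goal = {0: 0, 1: 1, 2: 2}   # tag id -> index into goals

def goal_for_tag(tag_id):
    # Fall back to the first goal if the tag id is unknown
    return goals[id_to_goal.get(tag_id, 0)]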

Shooting-related:
user_demo/param/mission.yaml — edit the shooting goal parameters here (file path)
roslaunch usb_cam usb_cam_test.launch — open the camera
roslaunch find_object_2d find_object_2d6.launch — start the recognition program
rosrun robot_slam III.py — judge the recognition result
rostopic echo /object_position — sample output is shown after this list
Tag tracking (the commands are in 6-mission.sh):
roslaunch track_tag usb_cam_with_calibration.launch
roslaunch track_tag ar_track_camera.launch
rostopic echo /ar_pose_marker
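For reference, /object_position carries a geometry_msgs/Point. Judging from the logic file below, x and y are pixel coordinates (image center at x = 320) and z encodes the recognized object id, with 1 to 5 treated as a valid detection. A rostopic echo /object_position output therefore looks roughly like this (values are illustrative):

x: 352.0
y: 241.5
z: 2.0
---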


Before launching any of the code, plug in the turret's USB serial cable; otherwise the program fails with a "no such serial port /dev/shoot" error. Remember to switch the turret on as well.
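A quick sanity check before launching is to confirm that the /dev/shoot device symlink actually exists (this only verifies the serial device, not the turret's power switch):

ls -l /dev/shoot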



Checking goal coordinates
Before running the navigation.sh script, comment out its last line. Before clicking a goal point in the opened map, run rostopic echo /move_base_simple/goal.
After placing a goal in RViz with the 2D Nav Goal tool, look at the terminal output and read the pose field; it contains the goal's x and y.
Measuring the coordinates directly with a tape measure is faster. The unit is meters; make sure you understand the ROS coordinate frames.
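The echoed /move_base_simple/goal message is a geometry_msgs/PoseStamped; the relevant part of the terminal output looks like this (the values here are placeholders):

pose:
  position:
    x: 1.1
    y: -0.37
    z: 0.0
  orientation:
    x: 0.0
    y: 0.0
    z: 0.0
    w: 1.0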

Build and run:
catkin_make
catkin_make -DCATKIN_WHITELIST_PACKAGES="robot_slam"

source devel/setup.bash
source /opt/ros/melodic/setup.bash


Mission logic script

#!/usr/bin/env python
#coding: utf-8

import rospy
from geometry_msgs.msg import Point, Twist
import threading
import actionlib
import serial
import time
from actionlib_msgs.msg import GoalStatus
from move_base_msgs.msg import MoveBaseAction, MoveBaseGoal
from geometry_msgs.msg import PoseWithCovarianceStamped
from tf_conversions import transformations
from ar_track_alvar_msgs.msg import AlvarMarkers
from math import pi
import subprocess


class Navigation:
    def __init__(self):
        self.set_pose_pub = rospy.Publisher('/initialpose', PoseWithCovarianceStamped, queue_size=5)
        self.move_base = actionlib.SimpleActionClient("move_base", MoveBaseAction)
        self.move_base.wait_for_server(rospy.Duration(60))

    def set_pose(self, p):
        # Publish an initial pose estimate (x, y, yaw in degrees) on /initialpose
        if self.move_base is None:
            return False
        x, y, th = p
        pose = PoseWithCovarianceStamped()
        pose.header.stamp = rospy.Time.now()
        pose.header.frame_id = 'map'
        pose.pose.pose.position.x = x
        pose.pose.pose.position.y = y
        q = transformations.quaternion_from_euler(0.0, 0.0, th/180.0*pi)
        pose.pose.pose.orientation.x = q[0]
        pose.pose.pose.orientation.y = q[1]
        pose.pose.pose.orientation.z = q[2]
        pose.pose.pose.orientation.w = q[3]
        self.set_pose_pub.publish(pose)
        return True

    def _feedback_cb(self, feedback):
        msg = feedback
        #rospy.loginfo("[Navi] navigation feedback\r\n%s"%feedback)

    def goto(self, p):
        # Send a move_base goal (x, y, yaw in degrees) and wait up to 60 s for the result
        rospy.loginfo("[Navigation] goto %s"%p)
        goal = MoveBaseGoal()
        goal.target_pose.header.frame_id = 'map'
        goal.target_pose.header.stamp = rospy.Time.now()
        goal.target_pose.pose.position.x = p[0]
        goal.target_pose.pose.position.y = p[1]
        q = transformations.quaternion_from_euler(0.0, 0.0, p[2]/180.0*pi)
        goal.target_pose.pose.orientation.x = q[0]
        goal.target_pose.pose.orientation.y = q[1]
        goal.target_pose.pose.orientation.z = q[2]
        goal.target_pose.pose.orientation.w = q[3]
        self.move_base.send_goal(goal, self._done_cb, self._active_cb, self._feedback_cb)
        result = self.move_base.wait_for_result(rospy.Duration(60))
        if not result:
            self.move_base.cancel_goal()
            rospy.loginfo("Timed out achieving goal")
        else:
            state = self.move_base.get_state()
            if state == GoalStatus.SUCCEEDED:
                rospy.loginfo("reach goal %s succeeded!"%p)
        return True

    def _done_cb(self, status, result):
        rospy.loginfo("navigation done! status:%d result:%s"%(status, result))

    def _active_cb(self):
        rospy.loginfo("[Navi] navigation has be actived")

    def cancel(self):
        self.move_base.cancel_all_goals()
        return True


class ARTracker:
    # Stores the pose of the AR tag whose id matches the global target_id
    def __init__(self):
        self.ar_sub = rospy.Subscriber('/ar_pose_marker', AlvarMarkers, self.ar_cb)

    def ar_cb(self, data):
        global target_id
        global ar_x
        global ar_y
        global ar_z
        global ar_id
        for marker in data.markers:
            if marker.id == target_id:
                ar_x = marker.pose.pose.position.x
                ar_y = marker.pose.pose.position.y
                ar_z = marker.pose.pose.position.z
                ar_id = marker.id
                #print('AR Marker Position (x,y,z):', ar_x, ar_y, ar_z)
                break


class Object_position:
    # Stores the pixel position of the recognized object published on /object_position
    def __init__(self):
        self.ar_sub = rospy.Subscriber('/object_position', Point, self.find_cb)

    def find_cb(self, data):
        global find_id
        global find_x
        global find_y
        point_msg = data
        # z encodes the recognized object id; 1..5 counts as a valid detection
        if point_msg.z >= 1 and point_msg.z <= 5:
            find_id = 1
            find_x = point_msg.x
            find_y = point_msg.y
        else:
            find_id = 0


def process():
    rospy.spin()


find_id = 0
find_x = 0.0
find_y = 0.0
target_id = 0
ar_id = 0
ar_x = 0.0
ar_y = 0.0
ar_z = 0.0

if __name__ == '__main__':

    rospy.init_node('navigation_demo', anonymous=True)
    # Turret serial port (requires the /dev/shoot udev symlink, see the note above)
    ser = serial.Serial(port="/dev/shoot", baudrate=9600, parity="N", bytesize=8, stopbits=1)
    pub = rospy.Publisher("/cmd_vel", Twist, queue_size=1000)

    # Navigation goals: x (m), y (m), yaw (degrees)
    goals = [[1.1, -0.37, 0.0],
             [1.1, -1.45, 0.0],
             [1.0, -2.72, 0.0],
             [0.07, -2.72, 0.0]]

    object_position = Object_position()
    ar_tracker = ARTracker()

    # Run the subscriber callbacks in a background thread
    executor_thread = threading.Thread(target=process)
    executor_thread.start()

    navi = Navigation()

    time.sleep(15)

    # ------ first target: colored target, aimed with the /object_position pixel offset ------
    navi.goto(goals[0])
    start = time.time()
    is_shoot = 0
    while True:
        if find_id == 1:
            # Offset of the target from the image center (320 px)
            flog0 = find_x - 320
            flog1 = abs(flog0)
            print(flog0)
            if abs(flog1) > 10:
                # Still off-center: keep rotating toward the target
                msg = Twist()
                msg.angular.z = -0.01 * flog0
                pub.publish(msg)
                print(msg.angular.z)
            elif abs(flog1) <= 10:
                print('1 is shoot')
                # Fire, then send the stop command
                ser.write(b'\x55\x01\x12\x00\x00\x00\x01\x69')
                time.sleep(0.2)
                ser.write(b'\x55\x01\x11\x00\x00\x00\x01\x68')
                time.sleep(2)
                is_shoot = 1
                break
        end = time.time()
        if end - start > 12:
            break

    # Fire anyway if the target was never centered within the time limit
    if is_shoot == 0:
        ser.write(b'\x55\x01\x12\x00\x00\x00\x01\x69')
        time.sleep(0.2)
        ser.write(b'\x55\x01\x11\x00\x00\x00\x01\x68')
        time.sleep(2)

    #subprocess.run(['rosnode', 'kill', 'find_object_2d'])

    # ------ second target: AR tag 1 ------
    navi.goto(goals[1])
    target_id = 1
    Yaw_th = 0.003
    start = time.time()
    is_shoot = 0
    while True:
        if ar_id == target_id:
            ar_x_abs = abs(ar_x)
            print('x:', ar_x)
            print('z:', ar_z)
            if ar_x_abs >= Yaw_th:
                msg = Twist()
                msg.angular.z = -1.5 * ar_x
                pub.publish(msg)
            # Fire only when aligned and the tag's z coordinate is inside a narrow band
            elif ar_x_abs < Yaw_th and (ar_z >= 1.57 and ar_z <= 1.64):
                print('2 is shoot')
                ser.write(b'\x55\x01\x12\x00\x00\x00\x01\x69')
                time.sleep(0.1)
                ser.write(b'\x55\x01\x11\x00\x00\x00\x01\x68')
                is_shoot = 1
                break
        end = time.time()
        if end - start > 20:
            break

    if is_shoot == 0:
        ser.write(b'\x55\x01\x12\x00\x00\x00\x01\x69')
        time.sleep(0.2)
        ser.write(b'\x55\x01\x11\x00\x00\x00\x01\x68')
        time.sleep(2)

    # ------ third target: AR tag 2 ------
    navi.goto(goals[2])
    target_id = 2
    Yaw_th = 0.002
    start = time.time()
    is_shoot = 0
    while True:
        if ar_id == target_id:
            ar_x_abs = abs(ar_x)
            print(ar_x)
            if ar_x_abs >= Yaw_th:
                msg = Twist()
                msg.angular.z = -1.5 * ar_x
                pub.publish(msg)
            elif ar_x_abs < Yaw_th:
                print('3 is shoot')
                ser.write(b'\x55\x01\x12\x00\x00\x00\x01\x69')
                time.sleep(0.1)
                ser.write(b'\x55\x01\x11\x00\x00\x00\x01\x68')
                is_shoot = 1
                break

        end = time.time()
        if end - start > 12:
            break

    if is_shoot == 0:
        ser.write(b'\x55\x01\x12\x00\x00\x00\x01\x69')
        time.sleep(0.2)
        ser.write(b'\x55\x01\x11\x00\x00\x00\x01\x68')
        time.sleep(2)

    # ------ final goal ------
    navi.goto(goals[3])
    #slowly

    #rclpy.shutdown()


AR Tag (QR Code) Recognition

The AR tag recognition script is at abot_ws/src/robot_slam/scripts/ar_demo_s.py:

#!/usr/bin/env python
# -*- coding: utf-8 -*-
import rospy
from ar_track_alvar_msgs.msg import AlvarMarkers

class ARTracker:
    def __init__(self):
        # Initialize the ROS node 'ar_tracker_node' as an anonymous node
        rospy.init_node('ar_tracker_node', anonymous=True)
        # Subscribe to the AR marker topic (AlvarMarkers); ar_cb handles the messages
        self.sub = rospy.Subscriber('/ar_pose_marker', AlvarMarkers, self.ar_cb)

        # Initialize the marker's x and y coordinates
        self.ar_x_0 = 0.0
        self.ar_y_0 = 0.0
        # Initialize the marker ID
        self.id = None

    # Callback for AR marker messages
    def ar_cb(self, data):
        # Iterate over all detected AR markers
        for marker in data.markers:
            # Only handle the marker with ID 0
            if marker.id == 0:
                # Update the marker's x and y coordinates
                self.ar_x_0 = marker.pose.pose.position.x
                self.ar_y_0 = marker.pose.pose.position.y
                # Update the marker ID
                self.id = marker.id

                # Print the detected marker's ID and position
                print('Detected AR Marker ID:', self.id)
                print('AR Marker Position (x,y):', self.ar_x_0, self.ar_y_0)

if __name__ == '__main__':
    try:
        # Create the ARTracker object
        ar_tracker = ARTracker()
        # Enter the ROS event loop
        rospy.spin()
    except rospy.ROSInterruptException:
        pass

AR Tag Shooting

The AR tag shooting script is at abot_ws/src/robot_slam/scripts/ar_shoot_demo.py:

#!/usr/bin/env python
# -*- coding: utf-8 -*-
# The shebang above lets the OS run this file directly as a Python script

import rospy
from ar_track_alvar_msgs.msg import AlvarMarkers
from geometry_msgs.msg import Twist

# Yaw (aiming) threshold
Yaw_th = 0.0045

class ARTracker:
    def __init__(self):
        # Initialize the ROS node 'ar_tracker_node' as an anonymous node
        rospy.init_node('ar_tracker_node', anonymous=True)
        # Subscribe to the AR marker topic (AlvarMarkers); ar_cb handles the messages
        self.sub = rospy.Subscriber('/ar_pose_marker', AlvarMarkers, self.ar_cb)
        # Publisher for Twist messages on /cmd_vel
        self.pub = rospy.Publisher("/cmd_vel", Twist, queue_size=1000)

    # Callback for AR marker messages
    def ar_cb(self, data):
        global ar_x, ar_x_abs, Yaw_th
        # All received AR markers
        ar_markers = data
        # Iterate over all detected AR markers
        for marker in data.markers:
            # Only handle the marker with ID 0
            if marker.id == 0:
                # x coordinate of the marker
                ar_x = marker.pose.pose.position.x
                # Absolute value of the x coordinate
                ar_x_abs = abs(ar_x)
                # Still off-center by more than the yaw threshold
                if ar_x_abs >= Yaw_th:
                    # Rotate toward the marker; angular velocity proportional to the offset
                    msg = Twist()
                    msg.angular.z = -1.5 * ar_x
                    # Publish the Twist message
                    self.pub.publish(msg)
                # Within the yaw threshold: the robot is aligned with the marker
                elif ar_x_abs < Yaw_th:
                    print("ok")

if __name__ == '__main__':
    try:
        # Create the ARTracker object
        ar_tracker = ARTracker()
        # Enter the ROS event loop
        rospy.spin()
    except rospy.ROSInterruptException:
        pass



Image Recognition

The image recognition script is at abot_ws/src/robot_slam/scripts/find_demo_s.py:

#!/usr/bin/env python
# -*- coding: utf-8 -*-
# The shebang above lets the OS run this file directly as a Python script

import rospy
from geometry_msgs.msg import Point

class object_position:
    def __init__(self):
        # Initialize the ROS node 'object_position_node' as an anonymous node
        rospy.init_node('object_position_node', anonymous=True)
        # Subscribe to /object_position (Point); find_cb handles the messages
        self.find_sub = rospy.Subscriber('/object_position', Point, self.find_cb)

    # Callback for the /object_position topic
    def find_cb(self, data):
        # The received Point message
        point_msg = data
        # Print the point's x coordinate
        print('x:', point_msg.x)
        # Print the point's y coordinate
        print('y:', point_msg.y)
        # Print the point's z coordinate
        print('z:', point_msg.z)

if __name__ == '__main__':
    try:
        # Create the object_position node
        object_position = object_position()
        # Enter the ROS event loop
        rospy.spin()
    except rospy.ROSInterruptException:
        pass



Image Recognition Shooting

The image recognition shooting script is at abot_ws/src/robot_slam/scripts/find_shoot_demo.py:

#!/usr/bin/env python
# -*- coding: utf-8 -*-
# The shebang above lets the OS run this file directly as a Python script

import rospy
from geometry_msgs.msg import Point, Twist
import serial
import time
from std_msgs.msg import String

# Serial port and baud rate
serialPort = "/dev/shoot"
baudRate = 9600

# Open the serial port
ser = serial.Serial(port=serialPort, baudrate=baudRate, parity="N", bytesize=8, stopbits=1)

class object_position:
    def __init__(self):
        # Initialize the ROS node 'object_position_node' as an anonymous node
        rospy.init_node('object_position_node', anonymous=True)
        # Subscribe to /object_position (Point); find_cb handles the messages
        self.find_sub = rospy.Subscriber('/object_position', Point, self.find_cb)
        # Publisher for Twist messages on /cmd_vel
        self.pub = rospy.Publisher("/cmd_vel", Twist, queue_size=1000)

    # Callback for the /object_position topic
    def find_cb(self, data):
        global flog0, flog1
        # The received Point message
        point_msg = data
        # Offset of the target from the image center (320 px)
        flog0 = point_msg.x - 320
        # Absolute value of the offset
        flog1 = abs(flog0)
        # Offset larger than 0.5: keep turning toward the target
        if abs(flog1) > 0.5:
            # Build a Twist message
            msg = Twist()
            # Angular velocity proportional to the offset (gain 0.01)
            msg.angular.z = -0.01 * flog0
            # Publish the Twist message
            self.pub.publish(msg)
        # Offset within 0.5: the target is centered, so fire
        elif abs(flog1) <= 0.5:
            # Send the fire command
            ser.write(b'\x55\x01\x12\x00\x00\x00\x01\x69')
            print('打印射击')
            # Wait briefly (0.08 s)
            time.sleep(0.08)
            # Send the stop command
            ser.write(b'\x55\x01\x11\x00\x00\x00\x01\x68')

if __name__ == '__main__':
    try:
        # Create the object_position node
        object_position = object_position()
        # Enter the ROS event loop
        rospy.spin()
    except rospy.ROSInterruptException:
        pass



Robot Movement, Method 1

Continuous robot movement; the script is at abot_ws/src/robot_slam/scripts/move_demo.py:

#!/usr/bin/env python
#coding: utf-8
# The two lines above mark this as a directly runnable Python script using UTF-8

import rospy
from geometry_msgs.msg import Twist

# Drive the robot at a constant linear and angular velocity
def move_robot(linear_x, angular_z):
    # Initialize the ROS node 'move_robot_node' as an anonymous node
    rospy.init_node('move_robot_node', anonymous=True)
    # Publisher for Twist messages on /cmd_vel
    velocity_publisher = rospy.Publisher('/cmd_vel', Twist, queue_size=10)
    # Publish at 10 Hz
    rate = rospy.Rate(10)

    # Build the Twist message with the requested velocities
    vel_msg = Twist()
    vel_msg.linear.x = linear_x
    vel_msg.angular.z = angular_z

    # Keep publishing until the node is shut down
    while not rospy.is_shutdown():
        velocity_publisher.publish(vel_msg)
        rate.sleep()

if __name__ == '__main__':
    try:
        # Linear and angular velocity
        linear_x = 0.2   # linear velocity
        angular_z = 0.5  # angular velocity
        # Drive the robot
        move_robot(linear_x, angular_z)
    except rospy.ROSInterruptException:
        pass


Robot Movement, Method 2

Move the robot for a fixed duration; the script is at abot_ws/src/robot_slam/scripts/move_robot.py:

#!/usr/bin/env python
#coding: utf-8
# The two lines above mark this as a directly runnable Python script using UTF-8

import rospy
from geometry_msgs.msg import Twist

class move_robot:
    def __init__(self):
        # Initialize the ROS node 'move_robot_node' as an anonymous node
        rospy.init_node('move_robot_node', anonymous=True)
        # Publisher for Twist messages on /cmd_vel
        self.pub = rospy.Publisher("/cmd_vel", Twist, queue_size=1000)

    # Drive the robot for a fixed duration
    def move_cb(self):
        global time
        # Loop counter
        time = 0
        # Twist message: drive forward, no rotation
        msg = Twist()
        msg.linear.x = 1.0
        msg.linear.y = 0.0
        msg.linear.z = 0.0
        msg.angular.x = 0.0
        msg.angular.y = 0.0
        msg.angular.z = 0.0
        # Publish for 10 * 0.1 s = 1 second
        while time < 10:
            self.pub.publish(msg)
            rospy.sleep(0.1)
            time += 1

if __name__ == '__main__':
    try:
        # Create the move_robot object
        move = move_robot()
        # Drive the robot
        move.move_cb()
    except rospy.ROSInterruptException:
        pass



Shooting Implementation

A standalone shooting script, at abot_ws/src/robot_slam/scripts/shoot_demo.py:

#!/usr/bin/env python
# -*- coding: utf-8 -*-
# The shebang above specifies the Python interpreter so the script can be run directly
import rospy
import serial
import time
from std_msgs.msg import String

# Serial port and baud rate
serialPort = "/dev/shoot"
baudRate = 9600

# Open the serial port
ser = serial.Serial(port=serialPort, baudrate=baudRate, parity="N", bytesize=8, stopbits=1)

if __name__ == '__main__':
    try:
        # Send the fire command
        ser.write(b'\x55\x01\x12\x00\x00\x00\x01\x69')
        print('打印射击')
        # Wait briefly (0.08 s)
        time.sleep(0.08)
        # Send the stop command
        ser.write(b'\x55\x01\x11\x00\x00\x00\x01\x68')
        # Keep the node alive
        rospy.spin()
    except:
        pass


Navigation Utilities

There are a few navigation helper files from the autonomous cruising track that we can borrow; they live in abot_ws/src/robot_slam/scripts/:

#!/usr/bin/env python

""" A couple of handy conversion utilities taken from the turtlebot_node.py script found in the
    turtlebot_node ROS package at:

    http://www.ros.org/wiki/turtlebot_node

"""

import PyKDL
from math import pi

def quat_to_angle(quat):
    rot = PyKDL.Rotation.Quaternion(quat.x, quat.y, quat.z, quat.w)
    return rot.GetRPY()[2]

def normalize_angle(angle):
    res = angle
    while res > pi:
        res -= 2.0 * pi
    while res < -pi:
        res += 2.0 * pi
    return res
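A quick usage sketch for these two helpers; the quaternion below is arbitrary (any object with x, y, z, w fields, such as a geometry_msgs/Quaternion, works):

from geometry_msgs.msg import Quaternion
from math import pi

q = Quaternion(x=0.0, y=0.0, z=0.7071, w=0.7071)  # roughly a 90-degree yaw
yaw = quat_to_angle(q)                            # approximately pi / 2
wrapped = normalize_angle(yaw + 2.5 * pi)         # wrapped back into (-pi, pi]
print(yaw, wrapped)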

Navigation Goal Tuning

Tune the shooting thresholds and the navigation goals together with the logic file; the goals are adjusted as follows.
The goal file is abot_ws\src\robot_slam\launch\multi_goal_shoot.launch. Note that this launch file was derived from multi_goal.launch, so remember to reference it in the startup script. A filled-in example follows the template below.

<launch>
  <node name="multi_goal" pkg="robot_slam" type="your_logic_script" output="screen">
    <param name="goalListX" value="goal1_X, goal2_X, ..."/>
    <param name="goalListY" value="goal1_Y, goal2_Y, ..."/>
    <param name="goalListYaw" value="goal1_yaw, goal2_yaw, ..."/>
  </node>
</launch>
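For illustration, here is the same template filled in with the four goal coordinates used in the logic file above; the yaw values are placeholders, and type must be the file name of your own logic script (navigation_multi_goals.py is used here only as an example):

<launch>
  <node name="multi_goal" pkg="robot_slam" type="navigation_multi_goals.py" output="screen">
    <param name="goalListX" value="1.1, 1.1, 1.0, 0.07"/>
    <param name="goalListY" value="-0.37, -1.45, -2.72, -2.72"/>
    <param name="goalListYaw" value="0.0, 0.0, 0.0, 0.0"/>
  </node>
</launch>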



Shooting Point Tuning

Target A (the colored target at point A):

# Target still off-center by more than 0.5: keep rotating toward it
if abs(flog1) > 0.5:
    # Build a Twist message
    msg = Twist()
    # Angular velocity proportional to the offset (gain 0.01)
    msg.angular.z = -0.01 * flog0
    # Publish the Twist message
    self.pub.publish(msg)
# Offset within 0.5: the target is centered
elif abs(flog1) <= 0.5:
    # Send the fire command
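The "send the fire command" placeholder here and in the B and C snippets below stands for the same serial write pair used in shoot_demo.py above. Assuming ser and time are set up as in find_shoot_demo.py, the branch would end roughly like this:

elif abs(flog1) <= 0.5:
    # Fire, pause briefly, then send the stop command
    ser.write(b'\x55\x01\x12\x00\x00\x00\x01\x69')
    time.sleep(0.08)
    ser.write(b'\x55\x01\x11\x00\x00\x00\x01\x68')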

Target B is a rotating ring target. The parts to tune are marker.id (which target to shoot), Max_y (maximum target height at which to fire), Min_y (minimum target height), and Yaw_th (aiming precision at point B):

if marker.id == 1 and case == 1:
    ar_x_0 = marker.pose.pose.position.x
    ar_y_0 = marker.pose.pose.position.y
    ar_x_0_abs = abs(ar_x_0)
    ar_y_0_abs = abs(ar_y_0)
    # Still off-center: keep rotating toward the tag
    if ar_x_0_abs >= Yaw_th:
        msg = Twist()
        msg.angular.z = -1.5 * ar_x_0
        self.pub.publish(msg)
    # Aligned in yaw and the tag height is inside the firing window
    elif ar_y_0_abs <= Max_y and ar_y_0_abs >= Min_y and case == 1:
        # Send the fire command

Target C is a back-and-forth moving target. The parts to tune are marker.id (which target to shoot) and Yaw_th1 (aiming precision at point C):

if marker.id == 0 and case == 2:
    ar_x_0 = marker.pose.pose.position.x
    ar_y_0 = marker.pose.pose.position.y
    ar_x_0_abs = abs(ar_x_0)
    # Still off-center: keep rotating toward the tag
    if ar_x_0_abs >= Yaw_th1:
        msg = Twist()
        msg.angular.z = -1.5 * ar_x_0
        self.pub.publish(msg)
    # Aligned within the C-point precision threshold
    elif ar_x_0_abs < Yaw_th1 and case == 2:
        # Send the fire command



Robot Footprint and Inflation Radius Tuning

The robot's costmap parameter file is "abot_ws\src\robot_slam\params\carto\costmap_common_params.yaml":

obstacle_range: 3.0
raytrace_range: 3.5

footprint: [[X1, Y1], [X2, Y2], [X3, Y3], [X4, Y4]]  # robot footprint vertices (currently a square)
#robot_radius: 0.17

inflation_radius: <inflation radius>
cost_scaling_factor: 3.0

map_type: costmap
observation_sources: scan
scan: {sensor_frame: laser_link, data_type: LaserScan, topic: scan_filtered, marking: true, clearing: true}
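As an illustration only (these are not the competition values), a 0.30 m square footprint with a moderate inflation radius would look like this:

footprint: [[-0.15, -0.15], [-0.15, 0.15], [0.15, 0.15], [0.15, -0.15]]
inflation_radius: 0.2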


TF Tuning

The TF transform file is located at "abot_ws\src\abot_base\abot_bringup\launch\bringup_with_imu.launch".
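Static transforms in a launch file like this are commonly published with tf's static_transform_publisher; a hypothetical example of the kind of node entry you would adjust (the frame names and offsets below are illustrative, not taken from the actual file):

<node pkg="tf" type="static_transform_publisher" name="base_to_laser"
      args="0.10 0.0 0.15 0.0 0.0 0.0 base_link laser_link 20"/>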