-
Notifications
You must be signed in to change notification settings - Fork 713
Expand file tree
/
Copy pathutils.py
More file actions
214 lines (178 loc) · 8.19 KB
/
utils.py
File metadata and controls
214 lines (178 loc) · 8.19 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
# nuScenes dev-kit.
# Code written by Holger Caesar, 2018.
from typing import List, Dict, Any
import numpy as np
from pyquaternion import Quaternion
from shapely import affinity
from shapely.geometry import Polygon
from nuscenes.eval.common.data_classes import EvalBox
from nuscenes.utils.data_classes import Box
DetectionBox = Any # Workaround as direct imports lead to cyclic dependencies.
def create_2d_polygon_from_box(bbox: EvalBox) -> Polygon:
    """
    Convert an EvalBox into a 2D Polygon.
    :param bbox: An EvalBox describing center, rotation and size.
    :return: A 2D Polygon describing the xy vertices.
    """
    half_l = 0.5 * bbox.size[0]
    half_w = 0.5 * bbox.size[1]
    # Box corners in the vehicle frame, as a closed ring (first vertex repeated).
    corners = (
        (half_l, half_w),
        (-half_l, half_w),
        (-half_l, -half_w),
        (half_l, -half_w),
        (half_l, half_w),
    )
    # Rotate by the box yaw, then translate to the global xy position.
    rotated = affinity.rotate(Polygon(corners), quaternion_yaw(Quaternion(bbox.rotation)), use_radians=True)
    return affinity.translate(rotated, bbox.translation[0], bbox.translation[1])
def bev_iou(gt_poly: Polygon, pred_poly: Polygon) -> float:
    """
    Birds Eye View IOU percentage between two input polygons (xy only).
    :param gt_poly: GT annotation sample.
    :param pred_poly: Predicted sample.
    :return: IOU.
    """
    overlap = gt_poly.intersection(pred_poly).area
    union = gt_poly.area + pred_poly.area - overlap
    # Clamp to 1.0 to guard against machine precision (i.e. perfect overlap).
    return min(overlap / union, 1.0)
def bev_iou_complement(gt_box: EvalBox, pred_box: EvalBox) -> float:
    """
    1 - BEV_IOU percentage between two input boxes (xy only).
    :param gt_box: GT annotation sample.
    :param pred_box: Predicted sample.
    :return: 1 - IOU.
    """
    # Cheap first pass before the polygon math: if the circles that enclose the
    # two boxes do not overlap, the boxes cannot intersect and the IOU is zero.
    gt_radius = 0.5 * np.hypot(gt_box.size[0], gt_box.size[1])
    pred_radius = 0.5 * np.hypot(pred_box.size[0], pred_box.size[1])
    if center_distance(gt_box, pred_box) >= gt_radius + pred_radius:
        return 1.0
    return 1.0 - bev_iou(create_2d_polygon_from_box(gt_box),
                         create_2d_polygon_from_box(pred_box))
def center_distance(gt_box: EvalBox, pred_box: EvalBox) -> float:
"""
L2 distance between the box centers (xy only).
:param gt_box: GT annotation sample.
:param pred_box: Predicted sample.
:return: L2 distance.
"""
return np.linalg.norm(np.array(pred_box.translation[:2]) - np.array(gt_box.translation[:2]))
def velocity_l2(gt_box: EvalBox, pred_box: EvalBox) -> float:
"""
L2 distance between the velocity vectors (xy only).
If the predicted velocities are nan, we return inf, which is subsequently clipped to 1.
:param gt_box: GT annotation sample.
:param pred_box: Predicted sample.
:return: L2 distance.
"""
return np.linalg.norm(np.array(pred_box.velocity) - np.array(gt_box.velocity))
def yaw_diff(gt_box: EvalBox, eval_box: EvalBox, period: float = 2*np.pi) -> float:
    """
    Returns the yaw angle difference between the orientation of two boxes.
    :param gt_box: Ground truth box.
    :param eval_box: Predicted box.
    :param period: Periodicity in radians for assessing angle difference.
    :return: Yaw angle difference in radians in [0, pi].
    """
    # Extract the heading of each box and report the unsigned smallest difference.
    gt_yaw = quaternion_yaw(Quaternion(gt_box.rotation))
    est_yaw = quaternion_yaw(Quaternion(eval_box.rotation))
    return abs(angle_diff(gt_yaw, est_yaw, period))
def angle_diff(x: float, y: float, period: float) -> float:
    """
    Get the smallest angle difference between 2 angles: the angle from y to x.
    :param x: To angle.
    :param y: From angle.
    :param period: Periodicity in radians for assessing angle difference.
    :return: <float>. Signed smallest between-angle difference in range (-pi, pi).
    """
    half = period / 2
    # Wrap the raw difference into [-period/2, period/2).
    diff = (x - y + half) % period - half
    # For periods larger than 2*pi the wrapped value can still exceed pi;
    # shift (pi, 2*pi] down to (-pi, 0] in that case.
    if diff > np.pi:
        diff -= 2 * np.pi
    return diff
def attr_acc(gt_box: DetectionBox, pred_box: DetectionBox) -> float:
"""
Computes the classification accuracy for the attribute of this class (if any).
If the GT class has no attributes or the annotation is missing attributes, we assign an accuracy of nan, which is
ignored later on.
:param gt_box: GT annotation sample.
:param pred_box: Predicted sample.
:return: Attribute classification accuracy (0 or 1) or nan if GT annotation does not have any attributes.
"""
if gt_box.attribute_name == '':
# If the class does not have attributes or this particular sample is missing attributes, return nan, which is
# ignored later. Note that about 0.4% of the sample_annotations have no attributes, although they should.
acc = np.nan
else:
# Check that label is correct.
acc = float(gt_box.attribute_name == pred_box.attribute_name)
return acc
def scale_iou(sample_annotation: EvalBox, sample_result: EvalBox) -> float:
"""
This method compares predictions to the ground truth in terms of scale.
It is equivalent to intersection over union (IOU) between the two boxes in 3D,
if we assume that the boxes are aligned, i.e. translation and rotation are considered identical.
:param sample_annotation: GT annotation sample.
:param sample_result: Predicted sample.
:return: Scale IOU.
"""
# Validate inputs.
sa_size = np.array(sample_annotation.size)
sr_size = np.array(sample_result.size)
assert all(sa_size > 0), 'Error: sample_annotation sizes must be >0.'
assert all(sr_size > 0), 'Error: sample_result sizes must be >0.'
# Compute IOU.
min_wlh = np.minimum(sa_size, sr_size)
volume_annotation = np.prod(sa_size)
volume_result = np.prod(sr_size)
intersection = np.prod(min_wlh) # type: float
union = volume_annotation + volume_result - intersection # type: float
iou = intersection / union
return iou
def quaternion_yaw(q: Quaternion) -> float:
    """
    Calculate the yaw angle from a quaternion.
    Note that this only works for a quaternion that represents a box in lidar or global coordinate frame.
    It does not work for a box in the camera frame.
    :param q: Quaternion of interest.
    :return: Yaw angle in radians.
    """
    # Rotate the x-axis unit vector and keep only its xy projection.
    forward = np.dot(q.rotation_matrix, np.array([1, 0, 0]))
    # The heading is the angle of that projected vector in the xy plane.
    return np.arctan2(forward[1], forward[0])
def boxes_to_sensor(boxes: List[EvalBox], pose_record: Dict, cs_record: Dict):
    """
    Map boxes from global coordinates to the vehicle's sensor coordinate system.
    :param boxes: The boxes in global coordinates.
    :param pose_record: The pose record of the vehicle at the current timestamp.
    :param cs_record: The calibrated sensor record of the sensor.
    :return: The transformed boxes.
    """
    transformed = []
    for eval_box in boxes:
        # Wrap the EvalBox in a full Box instance so it can be transformed.
        box = Box(eval_box.translation, eval_box.size, Quaternion(eval_box.rotation))

        # Global frame -> ego vehicle frame.
        box.translate(-np.array(pose_record['translation']))
        box.rotate(Quaternion(pose_record['rotation']).inverse)

        # Ego vehicle frame -> sensor frame.
        box.translate(-np.array(cs_record['translation']))
        box.rotate(Quaternion(cs_record['rotation']).inverse)

        transformed.append(box)
    return transformed
def cummean(x: np.array) -> np.array:
    """
    Computes the cumulative mean up to each position in a NaN sensitive way.
    - If all values are NaN, return an array of ones.
    - If some values are NaN, accumulate while discarding those entries.
    :param x: 1D array of values, possibly containing NaNs.
    :return: Array of the same length with the cumulative mean at each position.
    """
    nan_mask = np.isnan(x)
    if nan_mask.all():
        # All errors are NaN: set the error to 1 for all operating points.
        return np.ones(len(x))
    # Cumulative sum that treats NaNs as zero, paired with the running count
    # of valid (non-NaN) entries up to each position.
    running_sum = np.nancumsum(x.astype(float))
    running_count = np.cumsum(~nan_mask)
    # Positions before the first valid entry have count 0; emit 0 there instead of dividing.
    return np.divide(running_sum, running_count, out=np.zeros_like(running_sum), where=running_count != 0)