Open3D (C++ API)  0.17.0
RGBDOdometryJacobianImpl.h
1// ----------------------------------------------------------------------------
2// - Open3D: www.open3d.org -
3// ----------------------------------------------------------------------------
4// Copyright (c) 2018-2023 www.open3d.org
5// SPDX-License-Identifier: MIT
6// ----------------------------------------------------------------------------
7
8// Private header. Do not include in Open3d.h.
9
10#include "open3d/core/Tensor.h"
11#include "open3d/t/geometry/kernel/GeometryIndexer.h"
13
14namespace open3d {
15namespace t {
16namespace pipelines {
17namespace kernel {
18namespace odometry {
19
20using t::geometry::kernel::NDArrayIndexer;
21using t::geometry::kernel::TransformIndexer;
22
23#ifndef __CUDACC__
24using std::abs;
25using std::isnan;
26using std::max;
27#endif
28
29inline OPEN3D_HOST_DEVICE float HuberDeriv(float r, float delta) {
30 float abs_r = abs(r);
31 return abs_r < delta ? r : delta * Sign(r);
32}
33
34inline OPEN3D_HOST_DEVICE float HuberLoss(float r, float delta) {
35 float abs_r = abs(r);
36 return abs_r < delta ? 0.5 * r * r : delta * abs_r - 0.5 * delta * delta;
37}
38
39inline OPEN3D_HOST_DEVICE bool GetJacobianPointToPoint(
40 int x,
41 int y,
42 const float square_dist_thr,
43 const NDArrayIndexer& source_vertex_indexer,
44 const NDArrayIndexer& target_vertex_indexer,
45 const TransformIndexer& ti,
46 float* J_x,
47 float* J_y,
48 float* J_z,
49 float& rx,
50 float& ry,
51 float& rz) {
52 float* source_v = source_vertex_indexer.GetDataPtr<float>(x, y);
53 if (isnan(source_v[0])) {
54 return false;
55 }
56
57 // Transform source points to the target camera's coordinate space.
58 float T_source_to_target_v[3], u, v;
59 ti.RigidTransform(source_v[0], source_v[1], source_v[2],
60 &T_source_to_target_v[0], &T_source_to_target_v[1],
61 &T_source_to_target_v[2]);
62 ti.Project(T_source_to_target_v[0], T_source_to_target_v[1],
63 T_source_to_target_v[2], &u, &v);
64 u = roundf(u);
65 v = roundf(v);
66
67 if (T_source_to_target_v[2] < 0 ||
68 !target_vertex_indexer.InBoundary(u, v)) {
69 return false;
70 }
71
72 int ui = static_cast<int>(u);
73 int vi = static_cast<int>(v);
74 float* target_v = target_vertex_indexer.GetDataPtr<float>(ui, vi);
75 if (isnan(target_v[0])) {
76 return false;
77 }
78
79 rx = (T_source_to_target_v[0] - target_v[0]);
80 ry = (T_source_to_target_v[1] - target_v[1]);
81 rz = (T_source_to_target_v[2] - target_v[2]);
82 float r2 = rx * rx + ry * ry + rz * rz;
83 if (r2 > square_dist_thr) {
84 return false;
85 }
86
87 // Residual r = T * s - t; Jacobian rows w.r.t. the se(3) increment (wx, wy, wz, tx, ty, tz).
88 J_x[0] = J_x[4] = J_x[5] = 0.0;
89 J_x[1] = T_source_to_target_v[2];
90 J_x[2] = -T_source_to_target_v[1];
91 J_x[3] = 1.0;
92
93 J_y[1] = J_y[3] = J_y[5] = 0.0;
94 J_y[0] = -T_source_to_target_v[2];
95 J_y[2] = T_source_to_target_v[0];
96 J_y[4] = 1.0;
97
98 J_z[2] = J_z[3] = J_z[4] = 0.0;
99 J_z[0] = T_source_to_target_v[1];
100 J_z[1] = -T_source_to_target_v[0];
101 J_z[5] = 1.0;
102
103 return true;
104}
105
106inline OPEN3D_HOST_DEVICE bool GetJacobianPointToPlane(
107 int x,
108 int y,
109 const float depth_outlier_trunc,
110 const NDArrayIndexer& source_vertex_indexer,
111 const NDArrayIndexer& target_vertex_indexer,
112 const NDArrayIndexer& target_normal_indexer,
113 const TransformIndexer& ti,
114 float* J_ij,
115 float& r) {
116 float* source_v = source_vertex_indexer.GetDataPtr<float>(x, y);
117 if (isnan(source_v[0])) {
118 return false;
119 }
120
121 // Transform source points to the target camera's coordinate space.
122 float T_source_to_target_v[3], u, v;
123 ti.RigidTransform(source_v[0], source_v[1], source_v[2],
124 &T_source_to_target_v[0], &T_source_to_target_v[1],
125 &T_source_to_target_v[2]);
126 ti.Project(T_source_to_target_v[0], T_source_to_target_v[1],
127 T_source_to_target_v[2], &u, &v);
128 u = roundf(u);
129 v = roundf(v);
130
131 if (T_source_to_target_v[2] < 0 ||
132 !target_vertex_indexer.InBoundary(u, v)) {
133 return false;
134 }
135
136 int ui = static_cast<int>(u);
137 int vi = static_cast<int>(v);
138 float* target_v = target_vertex_indexer.GetDataPtr<float>(ui, vi);
139 float* target_n = target_normal_indexer.GetDataPtr<float>(ui, vi);
140 if (isnan(target_v[0]) || isnan(target_n[0])) {
141 return false;
142 }
143
144 r = (T_source_to_target_v[0] - target_v[0]) * target_n[0] +
145 (T_source_to_target_v[1] - target_v[1]) * target_n[1] +
146 (T_source_to_target_v[2] - target_v[2]) * target_n[2];
147 if (abs(r) > depth_outlier_trunc) {
148 return false;
149 }
150
151 J_ij[0] = -T_source_to_target_v[2] * target_n[1] +
152 T_source_to_target_v[1] * target_n[2];
153 J_ij[1] = T_source_to_target_v[2] * target_n[0] -
154 T_source_to_target_v[0] * target_n[2];
155 J_ij[2] = -T_source_to_target_v[1] * target_n[0] +
156 T_source_to_target_v[0] * target_n[1];
157 J_ij[3] = target_n[0];
158 J_ij[4] = target_n[1];
159 J_ij[5] = target_n[2];
160
161 return true;
162}
163
164inline OPEN3D_HOST_DEVICE bool GetJacobianIntensity(
165 int x,
166 int y,
167 const float depth_outlier_trunc,
168 const NDArrayIndexer& source_depth_indexer,
169 const NDArrayIndexer& target_depth_indexer,
170 const NDArrayIndexer& source_intensity_indexer,
171 const NDArrayIndexer& target_intensity_indexer,
172 const NDArrayIndexer& target_intensity_dx_indexer,
173 const NDArrayIndexer& target_intensity_dy_indexer,
174 const NDArrayIndexer& source_vertex_indexer,
175 const TransformIndexer& ti,
176 float* J_I,
177 float& r_I) {
178 const float sobel_scale = 0.125;
179
180 float* source_v = source_vertex_indexer.GetDataPtr<float>(x, y);
181 if (isnan(source_v[0])) {
182 return false;
183 }
184
185 // Transform source points to the target camera's coordinate space.
186 float T_source_to_target_v[3], u_tf, v_tf;
187 ti.RigidTransform(source_v[0], source_v[1], source_v[2],
188 &T_source_to_target_v[0], &T_source_to_target_v[1],
189 &T_source_to_target_v[2]);
190 ti.Project(T_source_to_target_v[0], T_source_to_target_v[1],
191 T_source_to_target_v[2], &u_tf, &v_tf);
192 int u_t = int(roundf(u_tf));
193 int v_t = int(roundf(v_tf));
194
195 if (T_source_to_target_v[2] < 0 ||
196 !target_depth_indexer.InBoundary(u_t, v_t)) {
197 return false;
198 }
199
200 float fx, fy;
201 ti.GetFocalLength(&fx, &fy);
202
203 float depth_t = *target_depth_indexer.GetDataPtr<float>(u_t, v_t);
204 float diff_D = depth_t - T_source_to_target_v[2];
205 if (isnan(depth_t) || abs(diff_D) > depth_outlier_trunc) {
206 return false;
207 }
208
209 float diff_I = *target_intensity_indexer.GetDataPtr<float>(u_t, v_t) -
210 *source_intensity_indexer.GetDataPtr<float>(x, y);
211 float dIdx = sobel_scale *
212 (*target_intensity_dx_indexer.GetDataPtr<float>(u_t, v_t));
213 float dIdy = sobel_scale *
214 (*target_intensity_dy_indexer.GetDataPtr<float>(u_t, v_t));
215
216 float invz = 1 / T_source_to_target_v[2];
217 float c0 = dIdx * fx * invz;
218 float c1 = dIdy * fy * invz;
219 float c2 = -(c0 * T_source_to_target_v[0] + c1 * T_source_to_target_v[1]) *
220 invz;
221
222 J_I[0] = (-T_source_to_target_v[2] * c1 + T_source_to_target_v[1] * c2);
223 J_I[1] = (T_source_to_target_v[2] * c0 - T_source_to_target_v[0] * c2);
224 J_I[2] = (-T_source_to_target_v[1] * c0 + T_source_to_target_v[0] * c1);
225 J_I[3] = (c0);
226 J_I[4] = (c1);
227 J_I[5] = (c2);
228 r_I = diff_I;
229
230 return true;
231}
232
233inline OPEN3D_HOST_DEVICE bool GetJacobianHybrid(
234 int x,
235 int y,
236 const float depth_outlier_trunc,
237 const NDArrayIndexer& source_depth_indexer,
238 const NDArrayIndexer& target_depth_indexer,
239 const NDArrayIndexer& source_intensity_indexer,
240 const NDArrayIndexer& target_intensity_indexer,
241 const NDArrayIndexer& target_depth_dx_indexer,
242 const NDArrayIndexer& target_depth_dy_indexer,
243 const NDArrayIndexer& target_intensity_dx_indexer,
244 const NDArrayIndexer& target_intensity_dy_indexer,
245 const NDArrayIndexer& source_vertex_indexer,
246 const TransformIndexer& ti,
247 float* J_I,
248 float* J_D,
249 float& r_I,
250 float& r_D) {
251 // sqrt 0.5, according to
252 // http://redwood-data.org/indoor_lidar_rgbd/supp.pdf
253 const float sqrt_lambda_intensity = 0.707;
254 const float sqrt_lambda_depth = 0.707;
255 const float sobel_scale = 0.125;
256
257 float* source_v = source_vertex_indexer.GetDataPtr<float>(x, y);
258 if (isnan(source_v[0])) {
259 return false;
260 }
261
262 // Transform source points to the target camera coordinate space.
263 float T_source_to_target_v[3], u_tf, v_tf;
264 ti.RigidTransform(source_v[0], source_v[1], source_v[2],
265 &T_source_to_target_v[0], &T_source_to_target_v[1],
266 &T_source_to_target_v[2]);
267 ti.Project(T_source_to_target_v[0], T_source_to_target_v[1],
268 T_source_to_target_v[2], &u_tf, &v_tf);
269 int u_t = int(roundf(u_tf));
270 int v_t = int(roundf(v_tf));
271
272 if (T_source_to_target_v[2] < 0 ||
273 !target_depth_indexer.InBoundary(u_t, v_t)) {
274 return false;
275 }
276
277 float fx, fy;
278 ti.GetFocalLength(&fx, &fy);
279
280 float depth_t = *target_depth_indexer.GetDataPtr<float>(u_t, v_t);
281 float diff_D = depth_t - T_source_to_target_v[2];
282 if (isnan(depth_t) || abs(diff_D) > depth_outlier_trunc) {
283 return false;
284 }
285
286 float dDdx = sobel_scale *
287 (*target_depth_dx_indexer.GetDataPtr<float>(u_t, v_t));
288 float dDdy = sobel_scale *
289 (*target_depth_dy_indexer.GetDataPtr<float>(u_t, v_t));
290 if (isnan(dDdx) || isnan(dDdy)) {
291 return false;
292 }
293
294 float diff_I = *target_intensity_indexer.GetDataPtr<float>(u_t, v_t) -
295 *source_intensity_indexer.GetDataPtr<float>(x, y);
296 float dIdx = sobel_scale *
297 (*target_intensity_dx_indexer.GetDataPtr<float>(u_t, v_t));
298 float dIdy = sobel_scale *
299 (*target_intensity_dy_indexer.GetDataPtr<float>(u_t, v_t));
300
301 float invz = 1 / T_source_to_target_v[2];
302 float c0 = dIdx * fx * invz;
303 float c1 = dIdy * fy * invz;
304 float c2 = -(c0 * T_source_to_target_v[0] + c1 * T_source_to_target_v[1]) *
305 invz;
306 float d0 = dDdx * fx * invz;
307 float d1 = dDdy * fy * invz;
308 float d2 = -(d0 * T_source_to_target_v[0] + d1 * T_source_to_target_v[1]) *
309 invz;
310
311 J_I[0] = sqrt_lambda_intensity *
312 (-T_source_to_target_v[2] * c1 + T_source_to_target_v[1] * c2);
313 J_I[1] = sqrt_lambda_intensity *
314 (T_source_to_target_v[2] * c0 - T_source_to_target_v[0] * c2);
315 J_I[2] = sqrt_lambda_intensity *
316 (-T_source_to_target_v[1] * c0 + T_source_to_target_v[0] * c1);
317 J_I[3] = sqrt_lambda_intensity * (c0);
318 J_I[4] = sqrt_lambda_intensity * (c1);
319 J_I[5] = sqrt_lambda_intensity * (c2);
320 r_I = sqrt_lambda_intensity * diff_I;
321
322 J_D[0] = sqrt_lambda_depth *
323 ((-T_source_to_target_v[2] * d1 + T_source_to_target_v[1] * d2) -
324 T_source_to_target_v[1]);
325 J_D[1] = sqrt_lambda_depth *
326 ((T_source_to_target_v[2] * d0 - T_source_to_target_v[0] * d2) +
327 T_source_to_target_v[0]);
328 J_D[2] = sqrt_lambda_depth *
329 ((-T_source_to_target_v[1] * d0 + T_source_to_target_v[0] * d1));
330 J_D[3] = sqrt_lambda_depth * (d0);
331 J_D[4] = sqrt_lambda_depth * (d1);
332 J_D[5] = sqrt_lambda_depth * (d2 - 1.0f);
333
334 r_D = sqrt_lambda_depth * diff_D;
335
336 return true;
337}
338
339} // namespace odometry
340} // namespace kernel
341} // namespace pipelines
342} // namespace t
343} // namespace open3d
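
As a reading aid (a sketch, not text from the header), the Jacobians computed above can be summarized in closed form, assuming the increment ordering (wx, wy, wz, tx, ty, tz) used throughout this file, the standard pinhole projection u = fx*x/z + cx, v = fy*y/z + cy used by TransformIndexer, and writing p = (x, y, z) for the source vertex transformed into the target camera frame:

\[
r_{\text{plane}} = n^\top (p - q), \qquad
J_{\text{plane}} = \begin{bmatrix} (p \times n)^\top & n^\top \end{bmatrix}
\]
\[
c_0 = \partial_u I_t \,\frac{f_x}{z}, \qquad
c_1 = \partial_v I_t \,\frac{f_y}{z}, \qquad
c_2 = -\frac{c_0 x + c_1 y}{z}
\]
\[
r_I = I_t(u, v) - I_s, \qquad
J_I = \begin{bmatrix} \big(p \times (c_0, c_1, c_2)^\top\big)^\top & c_0 & c_1 & c_2 \end{bmatrix}
\]

Here q and n are the target vertex and normal, I_t and I_s are the target and source intensity images, and the image gradients are the Sobel responses scaled by sobel_scale = 1/8, as in the code above.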
#define OPEN3D_HOST_DEVICE
Definition CUDAUtils.h:44
OPEN3D_HOST_DEVICE int Sign(int x)
Definition GeometryMacros.h:77
open3d::t::geometry::kernel::TransformIndexer
Helper class for converting coordinates/indices between 3D/3D, 3D/2D, 2D/3D.
Definition GeometryIndexer.h:25
OPEN3D_HOST_DEVICE void Project(float x_in, float y_in, float z_in, float *u_out, float *v_out) const
Project a 3D coordinate in camera coordinates to a 2D uv coordinate.
Definition GeometryIndexer.h:100
OPEN3D_HOST_DEVICE void RigidTransform(float x_in, float y_in, float z_in, float *x_out, float *y_out, float *z_out) const
Transform a 3D coordinate from camera coordinates to world coordinates.
Definition GeometryIndexer.h:62
OPEN3D_HOST_DEVICE void GetFocalLength(float *fx, float *fy) const
Definition GeometryIndexer.h:122
TArrayIndexer< int64_t > NDArrayIndexer
Definition GeometryIndexer.h:360
OPEN3D_HOST_DEVICE bool GetJacobianHybrid(int x, int y, const float depth_outlier_trunc, const NDArrayIndexer &source_depth_indexer, const NDArrayIndexer &target_depth_indexer, const NDArrayIndexer &source_intensity_indexer, const NDArrayIndexer &target_intensity_indexer, const NDArrayIndexer &target_depth_dx_indexer, const NDArrayIndexer &target_depth_dy_indexer, const NDArrayIndexer &target_intensity_dx_indexer, const NDArrayIndexer &target_intensity_dy_indexer, const NDArrayIndexer &source_vertex_indexer, const TransformIndexer &ti, float *J_I, float *J_D, float &r_I, float &r_D)
Definition RGBDOdometryJacobianImpl.h:233
OPEN3D_HOST_DEVICE float HuberDeriv(float r, float delta)
Definition RGBDOdometryJacobianImpl.h:29
OPEN3D_HOST_DEVICE bool GetJacobianPointToPlane(int x, int y, const float depth_outlier_trunc, const NDArrayIndexer &source_vertex_indexer, const NDArrayIndexer &target_vertex_indexer, const NDArrayIndexer &target_normal_indexer, const TransformIndexer &ti, float *J_ij, float &r)
Definition RGBDOdometryJacobianImpl.h:106
OPEN3D_HOST_DEVICE bool GetJacobianPointToPoint(int x, int y, const float square_dist_thr, const NDArrayIndexer &source_vertex_indexer, const NDArrayIndexer &target_vertex_indexer, const TransformIndexer &ti, float *J_x, float *J_y, float *J_z, float &rx, float &ry, float &rz)
Definition RGBDOdometryJacobianImpl.h:39
OPEN3D_HOST_DEVICE bool GetJacobianIntensity(int x, int y, const float depth_outlier_trunc, const NDArrayIndexer &source_depth_indexer, const NDArrayIndexer &target_depth_indexer, const NDArrayIndexer &source_intensity_indexer, const NDArrayIndexer &target_intensity_indexer, const NDArrayIndexer &target_intensity_dx_indexer, const NDArrayIndexer &target_intensity_dy_indexer, const NDArrayIndexer &source_vertex_indexer, const TransformIndexer &ti, float *J_I, float &r_I)
Definition RGBDOdometryJacobianImpl.h:164
OPEN3D_HOST_DEVICE float HuberLoss(float r, float delta)
Definition RGBDOdometryJacobianImpl.h:34
open3d
Definition PinholeCameraIntrinsic.cpp:16
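
The per-pixel routines in this header each return one row of a linear system; a caller typically accumulates them into 6x6 normal equations. The sketch below is a minimal, hypothetical host-side illustration of that accumulation with Huber reweighting. The function name AccumulatePointToPlane, the serial row/column loop, and the flattened upper-triangular AtA layout are assumptions for illustration only; Open3D's actual odometry kernels perform this reduction in parallel, and RGBDOdometryJacobianImpl.h is a private header, so treat the include purely as a sketch.

// Hypothetical sketch (not Open3D's actual kernel): accumulate the 6x6
// point-to-plane normal equations from the per-pixel Jacobians returned by
// GetJacobianPointToPlane, applying iteratively reweighted least squares
// with the Huber kernel defined in the same header.
#include <array>

#include "open3d/t/geometry/kernel/GeometryIndexer.h"
#include "open3d/t/pipelines/kernel/RGBDOdometryJacobianImpl.h"

namespace sketch {

using open3d::t::geometry::kernel::NDArrayIndexer;
using open3d::t::geometry::kernel::TransformIndexer;
using namespace open3d::t::pipelines::kernel::odometry;

// AtA holds the upper triangle (21 entries) of J^T W J; Atb holds -J^T W r.
void AccumulatePointToPlane(int rows,
                            int cols,
                            float depth_outlier_trunc,
                            float huber_delta,
                            const NDArrayIndexer& source_vertex_indexer,
                            const NDArrayIndexer& target_vertex_indexer,
                            const NDArrayIndexer& target_normal_indexer,
                            const TransformIndexer& ti,
                            std::array<float, 21>& AtA,
                            std::array<float, 6>& Atb) {
    AtA.fill(0.0f);
    Atb.fill(0.0f);
    for (int y = 0; y < rows; ++y) {
        for (int x = 0; x < cols; ++x) {
            float J[6], r;
            if (!GetJacobianPointToPlane(x, y, depth_outlier_trunc,
                                         source_vertex_indexer,
                                         target_vertex_indexer,
                                         target_normal_indexer, ti, J, r)) {
                continue;  // NaN vertex, out-of-bounds projection, or outlier.
            }
            // IRLS weight for the Huber kernel: w = rho'(r) / r.
            float w = (r != 0.0f) ? HuberDeriv(r, huber_delta) / r : 1.0f;
            int idx = 0;
            for (int i = 0; i < 6; ++i) {
                for (int j = i; j < 6; ++j) {
                    AtA[idx++] += w * J[i] * J[j];
                }
                Atb[i] -= w * J[i] * r;
            }
        }
    }
}

}  // namespace sketch

Solving AtA x = Atb for the 6-vector increment and converting it to a pose update is a separate step not shown here.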