UFO 1.0.0
An Efficient Probabilistic 3D Mapping Framework That Embraces the Unknown
Loading...
Searching...
No Matches
ufo_manipulation.cpp
1// UFO
2#include <ufo/cloud/point_cloud.hpp>
3#include <ufo/map/integrator/angular_integrator.hpp>
4#include <ufo/map/ufomap.hpp>
5#include <ufo/numeric/transform3.hpp>
6#include <ufo/vision/camera.hpp>
7
// STL
#include <algorithm>
#include <cassert>
#include <cmath>
#include <cstddef>
#include <cstdint>
#include <cstdlib>
#include <cstring>
#include <filesystem>
#include <fstream>
#include <future>
#include <iostream>
#include <memory>
#include <set>
#include <sstream>
#include <string>
#include <string_view>
#include <utility>
#include <vector>
19
20// C STL
21#include <fcntl.h>
22#include <sys/stat.h>
23#include <sys/types.h>
24#include <unistd.h>
25
26// Orbbec
27#include <libobsensor/ObSensor.hpp>
28#include <libobsensor/hpp/Utils.hpp>
29
30// TOML
31#include <toml++/toml.hpp>
32
33// STB
34#define STB_IMAGE_WRITE_IMPLEMENTATION
35#include "stb_image_write.h"
36
37// PCL
38#include <pcl/filters/crop_box.h>
39#include <pcl/filters/uniform_sampling.h>
40#include <pcl/filters/voxel_grid.h>
41#include <pcl/io/pcd_io.h>
42#include <pcl/point_types.h>
43#include <pcl/registration/gicp6d.h>
44#include <pcl/registration/icp.h>
45
46#define RESET "\033[0m"
47#define BLACK "\033[30m" /* Black */
48#define RED "\033[31m" /* Red */
49#define GREEN "\033[32m" /* Green */
50#define YELLOW "\033[33m" /* Yellow */
51#define BLUE "\033[34m" /* Blue */
52#define MAGENTA "\033[35m" /* Magenta */
53#define CYAN "\033[36m" /* Cyan */
54#define WHITE "\033[37m" /* White */
55#define BOLDBLACK "\033[1m\033[30m" /* Bold Black */
56#define BOLDRED "\033[1m\033[31m" /* Bold Red */
57#define BOLDGREEN "\033[1m\033[32m" /* Bold Green */
58#define BOLDYELLOW "\033[1m\033[33m" /* Bold Yellow */
59#define BOLDBLUE "\033[1m\033[34m" /* Bold Blue */
60#define BOLDMAGENTA "\033[1m\033[35m" /* Bold Magenta */
61#define BOLDCYAN "\033[1m\033[36m" /* Bold Cyan */
62#define BOLDWHITE "\033[1m\033[37m" /* Bold White */
63
66
67class Camera
68{
69 public:
70 Camera(std::string_view name, ufo::Transform3f pose, char const* serial_number,
71 std::uint32_t width = 640, std::uint32_t height = 400, std::uint32_t fps = 15,
72 OBFormat color_format = OB_FORMAT_RGB, OBFormat depth_format = OB_FORMAT_Y16)
73 : name_(name), pose_(pose)
74 {
75 ob::Context ctx;
76 auto devices = ctx.queryDeviceList();
77 device_ = devices->getDeviceBySN(serial_number);
78 if (nullptr == device_) {
79 std::cerr << BOLDRED
80 << "Could not find a device with serial number: " << serial_number << '\n'
81 << RESET;
82 exit(EXIT_FAILURE);
83 }
84
85 config_ = std::make_shared<ob::Config>();
86 config_->enableVideoStream(OB_STREAM_COLOR, width, height, fps, color_format);
87 config_->enableVideoStream(OB_STREAM_DEPTH, width, height, fps, depth_format);
88 config_->setFrameAggregateOutputMode(
89 // OB_FRAME_AGGREGATE_OUTPUT_FULL_FRAME_REQUIRE
90 OB_FRAME_AGGREGATE_OUTPUT_ALL_TYPE_FRAME_REQUIRE);
91 config_->setAlignMode(ALIGN_D2C_HW_MODE);
92
93 pipeline_ = std::make_shared<ob::Pipeline>(device_);
94 pipeline_->enableFrameSync();
95 pipeline_->start(config_);
96
97 point_cloud_ = std::make_shared<ob::PointCloudFilter>();
98 }
99
100 ~Camera() { pipeline_->stop(); }
101
102 bool update()
103 {
104 auto frame_set = pipeline_->waitForFrameset(100);
105 // auto frame_set = pipeline_->waitForFrames(100);
106 if (nullptr == frame_set) {
107 return false;
108 }
109
110 point_cloud_->setCreatePointFormat(OB_FORMAT_RGB_POINT);
111
112 frame_ = point_cloud_->process(frame_set);
113
114 return true;
115 }
116
117 [[nodiscard]] Cloud cloud() const
118 {
119 std::uint32_t num_points = frame_->dataSize() / sizeof(OBColorPoint);
120 OBColorPoint const* point = static_cast<OBColorPoint const*>(frame_->data());
121
122 Cloud cloud;
123 cloud.reserve(num_points);
124
125 for (std::uint32_t i{}; num_points > i; ++i) {
126 cloud.emplace_back(
127 ufo::Vec3f(point->x, point->y, point->z),
128 ufo::convert<ufo::FineLab>(ufo::FineRGB{point->r, point->g, point->b}));
129 }
130
131 return cloud;
132 }
133
134 [[nodiscard]] ufo::Transform3f pose() const { return pose_; }
135
136 private:
137 std::string name_;
138 ufo::Transform3f pose_;
139 std::shared_ptr<ob::Device> device_;
140 std::shared_ptr<ob::Config> config_;
141 std::shared_ptr<ob::Pipeline> pipeline_;
142 std::shared_ptr<ob::PointCloudFilter> point_cloud_;
143 std::shared_ptr<ob::Frame> frame_;
144};
145
// Renders a top-down orthographic view of the map by ray tracing one ray per
// pixel, then JPEG-encodes and writes RGB + depth images to save_dir.
//
// NOTE(review): the declarations of the `rays` and `nodes` members (used in
// the constructor initializer list and in render()) are not visible in this
// excerpt — confirm they exist just above `raw_rgb_image` in the full file.
struct Renderer {
	ufo::Image<ufo::SmallRGB> raw_rgb_image;   // last rendered color image
	ufo::Image<std::uint8_t> raw_depth_image;  // last rendered depth image (0..255, 255 = miss)
	std::vector<char> rgb_image;               // JPEG bytes buffer (see NOTE on lambda `f`)
	std::vector<char> depth_image;             // JPEG bytes buffer (see NOTE on lambda `f`)
	std::filesystem::path save_dir;            // output directory for JPEGs
	bool named_pipe;                           // also stream via FIFO if true
	std::size_t image_index{};                 // filename suffix, incremented per render
	int fd;                                    // FIFO fd; NOTE(review): uninitialized (and later closed) when !named_pipe
	ufo::SmallRGB background_color;            // color used for rays that hit nothing
	float min_dist = 0.01;                     // ray trace near limit [m]
	float max_dist = 4.6;                      // ray trace far limit [m]

	// rays: one ray per output pixel; named_pipe: if true, create and open the
	// FIFO "ufo_manipulation" (open blocks until a reader attaches).
	Renderer(ufo::Image<ufo::Ray3f> rays, std::filesystem::path save_dir, bool named_pipe,
	         ufo::SmallRGB background_color)
	    : rays(rays)
	    , nodes(rays.rows(), rays.cols())
	    , raw_rgb_image(rays.rows(), rays.cols())
	    , raw_depth_image(rays.rows(), rays.cols())
	    , save_dir(save_dir)
	    , named_pipe(named_pipe)
	    , background_color(background_color)
	{
		if (named_pipe) {
			mkfifo("ufo_manipulation", 0666);
			fd = open("ufo_manipulation", O_WRONLY);
		}
	}

	// NOTE(review): closes `fd` even when it was never opened (!named_pipe),
	// so an arbitrary descriptor value gets closed — guard on named_pipe.
	~Renderer() { close(fd); }

	// Ray traces `map`, builds color + normalized depth images, suppresses
	// connected regions that contain too few "green"/"white" pixels, then
	// encodes and saves both images.
	void render(Map const& map)
	{
		// Debug side output: dump all occupied leaves as a colored PCL cloud.
		pcl::PointCloud<pcl::PointXYZRGB>::Ptr pcl_cloud(
		    new pcl::PointCloud<pcl::PointXYZRGB>);
		for (auto node : map.query(ufo::pred::Leaf{} && ufo::pred::Occupied{})) {
			auto coord = map.center(node.index);
			auto color = map.colorRGB(node.index);
			pcl_cloud->push_back(pcl::PointXYZRGB(coord.x, coord.y, coord.z, color.red,
			                                      color.green, color.blue));
		}
		// for (auto node : map) {
		//   if (map.isLeaf(node.index) && map.occupancyOccupied(node.index)) {
		//     auto coord = map.center(node.index);
		//     auto color = map.color<ufo::ColorType::RGB8U>(node.index);
		//     pcl_cloud->push_back(pcl::PointXYZRGB(coord.x, coord.y, coord.z, color.red,
		//                                           color.green, color.blue));
		//   }
		// }

		// NOTE(review): hard-coded absolute, user-specific path — move to config.
		pcl::io::savePCDFile(
		    "/home/dduberg/Downloads/manipulation/src/ufo/lib/map/apps/manipulation/renders/"
		    "map.pcd",
		    *pcl_cloud, true);

		// Trace one ray per pixel against occupied leaves; results land in `nodes`.
		map.trace(ufo::execution::par, rays.begin(), rays.end(), nodes.begin(),
		          ufo::pred::Leaf{} && ufo::pred::Occupied{}, min_dist, max_dist);

		// Color image: hit -> voxel color, miss -> background color.
		ufo::transform(ufo::execution::par, nodes.begin(), nodes.end(), raw_rgb_image.begin(),
		               [this, &map](auto hit) {
			               return map.valid(hit.node) ? map.colorRGB(hit.node)
			                                          : background_color;
		               });

		// Find the largest depth so hits can be normalized to 0..255 below
		// (misses contribute max_dist, so they normalize to 255).
		double max_depth = 0;
		for (auto const& n : nodes) {
			double depth = map.valid(n.node) ? n.distance : max_dist;
			max_depth = std::max(max_depth, depth);
		}

		ufo::transform(ufo::execution::par, nodes.begin(), nodes.end(),
		               raw_depth_image.begin(), [this, &map, max_depth](auto hit) {
			               double depth = map.valid(hit.node) ? hit.distance : max_depth;
			               depth = std::round(depth * 255 / max_depth);
			               // depth = std::clamp(depth, min_dist, max_dist);
			               // float ranged = (depth - min_dist) /
			               //                (max_dist - min_dist); // dmin->0.0, dmax->1.0
			               // float out = 1.0 - ranged; // 0 -> white, 1 -> black
			               // // output* *= 1 / 2.2; // most picture data is gamma-compressed

			               // return static_cast<std::uint8_t>(
			               //     out * std::numeric_limits<std::uint8_t>::max());
			               return static_cast<std::uint8_t>(depth);
		               });

		// Region filter: flood fill 8-connected regions of foreground pixels
		// (depth != 255); a region whose green/white pixel counts are below
		// the thresholds is erased back to background.
		// seen: 0 = unvisited, 1 = in current region, 2 = background border.
		ufo::Image<int> seen(raw_rgb_image.rows(), raw_rgb_image.cols(), 0);
		// NOTE(review): leftover debug prints.
		std::cout << raw_rgb_image(92, 95) << "\n";
		std::cout << raw_rgb_image(95, 92) << "\n";
		std::set<std::pair<int, int>> open;  // frontier of (col, row) pairs
		for (int iter{}; 1 > iter; ++iter) {  // single pass; loop kept (experimentation?)
			for (int r{}; raw_rgb_image.rows() > r; ++r) {
				for (int c{}; raw_rgb_image.cols() > c; ++c) {
					if (255 == raw_depth_image(r, c)) {
						continue;  // background pixel, not a region seed
					}

					seen(r, c) = 1;
					open.insert(std::pair(c, r));

					// Grow the region from this seed (8-connectivity).
					while (!open.empty()) {
						auto cur = *open.begin();
						open.erase(cur);
						for (int y = -1; 1 >= y; ++y) {
							for (int x = -1; 1 >= x; ++x) {
								int ry = cur.second + y;
								int cx = cur.first + x;
								if (0 > ry || raw_rgb_image.rows() <= ry || 0 > cx ||
								    raw_rgb_image.cols() <= cx || 0 < seen(ry, cx)) {
									continue;  // out of bounds or already visited
								}

								if (255 == raw_depth_image(ry, cx)) {
									seen(ry, cx) = 2;  // background border, do not expand
									continue;
								}

								seen(ry, cx) = 1;
								open.insert(std::pair(cx, ry));
							}
						}
					}

					// Count pixels in hard-coded "green" and "white" RGB windows
					// over the region just grown (seen == 1).
					std::size_t num_white{};
					std::size_t num_green{};

					for (int r{}; raw_rgb_image.rows() > r; ++r) {
						for (int c{}; raw_rgb_image.cols() > c; ++c) {
							if (1 != seen(r, c)) {
								continue;
							}

							auto color = raw_rgb_image(r, c);

							if (130 < color.red && 160 > color.red && 190 < color.green &&
							    210 > color.green && 130 < color.blue && 150 > color.blue) {
								++num_green;
							} else if (220 < color.red && 220 < color.green && 220 < color.blue) {
								++num_white;
							}
						}
					}

					// Too few of both -> treat the whole region (and its border)
					// as noise and erase it.
					if (5 > num_green && 30 > num_white) {
						for (int r{}; raw_rgb_image.rows() > r; ++r) {
							for (int c{}; raw_rgb_image.cols() > c; ++c) {
								if (0 == seen(r, c)) {
									continue;
								}

								raw_rgb_image(r, c) = background_color;
								raw_depth_image(r, c) = 255;
							}
						}
					}

					// Reset bookkeeping for the next seed.
					std::fill(seen.begin(), seen.end(), 0);
					open.clear();
				}
			}
		}

		// JPEG-encode `raw_image` into `image` and write save_dir/<name>_<i>.jpg.
		// NOTE(review): `image` is taken BY VALUE, so the member buffers
		// rgb_image/depth_image are copied, filled, written and cleared locally
		// — the members themselves stay empty. Also `comp` is unused; the
		// component count is derived from the pixel type instead.
		auto f = [this](std::string const& name, auto raw_image, auto image, int comp) {
			// NOTE(review): stb image-write functions return 0 on FAILURE and
			// non-zero on success, so this condition reports "Error writing jpg
			// image" exactly when encoding SUCCEEDED — the check is inverted.
			if (0 != stbi_write_jpg_to_func(
			             [](void* context, void* data, int size) {
				             // Append the encoded chunk to the vector passed as context.
				             std::vector<char>* v = static_cast<std::vector<char>*>(context);
				             std::size_t s = v->size();
				             v->resize(s + size);
				             std::memcpy(v->data() + s, data, size);
			             },
			             &image, raw_image.cols(), raw_image.rows(),
			             sizeof(typename decltype(raw_image)::value_type) / sizeof(char),
			             raw_image.data(), 95)) {
				std::cerr << BOLDRED << "Error writing jpg image\n" << RESET;
			}

			// if (named_pipe) {
			//   if (-1 == write(fd, rgb_image.data(), rgb_image.size())) {
			//     std::cerr << BOLDRED << "Error while writing to pipe\n" << RESET;
			//   }
			// }

			std::filesystem::path path =
			    save_dir / (name + "_" + std::to_string(image_index) + ".jpg");
			std::cout << "Saving file " << path << '\n';
			std::ofstream file(path, std::ios::binary | std::ios::out);
			file.write(image.data(), image.size());
			file.close();

			image.clear();  // clears only the local copy (see NOTE above)
		};

		f("rgb", raw_rgb_image, rgb_image, 3);
		f("depth", raw_depth_image, depth_image, 1);

		// TODO: Make the index nicer

		++image_index;
	}
};
347
348[[nodiscard]] std::vector<Camera> createCameras(toml::table const& config)
349{
350 std::cout << "Creating cameras\n";
351
352 std::uint32_t default_width = 640;
353 std::uint32_t default_height = 400;
354 std::uint32_t default_fps = 15;
355
356 if (auto v = config["width"].value<std::uint32_t>(); v) {
357 default_width = *v;
358 } else {
359 std::cout << YELLOW << "\tMissing default camera width, using '" << default_width
360 << "'.\n";
361 }
362
363 if (auto v = config["height"].value<std::uint32_t>(); v) {
364 default_height = *v;
365 } else {
366 std::cout << YELLOW << "\tMissing default camera height, using '" << default_height
367 << "'.\n";
368 }
369
370 if (auto v = config["fps"].value<std::uint32_t>(); v) {
371 default_fps = *v;
372 } else {
373 std::cout << YELLOW << "\tMissing default camera fps, using '" << default_fps
374 << "'.\n";
375 }
376
377 std::vector<Camera> cameras;
378
379 for (auto const& [name, value] : config) {
380 if (!value.is_table()) {
381 continue;
382 }
383
384 std::cout << "\tCamera '" << name << "'\n";
385
386 std::string_view serial_number;
387 ufo::Transform3f pose;
388 std::uint32_t width = default_width;
389 std::uint32_t height = default_height;
390 std::uint32_t fps = default_fps;
391
392 auto params = *value.as_table();
393
394 if (auto v = params["serial_number"].value<std::string_view>(); v) {
395 serial_number = *v;
396 } else {
397 std::cerr << BOLDRED << "\t - Missing serial_number.\n" << RESET;
398 exit(EXIT_FAILURE);
399 }
400
401 if ("" == serial_number) {
402 std::cerr << BOLDRED << "\t - Empty serial_number.\n" << RESET;
403 exit(EXIT_FAILURE);
404 }
405
406 if (auto v = params["pose"].as_array(); v) {
407 std::vector<float> components;
408 v->for_each([&components](auto&& e) {
409 if constexpr (toml::is_number<decltype(e)>) {
410 components.push_back(*e);
411 } else {
412 std::cerr << BOLDRED
413 << "\t - 'pose' should consist of 7 floats in the format [x, y, z, "
414 "qw, qx, qy, qz] "
415 "(e.g., [0, 0, 0, 1, 0, 0, 0])\n"
416 << RESET;
417 exit(EXIT_FAILURE);
418 }
419 });
420 if (7 != components.size()) {
421 std::cerr << BOLDRED
422 << "\t - 'pose' should be in the format [x, y, z, qw, qx, qy, qz] "
423 "(e.g., [0, 0, 0, 1, 0, 0, 0])\n"
424 << RESET;
425 exit(EXIT_FAILURE);
426 }
427
428 pose = ufo::Transform3f(normalize(ufo::Quatf(components[3], components[4],
429 components[5], components[6])),
430 ufo::Vec3f(components[0], components[1], components[2]));
431 } else {
432 std::cerr << BOLDRED << "\t - Missing pose.\n" << RESET;
433 exit(EXIT_FAILURE);
434 }
435
436 if (auto v = params["width"].value<std::uint32_t>(); v) {
437 width = *v;
438 } else {
439 std::cout << YELLOW << "\t - Missing width, using default width of '" << width
440 << "' instead.\n"
441 << RESET;
442 }
443
444 if (auto v = params["height"].value<std::uint32_t>(); v) {
445 height = *v;
446 } else {
447 std::cout << YELLOW << "\t - Missing height, using default height of '" << height
448 << "' instead.\n"
449 << RESET;
450 }
451
452 if (auto v = params["fps"].value<std::uint32_t>(); v) {
453 fps = *v;
454 } else {
455 std::cout << YELLOW << "\t - Missing fps, using default fps of '" << fps
456 << "' instead.\n"
457 << RESET;
458 }
459
460 std::cout << "\t - Serial number: " << serial_number << '\n';
461 std::cout << "\t - Pose: " << pose << '\n';
462 std::cout << "\t - Width: " << width << '\n';
463 std::cout << "\t - Height: " << height << '\n';
464 std::cout << "\t - FPS: " << fps << '\n';
465
466 cameras.emplace_back(name.str(), pose, serial_number.data(), width, height, fps);
467 }
468
469 std::cout << "Created " << cameras.size() << " cameras\n";
470
471 return cameras;
472}
473
474[[nodiscard]] Map createMap(toml::table const& config)
475{
476 std::cout << "Creating UFOMap\n";
477
478 float resolution = 0.1f;
479 if (auto v = config["resolution"].value<float>(); v) {
480 resolution = *v;
481 } else {
482 std::cout << YELLOW << " - Missing UFOMap resolution, using '" << resolution
483 << "'.\n";
484 }
485
486 std::cout << " - Resolution: " << resolution << " m\n";
487
488 return Map(resolution);
489}
490
491[[nodiscard]] ufo::AngularIntegrator<3> createIntegrator(toml::table const& config)
492{
493 std::cout << "Creating UFOMap integrator\n";
494
495 ufo::AngularIntegrator<3> integrator;
496
497 if (auto v = config["angular_resolution"].value<float>(); v) {
498 integrator.angularResolution(ufo::radians(*v));
499 } else {
500 std::cout << YELLOW << " - Missing 'angular_resolution', using '"
501 << ufo::degrees(integrator.angularResolution()) << "'.\n";
502 }
503
504 if (auto v = config["min_distance"].value<float>(); v) {
505 integrator.min_distance = *v;
506 } else {
507 std::cout << YELLOW << " - Missing 'min_distance', using '" << integrator.min_distance
508 << "'.\n";
509 }
510
511 if (auto v = config["max_distance"].value<float>(); v) {
512 integrator.max_distance = *v;
513 } else {
514 std::cout << YELLOW << " - Missing 'max_distance', using '" << integrator.max_distance
515 << "'.\n";
516 }
517
518 if (auto v = config["sensor_angular_resolution"].value<float>(); v) {
519 integrator.sensor_angular_resolution = ufo::radians(*v);
520 } else {
521 std::cout << YELLOW << " - Missing 'sensor_angular_resolution', using '"
522 << ufo::degrees(integrator.sensor_angular_resolution) << "'.\n";
523 }
524
525 if (auto v = config["translation_error"].value<float>(); v) {
526 integrator.translation_error = *v;
527 } else {
528 std::cout << YELLOW << " - Missing 'translation_error', using '"
529 << integrator.translation_error << "'.\n";
530 }
531
532 if (auto v = config["orientation_error"].value<float>(); v) {
533 integrator.orientation_error = *v;
534 } else {
535 std::cout << YELLOW << " - Missing 'orientation_error', using '"
536 << integrator.orientation_error << "'.\n";
537 }
538
539 if (auto v = config["occupancy_hit"].value<float>(); v) {
540 integrator.occupancy_hit = *v;
541 } else {
542 std::cout << YELLOW << " - Missing 'occupancy_hit', using '"
543 << integrator.occupancy_hit << "'.\n";
544 }
545
546 if (auto v = config["occupancy_miss"].value<float>(); v) {
547 integrator.occupancy_miss = *v;
548 } else {
549 std::cout << YELLOW << " - Missing 'occupancy_miss', using '"
550 << integrator.occupancy_miss << "'.\n";
551 }
552
553 std::cout << " - Angular resolution: " << ufo::degrees(integrator.angularResolution())
554 << "°\n";
555 std::cout << " - Min. distance: " << integrator.min_distance << " m\n";
556 std::cout << " - Max. distance: " << integrator.max_distance << " m\n";
557 std::cout << " - Sensor angular resolution: "
558 << ufo::degrees(integrator.sensor_angular_resolution) << "°\n";
559 std::cout << " - Translation error: " << integrator.translation_error << " m\n";
560 std::cout << " - Orientation error: " << ufo::degrees(integrator.orientation_error)
561 << "°\n";
562 std::cout << " - Occupancy hit: " << (100 * integrator.occupancy_hit) << "%\n";
563 std::cout << " - Occupancy miss: " << (100 * integrator.occupancy_miss) << "%\n";
564
565 return integrator;
566}
567
568[[nodiscard]] Renderer createRenderer(toml::table const& config)
569{
570 std::cout << "Creating renderer\n";
571
572 bool named_pipe = false;
573 if (auto v = config["named_pipe"].value<bool>(); v) {
574 named_pipe = *v;
575 } else {
576 std::cerr << BOLDRED << " - Missing named_pipe, using default value of '"
577 << std::boolalpha << named_pipe << "'\n"
578 << RESET;
579 }
580
581 std::filesystem::path save_dir;
582
583 if (auto v = config["output_dir"].value<std::string_view>(); v) {
584 save_dir = *v;
585 } else {
586 std::cerr << BOLDRED << " - Missing output_dir.\n" << RESET;
587 exit(EXIT_FAILURE);
588 }
589
590 if ("" == save_dir) {
591 std::cerr << BOLDRED << " - Empty output_dir.\n" << RESET;
592 exit(EXIT_FAILURE);
593 }
594
595 unsigned width = 160;
596 unsigned height = 160;
597
598 if (auto v = config["width"].value<std::uint32_t>(); v) {
599 width = *v;
600 } else {
601 std::cout << YELLOW << " - Missing width, using default width of '" << width
602 << "' instead.\n"
603 << RESET;
604 }
605
606 if (auto v = config["height"].value<std::uint32_t>(); v) {
607 height = *v;
608 } else {
609 std::cout << YELLOW << " - Missing height, using default height of '" << height
610 << "' instead.\n"
611 << RESET;
612 }
613
614 float elevation = 5.0f;
615
616 if (auto v = config["elevation"].value<std::uint32_t>(); v) {
617 elevation = *v;
618 } else {
619 std::cout << YELLOW << " - Missing elevation, using default elevation of '"
620 << elevation << "' instead.\n"
621 << RESET;
622 }
623
624 auto coord_f = [&config, elevation](std::string const& name,
625 ufo::Vec2f const& default_val = ufo::Vec2f{}) {
626 if (auto v = config[name].as_array(); v) {
627 std::vector<float> components;
628 v->for_each([&name, &components](auto&& e) {
629 if constexpr (toml::is_number<decltype(e)>) {
630 components.push_back(*e);
631 } else {
632 std::cerr << BOLDRED << " - coordinate '" + name
633 << "' should consist of 2 floats in the format [x, y] (e.g., [0, "
634 "0])\n"
635 << RESET;
636 exit(EXIT_FAILURE);
637 }
638 });
639 if (2 != components.size()) {
640 std::cerr << BOLDRED << " - coordinate '" + name
641 << "' should consist of 2 floats in the format [x, y] (e.g., [0, 0])\n"
642 << RESET;
643 exit(EXIT_FAILURE);
644 }
645 return ufo::Vec3f(components[0], components[1], elevation);
646 } else {
647 std::cerr << BOLDRED << " - Missing coordinate '" + name
648 << "', using default coordinate '" << default_val << "' instead.\n"
649 << RESET;
650 return ufo::Vec3f(default_val, elevation);
651 }
652 };
653
654 ufo::Vec3f top_left = coord_f("top_left", ufo::Vec2f(-2.0f, 2.0f));
655 ufo::Vec3f top_right = coord_f("top_right", ufo::Vec2f(2.0f, 2.0f));
656 ufo::Vec3f bottom_right = coord_f("bottom_right", ufo::Vec2f(2.0f, -2.0f));
657 ufo::Vec3f bottom_left = coord_f("bottom_left", ufo::Vec2f(-2.0f, -2.0f));
658
659 ufo::Image<ufo::Ray3f> rays(width, height);
660 for (std::size_t r{}; rays.rows() > r; ++r) {
661 for (std::size_t c{}; rays.cols() > c; ++c) {
662 auto top = ufo::lerp(top_left, top_right, float(c) / rays.cols());
663 auto bottom = ufo::lerp(bottom_left, bottom_right, float(c) / rays.cols());
664 rays(r, c).origin = ufo::lerp(top, bottom, float(r) / rays.rows());
665 rays(r, c).direction = ufo::Vec3f(0, 0, -1);
666 }
667 }
668
669 ufo::SmallRGB background_color;
670 if (auto v = config["background_color"].as_array(); v) {
671 std::vector<std::uint8_t> components;
672 v->for_each([&components](auto&& e) {
673 if constexpr (toml::is_number<decltype(e)>) {
674 components.push_back(*e);
675 } else {
676 std::cerr << BOLDRED
677 << " - background_color should consist of 3 8-bit unsigned integers in "
678 "the format [r, g, b] (e.g., [0, 32, 255])\n"
679 << RESET;
680 exit(EXIT_FAILURE);
681 }
682 });
683 if (3 != components.size()) {
684 std::cerr
685 << BOLDRED
686 << " - background_color should consist of 3 8-bit unsigned integers in the "
687 "format [r, g, b] (e.g., [0, 32, 255])\n"
688 << RESET;
689 exit(EXIT_FAILURE);
690 }
691 background_color = ufo::SmallRGB{components[0], components[1], components[2]};
692 } else {
693 std::cerr << BOLDRED << " - Missing background_color, using default color '"
694 << background_color << "' instead.\n"
695 << RESET;
696 }
697
698 return Renderer(rays, save_dir, named_pipe, background_color);
699
700 // TODO: Old below
701
702 // Image<Ray3> rays(rows, cols);
703
704 // ufo::Camera camera;
705
706 // if (auto v = config["pose"].as_array(); v) {
707 // std::vector<float> components;
708 // v->for_each([&components](auto&& e) {
709 // if constexpr (toml::is_number<decltype(e)>) {
710 // components.push_back(*e);
711 // } else {
712 // std::cerr << BOLDRED
713 // << " - 'pose' should consist of 7 floats in the format [x, y, z, "
714 // "qw, qx, qy, qz] "
715 // "(e.g., [0, 0, 0, 1, 0, 0, 0])\n"
716 // << RESET;
717 // exit(EXIT_FAILURE);
718 // }
719 // });
720 // if (7 != components.size()) {
721 // std::cerr << BOLDRED
722 // << " - 'pose' should be in the format [x, y, z, qw, qx, qy, qz] "
723 // "(e.g., [0, 0, 0, 1, 0, 0, 0])\n"
724 // << RESET;
725 // exit(EXIT_FAILURE);
726 // }
727
728 // camera.pose = ufo::Transform3f(
729 // ufo::Quatf(components[3], components[4], components[5], components[6]),
730 // ufo::Vec3f(components[0], components[1], components[2]));
731 // } else {
732 // std::cerr << BOLDRED << " - Missing pose.\n" << RESET;
733 // exit(EXIT_FAILURE);
734 // }
735
736 // if (auto v = config["width"].value<std::uint32_t>(); v) {
737 // camera.cols = *v;
738 // } else {
739 // std::cout << YELLOW << " - Missing width, using default width of '" << camera.cols
740 // << "' instead.\n"
741 // << RESET;
742 // }
743
744 // if (auto v = config["height"].value<std::uint32_t>(); v) {
745 // camera.rows = *v;
746 // } else {
747 // std::cout << YELLOW << " - Missing height, using default height of '" << camera.rows
748 // << "' instead.\n"
749 // << RESET;
750 // }
751
752 // if (auto v = config["near_clip"].value<float>(); v) {
753 // camera.near_clip = *v;
754 // } else {
755 // std::cout << YELLOW << " - Missing 'near_clip', using default near clip of '"
756 // << camera.near_clip << "' instead.\n"
757 // << RESET;
758 // }
759
760 // if (auto v = config["far_clip"].value<float>(); v) {
761 // camera.far_clip = *v;
762 // } else {
763 // std::cout << YELLOW << " - Missing 'far_clip', using default far clip of '"
764 // << camera.far_clip << "' instead.\n"
765 // << RESET;
766 // }
767
768 // if (auto v = config["zoom"].value<float>(); v) {
769 // camera.zoom = *v;
770 // } else {
771 // std::cout << YELLOW << " - Missing zoom, using default zoom of '" << camera.zoom
772 // << "' instead.\n"
773 // << RESET;
774 // }
775
776 // if (auto v = config["vertical_fov"].value<float>(); v) {
777 // camera.vertical_fov = *v;
778 // } else {
779 // std::cout << YELLOW << " - Missing 'vertical_fov', using default vertical FOV of '"
780 // << camera.vertical_fov << "' instead.\n"
781 // << RESET;
782 // }
783
784 // camera.projection_type = ufo::ProjectionType::ORTHOGONAL;
785
786 // std::filesystem::path save_dir;
787
788 // if (auto v = config["output_dir"].value<std::string_view>(); v) {
789 // save_dir = *v;
790 // } else {
791 // std::cerr << BOLDRED << " - Missing output_dir.\n" << RESET;
792 // exit(EXIT_FAILURE);
793 // }
794
795 // if ("" == save_dir) {
796 // std::cerr << BOLDRED << " - Empty output_dir.\n" << RESET;
797 // exit(EXIT_FAILURE);
798 // }
799
800 // return Renderer(camera.rays(), save_dir, false);
801}
802
803[[nodiscard]] std::vector<std::pair<std::filesystem::path, ufo::Transform3f>> loadDataset(
804 toml::table const& config)
805{
806 std::cout << "Loading dataset\n";
807
808 if (auto v = config["use_dataset"].value<bool>(); v) {
809 if (!(*v)) {
810 std::cout << " - dataset disabled.\n";
811 return std::vector<std::pair<std::filesystem::path, ufo::Transform3f>>();
812 }
813 } else {
814 std::cerr << BOLDRED << " - Missing use_dataset.\n" << RESET;
815 exit(EXIT_FAILURE);
816 }
817
818 std::filesystem::path dir;
819 if (auto v = config["dataset_path"].value<std::string_view>(); v) {
820 dir = *v;
821 } else {
822 std::cerr << BOLDRED << " - Missing dataset_path.\n" << RESET;
823 exit(EXIT_FAILURE);
824 }
825
826 if ("" == dir) {
827 std::cerr << BOLDRED << " - Empty dataset_path.\n" << RESET;
828 exit(EXIT_FAILURE);
829 }
830
831 std::vector<std::filesystem::path> clouds;
832 std::vector<ufo::Transform3f> poses;
833 for (auto const& entry : std::filesystem::directory_iterator(dir)) {
834 if (!entry.is_regular_file()) {
835 continue;
836 }
837
838 auto const& path = entry.path();
839
840 if (".pcd" == path.extension()) {
841 clouds.push_back(path);
842 } else if (".tsv" == path.extension()) {
843 if (!poses.empty()) {
844 std::cerr << BOLDRED << " - Multiple poses files (i.e., '.tsv' files).\n"
845 << RESET;
846 exit(EXIT_FAILURE);
847 }
848
849 std::ifstream data(path, std::ios::in | std::ios::binary);
850 std::string s;
851 while (std::getline(data, s)) {
852 std::stringstream ss(s);
853 std::vector<float> elements;
854
855 std::string tmp;
856 while (getline(ss, tmp, '\t')) {
857 elements.push_back(std::stof(tmp));
858 }
859
860 poses.emplace_back(
861 normalize(ufo::Quatf(elements[6], elements[3], elements[4], elements[5])),
862 ufo::Vec3f(elements[0], elements[1], elements[2]));
863 // poses.emplace_back(
864 // normalize(ufo::Quatf(elements[3], elements[4], elements[5], elements[6])),
865 // ufo::Vec3f(elements[0], elements[1], elements[2]));
866 }
867 }
868 }
869
870 std::cout << " - Found " + std::to_string(clouds.size()) + " clouds and " +
871 std::to_string(poses.size()) + " poses.\n";
872
873 if (clouds.size() != poses.size()) {
874 std::cerr << BOLDRED << " - Number of clouds is not the same as number of poses\n"
875 << RESET;
876 exit(EXIT_FAILURE);
877 }
878
879 assert(clouds.size() == poses.size());
880
881 std::sort(clouds.begin(), clouds.end());
882
883 std::vector<std::pair<std::filesystem::path, ufo::Transform3f>> ret;
884 ret.reserve(clouds.size());
885
886 std::transform(clouds.begin(), clouds.end(), poses.begin(), std::back_inserter(ret),
887 [](auto const& a, auto const& b) { return std::make_pair(a, b); });
888
889 return ret;
890}
891
// Entry point: parse the TOML config, build the map/integrator/cameras/
// renderer, optionally integrate a pre-recorded dataset into the map, then
// render the map once and exit.
int main(int argc, char* argv[])
{
	// Exactly one argument expected: the TOML configuration file.
	if (2 != argc) {
		std::cerr << BOLDRED << "Run the program like: `./UFOManipulation config.toml`\n"
		          << RESET;
		return EXIT_FAILURE;
	}

	toml::table tbl;
	try {
		tbl = toml::parse_file(argv[1]);
	} catch (toml::parse_error const& err) {
		std::cerr << BOLDRED << "Error parsing file '" << *err.source().path << "':\n"
		          << err.description() << "\n (" << err.source().begin << ")\n"
		          << RESET;
		return EXIT_FAILURE;
	}

	// NOTE(review): as_table() returns nullptr when a section is missing, so
	// each dereference below is UB for an incomplete config — validate first.
	Map map = createMap(*tbl["map"].as_table());
	// Map map(0.1);
	auto integrator = createIntegrator(*tbl["integrator"].as_table());
	auto cameras = createCameras(*tbl["cameras"].as_table());
	auto renderer = createRenderer(*tbl["renderer"].as_table());

	auto dataset = loadDataset(*tbl["dataset"].as_table());
	if (!dataset.empty()) {
		std::size_t i{};
		// First downsampled cloud, kept as the ICP registration target.
		pcl::PointCloud<pcl::PointXYZRGBA>::Ptr pcl_first(
		    new pcl::PointCloud<pcl::PointXYZRGBA>);
		// Axis permutation from camera convention to world convention.
		// NOTE(review): Eigen brace-initialization of matrices requires
		// Eigen >= 3.4 — confirm the minimum supported Eigen version.
		Eigen::Matrix4f camera_to_world{
		    // clang-format off
				{0, 0, 1, 0},
				{1, 0, 0, 0},
				{0, 1, 0, 0},
				{0, 0, 0, 1},
		    // clang-format on
		};
		for (auto const& [cloud_file, pose] : dataset) {
			std::cout << "\rIntegrating dataset [" << ++i << "/" << dataset.size() << "]"
			          << std::flush;

			pcl::PointCloud<pcl::PointXYZRGBA>::Ptr pcl_cloud(
			    new pcl::PointCloud<pcl::PointXYZRGBA>);
			if (-1 ==
			    pcl::io::loadPCDFile<pcl::PointXYZRGBA>(cloud_file.string(), *pcl_cloud)) {
				std::cerr << BOLDRED << "Could not read file " + cloud_file.string() + ".\n"
				          << RESET;
				exit(EXIT_FAILURE);
				// NOTE(review): unreachable after exit(); pick one — either skip
				// the file (continue) or abort (exit), not both.
				continue;
			}

			// Dataset clouds are in millimeters; convert to meters.
			for (auto& p : *pcl_cloud) {
				p.x *= 0.001f;
				p.y *= 0.001f;
				p.z *= 0.001f;
			}

			{
				// Column-major construction of the pose as an Eigen matrix:
				// pcl_tf's column j is tf's row j (i.e. the transpose pattern
				// needed to match ufo's storage order — confirm ufo::Mat4x4f
				// indexing is [col][row] as assumed here).
				ufo::Mat4x4f tf = static_cast<ufo::Mat4x4f>(pose);
				Eigen::Matrix4f pcl_tf{
				    // clang-format off
					{tf[0][0], tf[1][0], tf[2][0], tf[3][0]},
					{tf[0][1], tf[1][1], tf[2][1], tf[3][1]},
					{tf[0][2], tf[1][2], tf[2][2], tf[3][2]},
					{tf[0][3], tf[1][3], tf[2][3], tf[3][3]},
				    // clang-format on
				};

				// Apply camera-to-world axis permutation before transforming.
				pcl_tf *= camera_to_world;

				pcl::transformPointCloud(*pcl_cloud, *pcl_cloud, pcl_tf);

				// Crop to the workspace volume (world frame, meters).
				pcl::CropBox<pcl::PointXYZRGBA> filter;
				filter.setMin(Eigen::Vector4f(-2.7, -2.7, -1.0, 0.0));
				filter.setMax(Eigen::Vector4f(2.7, 2.7, 10.0, 0.0));
				filter.setInputCloud(pcl_cloud);
				pcl::PointCloud<pcl::PointXYZRGBA>::Ptr ds(
				    new pcl::PointCloud<pcl::PointXYZRGBA>);
				filter.filter(*ds);
				*pcl_cloud = *ds;

				// Downsample for registration.
				pcl::UniformSampling<pcl::PointXYZRGBA> sampler;
				// pcl::VoxelGrid<pcl::PointXYZRGBA> sampler;
				sampler.setInputCloud(pcl_cloud);

				double sample_size = 0.02;
				// NOTE(review): nn and std_ratio are unused (leftovers from an
				// outlier-removal experiment?).
				int nn = 30;
				double std_ratio = 1.5;
				sampler.setRadiusSearch(sample_size);
				// sampler.setLeafSize(sample_size, sample_size, sample_size);

				sampler.filter(*ds);

				// std::cout << pcl_cloud->size() << " vs " << ds->size() << '\n';

				// Color-aware GICP against the first cloud.
				// NOTE(review): icp is fully configured but align() is commented
				// out below, so registration currently has no effect.
				if (!pcl_first->empty()) {
					pcl::GeneralizedIterativeClosestPoint6D icp;
					// pcl::IterativeClosestPoint<pcl::PointXYZRGBA, pcl::PointXYZRGBA> icp;
					icp.setInputSource(ds);
					icp.setInputTarget(pcl_first);

					icp.setMaximumIterations(50);
					icp.setMaxCorrespondenceDistance(0.3);
					icp.setRANSACOutlierRejectionThreshold(sample_size * 6);
					// icp.useBFGS();

					icp.setEuclideanFitnessEpsilon(1e-07);
					icp.setTransformationEpsilon(1e-09);
					icp.setTransformationRotationEpsilon(1e-07);

					// pcl::PointCloud<pcl::PointXYZRGBA> final;
					// icp.align(final); //, pcl_tf);

					// // *pcl_cloud = final;

					// pcl_tf = icp.getFinalTransformation();
					// pcl_tf(0, 3) = 0;
					// pcl_tf(1, 3) = 0;
					// pcl_tf(2, 3) = 0;
					// pcl::transformPointCloud(*pcl_cloud, *pcl_cloud, pcl_tf);

					std::cout << '\n' << pcl_tf << '\n';
				}

				if (pcl_first->empty()) {
					pcl_first = ds;
					// pcl_first = pcl_cloud;
				}
			}

			// Convert the (already world-frame) PCL cloud to a UFO cloud with
			// perceptual Lab color.
			Cloud cloud;
			cloud.reserve(pcl_cloud->size());
			for (auto const& point : *pcl_cloud) {
				cloud.emplace_back(
				    ufo::Vec3f(point.x, point.y, point.z),
				    ufo::convert<ufo::FineLab>(ufo::SmallRGB{point.r, point.g, point.b}));
			}

			// ufo::transformInPlace(ufo::execution::par, (pose * camera_to_world), cloud);

			// Manual occupancy/color update per point (bypasses the integrator;
			// the integrator call below is commented out, leaving `integrator`
			// unused).
			auto nodes = map.create(ufo::execution::par, cloud.view<ufo::Vec3f>());

			// NOTE(review): this `i` shadows the dataset progress counter above.
			for (std::size_t i{}; cloud.size() > i; ++i) {
				// auto const& p = ufo::TreeCoord(cloud.view<ufo::Vec3f>()[i], 0);
				auto const& c = cloud.view<ufo::FineLab>()[i];
				auto n = nodes[i];
				map.occupancyUpdate(n, 0.6f, false);
				map.colorAdd(n, c, false);
				// map.occupancySet(n, 0.8f, false);
				// map.colorSet(n, c, false);
			}

			// for (float x)

			// integrator(ufo::execution::par, map, cloud, pose);

			// if (2 == i) {
			//   break;
			// }
		}
		std::cout << "\n";
	}

	// Push per-leaf updates up the tree, then clear modified flags.
	map.propagate(ufo::execution::par);
	map.modifiedReset();

	std::vector<std::future<bool>> res;
	std::size_t iter{};
	// Live-camera loop is currently disabled; the loop just counts to 10 and
	// renders once.
	while (true) {
		++iter;

		// res.clear();

		// // Start update for each camera
		// for (auto& camera : cameras) {
		//   res.emplace_back(std::async(std::launch::async, &Camera::update, &camera));
		// }

		// assert(cameras.size() == res.size());

		// for (std::size_t i{}; cameras.size() > i; ++i) {
		//   // Wait for camera update to finish
		//   res[i].wait();

		//   auto cloud = cameras[i].cloud();
		//   auto pose = cameras[i].pose();

		//   // Update map
		//   integrator(map, cloud, pose);
		// }

		if (10 == iter) {
			// Render view
			renderer.render(map);

			break;
		}
	}

	return EXIT_SUCCESS;
}
Image class for storing and manipulating 2D pixel data.
Definition image.hpp:78
constexpr size_type rows() const noexcept
Returns the number of rows (height) in the image.
Definition image.hpp:362
auto end(this auto &self) noexcept
Returns an iterator one past the last pixel.
Definition image.hpp:313
auto begin(this auto &self) noexcept
Returns an iterator to the first pixel.
Definition image.hpp:282
constexpr size_type cols() const noexcept
Returns the number of columns (width) in the image.
Definition image.hpp:368
Coord center() const
Returns the center of the tree (/ root node).
Definition tree.hpp:637
bool valid(pos_type block) const
Checks if a block is valid.
Definition tree.hpp:1361
constexpr T radians(T deg) noexcept
Converts degrees to radians.
Definition math.hpp:86
constexpr T degrees(T rad) noexcept
Converts radians to degrees.
Definition math.hpp:98
STL namespace.
constexpr Quat< T > normalize(Quat< T > const &q) noexcept
Returns a unit quaternion in the same direction as q.
Definition quat.hpp:679
constexpr Quat< T > lerp(Quat< T > const &x, Quat< T > const &y, T a) noexcept
Normalised linear interpolation (NLERP) between two quaternions.
Definition quat.hpp:770
PointCloud< Dim, T, Rest... > transform(Transform< Dim, T > const &t, PointCloud< Dim, T, Rest... > pc)
Applies a rigid transform to every point position and returns the result.
Oklab perceptual color.
Definition lab.hpp:77
Unit quaternion representing an orientation or rotation in 3-D space.
Definition quat.hpp:76
Rigid-body transform: a rotation matrix plus a translation vector.
Definition transform.hpp:81