#ifndef PCL_SURFACE_IMPL_TEXTURE_MAPPING_HPP_
#define PCL_SURFACE_IMPL_TEXTURE_MAPPING_HPP_

#include <pcl/surface/texture_mapping.h>
#include <unordered_set>
#include <algorithm>
#include <limits>
template<typename PointInT> std::vector<Eigen::Vector2f, Eigen::aligned_allocator<Eigen::Vector2f> >
pcl::TextureMapping<PointInT>::mapTexture2Face (
    const Eigen::Vector3f &p1,
    const Eigen::Vector3f &p2,
    const Eigen::Vector3f &p3)
  std::vector<Eigen::Vector2f, Eigen::aligned_allocator<Eigen::Vector2f> > tex_coordinates;

  // edge vectors of the face
  Eigen::Vector3f p1p2 (p2[0] - p1[0], p2[1] - p1[1], p2[2] - p1[2]);
  Eigen::Vector3f p1p3 (p3[0] - p1[0], p3[1] - p1[1], p3[2] - p1[2]);
  Eigen::Vector3f p2p3 (p3[0] - p2[0], p3[1] - p2[1], p3[2] - p2[2]);

  // normalize the edge vectors
  p1p2 /= std::sqrt (p1p2.dot (p1p2));
  p1p3 /= std::sqrt (p1p3.dot (p1p3));
  p2p3 /= std::sqrt (p2p3.dot (p2p3));

  // face normal
  Eigen::Vector3f f_normal = p1p2.cross (p1p3);
  f_normal /= std::sqrt (f_normal.dot (f_normal));

  // project the vector field onto the face plane: v_projected = v - dot (v, n) * n
  Eigen::Vector3f f_vector_field = vector_field_ - vector_field_.dot (f_normal) * f_normal;
  f_vector_field /= std::sqrt (f_vector_field.dot (f_vector_field));

  // texture coordinates of the three vertices
  Eigen::Vector2f tp1, tp2, tp3;

  // angle between the projected vector field and the first edge
  double alpha = std::acos (f_vector_field.dot (p1p2));

  // edge lengths, scaled by the factor f_
  double e1 = (p2 - p3).norm () / f_;
  double e2 = (p1 - p3).norm () / f_;
  double e3 = (p1 - p2).norm () / f_;
  // place tp1 at the origin and tp2 on the u axis, at a distance e3 from tp1
  tp1[0] = 0.0;
  tp1[1] = 0.0;
  tp2[0] = static_cast<float> (e3);
  tp2[1] = 0.0;

  // locate tp3 using the law of cosines at vertex p1
  double cos_p1 = (e2 * e2 + e3 * e3 - e1 * e1) / (2 * e2 * e3);
  double sin_p1 = std::sqrt (1 - (cos_p1 * cos_p1));

  tp3[0] = static_cast<float> (cos_p1 * e2);
  tp3[1] = static_cast<float> (sin_p1 * e2);

  // rotate the triangle by alpha, so the texture follows the projected vector field
  Eigen::Vector2f r_tp2, r_tp3;
  r_tp2[0] = static_cast<float> (tp2[0] * std::cos (alpha) - tp2[1] * std::sin (alpha));
  r_tp2[1] = static_cast<float> (tp2[0] * std::sin (alpha) + tp2[1] * std::cos (alpha));

  r_tp3[0] = static_cast<float> (tp3[0] * std::cos (alpha) - tp3[1] * std::sin (alpha));
  r_tp3[1] = static_cast<float> (tp3[0] * std::sin (alpha) + tp3[1] * std::cos (alpha));

  tp2 = r_tp2;
  tp3 = r_tp3;
  // shift the triangle so that the smallest u and v coordinates become zero
  float min_x = tp1[0];
  float min_y = tp1[1];
  min_x = std::min (min_x, std::min (tp2[0], tp3[0]));
  min_y = std::min (min_y, std::min (tp2[1], tp3[1]));

  if (min_x < 0)
  {
    tp1[0] -= min_x;
    tp2[0] -= min_x;
    tp3[0] -= min_x;
  }
  if (min_y < 0)
  {
    tp1[1] -= min_y;
    tp2[1] -= min_y;
    tp3[1] -= min_y;
  }

  tex_coordinates.push_back (tp1);
  tex_coordinates.push_back (tp2);
  tex_coordinates.push_back (tp3);
  return (tex_coordinates);
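// mapTexture2Mesh: maps the texture files set with setTextureFiles() onto the mesh, computing
// per-face UV coordinates with mapTexture2Face(). A minimal usage sketch (illustrative only;
// `tex_files` is a hypothetical list of texture image paths, one per sub-mesh):
//
//   pcl::TextureMapping<pcl::PointXYZ> tm;
//   tm.setF (0.01f);               // scale factor used when computing the face coordinates
//   tm.setVectorField (1, 0, 0);   // direction the texture is aligned with on each face
//   tm.setTextureFiles (tex_files);
//   tm.mapTexture2Mesh (tex_mesh);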
template<typename PointInT> void
pcl::TextureMapping<PointInT>::mapTexture2Mesh (pcl::TextureMesh &tex_mesh)
  // mesh information
  int nr_points = tex_mesh.cloud.width * tex_mesh.cloud.height;
  int point_size = static_cast<int> (tex_mesh.cloud.data.size ()) / nr_points;
  // coordinates of the three vertices of the current face
  Eigen::Vector3f facet[3];

  // texture coordinates of all sub-meshes
  std::vector<std::vector<Eigen::Vector2f, Eigen::aligned_allocator<Eigen::Vector2f> > > texture_map;

  // processing each sub-mesh
  for (std::size_t m = 0; m < tex_mesh.tex_polygons.size (); ++m)
    // texture coordinates of the current sub-mesh
    std::vector<Eigen::Vector2f, Eigen::aligned_allocator<Eigen::Vector2f> > texture_map_tmp;

    // processing each face of the current sub-mesh
    for (std::size_t i = 0; i < tex_mesh.tex_polygons[m].size (); ++i)
      // processing each vertex of the face
      for (std::size_t j = 0; j < tex_mesh.tex_polygons[m][i].vertices.size (); ++j)
        // fetch the 3D coordinates of the current vertex from the cloud blob
        std::size_t idx = tex_mesh.tex_polygons[m][i].vertices[j];
        float x, y, z;
        memcpy (&x, &tex_mesh.cloud.data[idx * point_size + tex_mesh.cloud.fields[0].offset], sizeof (float));
        memcpy (&y, &tex_mesh.cloud.data[idx * point_size + tex_mesh.cloud.fields[1].offset], sizeof (float));
        memcpy (&z, &tex_mesh.cloud.data[idx * point_size + tex_mesh.cloud.fields[2].offset], sizeof (float));
        facet[j][0] = x;
        facet[j][1] = y;
        facet[j][2] = z;
      // compute the UV coordinates of the face and store them
      std::vector<Eigen::Vector2f, Eigen::aligned_allocator<Eigen::Vector2f> > tex_coordinates = mapTexture2Face (facet[0], facet[1], facet[2]);
      for (const auto &tex_coordinate : tex_coordinates)
        texture_map_tmp.push_back (tex_coordinate);

    // texture material for the current sub-mesh
    tex_material_.tex_name = "material_" + std::to_string (m);
    tex_material_.tex_file = tex_files_[m];
    tex_mesh.tex_materials.push_back (tex_material_);

    // texture coordinates of the current sub-mesh
    tex_mesh.tex_coordinates.push_back (texture_map_tmp);
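// mapTexture2MeshUV: maps a texture to the mesh using a planar UV mapping; the UV coordinates
// are obtained by normalizing the x and z coordinates of each vertex by the extent of the cloud.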
template<typename PointInT> void
pcl::TextureMapping<PointInT>::mapTexture2MeshUV (pcl::TextureMesh &tex_mesh)
  // mesh information
  int nr_points = tex_mesh.cloud.width * tex_mesh.cloud.height;
  int point_size = static_cast<int> (tex_mesh.cloud.data.size ()) / nr_points;

  // lowest coordinate values found in the cloud (used to normalize the UV coordinates)
  float x_lowest = 100000;
  float y_lowest = 100000;
  float z_lowest = 100000;
  float x_, y_, z_;
  // find the lowest and highest coordinate values of the cloud
  for (int i = 0; i < nr_points; ++i)
    memcpy (&x_, &tex_mesh.cloud.data[i * point_size + tex_mesh.cloud.fields[0].offset], sizeof (float));
    memcpy (&y_, &tex_mesh.cloud.data[i * point_size + tex_mesh.cloud.fields[1].offset], sizeof (float));
    memcpy (&z_, &tex_mesh.cloud.data[i * point_size + tex_mesh.cloud.fields[2].offset], sizeof (float));

  // x and z extent of the cloud (the UV coordinates are computed from x and z only)
  float x_range = (x_lowest - x_highest) * -1;
  float x_offset = 0 - x_lowest;
  float z_range = (z_lowest - z_highest) * -1;
  float z_offset = 0 - z_lowest;

  // texture coordinates of all sub-meshes
  std::vector<std::vector<Eigen::Vector2f, Eigen::aligned_allocator<Eigen::Vector2f> > > texture_map;

  // processing each sub-mesh
  for (std::size_t m = 0; m < tex_mesh.tex_polygons.size (); ++m)
    // texture coordinates of the current sub-mesh
    std::vector<Eigen::Vector2f, Eigen::aligned_allocator<Eigen::Vector2f> > texture_map_tmp;

    // processing each face of the current sub-mesh
    for (std::size_t i = 0; i < tex_mesh.tex_polygons[m].size (); ++i)
      Eigen::Vector2f tmp_VT;
      // processing each vertex of the face
      for (std::size_t j = 0; j < tex_mesh.tex_polygons[m][i].vertices.size (); ++j)
        std::size_t idx = tex_mesh.tex_polygons[m][i].vertices[j];
        memcpy (&x_, &tex_mesh.cloud.data[idx * point_size + tex_mesh.cloud.fields[0].offset], sizeof (float));
        memcpy (&y_, &tex_mesh.cloud.data[idx * point_size + tex_mesh.cloud.fields[1].offset], sizeof (float));
        memcpy (&z_, &tex_mesh.cloud.data[idx * point_size + tex_mesh.cloud.fields[2].offset], sizeof (float));
        // normalize x and z into [0, 1] to obtain the UV coordinates of the vertex
        tmp_VT[0] = (x_ + x_offset) / x_range;
        tmp_VT[1] = (z_ + z_offset) / z_range;
        texture_map_tmp.push_back (tmp_VT);

    // texture material for the current sub-mesh
    tex_material_.tex_name = "material_" + std::to_string (m);
    tex_material_.tex_file = tex_files_[m];
    tex_mesh.tex_materials.push_back (tex_material_);

    // texture coordinates of the current sub-mesh
    tex_mesh.tex_coordinates.push_back (texture_map_tmp);
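// mapMultipleTexturesToMeshUV: maps textures acquired from a set of cameras onto a mesh that has
// already been segmented into one sub-mesh per camera, plus one sub-mesh for the occluded faces.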
template<typename PointInT> void
pcl::TextureMapping<PointInT>::mapMultipleTexturesToMeshUV (pcl::TextureMesh &tex_mesh, pcl::texture_mapping::CameraVector &cams)
  // the mesh must be segmented into one sub-mesh per camera, plus one holding the occluded faces
  if (tex_mesh.tex_polygons.size () != cams.size () + 1)
  {
    PCL_ERROR ("The mesh should be divided into nbCamera+1 sub-meshes.\n");
    PCL_ERROR ("You provided %zu cameras and a mesh containing %zu sub-meshes.\n", cams.size (), tex_mesh.tex_polygons.size ());
    return;
  }

  PCL_INFO ("You provided %zu cameras and a mesh containing %zu sub-meshes.\n", cams.size (), tex_mesh.tex_polygons.size ());
  // processing each camera
  for (std::size_t m = 0; m < cams.size (); ++m)
    // the current camera and its pose
    Camera current_cam = cams[m];
    Eigen::Affine3f cam_trans = current_cam.pose;

    // texture coordinates of the current sub-mesh
    std::vector<Eigen::Vector2f, Eigen::aligned_allocator<Eigen::Vector2f> > texture_map_tmp;

    // processing each face visible by the current camera
    for (const auto &tex_polygon : tex_mesh.tex_polygons[m])
      Eigen::Vector2f tmp_VT;
      // processing each vertex of the face
      for (const auto &vertex : tex_polygon.vertices)
        // project the vertex (expressed in the camera frame) onto the camera's image plane
        PointInT pt = (*camera_transformed_cloud)[vertex];
        getPointUVCoordinates (pt, current_cam, tmp_VT);
        texture_map_tmp.push_back (tmp_VT);

    // texture material for the current camera
    tex_material_.tex_name = "material_" + std::to_string (m);
  // the last sub-mesh contains the occluded faces: they receive invalid (-1, -1) UV coordinates
  std::vector<Eigen::Vector2f, Eigen::aligned_allocator<Eigen::Vector2f> > texture_map_tmp;
  for (const auto &tex_polygon : tex_mesh.tex_polygons[cams.size ()])
    for (std::size_t j = 0; j < tex_polygon.vertices.size (); ++j)
      Eigen::Vector2f tmp_VT;
      tmp_VT[0] = -1;
      tmp_VT[1] = -1;
      texture_map_tmp.push_back (tmp_VT);
  // texture material for the occluded sub-mesh
  tex_material_.tex_name = "material_" + std::to_string (cams.size ());
  tex_material_.tex_file = "occluded.jpg";
template<typename PointInT> bool
pcl::TextureMapping<PointInT>::isPointOccluded (const PointInT &pt, const OctreePtr octree)
  // direction of the ray going from the point towards the camera origin
  Eigen::Vector3f direction;
  direction (0) = pt.x;
  direction (1) = pt.y;
  direction (2) = pt.z;

  // cloud the octree was built on
  PointCloudConstPtr cloud = octree->getInputCloud ();

  // points closer than the octree resolution are not considered occluders
  double distance_threshold = octree->getResolution ();

  // raytrace from the point towards the origin and collect the intersected voxels
  pcl::Indices indices;
  octree->getIntersectedVoxelIndices (direction, -direction, indices);

  // count the intersected points that can actually occlude pt
  int nbocc = static_cast<int> (indices.size ());
  for (const auto &index : indices)
    // an intersected point lying on the other side of the camera does not occlude
    if (pt.z * (*cloud)[index].z < 0)
      nbocc--;
    // an intersected point (almost) coinciding with pt does not occlude it either
    else if (std::fabs ((*cloud)[index].z - pt.z) <= distance_threshold)
      nbocc--;

  // the point is occluded if at least one valid occluder remains
  return (nbocc != 0);
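// removeOccludedPoints: removes the occluded points from a point cloud expressed in the camera
// frame, and reports the visible and occluded point indices. A minimal usage sketch (illustrative
// only; `camera_frame_cloud` is a hypothetical cloud already transformed into the camera frame):
//
//   pcl::TextureMapping<pcl::PointXYZ> tm;
//   pcl::PointCloud<pcl::PointXYZ>::Ptr visible_cloud (new pcl::PointCloud<pcl::PointXYZ>);
//   pcl::Indices visible, occluded;
//   tm.removeOccludedPoints (camera_frame_cloud, visible_cloud, 0.01, visible, occluded);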
template<typename PointInT> void
pcl::TextureMapping<PointInT>::removeOccludedPoints (const PointCloudPtr &input_cloud,
                                                     PointCloudPtr &filtered_cloud,
                                                     const double octree_voxel_size,
                                                     pcl::Indices &visible_indices,
                                                     pcl::Indices &occluded_indices)
  // maximum depth difference tolerated between a point and its potential occluder
  double maxDeltaZ = octree_voxel_size;

  // create an octree on the input cloud to perform the raycasting
  Octree octree (octree_voxel_size);
  octree.setInputCloud (input_cloud);
  octree.addPointsFromInputCloud ();

  visible_indices.clear ();

  // ray direction and indices of the intersected voxels
  Eigen::Vector3f direction;
  pcl::Indices indices;

  for (std::size_t i = 0; i < input_cloud->size (); ++i)
    direction (0) = (*input_cloud)[i].x;
    direction (1) = (*input_cloud)[i].y;
    direction (2) = (*input_cloud)[i].z;

    // raytrace from the point towards the camera origin
    octree.getIntersectedVoxelIndices (direction, -direction, indices);

    // count the intersected points that can actually occlude the current point
    int nbocc = static_cast<int> (indices.size ());
    for (const auto &index : indices)
      // an intersected point lying on the other side of the camera does not occlude
      if ((*input_cloud)[i].z * (*input_cloud)[index].z < 0)
        nbocc--;
      // an intersected point (almost) coinciding with the current point does not occlude it either
      else if (std::fabs ((*input_cloud)[index].z - (*input_cloud)[i].z) <= maxDeltaZ)
        nbocc--;

    if (nbocc == 0)
    {
      // point is visible: keep it
      filtered_cloud->points.push_back ((*input_cloud)[i]);
      visible_indices.push_back (static_cast<pcl::index_t> (i));
    }
    else
    {
      // point is occluded
      occluded_indices.push_back (static_cast<pcl::index_t> (i));
    }
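// removeOccludedPoints: TextureMesh overload; produces a copy of the mesh in which every face
// that has at least one occluded vertex has been removed.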
template<typename PointInT> void
pcl::TextureMapping<PointInT>::removeOccludedPoints (const pcl::TextureMesh &tex_mesh, pcl::TextureMesh &cleaned_mesh, const double octree_voxel_size)
  // start from a copy of the input mesh; occluded faces are removed from the copy
  cleaned_mesh = tex_mesh;

  // load the mesh cloud into a PCL point cloud and detect the occluded points
  typename pcl::PointCloud<PointInT>::Ptr cloud (new pcl::PointCloud<PointInT>);
  typename pcl::PointCloud<PointInT>::Ptr filtered_cloud (new pcl::PointCloud<PointInT>);
  pcl::fromPCLPointCloud2 (tex_mesh.cloud, *cloud);

  pcl::Indices visible, occluded;
  removeOccludedPoints (cloud, filtered_cloud, octree_voxel_size, visible, occluded);

  // a face is kept only if none of its vertices is occluded
  for (std::size_t polygons = 0; polygons < cleaned_mesh.tex_polygons.size (); ++polygons)
    // the faces of the cleaned sub-mesh are rebuilt from scratch
    cleaned_mesh.tex_polygons[polygons].clear ();

    for (std::size_t faces = 0; faces < tex_mesh.tex_polygons[polygons].size (); ++faces)
      // check whether all the vertices of the face are visible
      bool faceIsVisible = true;

      for (const auto &vertex : tex_mesh.tex_polygons[polygons][faces].vertices)
        if (find (occluded.begin (), occluded.end (), vertex) != occluded.end ())
          // the vertex is occluded, so the whole face is discarded
          faceIsVisible = false;

      if (faceIsVisible)
        cleaned_mesh.tex_polygons[polygons].push_back (tex_mesh.tex_polygons[polygons][faces]);
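// removeOccludedPoints: convenience overload that only returns the filtered (visible) cloud.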
template<typename PointInT> void
pcl::TextureMapping<PointInT>::removeOccludedPoints (const pcl::TextureMesh &tex_mesh,
                                                     PointCloudPtr &filtered_cloud,
                                                     const double octree_voxel_size)
  // convert the mesh cloud and delegate to the point cloud version; the index lists are discarded
  typename pcl::PointCloud<PointInT>::Ptr cloud (new pcl::PointCloud<PointInT>);
  pcl::fromPCLPointCloud2 (tex_mesh.cloud, *cloud);

  pcl::Indices visible, occluded;
  removeOccludedPoints (cloud, filtered_cloud, octree_voxel_size, visible, occluded);
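// sortFacesByCamera: segments the faces of a single sub-mesh by camera visibility; the faces
// visible from each camera are appended as a new sub-mesh of sorted_mesh.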
template<typename PointInT> int
pcl::TextureMapping<PointInT>::sortFacesByCamera (pcl::TextureMesh &tex_mesh,
                                                  pcl::TextureMesh &sorted_mesh,
                                                  const pcl::texture_mapping::CameraVector &cameras,
                                                  const double octree_voxel_size,
                                                  PointCloud &visible_pts)
  // the input mesh must contain a single sub-mesh, and at least one camera is required
  if (tex_mesh.tex_polygons.size () != 1)
    PCL_ERROR ("The mesh must contain only 1 sub-mesh!\n");

  if (cameras.empty ())
    PCL_ERROR ("Must provide at least one camera info!\n");

  // copy the input mesh; one sub-mesh per camera is appended to the copy below
  sorted_mesh = tex_mesh;

  // processing each camera
  for (const auto &camera : cameras)
    // pose of the current camera, used to transform the cloud into its frame
    Eigen::Affine3f cam_trans = camera.pose;

    // remove the points occluded from the current viewpoint
    removeOccludedPoints (transformed_cloud, filtered_cloud, octree_voxel_size, visible, occluded);
    visible_pts = *filtered_cloud;

    // store the occluded point indices in a set for fast lookup
    std::unordered_set<index_t> occluded_set (occluded.cbegin (), occluded.cend ());

    // faces that are visible from the current camera
    std::vector<pcl::Vertices> visibleFaces_currentCam;

    // iterate over the faces of the original (single) sub-mesh
    for (std::size_t faces = 0; faces < tex_mesh.tex_polygons[0].size (); ++faces)
      // the face is visible only if none of its vertices is occluded and
      // all of them project onto the camera's image plane
      const auto faceIsVisible = std::all_of (tex_mesh.tex_polygons[0][faces].vertices.cbegin (),
                                              tex_mesh.tex_polygons[0][faces].vertices.cend (),
                                              [&](const auto& vertex)
                                              {
                                                if (occluded_set.find (vertex) != occluded_set.cend ()) {
                                                  // the vertex is occluded: the face cannot be textured by this camera
                                                  return false;
                                                }
                                                // the vertex must also project onto the camera's image plane
                                                Eigen::Vector2f dummy_UV;
                                                return this->getPointUVCoordinates ((*transformed_cloud)[vertex], camera, dummy_UV);
                                              });

      if (faceIsVisible)
        visibleFaces_currentCam.push_back (tex_mesh.tex_polygons[0][faces]);

    // faces visible from the current camera become a new sub-mesh of the sorted mesh
    sorted_mesh.tex_polygons.push_back (visibleFaces_currentCam);

  // the faces of the original sub-mesh are appended as the last sub-mesh
  sorted_mesh.tex_polygons.push_back (tex_mesh.tex_polygons[0]);
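// showOcclusions: colors a point cloud depending on its occlusions; the intensity channel either
// encodes the number of occlusions (up to max_occlusions) or a binary visible / occluded flag.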
template<typename PointInT> void
pcl::TextureMapping<PointInT>::showOcclusions (const PointCloudPtr &input_cloud,
                                               pcl::PointCloud<pcl::PointXYZI>::Ptr &colored_cloud,
                                               const double octree_voxel_size,
                                               const bool show_nb_occlusions,
                                               const int max_occlusions)
  // maximum depth difference tolerated between a point and its potential occluder
  double maxDeltaZ = octree_voxel_size * 2.0;

  // create an octree on the input cloud to perform the raycasting
  Octree octree (octree_voxel_size);
  octree.setInputCloud (input_cloud);
  octree.addPointsFromInputCloud ();

  // ray direction, intersected voxels, colored point and occlusion statistics
  Eigen::Vector3f direction;
  pcl::Indices indices;
  pcl::PointXYZI pt;
  int nbocc;

  std::vector<double> zDist;
  std::vector<double> ptDist;
  for (const auto& point: *input_cloud)
    // copy the point coordinates into the colored point and use them as the ray direction
    direction = pt.getVector3fMap() = point.getVector3fMap();

    // raytrace from the point towards the camera origin
    octree.getIntersectedVoxelIndices (direction, -direction, indices);

    // count the intersected points that can actually occlude the current point
    nbocc = static_cast<int> (indices.size ());
    for (const auto &index : indices)
      // an intersected point lying on the other side of the camera does not occlude
      if (pt.z * (*input_cloud)[index].z < 0)
        nbocc--;
      // an intersected point (almost) coinciding with the current point does not occlude it either
      else if (std::fabs ((*input_cloud)[index].z - pt.z) <= maxDeltaZ)
        nbocc--;
      else
      {
        // record the depth and euclidean distances to the occluders (for statistics)
        zDist.push_back (std::fabs ((*input_cloud)[index].z - pt.z));
        ptDist.push_back (pcl::euclideanDistance ((*input_cloud)[index], pt));
      }
    // color the point according to its number of occlusions
    if (show_nb_occlusions)
      // intensity encodes the number of occluders, clamped to max_occlusions
      (nbocc <= max_occlusions) ? (pt.intensity = static_cast<float> (nbocc)) : (pt.intensity = static_cast<float> (max_occlusions));
    else
      // binary coloring: 0 if the point is visible, 1 if it is occluded
      (nbocc == 0) ? (pt.intensity = 0) : (pt.intensity = 1);

    colored_cloud->points.push_back (pt);
  // sort the recorded distances
  if (zDist.size () >= 2)
  {
    std::sort (zDist.begin (), zDist.end ());
    std::sort (ptDist.begin (), ptDist.end ());
  }
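// showOcclusions: TextureMesh overload; converts the mesh cloud and delegates to the version above.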
template<typename PointInT> void
pcl::TextureMapping<PointInT>::showOcclusions (pcl::TextureMesh &tex_mesh,
                                               pcl::PointCloud<pcl::PointXYZI>::Ptr &colored_cloud,
                                               double octree_voxel_size,
                                               bool show_nb_occlusions,
                                               int max_occlusions)
  // convert the mesh cloud and delegate to the point cloud version
  typename pcl::PointCloud<PointInT>::Ptr cloud (new pcl::PointCloud<PointInT>);
  pcl::fromPCLPointCloud2 (tex_mesh.cloud, *cloud);
  showOcclusions (cloud, colored_cloud, octree_voxel_size, show_nb_occlusions, max_occlusions);
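// textureMeshwithMultipleCameras: segments and textures the faces of the mesh by camera
// visibility, filling mesh.tex_coordinates for every camera. A minimal usage sketch (illustrative
// only; `mesh` is the TextureMesh to texture and `cams` the calibrated cameras):
//
//   pcl::TextureMapping<pcl::PointXYZ> tm;
//   tm.textureMeshwithMultipleCameras (mesh, cams);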
template<typename PointInT> void
pcl::TextureMapping<PointInT>::textureMeshwithMultipleCameras (pcl::TextureMesh &mesh, const pcl::texture_mapping::CameraVector &cameras)
  std::vector<pcl::Vertices> faces;

  // processing each camera
  for (int current_cam = 0; current_cam < static_cast<int> (cameras.size ()); ++current_cam)
    PCL_INFO ("Processing camera %d of %zu.\n", current_cam + 1, cameras.size ());

    // per-face visibility flags for the current camera
    std::vector<bool> visibility;
    visibility.resize (mesh.tex_polygons[current_cam].size ());

    // for each projected UV coordinate, remember the 3D point and the face it comes from
    std::vector<UvIndex> indexes_uv_to_points;

    // UV coordinates of invisible points are set to NaN
    pcl::PointXY nan_point;
    nan_point.x = std::numeric_limits<float>::quiet_NaN ();
    nan_point.y = std::numeric_limits<float>::quiet_NaN ();
    // iterate over the faces assigned to the current camera
    for (int idx_face = 0; idx_face < static_cast<int> (mesh.tex_polygons[current_cam].size ()); ++idx_face)
      // project the three vertices of the face onto the camera's image plane
      if (isFaceProjected (cameras[current_cam],
                           (*camera_cloud)[mesh.tex_polygons[current_cam][idx_face].vertices[0]],
                           (*camera_cloud)[mesh.tex_polygons[current_cam][idx_face].vertices[1]],
                           (*camera_cloud)[mesh.tex_polygons[current_cam][idx_face].vertices[2]],
                           uv_coord1, uv_coord2, uv_coord3))
        // the face is seen by the current camera: store its projected UV coordinates
        projections->points.push_back (uv_coord1);
        projections->points.push_back (uv_coord2);
        projections->points.push_back (uv_coord3);

        // remember which 3D point and which face each UV coordinate belongs to
        indexes_uv_to_points.push_back (u1);
        indexes_uv_to_points.push_back (u2);
        indexes_uv_to_points.push_back (u3);

        visibility[idx_face] = true;
        // the face is not seen by the current camera: push NaN coordinates and null indices
        projections->points.push_back (nan_point);
        projections->points.push_back (nan_point);
        projections->points.push_back (nan_point);
        indexes_uv_to_points.push_back (u_null);
        indexes_uv_to_points.push_back (u_null);
        indexes_uv_to_points.push_back (u_null);

        visibility[idx_face] = false;
    // if some faces of the current camera are still visible, check whether they are occluded
    // by faces that are closer to the camera
    if (visibility.size () - cpt_invisible != 0)
      // build a kd-tree on the projected UV coordinates
      pcl::KdTreeFLANN<pcl::PointXY> kdtree;
      kdtree.setInputCloud (projections);

      pcl::Indices idxNeighbors;
      std::vector<float> neighborsSquaredDistance;

      // check against the faces assigned to this camera and to the previous ones
      for (int idx_pcam = 0 ; idx_pcam <= current_cam ; ++idx_pcam)
        // iterate over the faces of that camera
        for (int idx_face = 0; idx_face < static_cast<int> (mesh.tex_polygons[idx_pcam].size ()); ++idx_face)
          // faces of the current camera already marked invisible need not be checked again
          if (idx_pcam == current_cam && !visibility[idx_face])
            continue;
          // project the candidate occluding face onto the camera's image plane
          if (isFaceProjected (cameras[current_cam],
                               (*camera_cloud)[mesh.tex_polygons[idx_pcam][idx_face].vertices[0]],
                               (*camera_cloud)[mesh.tex_polygons[idx_pcam][idx_face].vertices[1]],
                               (*camera_cloud)[mesh.tex_polygons[idx_pcam][idx_face].vertices[2]],
                               uv_coord1, uv_coord2, uv_coord3))
            // circle containing the projected face, used to query the kd-tree
            getTriangleCircumcscribedCircleCentroid (uv_coord1, uv_coord2, uv_coord3, center, radius);

            // get the projected points lying inside that circle
            if (kdtree.radiusSearch (center, radius, idxNeighbors, neighborsSquaredDistance) > 0)
              for (const auto &idxNeighbor : idxNeighbors)
                // if the candidate face lies entirely in front of the neighboring projected point...
                if (std::max ((*camera_cloud)[mesh.tex_polygons[idx_pcam][idx_face].vertices[0]].z,
                              std::max ((*camera_cloud)[mesh.tex_polygons[idx_pcam][idx_face].vertices[1]].z,
                                        (*camera_cloud)[mesh.tex_polygons[idx_pcam][idx_face].vertices[2]].z))
                    < (*camera_cloud)[indexes_uv_to_points[idxNeighbor].idx_cloud].z)
                  // ...and the point projects inside the face...
                  if (checkPointInsideTriangle (uv_coord1, uv_coord2, uv_coord3, (*projections)[idxNeighbor]))
                    // ...then the face owning that point is occluded by the current face
                    visibility[indexes_uv_to_points[idxNeighbor].idx_face] = false;
    // container for the UV coordinates of the current camera
    std::vector<Eigen::Vector2f, Eigen::aligned_allocator<Eigen::Vector2f> > dummy_container;

    // split the faces of the current camera into occluded and visible ones
    std::vector<pcl::Vertices> occluded_faces;
    occluded_faces.resize (visibility.size ());
    std::vector<pcl::Vertices> visible_faces;
    visible_faces.resize (visibility.size ());

    int cpt_occluded_faces = 0;
    int cpt_visible_faces = 0;
    for (std::size_t idx_face = 0 ; idx_face < visibility.size () ; ++idx_face)
      if (visibility[idx_face])
        // the face is visible from the current camera: copy its projected UV coordinates
        mesh.tex_coordinates[current_cam][cpt_visible_faces * 3](0) = (*projections)[idx_face * 3].x;
        mesh.tex_coordinates[current_cam][cpt_visible_faces * 3](1) = (*projections)[idx_face * 3].y;

        mesh.tex_coordinates[current_cam][cpt_visible_faces * 3 + 1](0) = (*projections)[idx_face * 3 + 1].x;
        mesh.tex_coordinates[current_cam][cpt_visible_faces * 3 + 1](1) = (*projections)[idx_face * 3 + 1].y;

        mesh.tex_coordinates[current_cam][cpt_visible_faces * 3 + 2](0) = (*projections)[idx_face * 3 + 2].x;
        mesh.tex_coordinates[current_cam][cpt_visible_faces * 3 + 2](1) = (*projections)[idx_face * 3 + 2].y;
        visible_faces[cpt_visible_faces] = mesh.tex_polygons[current_cam][idx_face];
        cpt_visible_faces++;

        // the face is occluded: keep it apart so it can be handled by the next cameras
        occluded_faces[cpt_occluded_faces] = mesh.tex_polygons[current_cam][idx_face];
        cpt_occluded_faces++;

    // shrink both containers to the actual number of faces
    occluded_faces.resize (cpt_occluded_faces);
    visible_faces.resize (cpt_visible_faces);
  // the last sub-mesh contains the faces no camera could see: give them invalid UV coordinates
  std::vector<Eigen::Vector2f, Eigen::aligned_allocator<Eigen::Vector2f> > dummy_container;
  for (std::size_t idx_face = 0 ; idx_face < mesh.tex_polygons[cameras.size ()].size () ; ++idx_face)
    Eigen::Vector2f UV1, UV2, UV3;
    UV1 (0) = -1.0; UV1 (1) = -1.0;
    UV2 (0) = -1.0; UV2 (1) = -1.0;
    UV3 (0) = -1.0; UV3 (1) = -1.0;
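// getTriangleCircumcenterAndSize: returns the exact circumcenter of a 2D triangle and the radius
// of its circumscribed circle.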
template<typename PointInT> inline void
pcl::TextureMapping<PointInT>::getTriangleCircumcenterAndSize (const pcl::PointXY &p1, const pcl::PointXY &p2, const pcl::PointXY &p3, pcl::PointXY &circomcenter, double &radius)
  // translate the triangle so that p1 lies at the origin
  pcl::PointXY ptB, ptC;
  ptB.x = p2.x - p1.x; ptB.y = p2.y - p1.y;
  ptC.x = p3.x - p1.x; ptC.y = p3.y - p1.y;

  // twice the signed area of the triangle; it is zero for degenerate (collinear) triangles
  double D = 2.0 * (ptB.x * ptC.y - ptB.y * ptC.x);
  if (std::abs (D) < std::numeric_limits<double>::epsilon ())
  {
    // degenerate triangle: fall back to p1 as the center
    circomcenter.x = p1.x;
    circomcenter.y = p1.y;
  }
  else
  {
    // standard circumcenter formula for a triangle with one vertex at the origin
    double bx2 = ptB.x * ptB.x;
    double by2 = ptB.y * ptB.y;
    double cx2 = ptC.x * ptC.x;
    double cy2 = ptC.y * ptC.y;

    circomcenter.x = static_cast<float> (p1.x + (ptC.y * (bx2 + by2) - ptB.y * (cx2 + cy2)) / D);
    circomcenter.y = static_cast<float> (p1.y + (ptB.x * (cx2 + cy2) - ptC.x * (bx2 + by2)) / D);
  }
  // radius of the circumscribed circle: distance from the center to p1
  radius = std::sqrt ((circomcenter.x - p1.x) * (circomcenter.x - p1.x) + (circomcenter.y - p1.y) * (circomcenter.y - p1.y));
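// getTriangleCircumcscribedCircleCentroid: cheaper variant that uses the triangle's centroid as
// the center and returns the radius of a circle, centered on the centroid, containing the triangle.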
template<typename PointInT> inline void
pcl::TextureMapping<PointInT>::getTriangleCircumcscribedCircleCentroid (const pcl::PointXY &p1, const pcl::PointXY &p2, const pcl::PointXY &p3, pcl::PointXY &circumcenter, double &radius)
  // the center is the centroid of the triangle
  circumcenter.x = static_cast<float> (p1.x + p2.x + p3.x) / 3;
  circumcenter.y = static_cast<float> (p1.y + p2.y + p3.y) / 3;

  // squared distances from the centroid to each vertex
  double r1 = (circumcenter.x - p1.x) * (circumcenter.x - p1.x) + (circumcenter.y - p1.y) * (circumcenter.y - p1.y);
  double r2 = (circumcenter.x - p2.x) * (circumcenter.x - p2.x) + (circumcenter.y - p2.y) * (circumcenter.y - p2.y);
  double r3 = (circumcenter.x - p3.x) * (circumcenter.x - p3.x) + (circumcenter.y - p3.y) * (circumcenter.y - p3.y);

  // the radius is the distance to the farthest vertex, so the circle contains the whole triangle
  radius = std::sqrt (std::max (r1, std::max (r2, r3)));
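// getPointUVCoordinates: computes the normalized UV coordinates of a 3D point (expressed in the
// camera frame) as seen by one particular camera; returns false if the point is not visible.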
template<typename PointInT> inline bool
pcl::TextureMapping<PointInT>::getPointUVCoordinates (const PointInT &pt, const Camera &cam, pcl::PointXY &UV_coordinates)
  // image dimensions in pixels
  double sizeX = cam.width;
  double sizeY = cam.height;

  // focal lengths and principal point of the camera
  double focal_x, focal_y;
  double cx, cy;
  // pinhole projection of the 3D point onto the image plane, normalized to [0, 1]
  UV_coordinates.x = static_cast<float> ((focal_x * (pt.x / pt.z) + cx) / sizeX);
  UV_coordinates.y = 1.0f - static_cast<float> ((focal_y * (pt.y / pt.z) + cy) / sizeY);

  // the point is seen by the camera only if it projects inside the image
  if (UV_coordinates.x >= 0.0 && UV_coordinates.x <= 1.0 && UV_coordinates.y >= 0.0 && UV_coordinates.y <= 1.0)
    return (true);

  // the point is not seen by the camera: flag it with invalid coordinates
  UV_coordinates.x = -1.0f;
  UV_coordinates.y = -1.0f;
  return (false);
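// checkPointInsideTriangle: returns true if a 2D point lies within a triangle, by expressing the
// point in barycentric coordinates (u, v) and checking u >= 0, v >= 0 and u + v < 1.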
template<typename PointInT> inline bool
pcl::TextureMapping<PointInT>::checkPointInsideTriangle (const pcl::PointXY &p1, const pcl::PointXY &p2, const pcl::PointXY &p3, const pcl::PointXY &pt)
  // edge vectors of the triangle and vector to the query point, relative to p1
  Eigen::Vector2d v0, v1, v2;
  v0 (0) = p3.x - p1.x; v0 (1) = p3.y - p1.y;
  v1 (0) = p2.x - p1.x; v1 (1) = p2.y - p1.y;
  v2 (0) = pt.x - p1.x; v2 (1) = pt.y - p1.y;

  // dot products needed for the barycentric coordinates
  double dot00 = v0.dot (v0);
  double dot01 = v0.dot (v1);
  double dot02 = v0.dot (v2);
  double dot11 = v1.dot (v1);
  double dot12 = v1.dot (v2);

  // barycentric coordinates of pt
  double invDenom = 1.0 / (dot00 * dot11 - dot01 * dot01);
  double u = (dot11 * dot02 - dot01 * dot12) * invDenom;
  double v = (dot00 * dot12 - dot01 * dot02) * invDenom;

  // pt is inside the triangle if both coordinates are non-negative and their sum is below 1
  return ((u >= 0) && (v >= 0) && (u + v < 1));
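// isFaceProjected: returns true if all the vertices of a face are projected onto the camera's
// image plane, and outputs the three projections.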
template<typename PointInT> inline bool
pcl::TextureMapping<PointInT>::isFaceProjected (const Camera &camera, const PointInT &p1, const PointInT &p2, const PointInT &p3, pcl::PointXY &proj1, pcl::PointXY &proj2, pcl::PointXY &proj3)
  // the face is projected only if all three of its vertices project onto the camera's image plane
  return (getPointUVCoordinates (p1, camera, proj1)
          &&
          getPointUVCoordinates (p2, camera, proj2)
          &&
          getPointUVCoordinates (p3, camera, proj3));
#define PCL_INSTANTIATE_TextureMapping(T) \
  template class PCL_EXPORTS pcl::TextureMapping<T>;

#endif  // PCL_SURFACE_IMPL_TEXTURE_MAPPING_HPP_