Commit 530512dd authored by Clément Pinard's avatar Clément Pinard
Browse files

add c++ pcl tools

parent 25c74ad8
# Copyright (c) 2018, ETH Zurich and UNC Chapel Hill.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# * Neither the name of ETH Zurich and UNC Chapel Hill nor the names of
# its contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
# Author: Johannes L. Schoenberger (jsch-at-demuc-dot-de)
# This script is based on an original implementation by True Price.
import sys
import sqlite3
import numpy as np
# True when running under Python 3; selects between legacy Python-2 and
# Python-3 numpy serialization branches below.
IS_PYTHON3 = sys.version_info[0] >= 3

# Image ids must fit below 2**31 so that an (image_id1, image_id2) pair can
# be packed into a single integer pair_id (see image_ids_to_pair_id).
MAX_IMAGE_ID = 2**31 - 1

# SQL schema for the COLMAP database. BLOB columns store raw numpy array
# bytes; the rows/cols columns record the stored array's shape.
CREATE_CAMERAS_TABLE = """CREATE TABLE IF NOT EXISTS cameras (
camera_id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
model INTEGER NOT NULL,
width INTEGER NOT NULL,
height INTEGER NOT NULL,
params BLOB,
prior_focal_length INTEGER NOT NULL)"""

# One descriptor matrix per image; deleted automatically with its image.
CREATE_DESCRIPTORS_TABLE = """CREATE TABLE IF NOT EXISTS descriptors (
image_id INTEGER PRIMARY KEY NOT NULL,
rows INTEGER NOT NULL,
cols INTEGER NOT NULL,
data BLOB,
FOREIGN KEY(image_id) REFERENCES images(image_id) ON DELETE CASCADE)"""

# prior_* columns hold an optional pose prior (quaternion + translation).
# The CHECK constraint enforces the MAX_IMAGE_ID packing invariant.
CREATE_IMAGES_TABLE = """CREATE TABLE IF NOT EXISTS images (
image_id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
name TEXT NOT NULL UNIQUE,
camera_id INTEGER NOT NULL,
prior_qw REAL,
prior_qx REAL,
prior_qy REAL,
prior_qz REAL,
prior_tx REAL,
prior_ty REAL,
prior_tz REAL,
CONSTRAINT image_id_check CHECK(image_id >= 0 and image_id < {}),
FOREIGN KEY(camera_id) REFERENCES cameras(camera_id))
""".format(MAX_IMAGE_ID)

# Geometrically verified matches per image pair, plus the estimated
# fundamental (F), essential (E) and homography (H) matrices.
CREATE_TWO_VIEW_GEOMETRIES_TABLE = """
CREATE TABLE IF NOT EXISTS two_view_geometries (
pair_id INTEGER PRIMARY KEY NOT NULL,
rows INTEGER NOT NULL,
cols INTEGER NOT NULL,
data BLOB,
config INTEGER NOT NULL,
F BLOB,
E BLOB,
H BLOB)
"""

# One keypoint matrix per image; deleted automatically with its image.
CREATE_KEYPOINTS_TABLE = """CREATE TABLE IF NOT EXISTS keypoints (
image_id INTEGER PRIMARY KEY NOT NULL,
rows INTEGER NOT NULL,
cols INTEGER NOT NULL,
data BLOB,
FOREIGN KEY(image_id) REFERENCES images(image_id) ON DELETE CASCADE)
"""

# Raw (unverified) feature matches, keyed by packed pair_id.
CREATE_MATCHES_TABLE = """CREATE TABLE IF NOT EXISTS matches (
pair_id INTEGER PRIMARY KEY NOT NULL,
rows INTEGER NOT NULL,
cols INTEGER NOT NULL,
data BLOB)"""

CREATE_NAME_INDEX = \
    "CREATE UNIQUE INDEX IF NOT EXISTS index_name ON images(name)"

# Single script that creates the whole schema in one executescript call.
CREATE_ALL = "; ".join([
    CREATE_CAMERAS_TABLE,
    CREATE_IMAGES_TABLE,
    CREATE_KEYPOINTS_TABLE,
    CREATE_DESCRIPTORS_TABLE,
    CREATE_MATCHES_TABLE,
    CREATE_TWO_VIEW_GEOMETRIES_TABLE,
    CREATE_NAME_INDEX
])
def image_ids_to_pair_id(image_id1, image_id2):
    """Pack an unordered pair of image ids into a single unique pair_id.

    The smaller id always becomes the leading term, so (a, b) and (b, a)
    map to the same pair_id.
    """
    low, high = sorted((image_id1, image_id2))
    return low * MAX_IMAGE_ID + high
def pair_id_to_image_ids(pair_id):
    """Invert image_ids_to_pair_id.

    Returns (image_id1, image_id2) with image_id1 <= image_id2.

    Fix: use floor division (//). On Python 3 the original true division
    (/) returned image_id1 as a float, breaking equality comparisons and
    dict lookups against integer image ids.
    """
    image_id2 = pair_id % MAX_IMAGE_ID
    image_id1 = (pair_id - image_id2) // MAX_IMAGE_ID
    return image_id1, image_id2
def array_to_blob(array):
    """Serialize a numpy array to raw bytes for storage in a BLOB column.

    Fix: ndarray.tostring() is a deprecated alias that was removed in
    numpy 2.0. tobytes() is available on both Python 2 and Python 3
    numpy builds, so the legacy np.getbuffer branch is unnecessary.
    """
    return array.tobytes()
def blob_to_array(blob, dtype, shape=(-1,)):
    """Deserialize BLOB bytes back into a numpy array of the given shape.

    Fix: np.fromstring on binary data is deprecated (and rejected by
    recent numpy); np.frombuffer is the correct call on both Python 2
    and Python 3, so a single branch suffices.
    """
    return np.frombuffer(blob, dtype=dtype).reshape(*shape)
class COLMAPDatabase(sqlite3.Connection):
    """sqlite3.Connection subclass that knows the COLMAP database schema.

    Provides idempotent table-creation helpers and ``add_*`` methods that
    serialize numpy arrays into the BLOB columns defined by the module's
    CREATE_* statements.
    """

    @staticmethod
    def connect(database_path):
        """Open (creating if necessary) a COLMAP database at the given path."""
        return sqlite3.connect(database_path, factory=COLMAPDatabase)

    def __init__(self, *args, **kwargs):
        super(COLMAPDatabase, self).__init__(*args, **kwargs)
        # Each helper is safe to call repeatedly: the underlying SQL uses
        # CREATE ... IF NOT EXISTS throughout.
        self.create_tables = lambda: self.executescript(CREATE_ALL)
        self.create_cameras_table = \
            lambda: self.executescript(CREATE_CAMERAS_TABLE)
        self.create_descriptors_table = \
            lambda: self.executescript(CREATE_DESCRIPTORS_TABLE)
        self.create_images_table = \
            lambda: self.executescript(CREATE_IMAGES_TABLE)
        self.create_two_view_geometries_table = \
            lambda: self.executescript(CREATE_TWO_VIEW_GEOMETRIES_TABLE)
        self.create_keypoints_table = \
            lambda: self.executescript(CREATE_KEYPOINTS_TABLE)
        self.create_matches_table = \
            lambda: self.executescript(CREATE_MATCHES_TABLE)
        self.create_name_index = lambda: self.executescript(CREATE_NAME_INDEX)

    def add_camera(self, model, width, height, params,
                   prior_focal_length=False, camera_id=None):
        """Insert a camera row and return its (possibly auto-assigned) id."""
        params = np.asarray(params, np.float64)
        cursor = self.execute(
            "INSERT INTO cameras VALUES (?, ?, ?, ?, ?, ?)",
            (camera_id, model, width, height, array_to_blob(params),
             prior_focal_length))
        return cursor.lastrowid

    def add_image(self, name, camera_id,
                  prior_q=np.full(4, np.nan), prior_t=np.full(3, np.nan),
                  image_id=None):
        """Insert an image row and return its (possibly auto-assigned) id.

        Fix: ``np.nan`` replaces the ``np.NaN`` alias, which was removed
        in numpy 2.0. The default arrays are built once at definition
        time; that is safe here because they are only read, never mutated.
        """
        cursor = self.execute(
            "INSERT INTO images VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
            (image_id, name, camera_id, prior_q[0], prior_q[1], prior_q[2],
             prior_q[3], prior_t[0], prior_t[1], prior_t[2]))
        return cursor.lastrowid

    def add_keypoints(self, image_id, keypoints):
        """Store a float32 (N, 2|4|6) keypoint matrix for an image."""
        assert(len(keypoints.shape) == 2)
        assert(keypoints.shape[1] in [2, 4, 6])
        keypoints = np.asarray(keypoints, np.float32)
        self.execute(
            "INSERT INTO keypoints VALUES (?, ?, ?, ?)",
            (image_id,) + keypoints.shape + (array_to_blob(keypoints),))

    def add_descriptors(self, image_id, descriptors):
        """Store a uint8 descriptor matrix for an image."""
        descriptors = np.ascontiguousarray(descriptors, np.uint8)
        self.execute(
            "INSERT INTO descriptors VALUES (?, ?, ?, ?)",
            (image_id,) + descriptors.shape + (array_to_blob(descriptors),))

    def add_matches(self, image_id1, image_id2, matches):
        """Store raw (N, 2) matches keyed by the canonical pair_id.

        Columns are swapped when image_id1 > image_id2 so that the stored
        matches always index (smaller_id, larger_id), matching pair_id.
        """
        assert(len(matches.shape) == 2)
        assert(matches.shape[1] == 2)
        if image_id1 > image_id2:
            matches = matches[:, ::-1]
        pair_id = image_ids_to_pair_id(image_id1, image_id2)
        matches = np.asarray(matches, np.uint32)
        self.execute(
            "INSERT INTO matches VALUES (?, ?, ?, ?)",
            (pair_id,) + matches.shape + (array_to_blob(matches),))

    def add_two_view_geometry(self, image_id1, image_id2, matches,
                              F=np.eye(3), E=np.eye(3), H=np.eye(3),
                              config=2):
        """Store verified matches plus F/E/H matrices for an image pair."""
        assert(len(matches.shape) == 2)
        assert(matches.shape[1] == 2)
        if image_id1 > image_id2:
            matches = matches[:, ::-1]
        pair_id = image_ids_to_pair_id(image_id1, image_id2)
        matches = np.asarray(matches, np.uint32)
        F = np.asarray(F, dtype=np.float64)
        E = np.asarray(E, dtype=np.float64)
        H = np.asarray(H, dtype=np.float64)
        self.execute(
            "INSERT INTO two_view_geometries VALUES (?, ?, ?, ?, ?, ?, ?, ?)",
            (pair_id,) + matches.shape + (array_to_blob(matches), config,
             array_to_blob(F), array_to_blob(E), array_to_blob(H)))
def example_usage():
    """End-to-end smoke test: write a dummy COLMAP database, read it back.

    Creates two cameras, four images, keypoints and matches, commits,
    then verifies that everything round-trips through the BLOB
    serialization.

    Fix: cleanup (closing the connection and deleting the database file)
    now runs in a ``finally`` block, so a failed assertion no longer
    leaves a stale database file behind that would block the next run.
    """
    import os
    import argparse

    parser = argparse.ArgumentParser()
    parser.add_argument("--database_path", default="database.db")
    args = parser.parse_args()

    # Refuse to touch a pre-existing database.
    if os.path.exists(args.database_path):
        print("ERROR: database path already exists -- will not modify it.")
        return

    # Open the database.
    db = COLMAPDatabase.connect(args.database_path)
    try:
        # For convenience, try creating all the tables upfront.
        db.create_tables()

        # Create dummy cameras.
        model1, width1, height1, params1 = \
            0, 1024, 768, np.array((1024., 512., 384.))
        model2, width2, height2, params2 = \
            2, 1024, 768, np.array((1024., 512., 384., 0.1))
        camera_id1 = db.add_camera(model1, width1, height1, params1)
        camera_id2 = db.add_camera(model2, width2, height2, params2)

        # Create dummy images.
        image_id1 = db.add_image("image1.png", camera_id1)
        image_id2 = db.add_image("image2.png", camera_id1)
        image_id3 = db.add_image("image3.png", camera_id2)
        image_id4 = db.add_image("image4.png", camera_id2)

        # Create dummy keypoints.
        #
        # Note that COLMAP supports:
        #  - 2D keypoints: (x, y)
        #  - 4D keypoints: (x, y, theta, scale)
        #  - 6D affine keypoints: (x, y, a_11, a_12, a_21, a_22)
        num_keypoints = 1000
        keypoints1 = np.random.rand(num_keypoints, 2) * (width1, height1)
        keypoints2 = np.random.rand(num_keypoints, 2) * (width1, height1)
        keypoints3 = np.random.rand(num_keypoints, 2) * (width2, height2)
        keypoints4 = np.random.rand(num_keypoints, 2) * (width2, height2)
        db.add_keypoints(image_id1, keypoints1)
        db.add_keypoints(image_id2, keypoints2)
        db.add_keypoints(image_id3, keypoints3)
        db.add_keypoints(image_id4, keypoints4)

        # Create dummy matches.
        M = 50
        matches12 = np.random.randint(num_keypoints, size=(M, 2))
        matches23 = np.random.randint(num_keypoints, size=(M, 2))
        matches34 = np.random.randint(num_keypoints, size=(M, 2))
        db.add_matches(image_id1, image_id2, matches12)
        db.add_matches(image_id2, image_id3, matches23)
        db.add_matches(image_id3, image_id4, matches34)

        # Commit the data to the file.
        db.commit()

        # Read and check cameras.
        rows = db.execute("SELECT * FROM cameras")

        camera_id, model, width, height, params, prior = next(rows)
        params = blob_to_array(params, np.float64)
        assert camera_id == camera_id1
        assert model == model1 and width == width1 and height == height1
        assert np.allclose(params, params1)

        camera_id, model, width, height, params, prior = next(rows)
        params = blob_to_array(params, np.float64)
        assert camera_id == camera_id2
        assert model == model2 and width == width2 and height == height2
        assert np.allclose(params, params2)

        # Read and check keypoints (stored as float32, hence allclose).
        keypoints = dict(
            (image_id, blob_to_array(data, np.float32, (-1, 2)))
            for image_id, data in db.execute(
                "SELECT image_id, data FROM keypoints"))
        assert np.allclose(keypoints[image_id1], keypoints1)
        assert np.allclose(keypoints[image_id2], keypoints2)
        assert np.allclose(keypoints[image_id3], keypoints3)
        assert np.allclose(keypoints[image_id4], keypoints4)

        # Read and check matches.
        pair_ids = [image_ids_to_pair_id(*pair) for pair in
                    ((image_id1, image_id2),
                     (image_id2, image_id3),
                     (image_id3, image_id4))]
        matches = dict(
            (pair_id_to_image_ids(pair_id),
             blob_to_array(data, np.uint32, (-1, 2)))
            for pair_id, data in db.execute(
                "SELECT pair_id, data FROM matches")
        )
        assert np.all(matches[(image_id1, image_id2)] == matches12)
        assert np.all(matches[(image_id2, image_id3)] == matches23)
        assert np.all(matches[(image_id3, image_id4)] == matches34)
    finally:
        # Clean up: always close the connection and remove the file.
        db.close()
        if os.path.exists(args.database_path):
            os.remove(args.database_path)


if __name__ == "__main__":
    example_usage()
This diff is collapsed.
......@@ -29,6 +29,7 @@ def load_and_convert(input_file, output_folder, verbose=False):
*(cloud.centroid/1000)))
output_centroid = cloud.centroid
print(cloud.centroid)
np.savetxt(txt_path, output_centroid)
xyz = cloud.points[['x', 'y', 'z']]
......
from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter
from wrappers import Colmap, FFMpeg, PDraw, ETH3D
from wrappers import Colmap, FFMpeg, PDraw, ETH3D, PCLUtil
from global_options import add_global_options
import meshlab_xml_writer as mxw
import add_video_to_db as avtd
......@@ -60,6 +60,7 @@ exec_parser.add_argument("--colmap", default="colmap",
exec_parser.add_argument("--eth3d", default="../dataset-pipeline/build",
type=Path, help="ETH3D dataset pipeline exec files folder location")
exec_parser.add_argument("--ffmpeg", default="ffmpeg")
exec_parser.add_argument("--pcl_util", default="pcl_util/build", type=Path)
video_registration_parser = parser.add_argument_group("Video Registration")
video_registration_parser.add_argument("--vocab_tree", type=Path, default="vocab_tree_flickr100K_words256K.bin")
......@@ -82,7 +83,7 @@ def print_step(step_number, step_name):
print("=================")
def convert_point_cloud(pointclouds, lidar_path, verbose, eth3d, **env):
def convert_point_cloud(pointclouds, lidar_path, verbose, eth3d, pcl_util, **env):
converted_clouds = []
centroids = []
for pc in pointclouds:
......@@ -90,8 +91,10 @@ def convert_point_cloud(pointclouds, lidar_path, verbose, eth3d, **env):
output_folder=lidar_path,
verbose=verbose >= 1)
centroids.append(centroid)
eth3d.clean_pointcloud(ply, filter=(5, 10))
converted_clouds.append(ply + ".inliers.ply")
eth3d.clean_pointcloud(ply, filter=(6, 2))
pcl_util.subsample(input_file=ply + ".inliers.ply", output_file=ply + "_subsampled.ply", resolution=0.1)
converted_clouds.append(ply + "_subsampled.ply")
return converted_clouds, centroids[0]
......@@ -171,6 +174,8 @@ def main():
env["pdraw"] = pdraw
eth3d = ETH3D(args.eth3d, env["image_path"], quiet=args.verbose < 1)
env["eth3d"] = eth3d
pcl_util = PCLUtil(args.pcl_util)
env["pcl_util"] = pcl_util
las_files = (args.input_folder/"Lidar").files("*.las")
ply_files = (args.input_folder/"Lidar").files("*.ply")
......@@ -198,13 +203,14 @@ def main():
if i + 1 not in args.skip_step:
print_step(i, s)
path_lists, env["videos_output_folders"] = extract_videos_to_workspace(**env)
with open(env["video_frame_list_thorough"], "w") as f:
f.write("\n".join(path_lists["thorough"]))
with open(env["georef_frames_list"], "w") as f:
f.write("\n".join(path_lists["georef"]))
for v in env["videos_list"]:
with open(env["videos_output_folders"][v] / "to_scan.txt", "w") as f:
f.write("\n".join(path_lists[v]))
if path_lists is not None:
with open(env["video_frame_list_thorough"], "w") as f:
f.write("\n".join(path_lists["thorough"]))
with open(env["georef_frames_list"], "w") as f:
f.write("\n".join(path_lists["georef"]))
for v in env["videos_list"]:
with open(env["videos_output_folders"][v] / "to_scan.txt", "w") as f:
f.write("\n".join(path_lists[v]))
else:
by_basename = {v.basename(): v for v in env["videos_list"]}
for folder in env["video_path"].walkdirs():
......@@ -240,8 +246,8 @@ def main():
print_step(i, s)
with_normals_path = env["lidar_path"] / "with_normals.ply"
eth3d.compute_normals(with_normals_path, env["aligned_mlp"], neighbor_count=8)
eth3d.triangulate_mesh(env["occlusion_ply"], with_normals_path, resolution=20)
eth3d.compute_normals(with_normals_path, env["aligned_mlp"], neighbor_radius=0.2)
pcl_util.triangulate_mesh(env["occlusion_ply"], with_normals_path, resolution=0.2)
eth3d.create_splats(env["splats_ply"], with_normals_path, env["occlusion_ply"], threshold=0.1)
mxw.create_project(env["occlusion_mlp"], [env["occlusion_ply"], env["splats_ply"]])
if args.save_space:
......
# Build configuration for the photogrammetry PCL utilities: two small
# command-line tools linked against PCL and glog.
cmake_minimum_required(VERSION 2.8 FATAL_ERROR)
project(Parrot_photogrammetry_PCLutils)

# Locate an installed PCL and propagate its include dirs, library dirs
# and compile definitions to every target below.
find_package(PCL REQUIRED)
include_directories(${PCL_INCLUDE_DIRS})
link_directories(${PCL_LIBRARY_DIRS})
add_definitions(${PCL_DEFINITIONS})

# Point-cloud subsampling tool.
add_executable (PointCloudSubsampler pointcloud_subsampler.cpp)
target_link_libraries (PointCloudSubsampler ${PCL_LIBRARIES} glog)

# Mesh triangulation tool (greedy projection, see mesh_triangulator.cpp).
add_executable (MeshTriangulator mesh_triangulator.cpp)
target_link_libraries (MeshTriangulator ${PCL_LIBRARIES} glog)
// Copyright 2020 ENSTA Paris, Clément Pinard
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
// 1. Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// 2. Redistributions in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
//
// 3. Neither the name of the copyright holder nor the names of its contributors
// may be used to endorse or promote products derived from this software
// without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.
#include <iostream>
#include <string>
#include <vector>
#include <glog/logging.h>
#include <pcl/common/transforms.h>
#include <pcl/console/parse.h>
#include <pcl/io/ply_io.h>
#include <pcl/kdtree/kdtree_flann.h>
#include <pcl/surface/gp3.h>
#include "pointcloud_subsampler.h"
// Command-line tool: load an oriented point cloud (points + normals) from a
// PLY file, subsample it to a target resolution, triangulate it with PCL's
// greedy projection algorithm, and optionally save the mesh as binary PLY.
int main(int argc, char** argv) {
  FLAGS_logtostderr = 1;
  google::InitGoogleLogging(argv[0]);
  pcl::console::setVerbosityLevel(pcl::console::L_DEBUG);

  // Parse arguments: show usage when called with no args or with -h/--help.
  int dummy;
  if (argc <= 1 ||
      pcl::console::parse_argument(argc, argv, "-h", dummy) >= 0 ||
      pcl::console::parse_argument(argc, argv, "--help", dummy) >= 0) {
    LOG(INFO) << "Usage: " << argv[0] << " --point_normal_cloud_path <file.ply> --resolution <m> --out_mesh <file.ply>";
    return EXIT_FAILURE;
  }

  std::string point_normal_cloud_path;
  pcl::console::parse_argument(argc, argv, "--point_normal_cloud_path", point_normal_cloud_path);
  // NOTE(review): default is 20 while the usage string says meters and the
  // original trailing comment said "20cm"; the companion subsampler tool
  // defaults to 0.2. Confirm the intended default -- callers are expected
  // to pass --resolution explicitly.
  float resolution = 20;
  pcl::console::parse_argument(argc, argv, "--resolution", resolution);
  std::string mesh_output;
  pcl::console::parse_argument(argc, argv, "--out_mesh", mesh_output);

  // Load point cloud with normals.
  LOG(INFO) << "Loading point cloud ...";
  pcl::PointCloud<pcl::PointNormal>::Ptr point_normal_cloud(
      new pcl::PointCloud<pcl::PointNormal>());
  if (pcl::io::loadPLYFile(point_normal_cloud_path, *point_normal_cloud) < 0) {
    return EXIT_FAILURE;
  }

  // Subsample before triangulating; filter<> is presumably declared in the
  // included pointcloud_subsampler.h (not visible here) -- confirm.
  LOG(INFO) << "Subsampling to have a mean distance between points of " << resolution << " m";
  point_normal_cloud = filter<pcl::PointNormal>(point_normal_cloud, resolution);

  LOG(INFO) << "Beginning triangulation";
  // Create search tree used for neighbor queries during triangulation.
  pcl::search::KdTree<pcl::PointNormal>::Ptr tree (new pcl::search::KdTree<pcl::PointNormal>);
  tree->setInputCloud (point_normal_cloud);

  pcl::GreedyProjectionTriangulation<pcl::PointNormal> gp3;
  pcl::PolygonMesh triangles;

  // Set the maximum distance between connected points (maximum edge length),
  // tied to the subsampling resolution so neighbors remain reachable.
  gp3.setSearchRadius (resolution * 2);

  // Set typical values for the remaining parameters.
  gp3.setMu (2.5);
  gp3.setMaximumNearestNeighbors (100);
  gp3.setMaximumSurfaceAngle(M_PI);
  gp3.setMinimumAngle(0);
  gp3.setMaximumAngle(2 * M_PI);
  gp3.setNormalConsistency(false);

  // Run the reconstruction.
  gp3.setInputCloud (point_normal_cloud);
  gp3.setSearchMethod (tree);
  gp3.reconstruct (triangles);
  LOG(INFO) << "Done.";

  // Save the mesh (binary PLY) only when an output path was supplied.
  if (!mesh_output.empty())
    pcl::io::savePLYFileBinary (mesh_output, triangles);
  return EXIT_SUCCESS;
}
#include <glog/logging.h>
#include <pcl/io/ply_io.h>
#include <pcl/point_types.h>
#include <pcl/filters/voxel_grid.h>
#include <pcl/console/parse.h>
#include "pointcloud_subsampler.h"
int
main (int argc, char** argv)
{
FLAGS_logtostderr = 1;
google::InitGoogleLogging(argv[0]);
pcl::console::setVerbosityLevel(pcl::console::L_DEBUG);
// Parse arguments.
int dummy;
if (argc <= 1 ||
pcl::console::parse_argument(argc, argv, "-h", dummy) >= 0 ||
pcl::console::parse_argument(argc, argv, "--help", dummy) >= 0) {
LOG(INFO) << "Usage: " << argv[0] << " --point_cloud_path <file.ply> --resolution <m> --out_mesh <file.ply>";
return EXIT_FAILURE;
}
std::string point_cloud_path;
pcl::console::parse_argument(argc, argv, "--point_cloud_path", point_cloud_path);
float resolution = 0.2; //20cm resolution
pcl::console::parse_argument(argc, argv, "--resolution", resolution);
std::string output_path;
pcl::console::parse_argument(argc, argv, "--output", output_path);
// Load point cloud with normals.
LOG(INFO) << "Loading point cloud ...";
pcl::PointCloud<pcl::PointXYZ>::Ptr point_cloud(
new pcl::PointCloud<pcl::PointXYZ>());
if (pcl::io::loadPLYFile(point_cloud_path, *point_cloud) < 0) {
return EXIT_FAILURE;
}
LOG(INFO) << "Subsampling to have a mean distance between points of " << resolution << " m";
point_cloud = filter<pcl::PointXYZ>(point_cloud, resolution);
pcl::io::savePLYFileBinary (output_path, *point_cloud);