diff --git a/.DS_Store b/.DS_Store new file mode 100644 index 0000000..76194b4 Binary files /dev/null and b/.DS_Store differ diff --git a/README.md b/README.md index cf885c9..1b95689 100644 --- a/README.md +++ b/README.md @@ -16,6 +16,13 @@ This may be improved in future versions, as it should be easily fixed with addit **A video showing the outputs of the tool is provided here: https://youtu.be/rej5Bu57AqM** +## My contributions + +I have edited this code to update it and have solved some of the bugs that have cropped up for other users, e.g., incompatible package installations and TypeErrors. This has been done on a Mac Studio M2 Ultra, and I used all 24 CPU cores for this in the CPU-only setting. My TLS scans also contained 74+ million points each, so I have not tested minimum numbers of points. The code now runs on Python 3.11.7. + +I might adapt the training data for European-Arctic-specific contexts, in line with my research interests, as the original data relates to Western Australia. I am collecting TLS data myself over summer 2024 in the European Arctic and may update the training data so that this is usable for large-scale plot cleaning and metric extraction for forestry in these contexts. + +The only slight remaining bug is that the code looks for the tools folder and model.pth file in /x/x/x/FSCT/model or /tools. The way I got around this was simply to make another folder called FSCT and copy the file/folder in there, so the files would be found where the code expects them, since the folder downloaded from GitHub is named FSCT-main. I used VS Code, pip3, and a conda environment. 
## Installation @@ -26,7 +33,7 @@ In Anaconda Prompt, type the following (replacing the path to FSCT and your desi ```shell cd PATH_TO_FSCT-MAIN_DIRECTORY -conda create --name YOUR_ENVIRONMENT_NAME_HERE python==3.9 +conda create --name YOUR_ENVIRONMENT_NAME_HERE conda activate YOUR_ENVIRONMENT_NAME_HERE conda install pip pip install -r requirements.txt @@ -34,8 +41,6 @@ pip install -r requirements.txt This should hopefully install all required packages for you. These are the instructions for Windows 10 and Linux. -I have not tested this on Mac. If someone with a Mac tests this and -it works (or doesn't), please let me know! If you have any difficulties or find any bugs, please get in touch and I will try to help you get it going. Suggestions for improvements are greatly appreciated. diff --git a/data/.DS_Store b/data/.DS_Store new file mode 100644 index 0000000..7db7751 Binary files /dev/null and b/data/.DS_Store differ diff --git a/data/test/.DS_Store b/data/test/.DS_Store new file mode 100644 index 0000000..ad2cfa0 Binary files /dev/null and b/data/test/.DS_Store differ diff --git a/model/.DS_Store b/model/.DS_Store new file mode 100644 index 0000000..5008ddf Binary files /dev/null and b/model/.DS_Store differ diff --git a/requirements.txt b/requirements.txt index 24a7243..f2cdbcf 100644 --- a/requirements.txt +++ b/requirements.txt @@ -5,16 +5,16 @@ certifi==2021.10.8 charset-normalizer==2.0.12 colorama==0.4.4 cycler==0.11.0 -Cython==0.29.28 +Cython==0.29.37 fonttools==4.33.3 googledrivedownloader==0.4 -hdbscan==0.8.28 +hdbscan==0.8.33 idna==3.3 imageio==2.19.0 importlib-metadata==4.11.3 isodate==0.6.1 Jinja2==3.1.2 -joblib==1.1.0 +joblib==1.1.1 kiwisolver==1.4.2 laspy==2.1.2 Markdown==3.3.7 @@ -22,7 +22,7 @@ MarkupSafe==2.1.1 matplotlib==3.5.2 mdutils==1.3.1 networkx==2.8 -numpy==1.22.3 +numpy==1.23.5 packaging==21.3 pandas==1.4.2 Pillow==9.1.0 @@ -34,14 +34,14 @@ PyWavelets==1.3.0 rdflib==6.1.1 requests==2.27.1 scikit-image==0.19.2 -scikit-learn==1.0.2 
-scikit-spatial==6.4.0 -scipy==1.8.0 +scikit-learn==1.1.3 +scikit-spatial==6.4.1 +scipy==1.9.3 six==1.16.0 sklearn==0.0 threadpoolctl==3.1.0 tifffile==2022.5.4 -torch==1.9.0+cu111 +torch==2.1.0 torch-cluster==1.5.9 torch-geometric==1.7.2 torch-scatter==2.0.8 @@ -50,4 +50,4 @@ torch-spline-conv==1.2.1 tqdm==4.64.0 typing_extensions==4.2.0 urllib3==1.26.9 -zipp==3.8.0 +zipp==3.8.0 \ No newline at end of file diff --git a/scripts/inference.py b/scripts/inference.py index 88e39f2..291a2e1 100644 --- a/scripts/inference.py +++ b/scripts/inference.py @@ -1,4 +1,4 @@ -from abc import ABC +from abc import ABC, abstractmethod import torch import torch_geometric from torch_geometric.data import Dataset, DataLoader, Data @@ -20,9 +20,10 @@ class TestingDataset(Dataset, ABC): + def __init__(self, root_dir, points_per_box, device): - super().__init__() - self.filenames = glob.glob(root_dir + "*.npy") + super(). __init__() + self.filenames = glob.glob(os.path.join(root_dir, '*.npy')) self.device = device self.points_per_box = points_per_box @@ -88,12 +89,11 @@ def __init__(self, parameters): self.filename = "working_point_cloud.las" self.directory = self.output_dir self.plot_summary = pd.read_csv(self.output_dir + "plot_summary.csv", index_col=None) - self.plot_centre = [[float(self.plot_summary["Plot Centre X"]), float(self.plot_summary["Plot Centre Y"])]] + self.plot_centre = [[float(self.plot_summary["Plot Centre X"].iloc[0]), float(self.plot_summary["Plot Centre Y"].iloc[0])]] def inference(self): - test_dataset = TestingDataset( - root_dir=self.working_dir, points_per_box=self.parameters["max_points_per_box"], device=self.device - ) + test_dataset = TestingDataset(root_dir=self.working_dir, points_per_box=self.parameters["max_points_per_box"], + device=self.device) test_loader = DataLoader(test_dataset, batch_size=self.parameters["batch_size"], shuffle=False, num_workers=0) diff --git a/scripts/run.py b/scripts/run.py index 15de405..8bb8adf 100644 --- a/scripts/run.py +++ 
b/scripts/run.py @@ -1,6 +1,8 @@ from run_tools import FSCT, directory_mode, file_mode from other_parameters import other_parameters + + if __name__ == "__main__": """Choose one of the following or modify as needed. Directory mode will find all .las files within a directory and sub directories but will ignore any .las files in @@ -14,22 +16,21 @@ """ # point_clouds_to_process = directory_mode() # point_clouds_to_process = ['full_path_to_your_point_cloud.las', 'full_path_to_your_second_point_cloud.las', etc.] - point_clouds_to_process = file_mode() - + point_clouds_to_process = directory_mode() for point_cloud_filename in point_clouds_to_process: parameters = dict( - point_cloud_filename=point_cloud_filename, + point_cloud_filename = point_cloud_filename, # Adjust if needed plot_centre=None, # [X, Y] Coordinates of the plot centre (metres). If "None", plot_centre is computed based on the point cloud bounding box. # Circular Plot options - Leave at 0 if not using. - plot_radius=0, # If 0 m, the plot is not cropped. Otherwise, the plot is cylindrically cropped from the plot centre with plot_radius + plot_radius_buffer. - plot_radius_buffer=0, # See README. If non-zero, this is used for "Tree Aware Plot Cropping Mode". + plot_radius=5, # If 0 m, the plot is not cropped. Otherwise, the plot is cylindrically cropped from the plot centre with plot_radius + plot_radius_buffer. + plot_radius_buffer=5, # See README. If non-zero, this is used for "Tree Aware Plot Cropping Mode". # Set these appropriately for your hardware. batch_size=2, # You will get CUDA errors if this is too high, as you will run out of VRAM. This won't be an issue if running on CPU only. Must be >= 2. num_cpu_cores=0, # Number of CPU cores you want to use. If you run out of RAM, lower this. 0 means ALL cores. - use_CPU_only=False, # Set to True if you do not have an Nvidia GPU, or if you don't have enough vRAM. 
+ use_CPU_only=True, # Set to True if you do not have an Nvidia GPU, or if you don't have enough vRAM. # Optional settings - Generally leave as they are. - slice_thickness=0.15, # If your point cloud resolution is a bit low (and only if the stem segmentation is still reasonably accurate), try increasing this to 0.2. + slice_thickness=0.10, # If your point cloud resolution is a bit low (and only if the stem segmentation is still reasonably accurate), try increasing this to 0.2. # If your point cloud is really dense, you may get away with 0.1. slice_increment=0.05, # The smaller this is, the better your results will be, however, this increases the run time. sort_stems=1, # If you don't need the sorted stem points, turning this off speeds things up. @@ -38,7 +39,7 @@ tree_base_cutoff_height=5, # A tree must have a cylinder measurement below this height above the DTM to be kept. This filters unsorted branches from being called individual trees. generate_output_point_cloud=1, # Turn on if you would like a semantic and instance segmented point cloud. This mode will override the "sort_stems" setting if on. # If you activate "tree aware plot cropping mode", this function will use it. - ground_veg_cutoff_height=3, # Any vegetation points below this height are considered to be understory and are not assigned to individual trees. + ground_veg_cutoff_height=1, # Any vegetation points below this height are considered to be understory and are not assigned to individual trees. veg_sorting_range=1.5, # Vegetation points can be, at most, this far away from a cylinder horizontally to be matched to a particular tree. stem_sorting_range=1, # Stem points can be, at most, this far away from a cylinder in 3D to be matched to a particular tree. taper_measurement_height_min=0, # Lowest height to measure diameter for taper output. 
diff --git a/scripts/run_tools.py b/scripts/run_tools.py index feee797..603374a 100644 --- a/scripts/run_tools.py +++ b/scripts/run_tools.py @@ -7,6 +7,9 @@ import tkinter as tk import tkinter.filedialog as fd import os +import numpy +numpy.float = numpy.float64 +numpy.int = numpy.int_ def FSCT( diff --git a/scripts/run_with_multiple_plot_centres.py b/scripts/run_with_multiple_plot_centres.py index c8fc5ed..908e63d 100644 --- a/scripts/run_with_multiple_plot_centres.py +++ b/scripts/run_with_multiple_plot_centres.py @@ -6,10 +6,7 @@ """ This script is an example of how to provide multiple different plot centres with your input point clouds. """ - point_clouds_to_process = [ - ["E:/your_point_cloud1.las", [your_plot_centre_X_coord, your_plot_centre_Y_coord], your_plot_radius], - ["E:/your_point_cloud2.las", [your_plot_centre_X_coord, your_plot_centre_Y_coord], your_plot_radius], - ] + point_clouds_to_process = [] for point_cloud_filename, plot_centre, plot_radius in point_clouds_to_process: parameters = dict( @@ -19,7 +16,7 @@ plot_radius_buffer=0, batch_size=18, num_cpu_cores=0, - use_CPU_only=False, + use_CPU_only=True, # Optional settings - Generally leave as they are. slice_thickness=0.15, # If your point cloud resolution is a bit low (and only if the stem segmentation is still reasonably accurate), try increasing this to 0.2. @@ -35,7 +32,7 @@ generate_output_point_cloud=1, # Turn on if you would like a semantic and instance segmented point cloud. This mode will override the "sort_stems" setting if on. # If you activate "tree aware plot cropping mode", this function will use it. - ground_veg_cutoff_height=3, + ground_veg_cutoff_height=2, # Any vegetation points below this height are considered to be understory and are not assigned to individual trees. veg_sorting_range=1.5, # Vegetation points can be, at most, this far away from a cylinder horizontally to be matched to a particular tree. 
diff --git a/tools/.DS_Store b/tools/.DS_Store new file mode 100644 index 0000000..dd86fa0 Binary files /dev/null and b/tools/.DS_Store differ