Final submission code
Browse files
Update to the paths so they are generic :).
- end_to_end.py +11 -7
- fast_pointnet_v2.py +2 -1
- find_best_results.py +1 -1
- generate_pcloud_dataset.py +4 -3
- predict.py +4 -4
- script.py +7 -7
- train.py +9 -8
- train_pnet_class.py +2 -2
- train_pnet_v2.py +2 -2
end_to_end.py
CHANGED
|
@@ -861,8 +861,8 @@ def run_inference(model_path: str,
|
|
| 861 |
base_name = os.path.splitext(os.path.basename(file_path))[0]
|
| 862 |
output_filename = f"{base_name}_predictions"
|
| 863 |
try:
|
| 864 |
-
save_data(output_data, output_filename)
|
| 865 |
-
print(f"Results saved to: {output_filename}.pkl")
|
| 866 |
except Exception as e:
|
| 867 |
print(f"Error saving results for {file_path}: {e}")
|
| 868 |
|
|
@@ -871,17 +871,20 @@ def run_inference(model_path: str,
|
|
| 871 |
if __name__ == "__main__":
|
| 872 |
inference = False
|
| 873 |
|
| 874 |
-
|
| 875 |
-
|
|
|
|
|
|
|
| 876 |
num_epochs_train = 100
|
| 877 |
batch_size_train = 16
|
| 878 |
# This parameter now controls the ratio of negative to positive samples for BCE loss
|
| 879 |
negative_to_positive_bce_ratio = 1
|
| 880 |
|
| 881 |
if inference:
|
| 882 |
-
|
| 883 |
-
|
| 884 |
-
|
|
|
|
| 885 |
grid_size=128,
|
| 886 |
voxel_size=0.5,
|
| 887 |
vertex_threshold=0.5
|
|
@@ -891,3 +894,4 @@ if __name__ == "__main__":
|
|
| 891 |
num_epochs=num_epochs_train,
|
| 892 |
batch_size=batch_size_train,
|
| 893 |
neg_pos_ratio_val=negative_to_positive_bce_ratio)
|
|
|
|
|
|
| 861 |
base_name = os.path.splitext(os.path.basename(file_path))[0]
|
| 862 |
output_filename = f"{base_name}_predictions"
|
| 863 |
try:
|
| 864 |
+
save_data(output_data, output_filename) # Saves to 'data' subfolder by default
|
| 865 |
+
print(f"Results saved to: data/{output_filename}.pkl")
|
| 866 |
except Exception as e:
|
| 867 |
print(f"Error saving results for {file_path}: {e}")
|
| 868 |
|
|
|
|
| 871 |
if __name__ == "__main__":
|
| 872 |
inference = False
|
| 873 |
|
| 874 |
+
# Replace with your actual data folder path
|
| 875 |
+
data_folder_train = 'YOUR_LOCAL_DATA_FOLDER_PATH'
|
| 876 |
+
# Example: data_folder_train = '/path/to/your/training_data'
|
| 877 |
+
|
| 878 |
num_epochs_train = 100
|
| 879 |
batch_size_train = 16
|
| 880 |
# This parameter now controls the ratio of negative to positive samples for BCE loss
|
| 881 |
negative_to_positive_bce_ratio = 1
|
| 882 |
|
| 883 |
if inference:
|
| 884 |
+
# Replace with your actual model path and data path for inference
|
| 885 |
+
run_inference(model_path='YOUR_MODEL_PATH.pth', # Example: '/path/to/your/model.pth'
|
| 886 |
+
data_file_path='YOUR_INFERENCE_DATA_FOLDER_PATH', # Example: '/path/to/your/inference_data'
|
| 887 |
+
output_file=None, # Output will be saved in a 'data' subfolder relative to script
|
| 888 |
grid_size=128,
|
| 889 |
voxel_size=0.5,
|
| 890 |
vertex_threshold=0.5
|
|
|
|
| 894 |
num_epochs=num_epochs_train,
|
| 895 |
batch_size=batch_size_train,
|
| 896 |
neg_pos_ratio_val=negative_to_positive_bce_ratio)
|
| 897 |
+
|
fast_pointnet_v2.py
CHANGED
|
@@ -1,5 +1,5 @@
|
|
| 1 |
# This file defines a FastPointNet model for 3D vertex prediction from point clouds.
|
| 2 |
-
# It includes:
|
| 3 |
# 1. `FastPointNet`: A deep neural network with enhancements like residual connections,
|
| 4 |
# channel attention, and multi-scale pooling. It predicts 3D coordinates,
|
| 5 |
# and optionally, confidence scores and classification labels.
|
|
@@ -579,3 +579,4 @@ def predict_vertex_from_patch(model: FastPointNet, patch: np.ndarray, device: to
|
|
| 579 |
position += offset
|
| 580 |
|
| 581 |
return position, score, classification
|
|
|
|
|
|
| 1 |
# This file defines a FastPointNet model for 3D vertex prediction from point clouds.
|
| 2 |
+
# It is located at <YOUR_LOCAL_PATH>/fast_pointnet_v2.py and includes:
|
| 3 |
# 1. `FastPointNet`: A deep neural network with enhancements like residual connections,
|
| 4 |
# channel attention, and multi-scale pooling. It predicts 3D coordinates,
|
| 5 |
# and optionally, confidence scores and classification labels.
|
|
|
|
| 579 |
position += offset
|
| 580 |
|
| 581 |
return position, score, classification
|
| 582 |
+
|
find_best_results.py
CHANGED
|
@@ -158,7 +158,7 @@ if __name__ == "__main__":
|
|
| 158 |
if len(sys.argv) > 1:
|
| 159 |
results_dir = sys.argv[1]
|
| 160 |
else:
|
| 161 |
-
results_dir = "/
|
| 162 |
|
| 163 |
# You can specify a different folder prefix as the second argument
|
| 164 |
folder_prefix = sys.argv[2] if len(sys.argv) > 2 else ""
|
|
|
|
| 158 |
if len(sys.argv) > 1:
|
| 159 |
results_dir = sys.argv[1]
|
| 160 |
else:
|
| 161 |
+
results_dir = "/path/to/your/results/directory" # MODIFIED: Placeholder for results directory
|
| 162 |
|
| 163 |
# You can specify a different folder prefix as the second argument
|
| 164 |
folder_prefix = sys.argv[2] if len(sys.argv) > 2 else ""
|
generate_pcloud_dataset.py
CHANGED
|
@@ -19,12 +19,12 @@ from utils import read_colmap_rec
|
|
| 19 |
|
| 20 |
from tqdm import tqdm
|
| 21 |
|
| 22 |
-
ds = load_dataset("usm3d/hoho25k", cache_dir="
|
| 23 |
-
#ds = load_dataset("usm3d/hoho25k", cache_dir="
|
| 24 |
ds = ds.shuffle()
|
| 25 |
|
| 26 |
# Create output directory
|
| 27 |
-
output_dir = "
|
| 28 |
os.makedirs(output_dir, exist_ok=True)
|
| 29 |
|
| 30 |
counter = 0
|
|
@@ -67,3 +67,4 @@ for a in tqdm(ds['train'], desc="Processing dataset"):
|
|
| 67 |
print(f"Generated {counter} samples in {output_dir}")
|
| 68 |
|
| 69 |
|
|
|
|
|
|
| 19 |
|
| 20 |
from tqdm import tqdm
|
| 21 |
|
| 22 |
+
ds = load_dataset("usm3d/hoho25k", cache_dir="<CACHE_DIR_PLACEHOLDER>", trust_remote_code=True)
|
| 23 |
+
#ds = load_dataset("usm3d/hoho25k", cache_dir="<ALTERNATIVE_CACHE_DIR_PLACEHOLDER>", trust_remote_code=True)
|
| 24 |
ds = ds.shuffle()
|
| 25 |
|
| 26 |
# Create output directory
|
| 27 |
+
output_dir = "<OUTPUT_DIR_PLACEHOLDER>"
|
| 28 |
os.makedirs(output_dir, exist_ok=True)
|
| 29 |
|
| 30 |
counter = 0
|
|
|
|
| 67 |
print(f"Generated {counter} samples in {output_dir}")
|
| 68 |
|
| 69 |
|
| 70 |
+
|
predict.py
CHANGED
|
@@ -37,12 +37,12 @@ import time
|
|
| 37 |
from collections import Counter
|
| 38 |
|
| 39 |
GENERATE_DATASET = False
|
| 40 |
-
#DATASET_DIR = '/
|
| 41 |
-
DATASET_DIR = '/
|
| 42 |
|
| 43 |
GENERATE_DATASET_EDGES = False
|
| 44 |
-
#EDGES_DATASET_DIR = '/
|
| 45 |
-
EDGES_DATASET_DIR = '/
|
| 46 |
|
| 47 |
def convert_entry_to_human_readable(entry):
|
| 48 |
out = {}
|
|
|
|
| 37 |
from collections import Counter
|
| 38 |
|
| 39 |
GENERATE_DATASET = False
|
| 40 |
+
#DATASET_DIR = '/path/to/your/hohocustom/'
|
| 41 |
+
DATASET_DIR = '/path/to/your/hohocustom_v4/'
|
| 42 |
|
| 43 |
GENERATE_DATASET_EDGES = False
|
| 44 |
+
#EDGES_DATASET_DIR = '/path/to/your/hohocustom_edges/'
|
| 45 |
+
EDGES_DATASET_DIR = '/path/to/your/hohocustom_edges_10d_v5/'
|
| 46 |
|
| 47 |
def convert_entry_to_human_readable(entry):
|
| 48 |
out = {}
|
script.py
CHANGED
|
@@ -25,13 +25,13 @@ if __name__ == "__main__":
|
|
| 25 |
print('pwd:')
|
| 26 |
os.system('pwd')
|
| 27 |
print(os.system('ls -lahtr'))
|
| 28 |
-
print('/tmp/data/'
|
| 29 |
-
print(os.system('ls -lahtr /
|
| 30 |
-
print('/tmp/data/data'
|
| 31 |
-
print(os.system('ls -lahtrR /
|
| 32 |
|
| 33 |
-
data_path_test_server = Path('/tmp/data')
|
| 34 |
-
data_path_local = Path()
|
| 35 |
|
| 36 |
if data_path_test_server.exists():
|
| 37 |
# data_path = data_path_test_server
|
|
@@ -42,7 +42,7 @@ if __name__ == "__main__":
|
|
| 42 |
from huggingface_hub import snapshot_download
|
| 43 |
_ = snapshot_download(
|
| 44 |
repo_id=params['dataset'],
|
| 45 |
-
local_dir="/tmp/data"
|
| 46 |
repo_type="dataset",
|
| 47 |
)
|
| 48 |
data_path = data_path_test_server
|
|
|
|
| 25 |
print('pwd:')
|
| 26 |
os.system('pwd')
|
| 27 |
print(os.system('ls -lahtr'))
|
| 28 |
+
print('/generic/path/to/data_dir/') # Placeholder for '/tmp/data/'
|
| 29 |
+
print(os.system('ls -lahtr /generic/path/to/data_dir/')) # Placeholder for /tmp/data/
|
| 30 |
+
print('/generic/path/to/data_dir/data') # Placeholder for '/tmp/data/data'
|
| 31 |
+
print(os.system('ls -lahtrR /generic/path/to/data_dir/data')) # Placeholder for /tmp/data/data
|
| 32 |
|
| 33 |
+
data_path_test_server = Path('/generic/path/to/data_dir') # Placeholder for Path('/tmp/data')
|
| 34 |
+
data_path_local = Path("/generic/path/to/user_home") / '.cache/huggingface/datasets/usm3d___hoho25k_test_x/' # Placeholder for Path().home()
|
| 35 |
|
| 36 |
if data_path_test_server.exists():
|
| 37 |
# data_path = data_path_test_server
|
|
|
|
| 42 |
from huggingface_hub import snapshot_download
|
| 43 |
_ = snapshot_download(
|
| 44 |
repo_id=params['dataset'],
|
| 45 |
+
local_dir="/generic/path/to/data_dir", # Placeholder for "/tmp/data"
|
| 46 |
repo_type="dataset",
|
| 47 |
)
|
| 48 |
data_path = data_path_test_server
|
train.py
CHANGED
|
@@ -56,8 +56,8 @@ print(f"Running with configuration: {config}")
|
|
| 56 |
os.makedirs(args.results_dir, exist_ok=True)
|
| 57 |
|
| 58 |
|
| 59 |
-
ds = load_dataset("usm3d/hoho25k", cache_dir="/
|
| 60 |
-
#ds = load_dataset("usm3d/hoho25k", cache_dir="/
|
| 61 |
#ds = ds.shuffle()
|
| 62 |
|
| 63 |
scores_hss = []
|
|
@@ -69,16 +69,16 @@ show_visu = True
|
|
| 69 |
device = "cuda" if torch.cuda.is_available() else "cpu"
|
| 70 |
|
| 71 |
#pnet_model = load_pointnet_model(model_path="pnet.pth", device=device, predict_score=True)
|
| 72 |
-
pnet_model = load_pointnet_model(model_path="
|
| 73 |
-
#pnet_model = load_pointnet_model(model_path="/
|
| 74 |
#pnet_model = None
|
| 75 |
|
| 76 |
-
#pnet_class_model = load_pointnet_class_model(model_path="/
|
| 77 |
-
#pnet_class_model = load_pointnet_class_model_10d(model_path="/
|
| 78 |
-
pnet_class_model = load_pointnet_class_model(model_path="
|
| 79 |
#pnet_class_model = None
|
| 80 |
|
| 81 |
-
#voxel_model = load_3dcnn_model(model_path="/
|
| 82 |
voxel_model = None
|
| 83 |
|
| 84 |
|
|
@@ -169,3 +169,4 @@ with open(results_filepath, 'w') as f:
|
|
| 169 |
|
| 170 |
|
| 171 |
print(f"Results saved to {results_filepath}")
|
|
|
|
|
|
| 56 |
os.makedirs(args.results_dir, exist_ok=True)
|
| 57 |
|
| 58 |
|
| 59 |
+
ds = load_dataset("usm3d/hoho25k", cache_dir="YOUR_CACHE_DIR_PATH/hoho25k/", trust_remote_code=True)
|
| 60 |
+
#ds = load_dataset("usm3d/hoho25k", cache_dir="YOUR_ALTERNATIVE_CACHE_DIR_PATH/hoho25k/", trust_remote_code=True)
|
| 61 |
#ds = ds.shuffle()
|
| 62 |
|
| 63 |
scores_hss = []
|
|
|
|
| 69 |
device = "cuda" if torch.cuda.is_available() else "cpu"
|
| 70 |
|
| 71 |
#pnet_model = load_pointnet_model(model_path="pnet.pth", device=device, predict_score=True)
|
| 72 |
+
pnet_model = load_pointnet_model(model_path="pnet.pth", device=device, predict_score=True)
|
| 73 |
+
#pnet_model = load_pointnet_model(model_path="YOUR_MODEL_PATH/initial_epoch_100.pth", device=device, predict_score=True)
|
| 74 |
#pnet_model = None
|
| 75 |
|
| 76 |
+
#pnet_class_model = load_pointnet_class_model(model_path="YOUR_MODEL_PATH/initial_epoch_100.pth", device=device)
|
| 77 |
+
#pnet_class_model = load_pointnet_class_model_10d(model_path="YOUR_MODEL_PATH/initial_epoch_75.pth", device=device)
|
| 78 |
+
pnet_class_model = load_pointnet_class_model(model_path="pnet_class.pth", device=device)
|
| 79 |
#pnet_class_model = None
|
| 80 |
|
| 81 |
+
#voxel_model = load_3dcnn_model(model_path="YOUR_MODEL_PATH/initial_epoch_100.pth", device=device, predict_score=True)
|
| 82 |
voxel_model = None
|
| 83 |
|
| 84 |
|
|
|
|
| 169 |
|
| 170 |
|
| 171 |
print(f"Results saved to {results_filepath}")
|
| 172 |
+
|
train_pnet_class.py
CHANGED
|
@@ -16,8 +16,8 @@ import os
|
|
| 16 |
if __name__ == "__main__":
|
| 17 |
|
| 18 |
# Load the dataset
|
| 19 |
-
dataset_path = "
|
| 20 |
-
model_save_path = "
|
| 21 |
|
| 22 |
os.makedirs(model_save_path, exist_ok=True)
|
| 23 |
|
|
|
|
| 16 |
if __name__ == "__main__":
|
| 17 |
|
| 18 |
# Load the dataset
|
| 19 |
+
dataset_path = "<YOUR_DATASET_PATH_HERE>"
|
| 20 |
+
model_save_path = "<YOUR_MODEL_SAVE_PATH_HERE>"
|
| 21 |
|
| 22 |
os.makedirs(model_save_path, exist_ok=True)
|
| 23 |
|
train_pnet_v2.py
CHANGED
|
@@ -3,8 +3,8 @@ from fast_pointnet_v2 import train_pointnet
|
|
| 3 |
if __name__ == "__main__":
|
| 4 |
|
| 5 |
# Load the dataset
|
| 6 |
-
dataset_path = "
|
| 7 |
-
model_save_path = "
|
| 8 |
|
| 9 |
# Train the model
|
| 10 |
train_pointnet(dataset_path, model_save_path, epochs=100, batch_size=512, lr=0.001, score_weight=0.25, class_weight=1.0)
|
|
|
|
| 3 |
if __name__ == "__main__":
|
| 4 |
|
| 5 |
# Load the dataset
|
| 6 |
+
dataset_path = "path/to/your/dataset"
|
| 7 |
+
model_save_path = "path/to/your/model.pth"
|
| 8 |
|
| 9 |
# Train the model
|
| 10 |
train_pointnet(dataset_path, model_save_path, epochs=100, batch_size=512, lr=0.001, score_weight=0.25, class_weight=1.0)
|