diff --git a/full_eval.py b/full_eval.py
index 672ee12..d174ef8 100644
--- a/full_eval.py
+++ b/full_eval.py
@@ -12,7 +12,7 @@
 import os
 from argparse import ArgumentParser
 
-mipnerf360_outdoor_scenes = ["flowers", "garden", "stump", "treehill"]
+mipnerf360_outdoor_scenes = ["bicycle", "flowers", "garden", "stump", "treehill"]
 mipnerf360_indoor_scenes = ["room", "counter", "kitchen", "bonsai"]
 tanks_and_temples_scenes = ["truck", "train"]
 deep_blending_scenes = ["drjohnson", "playroom"]
@@ -37,7 +37,7 @@ if not args.skip_training or not args.skip_rendering:
 args = parser.parse_args()
 
 if not args.skip_training:
-    common_args = " --eval --save_iterations -1"
+    common_args = " --quiet --eval --test_iterations -1"
     for scene in mipnerf360_outdoor_scenes:
         source = args.mipnerf360 + "/" + scene
         os.system("python train.py -s " + source + " -i images_4 -m " + args.output_path + "/" + scene + common_args)
diff --git a/scene/gaussian_model.py b/scene/gaussian_model.py
index f50a4b2..20a81d4 100644
--- a/scene/gaussian_model.py
+++ b/scene/gaussian_model.py
@@ -84,9 +84,7 @@ class GaussianModel:
         self.active_sh_degree += 1
 
     def create_from_pcd(self, pcd : BasicPointCloud, spatial_lr_scale : float):
-        spatial_lr_scale = 5
         self.spatial_lr_scale = spatial_lr_scale
-        #print(spatial_lr_scale)
         fused_point_cloud = torch.tensor(np.asarray(pcd.points)).float().cuda()
         fused_color = RGB2SH(torch.tensor(np.asarray(pcd.colors)).float().cuda())
         features = torch.zeros((fused_color.shape[0], 3, (self.max_sh_degree + 1) ** 2)).float().cuda()