diff --git a/README.md b/README.md index 4f1200d..f31fe92 100644 --- a/README.md +++ b/README.md @@ -301,12 +301,12 @@ cmake --build build --target install ``` #### Ubuntu 20.04 -Backwards compatibility with Focal Fossa is not fully tested, but building SIBR should still work by first invoking +Backwards compatibility with Focal Fossa is not fully tested, but building SIBR should still work by first invoking ```shell git checkout fossa_compatibility git submodule update --init ``` -and then continuing with the steps for Ubuntu 22.04. +in ```SIBR_viewers``` and then continuing with the steps for Ubuntu 22.04. ### Navigation in SIBR Viewers The SIBR interface provides several methods of navigating the scene. By default, you will be started with an FPS navigator, which you can control with ```W, A, S, D, Q, E``` for camera translation and ```I, K, J, L, U, O``` for rotation. Alternatively, you may want to use a Trackball-style navigator (select from the floating menu). You can also snap to a camera from the data set with the ```Snap to``` button or find the closest camera with ```Snap to closest```. The floating menues also allow you to change the navigation speed. You can use the ```Scaling Modifier``` to control the size of the displayed Gaussians, or show the initial point cloud. 
diff --git a/full_eval.py b/full_eval.py index d174ef8..672ee12 100644 --- a/full_eval.py +++ b/full_eval.py @@ -12,7 +12,7 @@ import os from argparse import ArgumentParser -mipnerf360_outdoor_scenes = ["bicycle", "flowers", "garden", "stump", "treehill"] +mipnerf360_outdoor_scenes = ["flowers", "garden", "stump", "treehill"] mipnerf360_indoor_scenes = ["room", "counter", "kitchen", "bonsai"] tanks_and_temples_scenes = ["truck", "train"] deep_blending_scenes = ["drjohnson", "playroom"] @@ -37,7 +37,7 @@ if not args.skip_training or not args.skip_rendering: args = parser.parse_args() if not args.skip_training: - common_args = " --quiet --eval --test_iterations -1" + common_args = " --eval --save_iterations -1" for scene in mipnerf360_outdoor_scenes: source = args.mipnerf360 + "/" + scene os.system("python train.py -s " + source + " -i images_4 -m " + args.output_path + "/" + scene + common_args) diff --git a/scene/gaussian_model.py b/scene/gaussian_model.py index 20a81d4..f50a4b2 100644 --- a/scene/gaussian_model.py +++ b/scene/gaussian_model.py @@ -84,7 +84,9 @@ class GaussianModel: self.active_sh_degree += 1 def create_from_pcd(self, pcd : BasicPointCloud, spatial_lr_scale : float): + spatial_lr_scale = 5 self.spatial_lr_scale = spatial_lr_scale + #print(spatial_lr_scale) fused_point_cloud = torch.tensor(np.asarray(pcd.points)).float().cuda() fused_color = RGB2SH(torch.tensor(np.asarray(pcd.colors)).float().cuda()) features = torch.zeros((fused_color.shape[0], 3, (self.max_sh_degree + 1) ** 2)).float().cuda()