-
Notifications
You must be signed in to change notification settings - Fork 6
/
eval.py
96 lines (79 loc) · 2.76 KB
/
eval.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
# pylint: disable=[E1101,W0621]
import copy
import json
import os
import warnings
from typing import List, Optional

import omegaconf
from omegaconf import OmegaConf

from conerf.evaluators.gaussian_splatting_evaluator import GaussianSplatEvaluator
from conerf.utils.utils import setup_seed
warnings.filterwarnings("ignore", category=UserWarning)
def create_evaluator(
    config: OmegaConf,
    load_train_data: bool = False,
    trainset=None,
    load_val_data: bool = True,
    valset=None,
    load_test_data: bool = False,
    testset=None,
    models: Optional[List] = None,
    meta_data: Optional[List] = None,
    verbose: bool = False,
    device: str = "cuda",
):
    """Factory function for creating neural-field evaluators.

    Args:
        config: Parsed experiment configuration; ``config.neural_field_type``
            selects the evaluator implementation.
        load_train_data: Whether the evaluator should load the training split.
        trainset: Pre-loaded training dataset, or None to let the evaluator load it.
        load_val_data: Whether the evaluator should load the validation split.
        valset: Pre-loaded validation dataset, or None.
        load_test_data: Whether the evaluator should load the test split.
        testset: Pre-loaded test dataset, or None.
        models: Pre-built models to evaluate, or None.
        meta_data: Metadata accompanying ``models``, or None.
        verbose: Whether the evaluator should print progress information.
        device: Device string used for evaluation (e.g. "cuda").

    Returns:
        A ``GaussianSplatEvaluator`` when the neural field type contains "gs".

    Raises:
        NotImplementedError: If ``config.neural_field_type`` names a field
            type with no registered evaluator.
    """
    # Any neural-field type whose name contains "gs" (e.g. Gaussian Splatting
    # variants) is handled by the Gaussian Splatting evaluator.
    if config.neural_field_type.find("gs") >= 0:
        return GaussianSplatEvaluator(
            config, load_train_data, trainset,
            load_val_data, valset, load_test_data,
            testset, models, meta_data, verbose, device,
        )
    raise NotImplementedError(
        f"No evaluator implemented for neural field type: {config.neural_field_type}"
    )
if __name__ == "__main__":
    from conerf.utils.config import config_parser, load_config

    args = config_parser()
    # Parse YAML config to OmegaConf.
    config = load_config(args.config)
    # NOTE(review): `assert` is stripped under `python -O`; kept for
    # compatibility with the original behavior.
    assert config.dataset.get("data_split_json", "") != "" or config.dataset.scene != ""

    setup_seed(config.seed)

    # Collect the list of scene names to evaluate.
    scenes = []
    if config.dataset.get("data_split_json", "") != "" and config.dataset.scene == "":
        # For objaverse only: scene names come from the data-split JSON
        # (a mapping of object id -> object name; only names are used).
        with open(config.dataset.data_split_json, "r", encoding="utf-8") as fp:
            obj_id_to_name = json.load(fp)
        scenes.extend(obj_id_to_name.values())
    elif isinstance(config.dataset.scene, omegaconf.listconfig.ListConfig):
        # Multiple scenes listed directly in the config.
        scenes.extend(config.dataset.scene)
    else:
        # Single scene given as a plain string.
        scenes.append(config.dataset.scene)

    for scene in scenes:
        data_dir = os.path.join(config.dataset.root_dir, scene)
        # Skip scenes whose data directory does not exist on disk.
        if not os.path.exists(data_dir):
            continue

        # Each scene gets an isolated config copy with a scene-specific
        # experiment name so runs do not clobber each other.
        local_config = copy.deepcopy(config)
        local_config.expname = (
            f"{config.neural_field_type}_{config.task}_{config.dataset.name}_{scene}"
        )
        local_config.expname = local_config.expname + "_" + args.suffix
        local_config.dataset.scene = scene

        evaluator = create_evaluator(
            local_config,
            load_train_data=False,
            trainset=None,
            load_val_data=True,
            valset=None,
            load_test_data=True,
            testset=None,
            verbose=True,
        )
        evaluator.eval(split="val")
        evaluator.eval(split="test")
        evaluator.export_mesh()