
Commit 06c25bf

drop old models
1 parent 0d2d03e

4 files changed: +23, -185 lines

NN/__init__.py

Lines changed: 1 addition & 2 deletions
@@ -49,9 +49,8 @@ def _nerf_from_config(config):
     trainingLoss=_makeTrainingLoss(config.get('training loss', None)),
     residual=config.get('residual', False),
     extraLatents=config.get('extra latents', None),
+    format=config['format']
   )
-  # If format is not specified, use BGR, because old models were trained to predict BGR
-  nerfParams['format'] = config.get('format', 'bgr')
 
   return lambda encoder, renderer: CNerf2D(
     encoder=encoder,
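
Note: with the 'bgr' fallback removed, a model config that omits 'format' now fails with a KeyError when _nerf_from_config builds the model. A minimal sketch of a compatible config section, assuming a plain dict as loaded by load_config (only 'format' is confirmed by this diff; the other keys are inferred from the config.get calls visible above):

# Hypothetical minimal config for _nerf_from_config after this commit.
nerf_config = {
  'training loss': None,   # optional, unchanged
  'residual': False,       # optional, unchanged
  'extra latents': None,   # optional, unchanged
  'format': 'bgr',         # now required: config['format'] raises KeyError if missing
}
assert 'format' in nerf_config, "old configs must be upgraded to set 'format' explicitly"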

NN/encoding/CCoordsEncodingLayer.py

Lines changed: 0 additions & 151 deletions
This file was deleted.

NN/encoding/__init__.py

Lines changed: 0 additions & 18 deletions
@@ -1,22 +1,8 @@
 import tensorflow as tf
-from .CCoordsEncodingLayer import CCoordsEncodingLayerV1
 from .CCoordsEncodingLayerV2 import CCoordsEncodingLayerV2 as CCoordsEncodingLayer
 from .CCoordsGridLayer import CCoordsGridLayer
 from .CFixedSinCosEncoding import CFixedSinCosEncoding
 
-# Old and incorrect implementation of the encoding layer
-class CFlatCoordsEncodingLayer_OLD(tf.keras.layers.Layer):
-  def __init__(self, N=32, **kwargs):
-    super().__init__(**kwargs)
-    self._enc = CCoordsEncodingLayerV1(N)
-    return
-
-  def call(self, x):
-    B = tf.shape(x)[0]
-    tf.assert_equal(tf.shape(x)[:-1], (B, ))
-    x = tf.cast(x, tf.float32)[..., None]
-    return self._enc(x)[:, 0]
-
 # Correct implementation of the encoding layer
 class CFlatCoordsEncodingLayer(tf.keras.layers.Layer):
   def __init__(self, encoder, **kwargs):
@@ -38,12 +24,8 @@ def encoding_from_config(config):
   if isinstance(config, dict):
     name = config['name']
     params = { k: v for k, v in config.items() if k != 'name' }
-    if 'learned' == name: return CFlatCoordsEncodingLayer_OLD(**params)
     if 'fixed' == name: return CFixedSinCosEncoding(**params)
 
-    if 'learned v2' == name: return CFlatCoordsEncodingLayer(
-      encoder=CCoordsEncodingLayerV1(**params)
-    )
     if 'learned v3' == name: return CFlatCoordsEncodingLayer(
       encoder=CCoordsEncodingLayer(**params)
     )
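
Note: with CCoordsEncodingLayerV1 gone, only the 'fixed' and 'learned v3' encodings can still be constructed. A rough sketch of the surviving paths through encoding_from_config (the constructor parameters shown, such as 'N', are assumptions about CFixedSinCosEncoding / CCoordsEncodingLayerV2, not something this diff confirms):

from NN.encoding import encoding_from_config

fixed_enc   = encoding_from_config({'name': 'fixed'})                # CFixedSinCosEncoding
learned_enc = encoding_from_config({'name': 'learned v3', 'N': 32})  # CCoordsEncodingLayerV2 wrapped in CFlatCoordsEncodingLayer

# Configs that still use 'learned' or 'learned v2' no longer match any branch
# and must be migrated to 'learned v3'.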

train.py

Lines changed: 22 additions & 14 deletions
@@ -1,14 +1,28 @@
-from Utils.utils import setupGPU, load_config, setGPUMemoryLimit
+from Utils.utils import setupGPU, load_config, setGPUMemoryLimit, upgrade_configs_structure
 setupGPU() # call it on startup to prevent OOM errors on my machine
 
 import argparse, os, shutil, json
 import tensorflow as tf
 from NN import model_from_config, model_to_architecture
 from Utils import dataset_from_config
 
+def validateLayersNames(model):
+  not_unique_layers = []
+  layers_names = set()
+  for layer in model.trainable_variables:
+    if layer.name in layers_names:
+      not_unique_layers.append(layer.name)
+    layers_names.add(layer.name)
+    continue
+  for layer in not_unique_layers:
+    print(f"Layer name '{layer}' is not unique")
+  assert not not_unique_layers, "Model contains not unique layers names"
+  return
+
 def main(args):
   folder = os.path.dirname(__file__)
   config = load_config(args.config, folder=folder)
+
   assert "experiment" in config, "Config must contain 'experiment' key"
   # store args as part of config
   config['experiment']['command line arguments'] = vars(args)
@@ -37,17 +51,7 @@ def main(args):
   # Create model
   model = model_from_config(config["model"], compile=True)
   model.summary(expand_nested=True)
-  # check if model is contain only unique layers names
-  not_unique_layers = []
-  layers_names = set()
-  for layer in model.trainable_variables:
-    if layer.name in layers_names:
-      not_unique_layers.append(layer.name)
-    layers_names.add(layer.name)
-    continue
-  for layer in not_unique_layers:
-    print(f"Layer name '{layer}' is not unique")
-  assert not not_unique_layers, "Model contains not unique layers names"
+  validateLayersNames(model)
   # save to config model architecture and number of parameters
   config['architecture'] = model_to_architecture(model)
 
@@ -82,10 +86,13 @@ def main(args):
     ),
     tf.keras.callbacks.TerminateOnNaN(),
   ]
-
-  if args.wandb: # init wandb
+
+  if args.wandb:
     import wandb
+
     wandb.init(project=args.wandb, entity=args.wandb_entity, config=config)
+    # assign run name if specified
+    if args.wandb_name: wandb.run.name = args.wandb_name
     # track model metrics only
     callbacks.append(wandb.keras.WandbCallback(
       save_model=False, # save model to wandb manually
@@ -130,6 +137,7 @@ def main(args):
 
   parser.add_argument('--wandb', type=str, help='Wandb project name (optional)')
   parser.add_argument('--wandb-entity', type=str, help='Wandb entity name (optional)')
+  parser.add_argument('--wandb-name', type=str, help='Wandb run name (optional)')
 
   args = parser.parse_args()
   if args.gpu_memory_mb: setGPUMemoryLimit(args.gpu_memory_mb)
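
Note: the uniqueness check now lives in validateLayersNames, so it can be exercised on its own, and runs can be named via, e.g., python train.py ... --wandb my-project --wandb-name baseline. Below is an illustrative sketch of the check tripping on a duplicated variable name; the Tiny module is invented for the example and is not part of the repo.

import tensorflow as tf

class Tiny(tf.Module):
  def __init__(self):
    super().__init__()
    # Both variables deliberately share a name, so their .name values collide ('w:0').
    self.a = tf.Variable(tf.zeros((2,)), name='w')
    self.b = tf.Variable(tf.ones((2,)), name='w')

# With validateLayersNames defined as in train.py above:
validateLayersNames(Tiny())  # prints "Layer name 'w:0' is not unique", then the assert fails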
