Commit

md
dingguanglei committed Dec 25, 2018
1 parent ea63c4b commit 6bdb63f
Showing 9 changed files with 56 additions and 340 deletions.
384 changes: 50 additions & 334 deletions README.md (large diff not rendered)

10 changes: 5 additions & 5 deletions jdit/trainer/instances/fashingGenerateGan.py
@@ -39,10 +39,10 @@ def forward(self, x):
 class Generator(nn.Module):
     def __init__(self, input_nc=256, output_nc=1, depth=64):
         super(Generator, self).__init__()
-        self.latent_to_features = nn.Sequential(
+        self.encoder = nn.Sequential(
             nn.ConvTranspose2d(input_nc, 4 * depth, 4, 1, 0), # 256,1,1 => 256,4,4
             nn.ReLU())
-        self.features_to_image = nn.Sequential(
+        self.decoder = nn.Sequential(
             nn.ConvTranspose2d(4 * depth, 4 * depth, 4, 2, 1), # 256,4,4 => 256,8,8
             nn.ReLU(),
             nn.BatchNorm2d(4 * depth),
@@ -56,13 +56,13 @@ def __init__(self, input_nc=256, output_nc=1, depth=64):
         )

     def forward(self, input_data):
-        out = self.latent_to_features(input_data)
-        out = self.features_to_image(out)
+        out = self.encoder(input_data)
+        out = self.decoder(out)
         return out


 class FashingGenerateGenerateGanTrainer(GenerateGanTrainer):
-    d_turn = 5
+    d_turn = 1
     def __init__(self, logdir, nepochs, gpu_ids_abs, netG, netD, optG, optD, dataset, latent_shape):
         super(FashingGenerateGenerateGanTrainer, self).__init__(logdir, nepochs, gpu_ids_abs, netG, netD, optG, optD,
                                                                 dataset,
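For context, here is a minimal usage sketch of the renamed modules, assuming the full Generator definition from this file (the decoder's middle layers are elided by the diff, so the final spatial size is not visible here):

import torch

netG = Generator(input_nc=256, output_nc=1, depth=64)
z = torch.randn(8, 256, 1, 1)  # a batch of 8 latent codes, shape 256x1x1 each
fake = netG(z)                 # encoder: 1x1 -> 4x4, then the decoder upsamples further
print(fake.shape)              # (8, 1, H, W); H and W depend on the elided decoder layers

Separately, d_turn drops from 5 to 1. In GAN trainers this name usually means the number of discriminator updates per generator update, so this likely reduces the critic schedule from five steps to one per iteration (a hedged reading; the relevant jdit internals are not shown in this diff).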
2 changes: 1 addition & 1 deletion jdit/trainer/super.py
@@ -295,7 +295,7 @@ def _record_configs(self, configs_names=None):
         if (configs_names is None) or "dataset" in configs_names:
             _datasets = super(SupTrainer, self).__getattribute__("_datasets")
             for name, dataset in _datasets.items():
-                self.loger.regist_config(dataset, self.current_epoch, self.step, config_filename=name)
+                self.loger.regist_config(dataset, config_filename=name)
         if (configs_names is None) or "optimizer" in configs_names:
             _opts = super(SupTrainer, self).__getattribute__("_opts")
             for name, opt in _opts.items():
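A hedged reading of this change (the commit message does not state the intent): a dataset's configuration is static across training, so stamping it with the current epoch and step adds nothing. Below is a hypothetical, self-contained sketch of the pattern (not the actual jdit Loger API) in which the epoch/step stamps are optional:

class ConfigLogger:
    """Hypothetical stand-in for jdit's Loger, for illustration only."""

    def __init__(self):
        self.configs = {}

    def regist_config(self, obj, epoch=None, step=None, config_filename=None):
        # Record the object's public attributes; epoch/step are optional
        # stamps that static configs (such as datasets) can omit.
        entry = {k: repr(v) for k, v in vars(obj).items() if not k.startswith("_")}
        if epoch is not None:
            entry["epoch"], entry["step"] = epoch, step
        self.configs[config_filename or type(obj).__name__] = entry


class DummyDataset:
    def __init__(self):
        self.batch_size = 64
        self.num_workers = 2


loger = ConfigLogger()
loger.regist_config(DummyDataset(), config_filename="dataset")  # no epoch/step needed
print(loger.configs["dataset"])  # {'batch_size': '64', 'num_workers': '2'}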
Binary file added resources/class_dataset.png
Binary file added resources/class_log.jpg
Binary file added resources/class_net.png
Binary file added resources/class_opt.png
Binary file added resources/class_train.png
Binary file added resources/class_valid.png

0 comments on commit 6bdb63f
