/
demo_spatio_temporal_auto_encoder.ipynb
1583 lines (1583 loc) · 976 KB
/
demo_spatio_temporal_auto_encoder.ipynb
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
886
887
888
889
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
924
925
926
927
928
929
930
931
932
933
934
935
936
937
938
939
940
941
942
943
944
945
946
947
948
949
950
951
952
953
954
955
956
957
958
959
960
961
962
963
964
965
966
967
968
969
970
971
972
973
974
975
976
977
978
979
980
981
982
983
984
985
986
987
988
989
990
991
992
993
994
995
996
997
998
999
1000
{
"cells": [
{
"cell_type": "markdown",
"metadata": {},
"source": [
"# Observing reconstruction images sequences (video image) by Spatio-Temporal Auto-Encoder.\n",
"\n",
"This notebook demonstrates that the Spatio-Temporal Auto-Encoder can learn images and reconstruct them. The image data set for this demo is about [Tennis player's motion](https://lmb.informatik.uni-freiburg.de/Publications/2011/Bro11a/)\n",
".\n",
"\n",
"Firstly, import Python and Cython modules for building Spatio-Temporal Auto-Encoder."
]
},
{
"cell_type": "code",
"execution_count": 1,
"metadata": {},
"outputs": [],
"source": [
"from pydbm.cnn.convolutional_neural_network import ConvolutionalNeuralNetwork\n",
"from pydbm.cnn.convolutionalneuralnetwork.residual_learning import ResidualLearning\n",
"from pydbm.cnn.convolutionalneuralnetwork.convolutional_auto_encoder import ConvolutionalAutoEncoder\n",
"from pydbm.cnn.spatio_temporal_auto_encoder import SpatioTemporalAutoEncoder\n",
"from pydbm.cnn.layerablecnn.convolution_layer import ConvolutionLayer as ConvolutionLayer1\n",
"from pydbm.cnn.layerablecnn.convolution_layer import ConvolutionLayer as ConvolutionLayer2\n",
"from pydbm.cnn.featuregenerator.image_generator import ImageGenerator"
]
},
{
"cell_type": "code",
"execution_count": 2,
"metadata": {},
"outputs": [],
"source": [
"from pydbm.activation.relu_function import ReLuFunction\n",
"from pydbm.activation.tanh_function import TanhFunction\n",
"from pydbm.activation.logistic_function import LogisticFunction\n",
"from pydbm.loss.mean_squared_error import MeanSquaredError\n",
"from pydbm.optimization.optparams.adam import Adam"
]
},
{
"cell_type": "code",
"execution_count": 3,
"metadata": {},
"outputs": [],
"source": [
"from pydbm.synapse.cnn_graph import CNNGraph as ConvGraph1\n",
"from pydbm.synapse.cnn_graph import CNNGraph as ConvGraph2"
]
},
{
"cell_type": "code",
"execution_count": 4,
"metadata": {},
"outputs": [],
"source": [
"from pydbm.rnn.lstmmodel.conv_lstm_model import ConvLSTMModel as Encoder\n",
"from pydbm.rnn.lstmmodel.convlstmmodel.deconv_lstm_model import DeconvLSTMModel as Decoder\n",
"from pydbm.optimization.optparams.adam import Adam as EncoderAdam\n",
"from pydbm.optimization.optparams.adam import Adam as DecoderAdam\n",
"from pydbm.synapse.recurrenttemporalgraph.lstm_graph import LSTMGraph as EncoderGraph\n",
"from pydbm.synapse.recurrenttemporalgraph.lstm_graph import LSTMGraph as DecoderGraph"
]
},
{
"cell_type": "code",
"execution_count": 5,
"metadata": {},
"outputs": [],
"source": [
"from pydbm.verification.verificate_function_approximation import VerificateFunctionApproximation"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Set hyperparameters of feature points in image data."
]
},
{
"cell_type": "code",
"execution_count": 6,
"metadata": {},
"outputs": [],
"source": [
"batch_size = 20\n",
"seq_len = 5\n",
"channel = 1\n",
"height = 100\n",
"width = 100\n",
"scale = 0.1\n",
"enc_dim = 100\n",
"dec_dim = 100"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Instantiate objects and call the method."
]
},
{
"cell_type": "code",
"execution_count": 7,
"metadata": {},
"outputs": [],
"source": [
"from logging import getLogger, StreamHandler, NullHandler, DEBUG, ERROR\n",
"\n",
"logger = getLogger(\"pydbm\")\n",
"handler = StreamHandler()\n",
"handler.setLevel(DEBUG)\n",
"logger.setLevel(DEBUG)\n",
"logger.addHandler(handler)"
]
},
{
"cell_type": "code",
"execution_count": 8,
"metadata": {},
"outputs": [],
"source": [
"# Init.\n",
"encoder_graph = EncoderGraph()\n",
"\n",
"# Activation function in LSTM.\n",
"encoder_graph.observed_activating_function = TanhFunction()\n",
"encoder_graph.input_gate_activating_function = LogisticFunction()\n",
"encoder_graph.forget_gate_activating_function = LogisticFunction()\n",
"encoder_graph.output_gate_activating_function = LogisticFunction()\n",
"encoder_graph.hidden_activating_function = TanhFunction()\n",
"encoder_graph.output_activating_function = TanhFunction()\n",
"\n",
"# Initialization strategy.\n",
"# This method initializes each weight matrix and bias from a Gaussian distribution: `np.random.normal(size=hoge) * 0.01`.\n",
"encoder_graph.create_rnn_cells(\n",
" input_neuron_count=enc_dim,\n",
" hidden_neuron_count=100,\n",
" output_neuron_count=enc_dim\n",
")\n",
"\n",
"# Optimizer for Encoder.\n",
"encoder_opt_params = EncoderAdam()\n",
"encoder_opt_params.weight_limit = 0.5\n",
"encoder_opt_params.dropout_rate = 0.0\n",
"\n",
"encoder = Encoder(\n",
" # Delegate `graph` to `LSTMModel`.\n",
" graph=encoder_graph,\n",
" # The number of epochs in mini-batch training.\n",
" epochs=60,\n",
" # The batch size.\n",
" batch_size=batch_size,\n",
" # Learning rate.\n",
" learning_rate=1e-05,\n",
" # Attenuate the `learning_rate` by a factor of this value every `attenuate_epoch`.\n",
" learning_attenuate_rate=0.1,\n",
" # The epoch interval at which `learning_rate` is attenuated by `learning_attenuate_rate`.\n",
" attenuate_epoch=10,\n",
" # Referred maximum step `t` in BPTT. If `0`, this class refers to all past data in BPTT.\n",
" bptt_tau=seq_len,\n",
" # Size of Test data set. If this value is `0`, the validation will not be executed.\n",
" test_size_rate=0.3,\n",
" # Loss function.\n",
" computable_loss=MeanSquaredError(),\n",
" # Optimizer.\n",
" opt_params=encoder_opt_params,\n",
" # Verification function.\n",
" verificatable_result=VerificateFunctionApproximation(),\n",
" # Tolerance for the optimization.\n",
" # When the loss or score is not improving by at least tol \n",
" # for two consecutive iterations, convergence is considered \n",
" # to be reached and training stops.\n",
" tol=0.0,\n",
" tld=1.0,\n",
" filter_num=batch_size,\n",
" channel=batch_size\n",
")"
]
},
{
"cell_type": "code",
"execution_count": 9,
"metadata": {},
"outputs": [],
"source": [
"# Init.\n",
"decoder_graph = DecoderGraph()\n",
"\n",
"# Activation function in LSTM.\n",
"decoder_graph.observed_activating_function = TanhFunction()\n",
"decoder_graph.input_gate_activating_function = LogisticFunction()\n",
"decoder_graph.forget_gate_activating_function = LogisticFunction()\n",
"decoder_graph.output_gate_activating_function = LogisticFunction()\n",
"decoder_graph.hidden_activating_function = TanhFunction()\n",
"decoder_graph.output_activating_function = TanhFunction()\n",
"\n",
"# Initialization strategy.\n",
"# This method initializes each weight matrix and bias from a Gaussian distribution: `np.random.normal(size=hoge) * 0.01`.\n",
"decoder_graph.create_rnn_cells(\n",
" input_neuron_count=width * height * channel,\n",
" hidden_neuron_count=width * height * channel,\n",
" output_neuron_count=width * height * channel\n",
")\n",
"\n",
"# Optimizer for Decoder.\n",
"decoder_opt_params = DecoderAdam()\n",
"decoder_opt_params.weight_limit = 0.5\n",
"decoder_opt_params.dropout_rate = 0.0\n",
"\n",
"decoder = Decoder(\n",
" # Delegate `graph` to `LSTMModel`.\n",
" graph=decoder_graph,\n",
" # The number of epochs in mini-batch training.\n",
" epochs=100,\n",
" # The batch size.\n",
" batch_size=batch_size,\n",
" # Learning rate.\n",
" learning_rate=1e-05,\n",
" # Attenuate the `learning_rate` by a factor of this value every `attenuate_epoch`.\n",
" learning_attenuate_rate=0.1,\n",
" # The epoch interval at which `learning_rate` is attenuated by `learning_attenuate_rate`.\n",
" attenuate_epoch=50,\n",
" seq_len=seq_len,\n",
" # Referred maximum step `t` in BPTT. If `0`, this class refers to all past data in BPTT.\n",
" bptt_tau=seq_len,\n",
" # Size of Test data set. If this value is `0`, the validation will not be executed.\n",
" test_size_rate=0.3,\n",
" # Loss function.\n",
" computable_loss=MeanSquaredError(),\n",
" # Optimizer.\n",
" opt_params=decoder_opt_params,\n",
" # Verification function.\n",
" verificatable_result=VerificateFunctionApproximation(),\n",
" # Tolerance for the optimization.\n",
" # When the loss or score is not improving by at least tol \n",
" # for two consecutive iterations, convergence is considered \n",
" # to be reached and training stops.\n",
" tol=0.0,\n",
" filter_num=batch_size,\n",
" channel=batch_size\n",
")"
]
},
{
"cell_type": "code",
"execution_count": 10,
"metadata": {},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"Setup CNN layers and the parameters.\n"
]
}
],
"source": [
"conv1 = ConvolutionLayer1(\n",
" ConvGraph1(\n",
" activation_function=TanhFunction(),\n",
" filter_num=batch_size,\n",
" channel=channel,\n",
" kernel_size=3,\n",
" scale=scale,\n",
" stride=1,\n",
" pad=1\n",
" )\n",
")\n",
"\n",
"conv2 = ConvolutionLayer2(\n",
" ConvGraph2(\n",
" activation_function=TanhFunction(),\n",
" filter_num=batch_size,\n",
" channel=batch_size,\n",
" kernel_size=3,\n",
" scale=scale,\n",
" stride=1,\n",
" pad=1\n",
" )\n",
")\n",
"\n",
"cnn = SpatioTemporalAutoEncoder(\n",
" layerable_cnn_list=[\n",
" conv1, \n",
" conv2\n",
" ],\n",
" encoder=encoder,\n",
" decoder=decoder,\n",
" epochs=100,\n",
" batch_size=batch_size,\n",
" learning_rate=1e-05,\n",
" learning_attenuate_rate=0.1,\n",
" attenuate_epoch=25,\n",
" computable_loss=MeanSquaredError(),\n",
" opt_params=Adam(),\n",
" verificatable_result=VerificateFunctionApproximation(),\n",
" test_size_rate=0.3,\n",
" tol=1e-15,\n",
" save_flag=False\n",
")\n",
"\n",
"feature_generator = ImageGenerator(\n",
" epochs=100,\n",
" batch_size=batch_size,\n",
" training_image_dir=\"../../../../Downloads/tennis/train/\",\n",
" test_image_dir=\"../../../../Downloads/tennis/test/\",\n",
" seq_len=seq_len,\n",
" gray_scale_flag=True,\n",
" wh_size_tuple=(height, width),\n",
" norm_mode=\"z_score\"\n",
")"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Execute learning."
]
},
{
"cell_type": "code",
"execution_count": 11,
"metadata": {},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"CNN starts learning.\n",
"Generate training data: (20, 5, 1, 100, 100)\n",
"Generate test data: (20, 5, 1, 100, 100)\n",
"Encoder/Decoder's deltas are propagated.\n",
"Encoder/Decoder's best params are updated.\n",
"Convolutional Auto-Encoder's best params are updated.\n",
"----------------------------------------------------------------------------------------------------\n",
"Convolutional Auto-Encoder's loss:\n",
"Epoch: 1\n",
"Loss: \n",
"Training: 0.397456471809 Test: 0.395307212262\n",
"----------------------------------------------------------------------------------------------------\n",
"Encoder/Decoder's loss: \n",
"Training: 1.00045840308 Test: 1.02567640242\n",
"----------------------------------------------------------------------------------------------------\n",
"Generate training data: (20, 5, 1, 100, 100)\n",
"Generate test data: (20, 5, 1, 100, 100)\n",
"Encoder/Decoder's deltas are propagated.\n",
"----------------------------------------------------------------------------------------------------\n",
"Convolutional Auto-Encoder's loss:\n",
"Epoch: 2\n",
"Loss: \n",
"Training: 0.420858135885 Test: 0.40402639882\n",
"----------------------------------------------------------------------------------------------------\n",
"Encoder/Decoder's loss: \n",
"Training: 1.00756602554 Test: 1.01212494455\n",
"----------------------------------------------------------------------------------------------------\n",
"Generate training data: (20, 5, 1, 100, 100)\n",
"Generate test data: (20, 5, 1, 100, 100)\n",
"Encoder/Decoder's deltas are propagated.\n",
"Encoder/Decoder's best params are updated.\n",
"----------------------------------------------------------------------------------------------------\n",
"Convolutional Auto-Encoder's loss:\n",
"Epoch: 3\n",
"Loss: \n",
"Training: 0.420073766058 Test: 0.396962276059\n",
"----------------------------------------------------------------------------------------------------\n",
"Encoder/Decoder's loss: \n",
"Training: 0.996380564556 Test: 1.03855690061\n",
"----------------------------------------------------------------------------------------------------\n",
"Generate training data: (20, 5, 1, 100, 100)\n",
"Generate test data: (20, 5, 1, 100, 100)\n",
"Encoder/Decoder's deltas are propagated.\n",
"Convolutional Auto-Encoder's best params are updated.\n",
"----------------------------------------------------------------------------------------------------\n",
"Convolutional Auto-Encoder's loss:\n",
"Epoch: 4\n",
"Loss: \n",
"Training: 0.392613099248 Test: 0.399015557883\n",
"----------------------------------------------------------------------------------------------------\n",
"Encoder/Decoder's loss: \n",
"Training: 1.02696810493 Test: 1.00891953598\n",
"----------------------------------------------------------------------------------------------------\n",
"Generate training data: (20, 5, 1, 100, 100)\n",
"Generate test data: (20, 5, 1, 100, 100)\n",
"Encoder/Decoder's deltas are propagated.\n",
"Encoder/Decoder's best params are updated.\n",
"----------------------------------------------------------------------------------------------------\n",
"Convolutional Auto-Encoder's loss:\n",
"Epoch: 5\n",
"Loss: \n",
"Training: 0.407857714801 Test: 0.393536534155\n",
"----------------------------------------------------------------------------------------------------\n",
"Encoder/Decoder's loss: \n",
"Training: 0.984412285901 Test: 0.984509766542\n",
"----------------------------------------------------------------------------------------------------\n",
"Generate training data: (20, 5, 1, 100, 100)\n",
"Generate test data: (20, 5, 1, 100, 100)\n",
"Encoder/Decoder's deltas are propagated.\n",
"----------------------------------------------------------------------------------------------------\n",
"Convolutional Auto-Encoder's loss:\n",
"Epoch: 6\n",
"Loss: \n",
"Training: 0.401523176232 Test: 0.363984831495\n",
"----------------------------------------------------------------------------------------------------\n",
"Encoder/Decoder's loss: \n",
"Training: 1.00620272754 Test: 1.02195760514\n",
"----------------------------------------------------------------------------------------------------\n",
"Generate training data: (20, 5, 1, 100, 100)\n",
"Generate test data: (20, 5, 1, 100, 100)\n",
"Encoder/Decoder's deltas are propagated.\n",
"Convolutional Auto-Encoder's best params are updated.\n",
"----------------------------------------------------------------------------------------------------\n",
"Convolutional Auto-Encoder's loss:\n",
"Epoch: 7\n",
"Loss: \n",
"Training: 0.344035241114 Test: 0.383966752912\n",
"----------------------------------------------------------------------------------------------------\n",
"Encoder/Decoder's loss: \n",
"Training: 1.01242679931 Test: 0.987956698842\n",
"----------------------------------------------------------------------------------------------------\n",
"Generate training data: (20, 5, 1, 100, 100)\n",
"Generate test data: (20, 5, 1, 100, 100)\n",
"Encoder/Decoder's deltas are propagated.\n",
"----------------------------------------------------------------------------------------------------\n",
"Convolutional Auto-Encoder's loss:\n",
"Epoch: 8\n",
"Loss: \n",
"Training: 0.365647345051 Test: 0.385685600993\n",
"----------------------------------------------------------------------------------------------------\n",
"Encoder/Decoder's loss: \n",
"Training: 0.995067390202 Test: 0.991500404749\n",
"----------------------------------------------------------------------------------------------------\n",
"Generate training data: (20, 5, 1, 100, 100)\n",
"Generate test data: (20, 5, 1, 100, 100)\n",
"Encoder/Decoder's deltas are propagated.\n",
"----------------------------------------------------------------------------------------------------\n",
"Convolutional Auto-Encoder's loss:\n",
"Epoch: 9\n",
"Loss: \n",
"Training: 0.410460857235 Test: 0.399919678786\n",
"----------------------------------------------------------------------------------------------------\n",
"Encoder/Decoder's loss: \n",
"Training: 0.991295356066 Test: 0.952944124991\n",
"----------------------------------------------------------------------------------------------------\n",
"Generate training data: (20, 5, 1, 100, 100)\n",
"Generate test data: (20, 5, 1, 100, 100)\n",
"Encoder/Decoder's deltas are propagated.\n",
"Encoder/Decoder's best params are updated.\n",
"----------------------------------------------------------------------------------------------------\n",
"Convolutional Auto-Encoder's loss:\n",
"Epoch: 10\n",
"Loss: \n",
"Training: 0.412723325428 Test: 0.412519814454\n",
"----------------------------------------------------------------------------------------------------\n",
"Encoder/Decoder's loss: \n",
"Training: 0.950355929239 Test: 0.945882690082\n",
"----------------------------------------------------------------------------------------------------\n",
"Generate training data: (20, 5, 1, 100, 100)\n",
"Generate test data: (20, 5, 1, 100, 100)\n",
"Encoder/Decoder's deltas are propagated.\n",
"----------------------------------------------------------------------------------------------------\n",
"Convolutional Auto-Encoder's loss:\n",
"Epoch: 11\n",
"Loss: \n",
"Training: 0.416069761827 Test: 0.352057095571\n",
"Rolling mean of Loss (Window is 10): \n",
"Training: 0.3973249132862485 Test: 0.3934924657819885\n",
"----------------------------------------------------------------------------------------------------\n",
"Encoder/Decoder's loss: \n",
"Training: 0.953697943224 Test: 0.945937203812\n",
"----------------------------------------------------------------------------------------------------\n",
"Generate training data: (20, 5, 1, 100, 100)\n",
"Generate test data: (20, 5, 1, 100, 100)\n",
"Encoder/Decoder's deltas are propagated.\n",
"----------------------------------------------------------------------------------------------------\n",
"Convolutional Auto-Encoder's loss:\n",
"Epoch: 12\n",
"Loss: \n",
"Training: 0.391898377347 Test: 0.394133470548\n",
"Rolling mean of Loss (Window is 10): \n",
"Training: 0.3991862422880731 Test: 0.38916745411284526\n",
"----------------------------------------------------------------------------------------------------\n",
"Encoder/Decoder's loss: \n",
"Training: 0.954757262496 Test: 0.951492264016\n",
"----------------------------------------------------------------------------------------------------\n",
"Generate training data: (20, 5, 1, 100, 100)\n",
"Generate test data: (20, 5, 1, 100, 100)\n",
"Encoder/Decoder's deltas are propagated.\n",
"Encoder/Decoder's best params are updated.\n",
"----------------------------------------------------------------------------------------------------\n",
"Convolutional Auto-Encoder's loss:\n",
"Epoch: 13\n",
"Loss: \n",
"Training: 0.404249519479 Test: 0.401195372864\n",
"Rolling mean of Loss (Window is 10): \n",
"Training: 0.396290266434257 Test: 0.3881781612856504\n",
"----------------------------------------------------------------------------------------------------\n",
"Encoder/Decoder's loss: \n",
"Training: 0.950354617019 Test: 0.951914772303\n",
"----------------------------------------------------------------------------------------------------\n",
"Generate training data: (20, 5, 1, 100, 100)\n",
"Generate test data: (20, 5, 1, 100, 100)\n",
"Encoder/Decoder's deltas are propagated.\n",
"----------------------------------------------------------------------------------------------------\n",
"Convolutional Auto-Encoder's loss:\n",
"Epoch: 14\n",
"Loss: \n",
"Training: 0.392773934194 Test: 0.393810351908\n",
"Rolling mean of Loss (Window is 10): \n",
"Training: 0.394707841776322 Test: 0.3886014709660969\n",
"----------------------------------------------------------------------------------------------------\n",
"Encoder/Decoder's loss: \n",
"Training: 0.95454368632 Test: 0.957769776911\n",
"----------------------------------------------------------------------------------------------------\n",
"Generate training data: (20, 5, 1, 100, 100)\n",
"Generate test data: (20, 5, 1, 100, 100)\n",
"Encoder/Decoder's deltas are propagated.\n",
"----------------------------------------------------------------------------------------------------\n",
"Convolutional Auto-Encoder's loss:\n",
"Epoch: 15\n",
"Loss: \n",
"Training: 0.385923621957 Test: 0.398887524435\n",
"Rolling mean of Loss (Window is 10): \n",
"Training: 0.3947239252709303 Test: 0.38808095036865903\n",
"----------------------------------------------------------------------------------------------------\n",
"Encoder/Decoder's loss: \n",
"Training: 0.954040481029 Test: 0.952474380805\n",
"----------------------------------------------------------------------------------------------------\n",
"Generate training data: (20, 5, 1, 100, 100)\n",
"Generate test data: (20, 5, 1, 100, 100)\n",
"Encoder/Decoder's deltas are propagated.\n",
"----------------------------------------------------------------------------------------------------\n",
"Convolutional Auto-Encoder's loss:\n",
"Epoch: 16\n",
"Loss: \n",
"Training: 0.387218451668 Test: 0.428535855266\n",
"Rolling mean of Loss (Window is 10): \n",
"Training: 0.3925305159865464 Test: 0.38861604939670163\n",
"----------------------------------------------------------------------------------------------------\n",
"Encoder/Decoder's loss: \n",
"Training: 0.958780646447 Test: 0.959603248694\n",
"----------------------------------------------------------------------------------------------------\n",
"Generate training data: (20, 5, 1, 100, 100)\n",
"Generate test data: (20, 5, 1, 100, 100)\n",
"Encoder/Decoder's deltas are propagated.\n",
"----------------------------------------------------------------------------------------------------\n",
"Convolutional Auto-Encoder's loss:\n",
"Epoch: 17\n",
"Loss: \n",
"Training: 0.381618228864 Test: 0.398160556204\n",
"Rolling mean of Loss (Window is 10): \n",
"Training: 0.3911000435301471 Test: 0.3950711517737828\n",
"----------------------------------------------------------------------------------------------------\n",
"Encoder/Decoder's loss: \n",
"Training: 0.950417016836 Test: 0.956302485883\n",
"----------------------------------------------------------------------------------------------------\n",
"Generate training data: (20, 5, 1, 100, 100)\n",
"Generate test data: (20, 5, 1, 100, 100)\n",
"Encoder/Decoder's deltas are propagated.\n",
"Encoder/Decoder's best params are updated.\n",
"----------------------------------------------------------------------------------------------------\n",
"Convolutional Auto-Encoder's loss:\n",
"Epoch: 18\n",
"Loss: \n",
"Training: 0.425120697954 Test: 0.398495567646\n",
"Rolling mean of Loss (Window is 10): \n",
"Training: 0.39485834230506617 Test: 0.39649053210296864\n",
"----------------------------------------------------------------------------------------------------\n",
"Encoder/Decoder's loss: \n",
"Training: 0.949817821488 Test: 0.94747278938\n",
"----------------------------------------------------------------------------------------------------\n",
"Generate training data: (20, 5, 1, 100, 100)\n",
"Generate test data: (20, 5, 1, 100, 100)\n",
"Encoder/Decoder's deltas are propagated.\n",
"----------------------------------------------------------------------------------------------------\n",
"Convolutional Auto-Encoder's loss:\n",
"Epoch: 19\n",
"Loss: \n",
"Training: 0.410810620352 Test: 0.39194381129\n",
"Rolling mean of Loss (Window is 10): \n",
"Training: 0.400805677595314 Test: 0.3977715287681737\n",
"----------------------------------------------------------------------------------------------------\n",
"Encoder/Decoder's loss: \n",
"Training: 0.951004489939 Test: 0.949638153483\n",
"----------------------------------------------------------------------------------------------------\n",
"Generate training data: (20, 5, 1, 100, 100)\n",
"Generate test data: (20, 5, 1, 100, 100)\n",
"Encoder/Decoder's deltas are propagated.\n",
"----------------------------------------------------------------------------------------------------\n",
"Convolutional Auto-Encoder's loss:\n",
"Epoch: 20\n",
"Loss: \n",
"Training: 0.411947242882 Test: 0.389506293561\n",
"Rolling mean of Loss (Window is 10): \n",
"Training: 0.4008406539070203 Test: 0.3969739420185327\n",
"----------------------------------------------------------------------------------------------------\n",
"Encoder/Decoder's loss: \n",
"Training: 0.950635775968 Test: 0.947862526117\n",
"----------------------------------------------------------------------------------------------------\n",
"Generate training data: (20, 5, 1, 100, 100)\n",
"Generate test data: (20, 5, 1, 100, 100)\n",
"Encoder/Decoder's deltas are propagated.\n",
"----------------------------------------------------------------------------------------------------\n",
"Convolutional Auto-Encoder's loss:\n",
"Epoch: 21\n",
"Loss: \n",
"Training: 0.416651750568 Test: 0.38262341516\n",
"Rolling mean of Loss (Window is 10): \n",
"Training: 0.4007630456523934 Test: 0.39467258992926235\n",
"----------------------------------------------------------------------------------------------------\n",
"Encoder/Decoder's loss: \n",
"Training: 0.953127419125 Test: 0.952539387076\n",
"----------------------------------------------------------------------------------------------------\n",
"Generate training data: (20, 5, 1, 100, 100)\n",
"Generate test data: (20, 5, 1, 100, 100)\n",
"Encoder/Decoder's deltas are propagated.\n",
"----------------------------------------------------------------------------------------------------\n",
"Convolutional Auto-Encoder's loss:\n",
"Epoch: 22\n",
"Loss: \n",
"Training: 0.390119160399 Test: 0.38315759273\n",
"Rolling mean of Loss (Window is 10): \n",
"Training: 0.40082124452647844 Test: 0.3977292218881616\n",
"----------------------------------------------------------------------------------------------------\n",
"Encoder/Decoder's loss: \n",
"Training: 0.950647327841 Test: 0.951877347059\n",
"----------------------------------------------------------------------------------------------------\n",
"Generate training data: (20, 5, 1, 100, 100)\n",
"Generate test data: (20, 5, 1, 100, 100)\n",
"Encoder/Decoder's deltas are propagated.\n",
"----------------------------------------------------------------------------------------------------\n",
"Convolutional Auto-Encoder's loss:\n",
"Epoch: 23\n",
"Loss: \n",
"Training: 0.367307052454 Test: 0.411580829233\n",
"Rolling mean of Loss (Window is 10): \n",
"Training: 0.4006433228316618 Test: 0.39663163410629554\n",
"----------------------------------------------------------------------------------------------------\n",
"Encoder/Decoder's loss: \n",
"Training: 0.957603207588 Test: 0.956489828659\n",
"----------------------------------------------------------------------------------------------------\n",
"Generate training data: (20, 5, 1, 100, 100)\n",
"Generate test data: (20, 5, 1, 100, 100)\n",
"Encoder/Decoder's deltas are propagated.\n",
"Encoder/Decoder's best params are updated.\n",
"Convolutional Auto-Encoder's best params are updated.\n",
"----------------------------------------------------------------------------------------------------\n",
"Convolutional Auto-Encoder's loss:\n",
"Epoch: 24\n",
"Loss: \n",
"Training: 0.33868045952 Test: 0.927437068105\n",
"Rolling mean of Loss (Window is 10): \n",
"Training: 0.39694907612915836 Test: 0.3976701797432306\n",
"----------------------------------------------------------------------------------------------------\n",
"Encoder/Decoder's loss: \n",
"Training: 0.949795083904 Test: 0.580018870981\n",
"----------------------------------------------------------------------------------------------------\n",
"Generate training data: (20, 5, 1, 100, 100)\n",
"Generate test data: (20, 5, 1, 100, 100)\n",
"Encoder/Decoder's deltas are propagated.\n",
"Encoder/Decoder's best params are updated.\n",
"----------------------------------------------------------------------------------------------------\n",
"Convolutional Auto-Encoder's loss:\n",
"Epoch: 25\n",
"Loss: \n",
"Training: 0.927933643159 Test: 0.928431605831\n",
"Rolling mean of Loss (Window is 10): \n",
"Training: 0.39153972866175446 Test: 0.4510328513629352\n",
"----------------------------------------------------------------------------------------------------\n",
"Encoder/Decoder's loss: \n",
"Training: 0.573342144018 Test: 0.583737772049\n",
"----------------------------------------------------------------------------------------------------\n",
"Generate training data: (20, 5, 1, 100, 100)\n",
"Generate test data: (20, 5, 1, 100, 100)\n",
"Encoder/Decoder's deltas are propagated.\n",
"----------------------------------------------------------------------------------------------------\n",
"Convolutional Auto-Encoder's loss:\n",
"Epoch: 26\n",
"Loss: \n",
"Training: 0.919931396501 Test: 0.927855097245\n",
"Rolling mean of Loss (Window is 10): \n",
"Training: 0.44574073078197 Test: 0.5039872595025213\n",
"----------------------------------------------------------------------------------------------------\n",
"Encoder/Decoder's loss: \n",
"Training: 0.580477956045 Test: 0.581531848388\n",
"----------------------------------------------------------------------------------------------------\n",
"Generate training data: (20, 5, 1, 100, 100)\n",
"Generate test data: (20, 5, 1, 100, 100)\n",
"Encoder/Decoder's deltas are propagated.\n",
"Encoder/Decoder's best params are updated.\n",
"----------------------------------------------------------------------------------------------------\n",
"Convolutional Auto-Encoder's loss:\n",
"Epoch: 27\n",
"Loss: \n",
"Training: 0.929200284879 Test: 0.927611626596\n",
"Rolling mean of Loss (Window is 10): \n",
"Training: 0.4990120252653131 Test: 0.5539191837004533\n",
"----------------------------------------------------------------------------------------------------\n",
"Encoder/Decoder's loss: \n",
"Training: 0.572498666113 Test: 0.593790391427\n",
"----------------------------------------------------------------------------------------------------\n",
"Generate training data: (20, 5, 1, 100, 100)\n",
"Generate test data: (20, 5, 1, 100, 100)\n",
"Encoder/Decoder's deltas are propagated.\n",
"----------------------------------------------------------------------------------------------------\n",
"Convolutional Auto-Encoder's loss:\n",
"Epoch: 28\n",
"Loss: \n",
"Training: 0.926860395788 Test: 0.931795235997\n",
"Rolling mean of Loss (Window is 10): \n",
"Training: 0.5537702308668818 Test: 0.6068642907397119\n",
"----------------------------------------------------------------------------------------------------\n",
"Encoder/Decoder's loss: \n",
"Training: 0.583386236919 Test: 0.574217552802\n",
"----------------------------------------------------------------------------------------------------\n",
"Generate training data: (20, 5, 1, 100, 100)\n",
"Generate test data: (20, 5, 1, 100, 100)\n",
"Encoder/Decoder's deltas are propagated.\n",
"----------------------------------------------------------------------------------------------------\n",
"Convolutional Auto-Encoder's loss:\n",
"Epoch: 29\n",
"Loss: \n",
"Training: 0.925839822093 Test: 0.93164826374\n",
"Rolling mean of Loss (Window is 10): \n",
"Training: 0.6039442006503147 Test: 0.6601942575748353\n",
"----------------------------------------------------------------------------------------------------\n",
"Encoder/Decoder's loss: \n",
"Training: 0.580691894675 Test: 0.576549031639\n",
"----------------------------------------------------------------------------------------------------\n",
"Generate training data: (20, 5, 1, 100, 100)\n",
"Generate test data: (20, 5, 1, 100, 100)\n",
"Encoder/Decoder's deltas are propagated.\n",
"Encoder/Decoder's best params are updated.\n",
"----------------------------------------------------------------------------------------------------\n",
"Convolutional Auto-Encoder's loss:\n",
"Epoch: 30\n",
"Loss: \n",
"Training: 0.929862552091 Test: 0.927154588475\n",
"Rolling mean of Loss (Window is 10): \n",
"Training: 0.6554471208243815 Test: 0.7141647028198785\n",
"----------------------------------------------------------------------------------------------------\n",
"Encoder/Decoder's loss: \n",
"Training: 0.569390918772 Test: 0.581501184398\n",
"----------------------------------------------------------------------------------------------------\n",
"Generate training data: (20, 5, 1, 100, 100)\n",
"Generate test data: (20, 5, 1, 100, 100)\n",
"Encoder/Decoder's deltas are propagated.\n",
"----------------------------------------------------------------------------------------------------\n",
"Convolutional Auto-Encoder's loss:\n",
"Epoch: 31\n",
"Loss: \n",
"Training: 0.929766285387 Test: 0.925813402818\n",
"Rolling mean of Loss (Window is 10): \n",
"Training: 0.7072386517452273 Test: 0.7679295323112922\n",
"----------------------------------------------------------------------------------------------------\n",
"Encoder/Decoder's loss: \n",
"Training: 0.570877893273 Test: 0.57720674485\n",
"----------------------------------------------------------------------------------------------------\n",
"Generate training data: (20, 5, 1, 100, 100)\n",
"Generate test data: (20, 5, 1, 100, 100)\n",
"Encoder/Decoder's deltas are propagated.\n",
"----------------------------------------------------------------------------------------------------\n",
"Convolutional Auto-Encoder's loss:\n",
"Epoch: 32\n",
"Loss: \n",
"Training: 0.924764428481 Test: 0.92296637302\n",
"Rolling mean of Loss (Window is 10): \n",
"Training: 0.7585501052271016 Test: 0.8222485310770841\n",
"----------------------------------------------------------------------------------------------------\n",
"Encoder/Decoder's loss: \n",
"Training: 0.570966914159 Test: 0.58017501559\n",
"----------------------------------------------------------------------------------------------------\n",
"Generate training data: (20, 5, 1, 100, 100)\n",
"Generate test data: (20, 5, 1, 100, 100)\n",
"Encoder/Decoder's deltas are propagated.\n",
"----------------------------------------------------------------------------------------------------\n",
"Convolutional Auto-Encoder's loss:\n",
"Epoch: 33\n",
"Loss: \n",
"Training: 0.926278194216 Test: 0.928510040218\n",
"Rolling mean of Loss (Window is 10): \n",
"Training: 0.8120146320353256 Test: 0.876229409106086\n",
"----------------------------------------------------------------------------------------------------\n",
"Encoder/Decoder's loss: \n",
"Training: 0.594838537261 Test: 0.582849102459\n",
"----------------------------------------------------------------------------------------------------\n",
"Generate training data: (20, 5, 1, 100, 100)\n",
"Generate test data: (20, 5, 1, 100, 100)\n",
"Encoder/Decoder's deltas are propagated.\n",
"----------------------------------------------------------------------------------------------------\n",
"Convolutional Auto-Encoder's loss:\n",
"Epoch: 34\n",
"Loss: \n",
"Training: 0.925489207906 Test: 0.921352473778\n",
"Rolling mean of Loss (Window is 10): \n",
"Training: 0.8679117462114974 Test: 0.9279223302046342\n",
"----------------------------------------------------------------------------------------------------\n",
"Encoder/Decoder's loss: \n",
"Training: 0.582383534571 Test: 0.58281373112\n",
"----------------------------------------------------------------------------------------------------\n",
"Generate training data: (20, 5, 1, 100, 100)\n",
"Generate test data: (20, 5, 1, 100, 100)\n",
"Encoder/Decoder's deltas are propagated.\n",
"----------------------------------------------------------------------------------------------------\n",
"Convolutional Auto-Encoder's loss:\n",
"Epoch: 35\n",
"Loss: \n",
"Training: 0.921351188597 Test: 0.92703475966\n",
"Rolling mean of Loss (Window is 10): \n",
"Training: 0.9265926210500675 Test: 0.9273138707719368\n",
"----------------------------------------------------------------------------------------------------\n",
"Encoder/Decoder's loss: \n",
"Training: 0.579186526452 Test: 0.5780751119\n",
"----------------------------------------------------------------------------------------------------\n",
"Generate training data: (20, 5, 1, 100, 100)\n",
"Generate test data: (20, 5, 1, 100, 100)\n",
"Encoder/Decoder's deltas are propagated.\n",
"----------------------------------------------------------------------------------------------------\n",
"Convolutional Auto-Encoder's loss:\n",
"Epoch: 36\n",
"Loss: \n",
"Training: 0.930105180856 Test: 0.927614021308\n",
"Rolling mean of Loss (Window is 10): \n",
"Training: 0.9259343755938305 Test: 0.9271741861547683\n",
"----------------------------------------------------------------------------------------------------\n",
"Encoder/Decoder's loss: \n",
"Training: 0.579459708795 Test: 0.58797973983\n",
"----------------------------------------------------------------------------------------------------\n",
"Generate training data: (20, 5, 1, 100, 100)\n",
"Generate test data: (20, 5, 1, 100, 100)\n",
"Encoder/Decoder's deltas are propagated.\n",
"----------------------------------------------------------------------------------------------------\n",
"Convolutional Auto-Encoder's loss:\n",
"Epoch: 37\n",
"Loss: \n",
"Training: 0.924922552801 Test: 0.92297242402\n",
"Rolling mean of Loss (Window is 10): \n",
"Training: 0.9269517540292662 Test: 0.9271500785610902\n",
"----------------------------------------------------------------------------------------------------\n",
"Encoder/Decoder's loss: \n",
"Training: 0.59578184152 Test: 0.579302112555\n",
"----------------------------------------------------------------------------------------------------\n",
"Generate training data: (20, 5, 1, 100, 100)\n",
"Generate test data: (20, 5, 1, 100, 100)\n",
"Encoder/Decoder's deltas are propagated.\n",
"----------------------------------------------------------------------------------------------------\n",
"Convolutional Auto-Encoder's loss:\n",
"Epoch: 38\n",
"Loss: \n",
"Training: 0.926203416784 Test: 0.92248926159\n",
"Rolling mean of Loss (Window is 10): \n",
"Training: 0.9265239808214399 Test: 0.9266861583035066\n",
"----------------------------------------------------------------------------------------------------\n",
"Encoder/Decoder's loss: \n",
"Training: 0.579412227044 Test: 0.579168549126\n",
"----------------------------------------------------------------------------------------------------\n",
"Generate training data: (20, 5, 1, 100, 100)\n",
"Generate test data: (20, 5, 1, 100, 100)\n",
"Encoder/Decoder's deltas are propagated.\n",
"----------------------------------------------------------------------------------------------------\n",
"Convolutional Auto-Encoder's loss:\n",
"Epoch: 39\n",
"Loss: \n",
"Training: 0.92502080179 Test: 0.925817681303\n",
"Rolling mean of Loss (Window is 10): \n",
"Training: 0.9264582829210658 Test: 0.9257555608628284\n",
"----------------------------------------------------------------------------------------------------\n",
"Encoder/Decoder's loss: \n",
"Training: 0.577038174712 Test: 0.589516853384\n",
"----------------------------------------------------------------------------------------------------\n",
"Generate training data: (20, 5, 1, 100, 100)\n",
"Generate test data: (20, 5, 1, 100, 100)\n",
"Encoder/Decoder's deltas are propagated.\n",
"----------------------------------------------------------------------------------------------------\n",
"Convolutional Auto-Encoder's loss:\n",
"Epoch: 40\n",
"Loss: \n",
"Training: 0.92313782794 Test: 0.924795995079\n",
"Rolling mean of Loss (Window is 10): \n",
"Training: 0.9263763808907939 Test: 0.925172502619044\n",
"----------------------------------------------------------------------------------------------------\n",
"Encoder/Decoder's loss: \n",
"Training: 0.581413321058 Test: 0.600528220806\n",
"----------------------------------------------------------------------------------------------------\n",
"Generate training data: (20, 5, 1, 100, 100)\n",
"Generate test data: (20, 5, 1, 100, 100)\n",
"Encoder/Decoder's deltas are propagated.\n",
"----------------------------------------------------------------------------------------------------\n",
"Convolutional Auto-Encoder's loss:\n",
"Epoch: 41\n",
"Loss: \n",
"Training: 0.926474434976 Test: 0.928487192836\n",
"Rolling mean of Loss (Window is 10): \n",
"Training: 0.9257039084757726 Test: 0.9249366432794313\n",
"----------------------------------------------------------------------------------------------------\n",
"Encoder/Decoder's loss: \n",
"Training: 0.601790145624 Test: 0.612274420963\n",
"----------------------------------------------------------------------------------------------------\n",
"Generate training data: (20, 5, 1, 100, 100)\n",
"Generate test data: (20, 5, 1, 100, 100)\n",
"Encoder/Decoder's deltas are propagated.\n",
"----------------------------------------------------------------------------------------------------\n",
"Convolutional Auto-Encoder's loss:\n",
"Epoch: 42\n",
"Loss: \n",
"Training: 0.925612515837 Test: 0.925583300108\n",
"Rolling mean of Loss (Window is 10): \n",
"Training: 0.9253747234346761 Test: 0.9252040222812035\n",
"----------------------------------------------------------------------------------------------------\n",
"Encoder/Decoder's loss: \n",
"Training: 0.589283176896 Test: 0.613576447293\n",
"----------------------------------------------------------------------------------------------------\n",
"Generate training data: (20, 5, 1, 100, 100)\n",
"Generate test data: (20, 5, 1, 100, 100)\n",
"Encoder/Decoder's deltas are propagated.\n",
"----------------------------------------------------------------------------------------------------\n",
"Convolutional Auto-Encoder's loss:\n",
"Epoch: 43\n",
"Loss: \n",
"Training: 0.921801549514 Test: 0.91870325096\n",
"Rolling mean of Loss (Window is 10): \n",
"Training: 0.9254595321703117 Test: 0.9254657149900314\n",
"----------------------------------------------------------------------------------------------------\n",
"Encoder/Decoder's loss: \n",
"Training: 0.613209774317 Test: 0.599052119999\n",
"----------------------------------------------------------------------------------------------------\n",
"Generate training data: (20, 5, 1, 100, 100)\n",
"Generate test data: (20, 5, 1, 100, 100)\n",
"Encoder/Decoder's deltas are propagated.\n",
"----------------------------------------------------------------------------------------------------\n",
"Convolutional Auto-Encoder's loss:\n",
"Epoch: 44\n",
"Loss: \n",
"Training: 0.922868712785 Test: 0.922289195646\n",
"Rolling mean of Loss (Window is 10): \n",
"Training: 0.9250118677001259 Test: 0.9244850360642098\n",
"----------------------------------------------------------------------------------------------------\n",
"Encoder/Decoder's loss: \n",
"Training: 0.60924006026 Test: 0.601820522137\n",
"----------------------------------------------------------------------------------------------------\n",
"Generate training data: (20, 5, 1, 100, 100)\n",
"Generate test data: (20, 5, 1, 100, 100)\n",
"Encoder/Decoder's deltas are propagated.\n",
"----------------------------------------------------------------------------------------------------\n",
"Convolutional Auto-Encoder's loss:\n",
"Epoch: 45\n",
"Loss: \n",
"Training: 0.924981745801 Test: 0.9222918964\n",
"Rolling mean of Loss (Window is 10): \n",
"Training: 0.92474981818802 Test: 0.9245787082509695\n",
"----------------------------------------------------------------------------------------------------\n",
"Encoder/Decoder's loss: \n",
"Training: 0.606994052537 Test: 0.620876824931\n",
"----------------------------------------------------------------------------------------------------\n",
"Generate training data: (20, 5, 1, 100, 100)\n",
"Generate test data: (20, 5, 1, 100, 100)\n",
"Encoder/Decoder's deltas are propagated.\n",
"----------------------------------------------------------------------------------------------------\n",
"Convolutional Auto-Encoder's loss:\n",
"Epoch: 46\n",
"Loss: \n",
"Training: 0.920558499695 Test: 0.922115040155\n",
"Rolling mean of Loss (Window is 10): \n",
"Training: 0.9251128739084239 Test: 0.9241044219250432\n",
"----------------------------------------------------------------------------------------------------\n",
"Encoder/Decoder's loss: \n",
"Training: 0.613053854655 Test: 0.625583743398\n",
"----------------------------------------------------------------------------------------------------\n",
"Generate training data: (20, 5, 1, 100, 100)\n",
"Generate test data: (20, 5, 1, 100, 100)\n",
"Encoder/Decoder's deltas are propagated.\n",
"----------------------------------------------------------------------------------------------------\n",
"Convolutional Auto-Encoder's loss:\n",
"Epoch: 47\n",
"Loss: \n",
"Training: 0.908308424794 Test: 0.921888480641\n",
"Rolling mean of Loss (Window is 10): \n",
"Training: 0.9241582057923647 Test: 0.9235545238097277\n",
"----------------------------------------------------------------------------------------------------\n",
"Encoder/Decoder's loss: \n",
"Training: 0.628303971518 Test: 0.63038397019\n",
"----------------------------------------------------------------------------------------------------\n",
"Generate training data: (20, 5, 1, 100, 100)\n",
"Generate test data: (20, 5, 1, 100, 100)\n",
"Encoder/Decoder's deltas are propagated.\n",
"----------------------------------------------------------------------------------------------------\n",
"Convolutional Auto-Encoder's loss:\n",
"Epoch: 48\n",
"Loss: \n",
"Training: 0.921764472279 Test: 0.917330502599\n",
"Rolling mean of Loss (Window is 10): \n",
"Training: 0.9224967929916252 Test: 0.9234461294718284\n",
"----------------------------------------------------------------------------------------------------\n",
"Encoder/Decoder's loss: \n",
"Training: 0.63026447344 Test: 0.634446339562\n",