Skip to content
Go to file
- Each layer has its own op scope
- A scope can be specified to share variables among different layers
- Some examples have been adapted
1 contributor

Users who have contributed to this file

41 lines (32 sloc) 1.49 KB
# -*- coding: utf-8 -*-
""" Deep Neural Network for MNIST dataset classification task.

A fully-connected network (784 -> 64 -> 64 -> 10) with tanh activations,
L2 weight decay, and dropout, trained with SGD and evaluated with a
Top-3 accuracy metric.

References:
    Y. LeCun, L. Bottou, Y. Bengio, and P. Haffner. "Gradient-based
    learning applied to document recognition." Proceedings of the IEEE,
    86(11):2278-2324, November 1998.

Links:
    [MNIST Dataset] http://yann.lecun.com/exdb/mnist/
"""
from __future__ import division, print_function, absolute_import

import tflearn

# Data loading and preprocessing
import tflearn.datasets.mnist as mnist
X, Y, testX, testY = mnist.load_data(one_hot=True)

# Building deep neural network
input_layer = tflearn.input_data(shape=[None, 784])  # 28x28 images, flattened
dense1 = tflearn.fully_connected(input_layer, 64, activation='tanh',
                                 regularizer='L2', weight_decay=0.001)
dropout1 = tflearn.dropout(dense1, 0.8)  # keep probability 0.8
dense2 = tflearn.fully_connected(dropout1, 64, activation='tanh',
                                 regularizer='L2', weight_decay=0.001)
dropout2 = tflearn.dropout(dense2, 0.8)
softmax = tflearn.fully_connected(dropout2, 10, activation='softmax')

# Regression using SGD with learning rate decay and Top-3 accuracy
sgd = tflearn.SGD(learning_rate=0.1, lr_decay=0.96, decay_step=1000)
top_k = tflearn.metrics.Top_k(3)
net = tflearn.regression(softmax, optimizer=sgd, metric=top_k,
                         loss='categorical_crossentropy')

# Training
model = tflearn.DNN(net, tensorboard_verbose=0)
model.fit(X, Y, n_epoch=20, validation_set=(testX, testY),
          show_metric=True, run_id="dense_model")
You can’t perform that action at this time.