Commit

first commit
milancurcic committed Jul 29, 2018
0 parents commit 02a7cd0
Showing 21 changed files with 1,183 additions and 0 deletions.
82 changes: 82 additions & 0 deletions CMakeLists.txt
@@ -0,0 +1,82 @@
# cmake version, project name, language
cmake_minimum_required(VERSION 2.8 FATAL_ERROR)
project(neural-fortran Fortran)

# set output paths for modules, archives, and executables
set(CMAKE_Fortran_MODULE_DIRECTORY ${PROJECT_BINARY_DIR}/include)
set(CMAKE_LIBRARY_OUTPUT_DIRECTORY ${CMAKE_BINARY_DIR}/lib)
set(CMAKE_ARCHIVE_OUTPUT_DIRECTORY ${CMAKE_BINARY_DIR}/lib)
set(CMAKE_RUNTIME_OUTPUT_DIRECTORY ${CMAKE_BINARY_DIR}/bin)

# if build type not specified, default to release
if(NOT CMAKE_BUILD_TYPE)
set(CMAKE_BUILD_TYPE "release")
endif()

# handle integer size
if(INT)
message(STATUS "Configuring build for ${INT}-bit integers")
add_definitions(-DINT${INT})
else()
message(STATUS "Configuring build for 32-bit integers")
add_definitions(-DINT32)
endif()

# handle real size
if(REAL)
message(STATUS "Configuring build for ${REAL}-bit reals")
add_definitions(-DREAL${REAL})
else()
message(STATUS "Configuring build for 32-bit reals")
add_definitions(-DREAL32)
endif()

# compiler flags for gfortran
if(CMAKE_Fortran_COMPILER_ID MATCHES GNU)

if(SERIAL)
message(STATUS "Configuring to build with -fcoarrays-single")
set(CMAKE_Fortran_FLAGS "${CMAKE_Fortran_FLAGS} -fcoarray=single")
endif()

if(BLAS)
set(CMAKE_Fortran_FLAGS "${CMAKE_Fortran_FLAGS} -fexternal-blas ${BLAS}")
message(STATUS "Configuring build to use BLAS from ${BLAS}")
endif()

set(CMAKE_Fortran_FLAGS "${CMAKE_Fortran_FLAGS} -cpp")
set(CMAKE_Fortran_FLAGS_DEBUG "-O0 -g -fcheck=all -fbacktrace")
set(CMAKE_Fortran_FLAGS_RELEASE "-O3 -ffast-math")

endif()

# compiler flags for ifort
if(CMAKE_Fortran_COMPILER_ID MATCHES Intel)
set(CMAKE_Fortran_FLAGS "${CMAKE_Fortran_FLAGS} -coarray=shared -fpp -assume byterecl,realloc_lhs -heap-arrays")
set(CMAKE_Fortran_FLAGS_DEBUG "-O0 -g -C -traceback")
set(CMAKE_Fortran_FLAGS_RELEASE "-O3")
endif()

# compiler flags for Cray ftn
if(CMAKE_Fortran_COMPILER_ID MATCHES Cray)
set(CMAKE_Fortran_FLAGS "${CMAKE_Fortran_FLAGS} -h noomp")
set(CMAKE_Fortran_FLAGS_DEBUG "-O0 -g")
set(CMAKE_Fortran_FLAGS_RELEASE "-O3")
endif()

# library to archive (libneural.a)
add_library(neural
  src/lib/mod_activation.f90
  src/lib/mod_io.f90
  src/lib/mod_kinds.f90
  src/lib/mod_layer.f90
  src/lib/mod_mnist.f90
  src/lib/mod_network.f90
  src/lib/mod_parallel.f90
  src/lib/mod_random.f90
)

# tests
enable_testing()
foreach(execid mnist network_save network_sync)
add_executable(test_${execid} src/tests/test_${execid}.f90)
target_link_libraries(test_${execid} neural)
add_test(test_${execid} bin/test_${execid})
endforeach()

foreach(execid mnist simple sine)
add_executable(example_${execid} src/tests/example_${execid}.f90)
target_link_libraries(example_${execid} neural)
add_test(example_${execid} bin/example_${execid})
endforeach()
21 changes: 21 additions & 0 deletions LICENSE
@@ -0,0 +1,21 @@
MIT License

Copyright (c) 2018 Milan Curcic

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
114 changes: 114 additions & 0 deletions README.md
@@ -0,0 +1,114 @@
# neural-fortran

A parallel neural net microframework.
Companion code to Chapter 6 of
[Modern Fortran: Building Efficient Parallel Applications](https://www.manning.com/books/modern-fortran?a_aid=modernfortran&a_bid=2dc4d442).

## Getting started

### Getting the code

```
git clone https://github.com/modern-fortran/neural-fortran
```

### Dependencies

* A Fortran compiler (the CMake build supports gfortran, Intel ifort, and Cray ftn)
* OpenCoarrays (optional, for parallel execution; gfortran only)
* BLAS or MKL (optional, for faster `matmul` calls)

### Building neural-fortran

```
cd neural-fortran
mkdir build
cd build
cmake ..
make
```

The examples will be built in the `bin/` directory.
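
The build also registers the test and example programs with CTest, so once built you can run the whole suite from the build directory:

```
ctest
```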

#### Building in parallel mode

If you use gfortran and want to build neural-fortran in parallel mode,
you must first install [OpenCoarrays](https://github.com/sourceryinstitute/OpenCoarrays).
Once installed, build with the compiler wrapper `caf` and launch the
executables with `cafrun`:

```
FC=caf cmake ..
make
cafrun -n 4 bin/example_mnist # run MNIST example on 4 cores
```

#### Building in serial mode

If you use gfortran and want to build neural-fortran in serial mode,
configure using the following flag:

```
cmake .. -DSERIAL=1
```

#### Building with a different compiler

If you want to build with a different compiler, such as Intel Fortran,
specify `FC` when issuing `cmake`:

```
FC=ifort cmake ..
```

#### Building with BLAS or MKL

To use an external BLAS or MKL library for `matmul` calls,
run cmake like this:

```
cmake .. -DBLAS=-lblas
```

where the value of `-DBLAS` names the desired BLAS implementation,
which must be available on the linker path.
This option is currently supported only with gfortran.
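
For example, to link against Intel MKL's single dynamic runtime library (assuming `libmkl_rt` is on your linker path; adjust for your MKL layout):

```
cmake .. -DBLAS=-lmkl_rt
```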

#### Building with debug flags

To build with debugging flags enabled, type:

```
cmake .. -DCMAKE_BUILD_TYPE=debug
```
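
#### Building with different precision

The build also accepts `-DINT=32|64` and `-DREAL=32|64|128` to select the integer and real kinds used throughout the library (both default to 32-bit; see `CMakeLists.txt` and `src/lib/mod_kinds.f90`), for example:

```
cmake .. -DREAL=64
```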

### Unpacking the data

If you intend to work with the MNIST dataset, unpack it first:

```
cd data/mnist
tar xzvf mnist.tar.gz
cd -
```

### Examples

The example programs in `src/tests/` (`example_simple`, `example_sine`, and
`example_mnist`) demonstrate the full workflow. The snippets below are minimal
sketches; consult `src/lib/mod_network.f90` for the actual interfaces.

#### Creating a neural net
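
A minimal sketch, assuming the constructor exported by `mod_network` follows the pattern used by the example programs (the `network_type` name and its interface are assumptions; see `src/lib/mod_network.f90` for the actual API):

```
use mod_network, only: network_type
type(network_type) :: net

! a network with 3 inputs, a hidden layer of 5 neurons, and 2 outputs
net = network_type([3, 5, 2])
```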

#### Training
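
Another sketch, assuming a type-bound `train` method that takes an input array, a target array, and a learning rate `eta` (again an assumption; see `example_simple` for the real call):

```
real :: input(3), output(2)
integer :: i

input = [0.2, 0.4, 0.6]
output = [0.12, 0.24]

! train repeatedly on one sample, then print the prediction
do i = 1, 500
  call net % train(input, output, eta=1.0)
end do
print *, 'Prediction:', net % output(input)
```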

#### Saving and loading from file
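
Assuming type-bound `save` and `load` methods that write and read the network state as a text file (names inferred from the `network_save` test; confirm in `src/lib/mod_network.f90`):

```
call net % save('my_net.txt')
call net % load('my_net.txt')
```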

#### MNIST training example
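
Once the data is unpacked (see above), the compiled example can be run serially, or in parallel with OpenCoarrays:

```
./bin/example_mnist
cafrun -n 4 bin/example_mnist
```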

## Features

* Dense, fully connected neural networks of arbitrary shape and size
* Backprop with root-mean-square cost function
* Data-based parallelism
* Several activation functions
* MNIST training example
* Support for 32-, 64-, and 128-bit floating point numbers
Binary file added data/mnist/mnist.tar.gz
Binary file not shown.
34 changes: 34 additions & 0 deletions plotting/plot_mnist_accuracy.py
@@ -0,0 +1,34 @@
#!/usr/bin/env python

import argparse
parser = argparse.ArgumentParser()
parser.add_argument('path', help='path to file where output of example_mnist is stored')
args = parser.parse_args()

path = args.path

import matplotlib
matplotlib.use('Agg')

import matplotlib.pyplot as plt

with open(path) as f:
    lines = [line.strip() for line in f]

accuracy = []
accuracy.append(float(lines[0].split()[2]))
for line in lines[1:]:
    accuracy.append(float(line.split()[4]))

fig = plt.figure(figsize=(8, 4))
ax = fig.add_subplot(111, xlim=(0, len(accuracy)-1), ylim=(0, 100))
ax.tick_params(axis='both', which='major', labelsize=16)
plt.plot(accuracy, 'k-', marker='.', ms=12, lw=2)
plt.xlabel('Epoch', fontsize=16)
plt.ylabel('Accuracy [%]', fontsize=16)
plt.grid()
plt.title('MNIST accuracy [%]', fontsize=16)
fig.subplots_adjust(bottom=0.2)
plt.savefig('mnist_accuracy.svg')
plt.close(fig)
33 changes: 33 additions & 0 deletions plotting/plot_mnist_digit.py
@@ -0,0 +1,33 @@
#!/usr/bin/env python

import argparse
parser = argparse.ArgumentParser()
parser.add_argument('index', help='index of the MNIST training sample to plot')
args = parser.parse_args()

n = int(args.index)

import matplotlib
matplotlib.use('Agg')

import matplotlib.pyplot as plt
import matplotlib.cm as cm
import numpy as np

imagepath = '../data/mnist/mnist_training_images.dat'
labelpath = '../data/mnist/mnist_training_labels.dat'

images = np.reshape(np.fromfile(imagepath, dtype='float32'), [50000, 784])
labels = np.fromfile(labelpath, dtype='float32')

digit = np.reshape(images[n,:], [28, 28])[-1::-1,:]
fig = plt.figure(figsize=(8, 7))
ax = fig.add_subplot(111, aspect='equal')
ax.tick_params(axis='both', which='major', labelsize=16)
plt.xticks(range(0, 32, 4))
plt.yticks(range(0, 32, 4))
plt.pcolor(digit, cmap=cm.binary, vmin=0, vmax=1)
plt.colorbar(shrink=0.8, ticks=np.arange(0, 1.1, 0.1))
plt.title('MNIST training sample ' + str(n) + '; Label = ' + str(int(labels[n])), fontsize=16)
plt.savefig('digit_' + '%2.2i' % n + '.svg')
plt.close(fig)
2 changes: 2 additions & 0 deletions plotting/requirements.txt
@@ -0,0 +1,2 @@
matplotlib
numpy
100 changes: 100 additions & 0 deletions src/lib/mod_activation.f90
@@ -0,0 +1,100 @@
module mod_activation

! A collection of activation functions and their derivatives.

use mod_kinds, only: ik, rk

implicit none

private

public :: gaussian, gaussian_prime
public :: relu, relu_prime
public :: sigmoid, sigmoid_prime
public :: step, step_prime
public :: tanhf, tanh_prime

contains

pure function gaussian(x) result(res)
! Gaussian activation function.
real(rk), intent(in) :: x(:)
real(rk) :: res(size(x))
res = exp(-x**2)
end function gaussian

pure function gaussian_prime(x) result(res)
! First derivative of the Gaussian activation function.
real(rk), intent(in) :: x(:)
real(rk) :: res(size(x))
res = -2 * x * gaussian(x)
end function gaussian_prime

pure function relu(x) result(res)
! Rectified Linear Unit (ReLU) activation function.
real(rk), intent(in) :: x(:)
real(rk) :: res(size(x))
res = max(0._rk, x)
end function relu

pure function relu_prime(x) result(res)
! First derivative of the Rectified Linear Unit (ReLU) activation function.
real(rk), intent(in) :: x(:)
real(rk) :: res(size(x))
where (x > 0)
res = 1
elsewhere
res = 0
end where
end function relu_prime

pure function sigmoid(x) result(res)
! Sigmoid activation function.
real(rk), intent(in) :: x(:)
real(rk) :: res(size(x))
res = 1 / (1 + exp(-x))
end function sigmoid

pure function sigmoid_prime(x) result(res)
! First derivative of the sigmoid activation function.
real(rk), intent(in) :: x(:)
real(rk) :: res(size(x))
res = sigmoid(x) * (1 - sigmoid(x))
end function sigmoid_prime

pure function step(x) result(res)
! Step activation function.
real(rk), intent(in) :: x(:)
real(rk) :: res(size(x))
where (x > 0)
res = 1
elsewhere
res = 0
end where
end function step

pure function step_prime(x) result(res)
! First derivative of the step activation function.
real(rk), intent(in) :: x(:)
real(rk) :: res(size(x))
res = 0
end function step_prime

pure function tanhf(x) result(res)
! Hyperbolic tangent activation function.
! Same as the intrinsic tanh, but wrapped
! here so that we can use a procedure
! pointer with it.
real(rk), intent(in) :: x(:)
real(rk) :: res(size(x))
res = tanh(x)
end function tanhf

pure function tanh_prime(x) result(res)
! First derivative of the tanh activation function.
real(rk), intent(in) :: x(:)
real(rk) :: res(size(x))
res = 1 - tanh(x)**2
end function tanh_prime

end module mod_activation
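
All activations above are `pure` array functions with identical interfaces, which is what lets a layer select its activation through a procedure pointer at run time. A minimal sketch of that pattern (the `activation` interface name is illustrative, not part of this commit):

```
program activation_demo
  use mod_kinds, only: rk
  use mod_activation, only: sigmoid, relu
  implicit none

  ! an interface matching every activation function in mod_activation
  abstract interface
    pure function activation(x) result(res)
      import :: rk
      real(rk), intent(in) :: x(:)
      real(rk) :: res(size(x))
    end function activation
  end interface

  procedure(activation), pointer :: f => null()

  f => sigmoid
  print *, f([-1._rk, 0._rk, 1._rk])

  f => relu
  print *, f([-1._rk, 0._rk, 1._rk])

end program activation_demo
```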
