Skip to content
Permalink
Browse files

chap 11 first update

  • Loading branch information...
jermwatt committed Mar 29, 2019
1 parent 9153e96 commit 53cbcfa1e83a9a8108f6cc1d771a18528c5b48e8
Showing with 101,466 additions and 0 deletions.
  1. BIN mlrefined_images/nonlinear_superlearn_images/Figure_11_1.png
  2. BIN mlrefined_images/nonlinear_superlearn_images/Figure_11_10.png
  3. BIN mlrefined_images/nonlinear_superlearn_images/Figure_11_11.png
  4. BIN mlrefined_images/nonlinear_superlearn_images/Figure_11_12.png
  5. BIN mlrefined_images/nonlinear_superlearn_images/Figure_11_13.png
  6. BIN mlrefined_images/nonlinear_superlearn_images/Figure_11_14.png
  7. BIN mlrefined_images/nonlinear_superlearn_images/Figure_11_15.png
  8. BIN mlrefined_images/nonlinear_superlearn_images/Figure_11_16.png
  9. BIN mlrefined_images/nonlinear_superlearn_images/Figure_11_17.png
  10. BIN mlrefined_images/nonlinear_superlearn_images/Figure_11_18.png
  11. BIN mlrefined_images/nonlinear_superlearn_images/Figure_11_19.png
  12. BIN mlrefined_images/nonlinear_superlearn_images/Figure_11_2.png
  13. BIN mlrefined_images/nonlinear_superlearn_images/Figure_11_20.png
  14. BIN mlrefined_images/nonlinear_superlearn_images/Figure_11_21.png
  15. BIN mlrefined_images/nonlinear_superlearn_images/Figure_11_22.png
  16. BIN mlrefined_images/nonlinear_superlearn_images/Figure_11_23.png
  17. BIN mlrefined_images/nonlinear_superlearn_images/Figure_11_24.png
  18. BIN mlrefined_images/nonlinear_superlearn_images/Figure_11_25.png
  19. BIN mlrefined_images/nonlinear_superlearn_images/Figure_11_26.png
  20. BIN mlrefined_images/nonlinear_superlearn_images/Figure_11_27.png
  21. BIN mlrefined_images/nonlinear_superlearn_images/Figure_11_28.png
  22. BIN mlrefined_images/nonlinear_superlearn_images/Figure_11_29.png
  23. BIN mlrefined_images/nonlinear_superlearn_images/Figure_11_3.png
  24. BIN mlrefined_images/nonlinear_superlearn_images/Figure_11_30.png
  25. BIN mlrefined_images/nonlinear_superlearn_images/Figure_11_31.png
  26. BIN mlrefined_images/nonlinear_superlearn_images/Figure_11_32.png
  27. BIN mlrefined_images/nonlinear_superlearn_images/Figure_11_33.png
  28. BIN mlrefined_images/nonlinear_superlearn_images/Figure_11_34.png
  29. BIN mlrefined_images/nonlinear_superlearn_images/Figure_11_35.png
  30. BIN mlrefined_images/nonlinear_superlearn_images/Figure_11_36.png
  31. BIN mlrefined_images/nonlinear_superlearn_images/Figure_11_37.png
  32. BIN mlrefined_images/nonlinear_superlearn_images/Figure_11_38.png
  33. BIN mlrefined_images/nonlinear_superlearn_images/Figure_11_39.png
  34. BIN mlrefined_images/nonlinear_superlearn_images/Figure_11_4.png
  35. BIN mlrefined_images/nonlinear_superlearn_images/Figure_11_40.png
  36. BIN mlrefined_images/nonlinear_superlearn_images/Figure_11_41.png
  37. BIN mlrefined_images/nonlinear_superlearn_images/Figure_11_42.png
  38. BIN mlrefined_images/nonlinear_superlearn_images/Figure_11_43.png
  39. BIN mlrefined_images/nonlinear_superlearn_images/Figure_11_44.png
  40. BIN mlrefined_images/nonlinear_superlearn_images/Figure_11_45.png
  41. BIN mlrefined_images/nonlinear_superlearn_images/Figure_11_46.png
  42. BIN mlrefined_images/nonlinear_superlearn_images/Figure_11_47.png
  43. BIN mlrefined_images/nonlinear_superlearn_images/Figure_11_48.png
  44. BIN mlrefined_images/nonlinear_superlearn_images/Figure_11_49.png
  45. BIN mlrefined_images/nonlinear_superlearn_images/Figure_11_5.png
  46. BIN mlrefined_images/nonlinear_superlearn_images/Figure_11_50.png
  47. BIN mlrefined_images/nonlinear_superlearn_images/Figure_11_51.png
  48. BIN mlrefined_images/nonlinear_superlearn_images/Figure_11_52.png
  49. BIN mlrefined_images/nonlinear_superlearn_images/Figure_11_53.png
  50. BIN mlrefined_images/nonlinear_superlearn_images/Figure_11_54.png
  51. BIN mlrefined_images/nonlinear_superlearn_images/Figure_11_55.png
  52. BIN mlrefined_images/nonlinear_superlearn_images/Figure_11_57.png
  53. BIN mlrefined_images/nonlinear_superlearn_images/Figure_11_6.png
  54. BIN mlrefined_images/nonlinear_superlearn_images/Figure_11_7.png
  55. BIN mlrefined_images/nonlinear_superlearn_images/Figure_11_8.png
  56. BIN mlrefined_images/nonlinear_superlearn_images/Figure_11_9.png
  57. BIN mlrefined_libraries/.DS_Store
  58. +245 −0 mlrefined_libraries/nonlinear_superlearn_library/boosting_classification_animator.py
  59. +170 −0 mlrefined_libraries/nonlinear_superlearn_library/boosting_regression_animators.py
  60. +166 −0 mlrefined_libraries/nonlinear_superlearn_library/boosting_regression_animators_v3.py
  61. +523 −0 mlrefined_libraries/nonlinear_superlearn_library/crossval_classification_animator.py
  62. 0 mlrefined_libraries/nonlinear_superlearn_library/intro_boost_library/__init__.py
  63. +133 −0 mlrefined_libraries/nonlinear_superlearn_library/intro_boost_library/cost_functions.py
  64. +286 −0 mlrefined_libraries/nonlinear_superlearn_library/intro_boost_library/kernel_booster.py
  65. +278 −0 mlrefined_libraries/nonlinear_superlearn_library/intro_boost_library/net_booster.py
  66. +90 −0 mlrefined_libraries/nonlinear_superlearn_library/intro_boost_library/normalizers.py
  67. +94 −0 mlrefined_libraries/nonlinear_superlearn_library/intro_boost_library/optimizers.py
  68. +346 −0 mlrefined_libraries/nonlinear_superlearn_library/intro_boost_library/stump_booster.py
  69. 0 mlrefined_libraries/nonlinear_superlearn_library/intro_general_library/__init__.py
  70. +236 −0 mlrefined_libraries/nonlinear_superlearn_library/intro_general_library/cost_functions.py
  71. +97 −0 mlrefined_libraries/nonlinear_superlearn_library/intro_general_library/history_plotters.py
  72. +72 −0 mlrefined_libraries/nonlinear_superlearn_library/intro_general_library/multilayer_perceptron.py
  73. +126 −0 ...ries/nonlinear_superlearn_library/intro_general_library/multilayer_perceptron_batch_normalized.py
  74. +85 −0 mlrefined_libraries/nonlinear_superlearn_library/intro_general_library/normalizers.py
  75. +91 −0 mlrefined_libraries/nonlinear_superlearn_library/intro_general_library/optimizers.py
  76. +35 −0 mlrefined_libraries/nonlinear_superlearn_library/intro_general_library/polys.py
  77. +35 −0 mlrefined_libraries/nonlinear_superlearn_library/intro_general_library/sines.py
  78. +100 −0 mlrefined_libraries/nonlinear_superlearn_library/intro_general_library/stumps.py
  79. +193 −0 mlrefined_libraries/nonlinear_superlearn_library/intro_general_library/superlearn_setup.py
  80. +91 −0 mlrefined_libraries/nonlinear_superlearn_library/intro_general_library/unsuperlearn_setup.py
  81. +306 −0 mlrefined_libraries/nonlinear_superlearn_library/main_classification_animators.py
  82. +363 −0 mlrefined_libraries/nonlinear_superlearn_library/main_classification_comparison.py
  83. +351 −0 mlrefined_libraries/nonlinear_superlearn_library/regression_basis_comparison_2d.py
  84. +724 −0 mlrefined_libraries/nonlinear_superlearn_library/regression_basis_single.py
  85. +387 −0 mlrefined_libraries/nonlinear_superlearn_library/regularization_regression_animators.py
  86. +13,360 −0 notes/11_Feature_learning/11_1_Introduction.html
  87. +260 −0 notes/11_Feature_learning/11_1_Introduction.ipynb
  88. +19,108 −0 notes/11_Feature_learning/11_2_Universal.html
  89. +5,910 −0 notes/11_Feature_learning/11_2_Universal.ipynb
  90. +13,801 −0 notes/11_Feature_learning/11_3_Real_approximation.html
  91. +645 −0 notes/11_Feature_learning/11_3_Real_approximation.ipynb
  92. +13,512 −0 notes/11_Feature_learning/11_4_Cross_validation.html
  93. +397 −0 notes/11_Feature_learning/11_4_Cross_validation.ipynb
  94. +13,830 −0 notes/11_Feature_learning/11_5_Boosting.html
  95. +738 −0 notes/11_Feature_learning/11_5_Boosting.ipynb
  96. +13,702 −0 notes/11_Feature_learning/11_6_Regularization.html
  97. +580 −0 notes/11_Feature_learning/11_6_Regularization.ipynb
  98. BIN notes/11_Feature_learning/videos/animation_1.mp4
  99. BIN notes/11_Feature_learning/videos/animation_10.mp4
  100. BIN notes/11_Feature_learning/videos/animation_2.mp4
  101. BIN notes/11_Feature_learning/videos/animation_3.mp4
  102. BIN notes/11_Feature_learning/videos/animation_4.mp4
  103. BIN notes/11_Feature_learning/videos/animation_5.mp4
  104. BIN notes/11_Feature_learning/videos/animation_6.mp4
  105. BIN notes/11_Feature_learning/videos/animation_7.mp4
  106. BIN notes/11_Feature_learning/videos/animation_9.mp4
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Diff not rendered.
Diff not rendered.
Diff not rendered.
Diff not rendered.
Diff not rendered.
Diff not rendered.
Diff not rendered.
Diff not rendered.
Diff not rendered.
Diff not rendered.
Diff not rendered.
Diff not rendered.
Diff not rendered.
Diff not rendered.
Diff not rendered.
Diff not rendered.
Diff not rendered.
Diff not rendered.
Diff not rendered.
Diff not rendered.
Diff not rendered.
Diff not rendered.
Diff not rendered.
Diff not rendered.
Diff not rendered.
Diff not rendered.
Diff not rendered.
Diff not rendered.
Diff not rendered.
Diff not rendered.
Diff not rendered.
Binary file not shown.
@@ -0,0 +1,245 @@
# import standard plotting and animation
import matplotlib.pyplot as plt
from matplotlib import gridspec
from matplotlib.ticker import FormatStrFormatter
import matplotlib.animation as animation
from mpl_toolkits.mplot3d import Axes3D
from IPython.display import clear_output
from matplotlib.ticker import MaxNLocator, FuncFormatter

# import autograd functionality
import autograd.numpy as np
import math
import time
from matplotlib import gridspec
import copy
from matplotlib.ticker import FormatStrFormatter
from inspect import signature

class Visualizer:
    '''
    Visualize cross validation performed on N = 2 dimensional input classification datasets.

    Loads a CSV whose rows are input features and whose final row holds the
    class labels (+1 / -1), then renders an animation showing a boosted
    classifier's decision boundary over the original / training / validation
    data together with train and validation misclassification counts.
    '''
    #### initialize ####
    def __init__(self,csvname):
        # grab input: all rows but the last are input dimensions, last row is labels
        data = np.loadtxt(csvname,delimiter = ',')
        self.x = data[:-1,:]
        self.y = data[-1:,:]

        # palette used throughout: colors[0] marks the +1 class, colors[1] the -1 class
        self.colors = ['salmon','cornflowerblue','lime','bisque','mediumaquamarine','b','m','g']

    #### animate multiple runs on single regression ####
    def animate_trainval_boosting(self,savepath,runner,num_frames,**kwargs):
        '''
        Render and save (as a video at `savepath`) an animation of a boosting run.

        `runner` is a project boosting object — assumed to expose `models`,
        `train_inds`, `valid_inds`, `normalizer`, `train_count_vals`,
        `valid_count_vals`, and `best_steps` (a list of per-round unit
        functions); TODO confirm against the boosting library. `num_frames`
        controls how many boosting rounds are sampled for frames. Optional
        kwarg `fps` sets the saved video's frame rate (default 50).
        '''
        # select subset of runs: evenly spaced round indices, roughly num_frames of them
        inds = np.arange(0,len(runner.models),int(len(runner.models)/float(num_frames)))
        labels = np.arange(0,len(runner.models),int(len(runner.models)/float(5)))

        # global names for train / valid sets
        train_inds = runner.train_inds
        valid_inds = runner.valid_inds

        self.x_train = self.x[:,train_inds]
        self.y_train = self.y[:,train_inds]

        self.x_valid = self.x[:,valid_inds]
        self.y_valid = self.y[:,valid_inds]

        self.normalizer = runner.normalizer
        train_errors = runner.train_count_vals
        valid_errors = runner.valid_count_vals
        num_units = len(runner.models)

        # construct figure
        fig = plt.figure(figsize = (6,6))
        artist = fig

        # create subplot with 3 panels, plot input function in center plot
        gs = gridspec.GridSpec(2, 2)
        ax1 = plt.subplot(gs[0]);
        ax2 = plt.subplot(gs[1]);
        ax3 = plt.subplot(gs[2]);
        ax4 = plt.subplot(gs[3]);

        # start animation
        # NOTE(review): the num_frames parameter is rebound here — the last two
        # sampled indices are dropped from the animation; presumably intentional
        num_frames = len(inds) -2
        print ('starting animation rendering...')
        def animate(k):
            # clear panels so each frame is drawn from scratch
            ax1.cla()
            ax2.cla()
            ax3.cla()
            ax4.cla()

            # print rendering update every 25 frames; clear console output when done
            if np.mod(k+1,25) == 0:
                print ('rendering animation frame ' + str(k+1) + ' of ' + str(num_frames))
            if k == num_frames - 1:
                print ('animation rendering complete!')
                time.sleep(1.5)
                clear_output()

            #### plot training, testing, and full data ####
            # pluck out current weights: all boosting steps up to sampled round a
            a = inds[k]
            steps = runner.best_steps[:a+1]

            # produce static img: boundary + data for original / train / validation panels
            self.draw_boundary(ax1,steps,a)
            self.static_N2_simple(ax1,train_valid = 'original')
            self.draw_boundary(ax2,steps,a)

            self.static_N2_simple(ax2,train_valid = 'train')
            self.draw_boundary(ax3,steps,a)
            self.static_N2_simple(ax3,train_valid = 'validate')

            #### plot training / validation errors ####
            self.plot_train_valid_errors(ax4,a,train_errors,valid_errors,inds)
            return artist,

        # NOTE(review): interval (ms between frames for interactive playback) is set
        # to num_frames — looks accidental, but harmless since fps governs the save
        anim = animation.FuncAnimation(fig, animate ,frames=num_frames, interval=num_frames, blit=True)

        # produce animation and save
        fps = 50
        if 'fps' in kwargs:
            fps = kwargs['fps']
        anim.save(savepath, fps=fps, extra_args=['-vcodec', 'libx264'])
        clear_output()

    ##### draw boundary #####
    def draw_boundary(self,ax,steps,ind):
        '''
        Draw the decision boundary of the summed boosting model (the functions in
        `steps`) on axis `ax`, evaluated over a 300x300 grid spanning the data
        range (padded by 5% on each side).  `ind` is unused here.
        '''
        ### create boundary data ###
        # get visual boundary: data range of first input dimension, padded 5%
        xmin1 = np.min(self.x[0,:])
        xmax1 = np.max(self.x[0,:])
        xgap1 = (xmax1 - xmin1)*0.05
        xmin1 -= xgap1
        xmax1 += xgap1

        # same padding for the second input dimension
        xmin2 = np.min(self.x[1,:])
        xmax2 = np.max(self.x[1,:])
        xgap2 = (xmax2 - xmin2)*0.05
        xmin2 -= xgap2
        xmax2 += xgap2

        # plot boundary for 2d plot: build a 2 x (300*300) grid of evaluation points
        r1 = np.linspace(xmin1,xmax1,300)
        r2 = np.linspace(xmin2,xmax2,300)
        s,t = np.meshgrid(r1,r2)
        s = np.reshape(s,(np.size(s),1))
        t = np.reshape(t,(np.size(t),1))
        h = np.concatenate((s,t),axis = 1).T

        # plot total fit: boosted model is the sum of all unit functions so far;
        # grid points are passed through the runner's normalizer before evaluation
        model = lambda x: np.sum([v(x) for v in steps],axis=0)
        z = model(self.normalizer(h))
        z1 = np.sign(z)

        # reshape it back to the 300x300 grid for contour plotting
        s.shape = (np.size(r1),np.size(r2))
        t.shape = (np.size(r1),np.size(r2))
        z1.shape = (np.size(r1),np.size(r2))

        #### plot contour, color regions ####
        ax.contour(s,t,z1,colors='k', linewidths=2.5,levels = [0],zorder = 2)
        ax.contourf(s,t,z1,colors = [self.colors[1],self.colors[0]],alpha = 0.15,levels = range(-1,2))

    ######## show N = 2 static image ########
    # show coloring of entire space
    def static_N2_simple(self,ax,train_valid):
        '''
        Scatter one slice of the data on axis `ax`.  `train_valid` selects which
        slice: 'train', 'validate', or 'original' (all points).  Points with
        label +1 are drawn in colors[0], label -1 in colors[1]; edge color
        distinguishes train (blue) from validation (orange) points.
        '''
        ### create boundary data ###
        # NOTE(review): these padded ranges are computed but never used below
        xmin1 = np.min(self.x[0,:])
        xmax1 = np.max(self.x[0,:])
        xgap1 = (xmax1 - xmin1)*0.05
        xmin1 -= xgap1
        xmax1 += xgap1

        xmin2 = np.min(self.x[1,:])
        xmax2 = np.max(self.x[1,:])
        xgap2 = (xmax2 - xmin2)*0.05
        xmin2 -= xgap2
        xmax2 += xgap2

        ### loop over two panels plotting each ###
        # plot training points
        if train_valid == 'train':
            # reverse normalize data
            x_train = self.x_train
            y_train = self.y_train

            # plot data: argwhere on the (1,P) label row returns (row, col) pairs,
            # so v[1] extracts the column (point) index
            ind0 = np.argwhere(y_train == +1)
            ind0 = [v[1] for v in ind0]
            ax.scatter(x_train[0,ind0],x_train[1,ind0],s = 45, color = self.colors[0], edgecolor = [0,0.7,1],linewidth = 1,zorder = 3)

            ind1 = np.argwhere(y_train == -1)
            ind1 = [v[1] for v in ind1]
            ax.scatter(x_train[0,ind1],x_train[1,ind1],s = 45, color = self.colors[1], edgecolor = [0,0.7,1],linewidth = 1,zorder = 3)
            ax.set_title('training data',fontsize = 15)

        if train_valid == 'validate':
            # reverse normalize data
            x_valid = self.x_valid
            y_valid = self.y_valid

            # plot testing points (orange edge marks validation set)
            ind0 = np.argwhere(y_valid == +1)
            ind0 = [v[1] for v in ind0]
            ax.scatter(x_valid[0,ind0],x_valid[1,ind0],s = 45, color = self.colors[0], edgecolor = [1,0.8,0.5],linewidth = 1,zorder = 3)

            ind1 = np.argwhere(y_valid == -1)
            ind1 = [v[1] for v in ind1]
            ax.scatter(x_valid[0,ind1],x_valid[1,ind1],s = 45, color = self.colors[1], edgecolor = [1,0.8,0.5],linewidth = 1,zorder = 3)
            ax.set_title('validation data',fontsize = 15)

        if train_valid == 'original':
            # plot all points with black edges
            ind0 = np.argwhere(self.y == +1)
            ind0 = [v[1] for v in ind0]
            ax.scatter(self.x[0,ind0],self.x[1,ind0],s = 55, color = self.colors[0], edgecolor = 'k',linewidth = 1,zorder = 3)

            ind1 = np.argwhere(self.y == -1)
            ind1 = [v[1] for v in ind1]
            ax.scatter(self.x[0,ind1],self.x[1,ind1],s = 55, color = self.colors[1], edgecolor = 'k',linewidth = 1,zorder = 3)
            ax.set_title('original data',fontsize = 15)

        # cleanup panel
        ax.set_xlabel(r'$x_1$',fontsize = 15)
        ax.set_ylabel(r'$x_2$',fontsize = 15,rotation = 0,labelpad = 20)
        ax.xaxis.set_major_formatter(FormatStrFormatter('%.1f'))
        ax.yaxis.set_major_formatter(FormatStrFormatter('%.1f'))

    def plot_train_valid_errors(self,ax,k,train_errors,valid_errors,inds):
        '''
        Plot train / validation misclassification counts on axis `ax` up through
        sampled round index `k` (x-axis = number of boosting units, 1-based).
        `inds` are the sampled round indices chosen in animate_trainval_boosting.
        '''
        num_elements = np.arange(len(train_errors))

        # training curve (blue) over the rounds animated so far
        ax.plot([v+1 for v in inds[:k+1]] ,[train_errors[v] for v in inds[:k+1]],color = [0,0.7,1],linewidth = 2.5,zorder = 1,label = 'training')
        # ax.scatter([v+1 for v in inds[:k+1]] ,train_errors[:k+1],color = [0,0.7,1],s = 70,edgecolor = 'w',linewidth = 1.5,zorder = 3)

        # validation curve (orange)
        ax.plot([v+1 for v in inds[:k+1]] ,[valid_errors[v] for v in inds[:k+1]],color = [1,0.8,0.5],linewidth = 2.5,zorder = 1,label = 'validation')
        # ax.scatter([v+1 for v in inds[:k+1]] ,valid_errors[:k+1],color= [1,0.8,0.5],s = 70,edgecolor = 'w',linewidth = 1.5,zorder = 3)
        ax.set_title('misclassifications',fontsize = 15)

        # cleanup
        ax.set_xlabel('number of units',fontsize = 12)
        ax.set_ylabel('number of misclassifications',fontsize = 12)

        # cleanp panel: fix axis limits so they do not jump between frames;
        # y-max is taken from the first 10 rounds only (early errors are largest)
        num_iterations = len(train_errors)
        minxc = 0.5
        maxxc = len(num_elements) + 0.5
        minc = min(min(copy.deepcopy(train_errors)),min(copy.deepcopy(valid_errors)))
        maxc = max(max(copy.deepcopy(train_errors[:10])),max(copy.deepcopy(valid_errors[:10])))
        gapc = (maxc - minc)*0.25
        minc -= gapc
        maxc += gapc

        ax.set_xlim([minxc- 0.5,maxxc + 0.5])
        ax.set_ylim([minc,maxc])

        # ax.xaxis.set_major_locator(MaxNLocator(integer=True))
        #labels = [str(v) for v in num_units]
        #ax.set_xticks(np.arange(1,len(num_elements)+1))
        # ax.set_xticklabels(num_units)



Oops, something went wrong.

0 comments on commit 53cbcfa

Please sign in to comment.
You can’t perform that action at this time.