particle_adam.py
import torch
import matplotlib.pyplot as plt
n = 200                                   # number of time steps
xs = [0] * n                              # positions (placeholder until the first update)
target_xs = torch.FloatTensor([1] * 100 + [0] * (n - 100))  # desired trajectory: at 1 for the first half, then back to 0
vs = [0] * n                              # velocities (placeholder until the first update)
fs = torch.zeros(n, requires_grad=True)   # force applied at each time step; this is the optimization variable
def update_xs(fs):
    """Integrate the force profile twice (unit time step): forces -> velocities -> positions."""
    vs_new = torch.cumsum(fs, dim=0)
    xs_new = torch.cumsum(vs_new, dim=0)
    return xs_new, vs_new
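# Note: with a unit time step, cumsum acts as discrete integration. For example,
# a single unit force at t=0 gives
#   torch.cumsum(torch.tensor([1., 0., 0., 0.]), dim=0)  ->  tensor([1., 1., 1., 1.])  (velocities)
# and a second cumsum over that result gives
#   tensor([1., 2., 3., 4.])                                                           (positions)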
# live plot of positions, velocities and forces
fig = plt.figure()
ax = fig.add_subplot(111)
ax.autoscale(enable=True, axis="y", tight=False)
li1, = ax.plot(xs)                   # positions
li2, = ax.plot(vs)                   # velocities
li3, = ax.plot(fs.detach().numpy())  # forces
fig.canvas.draw()
plt.show(block=False)
#optimizer = torch.optim.SGD([fs], lr=0.000000005, momentum=0.9, nesterov=True)
optimizer = torch.optim.Adam([fs])   # Adam with its default learning rate (1e-3)
while True:
    xs, vs = update_xs(fs)
    # cost: squared distance to the target trajectory plus a penalty on the applied forces
    cost = torch.sum((xs - target_xs).pow(2) + fs.pow(2))
    optimizer.zero_grad()
    cost.backward()
    # update plots
    li1.set_ydata(xs.detach().numpy())
    li2.set_ydata(vs.detach().numpy())
    li3.set_ydata(fs.detach().numpy())
    ax.relim()
    # update ax.viewLim using the new dataLim
    ax.autoscale_view()
    fig.canvas.draw()
    plt.pause(0.001)  # let the GUI event loop process the redraw
    # gradient step on the force profile
    optimizer.step()
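For reference, a minimal non-interactive sketch of the same optimization (no live plot), assuming a fixed, arbitrary number of Adam steps and the same cost:

import torch

n = 200
target_xs = torch.FloatTensor([1] * 100 + [0] * (n - 100))
fs = torch.zeros(n, requires_grad=True)
optimizer = torch.optim.Adam([fs])

for step in range(2000):                      # arbitrary step count for this sketch
    vs = torch.cumsum(fs, dim=0)              # integrate forces into velocities
    xs = torch.cumsum(vs, dim=0)              # integrate velocities into positions
    cost = torch.sum((xs - target_xs).pow(2) + fs.pow(2))
    optimizer.zero_grad()
    cost.backward()
    optimizer.step()
    if step % 500 == 0:
        print(step, cost.item())              # cost should decrease as the forces are tuned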