-
Notifications
You must be signed in to change notification settings - Fork 0
/
Newton_F.py
75 lines (52 loc) · 2.01 KB
/
Newton_F.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
#!/usr/bin/python
import numpy as np
from numpy.linalg import norm
from numpy.linalg import solve
from time import process_time
#############################################################################
# #
# RESOLUTION D'UN PROBLEME D'OPTIMISATION SANS CONTRAINTES #
# #
# Methode de Newton a pas fixe #
# #
#############################################################################
from Visualg import Visualg
def Newton_F(Oracle, x0):
    """Minimize an unconstrained criterion with fixed-step Newton iterations.

    Args:
        Oracle: callable taking a point and returning the triple
            (criterion value, gradient, Hessian) at that point.
        x0: starting point of the iterations.

    Returns:
        Tuple (final criterion value, gradient at the final point, final point).
    """
    # Algorithm settings: iteration cap, constant step length, stopping tolerance.
    max_iterations = 100
    step = 1
    tolerance = 0.000001

    # Per-iteration history, consumed by the convergence plots at the end.
    norm_history = []
    step_history = []
    value_history = []

    start = process_time()

    x = x0
    for iteration in range(max_iterations):
        # Evaluate criterion, gradient and Hessian at the current point.
        value, grad, hess = Oracle(x)

        # Stop as soon as the gradient is numerically zero.
        grad_norm = norm(grad)
        if grad_norm <= tolerance:
            break

        # Newton direction: solve H d = -g, then take one fixed-length step.
        direction = -solve(hess, grad)
        x = x + step * direction

        # Record history only for iterations that actually moved.
        norm_history.append(grad_norm)
        step_history.append(step)
        value_history.append(value)

    elapsed = process_time() - start

    # Report the outcome of the run (messages kept verbatim for callers/logs).
    print()
    print('Iteration :', iteration)
    print('Temps CPU :', elapsed)
    print('Critere optimal :', value)
    print('Norme du gradient :', norm(grad))

    # Convergence diagnostics (project-local plotting helper).
    Visualg(norm_history, step_history, value_history)

    return value, grad, x