Skip to content

Commit

Permalink
...
Browse files Browse the repository at this point in the history
  • Loading branch information
sherstpasha committed Feb 12, 2024
1 parent 736b530 commit 43066d5
Show file tree
Hide file tree
Showing 15 changed files with 547 additions and 52 deletions.
206 changes: 192 additions & 14 deletions src/test3.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,24 +24,202 @@
# model.fit()


from thefittest.optimizers import GeneticAlgorithm, SHAGA
from thefittest.benchmarks import OneMax

# Experiment configuration for the OneMax bit-string benchmark:
# maximize the number of ones in a binary string of length `string_length`.
number_of_iterations = 10
population_size = 10
string_length = 50

optimizer = SHAGA(
    fitness_function=OneMax(),
    iters=number_of_iterations,
    pop_size=population_size,
    str_len=string_length,
    show_progress_each=None,  # no per-iteration progress output
    random_state=18,  # fixed seed for reproducibility
)

optimizer.fit()

print(optimizer.get_fittest())

# import numpy as np
# import matplotlib.pyplot as plt

# from thefittest.benchmarks import Griewank
# from thefittest.optimizers import DifferentialEvolution, SHADE


# n_dimension = 10
# left_border = -100.0
# right_border = 100.0
# number_of_iterations = 50
# population_size = 50


# left_border_array = np.full(shape=n_dimension, fill_value=left_border, dtype=np.float64)
# right_border_array = np.full(shape=n_dimension, fill_value=right_border, dtype=np.float64)

# optimizer = SHADE(
# fitness_function=Griewank(),
# iters=number_of_iterations,
# pop_size=population_size,
# left=left_border_array,
# right=right_border_array,
# # show_progress_each=10,
# minimization=True,
# keep_history=True,
# random_state=18,
# )

# optimizer.fit()

# print(optimizer.get_fittest())

# optimizer = SHADE(
# fitness_function=Griewank(),
# iters=number_of_iterations,
# pop_size=population_size,
# left=left_border_array,
# right=right_border_array,
# # show_progress_each=10,
# minimization=True,
# keep_history=True,
# random_state=18,
# )

# optimizer.fit()

# print(optimizer.get_fittest())


import numpy as np
import matplotlib.pyplot as plt

from thefittest.base import FunctionalNode
from thefittest.base import TerminalNode
from thefittest.base import EphemeralNode
from thefittest.base import UniversalSet
from thefittest.optimizers import GeneticProgramming
from thefittest.base._tree import Mul
from thefittest.base._tree import Add
from thefittest.base._tree import Div
from thefittest.base._tree import Neg
from thefittest.utils._metrics import coefficient_determination


def generator1():
    """Ephemeral constant: uniform float drawn from [0, 10), rounded to 4 decimals."""
    value = np.random.uniform(0, 10)
    return np.round(value, 4)


def generator2():
    """Ephemeral constant: random integer drawn from [0, 10)."""
    return np.random.randint(low=0, high=10)


def problem(x):
    """Target function for regression: elementwise sine of the first input column."""
    first_column = x[:, 0]
    return np.sin(first_column)


# Experiment setup: sample the target function on a uniform grid and
# assemble the GP primitive sets (module-level names X, y, uniset and the
# size constants are referenced by the code below — keep them as-is).
function = problem
left_border = -4.5
right_border = 4.5
sample_size = 300
n_dimension = 1

number_of_iterations = 100
population_size = 500

# One uniform grid per dimension, stacked as columns -> shape (sample_size, n_dimension).
grid = np.linspace(left_border, right_border, sample_size)
X = np.stack([grid for _ in range(n_dimension)], axis=1)
y = function(X)


# Function nodes available to evolved trees.
functional_set = (
    FunctionalNode(Add()),
    FunctionalNode(Mul()),
    FunctionalNode(Neg()),
    FunctionalNode(Div()),
)


# Terminals: one variable node per input dimension plus two
# ephemeral-constant generators.
variables = [TerminalNode(X[:, i], f"x{i}") for i in range(n_dimension)]
ephemerals = [EphemeralNode(generator1), EphemeralNode(generator2)]
terminal_set = variables + ephemerals
uniset = UniversalSet(functional_set, tuple(terminal_set))


def fitness_function(trees):
    """Score each tree by the coefficient of determination (R^2) of its
    output against the module-level target vector ``y``.

    Scalar tree outputs are broadcast to len(y) via multiplication by a
    ones vector, matching the original evaluation.
    """
    ones = np.ones(len(y))
    scores = [coefficient_determination(y, tree() * ones) for tree in trees]
    return np.array(scores)


def _run_gp_regression():
    """Build, fit and report one GeneticProgramming regression run.

    Returns the (optimizer, fittest, stats) triple so the caller can keep
    the last run's results bound at module level, exactly as the original
    duplicated script did.
    """
    optimizer = GeneticProgramming(
        fitness_function=fitness_function,
        uniset=uniset,
        pop_size=population_size,
        iters=number_of_iterations,
        # show_progress_each=1,
        minimization=False,  # R^2 is maximized
        keep_history=False,
        selection="tournament_k",
        mutation="gp_weak_grow",
        tour_size=5,
        max_level=7,  # cap tree depth to limit bloat
        random_state=18,
    )

    optimizer.fit()

    fittest = optimizer.get_fittest()
    stats = optimizer.get_stats()

    # Evaluate the best phenotype on the training grid.
    predict = fittest["phenotype"]()

    print("The fittest individ:", fittest["phenotype"])
    print("with fitness", fittest["fitness"])

    return optimizer, fittest, stats, predict


# The original script ran the identical configuration twice back-to-back;
# keep that behavior (and the final module-level bindings) with a loop.
for _ in range(2):
    optimizer, fittest, stats, predict = _run_gp_regression()
103 changes: 103 additions & 0 deletions src/test4.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,103 @@
# import numpy as np
# from numba import njit
# import timeit
# from numba import boolean
# from numba import float64
# from numba import int64
# from numba import njit

# import numpy as np
# from numba import njit
# import timeit
# import random


# # Ваша функция с использованием numba
# @njit(int64[:](int64, int64, int64))
# def randint_numba(low: np.int64, high: np.int64, size: np.int64):
# return np.random.randint(low, high, size)


# # Ваша функция с использованием numba
# @njit(int64[:](int64, int64, int64))
# def randint_numba2(low: np.int64, high: np.int64, size: np.int64):
# return np.random.uniform(low, high, size).astype(np.int64)


# @njit(int64[:](int64, int64, int64))
# def numba_randint(low, high, size):
# """
# Generate an array of random integers from a discrete uniform distribution.

# Parameters
# ----------
# low : int
# The lowest integer to be drawn from the distribution.
# high : int
# The highest integer to be drawn from the distribution.
# size : int
# The number of integers to generate.

# Returns
# -------
# NDArray[int64]
# An array of random integers.

# Examples
# --------
# >>> from numba import jit
# >>> import numpy as np
# >>>
# >>> # Example of generating random integers
# >>> result = numba_randint(low=1, high=10, size=5)
# >>> print("Random Integers:", result)
# Random Integers: ...

# Notes
# -----
# The generated integers follow a discrete uniform distribution.
# """
# result = np.empty(size, dtype=np.int64)

# for i in range(size):
# result[i] = low + np.int64(np.floor((high - low) * random.random()))

# return result


# # Параметры для сравнения
# low = 0
# high = 100
# size = 1000000 # Размер выборки

# # Используйте timeit для измерения времени выполнения вашей функции
# time_numba_randint = timeit.timeit(lambda: randint_numba(low, high, size), number=100)

# # Используйте timeit для измерения времени выполнения вашей функции
# time_numba_randint2 = timeit.timeit(lambda: numba_randint(low, high, size), number=100)

# # Используйте timeit для измерения времени выполнения вашей функции
# time_numba_randint3 = timeit.timeit(lambda: randint_numba2(low, high, size), number=100)

# # Измерьте время выполнения np.random.randint()
# time_numpy_randint = timeit.timeit(lambda: np.random.randint(low, high, size), number=100)


# # Выведите результаты
# print(f"Время выполнения вашей функции: {time_numba_randint:.6f} сек")
# print(f"Время выполнения вашей функции2: {time_numba_randint2:.6f} сек")
# print(f"Время выполнения вашей функции3: {time_numba_randint3:.6f} сек")
# print(f"Время выполнения np.random.randint(): {time_numpy_randint:.6f} сек")

import numpy as np
from thefittest.utils.random import sattolo_shuffle

# Demonstrate in-place Sattolo shuffling on a plain Python list.
numbers = [1, 2, 3, 4, 5]
sattolo_shuffle(numbers)
print("Shuffled List:", numbers)

# Demonstrate the same in-place shuffle on a NumPy array.
values = np.array([1, 2, 3, 4, 5])
sattolo_shuffle(values)
print("Shuffled NumPy Array:", values)
48 changes: 48 additions & 0 deletions src/test_de.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,48 @@
import numpy as np
import matplotlib.pyplot as plt

from thefittest.benchmarks import Griewank
from thefittest.optimizers import DifferentialEvolution, SHADE, jDE


# Problem configuration: 10-D Griewank minimization over [-100, 100]^10.
n_dimension = 10
left_border = -100.0
right_border = 100.0
number_of_iterations = 50
population_size = 50


left_border_array = np.full(shape=n_dimension, fill_value=left_border, dtype=np.float64)
right_border_array = np.full(shape=n_dimension, fill_value=right_border, dtype=np.float64)

# The original script ran the identical SHADE configuration twice in a row;
# a loop preserves that behavior (same two fits, same two prints) without
# the copy-pasted block.
for _ in range(2):
    optimizer = SHADE(
        fitness_function=Griewank(),
        iters=number_of_iterations,
        pop_size=population_size,
        left=left_border_array,
        right=right_border_array,
        # show_progress_each=10,
        minimization=True,  # Griewank is a minimization benchmark
        keep_history=True,
        random_state=18,  # fixed seed for reproducibility
    )

    optimizer.fit()

    print(optimizer.get_fittest())
3 changes: 1 addition & 2 deletions src/thefittest/base/_ea.py
Original file line number Diff line number Diff line change
Expand Up @@ -128,7 +128,7 @@ def __init__(
if self._n_jobs > 1:
self._parallel = Parallel(self._n_jobs)

self._random_state = random_state
self._random_state = check_random_state(random_state)

def _first_generation(self: EvolutionaryAlgorithm) -> None:
return None
Expand Down Expand Up @@ -265,7 +265,6 @@ def _split_population(self: EvolutionaryAlgorithm, population: NDArray) -> List:
return population_split

def fit(self: EvolutionaryAlgorithm) -> EvolutionaryAlgorithm:
self._random_state = check_random_state(self._random_state)

self._get_init_population()
self._from_population_g_to_fitness()
Expand Down
Loading

0 comments on commit 43066d5

Please sign in to comment.