Skip to content

Commit

Permalink
Merge pull request #38 from lacava/reproducible
Browse files Browse the repository at this point in the history
replaces np.random calls with self.random_state (issue #37)
  • Loading branch information
lacava committed Aug 3, 2018
2 parents 2bdf074 + 62f6f91 commit d736e92
Show file tree
Hide file tree
Showing 5 changed files with 12 additions and 11 deletions.
5 changes: 3 additions & 2 deletions docs/few_example.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,8 @@
input_data = pd.read_csv(dataset,sep=None,engine='python')

#generate train/test split
train_i, test_i = train_test_split(input_data.index, train_size=0.75, test_size=0.25)
train_i, test_i = train_test_split(input_data.index, train_size=0.75, test_size=0.25,
random_state=10)

# training data
X_train = input_data.loc[train_i].drop('label', axis=1).values
Expand All @@ -17,7 +18,7 @@
X_test = input_data.loc[test_i].drop('label', axis=1).values
Y_test = input_data.loc[test_i, 'label'].values

few = FEW(verbosity=1)
few = FEW(random_state=10,verbosity=1)
few.fit(X_train,Y_train)

print('\nTraining accuracy: {}'.format(few.score(X_train, Y_train)))
Expand Down
2 changes: 1 addition & 1 deletion few/lib/epsilon_lexicase.h
Original file line number Diff line number Diff line change
Expand Up @@ -161,7 +161,7 @@ void epsilon_lexicase(const ExtMat & F, int n, int d,

// pick a winner from can_locs
locs(i) = *select_randomly(can_locs.begin(),can_locs.end(),gen);
// remove the winner from ind_locs
// survival: remove the winner from ind_locs
for (auto l = ind_locs.begin(); l!=ind_locs.end();){
if (*l == locs(i))
l = ind_locs.erase(l);
Expand Down
6 changes: 3 additions & 3 deletions few/population.py
Original file line number Diff line number Diff line change
Expand Up @@ -214,13 +214,13 @@ def make_program(self,stack,func_set,term_set,max_d,ntype):
raise ValueError('no ts. ntype:'+ntype+'. term_set out_types:'+
','.join([t.out_type for t in term_set]))

stack.append(ts[np.random.choice(len(ts))])
stack.append(ts[self.random_state.choice(len(ts))])
else:
fs = [f for f in func_set if (f.out_type==ntype
and (f.in_type=='f' or max_d>1))]
if len(fs)==0:
print('ntype:',ntype,'\nfunc_set:',[f.name for f in func_set])
stack.append(fs[np.random.choice(len(fs))])
stack.append(fs[self.random_state.choice(len(fs))])
tmp = copy.copy(stack[-1])

for i in np.arange(tmp.arity['f']):
Expand Down Expand Up @@ -273,7 +273,7 @@ def init_pop(self):
for i,p in enumerate(pop.individuals):
if i < self.n_features:
p.stack = [node('x',
loc=np.random.randint(self.n_features))]
loc=self.random_state.randint(self.n_features))]
else:
# make program if pop is bigger than n_features
self.make_program(p.stack,self.func_set,self.term_set,
Expand Down
4 changes: 2 additions & 2 deletions few/variation.py
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,7 @@ def variation(self,parents):
# softmax transformation of the weights
weights = np.exp(weights)/np.sum(np.exp(weights))
offspring = copy.deepcopy(
list(np.random.choice(self.valid(parents),
list(self.random_state.choice(self.valid(parents),
self.population_size, p=weights)))
else:
offspring = copy.deepcopy(list(
Expand All @@ -45,7 +45,7 @@ def variation(self,parents):
# softmax transformation of the weights
weights = np.exp(weights)/np.sum(np.exp(weights))
offspring = copy.deepcopy(list(
np.random.choice(self.valid(parents),
self.random_state.choice(self.valid(parents),
self.population_size, p=weights)))
else:
offspring = copy.deepcopy(list(
Expand Down
6 changes: 3 additions & 3 deletions setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,10 +10,10 @@
import importlib
try:
importlib.import_module('eigency')
except ImportError:
except (ImportError, AttributeError):
try:
import pip
pip.main(['install', 'eigency'])
from pip._internal import main
main(['install', 'eigency'])
except ImportError:
raise ImportError('The eigency library must be installed before FEW. '
'Automatic install with pip failed.')
Expand Down

0 comments on commit d736e92

Please sign in to comment.