Merge pull request #449 from BDonnot/bd_dev
Runner issue with caching
BDonnot committed Apr 28, 2023
2 parents d191a50 + 007208a commit 8f19d96
Showing 18 changed files with 180 additions and 60 deletions.
5 changes: 5 additions & 0 deletions CHANGELOG.rst
@@ -44,6 +44,8 @@ Change Log
- [BREAKING] In `PandaPowerBackend` the kwargs argument "ligthsim2grid" was misspelled and is now properly
renamed `lightsim2grid`
- [BREAKING] you can no longer use the `env.reactivate_forecast()` in the middle of an episode.
- [BREAKING] the method `runner.run_one_episode()` (which you should not use!) now
also returns the total number of steps of the environment.
- [FIXED] a bug in `PandapowerBackend` when running in dc mode (voltages were not read correctly
from the generators)
- [FIXED] issue https://github.com/rte-france/Grid2Op/issues/389 which was caused by 2 independent things:
@@ -72,6 +74,9 @@ Change Log
`GridStateFromFileWithForecastsWithoutMaintenance` classes that caused the maintenance file to be
ignored when "chunk_size" was set.
- [FIXED] a bug when shunts were alone in `backend.check_kirchoff()`
- [FIXED] an issue with "max_iter" in the runner when `MultifolderWithCache` is used
(see issue https://github.com/rte-france/Grid2Op/issues/447)
- [FIXED] a bug in `MultifolderWithCache` when seeding was applied
- [ADDED] the function `obs.get_forecast_env()` that is able to generate a grid2op environment from the
forecast data in the observation. This is especially useful for model-based RL.
- [ADDED] an example on how to write a backend.
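For context on the [FIXED] entry about "max_iter" and `MultifolderWithCache` (issue #447, the "Runner issue with caching" this PR addresses), here is a minimal usage sketch of the pattern it targets. It assumes the usual grid2op entry points (`grid2op.make`, `Runner`, `env.get_params_for_runner()`); the environment name, the filter regex and the step limit are placeholders, not part of this commit.

import re

import grid2op
from grid2op.Chronics import MultifolderWithCache
from grid2op.Runner import Runner

# build an environment whose chronics are served from an in-memory cache
env = grid2op.make("l2rpn_case14_sandbox",
                   chronics_class=MultifolderWithCache)
# keep only some scenarios in the cache, then build the cache (mandatory)
env.chronics_handler.real_data.set_filter(lambda x: re.match(r".*0$", x) is not None)
env.chronics_handler.reset()

runner = Runner(**env.get_params_for_runner())
# before this fix, the step limit could be ignored when the cache was in use
res = runner.run(nb_episode=1, max_iter=100)

Before this commit the `max_iter` limit asked of the runner could be lost when the chronics came from the cache; the `max_iter` property added further down in `multifolderWithCache.py` propagates the limit to every cached time series.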
2 changes: 1 addition & 1 deletion docs/conf.py
@@ -22,7 +22,7 @@
author = 'Benjamin Donnot'

# The full version, including alpha/beta/rc tags
release = '1.8.2.dev4'
release = '1.8.2.dev5'
version = '1.8'


10 changes: 7 additions & 3 deletions grid2op/Chronics/chronicsHandler.py
@@ -144,14 +144,18 @@ def get_name(self):
"""
return str(os.path.split(self.get_id())[-1])

def set_max_iter(self, max_iter):
def set_max_iter(self, max_iter: int):
"""
This function is used to set the maximum number of iterations possible before the chronics ends.
This function is used to set the maximum number of
iterations possible before the chronics ends.
You can reset this by setting it to `-1`.
Parameters
----------
max_iter: ``int``
The maximum number of steps that can be done before reaching the end of the episode
The maximum number of steps that can be done before reaching
the end of the episode
"""

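A short, hedged example of the behaviour documented in `set_max_iter` above: cap the episode length, then lift the cap with `-1`. The environment name is a placeholder, and whether the cap applies to the running episode or only from the next `reset()` may depend on the grid2op version, so the sketch resets right after changing it.

import grid2op

env = grid2op.make("l2rpn_case14_sandbox")

env.chronics_handler.set_max_iter(288)  # episodes now stop after 288 steps
obs = env.reset()

env.chronics_handler.set_max_iter(-1)   # -1 removes the limit (full chronics)
obs = env.reset()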
8 changes: 4 additions & 4 deletions grid2op/Chronics/fromChronix2grid.py
@@ -224,7 +224,7 @@ def load_next(self):# TODO refacto with fromNPY
)

def max_timestep(self):
return self.max_iter
return self._max_iter

def forecasts(self):
"""
@@ -263,8 +263,8 @@ def done(self):
res = False
if self.current_index >= self._load_p.shape[0]:
res = True
elif self.max_iter > 0:
if self.curr_iter > self.max_iter:
elif self._max_iter > 0:
if self.curr_iter > self._max_iter:
res = True
return res

@@ -281,7 +281,7 @@ def next_chronics(self):
res_gen = self._generate_one_episode(self.env, self.dict_ref, self.dt, self._init_datetime,
seed=self._seed_used_for_chronix2grid,
with_loss=self._with_loss,
nb_steps=self.max_iter)
nb_steps=self._max_iter)

self._load_p = res_gen[0].values
self._load_p_forecasted = res_gen[1].values
8 changes: 4 additions & 4 deletions grid2op/Chronics/fromNPY.py
@@ -454,8 +454,8 @@ def done(self):
or self.current_index >= self._load_p.shape[0]
):
res = True
elif self.max_iter > 0:
if self.curr_iter > self.max_iter:
elif self._max_iter > 0:
if self.curr_iter > self._max_iter:
res = True
return res

@@ -608,8 +608,8 @@ def change_forecasts(
)

def max_timestep(self):
if self.max_iter >= 0:
return min(self.max_iter, self._load_p.shape[0], self._i_end)
if self._max_iter >= 0:
return min(self._max_iter, self._load_p.shape[0], self._i_end)
return min(self._load_p.shape[0], self._i_end)

def change_i_start(self, new_i_start: Union[int, None]):
48 changes: 32 additions & 16 deletions grid2op/Chronics/gridStateFromFile.py
@@ -321,8 +321,8 @@ def _get_data(self, data_name, chunksize=-1, nrows=None):
file_ext = self._get_fileext(data_name)

if nrows is None:
if self.max_iter > 0:
nrows = self.max_iter + 1
if self._max_iter > 0:
nrows = self._max_iter + 1

if file_ext is not None:
if chunksize == -1:
@@ -558,8 +558,8 @@ def initialize(
prod_v_iter = self._get_data("prod_v")
read_compressed = self._get_fileext("hazards")
nrows = None
if self.max_iter > 0:
nrows = self.max_iter + 1
if self._max_iter > 0:
nrows = self._max_iter + 1

if read_compressed is not None:
hazards = pd.read_csv(
@@ -664,18 +664,18 @@ def initialize(
)
self.n_ = n_ # the -1 is present because the initial grid state doesn't count as a "time step"

if self.max_iter > 0:
if self._max_iter > 0:
if self.n_ is not None:
if self.max_iter >= self.n_:
self.max_iter = self.n_ - 1
if self._max_iter >= self.n_:
self._max_iter = self.n_ - 1
# TODO: issue warning in this case
self.n_ = self.max_iter + 1
self.n_ = self._max_iter + 1
else:
# if the number of maximum time step is not set yet, we set it to be the number of
# data in the chronics (number of rows of the files) -1.
# the -1 is present because the initial grid state doesn't count as a "time step" but is read
# from these data.
self.max_iter = self.n_ - 1
self._max_iter = self.n_ - 1

self._init_attrs(
load_p, load_q, prod_p, prod_v, hazards=hazards, maintenance=maintenance,
@@ -785,11 +785,27 @@ def done(self):
# if self.current_index+1 >= self.tmp_max_index:
if self.current_index > self.n_:
res = True
elif self.max_iter > 0:
if self.curr_iter > self.max_iter:
elif self._max_iter > 0:
if self.curr_iter > self._max_iter:
res = True
return res

@property
def max_iter(self):
return self._max_iter

@max_iter.setter
def max_iter(self, value : int):
if value == -1:
self._max_iter = self.n_ - 1
else:
self._max_iter = int(value)

def max_timestep(self):
if self._max_iter == -1:
return self.n_ - 1
return self._max_iter

def _data_in_memory(self):
if self.chunk_size is None:
# if I don't use chunks, all the data are in memory already
Expand Down Expand Up @@ -824,8 +840,8 @@ def load_next(self):
if self.current_index >= self.tmp_max_index:
raise StopIteration

if self.max_iter > 0:
if self.curr_iter > self.max_iter:
if self._max_iter > 0:
if self.curr_iter > self._max_iter:
raise StopIteration

res = {}
@@ -987,10 +1003,10 @@ def check_validity(self, backend):
)
raise EnvError(msg_err.format(name_arr, arr.shape[0], self.n_))

if self.max_iter > 0:
if self.max_iter > self.n_:
if self._max_iter > 0:
if self._max_iter > self.n_:
msg_err = "Files count {} rows and you ask this episode to last at {} timestep."
raise InsufficientData(msg_err.format(self.n_, self.max_iter))
raise InsufficientData(msg_err.format(self.n_, self._max_iter))

def next_chronics(self):
self.current_datetime = self.start_datetime
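The `max_iter` property and `max_timestep()` added above both treat `-1` as "use the full chronics", where `n_` is the number of rows read from the files and the initial state does not count as a step. The toy class below (illustrative only, not grid2op code) restates that sentinel logic in isolation.

class _ToyChronics:
    # stand-in for GridStateFromFile: n_ rows of data, n_ - 1 usable steps
    def __init__(self, n_rows: int):
        self.n_ = n_rows
        self._max_iter = -1

    @property
    def max_iter(self) -> int:
        return self._max_iter

    @max_iter.setter
    def max_iter(self, value: int) -> None:
        # -1 means "everything available", i.e. n_ - 1 steps
        self._max_iter = self.n_ - 1 if value == -1 else int(value)

    def max_timestep(self) -> int:
        return self.n_ - 1 if self._max_iter == -1 else self._max_iter

toy = _ToyChronics(n_rows=289)
toy.max_iter = -1
assert toy.max_timestep() == 288  # full length of the chronics
toy.max_iter = 100
assert toy.max_timestep() == 100  # an explicit limit wins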
4 changes: 2 additions & 2 deletions grid2op/Chronics/gridStateFromFileWithForecasts.py
@@ -160,8 +160,8 @@ def initialize(
else:
chunk_size = None

if self.max_iter > 0:
nrows_to_load = (self.max_iter + 1) * self._nb_forecast
if self._max_iter > 0:
nrows_to_load = (self._max_iter + 1) * self._nb_forecast

load_p_iter = self._get_data("load_p_forecasted",
chunk_size, nrows_to_load)
10 changes: 9 additions & 1 deletion grid2op/Chronics/gridValue.py
@@ -97,7 +97,7 @@ def __init__(
self.time_interval = time_interval
self.current_datetime = start_datetime
self.start_datetime = start_datetime
self.max_iter = max_iter
self._max_iter = max_iter
self.curr_iter = 0

self.maintenance_time = None
@@ -111,6 +111,14 @@ def get_kwargs(self, dict_):
"""
pass

@property
def max_iter(self):
return self._max_iter

@max_iter.setter
def max_iter(self, value : int):
self._max_iter = int(value)

@abstractmethod
def initialize(
self,
2 changes: 1 addition & 1 deletion grid2op/Chronics/multiFolder.py
@@ -730,7 +730,7 @@ def split_and_save(self, datetime_beg, datetime_end, path_out):
time_interval=self.time_interval,
sep=self.sep,
path=subpath,
max_iter=self.max_iter,
max_iter=self._max_iter,
chunk_size=self.chunk_size,
)
seed_chronics = None
55 changes: 50 additions & 5 deletions grid2op/Chronics/multifolderWithCache.py
@@ -159,13 +159,24 @@ def _default_filter(self, x):

def reset(self):
"""
Rebuild the cache as if it were built from scratch. This call might take a while to process.
Rebuild the cache as if it were built from scratch.
This call might take a while to process.
.. danger::
You NEED to call this function (with `env.chronics_handler.reset()`)
if you use the `MultiFolderWithCache` class in your experiments.
.. warning::
If a seed is set (see :func:`MultiFolderWithCache.seed`) then
all the data in the cache are also seeded when this
method is called.
"""
self._cached_data = [None for _ in self.subpaths]
self.__i = 0
# select the right paths, and store their id in "_order"
super().reset()
self.cache_size = 0
max_int = np.iinfo(dt_int).max
for i in self._order:
# everything in "_order" need to be put in cache
path = self.subpaths[i]
@@ -176,8 +187,7 @@ def reset(self):
max_iter=self.max_iter,
chunk_size=None,
)
if self.seed is not None:
max_int = np.iinfo(dt_int).max
if self.seed_used is not None:
seed_chronics = self.space_prng.randint(max_int)
data.seed(seed_chronics)

@@ -193,10 +203,10 @@

if self.cache_size == 0:
raise RuntimeError("Impossible to initialize the new cache.")

self.__nb_reset_called += 1
return self.subpaths[self._order]

def initialize(
self,
order_backend_loads,
@@ -227,6 +237,41 @@
id_scenario = self._order[self._prev_cache_id]
self.data = self._cached_data[id_scenario]
self.data.next_chronics()

@property
def max_iter(self):
return self._max_iter

@max_iter.setter
def max_iter(self, value : int):
self._max_iter = int(value)
for el in self._cached_data:
if el is None:
continue
el.max_iter = value

def max_timestep(self):
return self.data.max_timestep()

def seed(self, seed : int):
"""This seeds both the MultiFolderWithCache
(which has an impact for example on :func:`MultiFolder.sample_next_chronics`)
and each data present in the cache.
Parameters
----------
seed : int
The seed to use
"""
res = super().seed(seed)
max_int = np.iinfo(dt_int).max
for i in self._order:
data = self._cached_data[i]
if data is None:
continue
seed_ts = self.space_prng.randint(max_int)
data.seed(seed_ts)
return res

def load_next(self):
self.__nb_step_called += 1
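The `reset()` docstring and the new `seed()` method above change how `MultiFolderWithCache` interacts with seeding: building the cache is mandatory, and a seed now reaches every cached time series as well. The hedged sketch below shows the intended call order; the environment name and seed value are placeholders, and it assumes `env.seed()` propagates to the chronics handler as in the standard grid2op API.

import grid2op
from grid2op.Chronics import MultifolderWithCache

env = grid2op.make("l2rpn_case14_sandbox",
                   chronics_class=MultifolderWithCache)

env.seed(42)                  # seeds the env, chronics handler included
env.chronics_handler.reset()  # mandatory: builds the cache (and seeds its content)
obs = env.reset()

# the new max_iter setter forwards the limit to every chronics already in the cache
env.chronics_handler.real_data.max_iter = 100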
8 changes: 6 additions & 2 deletions grid2op/Chronics/time_series_from_handlers.py
@@ -306,7 +306,7 @@ def next_chronics(self):

def done(self):
# I am done if the part I control is "over"
if self.max_iter > 0 and self.curr_iter > self.max_iter:
if self._max_iter > 0 and self.curr_iter > self._max_iter:
return True

# or if any of the handler is "done"
@@ -443,7 +443,7 @@ def _update_max_iter(self):
max_iters = [el.get_max_iter() for el in self._active_handlers]
max_iters = [el for el in max_iters if el != -1]
# get the max iter from myself
max_iters.append(self.max_iter)
if self._max_iter != -1:
max_iters.append(self.max_iter)
# prevent empty list
if not max_iters:
max_iters.append(self.max_iter)
# take the minimum
self.max_iter = np.min(max_iters)

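The `_update_max_iter` hunk above combines the limits reported by each handler (`-1` meaning "no limit") with the chronics' own limit, now only when that own limit is actually set. Below is an illustrative restatement of that aggregation in plain Python (not grid2op code), with the old failure mode noted in a comment.

def effective_max_iter(handler_limits, own_limit):
    # keep only handlers that actually set a limit
    limits = [lim for lim in handler_limits if lim != -1]
    if own_limit != -1:
        limits.append(own_limit)
    if not limits:
        # nobody sets a limit: keep the current (unset) value
        return own_limit
    return min(limits)

assert effective_max_iter([-1, 2016, -1], own_limit=-1) == 2016
# before this change the own limit was appended unconditionally, so a -1
# could win the min() and silently erase the handlers' limit
assert effective_max_iter([-1, -1], own_limit=-1) == -1
assert effective_max_iter([8064, 2016], own_limit=288) == 288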
