
Commit

Small fixes in docstrings
enekomartinmartinez committed Aug 1, 2023
1 parent 5290d83 commit 8acc8e8
Showing 2 changed files with 62 additions and 56 deletions.
61 changes: 30 additions & 31 deletions pysd/py_backend/model.py
@@ -1457,44 +1457,45 @@ def set_stepper(self, output_obj, params=None, step_vars=[],
Timeseries will be interpolated to give time-varying input.
step_vars: list
List of variable or parameter names whose values might be updated
after one or more simulation steps.
List of variable or parameter names whose values might be
updated after one or more simulation steps.
return_columns: list, 'step' or None (optional)
List of string model component names, returned dataframe
will have corresponding columns. If 'step' only variables with
cache step will be returned. If None, variables with cache step
and run will be returned. Default is None.
will have corresponding columns. If 'step' only variables
with cache step will be returned. If None, variables with
cache step and run will be returned. Default is None.
return_timestamps: list, numeric, ndarray (1D) (optional)
Timestamps in model execution at which to return state information.
Defaults to model-file specified timesteps.
Timestamps in model execution at which to return state
information. Defaults to model-file specified timesteps.
initial_condition: str or (float, dict) (optional)
The starting time, and the state of the system (the values of
all the stocks) at that starting time. 'original' or 'o'uses
model-file specified initial condition. 'current' or 'c' uses
the state of the model after the previous execution. Other str
objects, loads initial conditions from the pickle file with the
given name.(float, dict) tuple lets the user specify a starting
time (float) and (possibly partial) dictionary of initial values
for stock (stateful) objects. Default is 'original'.
The starting time, and the state of the system (the values
of all the stocks) at that starting time. 'original' or 'o'
uses model-file specified initial condition. 'current' or
'c' uses the state of the model after the previous
execution. Other str objects, loads initial conditions from
the pickle file with the given name.(float, dict) tuple lets
the user specify a starting time (float) and (possibly
partial) dictionary of initial values for stock (stateful)
objects. Default is 'original'.
final_time: float or None
Final time of the simulation. If float, the given value will be
used to compute the return_timestamps (if not given) and as a
final time. If None the last value of return_timestamps will be
used as a final time. Default is None.
Final time of the simulation. If float, the given value will
be used to compute the return_timestamps (if not given) and
as a final time. If None the last value of return_timestamps
will be used as a final time. Default is None.
time_step: float or None
Time step of the simulation. If float, the given value will be
used to compute the return_timestamps (if not given) and
Time step of the simulation. If float, the given value will
be used to compute the return_timestamps (if not given) and
euler time series. If None the default value from components
will be used. Default is None.
saveper: float or None
Saving step of the simulation. If float, the given value will be
used to compute the return_timestamps (if not given). If None
Saving step of the simulation. If float, the given value will
be used to compute the return_timestamps (if not given). If None
the default value from components will be used. Default is None.
cache_output: bool (optional)
@@ -1514,9 +1515,7 @@ def set_stepper(self, output_obj, params=None, step_vars=[],
saveper, cache_output, step_vars=step_vars)

self.output.set_capture_elements(self.capture_elements)

self.output.initialize(self)

self.output.update(self)

def step(self, num_steps=1, step_vars={}):
@@ -1528,8 +1527,8 @@ def step(self, num_steps=1, step_vars={}):
Parameters
----------
num_steps: int
Number of steps that the iterator should run with the values of
variables defined in step_vars argument.
Number of steps that the iterator should run with the values
of variables defined in step_vars argument.
step_vars: dict
Varibale names that should be updated before running the step
@@ -1549,11 +1548,11 @@ def step(self, num_steps=1, step_vars={}):
self.output.update(self)

def _config_simulation(self, params, return_columns, return_timestamps,
initial_condition, final_time, time_step, saveper,
cache_output, **kwargs):
initial_condition, final_time, time_step,
saveper, cache_output, **kwargs):
"""
Internal method to set all simulation config parameters. Arguments to
this function are those of the run and set_stepper methods.
Internal method to set all simulation config parameters. Arguments
to this function are those of the run and set_stepper methods.
"""

self._set_control_vars(return_timestamps, final_time, time_step,
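The hunks above document PySD's stepping API: `set_stepper` configures a run whose listed `step_vars` may be overridden between steps, and `step` advances it. A rough usage sketch of how those calls fit together, based only on the docstrings shown here; the model file `Teacup.mdl` and the `room_temperature` variable are illustrative assumptions, and the closing `collect` call follows the static `collect(model, flatten_output=True)` signature shown further down in output.py:

```python
import pysd
from pysd.py_backend.output import ModelOutput

# Load any PySD-compatible model; "Teacup.mdl" is just an example name.
model = pysd.read_vensim("Teacup.mdl")

# Prepare an output object and configure the stepper: variables listed in
# step_vars may be updated before each step, final_time bounds the run.
output = ModelOutput()
model.set_stepper(output, step_vars=["room_temperature"], final_time=5)

# Advance one step with an updated value, then nine more with no changes.
model.step(1, {"room_temperature": 25})
model.step(9)

# With no output file configured, results come back as a pandas DataFrame
# (handled by the DataFrameHandler described in output.py below).
results = ModelOutput.collect(model)
print(results.head())
```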
57 changes: 32 additions & 25 deletions pysd/py_backend/output.py
@@ -25,8 +25,8 @@

class ModelOutput():
"""
Manages outputs from simulations. Handles different types of outputs by
dispatchinging the tasks to adequate object handlers.
Manages outputs from simulations. Handles different types of outputs
by dispatchinging the tasks to adequate object handlers.
Parameters
----------
@@ -51,23 +51,31 @@ def set_capture_elements(self, capture_elements):
self.handler.capture_elements_run = capture_elements["run"]

def initialize(self, model):
""" Delegating the creation of the results object and its elements to
the appropriate handler."""
"""
Delegating the creation of the results object and its elements
to the appropriate handler.
"""
self.handler.initialize(model)

def update(self, model):
""" Delegating the update of the results object and its elements to the
appropriate handler."""
"""
Delegating the update of the results object and its elements
to the appropriate handler.
"""
self.handler.update(model)

def postprocess(self, **kwargs):
""" Delegating the postprocessing of the results object to the
appropriate handler."""
"""
Delegating the postprocessing of the results object
to the appropriate handler.
"""
return self.handler.postprocess(**kwargs)

def add_run_elements(self, model):
""" Delegating the addition of results with run cache in the output
object to the appropriate handler."""
"""
Delegating the addition of results with run cache in the
output object to the appropriate handler.
"""
self.handler.add_run_elements(model)

@staticmethod
@@ -97,7 +105,6 @@ def collect(model, flatten_output=True):
model: pysd.py_backend.model.Model
PySD Model object.
flatten_output: bool (optional)
If True, once the output dataframe has been formatted will
split the xarrays in new columns following Vensim's naming
@@ -106,7 +113,6 @@
path in the output_file argument.
"""

del model._dependencies["OUTPUTS"]

model.output.add_run_elements(model)
@@ -126,8 +132,9 @@ def __init__(self, next=None):

def handle(self, out_file):
"""
If the concrete handler can write on the output file type passed by the
user, it returns the handler itself, else it goes to the next handler.
If the concrete handler can write on the output file type passed
by the user, it returns the handler itself, else it goes to the
next handler.
Parameters
----------
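The `handle` docstring above describes a chain-of-responsibility dispatch: each concrete handler either claims the requested output file type or forwards the request to the next handler in the chain. A generic, simplified sketch of that pattern, illustrative only and not PySD's actual classes:

```python
from pathlib import Path


class BaseHandler:
    """One link in the handler chain; subclasses claim specific outputs."""

    def __init__(self, next_handler=None):
        self.next = next_handler

    def handle(self, out_file):
        # Return the first handler that claims out_file, else keep walking.
        handler = self.process_output(out_file)
        if handler is not None:
            return handler
        if self.next is not None:
            return self.next.handle(out_file)
        raise ValueError(f"No handler available for {out_file!r}")

    def process_output(self, out_file):
        raise NotImplementedError


class NetCDFLikeHandler(BaseHandler):
    def process_output(self, out_file):
        # Claims '.nc' files, like the netCDF handler described in this diff.
        if out_file and Path(out_file).suffix == ".nc":
            return self
        return None


class DataFrameLikeHandler(BaseHandler):
    def process_output(self, out_file):
        # Claims anything else, including no output file at all.
        return self


# Build the chain: try the netCDF-style handler first, fall back to DataFrame.
chain = NetCDFLikeHandler(next_handler=DataFrameLikeHandler())
print(type(chain.handle("results.nc")).__name__)  # NetCDFLikeHandler
print(type(chain.handle(None)).__name__)          # DataFrameLikeHandler
```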
@@ -213,8 +220,8 @@ def __init__(self, next):
@property
def step(self):
"""
Used as time index for the output Dataset. Increases by one at each
iteration.
Used as time index for the output Dataset. Increases by one
at each iteration.
"""
return self._step

@@ -226,8 +233,8 @@ def __update_step(self):

def process_output(self, out_file):
"""
If out_file can be handled by this concrete handler, it returns the
handler instance, else it returns None.
If out_file can be handled by this concrete handler, it returns
the handler instance, else it returns None.
Parameters
----------
@@ -247,8 +254,8 @@ def process_output(self, out_file):

def initialize(self, model):
"""
Creates a netCDF4 Dataset and adds model dimensions and variables
present in the capture elements to it.
Creates a netCDF4 Dataset and adds model dimensions and
variables present in the capture elements to it.
Parameters
----------
@@ -344,6 +351,7 @@ def postprocess(self, **kwargs):
Returns
-------
None
"""
self.ds.close()
print(f"Results stored in {self.out_file}")
@@ -438,7 +446,6 @@ def process_output(self, out_file):
None or DataFrameHandler instance
"""

if not out_file:
self.out_file = None
return self
@@ -485,8 +492,8 @@ def update(self, model):

def postprocess(self, **kwargs):
"""
Delete time column from the pandas DataFrame and flatten xarrays if
required.
Delete time column from the pandas DataFrame and flatten
xarrays if required.
Returns
-------
@@ -531,8 +538,8 @@ def add_run_elements(self, model):
def make_flat_df(df, return_addresses, flatten=False):
"""
Takes a dataframe from the outputs of the integration processes,
renames the columns as the given return_adresses and splits xarrays
if needed.
renames the columns as the given return_adresses and splits
xarrays if needed.
Parameters
----------
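`make_flat_df`, touched in the last hunk, renames the integration output columns according to `return_addresses` and, when flattening is requested, splits xarray-valued cells into per-coordinate columns following Vensim's subscript naming (e.g. `Var[A]`). A rough sketch of just the flattening idea, with invented data and without the `return_addresses` handling:

```python
import pandas as pd
import xarray as xr

# A toy output frame: one scalar column and one column holding 1-D xarrays.
stock = [xr.DataArray([1.0 * i, 2.0 * i], {"dim": ["A", "B"]}, ["dim"])
         for i in range(3)]
df = pd.DataFrame({"Time": [0, 1, 2], "Scalar Var": [5.0, 5.5, 6.0],
                   "Subscripted Var": stock})


def flatten_xarray_columns(df):
    """Split xarray-valued columns into 'name[coord]' scalar columns."""
    flat = {}
    for name, col in df.items():
        if isinstance(col.iloc[0], xr.DataArray):
            dim = col.iloc[0].dims[0]
            for coord in col.iloc[0].coords[dim].values:
                flat[f"{name}[{coord}]"] = [da.sel({dim: coord}).item()
                                            for da in col]
        else:
            flat[name] = col.values
    return pd.DataFrame(flat)


print(flatten_xarray_columns(df))
```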
