Commit e2f6395b authored by Lukas Leufen

Merge branch 'lukas_issue153_feat_advanced-docu' into 'develop'

Resolve "Advanced Documentation"

See merge request !138
parents 0a7c3d76 30cbfc0c
Pipeline #46061 passed with stages in 6 minutes and 44 seconds
machinelearningtools package
============================
MLAir package
=============
.. automodule:: src
.. automodule:: mlair
:members:
:undoc-members:
:show-inheritance:
......
......@@ -15,6 +15,8 @@ import sys
sys.path.insert(0, os.path.abspath('../..'))
import mlair
# -- Project information -----------------------------------------------------
project = 'MLAir'
......@@ -22,9 +24,9 @@ copyright = '2020, Lukas H Leufen, Felix Kleinert'
author = 'Lukas H Leufen, Felix Kleinert'
# The short X.Y version
version = 'v0.9.0'
version = "v" + ".".join(mlair.__version__.split(".")[0:2])
# The full version, including alpha/beta/rc tags
release = 'v0.9.0'
release = "v" + mlair.__version__
# -- General configuration ---------------------------------------------------
......
Get started with MLAir
======================
Getting started with MLAir
==========================
.. role:: py(code)
:language: python
Install MLAir
-------------
MLAir is based on several Python frameworks. To work properly, you have to install all packages from the
`requirements.txt` file. Additionally, to support the geographical plotting part, it is required to install geo
:py:`requirements.txt` file. Additionally, to support the geographical plotting part, it is required to install geo
packages built for your operating system. The names of these packages may differ between systems; we refer
here to the openSUSE / Leap OS. The geo plot can be removed from the `plot_list`; in this case, there is no need to
here to the openSUSE / Leap OS. The geo plot can be removed from the :py:`plot_list`; in this case, there is no need to
install the geo packages.
* (geo) Install **proj** on your machine using the console, e.g. for openSUSE / Leap `zypper install proj`
Pre-requirements
~~~~~~~~~~~~~~~~
* (geo) Install **proj** on your machine using the console, e.g. for openSUSE / Leap :py:`zypper install proj`
* (geo) A C++ compiler is required for the installation of the program **cartopy**
* Install all requirements from [`requirements.txt`](https://gitlab.version.fz-juelich.de/toar/machinelearningtools/-/blob/master/requirements.txt)
preferably in a virtual environment
* (tf) Currently, TensorFlow-1.13 is mentioned in the requirements. We already tested the TensorFlow-1.15 version and couldn't
find any compatibility errors. Please note that tf-1.13 and tf-1.15 each have two distinct branches: the default branch
for CPU support, and the "-gpu" branch for GPU support. If the GPU version is installed, MLAir will make use of the GPU
device.
* Installation of **MLAir**:
* Either clone MLAir from the [gitlab repository](https://gitlab.version.fz-juelich.de/toar/machinelearningtools.git)
and use it without installation (besides the requirements)
* or download the distribution file (?? .whl) and install it via `pip install <??>`. In this case, you can simply
import MLAir in any Python script inside your virtual environment using `import mlair`.
Installation of MLAir
~~~~~~~~~~~~~~~~~~~~~
* Install all requirements from `requirements.txt <https://gitlab.version.fz-juelich.de/toar/machinelearningtools/-/blob/master/requirements.txt>`_
preferably in a virtual environment
* Either clone MLAir from the `gitlab repository <https://gitlab.version.fz-juelich.de/toar/machinelearningtools.git>`_
and use it without installation (besides the requirements),
* or download the distribution file (?? .whl) and install it via :py:`pip install <??>`. In this case, you can simply
import MLAir in any Python script inside your virtual environment using :py:`import mlair`. A short smoke test is
sketched below.
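A minimal sketch of such a smoke test (the GPU check assumes one of the TensorFlow 1.x builds listed in the
requirements):

.. code-block:: python

    import mlair
    import tensorflow as tf

    # confirm the package is importable and which version is active
    print(mlair.__version__)
    # True only if a "-gpu" build of TensorFlow is installed and a device is visible
    print(tf.test.is_gpu_available())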
How to start with MLAir
-----------------------
In this section, we show three examples of how to work with MLAir.
In this section, we show three examples of how to work with MLAir. Note that for these examples MLAir was installed
using the distribution file. If you are using the git clone, you need to adjust the import path whenever a script is
not executed directly inside the source directory of MLAir, e.g. by extending :py:`sys.path` as sketched below.
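For the git-clone case, such a path adjustment could look like this (the clone location below is hypothetical):

.. code-block:: python

    import sys

    # make the cloned MLAir source tree importable; adjust to your clone path
    sys.path.insert(0, "/path/to/machinelearningtools")

    import mlair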
Example 1
~~~~~~~~~
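As a minimal sketch of a first example (assuming the top-level :py:`mlair.run()` entry point), the default workflow
runs with all built-in defaults:

.. code-block:: python

    import mlair

    # start the default MLAir workflow with all default settings
    mlair.run()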
......@@ -126,107 +138,3 @@ We can see from the terminal that no training was performed. Analysis is now mad
...
INFO: mlair finished after 00:00:06 (hh:mm:ss)
Customised workflows and models
-------------------------------
Custom Workflow
~~~~~~~~~~~~~~~
MLAir provides a default workflow. If additional steps are to be performed, you have to append custom run modules to
the workflow.
.. code-block:: python
import mlair
import logging
class CustomStage(mlair.RunEnvironment):
"""A custom MLAir stage for demonstration."""
def __init__(self, test_string):
super().__init__() # always call super init method
self._run(test_string) # call a class method
def _run(self, test_string):
logging.info("Just running a custom stage.")
logging.info("test_string = " + test_string)
epochs = self.data_store.get("epochs")
logging.info("epochs = " + str(epochs))
# create your custom MLAir workflow
CustomWorkflow = mlair.Workflow()
# provide stages without initialisation
CustomWorkflow.add(mlair.ExperimentSetup, epochs=128)
# add also keyword arguments for a specific stage
CustomWorkflow.add(CustomStage, test_string="Hello World")
# finally execute custom workflow in order of adding
CustomWorkflow.run()
.. code-block::
INFO: mlair started
...
INFO: ExperimentSetup finished after 00:00:12 (hh:mm:ss)
INFO: CustomStage started
INFO: Just running a custom stage.
INFO: test_string = Hello World
INFO: epochs = 128
INFO: CustomStage finished after 00:00:01 (hh:mm:ss)
INFO: mlair finished after 00:00:13 (hh:mm:ss)
Custom Model
~~~~~~~~~~~~
Each model has to inherit from the abstract model class to ensure smooth training and evaluation behaviour. It is
required to implement the ``set_model`` and ``set_compile_options`` methods; the latter has to set at least the loss.
.. code-block:: python
import keras
from keras.losses import mean_squared_error as mse
from keras.optimizers import SGD
from mlair.model_modules import AbstractModelClass
class MyLittleModel(AbstractModelClass):
"""
A customised model with a 1x1 Conv, and 3 Dense layers (32, 16,
window_lead_time). Dropout is used after the Conv layer.
"""
def __init__(self, window_history_size, window_lead_time, channels):
super().__init__()
# settings
self.window_history_size = window_history_size
self.window_lead_time = window_lead_time
self.channels = channels
self.dropout_rate = 0.1
self.activation = keras.layers.PReLU
self.lr = 1e-2
# apply to model
self.set_model()
self.set_compile_options()
self.set_custom_objects(loss=self.compile_options['loss'])
def set_model(self):
# add 1 to window_size to include current time step t0
shape = (self.window_history_size + 1, 1, self.channels)
x_input = keras.layers.Input(shape=shape)
x_in = keras.layers.Conv2D(32, (1, 1), padding='same')(x_input)
x_in = self.activation()(x_in)
x_in = keras.layers.Flatten()(x_in)
x_in = keras.layers.Dropout(self.dropout_rate)(x_in)
x_in = keras.layers.Dense(32)(x_in)
x_in = self.activation()(x_in)
x_in = keras.layers.Dense(16)(x_in)
x_in = self.activation()(x_in)
x_in = keras.layers.Dense(self.window_lead_time)(x_in)
out = self.activation()(x_in)
self.model = keras.Model(inputs=x_input, outputs=[out])
def set_compile_options(self):
self.compile_options = {"optimizer": SGD(lr=self.lr),
"loss": mse,
"metrics": ["mse"]}
.. machinelearningtools documentation master file, created by
.. MLAir documentation master file, created by
sphinx-quickstart on Wed Apr 15 14:27:29 2020.
You can adapt this file completely to your liking, but it should at least
contain the root `toctree` directive.
Welcome to machinelearningtools's documentation!
Welcome to MLAir's documentation!
================================================
.. toctree::
:maxdepth: 2
:caption: Contents:
get-started
api
customise
Indices and tables
......
......@@ -11,5 +11,5 @@ __date__ = '2020-04-17'
from .bootstraps import BootStraps
from .iterator import KerasIterator, DataCollection
from .advanced_data_handler import DefaultDataPreparation, AbstractDataPreparation
from .data_preparation_neighbors import DataPreparationNeighbors
from .advanced_data_handler import DefaultDataHandler, AbstractDataHandler
from .data_preparation_neighbors import DataHandlerNeighbors
......@@ -17,7 +17,7 @@ import copy
from typing import Union, List, Tuple, Dict
import logging
from functools import reduce
from mlair.data_handler.station_preparation import StationPrep
from mlair.data_handler.station_preparation import DataHandlerSingleStation
from mlair.helpers.join import EmptyQueryResult
......@@ -49,7 +49,7 @@ class DummyDataSingleStation: # pragma: no cover
return self.name
class AbstractDataPreparation:
class AbstractDataHandler:
_requirements = []
......@@ -84,14 +84,15 @@ class AbstractDataPreparation:
return self.get_X(upsampling, as_numpy), self.get_Y(upsampling, as_numpy)
def get_coordinates(self) -> Union[None, Dict]:
"""Return coordinates as dictionary with keys `lon` and `lat`."""
return None
class DefaultDataPreparation(AbstractDataPreparation):
class DefaultDataHandler(AbstractDataHandler):
_requirements = remove_items(inspect.getfullargspec(StationPrep).args, ["self", "station"])
_requirements = remove_items(inspect.getfullargspec(DataHandlerSingleStation).args, ["self", "station"])
def __init__(self, id_class, data_path, min_length=0,
def __init__(self, id_class: DataHandlerSingleStation, data_path: str, min_length: int = 0,
extreme_values: num_or_list = None, extremes_on_right_tail_only: bool = False, name_affix=None):
super().__init__()
self.id_class = id_class
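# For context, a minimal sketch of what the _requirements introspection above
# yields (standard library behaviour; the signature below is hypothetical):
import inspect

def _example_handler(self, station, data_path, sampling="daily"):
    pass

# prints ['self', 'station', 'data_path', 'sampling']; remove_items then drops
# "self" and "station", leaving the names that may be forwarded as kwargs
print(inspect.getfullargspec(_example_handler).args)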
......@@ -109,9 +110,9 @@ class DefaultDataPreparation(AbstractDataPreparation):
self._store(fresh_store=True)
@classmethod
def build(cls, station, **kwargs):
def build(cls, station: str, **kwargs):
sp_keys = {k: copy.deepcopy(kwargs[k]) for k in cls._requirements if k in kwargs}
sp = StationPrep(station, **sp_keys)
sp = DataHandlerSingleStation(station, **sp_keys)
dp_args = {k: copy.deepcopy(kwargs[k]) for k in cls.own_args("id_class") if k in kwargs}
return cls(sp, **dp_args)
......@@ -286,7 +287,7 @@ class DefaultDataPreparation(AbstractDataPreparation):
mean, std = None, None
for station in set_stations:
try:
sp = StationPrep(station, transformation={"method": method}, **sp_keys)
sp = DataHandlerSingleStation(station, transformation={"method": method}, **sp_keys)
mean = sp.mean.copy(deep=True) if mean is None else mean.combine_first(sp.mean)
std = sp.std.copy(deep=True) if std is None else std.combine_first(sp.std)
except (AttributeError, EmptyQueryResult):
......@@ -303,23 +304,23 @@ class DefaultDataPreparation(AbstractDataPreparation):
def run_data_prep():
from .data_preparation_neighbors import DataPreparationNeighbors
data = DummyDataSingleStation("main_class")
from .data_preparation_neighbors import DataHandlerNeighbors
data = DummyDataHandler("main_class")
data.get_X()
data.get_Y()
path = os.path.join(os.path.dirname(os.path.abspath(__file__)), "testdata")
data_prep = DataPreparationNeighbors(DummyDataSingleStation("main_class"),
path,
neighbors=[DummyDataSingleStation("neighbor1"),
DummyDataSingleStation("neighbor2")],
extreme_values=[1., 1.2])
data_prep = DataHandlerNeighbors(DummyDataHandler("main_class"),
path,
neighbors=[DummyDataHandler("neighbor1"),
DummyDataHandler("neighbor2")],
extreme_values=[1., 1.2])
data_prep.get_data(upsampling=False)
def create_data_prep():
from .data_preparation_neighbors import DataPreparationNeighbors
from .data_preparation_neighbors import DataHandlerNeighbors
path = os.path.join(os.path.dirname(os.path.abspath(__file__)), "testdata")
station_type = None
network = 'UBA'
......@@ -329,22 +330,61 @@ def create_data_prep():
interpolation_dim = 'datetime'
window_history_size = 7
window_lead_time = 3
central_station = StationPrep("DEBW011", path, {'o3': 'dma8eu', 'temp': 'maximum'}, {},station_type, network, sampling, target_dim,
target_var, interpolation_dim, window_history_size, window_lead_time)
neighbor1 = StationPrep("DEBW013", path, {'o3': 'dma8eu', 'temp-rea-miub': 'maximum'}, {},station_type, network, sampling, target_dim,
target_var, interpolation_dim, window_history_size, window_lead_time)
neighbor2 = StationPrep("DEBW034", path, {'o3': 'dma8eu', 'temp': 'maximum'}, {}, station_type, network, sampling, target_dim,
target_var, interpolation_dim, window_history_size, window_lead_time)
central_station = DataHandlerSingleStation("DEBW011", path, {'o3': 'dma8eu', 'temp': 'maximum'}, {}, station_type, network, sampling, target_dim,
target_var, interpolation_dim, window_history_size, window_lead_time)
neighbor1 = DataHandlerSingleStation("DEBW013", path, {'o3': 'dma8eu', 'temp-rea-miub': 'maximum'}, {}, station_type, network, sampling, target_dim,
target_var, interpolation_dim, window_history_size, window_lead_time)
neighbor2 = DataHandlerSingleStation("DEBW034", path, {'o3': 'dma8eu', 'temp': 'maximum'}, {}, station_type, network, sampling, target_dim,
target_var, interpolation_dim, window_history_size, window_lead_time)
data_prep = []
data_prep.append(DataPreparationNeighbors(central_station, path, neighbors=[neighbor1, neighbor2]))
data_prep.append(DataPreparationNeighbors(neighbor1, path, neighbors=[central_station, neighbor2]))
data_prep.append(DataPreparationNeighbors(neighbor2, path, neighbors=[neighbor1, central_station]))
data_prep.append(DataHandlerNeighbors(central_station, path, neighbors=[neighbor1, neighbor2]))
data_prep.append(DataHandlerNeighbors(neighbor1, path, neighbors=[central_station, neighbor2]))
data_prep.append(DataHandlerNeighbors(neighbor2, path, neighbors=[neighbor1, central_station]))
return data_prep
class DummyDataHandler(AbstractDataHandler):
def __init__(self, name, number_of_samples=None):
"""This data handler takes a name argument and the number of samples to generate. If not provided, a random
number between 100 and 150 is set."""
super().__init__()
self.name = name
self.number_of_samples = number_of_samples if number_of_samples is not None else np.random.randint(100, 150)
self._X = self.create_X()
self._Y = self.create_Y()
def create_X(self):
"""Inputs are random numbers between 0 and 10 with shape (no_samples, window=14, variables=5)."""
X = np.random.randint(0, 10, size=(self.number_of_samples, 14, 5)) # samples, window, variables
datelist = pd.date_range(dt.datetime.today().date(), periods=self.number_of_samples, freq="H").tolist()
return xr.DataArray(X, dims=['datetime', 'window', 'variables'], coords={"datetime": datelist,
"window": range(14),
"variables": range(5)})
def create_Y(self):
"""Targets are normal distributed random numbers with shape (no_samples, window=5, variables=1)."""
Y = np.round(0.5 * np.random.randn(self.number_of_samples, 5, 1), 1) # samples, window, variables
datelist = pd.date_range(dt.datetime.today().date(), periods=self.number_of_samples, freq="H").tolist()
return xr.DataArray(Y, dims=['datetime', 'window', 'variables'], coords={"datetime": datelist,
"window": range(5),
"variables": range(1)})
def get_X(self, upsampling=False, as_numpy=False):
"""Upsampling parameter is not used for X."""
return np.copy(self._X) if as_numpy is True else self._X
def get_Y(self, upsampling=False, as_numpy=False):
"""Upsampling parameter is not used for Y."""
return np.copy(self._Y) if as_numpy is True else self._Y
def __str__(self):
return self.name
if __name__ == "__main__":
from mlair.data_handler.station_preparation import StationPrep
from mlair.data_handler.station_preparation import DataHandlerSingleStation
from mlair.data_handler.iterator import KerasIterator, DataCollection
data_prep = create_data_prep()
data_collection = DataCollection(data_prep)
......
......@@ -19,7 +19,7 @@ from itertools import chain
import numpy as np
import xarray as xr
from mlair.data_handler.advanced_data_handler import AbstractDataPreparation
from mlair.data_handler.advanced_data_handler import AbstractDataHandler
class BootstrapIterator(Iterator):
......@@ -82,7 +82,7 @@ class BootStraps(Iterable):
"""
Main class to perform bootstrap operations.
This class requires a data handler following the definition of the AbstractDataPreparation, the number of bootstraps
This class requires a data handler following the definition of the AbstractDataHandler, the number of bootstraps
to create, and the dimension along which this bootstrapping is performed (default dimension is `variables`).
When iterating on this class, it returns the bootstrapped X, Y and a tuple with (position of variable in X, name of
......@@ -91,7 +91,7 @@ class BootStraps(Iterable):
retrieved by calling the .bootstraps() method. Furthermore, by calling the .get_orig_prediction() method, this class
imitates the original prediction according to the set number of bootstraps.
"""
def __init__(self, data: AbstractDataPreparation, number_of_bootstraps: int = 10,
def __init__(self, data: AbstractDataHandler, number_of_bootstraps: int = 10,
bootstrap_dimension: str = "variables"):
"""
Create iterable class to be ready to iter.
......
......@@ -4,8 +4,8 @@ __date__ = '2020-07-17'
from mlair.helpers import to_list
from mlair.data_handler.station_preparation import StationPrep
from mlair.data_handler.advanced_data_handler import DefaultDataPreparation
from mlair.data_handler.station_preparation import DataHandlerSingleStation
from mlair.data_handler.advanced_data_handler import DefaultDataHandler
import os
from typing import Union, List
......@@ -14,7 +14,7 @@ number = Union[float, int]
num_or_list = Union[number, List[number]]
class DataPreparationNeighbors(DefaultDataPreparation):
class DataHandlerNeighbors(DefaultDataHandler):
def __init__(self, id_class, data_path, neighbors=None, min_length=0,
extreme_values: num_or_list = None, extremes_on_right_tail_only: bool = False):
......@@ -25,10 +25,10 @@ class DataPreparationNeighbors(DefaultDataPreparation):
@classmethod
def build(cls, station, **kwargs):
sp_keys = {k: kwargs[k] for k in cls._requirements if k in kwargs}
sp = StationPrep(station, **sp_keys)
sp = DataHandlerSingleStation(station, **sp_keys)
n_list = []
for neighbor in kwargs.get("neighbors", []):
n_list.append(StationPrep(neighbor, **sp_keys))
n_list.append(DataHandlerSingleStation(neighbor, **sp_keys))
else:
kwargs["neighbors"] = n_list if len(n_list) > 0 else None
dp_args = {k: kwargs[k] for k in cls.own_args("id_class") if k in kwargs}
......@@ -39,12 +39,12 @@ class DataPreparationNeighbors(DefaultDataPreparation):
def get_coordinates(self, include_neighbors=False):
neighbors = list(map(lambda n: n.get_coordinates(), self.neighbors)) if include_neighbors is True else []
return [super(DataPreparationNeighbors, self).get_coordinates()].append(neighbors)
return [super(DataHandlerNeighbors, self).get_coordinates()] + neighbors  # .append() would return None
if __name__ == "__main__":
a = DataPreparationNeighbors
a = DataHandlerNeighbors
requirements = a.requirements()
kwargs = {"path": os.path.join(os.path.dirname(os.path.abspath(__file__)), "testdata"),
......
......@@ -39,7 +39,7 @@ DEFAULT_SAMPLING = "daily"
DEFAULT_INTERPOLATION_METHOD = "linear"
class AbstractStationPrep(object):
class AbstractDataHandlerSingleStation(object):
def __init__(self): #, path, station, statistics_per_var, transformation, **kwargs):
pass
......@@ -50,7 +50,7 @@ class AbstractStationPrep(object):
raise NotImplementedError
class StationPrep(AbstractStationPrep):
class DataHandlerSingleStation(AbstractDataHandlerSingleStation):
def __init__(self, station, data_path, statistics_per_var, station_type=DEFAULT_STATION_TYPE,
network=DEFAULT_NETWORK, sampling=DEFAULT_SAMPLING, target_dim=DEFAULT_TARGET_DIM,
......@@ -514,6 +514,59 @@ class StationPrep(AbstractStationPrep):
:param transformation: the transformation dictionary as described above.
:return: updated transformation dictionary
## Transformation
There are two different approaches (called scopes) to transform the data:
1) `station`: transform the data of each station independently (somewhat like batch normalisation)
2) `data`: transform the data of all stations with shared metrics
Transformation must be set by the `transformation` attribute. If `transformation = None` is given to `ExperimentSetup`,
data is not transformed at all. For all other setups, use the following dictionary structure to specify the
transformation.
```
transformation = {"scope": <...>,
"method": <...>,
"mean": <...>,
"std": <...>}
ExperimentSetup(..., transformation=transformation, ...)
```
### scopes
**station**: mean and std are not used
**data**: either provide already calculated values for mean and std (if required by transformation method), or choose
from different calculation schemes, explained in the mean and std section.
### supported transformation methods
Currently supported methods are (a short sketch of both follows the list):
* standardise (default, if method is not given)
* centre
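Both methods are standard scalings; as a minimal sketch (not MLAir's exact implementation):

```
def standardise(x, mean, std):
    # zero mean, unit variance
    return (x - mean) / std

def centre(x, mean):
    # zero mean only, variance unchanged
    return x - mean
```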
### mean and std
`"mean"="accurate"`: calculate the accurate values of mean and std (depending on method) by using all data. Although,
this method is accurate, it may take some time for the calculation. Furthermore, this could potentially lead to memory
issue (not explored yet, but could appear for a very big amount of data)
`"mean"="estimate"`: estimate mean and std (depending on method). For each station, mean and std are calculated and
afterwards aggregated using the mean value over all station-wise metrics. This method is less accurate, especially
regarding the std calculation but therefore much faster.
We recommend using the latter method, *estimate*, for the following reasons (see the sketch after this list):
* much faster calculation
* real accuracy of mean and std is less important, because it is "just" a transformation / scaling
* accuracy of mean is almost as high as in the *accurate* case, because
$\overline{x_{ij}} = \overline{\left(\overline{x_i}\right)_j}$. The only difference is that in the *estimate* case, each
mean is equally weighted for each station, independently of the actual data count of the station.
* accuracy of std is lower for *estimate*, because $\mathrm{Var}(x_{ij}) \ne \overline{\left(\mathrm{Var}(x_i)\right)_j}$,
but the mean of all station-wise stds is still a decent estimate of the true std.
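A small numerical sketch of the difference between both schemes (station sizes and values are made up):

```
import numpy as np

stations = [np.random.randn(200), np.random.randn(50) + 1]
data = np.concatenate(stations)

# accurate: metrics computed over all pooled data
acc_mean, acc_std = data.mean(), data.std()

# estimate: station-wise metrics, then equally weighted mean over stations
est_mean = np.mean([s.mean() for s in stations])
est_std = np.mean([s.std() for s in stations])

# est_mean deviates from acc_mean only through the equal weighting of stations;
# est_std underestimates acc_std because between-station variance is ignored
print(acc_mean, est_mean, acc_std, est_std)
```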
`"mean"=<value, e.g. xr.DataArray>`: If mean and std are already calculated or shall be set manually, just add the
scaling values instead of the calculation method. For method *centre*, std can still be None, but is required for the
*standardise* method. **Important**: Format of given values **must** match internal data format of DataPreparation
class: `xr.DataArray` with `dims=["variables"]` and one value for each variable.
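For illustration, manually supplied values in the expected format could look like this (variable names and numbers
are hypothetical):

```
import xarray as xr

mean = xr.DataArray([42.0, 8.5], dims=["variables"], coords={"variables": ["o3", "temp"]})
std = xr.DataArray([18.0, 6.2], dims=["variables"], coords={"variables": ["o3", "temp"]})

transformation = {"scope": "data", "method": "standardise", "mean": mean, "std": std}
ExperimentSetup(..., transformation=transformation, ...)
```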
"""
if transformation is None:
return
......@@ -681,18 +734,18 @@ if __name__ == "__main__":
# dp = AbstractDataPrep('data/', 'dummy', 'DEBW107', ['o3', 'temp'], statistics_per_var={'o3': 'dma8eu', 'temp': 'maximum'})
# print(dp)
statistics_per_var = {'o3': 'dma8eu', 'temp-rea-miub': 'maximum'}
sp = StationPrep(data_path='/home/felix/PycharmProjects/mlt_new/data/', station='DEBY122',
statistics_per_var=statistics_per_var, station_type='background',
network='UBA', sampling='daily', target_dim='variables', target_var='o3',
time_dim='datetime', window_history_size=7, window_lead_time=3,
interpolation_limit=0
) # transformation={'method': 'standardise'})
sp = DataHandlerSingleStation(data_path='/home/felix/PycharmProjects/mlt_new/data/', station='DEBY122',
statistics_per_var=statistics_per_var, station_type='background',
network='UBA', sampling='daily', target_dim='variables', target_var='o3',
time_dim='datetime', window_history_size=7, window_lead_time=3,
interpolation_limit=0
) # transformation={'method': 'standardise'})
# sp.set_transformation({'method': 'standardise', 'mean': sp.mean+2, 'std': sp.std+1})
sp2 = StationPrep(data_path='/home/felix/PycharmProjects/mlt_new/data/', station='DEBY122',
statistics_per_var=statistics_per_var, station_type='background',
network='UBA', sampling='daily', target_dim='variables', target_var='o3',
time_dim='datetime', window_history_size=7, window_lead_time=3,
transformation={'method': 'standardise'})
sp2 = DataHandlerSingleStation(data_path='/home/felix/PycharmProjects/mlt_new/data/', station='DEBY122',
statistics_per_var=statistics_per_var, station_type='background',
network='UBA', sampling='daily', target_dim='variables', target_var='o3',
time_dim='datetime', window_history_size=7, window_lead_time=3,
transformation={'method': 'standardise'})
sp2.transform(inverse=True)
sp.get_X()
sp.get_Y()
......
......@@ -351,9 +351,8 @@ class AbstractModelClass(ABC):
class MyLittleModel(AbstractModelClass):
"""
A customised model with a 1x1 Conv, and 4 Dense layers (64, 32, 16, window_lead_time), where the last layer is the
output layer depending on the window_lead_time parameter. Dropout is used between the Convolution and the first
Dense layer.
A customised model with 4 Dense layers (64, 32, 16, window_lead_time), where the last layer is the output layer
depending on the window_lead_time parameter.
"""
def __init__(self, shape_inputs: list, shape_outputs: list):
......@@ -382,13 +381,8 @@ class MyLittleModel(AbstractModelClass):
"""
Build the model.
"""
# add 1 to window_size to include current time step t0
x_input = keras.layers.Input(shape=self.shape_inputs)
x_in = keras.layers.Conv2D(32, (1, 1), padding='same', name='{}_Conv_1x1'.format("major"))(x_input)
x_in = self.activation(name='{}_conv_act'.format("major"))(x_in)
x_in = keras.layers.Flatten(name='{}'.format("major"))(x_in)
x_in = keras.layers.Dropout(self.dropout_rate, name='{}_Dropout_1'.format("major"))(x_in)
x_in = keras.layers.Flatten(name='{}'.format("major"))(x_input)
x_in = keras.layers.Dense(64, name='{}_Dense_64'.format("major"))(x_in)
x_in = self.activation()(x_in)
x_in = keras.layers.Dense(32, name='{}_Dense_32'.format("major"))(x_in)
......
......@@ -786,8 +786,8 @@ class PlotTimeSeries:
def _plot(self, plot_folder):
pdf_pages = self._create_pdf_pages(plot_folder)
for pos, station in enumerate(self._stations):
start, end = self._get_time_range(self._load_data(self._stations[0]))
data = self._load_data(station)
start, end = self._get_time_range(data)
fig, axes, factor = self._create_subplots(start, end)
nan_list = []
for i_year in range(end - start + 1):
......
......@@ -18,7 +18,7 @@ from mlair.configuration.defaults import DEFAULT_STATIONS, DEFAULT_VAR_ALL_DICT,
DEFAULT_VAL_MIN_LENGTH, DEFAULT_TEST_START, DEFAULT_TEST_END, DEFAULT_TEST_MIN_LENGTH, DEFAULT_TRAIN_VAL_MIN_LENGTH, \
DEFAULT_USE_ALL_STATIONS_ON_ALL_DATA_SETS, DEFAULT_EVALUATE_BOOTSTRAPS, DEFAULT_CREATE_NEW_BOOTSTRAPS, \
DEFAULT_NUMBER_OF_BOOTSTRAPS, DEFAULT_PLOT_LIST
from mlair.data_handler.advanced_data_handler import DefaultDataPreparation
from mlair.data_handler.advanced_data_handler import DefaultDataHandler
from mlair.run_modules.run_environment import RunEnvironment