diff --git a/.github/workflows/python-app.yml b/.github/workflows/python-app.yml
index 53ffe7329..e67e982b0 100644
--- a/.github/workflows/python-app.yml
+++ b/.github/workflows/python-app.yml
@@ -10,21 +10,7 @@ on:
     branches: [ develop, master ]
 
 jobs:
-  create-badges:
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@v2
-        with:
-          persist-credentials: false # otherwise, the token used is the GITHUB_TOKEN, instead of your personal token
-          fetch-depth: 0 # otherwise, you will failed to push refs to dest repo
-      - name: Colab Badge Action
-        uses: trsvchn/colab-badge-action@v4
-      - name: Commit & Push changes
-        uses: actions-js/push@master
-        with:
-          github_token: ${{ secrets.GITHUB_TOKEN }}
-          branch: ${{ github.head_ref }}
-
+
   build:
     strategy:
diff --git a/.gitignore b/.gitignore
index 6719f8484..2031a4d23 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,13 +1,12 @@
 **/.ipynb_checkpoints
 **/__pycache__
 deeptrack-app/*
-*/datasets/*
+
 paper-examples/models/*
 build/*
 dist/*
 *.egg-info/
-*/datasets/*
 */theory
 
 _src/build/**/*
@@ -16,6 +15,11 @@ ParticleSizing
 CellData
 ParticleTracking
 data/
-datasets/
 examples/**/*/models/
 **/node_modules/
+
+*.tif
+*.png
+*.jpg
+*.jpeg
+*.npy
\ No newline at end of file
diff --git a/README.md b/README.md
index ed9623db0..5efa60a56 100644
--- a/README.md
+++ b/README.md
@@ -111,7 +111,7 @@ The second series focuses on individual topics, introducing them in a natural order
 
 Additionally, we have seven more case studies which are less documented, but gives additional insight in how to use DeepTrack with real datasets
 
-1. [MNIST](examples/paper-examples/1_MNIST.ipynb) classifies handwritted digits.
+1. [MNIST](examples/paper-examples/1-MNIST.ipynb) classifies handwritten digits.
 2. [single particle tracking](examples/paper-examples/2-single_particle_tracking.ipynb) tracks experimentally captured videos of a single particle. (Requires opencv-python compiled with ffmpeg to open and read a video.)
 3. [single particle sizing](examples/paper-examples/3-particle_sizing.ipynb) extracts the radius and refractive index of particles.
 4. [multi-particle tracking](examples/paper-examples/4-multi-molecule-tracking.ipynb) detects quantum dots in a low SNR image.
diff --git a/deeptrack/__init__.py b/deeptrack/__init__.py
index 83e15e744..c7b5d7175 100644
--- a/deeptrack/__init__.py
+++ b/deeptrack/__init__.py
@@ -2,6 +2,7 @@
 from pint import UnitRegistry, Context
 
 from .backend.pint_definition import pint_definitions
+
 units = UnitRegistry(pint_definitions.split("\n"))
 
 import tensorflow as tf
@@ -27,6 +28,7 @@ from .statistics import *
 from .holography import *
+
 from .image import strip
 
 from . import (
@@ -39,4 +41,5 @@ backend,
     test,
     visualization,
+    datasets,
 )
diff --git a/deeptrack/augmentations.py b/deeptrack/augmentations.py
index 8caaa7718..f882e98f1 100644
--- a/deeptrack/augmentations.py
+++ b/deeptrack/augmentations.py
@@ -681,6 +681,30 @@ def image_to_crop(image):
     )
 
 
+class CropTight(Feature):
+    def __init__(self, eps=1e-10, **kwargs):
+        """Crops the input array to remove empty space.
+
+        Removes indices from the start and end of the array where all values are below eps.
+
+        Currently only works for 3D arrays.
+
+        Parameters
+        ----------
+        eps : float, optional
+            The threshold for considering a pixel to be empty, by default 1e-10"""
+        super().__init__(eps=eps, **kwargs)
+
+    def get(self, image, eps, **kwargs):
+        image = np.asarray(image)
+
+        image = image[..., np.any(image > eps, axis=(0, 1))]
+        image = image[np.any(image > eps, axis=(1, 2)), ...]
+        image = image[:, np.any(image > eps, axis=(0, 2)), :]
+
+        return image
+
+
 class Pad(Augmentation):
     """Pads the image.
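# A minimal usage sketch for the CropTight feature added above (editor's
# illustration, not part of the patch). It assumes CropTight is re-exported at
# the package top level like the other augmentations, and that calling a
# feature resolves it on the given input, as elsewhere in DeepTrack.
import numpy as np
import deeptrack as dt

volume = np.zeros((32, 32, 32))
volume[8:24, 8:24, 8:24] = 1.0  # non-empty interior surrounded by empty space

crop = dt.CropTight(eps=1e-10)
cropped = crop(volume)  # drops the all-empty slices along each of the 3 axes
print(cropped.shape)    # expected (16, 16, 16)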
diff --git a/deeptrack/datasets/__init__.py b/deeptrack/datasets/__init__.py
new file mode 100644
index 000000000..ec675718a
--- /dev/null
+++ b/deeptrack/datasets/__init__.py
@@ -0,0 +1,6 @@
+from deeptrack.datasets import (
+    detection_QuantumDots,
+    segmentation_ssTEM_drosophila,
+    regression_holography_nanoparticles,
+    segmentation_fluorescence_u2os,
+)
\ No newline at end of file
diff --git a/deeptrack/datasets/detection_QuantumDots/__init__.py b/deeptrack/datasets/detection_QuantumDots/__init__.py
new file mode 100644
index 000000000..445221397
--- /dev/null
+++ b/deeptrack/datasets/detection_QuantumDots/__init__.py
@@ -0,0 +1,3 @@
+"""detection_QuantumDots dataset."""
+
+from .detection_QuantumDots import DetectionQuantumdots
diff --git a/deeptrack/datasets/detection_QuantumDots/checksums.tsv b/deeptrack/datasets/detection_QuantumDots/checksums.tsv
new file mode 100644
index 000000000..0a5d10880
--- /dev/null
+++ b/deeptrack/datasets/detection_QuantumDots/checksums.tsv
@@ -0,0 +1,2 @@
+https://drive.google.com/file/d/1naaoxIaAU1F_rBaI-I1pB1K4Sp6pq_Jv/view?usp=sharing 67850 95f52b3bbfbf1b2fe7f213021fbd63bdf5040a4dc099ef6903243feb849f06c6 view
+https://drive.google.com/u/1/uc?id=1naaoxIaAU1F_rBaI-I1pB1K4Sp6pq_Jv&export=download 543855765 375476e7a70fa3c1a8f91f2e4035b896b8d8acb1800dae2e1e028c2db485a030 QuantumDots.zip
diff --git a/deeptrack/datasets/detection_QuantumDots/detection_QuantomDuts_test.py b/deeptrack/datasets/detection_QuantumDots/detection_QuantomDuts_test.py
new file mode 100644
index 000000000..4942ae07c
--- /dev/null
+++ b/deeptrack/datasets/detection_QuantumDots/detection_QuantomDuts_test.py
@@ -0,0 +1,24 @@
+"""detection_QuantumDots dataset."""
+
+import tensorflow_datasets as tfds
+from . import detection_QuantumDots
+
+
+class DetectionQuantumdotsTest(tfds.testing.DatasetBuilderTestCase):
+    """Tests for detection_QuantumDots dataset."""
+    # TODO(detection_QuantumDots):
+    DATASET_CLASS = detection_QuantumDots.DetectionQuantumdots
+    SPLITS = {
+        'train': 3,  # Number of fake train examples
+        'test': 1,  # Number of fake test examples
+    }
+
+    # If you are calling `download/download_and_extract` with a dict, like:
+    #   dl_manager.download({'some_key': 'http://a.org/out.txt', ...})
+    # then the tests need to provide the fake output paths relative to the
+    # fake data directory
+    # DL_EXTRACT_RESULT = {'some_key': 'output_file1.txt', ...}
+
+
+if __name__ == '__main__':
+    tfds.testing.test_main()
diff --git a/deeptrack/datasets/detection_QuantumDots/detection_QuantumDots.py b/deeptrack/datasets/detection_QuantumDots/detection_QuantumDots.py
new file mode 100644
index 000000000..cf0c598fa
--- /dev/null
+++ b/deeptrack/datasets/detection_QuantumDots/detection_QuantumDots.py
@@ -0,0 +1,69 @@
+"""detection_QuantumDots dataset."""
+
+import tensorflow_datasets as tfds
+import tensorflow as tf
+import numpy as np
+
+# TODO(detection_QuantumDots): Markdown description that will appear on the catalog page.
+_DESCRIPTION = """
+Sequential images of quantum dots in a fluorescence microscope. The dataset is unlabeled.
+"""
+
+# TODO(detection_QuantumDots): BibTeX citation
+_CITATION = """
+"""
+
+
+class DetectionQuantumdots(tfds.core.GeneratorBasedBuilder):
+    """DatasetBuilder for detection_QuantumDots dataset."""
+
+    VERSION = tfds.core.Version("1.0.0")
+    RELEASE_NOTES = {
+        "1.0.0": "Initial release.",
+    }
+
+    def _info(self) -> tfds.core.DatasetInfo:
+        """Returns the dataset metadata."""
+        # TODO(detection_QuantumDots): Specifies the tfds.core.DatasetInfo object
+        return tfds.core.DatasetInfo(
+            builder=self,
+            description=_DESCRIPTION,
+            features=tfds.features.FeaturesDict(
+                {
+                    # These are the features of your dataset like images, labels ...
+                    "image": tfds.features.Image(
+                        shape=(1200, 1200, 1),
+                        dtype=tf.uint16,
+                    ),
+                }
+            ),
+            # If there's a common (input, target) tuple from the
+            # features, specify them here. They'll be used if
+            # `as_supervised=True` in `builder.as_dataset`.
+            supervised_keys=None,  # Set to `None` to disable
+            homepage="https://dataset-homepage/",
+            citation=_CITATION,
+        )
+
+    def _split_generators(self, dl_manager: tfds.download.DownloadManager):
+        """Returns SplitGenerators."""
+        # TODO(detection_QuantumDots): Downloads the data and defines the splits
+        path = dl_manager.download_and_extract(
+            "https://drive.google.com/u/1/uc?id=1naaoxIaAU1F_rBaI-I1pB1K4Sp6pq_Jv&export=download"
+        )
+
+        # TODO(detection_QuantumDots): Returns the Dict[split names, Iterator[Key, Example]]
+        return {
+            "train": self._generate_examples(path / "QuantumDots"),
+        }
+
+    def _generate_examples(self, path):
+        """Yields examples."""
+        tifpath = path / "Qdots.tif"
+
+        image_stack = tfds.core.lazy_imports.tifffile.imread(tifpath)
+        image_stack = np.expand_dims(image_stack, axis=-1)
+        for i, image in enumerate(image_stack):
+            yield str(i), {
+                "image": image,
+            }
diff --git a/deeptrack/datasets/detection_QuantumDots/dummy_data/TODO-add_fake_data_in_this_directory.txt b/deeptrack/datasets/detection_QuantumDots/dummy_data/TODO-add_fake_data_in_this_directory.txt
new file mode 100644
index 000000000..e69de29bb
diff --git a/deeptrack/datasets/regression_holography_nanoparticles/__init__.py b/deeptrack/datasets/regression_holography_nanoparticles/__init__.py
new file mode 100644
index 000000000..b8fbcb70e
--- /dev/null
+++ b/deeptrack/datasets/regression_holography_nanoparticles/__init__.py
@@ -0,0 +1,3 @@
+"""regression_holography_nanoparticles dataset."""
+
+from .regression_holography_nanoparticles import RegressionHolographyNanoparticles
diff --git a/deeptrack/datasets/regression_holography_nanoparticles/checksums.tsv b/deeptrack/datasets/regression_holography_nanoparticles/checksums.tsv
new file mode 100644
index 000000000..b5ad4d05c
--- /dev/null
+++ b/deeptrack/datasets/regression_holography_nanoparticles/checksums.tsv
@@ -0,0 +1,3 @@
+# TODO(regression_holography_nanoparticles): If your dataset downloads files, then the checksums
+# will be automatically added here when running
+# `tfds build --register_checksums`.
diff --git a/deeptrack/datasets/regression_holography_nanoparticles/dummy_data/TODO-add_fake_data_in_this_directory.txt b/deeptrack/datasets/regression_holography_nanoparticles/dummy_data/TODO-add_fake_data_in_this_directory.txt
new file mode 100644
index 000000000..e69de29bb
diff --git a/deeptrack/datasets/regression_holography_nanoparticles/regression_holography_nanoparticles.py b/deeptrack/datasets/regression_holography_nanoparticles/regression_holography_nanoparticles.py
new file mode 100644
index 000000000..f1e25098c
--- /dev/null
+++ b/deeptrack/datasets/regression_holography_nanoparticles/regression_holography_nanoparticles.py
@@ -0,0 +1,83 @@
+"""regression_holography_nanoparticles dataset."""
+
+import tensorflow_datasets as tfds
+import tensorflow as tf
+import numpy as np
+
+# TODO(regression_holography_nanoparticles): Markdown description that will appear on the catalog page.
+_DESCRIPTION = """
+"""
+
+# TODO(regression_holography_nanoparticles): BibTeX citation
+_CITATION = """
+"""
+
+
+class RegressionHolographyNanoparticles(tfds.core.GeneratorBasedBuilder):
+    """DatasetBuilder for regression_holography_nanoparticles dataset."""
+
+    VERSION = tfds.core.Version("1.0.0")
+    RELEASE_NOTES = {
+        "1.0.0": "Initial release.",
+    }
+
+    def _info(self) -> tfds.core.DatasetInfo:
+        """Returns the dataset metadata."""
+        # TODO(regression_holography_nanoparticles): Specifies the tfds.core.DatasetInfo object
+        return tfds.core.DatasetInfo(
+            builder=self,
+            description=_DESCRIPTION,
+            features=tfds.features.FeaturesDict(
+                {
+                    # These are the features of your dataset like images, labels ...
+                    "image": tfds.features.Tensor(shape=(64, 64, 2), dtype=tf.float64),
+                    "radius": tfds.features.Scalar(tf.float64),
+                    "refractive_index": tfds.features.Scalar(tf.float64),
+                }
+            ),
+            # If there's a common (input, target) tuple from the
+            # features, specify them here. They'll be used if
+            # `as_supervised=True` in `builder.as_dataset`.
+            supervised_keys=(
+                "image",
+                "radius",
+                "refractive_index",
+            ),  # Set to `None` to disable
+            homepage="https://dataset-homepage/",
+            citation=_CITATION,
+        )
+
+    def _split_generators(self, dl_manager: tfds.download.DownloadManager):
+        """Returns SplitGenerators."""
+        # TODO(regression_holography_nanoparticles): Downloads the data and defines the splits
+        path = dl_manager.download_and_extract(
+            "https://drive.google.com/u/1/uc?id=1LJqWYmLj93WYLKaLm_yQFmiR1FZHhf1r&export=download"
+        )
+
+        # TODO(regression_holography_nanoparticles): Returns the Dict[split names, Iterator[Key, Example]]
+        return {
+            "train": self._generate_examples(path, "train"),
+            "test": self._generate_examples(path, "test"),
+        }
+
+    def _generate_examples(self, path, split):
+        """Yields examples."""
+        # TODO(regression_holography_nanoparticles): Yields (key, example) tuples from the dataset
+
+        if split == "train":
+            data = np.load(path / "training_set.npy")
+            radius = np.load(path / "training_radius.npy")
+            refractive_index = np.load(path / "training_n.npy")
+        elif split == "test":
+            data = np.load(path / "validation_set.npy")
+            radius = np.load(path / "validation_radius.npy")
+            refractive_index = np.load(path / "validation_n.npy")
+        else:
+            raise ValueError("Split not recognized:", split)
+
+        for idx in range(data.shape[0]):
+            yield str(idx), {
+                "image": data[idx],
+                "radius": radius[idx],
+                "refractive_index": refractive_index[idx],
+            }
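# A hedged sketch (editor's illustration, not part of the patch): consuming the
# regression dataset above via its per-key features. Note the three-element
# supervised_keys is unusual for tfds (normally an (input, target) two-tuple);
# reading the keys directly sidesteps that. The registered name is an
# assumption based on the usual tfds naming convention.
import tensorflow_datasets as tfds
import deeptrack.datasets  # registers the builders

ds = tfds.load("regression_holography_nanoparticles", split="train")
for example in ds.take(1):
    image = example["image"]            # (64, 64, 2) float64 hologram
    radius = example["radius"]          # scalar regression target
    n = example["refractive_index"]     # scalar regression target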
diff --git a/deeptrack/datasets/regression_holography_nanoparticles/regression_holography_nanoparticles_test.py b/deeptrack/datasets/regression_holography_nanoparticles/regression_holography_nanoparticles_test.py
new file mode 100644
index 000000000..6af80870e
--- /dev/null
+++ b/deeptrack/datasets/regression_holography_nanoparticles/regression_holography_nanoparticles_test.py
@@ -0,0 +1,24 @@
+"""regression_holography_nanoparticles dataset."""
+
+import tensorflow_datasets as tfds
+from . import regression_holography_nanoparticles
+
+
+class RegressionHolographyNanoparticlesTest(tfds.testing.DatasetBuilderTestCase):
+    """Tests for regression_holography_nanoparticles dataset."""
+    # TODO(regression_holography_nanoparticles):
+    DATASET_CLASS = regression_holography_nanoparticles.RegressionHolographyNanoparticles
+    SPLITS = {
+        'train': 3,  # Number of fake train examples
+        'test': 1,  # Number of fake test examples
+    }
+
+    # If you are calling `download/download_and_extract` with a dict, like:
+    #   dl_manager.download({'some_key': 'http://a.org/out.txt', ...})
+    # then the tests need to provide the fake output paths relative to the
+    # fake data directory
+    # DL_EXTRACT_RESULT = {'some_key': 'output_file1.txt', ...}
+
+
+if __name__ == '__main__':
+    tfds.testing.test_main()
diff --git a/deeptrack/datasets/segmentation_fluorescence_u2os/__init__.py b/deeptrack/datasets/segmentation_fluorescence_u2os/__init__.py
new file mode 100644
index 000000000..a61a51d3a
--- /dev/null
+++ b/deeptrack/datasets/segmentation_fluorescence_u2os/__init__.py
@@ -0,0 +1,3 @@
+"""segmentation_fluorescence_u2os dataset."""
+
+from .segmentation_fluorescence_u2os import SegmentationFluorescenceU2os
diff --git a/deeptrack/datasets/segmentation_fluorescence_u2os/checksums.tsv b/deeptrack/datasets/segmentation_fluorescence_u2os/checksums.tsv
new file mode 100644
index 000000000..5b797d275
--- /dev/null
+++ b/deeptrack/datasets/segmentation_fluorescence_u2os/checksums.tsv
@@ -0,0 +1,3 @@
+# TODO(segmentation_fluorescence_u2os): If your dataset downloads files, then the checksums
+# will be automatically added here when running
+# `tfds build --register_checksums`.
diff --git a/deeptrack/datasets/segmentation_fluorescence_u2os/dummy_data/TODO-add_fake_data_in_this_directory.txt b/deeptrack/datasets/segmentation_fluorescence_u2os/dummy_data/TODO-add_fake_data_in_this_directory.txt
new file mode 100644
index 000000000..e69de29bb
diff --git a/deeptrack/datasets/segmentation_fluorescence_u2os/segmentation_fluorescence_u2os.py b/deeptrack/datasets/segmentation_fluorescence_u2os/segmentation_fluorescence_u2os.py
new file mode 100644
index 000000000..e96901ec9
--- /dev/null
+++ b/deeptrack/datasets/segmentation_fluorescence_u2os/segmentation_fluorescence_u2os.py
@@ -0,0 +1,104 @@
+"""segmentation_fluorescence_u2os dataset."""
+
+import tensorflow_datasets as tfds
+import tensorflow as tf
+
+# TODO(segmentation_fluorescence_u2os): Markdown description that will appear on the catalog page.
+_DESCRIPTION = """
+Description is **formatted** as markdown.
+
+It should also contain any processing which has been applied (if any),
+(e.g. corrupted example skipped, images cropped,...):
+"""
+
+# TODO(segmentation_fluorescence_u2os): BibTeX citation
+_CITATION = """
+"""
+
+
+class SegmentationFluorescenceU2os(tfds.core.GeneratorBasedBuilder):
+    """DatasetBuilder for segmentation_fluorescence_u2os dataset."""
+
+    VERSION = tfds.core.Version("1.0.0")
+    RELEASE_NOTES = {
+        "1.0.0": "Initial release.",
+    }
+
+    def _info(self) -> tfds.core.DatasetInfo:
+        """Returns the dataset metadata."""
+        # TODO(segmentation_fluorescence_u2os): Specifies the tfds.core.DatasetInfo object
+        return tfds.core.DatasetInfo(
+            builder=self,
+            description=_DESCRIPTION,
+            features=tfds.features.FeaturesDict(
+                {
+                    # These are the features of your dataset like images, labels ...
+                    "image": tfds.features.Image(
+                        shape=(None, None, 1), dtype=tf.uint16
+                    ),
+                    "label": tfds.features.Image(
+                        shape=(None, None, 4),
+                    ),
+                }
+            ),
+            # If there's a common (input, target) tuple from the
+            # features, specify them here. They'll be used if
+            # `as_supervised=True` in `builder.as_dataset`.
+            supervised_keys=("image", "label"),  # Set to `None` to disable
+            homepage="https://dataset-homepage/",
+            citation=_CITATION,
+        )
+
+    def _split_generators(self, dl_manager: tfds.download.DownloadManager):
+        """Returns SplitGenerators."""
+        path_to_images = (
+            dl_manager.download_and_extract(
+                "https://data.broadinstitute.org/bbbc/BBBC039/images.zip"
+            )
+            / "images"
+        )
+
+        path_to_masks = (
+            dl_manager.download_and_extract(
+                "https://data.broadinstitute.org/bbbc/BBBC039/masks.zip"
+            )
+            / "masks"
+        )
+
+        path_to_metadata = (
+            dl_manager.download_and_extract(
+                "https://data.broadinstitute.org/bbbc/BBBC039/metadata.zip"
+            )
+            / "metadata"
+        )
+
+        return {
+            "train": self._generate_examples(
+                path_to_metadata / "training.txt", path_to_images, path_to_masks
+            ),
+            "test": self._generate_examples(
+                path_to_metadata / "test.txt", path_to_images, path_to_masks
+            ),
+            "validation": self._generate_examples(
+                path_to_metadata / "validation.txt", path_to_images, path_to_masks
+            ),
+        }
+
+    def _generate_examples(self, path, images_path, masks_path):
+        """Yields examples."""
+        with open(path, "r") as f:
+            for line in f:
+                filename = line.strip()
+
+                if filename == "":
+                    continue
+
+                path_to_image = images_path / filename.replace(".png", ".tif")
+                path_to_label = masks_path / filename
+
+                image = tfds.core.lazy_imports.tifffile.imread(path_to_image)[..., None]
+
+                yield filename, {
+                    "image": image,
+                    "label": path_to_label,
+                }
diff --git a/deeptrack/datasets/segmentation_fluorescence_u2os/segmentation_fluorescence_u2os_test.py b/deeptrack/datasets/segmentation_fluorescence_u2os/segmentation_fluorescence_u2os_test.py
new file mode 100644
index 000000000..4cab40aaf
--- /dev/null
+++ b/deeptrack/datasets/segmentation_fluorescence_u2os/segmentation_fluorescence_u2os_test.py
@@ -0,0 +1,24 @@
+"""segmentation_fluorescence_u2os dataset."""
+
+import tensorflow_datasets as tfds
+from . import segmentation_fluorescence_u2os
+
+
+class SegmentationFluorescenceU2osTest(tfds.testing.DatasetBuilderTestCase):
+    """Tests for segmentation_fluorescence_u2os dataset."""
+    # TODO(segmentation_fluorescence_u2os):
+    DATASET_CLASS = segmentation_fluorescence_u2os.SegmentationFluorescenceU2os
+    SPLITS = {
+        'train': 3,  # Number of fake train examples
+        'test': 1,  # Number of fake test examples
+    }
+
+    # If you are calling `download/download_and_extract` with a dict, like:
+    #   dl_manager.download({'some_key': 'http://a.org/out.txt', ...})
+    # then the tests need to provide the fake output paths relative to the
+    # fake data directory
+    # DL_EXTRACT_RESULT = {'some_key': 'output_file1.txt', ...}
+
+
+if __name__ == '__main__':
+    tfds.testing.test_main()
diff --git a/deeptrack/datasets/segmentation_ssTEM_drosophila/__init__.py b/deeptrack/datasets/segmentation_ssTEM_drosophila/__init__.py
new file mode 100644
index 000000000..875c6f3f6
--- /dev/null
+++ b/deeptrack/datasets/segmentation_ssTEM_drosophila/__init__.py
@@ -0,0 +1,3 @@
+"""segmentation_ssTEM_drosophila dataset."""
+
+from .segmentation_ssTEM_drosophila import SegmentationSstemDrosophila
diff --git a/deeptrack/datasets/segmentation_ssTEM_drosophila/checksums.tsv b/deeptrack/datasets/segmentation_ssTEM_drosophila/checksums.tsv
new file mode 100644
index 000000000..aca5f66c9
--- /dev/null
+++ b/deeptrack/datasets/segmentation_ssTEM_drosophila/checksums.tsv
@@ -0,0 +1 @@
+https://github.com/unidesigner/groundtruth-drosophila-vnc/archive/refs/heads/master.zip 42686039 f7bd0db03c86b64440a16b60360ad60c0a4411f89e2c021c7ee2c8d6af3d7e86 groundtruth-drosophila-vnc-master.zip
diff --git a/deeptrack/datasets/segmentation_ssTEM_drosophila/dummy_data/TODO-add_fake_data_in_this_directory.txt b/deeptrack/datasets/segmentation_ssTEM_drosophila/dummy_data/TODO-add_fake_data_in_this_directory.txt
new file mode 100644
index 000000000..e69de29bb
diff --git a/deeptrack/datasets/segmentation_ssTEM_drosophila/segmentation_ssTEM_drosophila.py b/deeptrack/datasets/segmentation_ssTEM_drosophila/segmentation_ssTEM_drosophila.py
new file mode 100644
index 000000000..0321da8aa
--- /dev/null
+++ b/deeptrack/datasets/segmentation_ssTEM_drosophila/segmentation_ssTEM_drosophila.py
@@ -0,0 +1,106 @@
+"""segmentation_ssTEM_drosophila dataset."""
+
+import tensorflow_datasets as tfds
+import numpy as np
+
+_DESCRIPTION = """
+We provide two image stacks where each contains 20 sections from serial section Transmission Electron Microscopy (ssTEM)
+of the Drosophila melanogaster third instar larva ventral nerve cord.
+Both stacks measure approx. 4.7 x 4.7 x 1 microns with a resolution of 4.6 x 4.6 nm/pixel and section
+thickness of 45-50 nm.
+
+In addition to the raw image data,
+we provide for the first stack a dense labeling of neuron membranes (including orientation and junction),
+mitochondria, synapses and glia/extracellular space.
+The first stack serves as a training dataset, and a second stack of the same dimension can be used as a test dataset.
+
+labels: Series of merged labels including oriented membranes, membrane junctions,
+mitochondria and synapses. The pixels are labeled as follows:
+    0   -> membrane | (0°)
+    32  -> membrane / (45°)
+    64  -> membrane - (90°)
+    96  -> membrane \ (135°)
+    128 -> membrane "junction"
+    159 -> glia/extracellular
+    191 -> mitochondria
+    223 -> synapse
+    255 -> intracellular
+"""
+
+_CITATION = """
+@article{Gerhard2013,
+author = "Stephan Gerhard and Jan Funke and Julien Martel and Albert Cardona and Richard Fetter",
+title = "{Segmented anisotropic ssTEM dataset of neural tissue}",
+year = "2013",
+month = "11",
+url = "https://figshare.com/articles/dataset/Segmented_anisotropic_ssTEM_dataset_of_neural_tissue/856713",
+doi = "10.6084/m9.figshare.856713.v1"
+}
+"""
+
+
+class SegmentationSstemDrosophila(tfds.core.GeneratorBasedBuilder):
+    """DatasetBuilder for segmentation_ssTEM_drosophila dataset."""
+
+    VERSION = tfds.core.Version("1.0.2")
+    RELEASE_NOTES = {
+        "1.0.0": "Initial release.",
+        "1.0.1": "Fix loading of tif images.",
+        "1.0.2": "Fix ordering on unix systems.",
+    }
+
+    def _info(self) -> tfds.core.DatasetInfo:
+        """Returns the dataset metadata."""
+        # TODO(segmentation_ssTEM_drosophila): Specifies the tfds.core.DatasetInfo object
+        return tfds.core.DatasetInfo(
+            builder=self,
+            description=_DESCRIPTION,
+            features=tfds.features.FeaturesDict(
+                {
+                    # These are the features of your dataset like images, labels ...
+                    "image": tfds.features.Image(shape=(None, None, 1)),
+                    "label": tfds.features.Image(shape=(None, None, 1)),
+                }
+            ),
+            # If there's a common (input, target) tuple from the
+            # features, specify them here. They'll be used if
+            # `as_supervised=True` in `builder.as_dataset`.
+            supervised_keys=("image", "label"),  # Set to `None` to disable
+            homepage="https://dataset-homepage/",
+            citation=_CITATION,
+        )
+
+    def _split_generators(self, dl_manager: tfds.download.DownloadManager):
+        """Returns SplitGenerators."""
+
+        path = dl_manager.download_and_extract(
+            "https://github.com/unidesigner/groundtruth-drosophila-vnc/archive/refs/heads/master.zip"
+        )
+        return {
+            "train": self._generate_examples(
+                path / "groundtruth-drosophila-vnc-master" / "stack1"
+            ),
+        }
+
+    def _generate_examples(self, path):
+        """Yields examples."""
+
+        raws = path / "raw"
+        labels = path / "labels"
+
+        raw_paths = list(raws.glob("*.tif"))
+        label_paths = list(labels.glob("*.png"))
+
+        # sort paths by name of file
+        raw_paths.sort(key=lambda x: x.name)
+        label_paths.sort(key=lambda x: x.name)
+
+        for r, l in zip(raw_paths, label_paths):
+            assert r.stem[-2:] == l.stem[-2:], "Mismatched raw and label files"
+
+            image = tfds.core.lazy_imports.tifffile.imread(r)
+            image = np.expand_dims(image, axis=-1)
+            yield int(r.stem), {
+                "image": image,
+                "label": l,
+            }
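# A hedged sketch (editor's illustration, not part of the patch): mapping the
# merged ssTEM label values documented above to dense class indices, e.g. for
# training a semantic segmentation model. The grouping into nine classes is an
# assumption; the raw values come from the dataset description.
import numpy as np

LABEL_VALUES = (0, 32, 64, 96, 128, 159, 191, 223, 255)

def to_class_indices(label_image: np.ndarray) -> np.ndarray:
    """Maps raw label values (0, 32, ..., 255) to dense class ids 0..8."""
    values = np.array(LABEL_VALUES)
    # searchsorted is exact here because every pixel holds one of LABEL_VALUES
    return np.searchsorted(values, label_image)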
diff --git a/deeptrack/datasets/segmentation_ssTEM_drosophila/segmentation_ssTEM_drosophila_test.py b/deeptrack/datasets/segmentation_ssTEM_drosophila/segmentation_ssTEM_drosophila_test.py
new file mode 100644
index 000000000..422fb7381
--- /dev/null
+++ b/deeptrack/datasets/segmentation_ssTEM_drosophila/segmentation_ssTEM_drosophila_test.py
@@ -0,0 +1,24 @@
+"""segmentation_ssTEM_drosophila dataset."""
+
+import tensorflow_datasets as tfds
+from . import segmentation_ssTEM_drosophila
+
+
+class SegmentationSstemDrosophilaTest(tfds.testing.DatasetBuilderTestCase):
+    """Tests for segmentation_ssTEM_drosophila dataset."""
+    # TODO(segmentation_ssTEM_drosophila):
+    DATASET_CLASS = segmentation_ssTEM_drosophila.SegmentationSstemDrosophila
+    SPLITS = {
+        'train': 3,  # Number of fake train examples
+        'test': 1,  # Number of fake test examples
+    }
+
+    # If you are calling `download/download_and_extract` with a dict, like:
+    #   dl_manager.download({'some_key': 'http://a.org/out.txt', ...})
+    # then the tests need to provide the fake output paths relative to the
+    # fake data directory
+    # DL_EXTRACT_RESULT = {'some_key': 'output_file1.txt', ...}
+
+
+if __name__ == '__main__':
+    tfds.testing.test_main()
diff --git a/deeptrack/extras/datasets.py b/deeptrack/extras/datasets.py
index fa3c9a706..031c3d772 100644
--- a/deeptrack/extras/datasets.py
+++ b/deeptrack/extras/datasets.py
@@ -46,6 +46,7 @@
     "CellData": ("1CJW7msDiI7xq7oMce4l9tRkNN6O5eKtj", "CellData", ""),
     "CellMigData": ("1vRsWcxjbTz6rffCkrwOfs_ezPvUjPwGw", "CellMigData", ""),
     "BFC2Cells": ("1lHgJdG5I3vRnU_DRFwTr_c69nx1Xkd3X", "BFC2Cells", ""),
+    "STrajCh": ("1wXCSzvHuLwz1dywxUu2aQXlqbgf2V8r3", "STrajCh", "")
 }
- """ +# Parameters +# ---------- +# data : tuple or array_like +# Any property that returns a single image or a tuple of two objects, +# where the first is an array_like. +# """ - __distributed__ = False +# __distributed__ = False - def __init__( - self, data: Iterator or PropertyLike[float or ArrayLike[float]], **kwargs - ): - super().__init__(data=data, **kwargs) +# def __init__( +# self, data: Iterator or PropertyLike[float or ArrayLike[float]], **kwargs +# ): +# super().__init__(data=data, **kwargs) - def get(self, *ignore, data, **kwargs): - return data +# def get(self, *ignore, data, **kwargs): +# return data - def _process_properties(self, properties): - properties = super()._process_properties(properties) +# def _process_properties(self, properties): +# properties = super()._process_properties(properties) - data = properties["data"] +# data = properties["data"] - if isinstance(data, tuple): - properties["data"] = data[0] - if isinstance(data[1], dict): - properties.update(data[1]) - else: - properties["label"] = data[1] - return properties +# if isinstance(data, tuple): +# properties["data"] = data[0] +# if isinstance(data[1], dict): +# properties.update(data[1]) +# else: +# properties["label"] = data[1] +# return properties class Label(Feature): @@ -1783,4 +1796,330 @@ def get(self, image, factor, **kwargs): image, (factor[0], factor[1]) + (1,) * (image.ndim - 2), np.mean ) - return image \ No newline at end of file + return image + + +class TensorflowDataset(Feature): + """Loads a tensorflow dataset. Requires tensorflow_datasets to be installed. + + This feature loads a tensorflow dataset from its name. Check the + `tensorflow datasets `_ + for a list of available datasets. + + This feature will download the dataset if it is not already present. Each key of + the dataset will be added as a property to the feature. As such, separate pipelines + can be created for each key:: + + dataset = dt.TensorflowDataset("mnist") + image_pipeline = dataset.image + label_pipeline = dataset.label + + Alternatively, they can be loaded in conjunction:: + + dataset = dt.TensorflowDataset("mnist", keys=["image", "label"]) + image, label = dataset() + + Parameters + ---------- + dataset_name : str + The name of the dataset to load + split : str + The split of the dataset to load. Defaults to "train". + See `tensorflow splits `_ for more information on splits. + shuffle_files : bool + Whether to shuffle the files. Defaults to True. + keys : list of str + The keys to load from the dataset. Only used when calling the feature directly. + Any key can be accessed as a property of the feature. + + Examples + -------- + >>> dataset = dt.TensorflowDataset("mnist", split="train") + >>> image_pipeline = dataset.image + >>> label_pipeline = dataset.label + """ + + __distributed__ = False + + def __init__( + self, + dataset_name: str, + split="train", + shuffle_files=True, + keys=["image", "label"], + **kwargs + ): + + self.tfds = None + try: + import tensorflow_datasets as tfds + + self.tfds = tfds + except ImportError: + raise ImportError( + "Tensorflow Datasets is not installed. 
Install it with `pip install tensorflow_datasets`" + ) + + dataset = tfds.load(dataset_name, split=split, shuffle_files=shuffle_files) + dataset_size = tfds.builder(dataset_name).info.splits[split].num_examples + + self.dataset = dataset + self.split = split + self.shuffle_files = shuffle_files + self.size = dataset_size + + # get the keys of the dataset + keys = list(dataset.element_spec.keys()) + attr_getters = {key: lambda output, key=key: output[key] for key in keys} + + self.dataset_iterator = iter(tfds.as_numpy(self.dataset)) + + super().__init__( + output=self.get_next_output, keys=keys, **attr_getters, **kwargs + ) + + def take(self, n): + """Takes the n next elements of the dataset. Returns a dictionary of lists.""" + + # Prepare output + keys = self.dataset.element_spec.keys() + output_dict = {key: [] for key in keys} + + for data in self.dataset.take(n): + for key in keys: + output_dict[key].append(data[key]) + + return output_dict + + def reset_dataset(self): + """Resets the dataset iterator to the beginning of the dataset.""" + self.dataset_iterator = iter(self.tfds.as_numpy(self.dataset)) + + def get_next_output(self): + try: + return next(self.dataset_iterator) + except StopIteration: + self.reset_dataset() + return next(self.dataset_iterator) + + def get(self, _, keys, output, **kwargs): + return [output[key] for key in keys] + + +class NonOverlapping(Feature): + + __distributed__ = False + + def __init__(self, feature, min_distance=1, max_attempts=100, **kwargs): + """Places a list of volumes non-overlapping. + + Ensures that the volumes are placed non-overlapping by resampling the position of the volumes until they are non-overlapping. + If the maximum number of attempts is exceeded, a new list of volumes is generated by updating feature. + + Note: This feature does not work with non-volumetric scatterers, such as MieScatterers. + + Parameters + ---------- + feature : Feature + The feature that creates the list of volumes to be placed non-overlapping. + min_distance : float, optional + The minimum distance between volumes in pixels, by default 1 + max_attempts : int, optional + The maximum number of attempts to place the volumes non-overlapping. If this number is exceeded, a new list of volumes is generated, by default 100. + """ + super().__init__(min_distance=min_distance, max_attempts=max_attempts, **kwargs) + self.feature = self.add_feature(feature, **kwargs) + + def get(self, _, min_distance, max_attempts, **kwargs): + """ + Parameters + ---------- + list_of_volumes : list of 3d arrays + The volumes to be placed non-overlapping + min_distance : float + The minimum distance between volumes in pixels. + max_attempts : int + The maximum number of attempts to place the volumes non-overlapping. If this number is exceeded, a new list of volumes is generated. + """ + while True: + list_of_volumes = self.feature() + + if not isinstance(list_of_volumes, list): + list_of_volumes = [list_of_volumes] + + for attempt in range(max_attempts): + + list_of_volumes = [ + self._resample_volume_position(volume) for volume in list_of_volumes + ] + + if self._check_non_overlapping(list_of_volumes): + return list_of_volumes + + self.feature.update() + + def _check_non_overlapping(self, list_of_volumes): + """ + Checks that the non-zero voxels of the volumes in list_of_volumes are at least min_distance apart. + Each volume is a 3 dimnesional array. The first two dimensions are the x and y dimensions, and the third dimension is the z dimension. 
+ The volumes are expected to have a position attribute. + + Parameters + ---------- + list_of_volumes : list of 3d arrays + The volumes to be checked for non-overlapping + """ + + from .optics import _get_position + + min_distance = self.min_distance() + + # The position of the top left corner of each volume (index (0, 0, 0)) + volume_positions_1 = [ + _get_position(volume, mode="corner", return_z=True).astype(int) + for volume in list_of_volumes + ] + + # The position of the bottom right corner of each volume (index (-1, -1, -1)) + volume_positions_2 = [ + p0 + np.array(v.shape) for v, p0 in zip(list_of_volumes, volume_positions_1) + ] + + # (x1, y1, z1, x2, y2, z2) for each volume + volume_bounding_cube = [ + [*p0, *p1] for p0, p1 in zip(volume_positions_1, volume_positions_2) + ] + + for i, j in itertools.combinations(range(len(list_of_volumes)), 2): + # If the bounding cubes do not overlap, the volumes do not overlap + if self._check_bounding_cubes_non_overlapping( + volume_bounding_cube[i], volume_bounding_cube[j], min_distance + ): + continue + + # If the bounding cubes overlap, get the overlapping region of each volume + overlapping_cube = self._get_overlapping_cube( + volume_bounding_cube[i], volume_bounding_cube[j] + ) + overlapping_volume_1 = self._get_overlapping_volume( + list_of_volumes[i], volume_bounding_cube[i], overlapping_cube + ) + overlapping_volume_2 = self._get_overlapping_volume( + list_of_volumes[j], volume_bounding_cube[j], overlapping_cube + ) + + # If either the overlapping regions are empty, the volumes do not overlap (done for speed) + if np.all(overlapping_volume_1 == 0) or np.all(overlapping_volume_2 == 0): + continue + + # If the products of the overlapping regions are non-zero, return False + if np.any(overlapping_volume_1 * overlapping_volume_2): + return False + + # Finally, check that the non-zero voxels of the volumes are at least min_distance apart + if not self._check_volumes_non_overlapping( + overlapping_volume_1, overlapping_volume_2, min_distance + ): + return False + + return True + + def _check_bounding_cubes_non_overlapping( + self, bounding_cube_1, bounding_cube_2, min_distance + ): + + # bounding_cube_1 and bounding_cube_2 are (x1, y1, z1, x2, y2, z2) + # Check that the bounding cubes are non-overlapping + return ( + bounding_cube_1[0] > bounding_cube_2[3] + min_distance + or bounding_cube_1[1] > bounding_cube_2[4] + min_distance + or bounding_cube_1[2] > bounding_cube_2[5] + min_distance + or bounding_cube_1[3] < bounding_cube_2[0] - min_distance + or bounding_cube_1[4] < bounding_cube_2[1] - min_distance + or bounding_cube_1[5] < bounding_cube_2[2] - min_distance + ) + + def _get_overlapping_cube(self, bounding_cube_1, bounding_cube_2): + """ + Returns the overlapping region of the two bounding cubes. + """ + return [ + max(bounding_cube_1[0], bounding_cube_2[0]), + max(bounding_cube_1[1], bounding_cube_2[1]), + max(bounding_cube_1[2], bounding_cube_2[2]), + min(bounding_cube_1[3], bounding_cube_2[3]), + min(bounding_cube_1[4], bounding_cube_2[4]), + min(bounding_cube_1[5], bounding_cube_2[5]), + ] + + def _get_overlapping_volume(self, volume, bounding_cube, overlapping_cube): + """ + Returns the overlapping region of the volume and the overlapping cube. + + Parameters + ---------- + volume : 3d array + The volume to be checked for non-overlapping + bounding_cube : list of 6 floats + The bounding cube of the volume. 
+ The first three elements are the position of the top left corner of the volume, and the last three elements are the position of the bottom right corner of the volume. + overlapping_cube : list of 6 floats + The overlapping cube of the volume and the other volume. + """ + # The position of the top left corner of the overlapping cube in the volume + overlapping_cube_position = np.array(overlapping_cube[:3]) - np.array( + bounding_cube[:3] + ) + + # The position of the bottom right corner of the overlapping cube in the volume + overlapping_cube_end_position = np.array(overlapping_cube[3:]) - np.array( + bounding_cube[:3] + ) + + # cast to int + overlapping_cube_position = overlapping_cube_position.astype(int) + overlapping_cube_end_position = overlapping_cube_end_position.astype(int) + + return volume[ + overlapping_cube_position[0] : overlapping_cube_end_position[0], + overlapping_cube_position[1] : overlapping_cube_end_position[1], + overlapping_cube_position[2] : overlapping_cube_end_position[2], + ] + + def _check_volumes_non_overlapping(self, volume_1, volume_2, min_distance): + """ + Checks that the non-zero voxels of the volumes are at least min_distance apart. + """ + # Get the positions of the non-zero voxels of each volume + positions_1 = np.argwhere(volume_1) + positions_2 = np.argwhere(volume_2) + + # If the volumes are not the same size, the positions of the non-zero voxels of each volume need to be scaled + if volume_1.shape != volume_2.shape: + positions_1 = ( + positions_1 * np.array(volume_2.shape) / np.array(volume_1.shape) + ) + positions_1 = positions_1.astype(int) + + # Check that the non-zero voxels of the volumes are at least min_distance apart + import scipy.spatial.distance + + return np.all( + scipy.spatial.distance.cdist(positions_1, positions_2) > min_distance + ) + + def _resample_volume_position(self, volume): + """ Draws a new position for the volume. 
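# A hedged sketch (editor's illustration, not part of the patch) of the
# NonOverlapping feature added above: it wraps a feature that produces
# volumetric scatterers and resamples their positions until no two non-zero
# regions come within min_distance pixels. The Sphere parameters are
# illustrative, and `^ 5` assumes the usual DeepTrack repeat operator.
import numpy as np
import deeptrack as dt

scatterer = dt.Sphere(
    position=lambda: np.random.uniform(0, 128, size=2),  # position in pixels
    radius=2e-6,
)

non_overlapping = dt.NonOverlapping(scatterer ^ 5, min_distance=4)
# Downstream, the result is imaged as usual, e.g. optics(non_overlapping).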
""" + + for pdict in volume.properties: + if "position" in pdict and "_position_sampler" in pdict: + new_position = pdict["_position_sampler"]() + if isinstance(new_position, Quantity): + new_position = new_position.to("pixel").magnitude + pdict["position"] = new_position + + return volume + + +# Alias +Dataset = TensorflowDataset \ No newline at end of file diff --git a/deeptrack/generators.py b/deeptrack/generators.py index 7d1cda5cf..6766c4bf1 100644 --- a/deeptrack/generators.py +++ b/deeptrack/generators.py @@ -215,6 +215,7 @@ def __init__( shuffle_batch=True, ndim=4, max_epochs_per_sample=np.inf, + use_multi_inputs=False, verbose=1, ): if label_function is None and batch_function is None: @@ -229,9 +230,7 @@ def __init__( if min_data_size is None: min_data_size = min(batch_size * 10, max_data_size - 1) - assert ( - min_data_size < max_data_size - ), "max_data_size needs to be larger than min_data_size" + max_data_size = max(max_data_size, min_data_size + 1) self.min_data_size = min_data_size self.max_data_size = max_data_size @@ -242,6 +241,7 @@ def __init__( self.batch_size = batch_size self.shuffle_batch = shuffle_batch self.max_epochs_per_sample = max_epochs_per_sample + self.use_multi_inputs = use_multi_inputs self.ndim = ndim self.augmentation = augmentation @@ -352,7 +352,18 @@ def __getitem__(self, idx): data = [self.batch_function(d["data"]) for d in subset] labels = [self.label_function(d["data"]) for d in subset] - return np.array(data), np.array(labels) + if self.use_multi_inputs: + return ( + tuple( + [ + np.stack(list(map(np.array, _data)), axis=0) + for _data in list(zip(*data)) + ] + ), + np.array(labels), + ) + else: + return np.array(data), np.array(labels) def __len__(self): steps = int((self.min_data_size // self._batch_size)) diff --git a/deeptrack/holography.py b/deeptrack/holography.py index dc1c627a0..f75e6176d 100644 --- a/deeptrack/holography.py +++ b/deeptrack/holography.py @@ -1,22 +1,29 @@ +from deeptrack.image import maybe_cupy from .features import Feature import numpy as np -def get_propagation_matrix(shape, to_z, pixel_size, wavelength): +def get_propagation_matrix(shape, to_z, pixel_size, wavelength, dx=0, dy=0): k = 2 * np.pi / wavelength yr, xr, *_ = shape - x = 2 * np.pi / pixel_size * np.arange(-(xr / 2 - 1 / 2), (xr / 2 + 1 / 2), 1) / xr - y = 2 * np.pi / pixel_size * np.arange(-(yr / 2 - 1 / 2), (yr / 2 + 1 / 2), 1) / yr + x = np.arange(0, xr, 1) - xr / 2 + (xr % 2) / 2 + y = np.arange(0, yr, 1) - yr / 2 + (yr % 2) / 2 + + x = 2 * np.pi / pixel_size * x / xr + y = 2 * np.pi / pixel_size * y / yr + KXk, KYk = np.meshgrid(x, y) + KXk = maybe_cupy(KXk.astype(complex)) + KYk = maybe_cupy(KYk.astype(complex)) - K = np.real( - np.sqrt(np.array(1 - (KXk / k) ** 2 - (KYk / k) ** 2, dtype=complex)) - ) + K = np.real(np.sqrt(1 - (KXk / k) ** 2 - (KYk / k) ** 2)) C = np.fft.fftshift(((KXk / k) ** 2 + (KYk / k) ** 2 < 1) * 1.0) - return C * np.fft.fftshift(np.exp(k * 1j * to_z * (K - 1))) + return C * np.fft.fftshift( + np.exp(k * 1j * (to_z * (K - 1) - dx * KXk / k - dy * KYk / k)) + ) class Rescale(Feature): diff --git a/deeptrack/math.py b/deeptrack/math.py index 1c1f3b982..8b9d0e911 100644 --- a/deeptrack/math.py +++ b/deeptrack/math.py @@ -17,7 +17,7 @@ from . 
diff --git a/deeptrack/math.py b/deeptrack/math.py
index 1c1f3b982..8b9d0e911 100644
--- a/deeptrack/math.py
+++ b/deeptrack/math.py
@@ -17,7 +17,7 @@
 from . import utils
 from .features import Feature
-from .image import Image
+from .image import Image, strip
 from .types import PropertyLike
 
@@ -279,6 +279,80 @@ def __init__(self, ksize: PropertyLike[int] = 3, **kwargs):
         super().__init__(np.mean, ksize=ksize, **kwargs)
 
 
+class MaxPooling(Pool):
+    """Apply max pooling to images.
+
+    Parameters
+    ----------
+    ksize : int
+        Size of the pooling kernel.
+    cval : number
+        Value to pad edges with if necessary. Default 0.
+    func_kwargs : dict
+        Additional parameters sent to the pooling function.
+    """
+
+    def __init__(self, ksize: PropertyLike[int] = 3, **kwargs):
+        super().__init__(np.max, ksize=ksize, **kwargs)
+
+
+class MinPooling(Pool):
+    """Apply min pooling to images.
+
+    Parameters
+    ----------
+    ksize : int
+        Size of the pooling kernel.
+    cval : number
+        Value to pad edges with if necessary. Default 0.
+    func_kwargs : dict
+        Additional parameters sent to the pooling function.
+    """
+
+    def __init__(self, ksize: PropertyLike[int] = 3, **kwargs):
+        super().__init__(np.min, ksize=ksize, **kwargs)
+
+
+class MedianPooling(Pool):
+    """Apply median pooling to images.
+
+    Parameters
+    ----------
+    ksize : int
+        Size of the pooling kernel.
+    cval : number
+        Value to pad edges with if necessary. Default 0.
+    func_kwargs : dict
+        Additional parameters sent to the pooling function.
+    """
+
+    def __init__(self, ksize: PropertyLike[int] = 3, **kwargs):
+        super().__init__(np.median, ksize=ksize, **kwargs)
+
+
+class Resize(Feature):
+    """Resize an image. This is a wrapper around cv2.resize and takes the same arguments.
+    Note that the axis order differs between cv2 and numpy: cv2's dsize is given
+    as (width, height), while numpy shapes are (height, width). This is reflected
+    in the default values of the arguments.
+
+    Parameters
+    ----------
+    dsize : tuple
+        Size to resize to.
+    """
+
+    def __init__(self, dsize: PropertyLike[tuple] = (256, 256), **kwargs):
+        super().__init__(dsize=dsize, **kwargs)
+
+    def get(self, image, dsize, **kwargs):
+        import cv2
+
+        return utils.safe_call(
+            cv2.resize, positional_args=[strip(image), dsize], **kwargs
+        )
+
+
 # OPENCV2 blur
 
 try:
diff --git a/deeptrack/models/__init__.py b/deeptrack/models/__init__.py
index 8841638df..a2070a5fe 100644
--- a/deeptrack/models/__init__.py
+++ b/deeptrack/models/__init__.py
@@ -6,6 +6,7 @@
 from .lodestar import *
 from .gans import *
 from .gnns import *
+from .vaes import *
 
 # from .mrcnn import *
 # from .yolov1 import *
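# A hedged sketch (editor's illustration, not part of the patch): the new
# pooling and resize features compose like any other DeepTrack features,
# assuming they are re-exported at the package top level like the rest of the
# math module. MaxPooling reduces each ksize x ksize block to its maximum;
# Resize wraps cv2.resize (requires opencv-python), so dsize is (width, height).
import numpy as np
import deeptrack as dt

image = np.random.rand(128, 128)

pipeline = dt.MaxPooling(ksize=2) >> dt.Resize(dsize=(32, 32))
small = pipeline(image)  # pooled to 64x64, then resized to 32x32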
loss : str or keras loss function The loss function of the network. layer_function : Callable[int] -> keras layer @@ -62,6 +66,7 @@ def __init__( steps_per_pooling=1, dropout=(), dense_top=True, + flatten_method="flatten", number_of_outputs=3, output_activation=None, output_kernel_size=3, @@ -108,7 +113,16 @@ def __init__( # DENSE TOP if dense_top: - layer = layers.Flatten()(layer) + if flatten_method == "flatten": + layer = layers.Flatten()(layer) + elif flatten_method == "global_average": + layer = layers.GlobalAveragePooling2D()(layer) + elif flatten_method == "global_max": + layer = layers.GlobalMaxPooling2D()(layer) + else: + raise ValueError( + f"flatten_method must be one of 'flatten', 'global_average' or 'global_max', not {flatten_method}" + ) for dense_layer_dimension in dense_layers_dimensions: layer = dense_block(dense_layer_dimension)(layer) output_layer = layers.Dense( @@ -131,6 +145,105 @@ def __init__( convolutional = Convolutional +class FullyConvolutional(KerasModel): + """A fully convolutional neural network. + + Parameters + ---------- + input_shape : tuple + The shape of the input. + conv_layers_dimensions : tuple of int or tuple of tuple of int + The number of filters in each convolutional layer. Examples: + - (32, 64, 128) results in + 1. Conv2D(32, 3, activation='relu', padding='same') + 2. MaxPooling2D() + 3. Conv2D(64, 3, activation='relu', padding='same') + 4. MaxPooling2D() + 5. Conv2D(128, 3, activation='relu', padding='same') + 6. MaxPooling2D() + 7. Conv2D(number_of_outputs, 3, activation=output_activation, padding='same') + + - ((32, 32), (64, 64), (128, 128)) results in + 1. Conv2D(32, 3, activation='relu', padding='same') + 2. Conv2D(32, 3, activation='relu', padding='same') + 3. MaxPooling2D() + 4. Conv2D(64, 3, activation='relu', padding='same') + 5. Conv2D(64, 3, activation='relu', padding='same') + 6. MaxPooling2D() + 7. Conv2D(128, 3, activation='relu', padding='same') + 8. Conv2D(128, 3, activation='relu', padding='same') + 9. MaxPooling2D() + 10. Conv2D(number_of_outputs, 3, activation=output_activation, padding='same') + omit_last_pooling : bool + If True, the last MaxPooling2D layer is omitted. Default is False + number_of_outputs : int + The number of output channels. + output_activation : str + The activation function of the output layer. + output_kernel_size : int + The kernel size of the output layer. + convolution_block : function or str + The function used to create the convolutional blocks. Defaults to + "convolutional" + pooling_block : function or str + The function used to create the pooling blocks. 
Defaults to "pooling" + """ + + def __init__( + self, + input_shape, + conv_layers_dimensions, + omit_last_pooling=False, + number_of_outputs=1, + output_activation="sigmoid", + output_kernel_size=3, + convolution_block="convolutional", + pooling_block="pooling", + **kwargs, + ): + + # Update layer functions + convolution_block = as_block(convolution_block) + pooling_block = as_block(pooling_block) + + # INITIALIZE DEEP LEARNING NETWORK + + if isinstance(input_shape, list): + network_input = [layers.Input(shape) for shape in input_shape] + inputs = layers.Concatenate(axis=-1)(network_input) + else: + network_input = layers.Input(input_shape) + inputs = network_input + + layer = inputs + + # CONVOLUTIONAL BASIS + for idx, depth_dimensions in enumerate(conv_layers_dimensions): + + if isinstance(depth_dimensions, int): + depth_dimensions = (depth_dimensions,) + + for conv_layer_dimension in depth_dimensions: + layer = convolution_block(conv_layer_dimension)(layer) + + # add pooling layer + if idx < len(conv_layers_dimensions) - 1 or not omit_last_pooling: + layer = pooling_block(conv_layer_dimension)(layer) + + # OUTPUT + output_layer = layers.Conv2D( + number_of_outputs, + kernel_size=output_kernel_size, + activation=output_activation, + padding="same", + name="output", + )(layer) + + model = models.Model(network_input, output_layer) + + super().__init__(model, **kwargs) + + class UNet(KerasModel): """Creates and compiles a U-Net. @@ -360,49 +473,163 @@ def __init__( super().__init__(model, **kwargs) -class ViT(KerasModel): +class ClsTransformerBaseModel(KerasModel): + """Base class for Transformer models with classification heads. + + Parameters + ---------- + inputs : list of keras.layers.Input + Input layers of the network. + encoder : tf.Tensor + Encoded representation of the input. + number_of_transformer_layers : int + Number of Transformer layers in the model. + base_fwd_mlp_dimensions : int + Size of the hidden layers in the forward MLP of the Transformer layers. + transformer_block : str or keras.layers.Layer + The Transformer layer to use. By default, uses the TransformerEncoder + block. See .layers for available Transformer layers. + cls_layer_dimensions : int, optional + Size of the ClassToken layer. If None, no ClassToken layer is added. + node_decoder_layer_dimensions: list of ints + List of the number of units in each dense layer of the nodes' decoder. The + number of layers is inferred from the length of this list. + number_of_cls_outputs: int + Number of output cls features. + number_of_nodes_outputs: int + Number of output nodes features. + cls_output_activation: str or activation function or layer + Activation function for the output cls layer. See keras docs for accepted strings. + node_output_activation: str or activation function or layer + Activation function for the output node layer. See keras docs for accepted strings. + transformer_block: str, keras.layers.Layer, or callable + The transformer layer. See .layers for available transformer blocks. + dense_block: str, keras.layers.Layer, or callable + The dense block to use for the nodes' decoder. + cls_norm_block: str, keras.layers.Layer, or callable + The normalization block to use for the cls layer. + use_learnable_positional_embs : bool + Whether to use learnable positional embeddings. + output_type: str + Type of output. Either "cls", "cls_rep", "nodes" or + "full". If 'key' is not a supported output type, then + the model output will be the concatenation of the node + and cls predictions ("full"). 
+ kwargs : dict + Additional arguments to be passed to the KerasModel constructor. + """ + + def __init__( + self, + inputs, + encoder, + number_of_transformer_layers=12, + base_fwd_mlp_dimensions=256, + cls_layer_dimension=None, + number_of_cls_outputs=1, + cls_output_activation="linear", + transformer_block=TransformerEncoderLayer( + normalization="LayerNormalization", + dropout=0.1, + norm_kwargs={"epsilon": 1e-6}, + ), + dense_block=DenseBlock( + activation=GELU, + normalization="LayerNormalization", + norm_kwargs={"epsilon": 1e-6}, + ), + positional_embedding_block=Identity(), + output_type="cls", + transformer_input_kwargs={}, + **kwargs, + ): + transformer_block = as_block(transformer_block) + dense_block = as_block(dense_block) + positional_embedding_block = as_block(positional_embedding_block) + + layer = ClassToken(name="class_token")(encoder) + + layer = positional_embedding_block( + layer.shape[-1], name="Transformer/posembed_input" + )(layer) + + # Bottleneck path, Transformer layers + for n in range(number_of_transformer_layers): + layer, _ = transformer_block( + base_fwd_mlp_dimensions, name=f"Transformer/encoderblock_{n}" + )(layer, **transformer_input_kwargs) + + # Extract global representation + cls_rep = layers.Lambda(lambda x: x[:, 0], name="RetrieveClassToken")(layer) + + # Process cls features + cls_layer = cls_rep + if cls_layer_dimension is not None: + cls_layer = dense_block(cls_layer_dimension, name="cls_mlp")(cls_layer) + + cls_output = layers.Dense( + number_of_cls_outputs, + activation=cls_output_activation, + name="cls_prediction", + )(cls_layer) + + output_dict = { + "cls_rep": cls_rep, + "cls": cls_output, + } + try: + outputs = output_dict[output_type] + except KeyError: + outputs = output_dict["cls"] + + model = models.Model(inputs=inputs, outputs=outputs) + + super().__init__(model, **kwargs) + + +class ViT(ClsTransformerBaseModel): """ Creates and compiles a ViT model. input_shape : tuple of ints Size of the images to be analyzed. patch_shape : int Size of the patches to be extracted from the input images. - num_layers : int - Number of Transformer layers in the ViT model. hidden_size : int Size of the hidden layers in the ViT model. - number_of_heads : int - Number of attention heads in each Transformer layer. - fwd_mlp_dim : int + number_of_transformer_layers : int + Number of Transformer layers in the model. + base_fwd_mlp_dimensions : int Size of the hidden layers in the forward MLP of the Transformer layers. - dropout : float - Dropout rate of the forward MLP in the Transformer layers. - representation_size : int - Size of the representation vector of the ViT head. By default, it is - equal to the hidden size of the last Transformer layer. - include_top : bool - Whether to include the top layer of the ViT model. - output_size : int - Size of the output layer of the ViT model. - output_activation : str or keras activation - The activation function of the output. + transformer_block : str or keras.layers.Layer + The Transformer layer to use. By default, uses the TransformerEncoder + block. See .layers for available Transformer layers. + number_of_cls_outputs: int + Number of output cls features. + cls_output_activation: str or activation function or layer + Activation function for the output cls layer. See keras docs for accepted strings. + use_learnable_positional_embs : bool + Whether to use learnable positional embeddings. + output_type: str + Type of output. Either "cls", "cls_rep", "nodes" or + "full". 
If the given key is not a supported output type,
+        the model falls back to the "cls" output.
     kwargs : dict
-        Additional arguments to be passed to the KerasModel constructor.
+        Additional arguments to be passed to the ClsTransformerBaseModel
+        constructor for advanced configuration.
     """

     def __init__(
         self,
-        input_shape=(224, 224, 3),
-        patch_shape=16,
-        num_layers=12,
-        hidden_size=768,
-        number_of_heads=12,
-        fwd_mlp_dim=3072,
-        dropout=0.1,
-        representation_size=None,
-        include_top=True,
-        output_size=1000,
-        output_activation="linear",
+        input_shape=(28, 28, 1),
+        patch_shape=4,
+        hidden_size=72,
+        number_of_transformer_layers=4,
+        base_fwd_mlp_dimensions=256,
+        number_of_cls_outputs=10,
+        cls_output_activation="linear",
+        output_type="cls",
+        positional_embedding_block=LearnablePositionEmbsLayer(),
         **kwargs,
     ):

@@ -411,39 +638,105 @@ def __init__(
         ), "image_size must be a multiple of patch_size"

         vit_input = layers.Input(shape=input_shape)
-        layer = layers.Conv2D(
+        encoder_layer = layers.Conv2D(
             filters=hidden_size,
             kernel_size=patch_shape,
             strides=patch_shape,
             padding="valid",
             name="embedding",
         )(vit_input)
-        layer = layers.Reshape((layer.shape[1] * layer.shape[2], hidden_size))(layer)
-        layer = ClassToken(name="class_token")(layer)
-        layer = LearnablePositionEmbs(name="Transformer/posembed_input")(layer)
-        for n in range(num_layers):
-            layer, _ = TransformerEncoder(
-                number_of_heads=number_of_heads,
-                fwd_mlp_dim=fwd_mlp_dim,
-                dropout=dropout,
-                name=f"Transformer/encoderblock_{n}",
+        encoder_layer = layers.Reshape(
+            (encoder_layer.shape[1] * encoder_layer.shape[2], hidden_size)
+        )(encoder_layer)
+
+        super().__init__(
+            inputs=vit_input,
+            encoder=encoder_layer,
+            number_of_transformer_layers=number_of_transformer_layers,
+            base_fwd_mlp_dimensions=base_fwd_mlp_dimensions,
+            number_of_cls_outputs=number_of_cls_outputs,
+            cls_output_activation=cls_output_activation,
+            output_type=output_type,
+            positional_embedding_block=positional_embedding_block,
+            **kwargs,
+        )
+
+
+class Transformer(KerasModel):
+    """
+    Creates and compiles a Transformer model.
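+
+    Operates on per-node input features together with an integer
+    edge/mask tensor of shape (None, 2) that is routed to the masked
+    attention layers (see the inputs built in ``__init__`` below).
+
+    Parameters
+    ----------
+    number_of_node_features : int
+        Number of features per input node.
+    dense_layer_dimensions : list of ints
+        List of the number of units in each dense layer of the node
+        encoder and decoder. The number of layers is inferred from the
+        length of this list.
+    number_of_transformer_layers : int
+        Number of Transformer layers in the model.
+    base_fwd_mlp_dimensions : int
+        Size of the hidden layers in the forward MLP of the Transformer layers.
+    number_of_node_outputs : int
+        Number of output node features.
+    node_output_activation : str or activation function or layer
+        Activation function for the output node layer. See keras docs for accepted strings.
+    transformer_block : str, keras.layers.Layer, or callable
+        The Transformer layer. See .layers for available Transformer layers.
+    dense_block : str, keras.layers.Layer, or callable
+        The dense block to use for the encoder and decoder.
+    positional_embedding_block : str, keras.layers.Layer, or callable
+        The positional embedding block applied before the Transformer stack.
+    kwargs : dict
+        Additional arguments to be passed to the KerasModel constructor.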
+ """ + + def __init__( + self, + number_of_node_features=3, + dense_layer_dimensions=(64, 96), + number_of_transformer_layers=12, + base_fwd_mlp_dimensions=256, + number_of_node_outputs=1, + node_output_activation="linear", + transformer_block=TransformerEncoderLayer( + normalization="LayerNormalization", + dropout=0.1, + norm_kwargs={"epsilon": 1e-6}, + ), + dense_block=DenseBlock( + activation=GELU, + normalization="LayerNormalization", + norm_kwargs={"epsilon": 1e-6}, + ), + positional_embedding_block=Identity(), + **kwargs, + ): + + dense_block = as_block(dense_block) + + transformer_input, transformer_mask = ( + layers.Input(shape=(None, number_of_node_features)), + layers.Input(shape=(None, 2), dtype="int32"), + ) + + layer = transformer_input + # Encoder for input features + for dense_layer_number, dense_layer_dimension in zip( + range(len(dense_layer_dimensions)), dense_layer_dimensions + ): + layer = dense_block( + dense_layer_dimension, + name="fencoder_" + str(dense_layer_number + 1), )(layer) - layer = layers.LayerNormalization( - epsilon=1e-6, name="Transformer/encoder_norm" + + layer = positional_embedding_block( + layer.shape[-1], name="Transformer/posembed_input" )(layer) - layer = layers.Lambda(lambda v: v[:, 0], name="ExtractToken")(layer) - if representation_size is not None: - layer = layers.Dense( - representation_size, name="pre_logits", activation="tanh" - )(layer) + # Bottleneck path, Transformer layers + for n in range(number_of_transformer_layers): + layer, _ = transformer_block( + base_fwd_mlp_dimensions, name=f"Transformer/encoderblock_{n}" + )(layer, edges=transformer_mask) - if include_top: - output_layer = layers.Dense( - output_size, name="head", activation=output_activation + # Decoder for node and edge features + for dense_layer_number, dense_layer_dimension in zip( + range(len(dense_layer_dimensions)), + reversed(dense_layer_dimensions), + ): + layer = dense_block( + dense_layer_dimension, + name="fdecoder" + str(dense_layer_number + 1), + **kwargs, )(layer) - else: - output_layer = layer - model = models.Model(inputs=vit_input, outputs=output_layer, name="ViT") + # Output layers + output_layer = layers.Dense( + number_of_node_outputs, + activation=node_output_activation, + name="node_prediction", + )(layer) + + model = models.Model( + [transformer_input, transformer_mask], + output_layer, + ) + super().__init__(model, **kwargs) diff --git a/deeptrack/models/embeddings.py b/deeptrack/models/embeddings.py index 05f81f579..79c21fce7 100644 --- a/deeptrack/models/embeddings.py +++ b/deeptrack/models/embeddings.py @@ -12,7 +12,9 @@ def build(self, input_shape): self.hidden_size = input_shape[-1] self.cls = tf.Variable( name="cls", - initial_value=cls_init(shape=(1, 1, self.hidden_size), dtype="float32"), + initial_value=cls_init( + shape=(1, 1, self.hidden_size), dtype="float32" + ), trainable=True, ) @@ -26,7 +28,9 @@ def call(self, inputs): @register("ClassToken") -def ClassTokenLayer(activation=None, normalization=None, norm_kwargs={}, **kwargs): +def ClassTokenLayer( + activation=None, normalization=None, norm_kwargs={}, **kwargs +): """ClassToken Layer that append a class token to the input. Can optionally perform normalization or some activation function. 
@@ -162,7 +166,9 @@ def build(self, input_shape): ) self.beta = tf.Variable( - initial_value=tf.constant_initializer(value=4)(shape=(1,), dtype="float32"), + initial_value=tf.constant_initializer(value=4)( + shape=(1,), dtype="float32" + ), name="beta", trainable=True, constraint=lambda value: tf.clip_by_value(value, 1, 10), diff --git a/deeptrack/models/gnns/generators.py b/deeptrack/models/gnns/generators.py index 73a08b1ba..952412e88 100644 --- a/deeptrack/models/gnns/generators.py +++ b/deeptrack/models/gnns/generators.py @@ -116,7 +116,7 @@ class ContinuousGraphGenerator(ContinuousGenerator): the speed gained from reusing images. The generator will continuously create new trainingdata during training, until `max_data_size` is reached, at which point the oldest data point is replaced. - + Parameters ---------- feature : dt.Feature @@ -153,9 +153,9 @@ def __getitem__(self, idx): batch, labels = super().__getitem__(idx) # Extracts minimum number of nodes in the batch - cropNodesTo = np.min( - list(map(lambda _batch: np.shape(_batch[0])[0], batch)) - ) + numofnodes = list(map(lambda _batch: np.shape(_batch[0])[0], batch)) + bgraph_idx = np.argmin(numofnodes) + cropTo = int(numofnodes[bgraph_idx]) inputs = [[], [], [], []] outputs = [[], [], []] @@ -166,26 +166,28 @@ def __getitem__(self, idx): # Clip node features to the minimum number of nodes # in the batch - nodef = batch[i][0][:cropNodesTo, :] - - last_node_idx = 0 - # Extracts index of the last node in the adjacency matrix - try: - last_node_idx = int( - np.where(batch[i][2][:, 1] <= cropNodesTo - 1)[0][-1] + 1 + nodef = batch[i][0][:cropTo, :] + + edge_dropouts = ( + np.any(batch[i][2] > cropTo - 1, axis=-1) + if i != bgraph_idx + else np.array( + [ + False, + ] + * np.shape(batch[i][2])[0] ) - except IndexError: - continue + ) # Clips edge features and adjacency matrix to the index # of the last node - edgef = batch[i][1][:last_node_idx] - adjmx = batch[i][2][:last_node_idx] - wghts = batch[i][3][:last_node_idx] + edgef = batch[i][1][~edge_dropouts] + adjmx = batch[i][2][~edge_dropouts] + wghts = batch[i][3][~edge_dropouts] # Clips node and edge solutions - nodesol = labels[i][0][:cropNodesTo] - edgesol = labels[i][1][:last_node_idx] + nodesol = labels[i][0][:cropTo] + edgesol = labels[i][1][~edge_dropouts] globsol = labels[i][2].astype(np.float) inputs[0].append(nodef) diff --git a/deeptrack/models/gnns/layers.py b/deeptrack/models/gnns/layers.py index 5b9e3441c..da03b1f2b 100644 --- a/deeptrack/models/gnns/layers.py +++ b/deeptrack/models/gnns/layers.py @@ -9,10 +9,9 @@ from ..utils import as_activation, as_normalization, single_layer_call, GELU -class FGNN(tf.keras.layers.Layer): +class MPN(tf.keras.layers.Layer): """ - Fingerprinting Graph Layer. - + Message-passing Graph Layer. Parameters ---------- filters : int @@ -23,10 +22,8 @@ class FGNN(tf.keras.layers.Layer): Normalization function of the layer. See keras and tfa docs for accepted strings. random_edge_dropout : float, optional Random edge dropout. - use_gates : bool, optional - Whether to use gated self-attention layers as update layer. Defaults to True. - att_layer_kwargs : dict, optional - Keyword arguments for the self-attention layer. + combine_layer : layer, optional + Layer to combine node features and aggregated messages. norm_kwargs : dict Arguments for the normalization function. 
kwargs : dict @@ -39,41 +36,47 @@ def __init__( activation=GELU, normalization="LayerNormalization", random_edge_dropout=False, - use_gates=True, - att_layer_kwargs={}, + combine_layer=tf.keras.layers.Lambda(lambda x: tf.concat(x, axis=-1)), norm_kwargs={}, **kwargs, ): super().__init__(**kwargs) self.filters = filters self.random_edge_dropout = random_edge_dropout + self.combine_layer = combine_layer - _multi_head_att_layer = ( - MultiHeadGatedSelfAttention - if use_gates - else MultiHeadSelfAttention - ) - # node update layer - self.update_layer = tf.keras.Sequential( + # message layer + self.message_layer = tf.keras.Sequential( [ - _multi_head_att_layer(**att_layer_kwargs), + layers.Dense(self.filters), as_activation(activation), as_normalization(normalization)(**norm_kwargs), ] ) - # message layer - self.message_layer = tf.keras.Sequential( + # node update layers + self.update_layer = layers.Dense(self.filters) + self.update_norm = tf.keras.Sequential( [ - layers.Dense(self.filters), as_activation(activation), as_normalization(normalization)(**norm_kwargs), ] ) + def nodes_handler(self, nodes): + return nodes, None + + def update_node_features(self, nodes, aggregated, learnable_embs, edges): + Combined = self.combine_layer([nodes, aggregated]) + updated_nodes = self.update_norm(self.update_layer(Combined)) + return updated_nodes + def call(self, inputs): nodes, edge_features, edges, edge_weights, edge_dropout = inputs + # Handles node features according to the implementation + nodes, learnable_embs = self.nodes_handler(nodes) + number_of_nodes = tf.shape(nodes)[1] number_of_edges = tf.shape(edges)[1] number_of_node_features = nodes.shape[-1] @@ -128,8 +131,9 @@ def aggregate(_, x): ) # Update node features, (nOfnode, filters) - Combined = [nodes, aggregated] - updated_nodes = self.update_layer(Combined) + updated_nodes = self.update_node_features( + nodes, aggregated, learnable_embs, edges + ) return ( updated_nodes, @@ -140,18 +144,199 @@ def aggregate(_, x): ) +@register("MPN") +def MPNLayer( + activation=GELU, + normalization="LayerNormalization", + norm_kwargs={}, + **kwargs, +): + """ + Message-passing Graph Layer. + Parameters + ---------- + filters : int + Number of filters. + activation : str or activation function or layer + Activation function of the layer. See keras docs for accepted strings. + normalization : str or normalization function or layer + Normalization function of the layer. See keras and tfa docs for accepted strings. + random_edge_dropout : float, optional + Random edge dropout. + combine_layer : layer, optional + Layer to combine node features and aggregated messages. + norm_kwargs : dict + Arguments for the normalization function. + kwargs : dict + Additional arguments. + """ + + def Layer(filters, **kwargs_inner): + kwargs_inner.update(kwargs) + layer = MPN( + filters, + activation=activation, + normalization=normalization, + norm_kwargs=norm_kwargs, + **kwargs, + ) + return lambda x: single_layer_call(x, layer, None, None, {}) + + return Layer + + +class GRUMPN(MPN): + """ + GRU Graph Layer. + Parameters + ---------- + filters : int + Number of filters. + activation : str or activation function or layer + Activation function of the layer. See keras docs for accepted strings. + normalization : str or normalization function or layer + Normalization function of the layer. See keras and tfa docs for accepted strings. + random_edge_dropout : float, optional + Random edge dropout. 
+ combine_layer : layer, optional + Layer to combine node features and aggregated messages. + norm_kwargs : dict + Arguments for the normalization function. + kwargs : dict + Additional arguments. + """ + + def __init__( + self, + filters, + **kwargs, + ): + super().__init__(filters, **kwargs) + + # node update layer + self.update_layer = layers.GRU(filters, time_major=True) + + def update_node_features(self, nodes, aggregated, learnable_embs, edges): + Combined = tf.reshape( + tf.stack([nodes, aggregated], axis=0), (2, -1, nodes.shape[-1]) + ) + updated_nodes = self.update_layer(Combined) + return tf.reshape(updated_nodes, shape=tf.shape(nodes)) + + +@register("GRUMPN") +def GRUMPNLayer( + activation=GELU, + normalization="LayerNormalization", + norm_kwargs={}, + **kwargs, +): + """ + GRU Graph Layer. + Parameters + ---------- + filters : int + Number of filters. + activation : str or activation function or layer + Activation function of the layer. See keras docs for accepted strings. + normalization : str or normalization function or layer + Normalization function of the layer. See keras and tfa docs for accepted strings. + random_edge_dropout : float, optional + Random edge dropout. + combine_layer : layer, optional + Layer to combine node features and aggregated messages. + norm_kwargs : dict + Arguments for the normalization function. + kwargs : dict + Additional arguments. + """ + + def Layer(filters, **kwargs_inner): + kwargs_inner.update(kwargs) + layer = GRUMPN( + filters, + activation=activation, + normalization=normalization, + norm_kwargs=norm_kwargs, + **kwargs, + ) + return lambda x: single_layer_call(x, layer, None, None, {}) + + return Layer + + +class FGNN(MPN): + """ + Fingerprinting Graph Layer. + Parameters + ---------- + filters : int + Number of filters. + activation : str or activation function or layer + Activation function of the layer. See keras docs for accepted strings. + normalization : str or normalization function or layer + Normalization function of the layer. See keras and tfa docs for accepted strings. + random_edge_dropout : float, optional + Random edge dropout. + use_gates : bool, optional + Whether to use gated self-attention layers as update layer. Defaults to True. + att_layer_kwargs : dict, optional + Keyword arguments for the self-attention layer. + combine_layer : layer, optional + Layer to combine node features and aggregated messages. + norm_kwargs : dict + Arguments for the normalization function. + kwargs : dict + Additional arguments. + """ + + def __init__( + self, + filters, + activation=GELU, + normalization="LayerNormalization", + use_gates=True, + att_layer_kwargs={}, + combine_layer=layers.Layer(), + norm_kwargs={}, + **kwargs, + ): + super().__init__( + filters, + activation=activation, + normalization=normalization, + combine_layer=combine_layer, + norm_kwargs=norm_kwargs, + **kwargs, + ) + + multi_head_att_layer = ( + MultiHeadGatedSelfAttention + if use_gates + else MultiHeadSelfAttention + ) + + # node update layer + self.update_layer = multi_head_att_layer(**att_layer_kwargs) + self.update_norm = tf.keras.Sequential( + [ + as_activation(activation), + as_normalization(normalization)(**norm_kwargs), + ] + ) + + @register("FGNN") def FGNNlayer( activation=GELU, normalization="LayerNormalization", - random_edge_dropout=False, use_gates=True, att_layer_kwargs={}, norm_kwargs={}, **kwargs, ): - """Fingerprinting Graph Layer. - + """ + Fingerprinting Graph Layer. 
Parameters ---------- filters : int @@ -166,6 +351,8 @@ def FGNNlayer( Whether to use gated self-attention layers as update layer. Defaults to True. att_layer_kwargs : dict, optional Keyword arguments for the self-attention layer. + combine_layer : layer, optional + Layer to combine node features and aggregated messages. norm_kwargs : dict Arguments for the normalization function. kwargs : dict @@ -176,12 +363,11 @@ def Layer(filters, **kwargs_inner): kwargs_inner.update(kwargs) layer = FGNN( filters, - activation, - normalization, - random_edge_dropout, - use_gates, - att_layer_kwargs, - norm_kwargs, + activation=activation, + normalization=normalization, + use_gates=use_gates, + att_layer_kwargs=att_layer_kwargs, + norm_kwargs=norm_kwargs, **kwargs_inner, ) return lambda x: single_layer_call(x, layer, None, None, {}) @@ -192,7 +378,6 @@ def Layer(filters, **kwargs_inner): class ClassTokenFGNN(FGNN): """ Fingerprinting Graph Layer with Class Token. - Parameters ---------- filters : int @@ -205,123 +390,301 @@ class ClassTokenFGNN(FGNN): Random edge dropout. use_gates : bool, optional Whether to use gated self-attention layers as update layer. Defaults to True. + dense_to_combine : bool, optional + Whether to use a dense layer to combine node features and aggregated messages. Defaults to True. att_layer_kwargs : dict, optional Keyword arguments for the self-attention layer. + combine_layer : layer, optional + Layer to combine node features and aggregated messages. norm_kwargs : dict Arguments for the normalization function. kwargs : dict Additional arguments. """ - def build(self, input_shape): - super().build(input_shape) - self.combine_layer = tf.keras.Sequential( + def __init__( + self, + filters, + combine_layer=tf.keras.layers.Lambda(lambda x: tf.concat(x, axis=-1)), + dense_to_combine=True, + **kwargs, + ): + + super().__init__( + filters, + combine_layer=combine_layer, + **kwargs, + ) + + self.dense_to_combine = ( + layers.Dense(self.filters) if dense_to_combine else layers.Layer() + ) + + self.process_class_token = layers.Dense(self.filters) + + def nodes_handler(self, nodes): + return nodes[:, 1:, :], nodes[:, 0:1, :] + + def update_node_features(self, nodes, aggregated, learnable_embs, edges): + Combined = tf.concat( [ - tf.keras.layers.Lambda(lambda x: tf.concat(x, axis=-1)), - layers.Dense(self.filters), - ] + self.process_class_token(learnable_embs), + self.dense_to_combine(self.combine_layer([nodes, aggregated])), + ], + axis=1, ) + updated_nodes = self.update_norm(self.update_layer(Combined)) + return updated_nodes - def call(self, inputs): - nodes, edge_features, edges, edge_weights, edge_dropout = inputs - # Split nodes and class-token embeddings - class_token, nodes = nodes[:, 0:1, :], nodes[:, 1:, :] +@register("CTFGNN") +def ClassTokenFGNNlayer( + activation=GELU, + normalization="LayerNormalization", + use_gates=True, + att_layer_kwargs={}, + norm_kwargs={}, + **kwargs, +): + """ + Fingerprinting Graph Layer with Class Token. + Parameters + ---------- + filters : int + Number of filters. + activation : str or activation function or layer + Activation function of the layer. See keras docs for accepted strings. + normalization : str or normalization function or layer + Normalization function of the layer. See keras and tfa docs for accepted strings. + random_edge_dropout : float, optional + Random edge dropout. + use_gates : bool, optional + Whether to use gated self-attention layers as update layer. Defaults to True. 
+    dense_to_combine : bool, optional
+        Whether to use a dense layer to combine node features and aggregated messages. Defaults to True.
+    att_layer_kwargs : dict, optional
+        Keyword arguments for the self-attention layer.
+    combine_layer : layer, optional
+        Layer to combine node features and aggregated messages.
+    norm_kwargs : dict
+        Arguments for the normalization function.
+    kwargs : dict
+        Additional arguments.
+    """
-        number_of_nodes = tf.shape(nodes)[1]
-        number_of_edges = tf.shape(edges)[1]
-        number_of_node_features = nodes.shape[-1]
+
+    def Layer(filters, **kwargs_inner):
+        kwargs_inner.update(kwargs)
+        layer = ClassTokenFGNN(
+            filters,
+            activation=activation,
+            normalization=normalization,
+            use_gates=use_gates,
+            att_layer_kwargs=att_layer_kwargs,
+            norm_kwargs=norm_kwargs,
+            **kwargs_inner,
+        )
+        return lambda x: single_layer_call(x, layer, None, None, {})
-        batch_size = tf.shape(nodes)[0]
+
+    return Layer
-        # Get neighbors node features, shape = (batch, nOfedges, 2, nOffeatures)
-        message_inputs = tf.gather(nodes, edges, batch_dims=1)
+
-        # Concatenate nodes features with edge features,
-        # shape = (batch, nOfedges, 2*nOffeatures + nOffedgefeatures)
-        messages = tf.reshape(
-            message_inputs,
-            (
-                batch_size,
-                number_of_edges,
-                2 * number_of_node_features,
-            ),
+class MaskedFGNN(FGNN):
+    """
+    Fingerprinting Graph Layer with Masked Attention.
+    Parameters
+    ----------
+    filters : int
+        Number of filters.
+    activation : str or activation function or layer
+        Activation function of the layer. See keras docs for accepted strings.
+    normalization : str or normalization function or layer
+        Normalization function of the layer. See keras and tfa docs for accepted strings.
+    random_edge_dropout : float, optional
+        Random edge dropout.
+    use_gates : bool, optional
+        Whether to use gated self-attention layers as update layer. Defaults to True.
+    att_layer_kwargs : dict, optional
+        Keyword arguments for the self-attention layer.
+    combine_layer : layer, optional
+        Layer to combine node features and aggregated messages.
+    norm_kwargs : dict
+        Arguments for the normalization function.
+    kwargs : dict
+        Additional arguments.
+    """
+
+    def update_node_features(self, nodes, aggregated, learnable_embs, edges):
+        Combined = self.combine_layer([nodes, aggregated])
+        updated_nodes = self.update_norm(
+            self.update_layer(Combined, edges=edges)
-        reshaped = tf.concat(
-            [
-                messages,
-                edge_features,
-            ],
-            -1,
         )
+        return updated_nodes
+
+
+@register("MaskedFGNN")
+def MaskedFGNNlayer(
+    activation=GELU,
+    normalization="LayerNormalization",
+    use_gates=True,
+    att_layer_kwargs={},
+    norm_kwargs={},
+    **kwargs,
+):
+    """
+    Fingerprinting Graph Layer with Masked Attention.
+    Parameters
+    ----------
+    filters : int
+        Number of filters.
+    activation : str or activation function or layer
+        Activation function of the layer. See keras docs for accepted strings.
+    normalization : str or normalization function or layer
+        Normalization function of the layer. See keras and tfa docs for accepted strings.
+    random_edge_dropout : float, optional
+        Random edge dropout.
+    use_gates : bool, optional
+        Whether to use gated self-attention layers as update layer. Defaults to True.
+    att_layer_kwargs : dict, optional
+        Keyword arguments for the self-attention layer.
+    combine_layer : layer, optional
+        Layer to combine node features and aggregated messages.
+    norm_kwargs : dict
+        Arguments for the normalization function.
+    kwargs : dict
+        Additional arguments.
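+
+    Examples
+    --------
+    A minimal usage sketch (the filter count is illustrative, not a
+    default): the factory returns a function of ``filters`` whose result
+    is applied to the usual five-tuple of graph tensors.
+
+    >>> block = MaskedFGNNlayer()
+    >>> layer_fn = block(96)  # builds a MaskedFGNN with 96 filters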
+ """ + + def Layer(filters, **kwargs_inner): + kwargs_inner.update(kwargs) + layer = MaskedFGNN( + filters, + activation=activation, + normalization=normalization, + use_gates=use_gates, + att_layer_kwargs=att_layer_kwargs, + norm_kwargs=norm_kwargs, + **kwargs_inner, ) + return lambda x: single_layer_call(x, layer, None, None, {}) - # Compute messages/update edges, shape = (batch, nOfedges, filters) - messages = self.message_layer(reshaped) + return Layer - # Compute weighted messages - # shape = (batch, nOfedges, filters) - weighted_messages = messages * edge_weights - # Merge repeated edges, shape = (batch, nOfedges (before augmentation), filters) - def aggregate(_, x): - message, edge, dropout = x +class GraphTransformer(tf.keras.layers.Layer): + """Graph Transformer. + Parameters + ---------- + fwd_mlp_dim : int + Dimension of the forward MLP. + number_of_heads : int + Number of attention heads. + activation : str or activation function or layer + Activation function of the layer. See keras docs for accepted strings. + normalization : str or normalization function or layer + Normalization function of the layer. See keras and tfa docs for accepted strings. + use_bias: bool, optional + Whether to use bias in the dense layers of the attention layers. Defaults to False. + dropout : float + Dropout rate. + norm_kwargs : dict + Arguments for the normalization function. + kwargs : dict + Additional arguments. + """ - merged_edges = tf.math.unsorted_segment_sum( - message * dropout[:, 1:2], - edge[:, 1], - number_of_nodes, - ) + def __init__( + self, + fwd_mlp_dim, + number_of_heads=12, + activation=GELU, + normalization="LayerNormalization", + use_bias=True, + clip_scores_by_value=(-5.0, 5.0), + dropout=0.0, + norm_kwargs={}, + **kwargs, + ): + super().__init__(**kwargs) + self.number_of_heads = number_of_heads + self.use_bias = use_bias - return merged_edges + self.fwd_mlp_dim = fwd_mlp_dim + self.dropout = dropout - # Aggregate messages, shape = (batch, nOfnodes, filters) - aggregated = tf.scan( - aggregate, - (weighted_messages, edges, edge_dropout), - initializer=tf.zeros((number_of_nodes, number_of_node_features)), - ) + self.activation = activation + self.normalization = normalization - # Update node features, (nOfnode, filters) - Combined = tf.concat( - [class_token, self.combine_layer([nodes, aggregated])], axis=1 + self.clip_scores_by_value = clip_scores_by_value + + self.MaskedMultiHeadAttLayer = MultiHeadSelfAttention( + number_of_heads=self.number_of_heads, + use_bias=self.use_bias, + name="MaskedMultiHeadAttLayer", + clip_scores_by_value=self.clip_scores_by_value, ) - updated_nodes = self.update_layer(Combined) + self.norm_0, self.norm_1 = ( + as_normalization(normalization)(**norm_kwargs), + as_normalization(normalization)(**norm_kwargs), + ) + self.dropout_layer = tf.keras.layers.Dropout(self.dropout) - return ( - updated_nodes, - weighted_messages, - edges, - edge_weights, - edge_dropout, + def build(self, input_shape): + input_shape, *_ = input_shape + + self.feed_forward_layer = tf.keras.Sequential( + [ + layers.Dense( + self.fwd_mlp_dim, + name=f"{self.name}/Dense_0", + ), + as_activation(self.activation), + layers.Dropout(self.dropout), + layers.Dense(input_shape[-1], name=f"{self.name}/Dense_1"), + layers.Dropout(self.dropout), + ], + name="feed_forward", ) + def call(self, inputs, training): + nodes, edges = inputs -@register("CTFGNN") -def ClassTokenFGNNlayer( + x = self.MaskedMultiHeadAttLayer(nodes, edges=edges) + x = self.dropout_layer(x, training=training) + x = 
self.norm_0(nodes + x)
+
+        y = self.feed_forward_layer(x)
+        return self.norm_1(x + y), edges
+
+
+@register("GraphTransformerLayer")
+def GraphTransformerLayer(
+    number_of_heads=6,
     activation=GELU,
     normalization="LayerNormalization",
-    random_edge_dropout=False,
-    use_gates=True,
-    att_layer_kwargs={},
+    use_bias=True,
+    clip_scores_by_value=(-5.0, 5.0),
+    dropout=0.0,
     norm_kwargs={},
     **kwargs,
 ):
-    """Fingerprinting Graph Layer with Class Token.
-
+    """Graph Transformer Layer.
     Parameters
     ----------
     number_of_heads : int
         Number of attention heads.
-    message_layer : str or callable
-        Message layer.
-    update_layer : str or callable
-        Update layer.
-    random_edge_dropout : float, optional
-        Random edge dropout.
+    dropout : float
+        Dropout rate.
     activation : str or activation function or layer
         Activation function of the layer. See keras docs for accepted strings.
     normalization : str or normalization function or layer
         Normalization function of the layer. See keras and tfa docs for accepted strings.
+    use_bias: bool, optional
+        Whether to use bias in the dense layers of the attention layers. Defaults to True.
+    clip_scores_by_value: tuple of float
+        Clipping values for attention scores.
     norm_kwargs : dict
         Arguments for the normalization function.
     kwargs : dict
@@ -330,15 +693,16 @@ def ClassTokenFGNNlayer(

     def Layer(filters, **kwargs_inner):
         kwargs_inner.update(kwargs)
-        layer = ClassTokenFGNN(
+        layer = GraphTransformer(
             filters,
+            number_of_heads,
             activation,
             normalization,
-            random_edge_dropout,
-            use_gates,
-            att_layer_kwargs,
+            use_bias,
+            clip_scores_by_value,
+            dropout,
             norm_kwargs,
             **kwargs_inner,
         )
         return lambda x: single_layer_call(x, layer, None, None, {})

diff --git a/deeptrack/models/gnns/models.py b/deeptrack/models/gnns/models.py
index 6161c77da..46c137401 100644
--- a/deeptrack/models/gnns/models.py
+++ b/deeptrack/models/gnns/models.py
@@ -8,7 +8,9 @@

 class MAGIK(KerasModel):
     """
-    Message passing graph neural network.
+    MAGIK, a message-passing Graph Neural Network for estimating the dynamical properties
+    of moving objects from time-lapse experiments. For more information about this model,
+    please refer to: https://arxiv.org/abs/2202.06355

     Parameters:
     -----------
@@ -29,7 +31,8 @@ class MAGIK(KerasModel):
     dense_block: str, keras.layers.Layer, or callable
         The dense block to use for the encoder and decoder.
     graph_block: str, keras.layers.Layer, or callable
-        The graph block to use for the graph blocks.
+        The graph block to use for the graph computation. See gnns.layers for available
+        graph blocks.
     output_type: str
         Type of output. Either "nodes", "edges", or "graph".
         If 'key' is not a supported output type, then the
@@ -160,8 +163,11 @@ def __init__(

 class CTMAGIK(KerasModel):
     """
-    Message passing graph neural network.
-
+    CTMAGIK, a message-passing Graph Neural Network for estimating system-level dynamical
+    properties of moving objects from time-lapse experiments. This model implements an extra
+    learnable token to aggregate global attributes from the whole graph. For more information
+    about this model, please refer to: https://arxiv.org/abs/2202.06355
+
     Parameters:
     -----------
     dense_layer_dimensions: list of ints
@@ -187,7 +193,8 @@ class CTMAGIK(KerasModel):
     dense_block: str, keras.layers.Layer, or callable
         The dense block to use for the encoder and decoder.
     graph_block: str, keras.layers.Layer, or callable
-        The graph block to use for the graph blocks.
+        The graph block to use for the graph computation. See gnns.layers for available
+        graph blocks.
     classtokens_block: str, keras.layers.Layer, or callable
         The embedding block to use for the class tokens.
     output_type: str
@@ -343,3 +350,166 @@ def __init__(
         )

         super().__init__(model, **kwargs)
+
+
+class MPNGNN(KerasModel):
+    """
+    Message-passing Graph Neural Network.
+
+    Parameters:
+    -----------
+    dense_layer_dimensions: list of ints
+        List of the number of units in each dense layer of the encoder and decoder. The
+        number of layers is inferred from the length of this list.
+    base_layer_dimensions: list of ints
+        List of the latent dimensions of the graph blocks. The number of layers is
+        inferred from the length of this list.
+    number_of_node_outputs: int
+        Number of output node features.
+    number_of_edge_outputs: int
+        Number of output edge features.
+    number_of_global_outputs: int
+        Number of output global features.
+    node_output_activation: str or activation function or layer
+        Activation function for the output node layer. See keras docs for accepted strings.
+    edge_output_activation: str or activation function or layer
+        Activation function for the output edge layer. See keras docs for accepted strings.
+    global_layer_dimension: int
+        Number of units in the decoder layer for global features.
+    global_output_activation: str or activation function or layer
+        Activation function for the output global layer. See keras docs for accepted strings.
+    dense_block: str, keras.layers.Layer, or callable
+        The dense block to use for the encoder and decoder.
+    graph_block: str, keras.layers.Layer, or callable
+        The graph block to use for the graph computation. See gnns.layers for available
+        graph blocks.
+    readout_block: str, keras.layers.Layer, or callable
+        The readout block used to compute global features.
+    output_type: str
+        Type of output. Either "nodes", "edges", "global" or
+        "graph". If 'key' is not a supported output type, then
+        the model output will be the concatenation of the node,
+        edge, and global predictions.
+    kwargs: dict
+        Keyword arguments for the dense block.
+    Returns:
+    --------
+    tf.keras.Model
+        Keras model for the graph neural network.
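+
+    Examples
+    --------
+    A minimal sketch (argument values are illustrative). The underlying
+    Keras model expects the four input tensors built in ``__init__``
+    below:
+
+    >>> gnn = MPNGNN(number_of_node_features=3, output_type="global")
+    >>> # gnn.model takes [node_features, edge_features, edges, edge_dropout]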
+ """ + + def __init__( + self, + dense_layer_dimensions=(32, 64, 96), + base_layer_dimensions=(96, 96), + number_of_node_features=3, + number_of_edge_features=1, + number_of_node_outputs=1, + number_of_edge_outputs=1, + number_of_global_outputs=1, + node_output_activation=None, + edge_output_activation=None, + global_layer_dimension=64, + global_output_activation=None, + dense_block=DenseBlock( + activation=GELU, + normalization="LayerNormalization", + ), + graph_block="MPN", + readout_block=layers.Lambda( + lambda x: tf.math.reduce_sum(x, axis=1), name="global_readout" + ), + output_type="graph", + **kwargs + ): + dense_block = as_block(dense_block) + graph_block = as_block(graph_block) + + node_features, edge_features, edges, edge_dropout = ( + tf.keras.Input(shape=(None, number_of_node_features)), + tf.keras.Input(shape=(None, number_of_edge_features)), + tf.keras.Input(shape=(None, 2), dtype=tf.int32), + tf.keras.Input(shape=(None, 2)), + ) + + node_layer = node_features + edge_layer = edge_features + + # Encoder for node and edge features + for dense_layer_number, dense_layer_dimension in zip( + range(len(dense_layer_dimensions)), dense_layer_dimensions + ): + node_layer = dense_block( + dense_layer_dimension, + name="node_ide" + str(dense_layer_number + 1), + **kwargs + )(node_layer) + + edge_layer = dense_block( + dense_layer_dimension, + name="edge_ide" + str(dense_layer_number + 1), + **kwargs + )(edge_layer) + + # Bottleneck path, graph blocks + layer = ( + node_layer, + edge_layer, + edges, + tf.ones_like(edge_features[..., 0:1]), + edge_dropout, + ) + + for base_layer_number, base_layer_dimension in zip( + range(len(base_layer_dimensions)), base_layer_dimensions + ): + layer = graph_block( + base_layer_dimension, + name="graph_block_" + str(base_layer_number), + )(layer) + + # Split nodes and edges + node_layer, edge_layer, *_ = layer + + # Compute global features + global_layer = readout_block(node_layer) + global_layer = dense_block( + global_layer_dimension, name="cls_mlp", **kwargs + )(global_layer) + + # Output layers + node_output = layers.Dense( + number_of_node_outputs, + activation=node_output_activation, + name="node_prediction", + )(node_layer) + + edge_output = layers.Dense( + number_of_edge_outputs, + activation=edge_output_activation, + name="edge_prediction", + )(edge_layer) + + global_output = layers.Dense( + number_of_global_outputs, + activation=global_output_activation, + name="global_prediction", + )(global_layer) + + output_dict = { + "nodes": node_output, + "edges": edge_output, + "global": global_output, + "graph": [node_output, edge_output, global_output], + } + try: + outputs = output_dict[output_type] + except KeyError: + outputs = output_dict["graph"] + + model = tf.keras.models.Model( + [node_features, edge_features, edges, edge_dropout], + outputs, + ) + + super().__init__(model, **kwargs) \ No newline at end of file diff --git a/deeptrack/models/layers.py b/deeptrack/models/layers.py index 19c5f3f26..b16d46380 100644 --- a/deeptrack/models/layers.py +++ b/deeptrack/models/layers.py @@ -43,7 +43,9 @@ def as_block(x): + ", ".join(BLOCKS.keys()) ) if isinstance(x, layers.Layer) or not callable(x): - raise TypeError("Layer block should be a function that returns a keras Layer.") + raise TypeError( + "Layer block should be a function that returns a keras Layer." 
+        )
     else:
         return x

@@ -97,7 +99,9 @@ def Layer(filters, **kwargs_inner):


 @register("dense")
-def DenseBlock(activation="relu", normalization=False, norm_kwargs={}, **kwargs):
+def DenseBlock(
+    activation="relu", normalization=False, norm_kwargs={}, **kwargs
+):
     """A single dense layer.
     Accepts arguments of keras.layers.Dense.

@@ -159,7 +163,10 @@ def PoolingBlock(
     def Layer(filters=None, **kwargs_inner):
         kwargs_inner.update(kwargs)
         layer = layers.MaxPool2D(
-            pool_size=pool_size, padding=padding, strides=strides, **kwargs_inner
+            pool_size=pool_size,
+            padding=padding,
+            strides=strides,
+            **kwargs_inner,
         )
         return lambda x: single_layer_call(
             x, layer, activation, normalization, norm_kwargs
@@ -318,7 +325,9 @@ def Layer(filters, **kwargs_inner):
         )

         def call(x):
-            y = single_layer_call(x, conv, activation, normalization, norm_kwargs)
+            y = single_layer_call(
+                x, conv, activation, normalization, norm_kwargs
+            )
             y = single_layer_call(y, conv2, None, normalization, norm_kwargs)
             y = layers.Add()([identity(x), y])
             if activation:
@@ -362,7 +371,6 @@ def Layer(filters, **kwargs_inner):

 class MultiHeadSelfAttention(layers.Layer):
     """Multi-head self-attention layer.
-
     Parameters
     ----------
     number_of_heads : int
@@ -371,6 +379,8 @@ class MultiHeadSelfAttention(layers.Layer):
         Whether to use bias in attention layer.
     return_attention_weights : bool
         Whether to return the attention weights for visualization.
+    clip_scores_by_value: tuple of float (Optional)
+        Clipping values for attention scores.
     kwargs
         Other arguments for the keras.layers.Layer
     """
@@ -380,12 +390,14 @@ def __init__(
         number_of_heads=12,
         use_bias=True,
         return_attention_weights=False,
+        clip_scores_by_value: tuple = None,
         **kwargs,
     ):
         super().__init__(**kwargs)
         self.number_of_heads = number_of_heads
         self.use_bias = use_bias
         self.return_attention_weights = return_attention_weights
+        self.clip_scores_by_value = clip_scores_by_value

     def build(self, input_shape):
         try:
@@ -406,9 +418,49 @@ def build(self, input_shape):

         self.combine_dense = layers.Dense(filters, use_bias=self.use_bias)

-    def SingleAttention(self, query, key, value, gate=None, **kwargs):
-        """Single attention layer.
+    def compute_attention_mask(self, x, edges, batch_size=None, **kwargs):
+        """
+        Computes the attention mask. The mask prevents
+        attention to certain positions.
+        Parameters
+        ----------
+        x : tf.Tensor
+            A zero tensor with the shape of the attention scores,
+            scattered into to build the mask.
+        edges : tf.Tensor
+            The edges of the graph.
+        batch_size : int
+            Number of graphs in the batch.
+        Returns
+        -------
+        tf.Tensor
+            The attention mask.
+        """
+        number_of_edges = tf.shape(edges)[1]
+
+        batch_dims = tf.range(batch_size)
+        batch_dims = tf.repeat(batch_dims, number_of_edges)
+        batch_dims = tf.reshape(
+            batch_dims, shape=(batch_size, number_of_edges, 1)
+        )
+        indices = tf.concat(
+            [batch_dims, tf.zeros_like(batch_dims), edges], axis=-1
+        )
+
+        mask = tf.tensor_scatter_nd_update(
+            x, indices, tf.ones((batch_size, number_of_edges))
+        )
+
+        return -10e9 * (1.0 - mask)
+
+    def softmax(self, x, axis=-1):
+        exp = tf.exp(x - tf.reduce_max(x, axis=axis, keepdims=True))
+        if self.clip_scores_by_value:
+            exp = tf.clip_by_value(exp, *self.clip_scores_by_value)
+
+        # Normalize over the same axis used for the max subtraction.
+        return tf.math.divide(exp, tf.reduce_sum(exp, axis=axis, keepdims=True))
+
+    def SingleAttention(
+        self, query, key, value, gate=None, edges=None, **kwargs
+    ):
+        """
+        Single attention layer.
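+        When ``edges`` is given, an additive mask built by
+        ``compute_attention_mask`` restricts attention to the
+        positions referenced by the graph's edges.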
Parameters ---------- query : tf.Tensor @@ -424,7 +476,14 @@ def SingleAttention(self, query, key, value, gate=None, **kwargs): dim_key = tf.cast(tf.shape(key)[-1], score.dtype) scaled_score = score / tf.math.sqrt(dim_key) - weights = tf.nn.softmax(scaled_score, axis=-1) + if edges is not None: + scaled_score += self.compute_attention_mask( + tf.zeros_like(scaled_score[:, 0:1]), + edges, + **kwargs, + ) + + weights = self.softmax(scaled_score, axis=-1) output = tf.matmul(weights, value) if gate is not None: @@ -434,7 +493,6 @@ def SingleAttention(self, query, key, value, gate=None, **kwargs): def separate_heads(self, x, batch_size): """ - Parameters ---------- x : tf.Tensor @@ -444,12 +502,13 @@ def separate_heads(self, x, batch_size): projection_dim : int Projection dimension. """ - x = tf.reshape(x, (batch_size, -1, self.number_of_heads, self.projection_dim)) + x = tf.reshape( + x, (batch_size, -1, self.number_of_heads, self.projection_dim) + ) return tf.transpose(x, perm=[0, 2, 1, 3]) def compute_attention(self, x, **kwargs): """ - Parameters ---------- x : tf.Tensor @@ -472,32 +531,39 @@ def compute_attention(self, x, **kwargs): value = self.separate_heads(value, batch_size) return ( - self.SingleAttention(query, key, value, **kwargs), + self.SingleAttention( + query, key, value, batch_size=batch_size, **kwargs + ), batch_size, ) def call(self, x, **kwargs): """ - Parameters ---------- x : tuple of tf.Tensors Input tensors. """ - (attention, weights), batch_size = self.compute_attention(x, **kwargs) + (attention, self.att_weights), batch_size = self.compute_attention( + x, **kwargs + ) attention = tf.transpose(attention, perm=[0, 2, 1, 3]) - concat_attention = tf.reshape(attention, (batch_size, -1, self.filters)) + concat_attention = tf.reshape( + attention, (batch_size, -1, self.filters) + ) output = self.combine_dense(concat_attention) if self.return_attention_weights: - return output, weights + return output, self.att_weights else: return output class MultiHeadGatedSelfAttention(MultiHeadSelfAttention): def build(self, input_shape): - """Build the layer.""" + """ + Build the layer. + """ try: filters = input_shape[1][-1] except TypeError: @@ -510,16 +576,18 @@ def build(self, input_shape): self.filters = filters self.projection_dim = filters // self.number_of_heads - self.query_dense = layers.Dense(filters) - self.key_dense = layers.Dense(filters) - self.value_dense = layers.Dense(filters) - self.gate_dense = layers.Dense(filters, activation="sigmoid") + self.query_dense = layers.Dense(filters, use_bias=self.use_bias) + self.key_dense = layers.Dense(filters, use_bias=self.use_bias) + self.value_dense = layers.Dense(filters, use_bias=self.use_bias) + self.gate_dense = layers.Dense( + filters, use_bias=self.use_bias, activation="sigmoid" + ) self.combine_dense = layers.Dense(filters) def compute_attention(self, x, **kwargs): - """Compute attention. - + """ + Compute attention. 
Parameters ---------- x : tf.Tensor @@ -544,7 +612,9 @@ def compute_attention(self, x, **kwargs): gate = self.separate_heads(gate, batch_size) return ( - self.SingleAttention(query, key, value, gate, **kwargs), + self.SingleAttention( + query, key, value, gate=gate, batch_size=batch_size, **kwargs + ), batch_size, ) @@ -553,7 +623,6 @@ def compute_attention(self, x, **kwargs): def MultiHeadSelfAttentionLayer( number_of_heads=12, use_bias=True, - return_attention_weights=False, activation="relu", normalization="LayerNormalization", norm_kwargs={}, @@ -571,8 +640,8 @@ def MultiHeadSelfAttentionLayer( Number of attention heads. use_bias : bool Whether to use bias in the dense layers. - return_attention_weights : bool - Whether to return attention weights for visualization. + clip_scores_by_value: tuple of float + Clipping values for attention scores. activation : str or activation function or layer Activation function of the layer. See keras docs for accepted strings. normalization : str or normalization function or layer @@ -586,7 +655,10 @@ def MultiHeadSelfAttentionLayer( def Layer(filters, **kwargs_inner): kwargs_inner.update(kwargs) layer = MultiHeadSelfAttention( - number_of_heads, use_bias, return_attention_weights, **kwargs_inner + number_of_heads, + use_bias, + return_attention_weights=False, + **kwargs_inner, ) return lambda x: single_layer_call( x, layer, activation, normalization, norm_kwargs @@ -599,7 +671,6 @@ def Layer(filters, **kwargs_inner): def MultiHeadGatedSelfAttentionLayer( number_of_heads=12, use_bias=True, - return_attention_weights=False, activation="relu", normalization="LayerNormalization", norm_kwargs={}, @@ -617,8 +688,8 @@ def MultiHeadGatedSelfAttentionLayer( Number of attention heads. use_bias : bool Whether to use bias in the dense layers. - return_attention_weights : bool - Whether to return attention weights for visualization. + clip_scores_by_value: tuple of float + Clipping values for attention scores. activation : str or activation function or layer Activation function of the layer. See keras docs for accepted strings. 
normalization : str or normalization function or layer @@ -632,7 +703,10 @@ def MultiHeadGatedSelfAttentionLayer( def Layer(filters, **kwargs_inner): kwargs_inner.update(kwargs) layer = MultiHeadGatedSelfAttention( - number_of_heads, use_bias, return_attention_weights, **kwargs_inner + number_of_heads, + use_bias, + return_attention_weights=False, + **kwargs_inner, ) return lambda x: single_layer_call( x, layer, activation, normalization, norm_kwargs @@ -691,7 +765,9 @@ def __init__( self.normalization = normalization self.MultiHeadAttLayer = ( - MultiHeadGatedSelfAttention if self.use_gates else MultiHeadSelfAttention + MultiHeadGatedSelfAttention + if self.use_gates + else MultiHeadSelfAttention )( number_of_heads=self.number_of_heads, use_bias=self.use_bias, @@ -719,8 +795,8 @@ def build(self, input_shape): name="feed_forward", ) - def call(self, inputs, training): - x, weights = self.MultiHeadAttLayer(inputs) + def call(self, inputs, training, edges=None, **kwargs): + x, weights = self.MultiHeadAttLayer(inputs, edges=edges) x = self.dropout_layer(x, training=training) x = self.norm_0(inputs + x) @@ -772,8 +848,10 @@ def Layer(filters, **kwargs_inner): use_gates, use_bias, norm_kwargs, - **kwargs, + **kwargs_inner, + ) + return lambda x, **kwargs: single_layer_call( + x, layer, None, None, {}, **kwargs ) - return lambda x: single_layer_call(x, layer, None, None, {}) return Layer diff --git a/deeptrack/models/utils.py b/deeptrack/models/utils.py index 19f0f1d40..054277f09 100644 --- a/deeptrack/models/utils.py +++ b/deeptrack/models/utils.py @@ -115,7 +115,7 @@ def as_normalization(x): def single_layer_call( - x, layer, activation, normalization, norm_kwargs, activation_first=True + x, layer, activation, normalization, norm_kwargs, activation_first=True, **kwargs ): """Calls a layer with activation and normalization.""" assert isinstance(norm_kwargs, dict), "norm_kwargs must be a dict. Got {0}".format( @@ -128,9 +128,10 @@ def single_layer_call( else x ) a = lambda x: as_activation(activation)(x) if activation else x - fs = [layer, a, n] if activation_first else [layer, n, a] + fs = [(layer, kwargs)] + fs = fs + [(a, {}), (n, {})] if activation_first else fs + [(n, {}), (a, {})] - return reduce(lambda x, f: f(x), fs, x) + return reduce(lambda x, f: f[0](x, **f[1]), fs, x) def with_citation(citation): @@ -244,7 +245,7 @@ def fit(self, x, *args, batch_size=32, generator_kwargs={}, **kwargs): # Code is not actually unreachable if fit crashes. return None - return self.model.fit(x, *args, **kwargs) + return self.model.fit(x, *args, batch_size=batch_size, **kwargs) def export( self, @@ -320,8 +321,78 @@ def add_preprocessing(self, other, input_shape="same"): return self - def __rrshift__(self, other): - return self.add_preprocessing(other) + @staticmethod + def append_layer_to_sequential(model, layer): + """Append a layer to a sequential model. + + Parameters + ---------- + model : Sequential + Model to append layer to. + layer : Layer + Layer to append. + """ + new_model = models.Sequential() + for l in model.layers: + new_model.add(l) + new_model.add(layer) + + return new_model + + @staticmethod + def append_layer_to_functional(model, layer): + """Append a layer to a functional model. + + Parameters + ---------- + model : Model + Model to append layer to. + layer : Layer + Layer to append. 
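+
+        Returns
+        -------
+        Model
+            A new functional model with ``layer`` appended to the output.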
+        """
+        i = layers.Input(model.input_shape[1:])
+        o = model(i)
+        o = layer(o)
+        new_model = models.Model(i, o)
+        return new_model
+
+    @staticmethod
+    def append_model_to_model(model, other):
+        """Append a model to another model.
+
+        Parameters
+        ----------
+        model : Model
+            Model to append the other model to.
+        other : Model
+            Model to append.
+        """
+        i = layers.Input(model.input_shape[1:])
+        o = model(i)
+        o = other(o)
+        new_model = models.Model(i, o)
+        return new_model
+
+    def __rshift__(self, other):
+        """Create a new model by adding a layer or model to the end of the current model."""
+
+        if isinstance(other, KerasModel):
+            other = other.model
+
+        if isinstance(other, models.Model):
+            return KerasModel(self.append_model_to_model(self.model, other))
+
+        if isinstance(other, layers.Layer) and isinstance(
+            self.model, models.Sequential
+        ):
+            return KerasModel(self.append_layer_to_sequential(self.model, other))
+
+        if isinstance(other, layers.Layer) and isinstance(self.model, models.Model):
+            return KerasModel(self.append_layer_to_functional(self.model, other))
+
+        raise ValueError(
+            "Can only add a layer or model to a model. Got {}".format(type(other))
+        )

     def __call__(self, *args, **kwargs):
         return self.model(*args, **kwargs)
diff --git a/deeptrack/models/vaes/__init__.py b/deeptrack/models/vaes/__init__.py
new file mode 100644
index 000000000..c6926c043
--- /dev/null
+++ b/deeptrack/models/vaes/__init__.py
@@ -0,0 +1 @@
+from .vae import *
\ No newline at end of file
diff --git a/deeptrack/models/vaes/vae.py b/deeptrack/models/vaes/vae.py
new file mode 100644
index 000000000..e1f10dbff
--- /dev/null
+++ b/deeptrack/models/vaes/vae.py
@@ -0,0 +1,127 @@
+import tensorflow as tf
+from tensorflow.keras import layers
+
+from ..utils import as_KerasModel
+
+
+@as_KerasModel
+class VAE(tf.keras.Model):
+    def __init__(self, encoder=None, decoder=None, latent_dim=2, **kwargs):
+        super().__init__(**kwargs)
+
+        # Dimensionality of the latent space
+        self.latent_dim = latent_dim
+
+        # Use the provided encoder and decoder, falling back to the
+        # defaults when they are not given.
+        self.encoder = (
+            encoder if encoder is not None else self.default_encoder()
+        )
+        self.decoder = (
+            decoder if decoder is not None else self.default_decoder()
+        )
+
+    def train_step(self, data):
+
+        data, _ = data
+
+        with tf.GradientTape() as tape:
+            # The encoder outputs the mean and log of the variance of the
+            # Gaussian distribution. The log of the variance is computed
+            # instead of the variance for numerical stability.
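+            # The final dense layer of the encoder has 2 * latent_dim
+            # units, so splitting it in two along the feature axis
+            # yields z_mean and z_log_var.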
+            z_mean, z_log_var = tf.split(self.encoder(data), 2, axis=1)
+
+            # Sample a random point in the latent space via the
+            # reparameterization trick; the standard deviation of the
+            # distribution is exp(0.5 * log_var).
+            epsilon = tf.random.normal(shape=tf.shape(z_mean))
+            z = z_mean + tf.exp(0.5 * z_log_var) * epsilon
+
+            # Reconstruct the input image
+            rdata = self.decoder(z)
+
+            # Reconstruction loss
+            rloss = self.loss(data, rdata)
+
+            # KL divergence loss
+            kl_loss = -0.5 * (
+                1 + z_log_var - tf.square(z_mean) - tf.exp(z_log_var)
+            )
+            kl_loss = tf.reduce_mean(tf.reduce_sum(kl_loss, axis=1))
+
+            # Total loss
+            loss = rloss + kl_loss
+
+        # Compute gradients
+        grads = tape.gradient(loss, self.trainable_weights)
+
+        # Update weights
+        self.optimizer.apply_gradients(
+            zip(grads, self.trainable_weights),
+        )
+
+        # Update metrics
+        self.compiled_metrics.update_state(data, rdata)
+
+        return {
+            "loss": loss,
+            "reconstruction_loss": rloss,
+            "kl_loss": kl_loss,
+        }
+
+    def call(self, inputs):
+        return self.encoder(inputs)
+
+    def default_encoder(self):
+        return tf.keras.Sequential(
+            [
+                tf.keras.Input(shape=(28, 28, 1)),
+                layers.Conv2D(
+                    32,
+                    kernel_size=3,
+                    strides=2,
+                    padding="same",
+                ),
+                layers.LeakyReLU(alpha=0.2),
+                layers.Conv2D(
+                    64,
+                    kernel_size=3,
+                    strides=2,
+                    padding="same",
+                ),
+                layers.LeakyReLU(alpha=0.2),
+                layers.Flatten(),
+                layers.Dense(16),
+                layers.LeakyReLU(alpha=0.2),
+                layers.Dense(
+                    self.latent_dim + self.latent_dim, name="z_mean_log_var"
+                ),
+            ],
+            name="encoder",
+        )
+
+    def default_decoder(self):
+        return tf.keras.Sequential(
+            [
+                tf.keras.Input(shape=(self.latent_dim,)),
+                layers.Dense(7 * 7 * 64),
+                layers.LeakyReLU(alpha=0.2),
+                layers.Reshape((7, 7, 64)),
+                layers.Conv2DTranspose(
+                    64,
+                    kernel_size=3,
+                    strides=2,
+                    padding="same",
+                ),
+                layers.LeakyReLU(alpha=0.2),
+                layers.Conv2DTranspose(
+                    32,
+                    kernel_size=3,
+                    strides=2,
+                    padding="same",
+                ),
+                layers.LeakyReLU(alpha=0.2),
+                layers.Conv2D(
+                    1,
+                    kernel_size=3,
+                    activation="sigmoid",
+                    padding="same",
+                ),
+            ],
+            name="decoder",
+        )
diff --git a/deeptrack/optics.py b/deeptrack/optics.py
index 7db604ca2..f55928976 100644
--- a/deeptrack/optics.py
+++ b/deeptrack/optics.py
@@ -75,7 +75,7 @@ def get(self, image, **kwargs):
             )
         ):

-            upscale = get_active_scale()
+            upscale = np.round(get_active_scale())

             output_region = additional_sample_kwargs.pop("output_region")
             additional_sample_kwargs["output_region"] = [
@@ -98,7 +98,9 @@ def get(self, image, **kwargs):
             )

         self._objective.padding.set_value(additional_sample_kwargs["padding"])
-        propagate_data_to_dependencies(self._sample, **additional_sample_kwargs)
+        propagate_data_to_dependencies(
+            self._sample, **{"return_fft": True, **additional_sample_kwargs}
+        )

         list_of_scatterers = self._sample()

@@ -328,6 +330,8 @@ def _pupil(
             copy=False,
         )

+        z_shift._value[z_shift._value.imag != 0] = 0
+
         try:
             z_shift = np.nan_to_num(z_shift, False, 0, 0, 0)
         except TypeError:
@@ -406,7 +410,6 @@ def _pad_volume(
         old_region[1, 0] : old_region[1, 0] + limits[1, 1] - limits[1, 0],
         old_region[2, 0] : old_region[2, 0] + limits[2, 1] - limits[2, 0],
     ] = volume
-
     return new_volume, new_limits

     def __call__(self, sample, **kwargs):
@@ -574,6 +577,10 @@ class Brightfield(Optics):

     __gpu_compatible__ = True

+    __conversion_table__ = ConversionTable(
+        working_distance=(u.meter, u.meter),
+    )
+
     def get(self, illuminated_volume, limits, fields, **kwargs):
         """Convolves the image with a pupil function"""
         # Pad volume
@@ -649,44 +656,10 @@ def get(self, illuminated_volume, limits, fields, **kwargs):

         K = 2 * np.pi / kwargs["wavelength"]

-        field_z = [field.get_property("z") for field in fields]
- field_offsets = [field.get_property("offset_z", default=0) for field in fields] - z = z_limits[1] for i, z in zip(index_iterator, z_iterator): light_in = light_in * pupil_step - to_remove = [] - for idx, fz in enumerate(field_z): - if fz < z: - propagation_matrix = self._pupil( - fields[idx].shape, - defocus=[z - fz - field_offsets[idx] / voxel_size[-1]], - include_aberration=False, - **kwargs, - )[0] - - propagation_matrix = propagation_matrix * np.exp( - 1j - * voxel_size[-1] - * 2 - * np.pi - / kwargs["wavelength"] - * kwargs["refractive_index_medium"] - * (z - fz) - ) - pf = np.fft.fft2(fields[idx][:, :, 0]) * np.fft.fftshift( - propagation_matrix - ) - - light_in += pf - to_remove.append(idx) - - for idx in reversed(to_remove): - fields.pop(idx) - field_z.pop(idx) - field_offsets.pop(idx) - if zero_plane[i]: continue @@ -695,24 +668,16 @@ def get(self, illuminated_volume, limits, fields, **kwargs): light_out = light * np.exp(1j * ri_slice * voxel_size[-1] * K) light_in = np.fft.fft2(light_out) - # Add remaining fields - for idx, fz in enumerate(field_z): - prop_dist = z - fz - field_offsets[idx] / voxel_size[-1] - propagation_matrix = self._pupil( - fields[idx].shape, - defocus=[prop_dist], - include_aberration=False, - **kwargs, - )[0] - - propagation_matrix = propagation_matrix - - import matplotlib.pyplot as plt + shifted_pupil = np.fft.fftshift(pupils[-1]) + light_in_focus = light_in * shifted_pupil - pf = np.fft.fft2(fields[idx][:, :, 0]) * np.fft.fftshift(propagation_matrix) - light_in += pf + if len(fields) > 0: + field = np.sum(fields, axis=0) + light_in_focus += field[..., 0] - light_in_focus = light_in * np.fft.fftshift(pupils[-1]) + # Mask to remove light outside the pupil. + mask = np.abs(shifted_pupil) > 0 + light_in_focus = light_in_focus * mask output_image = np.fft.ifft2(light_in_focus)[ : padded_volume.shape[0], : padded_volume.shape[1] @@ -722,12 +687,20 @@ def get(self, illuminated_volume, limits, fields, **kwargs): if not kwargs.get("return_field", False): output_image = np.square(np.abs(output_image)) + else: + # Fudge factor. Not sure why this is needed. + output_image = output_image - 1 + output_image = output_image * np.exp(1j * -np.pi / 4) + output_image = output_image + 1 output_image.properties = illuminated_volume.properties return output_image +Holography = Brightfield + + class IlluminationGradient(Feature): """Adds a gradient in the illumination @@ -782,6 +755,8 @@ def _get_position(image, mode="corner", return_z=False): if mode == "corner" and image.size > 0: import scipy.ndimage + image = image.to_numpy() + shift = scipy.ndimage.center_of_mass(np.abs(image)) if np.isnan(shift).any(): diff --git a/deeptrack/scatterers.py b/deeptrack/scatterers.py index dd393585a..fad133373 100644 --- a/deeptrack/scatterers.py +++ b/deeptrack/scatterers.py @@ -19,6 +19,8 @@ from pint import Quantity + +from deeptrack.holography import get_propagation_matrix from . 
import image from deeptrack.backend.units import ( ConversionTable, @@ -108,6 +110,7 @@ def __init__( upsample=upsample, voxel_size=voxel_size, pixel_size=pixel_size, + _position_sampler=lambda: position, **kwargs, ) @@ -150,29 +153,6 @@ def _process_and_get( Warning, ) - # # Downsamples the image along the axes it was upsampled - # if upsample != 1 and upsample_axes: - - # # Pad image to ensure it is divisible by upsample - # increase = np.array(new_image.shape) - # for axis in upsample_axes: - # increase[axis] = upsample - (new_image.shape[axis] % upsample) - # pad_width = [(0, inc) for inc in increase] - # new_image = np.pad(new_image, pad_width, mode="constant") - - # # Finds reshape size for downsampling - # new_shape = [] - # for axis in range(new_image.ndim): - # if axis in upsample_axes: - # new_shape += [new_image.shape[axis] // upsample, upsample] - # else: - # new_shape += [new_image.shape[axis]] - - # # Downsamples - # new_image = np.reshape(new_image, new_shape).mean( - # axis=tuple(np.array(upsample_axes, dtype=np.int32) * 2 + 1) - # ) - # Crops empty slices if crop_empty: new_image = new_image[~np.all(new_image == 0, axis=(1, 2))] @@ -515,6 +495,9 @@ class MieScatterer(Scatterer): z : float The position in the direction normal to the camera plane. Used if `position` is of length 2. + return_fft : bool + If True, the feature returns the fft of the field, rather than the + field itself. """ __gpu_compatible__ = True @@ -541,6 +524,9 @@ def __init__( padding=(0,) * 4, output_region=None, polarization_angle=None, + working_distance=1000000, # large value to avoid numerical issues unless the user specifies a smaller value + position_objective=(0, 0), + return_fft=False, **kwargs, ): if polarization_angle is not None: @@ -566,6 +552,9 @@ def __init__( padding=padding, output_region=output_region, polarization_angle=polarization_angle, + working_distance=working_distance, + position_objective=position_objective, + return_fft=return_fft, **kwargs, ) @@ -590,90 +579,138 @@ def _process_properties(self, properties): np.array(properties["output_region"][2:]) - properties["output_region"][:2] ) - / 2 + * 0.75 * min(properties["voxel_size"][:2]) - / np.sin(properties["collection_angle"]) + / np.tan(properties["collection_angle"]) ) return properties + def get_xy_size(self): + output_region = self.properties["output_region"]() + padding = self.properties["padding"]() + return ( + output_region[2] - output_region[0] + padding[0] + padding[2], + output_region[3] - output_region[1] + padding[1] + padding[3], + ) + + def get_XY(self, shape, voxel_size): + x = np.arange(shape[0]) - shape[0] / 2 + y = np.arange(shape[1]) - shape[1] / 2 + return np.meshgrid(x * voxel_size[0], y * voxel_size[1], indexing="ij") + + def get_detector_mask(self, X, Y, radius): + return np.sqrt(X ** 2 + Y ** 2) < radius + + def get_plane_in_polar_coords(self, shape, voxel_size, plane_position): + + X, Y = self.get_XY(shape, voxel_size) + X = image.maybe_cupy(X) + Y = image.maybe_cupy(Y) + + # the X, Y coordinates of the pupil relative to the particle + X = X + plane_position[0] + Y = Y + plane_position[1] + Z = plane_position[2] # might be +z or -z + + R2_squared = X ** 2 + Y ** 2 + R3 = np.sqrt(R2_squared + Z ** 2) # might be +z instead of -z + + # get the angles + cos_theta = Z / R3 + phi = np.arctan2(Y, X) + + return R3, cos_theta, phi + def get( self, inp, position, - output_region, voxel_size, padding, wavelength, refractive_index_medium, L, - offset_z, collection_angle, input_polarization, output_polarization, 
         coefficients,
+        offset_z,
+        z,
+        working_distance,
+        position_objective,
+        return_fft,
         **kwargs,
     ):
-        xSize = padding[2] + output_region[2] - output_region[0] + padding[0]
-        ySize = padding[3] + output_region[3] - output_region[1] + padding[1]
+        # Get size of the output
+        xSize, ySize = self.get_xy_size()
+        voxel_size = get_active_voxel_size()
+        arr = pad_image_to_fft(np.zeros((xSize, ySize))).astype(complex)
+        arr = image.maybe_cupy(arr)
+        position = np.array(position) * voxel_size[: len(position)]
+
+        pupil_physical_size = working_distance * np.tan(collection_angle) * 2

-        scale = get_active_scale()
+        z = z * voxel_size[2]
+
+        ratio = offset_z / (working_distance - z)

-        arr = pad_image_to_fft(np.zeros((xSize, ySize)))
-        position = np.array(position) * scale[: len(position)]
-        pos_floor = np.floor(position)
-        pos_digits = position - pos_floor

-        # Evluation grid
-        x = (
-            np.arange(-padding[0], arr.shape[0] - padding[0])
-            - arr.shape[0] // 2
-            + padding[0]
-            - pos_digits[0]
+        # position of the objective relative to the particle
+        relative_position = np.array(
+            (
+                position_objective[0] - position[0],
+                position_objective[1] - position[1],
+                working_distance - z,
+            )
         )
-        y = (
-            np.arange(-padding[1], arr.shape[1] - padding[1])
-            - arr.shape[1] // 2
-            + padding[1]
-            - pos_digits[1]
+
+        # get the field evaluation plane at offset_z
+        R3_field, cos_theta_field, phi_field = self.get_plane_in_polar_coords(
+            arr.shape, voxel_size, relative_position * ratio
+        )
+        cos_phi_field, sin_phi_field = np.cos(phi_field), np.sin(phi_field)
+        # x and y positions where a beam through the field evaluation plane hits the objective
+        x_farfield = (
+            position[0]
+            + R3_field * np.sqrt(1 - cos_theta_field ** 2) * cos_phi_field / ratio
+        )
+        y_farfield = (
+            position[1]
+            + R3_field * np.sqrt(1 - cos_theta_field ** 2) * sin_phi_field / ratio
         )

-        x = np.roll(x, int(-arr.shape[0] // 2 + padding[0] + pos_floor[0]), 0)
-        y = np.roll(y, int(-arr.shape[1] // 2 + padding[1] + pos_floor[1]), 0)
-        X, Y = np.meshgrid(x * voxel_size[0], y * voxel_size[1], indexing="ij")
+        # keep only the beams that fall within the pupil
+        pupil_mask = (x_farfield - position_objective[0]) ** 2 + (
+            y_farfield - position_objective[1]
+        ) ** 2 < (pupil_physical_size / 2) ** 2

-        X = image.maybe_cupy(X)
-        Y = image.maybe_cupy(Y)
+        R3_field = R3_field[pupil_mask]
+        cos_theta_field = cos_theta_field[pupil_mask]
+        phi_field = phi_field[pupil_mask]

-        R2 = np.sqrt(X ** 2 + Y ** 2)
-        R3 = np.sqrt(R2 ** 2 + (offset_z) ** 2)
-        ct = offset_z / R3
-
-        angle = np.arctan2(Y, X)
         if isinstance(input_polarization, (float, int, Quantity)):
             if isinstance(input_polarization, Quantity):
                 input_polarization = input_polarization.to("rad")
                 input_polarization = input_polarization.magnitude
-            S1_coef = np.sin(angle + input_polarization)
-            S2_coef = np.cos(angle + input_polarization)
+            S1_coef = np.sin(phi_field + input_polarization)
+            S2_coef = np.cos(phi_field + input_polarization)

         if isinstance(output_polarization, (float, int, Quantity)):
             if isinstance(input_polarization, Quantity):
                 output_polarization = output_polarization.to("rad")
                 output_polarization = output_polarization.magnitude
-            S1_coef *= np.sin(angle + output_polarization)
-            S2_coef *= np.cos(angle + output_polarization)
-
-        ct_max = np.cos(collection_angle)
+            S1_coef *= np.sin(phi_field + output_polarization)
+            S2_coef *= np.cos(phi_field + output_polarization)

         # Wave vector
         k = 2 * np.pi / wavelength * refractive_index_medium

         # Harmonics
         A, B = coefficients(L)
-        PI, TAU = D.mie_harmonics(ct, L)
+        PI, TAU = D.mie_harmonics(cos_theta_field, L)

         # Normalization factor
         E = [(2 * i + 1) / (i * (i + 1)) for i in range(1, L + 1)]

@@ -682,15 +719,36 @@ def get(
         S1 = sum([E[i] * A[i] * PI[i] + E[i] * B[i] * TAU[i] for i in range(0, L)])
         S2 = sum([E[i] * B[i] * PI[i] + E[i] * A[i] * TAU[i] for i in range(0, L)])

-        field = (
-            (ct > ct_max)
-            * 1j
-            / (k * R3)
-            * np.exp(1j * k * (R3 - offset_z))
+        arr[pupil_mask] = (
+            1j
+            / (k * R3_field)
+            * np.exp(1j * k * R3_field)
             * (S2 * S2_coef + S1 * S1_coef)
         )

-        return np.expand_dims(field, axis=-1)
+        fourier_field = np.fft.fft2(arr)
+
+        propagation_matrix = get_propagation_matrix(
+            fourier_field.shape,
+            pixel_size=voxel_size[2],
+            wavelength=wavelength,
+            to_z=(-offset_z - z) / refractive_index_medium,
+            dy=(
+                relative_position[0] * ratio
+                + position[0]
+                + (padding[0] - arr.shape[0] / 2) * voxel_size[0]
+            ),
+            dx=(
+                relative_position[1] * ratio
+                + position[1]
+                + (padding[1] - arr.shape[1] / 2) * voxel_size[1]
+            ),
+        )
+        fourier_field = fourier_field * propagation_matrix * np.exp(-1j * k * offset_z)
+
+        if return_fft:
+            return fourier_field[..., np.newaxis]
+        else:
+            return np.fft.ifft2(fourier_field)[..., np.newaxis]


 class MieSphere(MieScatterer):
@@ -744,6 +802,12 @@ def __init__(
         **kwargs,
     ):
         def coeffs(radius, refractive_index, refractive_index_medium, wavelength):
+
+            if isinstance(radius, Quantity):
+                radius = radius.to("m").magnitude
+            if isinstance(wavelength, Quantity):
+                wavelength = wavelength.to("m").magnitude
+
             def inner(L):
                 return D.mie_coefficients(
                     refractive_index / refractive_index_medium,
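[Editor's note] get_propagation_matrix is the deeptrack.holography helper imported at the top of this file; only its call signature (shape, pixel_size, wavelength, to_z, dx, dy) is visible in this patch. For orientation, here is a textbook angular-spectrum kernel that plays the same role as the propagation_matrix factor above -- the sign conventions, shift conventions, and the dx/dy phase ramps of the real helper are not shown in the diff, so this is only a sketch under those assumptions:

    # Editor's sketch -- not part of the patch. Generic angular-spectrum
    # propagation of a sampled complex field by a distance z.
    import numpy as np

    def angular_spectrum_kernel(shape, pixel_size, wavelength, z, refractive_index=1.33):
        # kz = sqrt(k^2 - kx^2 - ky^2); evanescent components are dropped.
        kx = 2 * np.pi * np.fft.fftfreq(shape[0], d=pixel_size)
        ky = 2 * np.pi * np.fft.fftfreq(shape[1], d=pixel_size)
        KX, KY = np.meshgrid(kx, ky, indexing="ij")
        k = 2 * np.pi / wavelength * refractive_index
        kz = np.sqrt(np.maximum(k ** 2 - KX ** 2 - KY ** 2, 0.0))
        return np.exp(1j * kz * z)

    field = np.ones((128, 128), dtype=complex)         # field in the start plane
    H = angular_spectrum_kernel(field.shape, 0.1e-6, 633e-9, z=5e-6)
    propagated = np.fft.ifft2(np.fft.fft2(field) * H)  # field a distance z away

Multiplying in Fourier space and inverting, as MieScatterer.get does when return_fft=False, is the standard way to move the scattered field from the evaluation plane at offset_z to the camera plane.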
diff --git a/deeptrack/test/test_features.py b/deeptrack/test/test_features.py
index 510b785da..548b40521 100644
--- a/deeptrack/test/test_features.py
+++ b/deeptrack/test/test_features.py
@@ -9,7 +9,10 @@

 from numpy.testing._private.utils import assert_almost_equal

+from deeptrack import scatterers
+
 from .. import features, Image, properties, utils
+from ..
import units import numpy as np @@ -913,6 +916,324 @@ def test_OneOfDict(self): self.assertRaises(KeyError, lambda: values.update().resolve(key="4")) + def test_NonOverlapping_resample_volume_position(self): + + # setup + nonOverlapping = features.NonOverlapping( + features.Value(value=1), + ) + + positions_no_unit = [1, 2] + positions_with_unit = [1 * units.px, 2 * units.px] + + positions_no_unit_iter = iter(positions_no_unit) + positions_with_unit_iter = iter(positions_with_unit) + + volume_1 = scatterers.PointParticle( + position=lambda: next(positions_no_unit_iter) + )() + volume_2 = scatterers.PointParticle( + position=lambda: next(positions_with_unit_iter) + )() + + # test + + self.assertEqual(volume_1.get_property("position"), positions_no_unit[0]) + self.assertEqual( + volume_2.get_property("position"), + positions_with_unit[0].to("px").magnitude, + ) + + nonOverlapping._resample_volume_position(volume_1) + nonOverlapping._resample_volume_position(volume_2) + + self.assertEqual(volume_1.get_property("position"), positions_no_unit[1]) + self.assertEqual( + volume_2.get_property("position"), + positions_with_unit[1].to("px").magnitude, + ) + + def test_NonOverlapping_check_volumes_non_overlapping(self): + + # setup + nonOverlapping = features.NonOverlapping( + features.Value(value=1), + ) + + volume_test0_a = np.zeros((5, 5, 5)) + volume_test0_b = np.zeros((5, 5, 5)) + + volume_test1_a = np.zeros((5, 5, 5)) + volume_test1_b = np.zeros((5, 5, 5)) + volume_test1_a[0, 0, 0] = 1 + volume_test1_b[0, 0, 0] = 1 + + volume_test2_a = np.zeros((5, 5, 5)) + volume_test2_b = np.zeros((5, 5, 5)) + volume_test2_a[0, 0, 0] = 1 + volume_test2_b[0, 0, 1] = 1 + + volume_test3_a = np.zeros((5, 5, 5)) + volume_test3_b = np.zeros((5, 5, 5)) + volume_test3_a[0, 0, 0] = 1 + volume_test3_b[0, 1, 0] = 1 + + volume_test4_a = np.zeros((5, 5, 5)) + volume_test4_b = np.zeros((5, 5, 5)) + volume_test4_a[0, 0, 0] = 1 + volume_test4_b[1, 0, 0] = 1 + + volume_test5_a = np.zeros((5, 5, 5)) + volume_test5_b = np.zeros((5, 5, 5)) + volume_test5_a[0, 0, 0] = 1 + volume_test5_b[0, 1, 1] = 1 + + volume_test6_a = np.zeros((5, 5, 5)) + volume_test6_b = np.zeros((5, 5, 5)) + volume_test6_a[1:3, 1:3, 1:3] = 1 + volume_test6_b[0:2, 0:2, 0:2] = 1 + + volume_test7_a = np.zeros((5, 5, 5)) + volume_test7_b = np.zeros((5, 5, 5)) + volume_test7_a[2:4, 2:4, 2:4] = 1 + volume_test7_b[0:2, 0:2, 0:2] = 1 + + volume_test8_a = np.zeros((5, 5, 5)) + volume_test8_b = np.zeros((5, 5, 5)) + volume_test8_a[3:, 3:, 3:] = 1 + volume_test8_b[:2, :2, :2] = 1 + + self.assertTrue( + nonOverlapping._check_volumes_non_overlapping( + volume_test0_a, + volume_test0_b, + min_distance=0, + ), + ) + + self.assertFalse( + nonOverlapping._check_volumes_non_overlapping( + volume_test1_a, + volume_test1_b, + min_distance=0, + ) + ) + + self.assertTrue( + nonOverlapping._check_volumes_non_overlapping( + volume_test2_a, + volume_test2_b, + min_distance=0, + ) + ) + self.assertFalse( + nonOverlapping._check_volumes_non_overlapping( + volume_test2_a, + volume_test2_b, + min_distance=1, + ) + ) + + self.assertTrue( + nonOverlapping._check_volumes_non_overlapping( + volume_test3_a, + volume_test3_b, + min_distance=0, + ) + ) + self.assertFalse( + nonOverlapping._check_volumes_non_overlapping( + volume_test3_a, + volume_test3_b, + min_distance=1, + ) + ) + + self.assertTrue( + nonOverlapping._check_volumes_non_overlapping( + volume_test4_a, + volume_test4_b, + min_distance=0, + ) + ) + self.assertFalse( + nonOverlapping._check_volumes_non_overlapping( + 
volume_test4_a, + volume_test4_b, + min_distance=1, + ) + ) + + self.assertTrue( + nonOverlapping._check_volumes_non_overlapping( + volume_test5_a, + volume_test5_b, + min_distance=0, + ) + ) + self.assertTrue( + nonOverlapping._check_volumes_non_overlapping( + volume_test5_a, + volume_test5_b, + min_distance=1, + ) + ) + + self.assertFalse( + nonOverlapping._check_volumes_non_overlapping( + volume_test6_a, + volume_test6_b, + min_distance=0, + ) + ) + + self.assertTrue( + nonOverlapping._check_volumes_non_overlapping( + volume_test7_a, + volume_test7_b, + min_distance=0, + ) + ) + self.assertTrue( + nonOverlapping._check_volumes_non_overlapping( + volume_test7_a, + volume_test7_b, + min_distance=1, + ) + ) + + self.assertTrue( + nonOverlapping._check_volumes_non_overlapping( + volume_test8_a, + volume_test8_b, + min_distance=0, + ) + ) + self.assertTrue( + nonOverlapping._check_volumes_non_overlapping( + volume_test8_a, + volume_test8_b, + min_distance=1, + ) + ) + self.assertTrue( + nonOverlapping._check_volumes_non_overlapping( + volume_test8_a, + volume_test8_b, + min_distance=2, + ) + ) + self.assertTrue( + nonOverlapping._check_volumes_non_overlapping( + volume_test8_a, + volume_test8_b, + min_distance=3, + ) + ) + self.assertFalse( + nonOverlapping._check_volumes_non_overlapping( + volume_test8_a, + volume_test8_b, + min_distance=4, + ) + ) + + def test_NonOverlapping_check_non_overlapping(self): + + # setup + nonOverlapping = features.NonOverlapping( + features.Value(value=1), + min_distance=1, + ) + + # Two spheres at the same position + volume_test0_a = scatterers.Sphere( + radius=5 * units.px, position=(0, 0, 0) * units.px + )() + volume_test0_b = scatterers.Sphere( + radius=5 * units.px, position=(0, 0, 0) * units.px + )() + + # Two spheres of the same size, one under the other + volume_test1_a = scatterers.Sphere( + radius=5 * units.px, position=(0, 0, 0) * units.px + )() + volume_test1_b = scatterers.Sphere( + radius=5 * units.px, position=(0, 0, 10) * units.px + )() + + # Two spheres of the same size, one under the other, but with a + # spacing of 1 + volume_test2_a = scatterers.Sphere( + radius=5 * units.px, position=(0, 0, 0) * units.px + )() + volume_test2_b = scatterers.Sphere( + radius=5 * units.px, position=(0, 0, 11) * units.px + )() + + # Two spheres of the same size, one under the other, but with a + # spacing of -1 + volume_test3_a = scatterers.Sphere( + radius=5 * units.px, position=(0, 0, 0) * units.px + )() + volume_test3_b = scatterers.Sphere( + radius=5 * units.px, position=(0, 0, 9) * units.px + )() + + # Two spheres of the same size, diagonally next to each other + volume_test4_a = scatterers.Sphere( + radius=5 * units.px, position=(0, 0, 0) * units.px + )() + volume_test4_b = scatterers.Sphere( + radius=5 * units.px, position=(6, 6, 6) * units.px + )() + + # Two spheres of the same size, diagonally next to each other, but + # with a spacing of 1 + volume_test5_a = scatterers.Sphere( + radius=5 * units.px, position=(0, 0, 0) * units.px + )() + volume_test5_b = scatterers.Sphere( + radius=5 * units.px, position=(7, 7, 7) * units.px + )() + + # Run tests + self.assertFalse( + nonOverlapping._check_non_overlapping( + [volume_test0_a, volume_test0_b], + ) + ) + + self.assertFalse( + nonOverlapping._check_non_overlapping( + [volume_test1_a, volume_test1_b], + ) + ) + + self.assertTrue( + nonOverlapping._check_non_overlapping( + [volume_test2_a, volume_test2_b], + ) + ) + + self.assertFalse( + nonOverlapping._check_non_overlapping( + [volume_test3_a, 
volume_test3_b], + ) + ) + + self.assertFalse( + nonOverlapping._check_non_overlapping( + [volume_test4_a, volume_test4_b], + ) + ) + + self.assertTrue( + nonOverlapping._check_non_overlapping( + [volume_test5_a, volume_test5_b], + ) + ) + if __name__ == "__main__": unittest.main() diff --git a/deeptrack/test/test_generators.py b/deeptrack/test/test_generators.py index 61f7e7825..8cd8d88b8 100644 --- a/deeptrack/test/test_generators.py +++ b/deeptrack/test/test_generators.py @@ -4,6 +4,7 @@ import unittest +from .. import features from .. import generators from ..optics import Fluorescence from ..scatterers import PointParticle @@ -68,6 +69,50 @@ def get_particle_position(result): with generator: self.assertGreater(len(generator.data), 10) self.assertLess(len(generator.data), 21) + + + def test_MultiInputs_ContinuousGenerator(self): + optics = Fluorescence( + NA=0.7, + wavelength=680e-9, + resolution=1e-6, + magnification=10, + output_region=(0, 0, 128, 128), + ) + scatterer_A = PointParticle( + intensity=100, + position_unit="pixel", + position=lambda: np.random.rand(2) * 128, + ) + scatterer_B = PointParticle( + intensity=10, + position_unit="pixel", + position=lambda: np.random.rand(2) * 128, + ) + imaged_scatterer_A = optics(scatterer_A) + imaged_scatterer_B = optics(scatterer_B) + + def get_particle_position(result): + result = result[0] + for property in result.properties: + if "position" in property: + return property["position"] + + imaged_scatterers = imaged_scatterer_A & imaged_scatterer_B + + generator = generators.ContinuousGenerator( + imaged_scatterers, + get_particle_position, + batch_size=8, + min_data_size=10, + max_data_size=20, + use_multi_inputs=True, + ) + + with generator: + data, _ = generator[0] + self.assertEqual(data[0].shape, (8, 128, 128, 1)) + self.assertEqual(data[1].shape, (8, 128, 128, 1)) def test_CappedContinuousGenerator(self): diff --git a/deeptrack/test/test_layers.py b/deeptrack/test/test_layers.py index 8c741ee8a..b74859672 100644 --- a/deeptrack/test/test_layers.py +++ b/deeptrack/test/test_layers.py @@ -115,6 +115,49 @@ def test_Multi_Head_Attention_filters(self): model = makeMinimalModel(block(1), shape=(100, 96)) self.assertEqual(model.layers[1].filters, 96) + def test_Multi_Head_Masked_Attention(self): + model = layers.MultiHeadSelfAttention(return_attention_weights=True) + edges = tf.constant( + [ + [ + [0, 1], + [0, 5], + [1, 2], + [1, 3], + [1, 4], + [2, 3], + [2, 4], + [3, 4], + [4, 5], + [5, 6], + [5, 7], + [6, 7], + [7, 8], + [8, 9], + [9, 1], + [9, 3], + ] + ] + ) + *_, attention_weights = model( + tf.random.uniform((1, 10, 96)), + edges=edges, + ) + number_of_heads = model.number_of_heads + head_dims = tf.repeat( + tf.range(number_of_heads)[tf.newaxis], edges.shape[1], axis=1 + ) + edges = tf.tile(edges, multiples=[1, number_of_heads, 1]) + ind = tf.concat( + [ + tf.zeros_like(tf.expand_dims(head_dims, -1)), + tf.expand_dims(head_dims, -1), + edges, + ], + axis=-1, + ) + self.assertTrue(np.all(tf.where(attention_weights).numpy() == ind)) + def test_Multi_Head_Gated_Attention(self): block = layers.MultiHeadGatedSelfAttentionLayer() model = makeMinimalModel(block(1), shape=(100, 96)) @@ -125,6 +168,28 @@ def test_Multi_Head_Gated_Attention_filters(self): model = makeMinimalModel(block(1), shape=(100, 96)) self.assertEqual(model.layers[1].filters, 96) + def test_Multi_Head_Gated_Attention_bias(self): + block = layers.MultiHeadGatedSelfAttentionLayer(use_bias=False) + model = makeMinimalModel(block(1), shape=(100, 96)) + 
self.assertFalse(model.layers[1].gate_dense.use_bias) + + def test_Transformer_Encoder(self): + block = layers.TransformerEncoderLayer() + model = makeMinimalModel(block(300), shape=(50, 300)) + self.assertTrue(model.layers[-1], layers.TransformerEncoder) + + def test_Tranformer_Encoder_parameters(self): + block = layers.TransformerEncoderLayer(number_of_heads=6) + model = makeMinimalModel(block(300), shape=(50, 300)) + self.assertEqual(model.layers[-1].MultiHeadAttLayer.number_of_heads, 6) + + def test_Transformer_Encoder_bias(self): + block = layers.TransformerEncoderLayer(use_bias=True) + model = makeMinimalModel(block(300), shape=(50, 300)) + self.assertTrue( + model.layers[-1].MultiHeadAttLayer.key_dense.use_bias, True + ) + def test_FGNN_layer(self): block = layers.FGNNlayer() model = makeMinimalModel( @@ -139,6 +204,22 @@ def test_FGNN_layer(self): ) self.assertTrue(model.layers[-1], layers.FGNN) + def test_FGNN_layer_combine_layer(self): + block = layers.FGNNlayer( + combine_layer=tf.keras.layers.Lambda(lambda x: tf.math.add(*x)) + ) + model = makeMinimalModel( + block(96), + input_layer=( + k_layers.Input(shape=(None, 96)), + k_layers.Input(shape=(None, 10)), + k_layers.Input(shape=(None, 2), dtype=tf.int32), + k_layers.Input(shape=(None, 1)), + k_layers.Input(shape=(None, 2)), + ), + ) + self.assertEqual(model.layers[-1].combine_layer([0.5, 0.5]), 1) + def test_Class_Token_FGNN_layer(self): block = layers.ClassTokenFGNNlayer() model = makeMinimalModel( @@ -167,9 +248,7 @@ def test_Class_Token_FGNN_update_layer(self): k_layers.Input(shape=(None, 2)), ), ) - self.assertEqual( - model.layers[-1].update_layer.layers[0].number_of_heads, 6 - ) + self.assertEqual(model.layers[-1].update_layer.number_of_heads, 6) def test_Class_Token_FGNN_normalization(self): # By setting center=False, scale=False, the number of trainable parameters should be 0 @@ -187,25 +266,33 @@ def test_Class_Token_FGNN_normalization(self): ), ) self.assertEqual( - model.layers[-1].update_layer.layers[-1].count_params(), 0 + model.layers[-1].update_norm.layers[-1].count_params(), 0 ) - def test_Transformer_Encoder(self): - block = layers.TransformerEncoderLayer() - model = makeMinimalModel(block(300), shape=(50, 300)) - self.assertTrue(model.layers[-1], layers.TransformerEncoder) - - def test_Tranformer_Encoder_parameters(self): - block = layers.TransformerEncoderLayer(number_of_heads=6) - model = makeMinimalModel(block(300), shape=(50, 300)) - self.assertEqual(model.layers[-1].MultiHeadAttLayer.number_of_heads, 6) + def test_Masked_FGNN_layer(self): + block = layers.MaskedFGNNlayer() + model = makeMinimalModel( + block(96), + input_layer=( + k_layers.Input(shape=(None, 96)), + k_layers.Input(shape=(None, 10)), + k_layers.Input(shape=(None, 2), dtype=tf.int32), + k_layers.Input(shape=(None, 1)), + k_layers.Input(shape=(None, 2)), + ), + ) + self.assertTrue(model.layers[-1], layers.MaskedFGNN) - def test_Transformer_Encoder_bias(self): - block = layers.TransformerEncoderLayer(use_bias=True) - model = makeMinimalModel(block(300), shape=(50, 300)) - self.assertTrue( - model.layers[-1].MultiHeadAttLayer.key_dense.use_bias, True + def test_GraphTransformer(self): + block = layers.GraphTransformerLayer() + model = makeMinimalModel( + block(96), + input_layer=( + k_layers.Input(shape=(None, 96)), + k_layers.Input(shape=(None, 2), dtype=tf.int32), + ), ) + self.assertTrue(model.layers[-1], layers.GraphTransformer) if __name__ == "__main__": diff --git a/deeptrack/test/test_models.py b/deeptrack/test/test_models.py index 
c5a1fe191..0bc545623 100644
--- a/deeptrack/test/test_models.py
+++ b/deeptrack/test/test_models.py
@@ -4,9 +4,11 @@

 import unittest

-from .. import models
+from .. import models, layers

 import numpy as np

+import tensorflow as tf
+

 class TestModels(unittest.TestCase):
     def test_FullyConnected(self):
@@ -110,6 +112,197 @@ def test_ViT(self):

         model.predict(np.zeros((1, 224, 224, 3)))

+    def test_MAGIK(self):
+        model = models.MAGIK(
+            dense_layer_dimensions=(
+                64,
+                96,
+            ),  # number of features in each dense encoder layer
+            base_layer_dimensions=(
+                96,
+                96,
+                96,
+            ),  # Latent dimension throughout the message passing layers
+            number_of_node_features=7,  # Number of node features in the graphs
+            number_of_edge_features=1,  # Number of edge features in the graphs
+            number_of_edge_outputs=1,  # Number of predicted features
+            edge_output_activation="sigmoid",  # Activation function for the output layer
+            output_type="edges",
+        )
+
+        self.assertIsInstance(model, models.KerasModel)
+
+        graph = (
+            tf.random.uniform((8, 10, 7)),  # Node features
+            tf.random.uniform((8, 50, 1)),  # Edge features
+            tf.random.uniform(
+                (8, 50, 2), minval=0, maxval=10, dtype=tf.int32
+            ),  # Edges
+            tf.random.uniform((8, 50, 2)),  # Edge dropouts
+        )
+        model(graph)
+
+    def test_CTMAGIK(self):
+        model = models.CTMAGIK(
+            dense_layer_dimensions=(
+                32,
+                64,
+                96,
+            ),  # number of features in each dense encoder layer
+            base_layer_dimensions=(
+                96,
+                96,
+            ),  # Latent dimension throughout the message passing layers
+            number_of_node_features=7,  # Number of node features in the graphs
+            number_of_edge_features=1,  # Number of edge features in the graphs
+            number_of_global_outputs=1,  # Number of predicted features
+            global_output_activation="softmax",  # Activation function for the output layer
+            output_type="global",
+        )
+
+        self.assertIsInstance(model, models.KerasModel)
+
+        graph = (
+            tf.random.uniform((8, 10, 7)),  # Node features
+            tf.random.uniform((8, 50, 1)),  # Edge features
+            tf.random.uniform(
+                (8, 50, 2), minval=0, maxval=10, dtype=tf.int32
+            ),  # Edges
+            tf.random.uniform((8, 50, 2)),  # Edge dropouts
+        )
+        prediction = model(graph)
+
+        self.assertEqual(prediction.shape, (8, 1))
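[Editor's note] Every graph-model test in this file feeds the model the same four-tuple, whose layout is documented only by the inline comments above. A small helper that builds such a tuple (an illustrative sketch; shapes follow the tests, with NumPy standing in for tf.random for brevity):

    # Editor's sketch -- not part of the patch.
    #   node_features:  (batch, n_nodes, n_node_features)
    #   edge_features:  (batch, n_edges, n_edge_features)
    #   edges:          (batch, n_edges, 2) int32 pairs of node indices
    #   edge_dropouts:  (batch, n_edges, 2)
    import numpy as np

    def make_random_graph(batch=8, n_nodes=10, n_edges=50, node_dim=7, edge_dim=1):
        rng = np.random.default_rng(0)
        return (
            rng.random((batch, n_nodes, node_dim), dtype=np.float32),
            rng.random((batch, n_edges, edge_dim), dtype=np.float32),
            rng.integers(0, n_nodes, (batch, n_edges, 2)).astype(np.int32),
            rng.random((batch, n_edges, 2), dtype=np.float32),
        )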
+
+    def test_MAGIK_with_MaskedFGNN(self):
+        model = models.MAGIK(
+            dense_layer_dimensions=(
+                64,
+                96,
+            ),  # number of features in each dense encoder layer
+            base_layer_dimensions=(
+                96,
+                96,
+                96,
+            ),  # Latent dimension throughout the message passing layers
+            number_of_node_features=7,  # Number of node features in the graphs
+            number_of_edge_features=1,  # Number of edge features in the graphs
+            number_of_edge_outputs=1,  # Number of predicted features
+            edge_output_activation="sigmoid",  # Activation function for the output layer
+            output_type="edges",
+            graph_block="MaskedFGNN",
+        )
+
+        self.assertIsInstance(model, models.KerasModel)
+        self.assertIsInstance(model.layers[15], layers.MaskedFGNN)
+
+        graph = (
+            tf.random.uniform((8, 10, 7)),  # Node features
+            tf.random.uniform((8, 50, 1)),  # Edge features
+            tf.random.uniform(
+                (8, 50, 2), minval=0, maxval=10, dtype=tf.int32
+            ),  # Edges
+            tf.random.uniform((8, 50, 2)),  # Edge dropouts
+        )
+        model(graph)
+
+    def test_MPGNN(self):
+        model = models.MPNGNN(
+            dense_layer_dimensions=(
+                64,
+                96,
+            ),  # number of features in each dense encoder layer
+            base_layer_dimensions=(
+                96,
+                96,
+                96,
+            ),  # Latent dimension throughout the message passing layers
+            number_of_node_features=7,  # Number of node features in the graphs
+            number_of_edge_features=1,  # Number of edge features in the graphs
+            number_of_edge_outputs=1,  # Number of predicted features
+            edge_output_activation="sigmoid",  # Activation function for the output layer
+            output_type="edges",
+        )
+
+        self.assertIsInstance(model, models.KerasModel)
+
+        graph = (
+            tf.random.uniform((8, 10, 7)),  # Node features
+            tf.random.uniform((8, 50, 1)),  # Edge features
+            tf.random.uniform(
+                (8, 50, 2), minval=0, maxval=10, dtype=tf.int32
+            ),  # Edges
+            tf.random.uniform((8, 50, 2)),  # Edge dropouts
+        )
+        model(graph)
+
+    def test_MPGNN_readout(self):
+        model = models.MPNGNN(
+            dense_layer_dimensions=(
+                32,
+                64,
+                96,
+            ),  # number of features in each dense encoder layer
+            base_layer_dimensions=(
+                96,
+                96,
+            ),  # Latent dimension throughout the message passing layers
+            number_of_node_features=7,  # Number of node features in the graphs
+            number_of_edge_features=1,  # Number of edge features in the graphs
+            number_of_global_outputs=1,  # Number of predicted features
+            global_output_activation="softmax",  # Activation function for the output layer
+            output_type="global",
+            readout_block=tf.keras.layers.GlobalAveragePooling1D(),
+        )
+
+        self.assertIsInstance(model, models.KerasModel)
+        self.assertIsInstance(
+            model.layers[-4], tf.keras.layers.GlobalAveragePooling1D
+        )
+
+        graph = (
+            tf.random.uniform((8, 10, 7)),  # Node features
+            tf.random.uniform((8, 50, 1)),  # Edge features
+            tf.random.uniform(
+                (8, 50, 2), minval=0, maxval=10, dtype=tf.int32
+            ),  # Edges
+            tf.random.uniform((8, 50, 2)),  # Edge dropouts
+        )
+        prediction = model(graph)
+
+        self.assertEqual(prediction.shape, (8, 1))
+
+    def test_GRU_MPGNN(self):
+        model = models.MPNGNN(
+            dense_layer_dimensions=(
+                64,
+                96,
+            ),  # number of features in each dense encoder layer
+            base_layer_dimensions=(
+                96,
+                96,
+                96,
+            ),  # Latent dimension throughout the message passing layers
+            number_of_node_features=7,  # Number of node features in the graphs
+            number_of_edge_features=1,  # Number of edge features in the graphs
+            number_of_edge_outputs=1,  # Number of predicted features
+            edge_output_activation="sigmoid",  # Activation function for the output layer
+            output_type="edges",
+            graph_block="GRUMPN",
+        )
+
+        self.assertIsInstance(model, models.KerasModel)
+
+        graph = (
+            tf.random.uniform((8, 10, 7)),  # Node features
+            tf.random.uniform((8, 50, 1)),  # Edge features
+            tf.random.uniform(
+                (8, 50, 2), minval=0, maxval=10, dtype=tf.int32
+            ),  # Edges
+            tf.random.uniform((8, 50, 2)),  # Edge dropouts
+        )
+        model(graph)
+

 if __name__ == "__main__":
     unittest.main()
\ No newline at end of file
diff --git a/examples/LodeSTAR/01. autotracker_template.ipynb b/examples/LodeSTAR/01. autotracker_template.ipynb
index 6d11a1d2c..0a6310365 100644
--- a/examples/LodeSTAR/01. autotracker_template.ipynb
+++ b/examples/LodeSTAR/01. autotracker_template.ipynb
@@ -89,7 +89,7 @@
    "id": "5400afe3",
    "metadata": {},
    "source": [
-    "\"Open"
+    "\"Open"
    ]
   },
@@ -485,4 +485,4 @@
 },
 "nbformat": 4,
 "nbformat_minor": 5
-}
\ No newline at end of file
+}
diff --git a/examples/LodeSTAR/02. tracking_particles_of_various_shapes.ipynb b/examples/LodeSTAR/02. tracking_particles_of_various_shapes.ipynb
index 7853dab39..66115ffc6 100644
--- a/examples/LodeSTAR/02. tracking_particles_of_various_shapes.ipynb
+++ b/examples/LodeSTAR/02.
tracking_particles_of_various_shapes.ipynb @@ -89,7 +89,7 @@ "id": "dfd5b527", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { @@ -497,4 +497,4 @@ }, "nbformat": 4, "nbformat_minor": 5 -} \ No newline at end of file +} diff --git a/examples/LodeSTAR/03.track_BF-C2DL-HSC.ipynb b/examples/LodeSTAR/03.track_BF-C2DL-HSC.ipynb index e68902515..6880b46e3 100644 --- a/examples/LodeSTAR/03.track_BF-C2DL-HSC.ipynb +++ b/examples/LodeSTAR/03.track_BF-C2DL-HSC.ipynb @@ -89,7 +89,7 @@ "id": "5116dc3e", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { @@ -346,4 +346,4 @@ }, "nbformat": 4, "nbformat_minor": 5 -} \ No newline at end of file +} diff --git a/examples/LodeSTAR/04.track_Fluo-C2DL-Huh7.ipynb b/examples/LodeSTAR/04.track_Fluo-C2DL-Huh7.ipynb index d53172ad8..16a983929 100644 --- a/examples/LodeSTAR/04.track_Fluo-C2DL-Huh7.ipynb +++ b/examples/LodeSTAR/04.track_Fluo-C2DL-Huh7.ipynb @@ -89,7 +89,7 @@ "id": "609e5b32", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { @@ -353,4 +353,4 @@ }, "nbformat": 4, "nbformat_minor": 5 -} \ No newline at end of file +} diff --git a/examples/LodeSTAR/05.track_PhC-C2DL-PSC.ipynb b/examples/LodeSTAR/05.track_PhC-C2DL-PSC.ipynb index 954fd41c3..d2e814018 100644 --- a/examples/LodeSTAR/05.track_PhC-C2DL-PSC.ipynb +++ b/examples/LodeSTAR/05.track_PhC-C2DL-PSC.ipynb @@ -89,7 +89,7 @@ "id": "700eb3e2", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { @@ -351,4 +351,4 @@ }, "nbformat": 4, "nbformat_minor": 5 -} \ No newline at end of file +} diff --git a/examples/LodeSTAR/06.track_plankton.ipynb b/examples/LodeSTAR/06.track_plankton.ipynb index 5f2054f86..2f0bacee7 100644 --- a/examples/LodeSTAR/06.track_plankton.ipynb +++ b/examples/LodeSTAR/06.track_plankton.ipynb @@ -89,7 +89,7 @@ "id": "982319a0", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { @@ -321,4 +321,4 @@ }, "nbformat": 4, "nbformat_minor": 5 -} \ No newline at end of file +} diff --git a/examples/LodeSTAR/07.track_3D_holography.ipynb b/examples/LodeSTAR/07.track_3D_holography.ipynb index 69292cf17..8dde70649 100644 --- a/examples/LodeSTAR/07.track_3D_holography.ipynb +++ b/examples/LodeSTAR/07.track_3D_holography.ipynb @@ -89,7 +89,7 @@ "id": "bf6decf3", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { @@ -1070,4 +1070,4 @@ }, "nbformat": 4, "nbformat_minor": 5 -} \ No newline at end of file +} diff --git a/examples/LodeSTAR/08.measure_mass_simulated.ipynb b/examples/LodeSTAR/08.measure_mass_simulated.ipynb index a4ee30645..595d724a7 100644 --- a/examples/LodeSTAR/08.measure_mass_simulated.ipynb +++ b/examples/LodeSTAR/08.measure_mass_simulated.ipynb @@ -89,7 +89,7 @@ "id": "fdb17053", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { @@ -590,4 +590,4 @@ }, "nbformat": 4, "nbformat_minor": 5 -} \ No newline at end of file +} diff --git a/examples/LodeSTAR/09.measure_mass_experimental.ipynb b/examples/LodeSTAR/09.measure_mass_experimental.ipynb index a88a99d60..31c4f9760 100644 --- a/examples/LodeSTAR/09.measure_mass_experimental.ipynb +++ b/examples/LodeSTAR/09.measure_mass_experimental.ipynb @@ -89,7 +89,7 @@ "id": "730650bd", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { @@ -692,4 +692,4 @@ }, "nbformat": 4, "nbformat_minor": 5 -} \ No newline at end of file +} diff --git a/examples/LodeSTAR/10.measure_mass_cell.ipynb b/examples/LodeSTAR/10.measure_mass_cell.ipynb index 581ab0870..343aea2e2 100644 --- a/examples/LodeSTAR/10.measure_mass_cell.ipynb +++ b/examples/LodeSTAR/10.measure_mass_cell.ipynb @@ -72,7 +72,7 @@ "id": "1c221a13", 
"metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { @@ -785,4 +785,4 @@ }, "nbformat": 4, "nbformat_minor": 5 -} \ No newline at end of file +} diff --git a/examples/MAGIK/cell_migration_analysis.ipynb b/examples/MAGIK/cell_migration_analysis.ipynb index 83885a974..beb6bc754 100644 --- a/examples/MAGIK/cell_migration_analysis.ipynb +++ b/examples/MAGIK/cell_migration_analysis.ipynb @@ -92,7 +92,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { @@ -155,7 +155,7 @@ "source": [ "## 2. Overview\n", "\n", - "In this example, we exemplify how to use [MAGIK](https://arxiv.org/abs/2202.06355) (Motion Analysis through GNN Inductive Knowledge) in its most natural application, trajectory linking. We will analyze a live cell migration experiment and address practical implications of using MAGIK (Data courtesy of Sergi Mas\u00f3 Orriols, [the QuBI lab](https://mon.uvic.cat/qubilab/)).\n", + "In this example, we exemplify how to use [MAGIK](https://arxiv.org/abs/2202.06355) (Motion Analysis through GNN Inductive Knowledge) in its most natural application, trajectory linking. We will analyze a live cell migration experiment and address practical implications of using MAGIK (Data courtesy of Sergi Masó Orriols, [the QuBI lab](https://mon.uvic.cat/qubilab/)).\n", "\n", "One of the main impediments when using deep learning methods is the availability of a suitable dataset, especially intended for the problem to be solved. Particularly for trajectory linking tasks, the absence of datasets is more notorious.\n", "\n", @@ -210,7 +210,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "MAGIK models the objects\u2019 motion and physical interactions using a graph representation. Graphs can define arbitrary relational structures between nodes connecting them pairwise through edges. In MAGIK, each node describes an object detection at a specific time:\n" + "MAGIK models the objects’ motion and physical interactions using a graph representation. Graphs can define arbitrary relational structures between nodes connecting them pairwise through edges. In MAGIK, each node describes an object detection at a specific time:\n" ] }, { @@ -1229,7 +1229,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "100%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588| 1/1 [00:05<00:00, 5.21s/it]\n" + "100%|█████████████████████████████████████████████████████████████████| 1/1 [00:05<00:00, 5.21s/it]\n" ] } ], @@ -1647,4 +1647,4 @@ }, "nbformat": 4, "nbformat_minor": 2 -} \ No newline at end of file +} diff --git a/examples/get-started/01. deeptrack_introduction_tutorial.ipynb b/examples/get-started/01. deeptrack_introduction_tutorial.ipynb index 5083cb9d6..9d13efa02 100644 --- a/examples/get-started/01. deeptrack_introduction_tutorial.ipynb +++ b/examples/get-started/01. deeptrack_introduction_tutorial.ipynb @@ -108,7 +108,7 @@ "source": [ "# DeepTrack 2.1 - Introduction\n", "\n", - "\"Open\n", + "\"Open\n", "\n", "This tutorial gives an overview of how to use DeepTrack 2.1.\n", "\n", @@ -1031,4 +1031,4 @@ }, "nbformat": 4, "nbformat_minor": 2 -} \ No newline at end of file +} diff --git a/examples/get-started/02. 
using_deeptrack_generators.ipynb b/examples/get-started/02. using_deeptrack_generators.ipynb index 39cda03c1..24ad938bf 100644 --- a/examples/get-started/02. using_deeptrack_generators.ipynb +++ b/examples/get-started/02. using_deeptrack_generators.ipynb @@ -91,7 +91,7 @@ "source": [ "# DeepTrack 2.1 - Generators\n", "\n", - "\"Open\n", + "\"Open\n", "\n", "This tutorial introduces and explains generators.\n", "\n", @@ -476,4 +476,4 @@ }, "nbformat": 4, "nbformat_minor": 2 -} \ No newline at end of file +} diff --git a/examples/get-started/03. customizing_deeptrack_models.ipynb b/examples/get-started/03. customizing_deeptrack_models.ipynb index 800636664..2f4aebce8 100644 --- a/examples/get-started/03. customizing_deeptrack_models.ipynb +++ b/examples/get-started/03. customizing_deeptrack_models.ipynb @@ -91,7 +91,7 @@ "source": [ "# DeepTrack 2.1 - Introduction\n", "\n", - "\"Open\n", + "\"Open\n", "\n", "This tutorial gives an overview of how to use DeepTrack 2.1.\n", "\n", @@ -1009,4 +1009,4 @@ }, "nbformat": 4, "nbformat_minor": 2 -} \ No newline at end of file +} diff --git a/examples/module-examples/aberrations_example.ipynb b/examples/module-examples/aberrations_example.ipynb index dcbed5012..215a706b4 100644 --- a/examples/module-examples/aberrations_example.ipynb +++ b/examples/module-examples/aberrations_example.ipynb @@ -87,7 +87,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { @@ -848,4 +848,4 @@ }, "nbformat": 4, "nbformat_minor": 2 -} \ No newline at end of file +} diff --git a/examples/module-examples/augmentations_example.ipynb b/examples/module-examples/augmentations_example.ipynb index 2f0170e11..d1450fd1e 100644 --- a/examples/module-examples/augmentations_example.ipynb +++ b/examples/module-examples/augmentations_example.ipynb @@ -87,7 +87,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { @@ -433,4 +433,4 @@ }, "nbformat": 4, "nbformat_minor": 2 -} \ No newline at end of file +} diff --git a/examples/module-examples/features_example.ipynb b/examples/module-examples/features_example.ipynb index 80e97bcd5..28afa96d1 100644 --- a/examples/module-examples/features_example.ipynb +++ b/examples/module-examples/features_example.ipynb @@ -87,7 +87,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { @@ -481,4 +481,4 @@ }, "nbformat": 4, "nbformat_minor": 2 -} \ No newline at end of file +} diff --git a/examples/module-examples/generators_example.ipynb b/examples/module-examples/generators_example.ipynb index 700df556b..c96b0257e 100644 --- a/examples/module-examples/generators_example.ipynb +++ b/examples/module-examples/generators_example.ipynb @@ -86,7 +86,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { @@ -271,4 +271,4 @@ }, "nbformat": 4, "nbformat_minor": 2 -} \ No newline at end of file +} diff --git a/examples/module-examples/image_example.ipynb b/examples/module-examples/image_example.ipynb index 26686d825..5bbc291eb 100644 --- a/examples/module-examples/image_example.ipynb +++ b/examples/module-examples/image_example.ipynb @@ -86,7 +86,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { @@ -449,4 +449,4 @@ }, "nbformat": 4, "nbformat_minor": 2 -} \ No newline at end of file +} diff --git a/examples/module-examples/losses_example.ipynb b/examples/module-examples/losses_example.ipynb index 98272563f..d07ec3a90 100644 --- a/examples/module-examples/losses_example.ipynb +++ 
b/examples/module-examples/losses_example.ipynb @@ -80,7 +80,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { @@ -452,4 +452,4 @@ }, "nbformat": 4, "nbformat_minor": 2 -} \ No newline at end of file +} diff --git a/examples/module-examples/math_example.ipynb b/examples/module-examples/math_example.ipynb index edd5c62ed..3f7ab457a 100644 --- a/examples/module-examples/math_example.ipynb +++ b/examples/module-examples/math_example.ipynb @@ -87,7 +87,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { @@ -247,4 +247,4 @@ }, "nbformat": 4, "nbformat_minor": 2 -} \ No newline at end of file +} diff --git a/examples/module-examples/models_example.ipynb b/examples/module-examples/models_example.ipynb index b78248c37..08f458ca3 100644 --- a/examples/module-examples/models_example.ipynb +++ b/examples/module-examples/models_example.ipynb @@ -87,7 +87,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { @@ -419,4 +419,4 @@ }, "nbformat": 4, "nbformat_minor": 2 -} \ No newline at end of file +} diff --git a/examples/module-examples/noises_example.ipynb b/examples/module-examples/noises_example.ipynb index fc2a7fa92..864896526 100644 --- a/examples/module-examples/noises_example.ipynb +++ b/examples/module-examples/noises_example.ipynb @@ -80,7 +80,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { @@ -252,4 +252,4 @@ }, "nbformat": 4, "nbformat_minor": 2 -} \ No newline at end of file +} diff --git a/examples/module-examples/optics_example.ipynb b/examples/module-examples/optics_example.ipynb index 5462a5c33..3a51e2975 100644 --- a/examples/module-examples/optics_example.ipynb +++ b/examples/module-examples/optics_example.ipynb @@ -87,7 +87,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { @@ -363,4 +363,4 @@ }, "nbformat": 4, "nbformat_minor": 2 -} \ No newline at end of file +} diff --git a/examples/module-examples/properties_example.ipynb b/examples/module-examples/properties_example.ipynb index 6a302912c..37a25a1cb 100644 --- a/examples/module-examples/properties_example.ipynb +++ b/examples/module-examples/properties_example.ipynb @@ -80,7 +80,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { @@ -543,4 +543,4 @@ }, "nbformat": 4, "nbformat_minor": 2 -} \ No newline at end of file +} diff --git a/examples/module-examples/scatterers_example.ipynb b/examples/module-examples/scatterers_example.ipynb index c7f18659e..13ed26594 100644 --- a/examples/module-examples/scatterers_example.ipynb +++ b/examples/module-examples/scatterers_example.ipynb @@ -87,7 +87,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { @@ -410,4 +410,4 @@ }, "nbformat": 4, "nbformat_minor": 2 -} \ No newline at end of file +} diff --git a/examples/module-examples/sequences_example.ipynb b/examples/module-examples/sequences_example.ipynb index 5d33b9261..6e60e94a5 100644 --- a/examples/module-examples/sequences_example.ipynb +++ b/examples/module-examples/sequences_example.ipynb @@ -85,7 +85,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { @@ -3631,4 +3631,4 @@ }, "nbformat": 4, "nbformat_minor": 2 -} \ No newline at end of file +} diff --git a/examples/module-examples/utils_example.ipynb b/examples/module-examples/utils_example.ipynb index 4e75ab576..148d2261f 100644 --- a/examples/module-examples/utils_example.ipynb +++ 
b/examples/module-examples/utils_example.ipynb @@ -85,7 +85,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { @@ -285,4 +285,4 @@ }, "nbformat": 4, "nbformat_minor": 2 -} \ No newline at end of file +} diff --git a/examples/paper-examples/1-MNIST.ipynb b/examples/paper-examples/1-MNIST.ipynb index e58fd0b9a..d1b4edb24 100644 --- a/examples/paper-examples/1-MNIST.ipynb +++ b/examples/paper-examples/1-MNIST.ipynb @@ -7,7 +7,7 @@ "id": "view-in-github" }, "source": [ - "\"Open" + "\"Open" ] }, { @@ -1608,4 +1608,4 @@ }, "nbformat": 4, "nbformat_minor": 0 -} \ No newline at end of file +} diff --git a/examples/paper-examples/2-single_particle_tracking.ipynb b/examples/paper-examples/2-single_particle_tracking.ipynb index 12f921559..9ee9208eb 100644 --- a/examples/paper-examples/2-single_particle_tracking.ipynb +++ b/examples/paper-examples/2-single_particle_tracking.ipynb @@ -92,7 +92,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { @@ -1613,4 +1613,4 @@ }, "nbformat": 4, "nbformat_minor": 2 -} \ No newline at end of file +} diff --git a/examples/paper-examples/3-particle_sizing.ipynb b/examples/paper-examples/3-particle_sizing.ipynb index f315e1699..ef3f1ee9a 100644 --- a/examples/paper-examples/3-particle_sizing.ipynb +++ b/examples/paper-examples/3-particle_sizing.ipynb @@ -87,7 +87,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { @@ -1187,4 +1187,4 @@ }, "nbformat": 4, "nbformat_minor": 2 -} \ No newline at end of file +} diff --git a/examples/paper-examples/4-multi-molecule-tracking.ipynb b/examples/paper-examples/4-multi-molecule-tracking.ipynb index c24f46a70..89f635dc2 100644 --- a/examples/paper-examples/4-multi-molecule-tracking.ipynb +++ b/examples/paper-examples/4-multi-molecule-tracking.ipynb @@ -92,7 +92,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { @@ -13103,4 +13103,4 @@ }, "nbformat": 4, "nbformat_minor": 2 -} \ No newline at end of file +} diff --git a/examples/paper-examples/5-inline_holography_3d_tracking.ipynb b/examples/paper-examples/5-inline_holography_3d_tracking.ipynb index c0641ced1..1592c7a1f 100644 --- a/examples/paper-examples/5-inline_holography_3d_tracking.ipynb +++ b/examples/paper-examples/5-inline_holography_3d_tracking.ipynb @@ -92,7 +92,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { @@ -2928,4 +2928,4 @@ }, "nbformat": 4, "nbformat_minor": 2 -} \ No newline at end of file +} diff --git a/examples/paper-examples/6-cell_counting.ipynb b/examples/paper-examples/6-cell_counting.ipynb index 277a4ee6b..99d925dab 100644 --- a/examples/paper-examples/6-cell_counting.ipynb +++ b/examples/paper-examples/6-cell_counting.ipynb @@ -92,7 +92,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { @@ -9290,4 +9290,4 @@ }, "nbformat": 4, "nbformat_minor": 2 -} \ No newline at end of file +} diff --git a/examples/paper-examples/7-GAN_image_generation.ipynb b/examples/paper-examples/7-GAN_image_generation.ipynb index b7bc91334..1f5a60f2a 100644 --- a/examples/paper-examples/7-GAN_image_generation.ipynb +++ b/examples/paper-examples/7-GAN_image_generation.ipynb @@ -21,7 +21,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { @@ -1683,4 +1683,4 @@ }, "nbformat": 4, "nbformat_minor": 4 -} \ No newline at end of file +} diff --git a/examples/tutorials/analyzing_video_tutorial.ipynb 
b/examples/tutorials/analyzing_video_tutorial.ipynb
index 285e7768a..77d617b9b 100644
--- a/examples/tutorials/analyzing_video_tutorial.ipynb
+++ b/examples/tutorials/analyzing_video_tutorial.ipynb
@@ -87,7 +87,7 @@
    "cell_type": "markdown",
    "metadata": {},
    "source": [
-    "\"Open"
+    "\"Open"
    ]
   },
   {
@@ -4156,4 +4156,4 @@
 },
 "nbformat": 4,
 "nbformat_minor": 2
-}
\ No newline at end of file
+}
diff --git a/examples/tutorials/characterizing_aberrations_tutorial.ipynb b/examples/tutorials/characterizing_aberrations_tutorial.ipynb
index a26d3eff1..ecf16aa89 100644
--- a/examples/tutorials/characterizing_aberrations_tutorial.ipynb
+++ b/examples/tutorials/characterizing_aberrations_tutorial.ipynb
@@ -14,7 +14,7 @@
    "cell_type": "markdown",
    "metadata": {},
    "source": [
-    "\"Open"
+    "\"Open"
    ]
   },
   {
@@ -543,7 +543,7 @@
     {
      "data": {
       "image/png": "<base64 PNG payload elided: embedded Matplotlib figure, unchanged context line>",
-      "image/svg+xml": "<SVG payload elided: Matplotlib v3.3.3 figure, created 2021-06-04T18:58:55>",
+      "image/svg+xml": "<SVG payload elided: near-identical re-encoding of the same figure; the exact differences are not recoverable from this extraction>",
       "text/plain": [
        "<Figure repr elided>
" ] @@ -590,4 +590,4 @@ }, "nbformat": 4, "nbformat_minor": 2 -} \ No newline at end of file +} diff --git a/examples/tutorials/classifying_MNIST_vit_tutorial.ipynb b/examples/tutorials/classifying_MNIST_vit_tutorial.ipynb index b506d3b38..36c0c1d2d 100644 --- a/examples/tutorials/classifying_MNIST_vit_tutorial.ipynb +++ b/examples/tutorials/classifying_MNIST_vit_tutorial.ipynb @@ -4,80 +4,14 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "\"Open " + "\"Open " ] }, { "cell_type": "code", "execution_count": 1, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Requirement already satisfied: deeptrack in c:\\users\\gu\\deeptrack\\deeptrack-2.0 (1.2.0)\n", - "Requirement already satisfied: tensorflow in c:\\users\\gu\\appdata\\local\\programs\\python\\python310\\lib\\site-packages (from deeptrack) (2.9.1)\n", - "Requirement already satisfied: tensorflow-probability in c:\\users\\gu\\appdata\\local\\programs\\python\\python310\\lib\\site-packages (from deeptrack) (0.17.0)\n", - "Requirement already satisfied: numpy in c:\\users\\gu\\appdata\\local\\programs\\python\\python310\\lib\\site-packages (from deeptrack) (1.23.0)\n", - "Requirement already satisfied: scipy in c:\\users\\gu\\appdata\\local\\programs\\python\\python310\\lib\\site-packages (from deeptrack) (1.8.1)\n", - "Requirement already satisfied: pint in c:\\users\\gu\\appdata\\local\\programs\\python\\python310\\lib\\site-packages (from deeptrack) (0.19.2)\n", - "Requirement already satisfied: scikit-image>=0.18.0 in c:\\users\\gu\\appdata\\local\\programs\\python\\python310\\lib\\site-packages (from deeptrack) (0.19.3)\n", - "Requirement already satisfied: pydeepimagej in c:\\users\\gu\\appdata\\local\\programs\\python\\python310\\lib\\site-packages (from deeptrack) (1.1.0)\n", - "Requirement already satisfied: more_itertools in c:\\users\\gu\\appdata\\local\\programs\\python\\python310\\lib\\site-packages (from deeptrack) (8.13.0)\n", - "Requirement already satisfied: tensorflow_addons in c:\\users\\gu\\appdata\\local\\programs\\python\\python310\\lib\\site-packages (from deeptrack) (0.17.1)\n", - "Requirement already satisfied: PyWavelets>=1.1.1 in c:\\users\\gu\\appdata\\local\\programs\\python\\python310\\lib\\site-packages (from scikit-image>=0.18.0->deeptrack) (1.3.0)\n", - "Requirement already satisfied: pillow!=7.1.0,!=7.1.1,!=8.3.0,>=6.1.0 in c:\\users\\gu\\appdata\\local\\programs\\python\\python310\\lib\\site-packages (from scikit-image>=0.18.0->deeptrack) (9.1.1)\n", - "Requirement already satisfied: networkx>=2.2 in c:\\users\\gu\\appdata\\local\\programs\\python\\python310\\lib\\site-packages (from scikit-image>=0.18.0->deeptrack) (2.8.4)\n", - "Requirement already satisfied: imageio>=2.4.1 in c:\\users\\gu\\appdata\\local\\programs\\python\\python310\\lib\\site-packages (from scikit-image>=0.18.0->deeptrack) (2.19.3)\n", - "Requirement already satisfied: packaging>=20.0 in c:\\users\\gu\\appdata\\local\\programs\\python\\python310\\lib\\site-packages (from scikit-image>=0.18.0->deeptrack) (21.3)\n", - "Requirement already satisfied: tifffile>=2019.7.26 in c:\\users\\gu\\appdata\\local\\programs\\python\\python310\\lib\\site-packages (from scikit-image>=0.18.0->deeptrack) (2022.5.4)\n", - "Requirement already satisfied: protobuf<3.20,>=3.9.2 in c:\\users\\gu\\appdata\\local\\programs\\python\\python310\\lib\\site-packages (from tensorflow->deeptrack) (3.19.4)\n", - "Requirement already satisfied: typing-extensions>=3.6.6 in 
c:\\users\\gu\\appdata\\local\\programs\\python\\python310\\lib\\site-packages (from tensorflow->deeptrack) (4.2.0)\n", - "Requirement already satisfied: google-pasta>=0.1.1 in c:\\users\\gu\\appdata\\local\\programs\\python\\python310\\lib\\site-packages (from tensorflow->deeptrack) (0.2.0)\n", - "Requirement already satisfied: tensorboard<2.10,>=2.9 in c:\\users\\gu\\appdata\\local\\programs\\python\\python310\\lib\\site-packages (from tensorflow->deeptrack) (2.9.1)\n", - "Requirement already satisfied: tensorflow-estimator<2.10.0,>=2.9.0rc0 in c:\\users\\gu\\appdata\\local\\programs\\python\\python310\\lib\\site-packages (from tensorflow->deeptrack) (2.9.0)\n", - "Requirement already satisfied: tensorflow-io-gcs-filesystem>=0.23.1 in c:\\users\\gu\\appdata\\local\\programs\\python\\python310\\lib\\site-packages (from tensorflow->deeptrack) (0.26.0)\n", - "Requirement already satisfied: gast<=0.4.0,>=0.2.1 in c:\\users\\gu\\appdata\\local\\programs\\python\\python310\\lib\\site-packages (from tensorflow->deeptrack) (0.4.0)\n", - "Requirement already satisfied: wrapt>=1.11.0 in c:\\users\\gu\\appdata\\local\\programs\\python\\python310\\lib\\site-packages (from tensorflow->deeptrack) (1.14.1)\n", - "Requirement already satisfied: opt-einsum>=2.3.2 in c:\\users\\gu\\appdata\\local\\programs\\python\\python310\\lib\\site-packages (from tensorflow->deeptrack) (3.3.0)\n", - "Requirement already satisfied: keras<2.10.0,>=2.9.0rc0 in c:\\users\\gu\\appdata\\local\\programs\\python\\python310\\lib\\site-packages (from tensorflow->deeptrack) (2.9.0)\n", - "Requirement already satisfied: libclang>=13.0.0 in c:\\users\\gu\\appdata\\local\\programs\\python\\python310\\lib\\site-packages (from tensorflow->deeptrack) (14.0.1)\n", - "Requirement already satisfied: absl-py>=1.0.0 in c:\\users\\gu\\appdata\\local\\programs\\python\\python310\\lib\\site-packages (from tensorflow->deeptrack) (1.1.0)\n", - "Requirement already satisfied: astunparse>=1.6.0 in c:\\users\\gu\\appdata\\local\\programs\\python\\python310\\lib\\site-packages (from tensorflow->deeptrack) (1.6.3)\n", - "Requirement already satisfied: flatbuffers<2,>=1.12 in c:\\users\\gu\\appdata\\local\\programs\\python\\python310\\lib\\site-packages (from tensorflow->deeptrack) (1.12)\n", - "Requirement already satisfied: termcolor>=1.1.0 in c:\\users\\gu\\appdata\\local\\programs\\python\\python310\\lib\\site-packages (from tensorflow->deeptrack) (1.1.0)\n", - "Requirement already satisfied: six>=1.12.0 in c:\\users\\gu\\appdata\\local\\programs\\python\\python310\\lib\\site-packages (from tensorflow->deeptrack) (1.16.0)\n", - "Requirement already satisfied: h5py>=2.9.0 in c:\\users\\gu\\appdata\\local\\programs\\python\\python310\\lib\\site-packages (from tensorflow->deeptrack) (3.7.0)\n", - "Requirement already satisfied: setuptools in c:\\users\\gu\\appdata\\local\\programs\\python\\python310\\lib\\site-packages (from tensorflow->deeptrack) (58.1.0)\n", - "Requirement already satisfied: keras-preprocessing>=1.1.1 in c:\\users\\gu\\appdata\\local\\programs\\python\\python310\\lib\\site-packages (from tensorflow->deeptrack) (1.1.2)\n", - "Requirement already satisfied: grpcio<2.0,>=1.24.3 in c:\\users\\gu\\appdata\\local\\programs\\python\\python310\\lib\\site-packages (from tensorflow->deeptrack) (1.47.0)\n", - "Requirement already satisfied: typeguard>=2.7 in c:\\users\\gu\\appdata\\local\\programs\\python\\python310\\lib\\site-packages (from tensorflow_addons->deeptrack) (2.13.3)\n", - "Requirement already satisfied: dm-tree in 
c:\\users\\gu\\appdata\\local\\programs\\python\\python310\\lib\\site-packages (from tensorflow-probability->deeptrack) (0.1.7)\n", - "Requirement already satisfied: cloudpickle>=1.3 in c:\\users\\gu\\appdata\\local\\programs\\python\\python310\\lib\\site-packages (from tensorflow-probability->deeptrack) (2.1.0)\n", - "Requirement already satisfied: decorator in c:\\users\\gu\\appdata\\local\\programs\\python\\python310\\lib\\site-packages (from tensorflow-probability->deeptrack) (5.1.1)\n", - "Requirement already satisfied: wheel<1.0,>=0.23.0 in c:\\users\\gu\\appdata\\local\\programs\\python\\python310\\lib\\site-packages (from astunparse>=1.6.0->tensorflow->deeptrack) (0.37.1)\n", - "Requirement already satisfied: pyparsing!=3.0.5,>=2.0.2 in c:\\users\\gu\\appdata\\local\\programs\\python\\python310\\lib\\site-packages (from packaging>=20.0->scikit-image>=0.18.0->deeptrack) (3.0.9)\n", - "Requirement already satisfied: google-auth<3,>=1.6.3 in c:\\users\\gu\\appdata\\local\\programs\\python\\python310\\lib\\site-packages (from tensorboard<2.10,>=2.9->tensorflow->deeptrack) (2.9.0)\n", - "Requirement already satisfied: markdown>=2.6.8 in c:\\users\\gu\\appdata\\local\\programs\\python\\python310\\lib\\site-packages (from tensorboard<2.10,>=2.9->tensorflow->deeptrack) (3.3.7)\n", - "Requirement already satisfied: google-auth-oauthlib<0.5,>=0.4.1 in c:\\users\\gu\\appdata\\local\\programs\\python\\python310\\lib\\site-packages (from tensorboard<2.10,>=2.9->tensorflow->deeptrack) (0.4.6)\n", - "Requirement already satisfied: tensorboard-plugin-wit>=1.6.0 in c:\\users\\gu\\appdata\\local\\programs\\python\\python310\\lib\\site-packages (from tensorboard<2.10,>=2.9->tensorflow->deeptrack) (1.8.1)\n", - "Requirement already satisfied: requests<3,>=2.21.0 in c:\\users\\gu\\appdata\\local\\programs\\python\\python310\\lib\\site-packages (from tensorboard<2.10,>=2.9->tensorflow->deeptrack) (2.28.0)\n", - "Requirement already satisfied: werkzeug>=1.0.1 in c:\\users\\gu\\appdata\\local\\programs\\python\\python310\\lib\\site-packages (from tensorboard<2.10,>=2.9->tensorflow->deeptrack) (2.1.2)\n", - "Requirement already satisfied: tensorboard-data-server<0.7.0,>=0.6.0 in c:\\users\\gu\\appdata\\local\\programs\\python\\python310\\lib\\site-packages (from tensorboard<2.10,>=2.9->tensorflow->deeptrack) (0.6.1)\n", - "Requirement already satisfied: pyasn1-modules>=0.2.1 in c:\\users\\gu\\appdata\\local\\programs\\python\\python310\\lib\\site-packages (from google-auth<3,>=1.6.3->tensorboard<2.10,>=2.9->tensorflow->deeptrack) (0.2.8)\n", - "Requirement already satisfied: cachetools<6.0,>=2.0.0 in c:\\users\\gu\\appdata\\local\\programs\\python\\python310\\lib\\site-packages (from google-auth<3,>=1.6.3->tensorboard<2.10,>=2.9->tensorflow->deeptrack) (5.2.0)\n", - "Requirement already satisfied: rsa<5,>=3.1.4 in c:\\users\\gu\\appdata\\local\\programs\\python\\python310\\lib\\site-packages (from google-auth<3,>=1.6.3->tensorboard<2.10,>=2.9->tensorflow->deeptrack) (4.8)\n", - "Requirement already satisfied: requests-oauthlib>=0.7.0 in c:\\users\\gu\\appdata\\local\\programs\\python\\python310\\lib\\site-packages (from google-auth-oauthlib<0.5,>=0.4.1->tensorboard<2.10,>=2.9->tensorflow->deeptrack) (1.3.1)\n", - "Requirement already satisfied: idna<4,>=2.5 in c:\\users\\gu\\appdata\\local\\programs\\python\\python310\\lib\\site-packages (from requests<3,>=2.21.0->tensorboard<2.10,>=2.9->tensorflow->deeptrack) (3.3)\n", - "Requirement already satisfied: charset-normalizer~=2.0.0 in 
c:\\users\\gu\\appdata\\local\\programs\\python\\python310\\lib\\site-packages (from requests<3,>=2.21.0->tensorboard<2.10,>=2.9->tensorflow->deeptrack) (2.0.12)\n", - "Requirement already satisfied: urllib3<1.27,>=1.21.1 in c:\\users\\gu\\appdata\\local\\programs\\python\\python310\\lib\\site-packages (from requests<3,>=2.21.0->tensorboard<2.10,>=2.9->tensorflow->deeptrack) (1.26.9)\n", - "Requirement already satisfied: certifi>=2017.4.17 in c:\\users\\gu\\appdata\\local\\programs\\python\\python310\\lib\\site-packages (from requests<3,>=2.21.0->tensorboard<2.10,>=2.9->tensorflow->deeptrack) (2022.6.15)\n", - "Requirement already satisfied: pyasn1<0.5.0,>=0.4.6 in c:\\users\\gu\\appdata\\local\\programs\\python\\python310\\lib\\site-packages (from pyasn1-modules>=0.2.1->google-auth<3,>=1.6.3->tensorboard<2.10,>=2.9->tensorflow->deeptrack) (0.4.8)\n", - "Requirement already satisfied: oauthlib>=3.0.0 in c:\\users\\gu\\appdata\\local\\programs\\python\\python310\\lib\\site-packages (from requests-oauthlib>=0.7.0->google-auth-oauthlib<0.5,>=0.4.1->tensorboard<2.10,>=2.9->tensorflow->deeptrack) (3.2.0)\n" - ] - } - ], + "outputs": [], "source": [ "%matplotlib inline\n", "\n", @@ -102,11 +36,21 @@ "execution_count": 2, "metadata": {}, "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "c:\\GU\\DeepTrack-2.0\\deeptrack\\backend\\_config.py:11: UserWarning: cupy not installed. GPU-accelerated simulations will not be possible\n", + " warnings.warn(\n", + "c:\\GU\\DeepTrack-2.0\\deeptrack\\backend\\_config.py:25: UserWarning: cupy not installed, CPU acceleration not enabled\n", + " warnings.warn(\"cupy not installed, CPU acceleration not enabled\")\n" + ] + }, { "name": "stdout", "output_type": "stream", "text": [ - "MNIST already downloaded! 
+    "Dataset already downloaded.\n"
   ]
  }
 ],
@@ -254,7 +198,7 @@
  "outputs": [
   {
    "data": {
-    "image/png": "[... base64 PNG elided (figure rendered with Matplotlib v3.3.3) ...]",
+    "image/png": "[... base64 PNG elided (the same figure re-rendered with Matplotlib v3.3.2) ...]",
    "text/plain": [
     "
" ]
[... six further hunks elided (@@ -266,7 +210,7 @@, @@ -278,7 +222,7 @@, @@ -290,7 +234,7 @@, @@ -302,7 +246,7 @@, @@ -314,7 +258,7 @@, @@ -326,7 +270,7 @@), each swapping a Matplotlib v3.3.3 "image/png" payload for its v3.3.2 re-render in the same way ...]
" ]
@@ -338,7 +282,7 @@
 },
 {
  "data": {
-   "image/png": "[... base64 PNG elided (Matplotlib v3.3.3 render) ...]",
+   "image/png": "[... base64 PNG elided (Matplotlib v3.3.2 render) ...]",
   "text/plain": [
    "
" ] @@ -380,14 +324,43 @@ "metadata": {}, "outputs": [ { - "ename": "AttributeError", - "evalue": "module 'deeptrack.models' has no attribute 'ViT'", - "output_type": "error", - "traceback": [ - "\u001b[1;31m---------------------------------------------------------------------------\u001b[0m", - "\u001b[1;31mAttributeError\u001b[0m Traceback (most recent call last)", - "\u001b[1;32m\u001b[0m in \u001b[0;36m\u001b[1;34m\u001b[0m\n\u001b[1;32m----> 1\u001b[1;33m model = dt.models.ViT(\n\u001b[0m\u001b[0;32m 2\u001b[0m \u001b[0minput_shape\u001b[0m\u001b[1;33m=\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;36m28\u001b[0m\u001b[1;33m,\u001b[0m \u001b[1;36m28\u001b[0m\u001b[1;33m,\u001b[0m \u001b[1;36m1\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m,\u001b[0m \u001b[1;31m# Size of the images to be analyzed\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 3\u001b[0m \u001b[0mpatch_shape\u001b[0m\u001b[1;33m=\u001b[0m\u001b[1;36m4\u001b[0m\u001b[1;33m,\u001b[0m \u001b[1;31m# Size of the patches to be extracted from the input images.\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 4\u001b[0m \u001b[0mnum_layers\u001b[0m\u001b[1;33m=\u001b[0m\u001b[1;36m4\u001b[0m\u001b[1;33m,\u001b[0m \u001b[1;31m# Number of Transformer layers in the ViT model.\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 5\u001b[0m \u001b[0mhidden_size\u001b[0m\u001b[1;33m=\u001b[0m\u001b[1;36m72\u001b[0m\u001b[1;33m,\u001b[0m \u001b[1;31m# Size of the hidden layers in the ViT model\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n", - "\u001b[1;31mAttributeError\u001b[0m: module 'deeptrack.models' has no attribute 'ViT'" + "name": "stdout", + "output_type": "stream", + "text": [ + "Model: \"functional_1\"\n", + "_________________________________________________________________\n", + "Layer (type) Output Shape Param # \n", + "=================================================================\n", + "input_1 (InputLayer) [(None, 28, 28, 1)] 0 \n", + "_________________________________________________________________\n", + "embedding (Conv2D) (None, 7, 7, 72) 1224 \n", + "_________________________________________________________________\n", + "reshape (Reshape) (None, 49, 72) 0 \n", + "_________________________________________________________________\n", + "class_token (ClassToken) (None, 50, 72) 72 \n", + "_________________________________________________________________\n", + "Transformer/posembed_input ( (None, 50, 72) 3600 \n", + "_________________________________________________________________\n", + "Transformer/encoderblock_0 ( ((None, 50, 72), (None, 1 58216 \n", + "_________________________________________________________________\n", + "Transformer/encoderblock_1 ( ((None, 50, 72), (None, 1 58216 \n", + "_________________________________________________________________\n", + "Transformer/encoderblock_2 ( ((None, 50, 72), (None, 1 58216 \n", + "_________________________________________________________________\n", + "Transformer/encoderblock_3 ( ((None, 50, 72), (None, 1 58216 \n", + "_________________________________________________________________\n", + "RetrieveClassToken (Lambda) (None, 72) 0 \n", + "_________________________________________________________________\n", + "layer_1 (Layer) (None, 72) 0 \n", + "_________________________________________________________________\n", + "layer_normalization_8 (Layer (None, 72) 144 \n", + "_________________________________________________________________\n", + "cls_prediction (Dense) (None, 10) 730 \n", 
+ "=================================================================\n", + "Total params: 238,634\n", + "Trainable params: 238,634\n", + "Non-trainable params: 0\n", + "_________________________________________________________________\n" ] } ], @@ -395,14 +368,11 @@ "model = dt.models.ViT(\n", " input_shape=(28, 28, 1), # Size of the images to be analyzed\n", " patch_shape=4, # Size of the patches to be extracted from the input images.\n", - " num_layers=4, # Number of Transformer layers in the ViT model.\n", " hidden_size=72, # Size of the hidden layers in the ViT model\n", - " number_of_heads=12, # Number of attention heads in each Transformer layer\n", - " fwd_mlp_dim=256, # Size of the hidden layers in the forward MLP of the Transformer layers.\n", - " dropout=0.1, # Dropout rate of the forward MLP in the Transformer layers.\n", - " include_top=True, # Whether to include the top layer of the ViT model.\n", - " output_size=10, # Size of the output layer of the ViT model (i.e., the number of classes).\n", - " output_activation=\"linear\", # The activation function of the output.\n", + " number_of_transformer_layers=4, # Number of Transformer layers in the ViT model.\n", + " base_fwd_mlp_dimensions=256, # Size of the hidden layers in the forward MLP of the Transformer layers.\n", + " number_of_cls_outputs=10, # Size of the output layer of the ViT model (i.e., the number of classes).\n", + " cls_output_activation=\"linear\", # The activation function of the output.\n", " )\n", "\n", "\n", @@ -426,7 +396,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 10, "metadata": {}, "outputs": [ { @@ -435,211 +405,210 @@ "text": [ "Generating 1001 / 1000 samples before starting training\n", "Epoch 1/100\n", - "7/7 [==============================] - ETA: 0s - loss: 2.5730 - accuracy: 0.0904WARNING:tensorflow:Keras is training/fitting/evaluating on array-like data. 
@@ -435,211 +405,210 @@
    "text": [
     "Generating 1001 / 1000 samples before starting training\n",
     "Epoch 1/100\n",
-    "7/7 [==============================] - ETA: 0s - loss: 2.5730 - accuracy: 0.0904WARNING:tensorflow:Keras is training/fitting/evaluating on array-like data. Keras may not be optimized for this format, so if your input data format is supported by TensorFlow I/O (https://github.com/tensorflow/io) we recommend using that to load a Dataset instead.\n",
-    "7/7 [==============================] - 2s 263ms/step - loss: 2.5730 - accuracy: 0.0904 - val_loss: 2.2582 - val_accuracy: 0.1260\n",
+    "62/62 [==============================] - 7s 121ms/step - loss: 2.4491 - accuracy: 0.1280 - val_loss: 2.1479 - val_accuracy: 0.2240\n",
     <epochs 2-99 elided: alternating -/+ Keras progress lines in the same format; the old run takes 7/7 steps per epoch, the new run 62/62>
     "Epoch 100/100\n",
-    "7/7 [==============================] - 2s 349ms/step - loss: 0.2340 - accuracy: 0.9330 - val_loss: 0.1487 - val_accuracy: 0.9540\n"
+    "62/62 [==============================] - 3s 49ms/step - loss: 0.3303 - accuracy: 0.9073 - val_loss: 0.2311 - val_accuracy: 0.9260\n"
    ]
   },
   {
    "data": {
-    "image/png": "<base64-encoded PNG (training-curve figure, old run) elided>",
+    "image/png":
"iVBORw0KGgoAAAANSUhEUgAAAZsAAAEHCAYAAAB4POvAAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8vihELAAAACXBIWXMAAAsTAAALEwEAmpwYAAA900lEQVR4nO3deXhU5fn4//edkBACIQkhrAESWQQEQsIugixSEVkEBUGrsogFqy22dWs/1S76U1vbaq1FUdwVrOACuGBBKHyVfd8hYZGwJCEJIfv6/P54JkMSEphAJhMy9+u65krOmbPcZyY593mW8xwxxqCUUkq5k4+nA1BKKVX3abJRSinldppslFJKuZ0mG6WUUm6nyUYppZTbabJRSinldvU8HUBt1bRpUxMZGenpMJRS6qqyZcuWM8aY8PLzNdlUIjIyks2bN3s6DKWUuqqIyLGK5ntFshGRhsC/gXxgtTHmQw+HpJRSXuWqbbMRkbdEJElEdpebP1JEDohInIg84Zg9AVhkjJkJjK3xYJVSystdtckGeAcYWXqGiPgCrwK3AF2BKSLSFYgAjjsWK6rBGJVSSnEVJxtjzBogtdzsvkCcMeawMSYfWAiMAxKwCQeu4mNWSqmrVV078bbmfAkGbJJpDXwK3C4ic4Glla0sIg+IyGYR2ZycnOzeSJVSyot4RQcBY0wWMM2F5eYB8wB69+6tw2ErpVQ1qWslmxNAm1LTEY55SimlPKiulWw2AR1FJAqbZCYDd3k2JKWUcoExcOIEbNkChw/DnXdCq1aejqraXLXJRkQWAEOApiKSADxtjJkvIg8BywFf4C1jzB4PhqmUcoeMDPjuO4iPtyfm7GwYPRpuuQUaNKh4nbw8qF//0ts+dw4+/BDefRcaNYIRI+yrRQtIS4PUVAgNhc6doZ7jFFpcDAcPQlKSfS80FJo0sbGInN92UREcPQorVsB//wubN9t5ADk5kJJyftk//QleeQXuvhvy8+G99+D116F5cxvP8OGQlWW3sX27jeeBB6Bx4wuP6cwZWLkS0tMhNha6dwcfH1i/Hr791n42v/sdBAe78ulfFtEndVasd+/eRkcQUG6Vl2dPXmlp9qQRHGxPUCEh4Otb8TrG2BPEhx/aE1zJSa1NG2jf3r5Kn2x8qqGmvKAAfvgB4uLsyf3UKbuP0FB74hs/3p6Iy8vPh1277InQx8cuHxJiT3yHD9uXMTb+0NCySaJ1axg2zC5f/vg/+QTmzLFxgF1GxH6OjRrBqFHQtav9LEJDYc0ae0Ldvt0u2749dOwIEybAbbeBn5/dzvbtMHeu/WyzsqBnT5sIdu2q+HMJDISYGLv+li02AZbn72+PLyAAzp61rxIREXDDDXY7YLfTrRv06mWP42c/g3Xr4KabYO9eOHkSoqNtYj10qOx+QkPt8YeEwEMPQZ8+9ruKj7fb2LbNfnYl/PxsbFlZ9rsRgXbt4D//sfu/AiKyxRjT+4L5mmwqpslGVStj7FXqW2/Zq9fUVHvSqEi9evak06ePPZmVnIzS0uzV9vbtdl5goN1OcXHl++3cGSZPtq/8fFi4ED7+GBIT4ZproEMHmzBKrr7Dw+H22+2VrzHwxRfw+OP2qr0ktubNITPTXiWXzJswAe67z17Zb9pkXzt22H1WJizMJtW0NJvQyvPxgX797OcQFmZP2suWwfLl9ur8L3+xn0+TJlBYCKtW2WP79ls4XqpTar16MHCgPbGnpdkT8M6dNlm1aAFTptgT8vr1NuFNngyzZ0Pv3vZzOXXKlqIyM88nzKQkW6LYvNnG3ru3fUVE2IRSUgIquZjIyTl/YdCiBQwZAtdeW7bU45Cem857O97j7usm02TuO/DHP9rP4Le/tYlHxJaOVq+2sfTubZPz5s3w3HPw2WfnN9a4sU1QP/mJfYWH28S4ebNNNMOGwdChNpndeac9rr//HR58sMLYXKHJpoo02XiZ7dth0iSbAJo0sSe3Pn3g5pvtSepi1S/r1sFjj9mTa4kOHeCJJ+yJOzMTpk+HTz+FAQNsAiipagkLsz8DA+3JOy3NXsFu3WpP2KWvhMGeOB58EO66y179Fhfbap9jx8pWKYG9Kl+zBv73v/NXtb6+9oTVqZNdNj7eljRKlCSv666zJ6p160iPasV746OYPnMuDTt0OV91VFhoSztvvAFvv21jBwgKslfHffrYE2Fs7PmkkpZmP99rrjlfZWOMPfHl5Z2f3r/fJpVvv7UnwszM89t+9ll48EGMjw9S2QkxNxeOHIHkZJuQgoLKvl9UBN98A//+N3z9tf08Zs+Ge+/FhISw9se1vL7ldSKDI3lkwCM0DWxa4W7Sc9MpKC6o9P2q2nRiE5MXT+Zw2mFubHcj397zLf6+/lXbSFycvaBp397+fbmaNFJS4N57bXXbnj12/cugyaaKNNlcpQoL7T/LRx/ZE3b37vbqOCbGXpGHhdmrwXqlmiv374fBg21CGTHCnnATE+0VYEGBTQRDh8LIkfYVHm6XOX0aXnrJVj20bAkTJ9qTqjH2RLZ/v00sBQX2SvQvf4FHHnH9n7+kwbjkqr9ePXvlXNUrzpMnbaIrKYE0a1b5sklJsGgRLFhAccJx3r65GT9rtokiX/hV/1/xt5v/VvF62dk2sbVrZ6/Yr6D67uGvHiYxK5HXRr9GkwZN7MyCApt4GzSARo14atVTfLznY7679ztaN2592fsCbPVXo0YkZ59hxeEVvLzhZTac2EBIQAjpuekE+gXyYJ8HefT6RwlveH4w462ntjJmwRiKiotYf/96IkMine8dTDnItlPbmHjdRHzk4p9FZn4mJ86dYMmBJfzuu9/RolEL7o2+l2fXPssDsQ/w2ujXnEl1b/JemgY2pVnDi3yHVZSUlcSKwysoNsVQXEzoweOMuP3Rqic5B002VaTJphbJz4cPPrBXqh062FdIiD3hp6bak2lcnK3HXr/eXs0GB0P//vYKLSGh7Pb8/W0p5uGHbeIYNMgmqbVrbV1+icxMWzWzfLm9+j18+MLYAgPh0UfhN7+xJY0SRUWweDE884yNceFCW0KqxXIKctidtJtDqYeIS43jrW1vkXAugadvfJrj544zf9t8Nt6/kV6tLr9O/8CZA4Q3DD+fRMpZdnAZYxaMASAqJIpP7/yUni16lllm4e6FTFk8BYA+rfrwv6n/o4Gfbe/JL8pn/5n9dA3vSj2fyvs/ZeRlsCd5DzsTd7L99HbW/riW3Ul2mMX2oe359YBfc1/P+zh29hjPrn2WBbsXEOgXyKPXP8qvBvyKVUdWMXnxZJoGNuVc3jlaNmrJ99O/J7RBKGuOrWHcwnGczT3LwDYDeXPsm3Ru2tm5b2MM6xLW8cbWN/hi/xek5aY537ut823MHzufJg2a8OSKJ3n+++d5eeTLRIVE8cL3L/D98e8B6BrelSHthvBArweIbhFdZttLDixhfcJ6UnNSSclJIbcw1/l+hyYd+PWAX9Mm2N4h8uXBL5n2xTSSs8vexJ72eBohASGVfn4Xo8nGRSIyBhjToUOHmYfKN8Kpim3YAK+9BlOnwo03np
9fVGTrhnNy7LSI7coZGWkbKPPzbfXVxo2wb59NFvHxtk571Chbiti61VabHDtm16/s7zUgwCahbt1sIhk16nzV18mTtpG3pK1k3z54/317RduggV139Wro0ePixxkXZ6t1cnJsVVCTJrbUVFHjeAljbLVUZQ3+l1BQVMAPx3/gq0Nf8f+O/z8iQyLp37o//SP606tVr0teNVd4GKlxxKXGkV2QTU5BDodSD7Hq6CrWJ6wnv8i2sQhC9+bdmTd6Hv0i+nE29yxdX+1Ki0Yt2DhzY5kTecmVeWpOKr1b9cbP1++CY1i8bzEvb3iZ9Qnrqe9bnzu63sEDvR5gUNtBzqv2zPxMrvv3dTTyb8Rrt77GlMVTSMlJ4W8/+RszYmZQv159dibuZMD8AcS0iGFO/zlM/GQiP+3xU9677T22nNrC9C+msytpF00aNOG2a29jRPsRJGclOxNowrkEEs4llDnBB/kHMaDNAIa0G8KNkTfSr3U/fH3Kfl/7z+zn/777PxbvW0zTwKakZKfQq1Uvlk5Zyr7kfdz8wc0MbDuQWb1mcd/n9xEVGsXs3rP5w+o/kFWQxYyYGQhCam4qOxN3sjd5L438G3F7l9vp0rQLEY0jiAqNYkDEAOfnUWyKmfDxBL448AUA7YLb8ct+vyS/KJ/Vx1az9tha8oryeHzg4/x+8O85k32G2V/OZunBpfj5+NGkQRPCAsNoUM8mYoNhV+IuRIT7Y+5HRHh106tEN49m7q1zy5TaokKiLvgMXKXJpoq0ZOOC4mJbNfT739uSAdjqpocesr2XPvrofI+h0kqqg06ePN+AHBxsSxXt29uEU/qz79vXNpIOH26row4dsomipNG4WTObxKpSdZORYRvbv/jCJrO+fStcLCkriVnLZnF9m+v51YBfXdbJvURRcREZ+RmVXjGm5qTyTdw3rDm2hmPpx0g4l8CRtCNkFWTh5+NH71a9OZZ+jJMZJwGIDIlkZuxMpsdM50z2Gb48+CWrjq5iaORQHh34aJlYt5zcwgc7P+DLQ19yKLXsRZSP+BDTIoYhkUO4vs31XBt2Le2btCegXkCZ5RbvXcwdn9zBM0OfoWt4VxbtW8TyuOWk5Jzvrts+tD1P3/g0d3W/i5MZJ3lz65u8ue1NTmacpEOTDszuPZv41Hg+2PUB5/LOMajtIN4a95a94l7+a/6+/u+snbaWG9reQGJmIlMWT2HV0VU0a9iMWb1m8cGuD8gpyGHrz7bSolELnlnzDL9f9XtGXDOClUdW0rxhcx4f+DibTm5i6cGlnMs7B0Aj/0Z0aNKBtsFtiQiKIKJxBF3Du9KjeQ/ahbRz+Xtdn7Cep1Y9RXjDcN4Y8waBfrbzxgc7P+Cez+4BoH9Ef5ZNWUZYYBiJmYn88ptfsnjfYhrXb0xYgzAiGkdwV/e7mNxtMo38G11sd2TmZ/LEiicYEDGASddNKpPIU3NS+dXyX/HujnfpFNaJ05mnKSgq4M9D/8yc/nMqTBYlJbW3t79NYXEhc/rN4bmbnrvgu74SmmyqyKuTTWqqPdmnptrSQIMGtotoE0fVhzG2xPHYY7aXzqRJtu3i44/h+edte4efny1dTJ58/sq/qAh+/NEmiyNHbMLp18++yrdFJCba+xCaNz/fAwfIK8zjj//7I8fSj/H88Oed1QHuEJcax8gPRnLk7BGKTTE3t7+Z98a/56wvT89NZ0fiDnac3sGOxB2ENQjj/tj76RjW8YJtbTqxiZlLZ7IjcQddmnZhWNQwujfrTlJWEgnnEtiTvId1CesoNsWEBITQPrQ9EY0jaBfcjiGRQxh+zXAa17ddmhPOJbD66Gre2vYWq46uKrOfyJBIjp49yq0db+X98e/jIz48ufJJXtv8Gv6+/gyNGsqoDqPo1aoXDf0a0sCvAc0bNic44NL3VxhjuO3j21hyYAkAYQ3CGHPtGDqHdSaisR3n9sV1L7L99HZaB7XmVOYpjDHc3OFmft7n54zqOMp5Us/Kz+K9He/x5MonyS/K56G+D/G3dX/j/pj7eX3M62X2ufLISl5a/xJfHvoSPx8//jf1fwxoM8D5/p2L7uSTvZ8wI2YGL/7kRWcyzyvMY3fSblo3bk3zhs0r70xQTV7d+CrbT2/n5Vtediah0sfhrv0vj1vO7C9nExUaxeujX6dDkw6XXOfo2aMkZyXTp3Wfao9Hk00VeWWyOXjQJo133jlf9VXC3x/GjLFVTR9/bHsIBQba7rzTpp1PFNnZtvdT37625FGN9iTt4e5P72ZH4g78ff3x9/XnueHPMbv37Auu4owxnMs7R+P6jS/4JzfGsPXUVpYeXMq6hHXOq8au4V2d769PWM+4heMoNsUsu2sZO07vYM7yOYQEhNA/oj/bT2/n6Nmjzm2GNQgjPS+dwuJCbrrmJsZ3Hk94YDhhgWEsObCEf274Jy2DWjK953Q2n9rM2mNrySrIAiA8MJxrQq9hxDUjuLXTrfRp1cflKoyDKQdZsGsBrYJaMarjKFoFtWLu5rnM+WYOrRu3Jq8wj8SsRH7R9xf8cegfnQnrciVmJvLKxlcYGjmUGyNvvKBdpNgU89m+z3hj6xv0atmL+2PvJyo0qtLtnTh3gllfzmLZwWU0b9icfT/fR2iD0EqPNTM/k9iWsWXmFxYXciTtSIVJ3lu4M5lVlSabKvKaZGOM7UX0t7/B0qU2qfz0p/au5RYtbMI4ccLevfzRR7bx/frrbRfJSZNst103y8zP5F8b/8Uf//dHgvyDmD92Pt2adWP2l7NZHr+cDk060LNFTzo16USgXyAbTmzgh+M/kJKTQoN6DYhoHEHzRs3JL8onuyCbpKwkkrKSEIROYZ04mHIQg+HasGsxGBLOJZBdkE1USBTf/PQbOoV1AmBX4i5mLJlBRn4G0c2j6dmiJ9HNo4luEU3LRi05nXma+dvmM2/LPI6fO3+fhyA82OdBnh32rLMEkV+Uz6mMUzRv1LxaqzBKbEjYwKRFkwhrEMa8MfPo3eqC//1awxjD0oNLaR3U+oo6H6jaQZNNFdWpZFNUZBvYDxywpZdzth6b4mJ7k9zmzdC0qb3P4Oc/t1VXFSkosFVrlb1fTk5BDp/s/YSx1451qWdLflE+YxeMJTUnlZuuuYlhUcPYfHIzf1v3N85kn2FMpzG8MeYNmjey+zfGsHD3Qj7a/REHUw4SnxpPkSmic9PODIgYQOemnUnMTCQhI4GkrCQC6gUQ6BdI4/qNGdJuCKM6jiK8YTinMk7x6b5P+SruKxr6NaRN4za0C2nHlG5TyjSauqqouIjErERSslNIyUmhecPmdAnvUuXtXKnC4kJ8xbfWXPEq76DJporqTLJZscLeUFj6jurSrr3W3vtx772Vjyl1GfYl7+PORXeyK2kXfVr14dt7vi2TcIpN8QWNsr9a/iv+sf4f9G7Vm+2nt1NYbDsdjOwwkqcGP+Wsp69MQVEBOYU5V1xVpJS6fJUlm6t2IE51CdnZ9g72V16xCWXePOjSxf7epNQ9DiXjIrngdOZpPtv3GY3rN6ZpYFOaN2pOhyYdyvSoSctJ45O9n/DI8
kdo6NeQP9z4B55d+yw3f3Az3/70W/x8/Xhp/Uu88P0LjOo4itdHv07j+o1ZdnAZ/1j/D37e5+f8a9S/yMjLYO2Pa2nRqMUFdfSV8fP1u6DbrVKqdtCSTSWuupJNfDy8+OL54Ud+/NH2JPvlL+14SVdYajmZcZLBbw8mPi3+gvciGkfQOqg1R88eJTErEYDhUcN5f/z7tAxqyZIDS7jjP3fQJbwLZ7LPcDLjJIPaDuKH4z8QGRLJP27+B9O+mEab4Dasm7HOLW0YSqmaodVoVXTVJJszZ+DPf7aj1fr52eFZmja1r3vvtQPtXYbSvVsSMxMZ8u4QEs4l8MXkL4hoHEFKdgonM05yIOUA+8/s50TGCSKDI+nctDPdmnXjJ+1/UqZH1Rf7v2DiJxPp1aoXfx3xV25oewPf//g9kxdPJuFcAg39GrL1Z1udjfFKqauTJpsquiqSzZ49dqiV9HSYMcPe+Niy5RVtMis/i/s+v48Vh1fQL6Ifg9oO4uM9H3M47TDf3P0Ng9oNuuxtn8s7R5B/UJkG65TsFH678reM7jSaMdeOuaLYlVKep8mmimp9ssnIsKPqnj1rOwF063bFm0zOSmb0gtFsPrmZO6+7k91Ju9mVtIuAegF8edeXDIu6vFKSUsp7aAcBF5UaG83ToVTOGFuSiYuzIxxXQ6I5mHKQWz+6lYRzCXw66VPGdR4H2CExgEoHTlRKKVdUw2P86hZjzFJjzAPBbnw86hV7+WX7tMLnnis78OVlOJJ2hJlLZnLdv68jLSeN7+79zplowCYZTTRKqSulJZurSVaWHXvs+eftWGW/+U2VVt96aisvrX+Jw2l2qPwiU8Tmk5vxFV9m9ZrFEzc8ceXPBlFKqQposrkaGGPHI3v0Uftslrvusr3PKrk/xhjDgt0LOJVxikC/QPx8/fh4z8esOLyCIP8g+rTug2DXfbjvw/zm+t/QKqhVTR6RUsrLaLK5GixYYMcqi421D+EaOLDSRfMK87h/6f18sPODMvNbNmrJCze9wM96/cylEX6VUqo6abKp7fLz7fNieva0Dxkr9RCu/KJ8frfyd7QMasmwqGG0DmrN7f+5nbU/ruWZoc/wi36/IKsgi+yCbCIaR1z2Y16VUupKabKp7ebPt48j/uqrC572+OXBL3lx3YvOaR/xwc/HjwW3L2Byt8kABNUPqtFwlVKqIppsarPsbDs6wA032Eckl7Ng9wKaNWzGhvs3sPbYWrac2sLkbpPpH9HfA8EqpVTlNNnUZv/6l32s8scfX9AZICMvg6UHlzIjZgaRIZFEhkRyT/Q9HgpUKaUuTu+zqa3OnrVdnG+5xQ5JU84XB74gtzCXKd2m1HxsSilVRZpsaqsPPoC0NFuNVoEFuxfQNrjtJZ/xopRStYEmm9pq2TLo1Al6XfiY3JTsFL6N/5bJ102+4AFkSilVG+mZqjbKzIRVq2D06ArfXrR3EYXFhUzprlVoSqmrgyabckRkjIjMS09P91wQK1fa+2tuvdU5KyMvg6LiIgA+2v0RnZt2Jrp5tKciVEqpKtFkU06tGIhz2TJo3Nh2eQZWHl5J4+cb4/+MPy1ebMGaY2uY0m1KmefCKKVUbaZdn2ub4mL48ku4+Wbwt3f8P//987Rs1JL7Y+/ndOZpMvIzmBk708OBKqWU6zTZ1Dbbttl7axztNTtO72DF4RU8P/x5Hr/hcQ8Hp5RSl0er0WqbZcvsDZy33ALA39f/nYZ+DXmg1wMeDkwppS6fJpvaZtky6NcPwsM5mXGSBbsWMD1mOqENQj0dmVJKXTZNNrXJ6dOwebOzCu1fG/9FYXEhc/rP8WxcSil1hTTZ1CZLl9qft95KVn4Wr21+jfFdxnNN6DWejUsppa6QJpva5M03oUsXiI5m0d5FpOWm8Uj/RzwdlVJKXTFNNrXFtm324WizZoEIK46sIDwwnIFtKn8qp1JKXS002dQWr78OAQFwzz0YY1h5eCXDoobpjZtKqTrBq5KNiFwjIvNFZJGnYykjIwM+/BAmT4bQUPaf2c+pzFMMjxru6ciUUqpauDXZiEiIiCwSkf0isk9ELms8fBF5S0SSRGR3Be+NFJEDIhInIk9cbDvGmMPGmBmXE4NbffSRHXxz1iwAvjvyHQDDr9Fko5SqG9w9gsDLwDfGmDtExB8ILP2miDQDcowxGaXmdTDGxJXbzjvAv4D3yq3vC7wKjAASgE0isgTwBZ4rt43pxpikKz+kamYMzJ0LPXtC374ArDyyknbB7YgKifJsbEopVU3clmxEJBgYDEwFMMbkA/nlFrsRmCUio4wxeSIyE5gA3FJ6IWPMGhGJrGA3fYE4Y8xhxz4XAuOMMc8BFY/PX9ts3Ag7dtiEI0JRcRGrjq5iQucJ2l6jlKoz3FmNFgUkA2+LyDYReVNEGpZewBjzCbAc+FhE7gamAxOrsI/WwPFS0wmOeRUSkTAReQ2IEZEnK1mmZh8x8Pnn4OcHd98NwPbT2zmbe1ar0JRSdYo7k009IBaYa4yJAbKAC9pUjDF/AXKBucBYY0ymuwIyxqQYY2YZY9o7Sj8VLVOzjxg4cAA6dICgIMBWoQEMixpWM/tXSqka4M5kkwAkGGM2OKYXYZNPGSIyCOgGfAY8XcV9nADalJqOcMy7ehw6BB07OidXHllJ1/CutGjUwoNBKaVU9XJbsjHGnAaOi8i1jlnDgb2llxGRGGAeMA6YBoSJyDNV2M0moKOIRDk6IEwGllxx8DWluBji4pzJJr8on7XH1mqXZ6VUnePu+2weBj4UkZ1AT+D/K/d+IDDJGBNvjCkG7gWOld+IiCwA1gHXikiCiMwAMMYUAg9h2332Af8xxuxx18FUuxMnIDfXmWzWJ6wnpzBHk41Sqs5xa9dnY8x2oPdF3v++3HQB8EYFy025yDa+Ar66/Cg96NAh+9ORbP539H8IwuB2gz0YlFJKVT+vGkGg1imXbNYlrKNreFd9do1Sqs7RZONJhw7Z8dBat6bYFLM+YT0DIi5rkAWllKrVNNl40sGDttuzjw8HUw6SlpvGgDaabJRSdY8mG08q1e153fF1AFqyUUrVSZpsPKWoCA4fLtNeExIQwrVNr73EikopdfXRZOMpP/4I+fllkk3/iP74iH4lSqm6R89snlLSE61TJ9Jz09mTtEer0JRSdZYmG08p1e1544mNGAz9I/p7NiallHITTTaecugQNGoELVqwLmEdgtCvdT9PR6WUUm6hycZTDh2y3Z5FnDdzBgfU0EjTSilVwzTZeMrBg9CxI8WmmA0JG7S9RilVp2my8YSCAjhyBDp21Js5lVJeQZNNOTXypM6jR+19Nh076s2cSimvoMmmnBp5Umepnmj7zuzD39dfb+ZUStVpmmw8oVSySc5OJjwwXG/mVErVaXqG84RDh6BxYwgP50z2GcIbhns6IqWUcitNNp4QH+/s9pycZUs2SilVl2my8YT4eGjfHsBWo2nJRilVx2myqWmFhbbbc0myyUqmaYOmHg5KKaXcS5NNTTt+3CacDh3IK8wjIz9DSzZKqTpPk01Ni4+3P9u350z2GQBt
s1FK1XmabGpaqWSTnJ0MoCUbpVSdp8mmpsXFQf360Lo1yVk22TQN1DYbpVTdpsmmpsXHQ1QU+PhoNZpSymtosqlpJffYgFajKaW8hiabmmRM2XtsspIRhNCAUA8HppRS7nXJZOMYBVmTUnVISoKsLGeyOZN9hrDAMHx9fD0cmFJKuZcrSeRO4JCI/EVEOrs7oDotLs7+LD16gLbXKKW8wCWTjTHmp0AMEA+8IyLrROQBEQlye3R1Taluz6BD1SilvIdL1WPGmHPAImAh0BIYD2wVkYfdGFvdEx8PPj4QGQk4hqrRbs9KKS/gSpvNWBH5DFgN+AF9jTG3ANHAr90bXvUSkWtEZL6ILPJIAPHx0KaNvc8G22aj1WhKKW/gSsnmduAfxpjuxpi/GmOSAIwx2cCMS60sIr4isk1Ell1ukCLylogkicjuCt4bKSIHRCRORJ642HaMMYeNMZeM2W3i4pxVaMWmmJScFE02Simv4Eqy+QOwsWRCRBqISCSAMWalC+v/EthX0Rsi0qx824+IdKhg0XeAkRWs7wu8CtwCdAWmiEhXEekuIsvKvZq5EKt7ler2nJqTSrEp1jYbpZRXcCXZfAIUl5oucsy7JBGJAG4F3qxkkRuBz0WkvmP5mcAr5RcyxqwBUitYvy8Q5yix5GPblMYZY3YZY0aXeyW5ErPbnDsHZ844b+gsGT1A22yUUt7AlWRTz3EiB8Dxu7+L238JeIyyycrJGPMJsBz4WETuBqYDE13cNkBr4Hip6QTHvAqJSJiIvAbEiMiTlSwzRkTmpaenVyEMF5TvieYYF02r0ZRS3sCVZJMsImNLJkRkHHDmUiuJyGggyRiz5WLLGWP+AuQCc4GxxphMF2K6LMaYFGPMLGNMe2PMc5Uss9QY80BwcHD17ryCbs+gQ9UopbyDK8lmFvBbEflRRI4DjwM/c2G9gcBYETmKrd4aJiIflF9IRAYB3YDPgKddDdzhBNCm1HSEY17tU/6GTh3xWSnlRVy5qTPeGNMf2wDfxRhzvTEmzoX1njTGRBhjIoHJwHeOG0SdRCQGmAeMA6YBYSLyTBXi3wR0FJEoEfF37GdJFdavOfHxEB4OQbY/hI74rJTyJvVcWUhEbgWuAwJEBABjzJ+qYf+BwCRjTLxjP/cCUyvY/wJgCNBURBKAp40x840xhSLyELbdxxd4yxizpxriqn4nT0JEhHMyOTuZIP8g6ter78GglFKqZlwy2Tga1AOBodheZXdQqiu0K4wxq7E3hZaf/3256QLgjQqWm3KRbX8FfFWVeDwiKQmaN3dO6lA1Silv4kqbzfXGmHuBNGPMH4EBQCf3hlUHJSVBs/O3+uhQNUopb+JKssl1/MwWkVZAAXZ8NOUqYyA5uUyy0aFqlFLexJVks1REQoC/AluBo8BHboyp7snKgpwc20HAQavRlFLe5KJtNo6Hpq00xpwFFjvGNwswxlTzHY91XJJj8AJHycYYY6vRGmg1mlLKO1y0ZGOMKcaOPVYynaeJ5jKUSzZZBVnkFeVpyUYp5TVcqUZbKSK3S0mfZ1V15ZKNDlWjlPI2riSbn2EH3swTkXMikiEi59wcV91SPtnoUDVKKS9zyftsjDH6+OcrlWyTS0kHAR2qRinlbVy5qXNwRfMdw/4rVyQlQaNG0KABoEPVKKW8jyvD1Txa6vcA7DNktgDD3BJRXVT+hk6tRlNKeRlXqtHGlJ4WkTbY59QoV1UweoC/rz9B/lpDqZTyDq50ECgvAehS3YHUaeWSTVxaHG0at0E7+CmlvIUrbTavAMYx6QP0xI4koFyVnAx9+zonN53YxMC2Az0YkFJK1SxX2mw2l/q9EFhQfrRmdRHFxTbZOHqinco4xfFzx+nbqu8lVlRKqbrDlWSzCMg1xhQBiIiviAQaY7LdG1odcfYsFBY6q9E2ndwEQN/WmmyUUt7DpREEgAalphsAK9wTTh1U7obOjSc24iu+xLSM8WBQSilVs1xJNgHGmMySCcfvge4LqY6pINl0b96dQD/9CJVS3sOVZJMlIrElEyLSC8hxX0h1TMnoAc2aYYxh08lN2l6jlPI6rrTZzAE+EZGTgAAtgDvdGZS7iMg1wO+AYGPMHTWy01Ilm7jUOM7mntX2GqWU17lkycYYswnoDMwGZgFdjDFbLrWeiASIyEYR2SEie0Tkj5cbpIi8JSJJIrK7gvdGisgBEYkTkScucSyHjTEzLjeOy1KSbMLC2HhiIwB9Wvep0RCUUsrTLplsROTnQENjzG5jzG6gkYg86MK284Bhxpho7L05I0Wkf7ltNxORoHLzOlSwrXeAkRXE5ot93s4tQFdgioh0FZHuIrKs3KtZ+fVrRFISNGkCfn5sPLGRQL9AuoZ39UgoSinlKa602cx0PKkTAGNMGjDzUisZq6RjgZ/jZcotdiPwuYjUBxCRmcArFWxrDZBawW76AnGOEks+sBAYZ4zZZYwZXe6VdMkjdYdSowdsPLmRXi17Uc/HldpLpZSqO1xJNr6lH5zmKE34u7Jxxz0524Ek4L/GmA2l3zfGfAIsBz4WkbuB6cBEF2MHaA0cLzWd4JhXWTxhIvIaECMiT1ayzBgRmZeeXk0PJHUkm4KiArad2qbtNUopr+RKsvkGmwyGi8hwYAHwtSsbN8YUGWN6AhFAXxHpVsEyfwFygbnA2NLdrKubMSbFGDPLGNPeGPNcJcssNcY8EBwcXD07TU6GZs3YlbSLvKI8TTZKKa/kSrJ5HPgO2zlgFrCLsjd5XpKjGm4VFbe7DAK6AZ8BT1dlu8AJoE2p6QjHvNojKQnCw52dAzTZKKW8kSu90YqBDcBRbBvJMGDfpdYTkXARCXH83gAYAewvt0wMMA8YB0wDwkTkmSrEvwnoKCJRIuIPTAaWVGF99yoshJQUaNaMzSc30zSwKe2C23k6KqWUqnGVtlSLSCdgiuN1BvgYwBgz1MVttwTedbTx+AD/McYsK7dMIDDJGBPv2Oe9wNQKYlkADAGaikgC8LQxZr4xplBEHsK2+/gCbxlj9rgYn/udsU/kpFkzfkz/gWtCr9HHCiilvNLFukXtB9YCo40xcQAi8oirGzbG7AQuOgBY+dGjjTEFwBsVLDflItv4CvjK1bhqVKkbOpPOJNE2uK1n41FKKQ+5WDXaBOAUsEpE3nB0DtDL8qooNVRNYlYizRp65lYfpZTytEqTjTHmc2PMZOzoAauww9Y0E5G5IvKTGorv6uYo2RQ3DSM5K5nmDZt7OCCllPIMVzoIZBljPjLGjMH29tqG7aGmLsWRbNKC/CgyRVqyUUp5LVe6PjsZY9KMMfOMMcPdFVCdkpQEvr4k+ucD0LyRlmyUUt6pSslGVZHjHpvEbNt2oyUbpZS30mTjTo7RA5KybHWattkopbyVJht3OnQI2rYlMSsR0JKNUsp7abJxl9xcOHAAoqNJykrCR3wICwzzdFRKKeURmmzcZe9eKCqC6GgSMxMJDwzHR/TjVkp5Jz37ucuOHfZnjx4kZSdpTzSllFfTZOMuO3dCgwbQoQOJmTp6gFLKu2mycZcdO6BbN/D1JSk
rSXuiKaW8miYbdzDGlmyiowFIykrSko1SyqtpsnGHkyftc2yio8nKzyKrIEtLNkopr6bJxh1Kdw5w3NCpJRullDfTZOMOO3fanz16OG/o1N5oSilvpsnGHXbsgHbtICRESzZKKYUmG/fYuRN69AAgMdNRstE2G6WUF9NkU91KDVMDOEs24Q3DPRmVUkp5lCab6rZnjx2mpqRkk5VIcP1gAuoFeDgwpZTyHE021a2kc4DeY6OUUk6abKrbjh0QGAjt2wO2ZKPJRinl7TTZVLedO53D1IAt2Wi3Z6WUt6vn6QDqnD59IOz8c2sSMxMZ3HawBwNSSinP02RT3V54wflrYXEhKTkpWrJRSnk9rUZzo+SsZEBv6FRKKU02blRyj43e0KmU8naabNxIh6pRSilLk40b6SCcSillabJxIy3ZKKWU5VXJRkSuEZH5IrKoJvaXmJmIv68/wfWDa2J3SilVa7kt2YhIGxFZJSJ7RWSPiPzyCrb1logkicjuCt4bKSIHRCRORJ642HaMMYeNMTMuN46qSsq2Q9WISE3tUimlaiV3lmwKgV8bY7oC/YGfi0jX0guISDMRCSo3r0MF23oHGFl+poj4Aq8CtwBdgSki0lVEuovIsnKvGq/L0nHRlFLKcttNncaYU8Apx+8ZIrIPaA3sLbXYjcAsERlljMkTkZnABGzyKL2tNSISWcFu+gJxxpjDACKyEBhnjHkOGF3dx1RVKdkpNA1s6ukwlFLK42qkzcaRKGKADaXnG2M+AZYDH4vI3cB0YGIVNt0aOF5qOsExr7I4wkTkNSBGRJ6sZJkxIjIvPT29CmFULDUnldCA0CvejlJKXe3cnmxEpBGwGJhjjDlX/n1jzF+AXGAuMNYYk+muWIwxKcaYWcaY9o7ST0XLLDXGPBAcfOWN+mm5aTRp0OSKt6OUUlc7tyYbEfHDJpoPjTGfVrLMIKAb8BnwdBV3cQJoU2o6wjHP44wxpOWkaclGKaVwb280AeYD+4wxf69kmRhgHjAOmAaEicgzVdjNJqCjiESJiD8wGVhyZZFXj4z8DIpMkZZslFIK95ZsBgL3AMNEZLvjNarcMoHAJGNMvDGmGLgXOFZ+QyKyAFgHXCsiCSIyA8AYUwg8hG332Qf8xxizx32H5LrUnFQAQhtoyUYppdzZG+3/ARe9wcQY83256QLgjQqWm3KRbXwFfHWZYbpNWk4agFajKaUUXjaCQE1Ky7XJRqvRlFJKk43baDWaUkqdp8nGTUqq0bRko5RSmmzcxlmy0TYbpZTSZOMuablp+Pv6E+gX6OlQlFLK4zTZuEnJUDU64rNSSmmycZu03DTtHKCUUg6abNwkLUfHRVNKqRKabNxER3xWSqnzNNm4iY74rJRS57ltuBpvpyUbVZcUFBSQkJBAbm6up0NRtURAQAARERH4+fm5tLwmGzcoLC7kXN45LdmoOiMhIYGgoCAiIyO1h6XCGENKSgoJCQlERUW5tI5Wo7nB2dyzgA5Vo+qO3NxcwsLCNNEoAESEsLCwKpV0Ndm4gQ5Vo+oiTTSqtKr+PWiycYOSEZ+1zUYppSxNNm6gIz4rdXUqLCz0dAh1liYbN9BqNKWq32233UavXr247rrrmDdvHgDffPMNsbGxREdHM3z4cAAyMzOZNm0a3bt3p0ePHixevBiARo0aObe1aNEipk6dCsDUqVOZNWsW/fr147HHHmPjxo0MGDCAmJgYrr/+eg4cOABAUVERv/nNb+jWrRs9evTglVde4bvvvuO2225zbve///0v48ePr4FP4+qjvdHcQEd8VnXZnG/msP309mrdZs8WPXlp5EsXXeatt96iSZMm5OTk0KdPH8aNG8fMmTNZs2YNUVFRpKba/7s///nPBAcHs2vXLgDS0tIuuf+EhAR++OEHfH19OXfuHGvXrqVevXqsWLGC3/72tyxevJh58+Zx9OhRtm/fTr169UhNTSU0NJQHH3yQ5ORkwsPDefvtt5k+ffoVfx51kSYbN3C22Wg1mlLV5p///CefffYZAMePH2fevHkMHjzY2fW2SRNbk7BixQoWLlzoXC809NL/hxMnTsTX1xeA9PR07rvvPg4dOoSIUFBQ4NzurFmzqFevXpn93XPPPXzwwQdMmzaNdevW8d5771XTEdctmmzcIDUnlYZ+DfH39fd0KEpVu0uVQNxh9erVrFixgnXr1hEYGMiQIUPo2bMn+/fvd3kbpXtPle+y27BhQ+fvv//97xk6dCifffYZR48eZciQIRfd7rRp0xgzZgwBAQFMnDjRmYxUWdpm4wY6VI1S1Ss9PZ3Q0FACAwPZv38/69evJzc3lzVr1nDkyBEAZzXaiBEjePXVV53rllSjNW/enH379lFcXOwsIVW2r9atWwPwzjvvOOePGDGC119/3dmJoGR/rVq1olWrVjzzzDNMmzat+g66jtFk4wZpOfp4AaWq08iRIyksLKRLly488cQT9O/fn/DwcObNm8eECROIjo7mzjvvBOD//u//SEtLo1u3bkRHR7Nq1SoAnn/+eUaPHs31119Py5YtK93XY489xpNPPklMTEyZ3mn3338/bdu2pUePHkRHR/PRRx8537v77rtp06YNXbp0cdMncPUTY4ynY6iVevfubTZv3nxZ6w5+ezA+4sPqqaurNyilPGTfvn16Ir2Ihx56iJiYGGbMmOHpUGpURX8XIrLFGNO7/LJasnEDrUZTynv06tWLnTt38tOf/tTTodRq2pLlBjris1LeY8uWLZ4O4aqgJRs30Kd0KqVUWZpsqlluYS45hTnaQUAppUrRZFPNdKgapZS6kCabaqYjPiul1IU02VSzknHRtGSjVPUZOnQoy5cvLzPvpZdeYvbs2ZWuM2TIEEpuXxg1ahRnz569YJk//OEPvPjiixfd9+eff87evXud00899RQrVqyoQvQKNNlUu5JqNG2zUar6TJkypcx4ZwALFy5kypQpLq3/1VdfERIScln7Lp9s/vSnP3HTTTdd1rY8paioyNMhaLKpbjris1LV74477uDLL78kPz8fgKNHj3Ly5EkGDRrE7Nmz6d27N9dddx1PP/10hetHRkZy5swZAJ599lk6derEDTfc4Hx8AMAbb7xBnz59iI6O5vbbbyc7O5sffviBJUuW8Oijj9KzZ0/i4+OZOnUqixYtAmDlypXExMTQvXt3pk+fTl5ennN/Tz/9NLGxsXTv3r3CMdyOHj3KoEGDiI2NJTY2lh9++MH53gsvvED37t2Jjo7miSeeACAuLo6bbrqJ6OhoYmNjiY+PZ/Xq1YwePdq53kMPPeQcYicyMpLHH3+c2NhYPvnkkwqPDyAxMZHx48cTHR1NdHQ0P/zwA0899RQvvfSSc7u/+93vePnll6v0nZWn99lUs5I2G61GU3XWnDmwfXv1brNnTyh1ciuvSZMm9O3bl6+//ppx48axcOFCJk2ahIjw7LPP0qRJE4qKihg+fDg7d+6kR48eFW5ny5YtLFy4kO3bt1NYWEhsbCy9evUCYMKECcycOROwQ97Mnz+fhx9+mLFjxzJ69GjuuOOOMtvKzc
1l6tSprFy5kk6dOnHvvfcyd+5c5syZA0DTpk3ZunUr//73v3nxxRd58803y6zfrFkz/vvf/xIQEMChQ4eYMmUKmzdv5uuvv+aLL75gw4YNBAYGOsdgu/vuu3niiScYP348ubm5FBcXc/z48Yt+rGFhYWzduhWAlJSUCo/vF7/4BTfeeCOfffYZRUVFZGZm0qpVKyZMmMCcOXMoLi5m4cKFbNy48aL7uhQt2VSz1JxUBCE4INjToShVp5SuSitdhfaf//yH2NhYYmJi2LNnT5kqr/LWrl3L+PHjCQwMpHHjxowdO9b53u7duxk0aBDdu3fnww8/ZM+ePReN58CBA0RFRdGpUycA7rvvPtasWeN8f8KECYAdYeDo0aMXrF9QUMDMmTPp3r07EydOdMa9YsUKpk2bRmBgIGATbUZGBidOnHA+mC0gIMD5/sWUjBd3seP77rvvnG1fvr6+BAcHExkZSVhYGNu2bePbb78lJiaGsLCwS+7vYrRkU83SctIICQjBRzSPqzrqIiUQdxo3bhyPPPIIW7duJTs7m169enHkyBFefPFFNm3aRGhoKFOnTr3g8QGumjp1Kp9//jnR0dG88847rF69+orirV+/PmBP4BU9bvof//gHzZs3Z8eOHRQXFxMQEFDlfdSrV4/i4mLn9MUenVDV47v//vt55513OH36dLU8EE7PiNUsNTdVOwco5QaNGjVi6NChTJ8+3VmqOXfuHA0bNiQ4OJjExES+/vrri25j8ODBfP755+Tk5JCRkcHSpUud72VkZNCyZUsKCgr48MMPnfODgoLIyMi4YFvXXnstR48eJS4uDoD333+fG2+80eXjSU9Pp2XLlvj4+PD+++87G/FHjBjB22+/7WxTSU1NJSgoiIiICD7//HMA8vLyyM7Opl27duzdu5e8vDzOnj3LypUrK91fZcc3fPhw5s6dC9iOBOnp6QCMHz+eb775hk2bNnHzzTe7fFyV0WRTzXSoGqXcZ8qUKezYscOZbKKjo4mJiaFz587cddddDBw48KLrx8bGcueddxIdHc0tt9xCnz59nO/9+c9/pl+/fgwcOJDOnTs750+ePJm//vWvxMTEEB8f75wfEBDA22+/zcSJE+nevTs+Pj7MmjXL5WN58MEHeffdd4mOjmb//v3OUsjIkSMZO3YsvXv3pmfPns6u2e+//z7//Oc/6dGjB9dffz2nT5+mTZs2TJo0iW7dujFp0iRiYmIq3V9lx/fyyy+zatUqunfvTq9evZzVef7+/gwdOpRJkyY5n2J6JfQRA5W43EcMDJg/gCD/IL6951s3RKWUZ+gjBrxPcXGxsydbx44dK1xGHzHgQYPbDmZ41HBPh6GUUpdt7969dOjQgeHDh1eaaKpKOwhUsxdGvODpEJRS6op07dqVw4cPV+s2tWSjlFLK7TTZKKVcou27qrSq/j1oslFKXVJAQAApKSmacBRgE01KSkqV7g3SNhul1CVFRESQkJBAcnKyp0NRtURAQAAREREuL6/JRil1SX5+fkRFRXk6DHUV02o0pZRSbqfJRimllNtpslFKKeV2OlxNJUQkGTh2mas3Bc5UYzhXA288ZvDO4/bGYwbvPO7LOeZ2xpjw8jM12biBiGyuaGyguswbjxm887i98ZjBO4+7Oo9Zq9GUUkq5nSYbpZRSbqfJxj3meToAD/DGYwbvPG5vPGbwzuOutmPWNhullFJupyUbpZRSbqfJphqJyEgROSAicSLyhKfjcRcRaSMiq0Rkr4jsEZFfOuY3EZH/isghx89QT8da3UTEV0S2icgyx3SUiGxwfOcfi4i/p2OsbiISIiKLRGS/iOwTkQF1/bsWkUccf9u7RWSBiATUxe9aRN4SkSQR2V1qXoXfrVj/dBz/ThGJrcq+NNlUExHxBV4FbgG6AlNEpKtno3KbQuDXxpiuQH/g545jfQJYaYzpCKx0TNc1vwT2lZp+AfiHMaYDkAbM8EhU7vUy8I0xpjMQjT3+Ovtdi0hr4BdAb2NMN8AXmEzd/K7fAUaWm1fZd3sL0NHxegCYW5UdabKpPn2BOGPMYWNMPrAQGOfhmNzCGHPKGLPV8XsG9uTTGnu87zoWexe4zSMBuomIRAC3Am86pgUYBixyLFIXjzkYGAzMBzDG5BtjzlLHv2vsIMUNRKQeEAicog5+18aYNUBqudmVfbfjgPeMtR4IEZGWru5Lk031aQ0cLzWd4JhXp4lIJBADbACaG2NOOd46DTT3VFxu8hLwGFDsmA4DzhpjCh3TdfE7jwKSgbcd1YdvikhD6vB3bYw5AbwI/IhNMunAFur+d12isu/2is5xmmzUZRORRsBiYI4x5lzp94zt5lhnujqKyGggyRizxdOx1LB6QCww1xgTA2RRrsqsDn7Xodir+CigFdCQC6uavEJ1freabKrPCaBNqekIx7w6SUT8sInmQ2PMp47ZiSXFasfPJE/F5wYDgbEichRbRToM25YR4qhqgbr5nScACcaYDY7pRdjkU5e/65uAI8aYZGNMAfAp9vuv6991icq+2ys6x2myqT6bgI6OHiv+2AbFJR6OyS0cbRXzgX3GmL+XemsJcJ/j9/uAL2o6NncxxjxpjIkwxkRiv9vvjDF3A6uAOxyL1aljBjDGnAaOi8i1jlnDgb3U4e8aW33WX0QCHX/rJcdcp7/rUir7bpcA9zp6pfUH0ktVt12S3tRZjURkFLZe3xd4yxjzrGcjcg8RuQFYC+zifPvFb7HtNv8B2mJHzJ5kjCnf+HjVE5EhwG+MMaNF5BpsSacJsA34qTEmz4PhVTsR6YntFOEPHAamYS9U6+x3LSJ/BO7E9rzcBtyPbZ+oU9+1iCwAhmBHd04EngY+p4Lv1pF4/4WtUswGphljNru8L002Siml3E2r0ZRSSrmdJhullFJup8lGKaWU22myUUop5XaabJRSSrmdJhulPEREikRke6lXtQ1mKSKRpUfyVcrT6l16EaWUm+QYY3p6OgilaoKWbJSqZUTkqIj8RUR2ichGEengmB8pIt85niWyUkTaOuY3F5HPRGSH43W9Y1O+IvKG47ks34pIA48dlPJ6mmyU8pwG5arR7iz1Xroxpjv2ju2XHPNeAd41xvQAPgT+6Zj/T+B/xpho7LhlexzzOwKvGmOuA84Ct7v1aJS6CB1BQCkPEZFMY0yjCuYfBYYZYw47Bjw9bYwJE5EzQEtjTIFj/iljTFMRSQYiSg+d4nj0w38dD8BCRB4H/Iwxz9TAoSl1AS3ZKFU7mUp+r4rS43YVoW20yoM02ShVO91Z6uc6x+8/YEecBrgbOxgq2Ef3zgb7eHLH0zWVqlX0Skcpz2kgIttLTX9jjCnp/hwqIjuxpZMpjnkPY5+Y+Sj26ZnTHPN/CcwTkRnYEsxs7BMmlao1tM1GqVrG0WbT2xhzxtOxKFVdtBpNKaWU22nJRimllNtpyUYppZTbabJRSinldppslFJKuZ0mG6WUUm6nyUYppZTbabJRSinldv8/sljNcN/nKIcAAAAASUVORK5CYII=", "text/plain": [ "
" ] @@ -656,7 +625,7 @@ "if TRAIN_MODEL:\n", " generator = dt.generators.ContinuousGenerator(\n", " training_dataset & (training_dataset >> get_label),\n", - " batch_size=128,\n", + " batch_size=16,\n", " min_data_size=1000,\n", " max_data_size=1001,\n", " )\n", @@ -672,7 +641,7 @@ " \n", " plt.plot(h.history[\"accuracy\"], 'g')\n", " plt.plot(h.history[\"val_accuracy\"], 'r')\n", - " plt.legend([\"accuracy\", \"Validation accuracy\"])\n", + " plt.legend([\"accuracy\", \"Validation accuracy\"]) \n", " plt.yscale(\"log\")\n", " plt.ylabel(\"Accuracy\")\n", " plt.xlabel(\"Epoch\")\n", @@ -696,12 +665,12 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 78, "metadata": {}, "outputs": [ { "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAOcAAAD3CAYAAADmIkO7AAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8vihELAAAACXBIWXMAAAsTAAALEwEAmpwYAAAIrUlEQVR4nO3df6jVdx3H8ddLs9kNcaurY/3RzXDqqj82cpHQICmhvC4HbRTK/giSbWlBumBU2GYU/VgFNm3DVdCvPzKqkSepRaNGKCllBDPjts3SFszNOWe57vTTH/cId3a/n6P3eD2v630+/lHO+3zPD/W5z9n98P0el1IEIM+0Xr8AAGMjTiAUcQKhiBMIRZxAKOIEQhFnMNt32/5er18HeoM4A9heZXuv7RdsP2V7p+139Oi1fND2ftsnbP/N9g29eB2QXtHrFzDV2V4v6S5Jt0v6haT/SnqPpJWSTlzk17JM0hclfUDS7yVddTGfHy/HytlDtmdL2iRpbSnlx6WUE6WU4VLKz0opnxjj/ttt/8v2Mdu/tf3mUbPlth+zfdz2Ydt3tm/vt73D9nO2n7X9qO2mv/d7JG0qpewupZwupRwupRyeiPeOzoizt5ZIminpJ+d4/52SrpY0V9IfJH1/1Oybkm4rpcyS9BZJv27fvkHSIUlzJF0p6ZOSiiTZ3mp7a/v30yUtljTH9pDtQ7bvs/2qLt4fusDH2t56raQjpZSXzuXOpZRvnfm97bslHbU9u5RyTNKwpDfZ/lMp5aiko+27Dmvk4+lAKWVI0qOjHu8jox7+SkkzJN0s6Yb2cQ9J+rSkT43v7aEbrJy99Yykftsd/yNpe7rtL7R/SPO8pCfbo/72r++XtFzSQdu/sb2kffuXJQ1J+qXtx23f1fAU/2n/+vVSylOllCOSvtp+TPQAcfbWLkkvSrrpHO67SiM/JHq3pNmS3tC+3ZJUStlTSlmpkY+8P5X0w/btx0spG0opb5T0Pknrbb/r7Advr7aH1P7Ie+bm835HuGCIs4faH0c3Stpi+ybbfbZn2H6v7S+ddfdZGgn5GUl9kj5/ZmD7lbZXtz/iDkt6XtLp9myF7fm2LemYpFNnZmP4tqSP2p5r+wpJH5e048K9Y5wP4uyxUspXJK3XyP/bPS3pH5LWaWT1G+07kg5KOizpMUm7z5rfKunJ9kfe2yWtbt9+taRfSXpBIyv11lLKI5Jk+37b9496jM9K2iPpr5L2S/qjpM91/SYxLuZkayATKycQijiBUMQJhCJOIFR183vZtFv4aREwwR4+vd1j3c7KCYQiTiAUcQKhiBMIRZxAKOIEQhEnEIo4gVDECYQiTiAUcQKhiBMIRZxAKOIEQhEnEIo4gVDECYQiTiAUcQKhiBMIRZxAKOIEQvHluZeY6QvnV+fPfq1+/NvmHmycDa0eqB576sBQ/cFxXlg5gVDECYQiTiAUcQKhiBMIRZxAKOIEQrHPOcl02sdc29pRnQ/2nRz3c7daf67ON89fNO7Hxv9j5QRCEScQijiBUMQJhCJOIBRxAqGIEwjFPucks//OK6rzbvYxJWlea03j7Mbr9nU4erir58bLsXICoYgTCEWcQCjiBEIRJxCKOIFQxAmEYp8zTKfzNZ8Y3NbV47/1njuq8wUP7GqcHejqmXG+WDmBUMQJhCJOIBRxAqGIEwhFnEAotlLCdDolrJNFD9a3SgYqWyXIwsoJhCJOIBRxAqGIEwhFnEAo4gRCEScQin3OHqidFtbplLCP/fP66nxgI/uYlwpWTiAUcQKhiBMIRZxAKOIEQhEnEIo4gVDsc/ZAN+ds/m7b4uq8X+xzXipYOYFQxAmEIk4gFHECoYgTCEWcQCjiBEKxz9kDN163r3HW+vfM6rH9XHd2ymDlBEIRJxCKOIFQxAmEIk4gFHECoYgTCMU+5wSoXZdWkja/7keNs47fr8n5mlMGKycQijiBUMQJhCJOIBRxAqGIEwjFVsoEeHz1nHEf++rDF/CFYFJj5QRCEScQijiBUMQJhCJOIBRxAqGIEwjFPucEuHzx0+M+dsbK+rHHVtZPR+vW8EPNe7RclvPiYuUEQhEnEIo4gVDECYQiTiAUcQKhiBMIxT5nmN3XNl8286K4tjL7TP3Qea011fk19x6tzk8dGKo/wRTDygmEIk4gFHECoYgTCEWcQCjiBEIRJxCKfc5JptNe4mv21v9KO50v2s0+6xOD26rz1tKZ1fmWwRWNs6m4B8rKCYQiTiAUcQKhiBMIRZxAKOIEQhEnEIp9zklmwZo93T3AA/Xx8oU3N87WtnZUjx3sO9nVfEPle00HNrLPCSAEcQKhiBMIRZxAKOIEQhEnEIqtlElm+sL6VwB2e2pV7fjN8xdVj1237frqvNMpZX/58DcaZ+/cVT9V7rKdXW4xBWLlBEIRJxCKOIFQxAmEIk4gFHECoYgTCMU+5wR4bm/zqU+Sql+z9/Z9zadsSdLs4EtEdjqdbdGmO6rz2j7n8dfX/6leVp1OTqycQCjiBEIRJxCKOIFQxAmEIk4gFHECodjnnAADG3dV561VzV+F1+kr+GqXrpSyvyrv8sX1rx+smfX3ly7gK5kcWDmBUMQJhCJOIBRxAqGIEwhFnEAo4gRCsc/ZA+seubVxNtjh2q6dvoZvy+CK6nwi90GP/bx+Td1Oe7jzWs3Xpl1wCV6XthNWTiAUcQKhiBMIRZxAKOIEQhEnEIqtlB645t6jjbPW0ubTySRpsO9k/cE7bLVs+MGHqvMXr2o+Neu+pd+tHjvYt68673TZz9qfy6nqkZcmVk4gFHECoYgTCEWcQCji
BEIRJxCKOIFQLqU0DpdNu6V5iAlx5LYl1fmMlfXLS3Y6LWsiLXqw/hV/nS4ZOlU9fHq7x7qdlRMIRZxAKOIEQhEnEIo4gVDECYQiTiAU+5xAj7HPCUwyxAmEIk4gFHECoYgTCEWcQCjiBEIRJxCKOIFQxAmEIk4gFHECoYgTCEWcQCjiBEIRJxCKOIFQxAmEIk4gFHECoYgTCEWcQCjiBEIRJxCKOIFQxAmEIk4gFHECoYgTCEWcQCjiBEIRJxCKOIFQxAmEIk4gFHECoYgTCEWcQCiXUnr9GgCMgZUTCEWcQCjiBEIRJxCKOIFQxAmE+h96wIsPicBiKwAAAABJRU5ErkJggg==", + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAOcAAAD3CAYAAADmIkO7AAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8vihELAAAACXBIWXMAAAsTAAALEwEAmpwYAAAJpElEQVR4nO3df6zVdR3H8debezGh0R0D1PihEgiallr+LpalBDJLM0mFGf6Ry6mtQis1VwZFhclcBmPNbJWuDTbNH/mz2dRmhj9SZ6KIAfErBeSHQsDl3k9/3EO7uft9AwfwvLg8H//Azvt8zvke3dPP8X52zo1SigD46dHoCwDQNeIETBEnYIo4AVPECZgiTsAUcRqLiBsi4vZGXwcagzgNRMSEiHgmIt6JiJUR8UBEfLJB13JhRMyPiI0R8XpEjGrEdUBqbvQF7O8iYrKkayRdJukhSVsljZV0jqSN7/G1jJb0U0kXSJon6YPv5fPj/7FzNlBEtEiaIumKUsqdpZSNpZTWUsq9pZRvdXH/uRHx74hYHxGPR8TRnWbjIuLliHg7IpZHxNW12/tHxH0RsS4i3oqIJyKi6t/7DyRNKaU8VUppL6UsL6Us3xuvHTtGnI11qqQDJd21k/d/QNIRkg6S9JykOzrNfiXpq6WUPpKOkfRo7farJC2TNEDSwZKuk1QkKSJmRcSs2t+bJJ0gaUBELIyIZRHxi4jotRuvD7uBt7WN1U/S6lLKtp25cynltu1/j4gbJK2NiJZSynpJrZI+HBEvlFLWSlpbu2urOt6eHlZKWSjpiU6Pd3mnhz9YUk9J50saVVt3t6TrJX23vpeH3cHO2VhrJPWPiB3+RzIimiLiJ7Uf0myQtLg26l/784uSxklaEhGPRcSptdtvlLRQ0sMR8c+IuKbiKf5T+/OWUsrKUspqSTNqj4kGIM7G+qukLZLO3Yn7TlDHD4nOlNQi6fDa7SFJpZSnSynnqOMt7x8kzand/nYp5apSyockfV7S5Ig4490PXtttl6n2lnf7zbv8irDHEGcD1d6Ofk/SzIg4NyJ6R0TPiDgrIqa/6+591BHyGkm9JU3bPoiIAyJiYu0tbqukDZLaa7OzI2J4RISk9ZLats+68GtJX4uIgyKir6RvSrpvz71i7AribLBSyk2SJqvj/+1WSVoq6Up17H6d/VbSEknLJb0s6al3zS+WtLj2lvcySRNrtx8h6U+S3lHHTj2rlPJnSYqI2RExu9NjTJX0tKQFkuZL+rukH+32i0Rdgg9bA57YOQFTxAmYIk7AFHECptLD79E9xvPTImAve6R9bnR1OzsnYIo4AVPECZgiTsAUcQKmiBMwRZyAKeIETBEnYIo4AVPECZgiTsAUcQKmiBMwRZyAKb7xHTut7fSPpfMVV25N50POf2lPXk63x84JmCJOwBRxAqaIEzBFnIAp4gRMcZSyn2nq2zedL7juyMpZ86Yuv8Hxf1rueV9d14SusXMCpogTMEWcgCniBEwRJ2CKOAFTxAmY4pyzmymfOC6dt09dlc5fHTmzcjZpyWfStf96bkQ6x65h5wRMESdgijgBU8QJmCJOwBRxAqaIEzDFOWc3s/TM3un8pZH3pvPvrzq2crb2oj7p2l5L5qVz7Bp2TsAUcQKmiBMwRZyAKeIETBEnYIo4AVOcc5ppH3V8Ol95Wq90fun4B9P5UU9cks6HX7Gscta2Zmm6FnsWOydgijgBU8QJmCJOwBRxAqaIEzDFUYqZN07Mj0omXfxQOv/NrWPT+dCbn0znbekU7yV2TsAUcQKmiBMwRZyAKeIETBEnYIo4AVOcc5p5Z1h+0jip5cV0/rseY/bk5aCB2DkBU8QJmCJOwBRxAqaIEzBFnIAp4gRMcc7ZAO2fqv76y7njbknXXrJwfDofNPv5/LnTKZywcwKmiBMwRZyAKeIETBEnYIo4AVPECZjinHMvaDp6ZDpfffXGytmjG49K126dckj+3JtWpHPsO9g5AVPECZgiTsAUcQKmiBMwRZyAKeIETHHOuRcsPq9fOv/jR6dXzi7/3FfStU0vPlfXNWHfw84JmCJOwBRxAqaIEzBFnIAp4gRMcZRSh+ahh6XzQacvTec/XDm2ctb+4it1XdOe0jRgQOWsDOyfro1Fy9N524YNdV3T/oqdEzBFnIAp4gRMESdgijgBU8QJmCJOwBTnnHXYtmhJOt/y85PS+bCpCypnK/sNSte2rXkrnTeNHJ7OF1yan1V+6YwnK2fHvX9euvae1dW/2lCSnn34tHR++PTnK2ftmzala7sjdk7AFHECpogTMEWcgCniBEwRJ2CKOAFTnHPuBZsGNKXz7/SbXzm77dpPp2tH/HJVOj9xTvVjS9L9A/6RzttKezrPjOn9QDr/9rgt6fwvW6vPSQdPqz5/7a7YOQFTxAmYIk7AFHECpogTMEWcgCniBExxzlmHHX1v7aFfXlj3Yw87flk6//2jc9P5G235OeWIxy5P58092ypnH+i9OV3b99qe6bz0zM9/R854rXK2cVq6tFti5wRMESdgijgBU8QJmCJOwBRxAqaIEzDFOWcddvS9ta88e0r+AMOqR/cfeU+69LPzL0jnB5z9ZjofuvmFdL47dvRJ0PUT838uix7vUzk7XPnnWLsjdk7AFHECpogTMEWcgCniBEwRJ2CKo5Q6NB82JJ3f9YWbd/AIB1ROxr8+Jl154KRt6Xzb5vxjXXtT09Ej0/mF1z2Yzh8+74TKWfUH2bovdk7AFHECpogTMEWcgCniBEwRJ2CKOAFTnHPWYcPHB6bzde0HpvOvrzi5cvaRlhXp2mfa8zPW3dU8ZHD1sLU1XTtubv5r+n72+FnpfMSr89L5/oadEzBFnIAp4gRMESdgijgBU8QJmCJOwBTnnHVYNib/Esgfnzchnb95Ukvl7O7rb0zX3jnr2HR+6DfyX8O37uRB6fzW6TMqZyN65ue3Fy0anc5HXMY55q5g5wRMESdgijgBU8QJmCJOwBRxAqaIEzAVpZTK4ege46uH+7EexxyZz9duSOdlW/V3z7YNPSRde+0dt6fzgc1vp/Nhzb3SeebYv12czgdf8Fo6L61b637u7uyR9rnR1e3snIAp4gRMESdgijgBU8QJmCJOwBRHKfuY5kH513IunVn9cTRJGj3k1XT+0JxTKmeDb8o/8pUdEaEaRynAPoY4AVPE
CZgiTsAUcQKmiBMwRZyAKc45gQbjnBPYxxAnYIo4AVPECZgiTsAUcQKmiBMwRZyAKeIETBEnYIo4AVPECZgiTsAUcQKmiBMwRZyAKeIETBEnYIo4AVPECZgiTsAUcQKmiBMwRZyAKeIETBEnYIo4AVPECZgiTsAUcQKmiBMwRZyAKeIETBEnYIo4AVPECZgiTsAUcQKmiBMwRZyAKeIETBEnYIo4AVPECZgiTsAUcQKmopTS6GsA0AV2TsAUcQKmiBMwRZyAKeIETBEnYOq/qWHK54MnwV0AAAAASUVORK5CYII=", "text/plain": [ "
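[Editor's note, not part of the diff: the first hunk above drops the ContinuousGenerator batch size from 128 to 16, and the execution_count changes (null to 78 and onward) indicate the notebook was re-run for this revision. Below is a minimal sketch of how the generator configured above is typically driven; it is not authoritative: `training_dataset`, `get_label`, and `model` are objects defined elsewhere in the notebook, the epoch count is illustrative, and the context-manager usage follows the pattern in DeepTrack's own examples.]

    import deeptrack as dt

    # Pipeline yielding (image, label) pairs, exactly as in the hunk above.
    data = training_dataset & (training_dataset >> get_label)

    # Continuously regenerates training data on a background thread.
    generator = dt.generators.ContinuousGenerator(
        data,
        batch_size=16,       # reduced from 128 in this revision
        min_data_size=1000,  # wait for ~1000 samples before training starts
        max_data_size=1001,  # cap the pool; older samples are replaced
    )

    # While the context is open, generation keeps refreshing the pool.
    with generator:
        h = model.fit(generator, epochs=100)  # epoch count is illustrative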
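[A similar aside for the next hunk below, which changes the inspected validation sample from index 3000 to 2000. The notebook source there is truncated at `np.argmax(`, so completing the call with `model.predict` and an added batch axis is an assumption:]

    import numpy as np

    idx = 2000  # index of the validation image to inspect (was 3000)
    image = validation_images[idx]

    # Assumed completion of the truncated call: predict on a batch of one
    # and take the most probable class for that single image.
    prediction = np.argmax(model.predict(image[np.newaxis])[0])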
" ] @@ -714,7 +683,7 @@ ], "source": [ "# index of validation image\n", - "idx = 3000\n", + "idx = 2000\n", "\n", "image = validation_images[idx]\n", "prediction = np.argmax(\n", @@ -738,7 +707,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 79, "metadata": {}, "outputs": [], "source": [ @@ -760,7 +729,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 80, "metadata": {}, "outputs": [], "source": [ @@ -791,12 +760,12 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 81, "metadata": {}, "outputs": [ { "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAABGcAAAM9CAYAAAAxZR53AAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8vihELAAAACXBIWXMAAAsTAAALEwEAmpwYAABR6klEQVR4nO3dT4xdV5oY9lt8xSKL9agqSVQVJVFNsTk9Mqa7046BhI5DGFCIADYMOKsgQXaGk20WEyBB4AECGwaSeGEgqwQBDC8SwIts4mQzCMAQaGiEJhA0rKg1bvaIkl6LI4kl/uerKlaxql4WWsTjGZ3vk97hPe9V/X7b8/E759177nfP+/iAWphMJh0AAAAAbZxovQAAAACA40xzBgAAAKAhzRkAAACAhjRnAAAAABrSnAEAAABoaLE0+NrCHyT+lNOgwjIOphzPxOwnchQvxxG0lIg5G4wPK+TIeBqMjxM5thMxO4mYaWX2YiTeq19P/tFChYlmwmsL/+Uc/Vm5GjUxkyOKqbGOvURMjfpd45moIfMOqHHdM7V3LRi/kMjxRnl4pcI7byu+d19P/uoRqkWZc1Fmz0f6eL4zapy/+rgemZhaz2b03GT+33OezpvRM35YIUc/e+Tryd87MrWo67rutYXfn6OzEfMrU6+Wg/EziRyZ+hvJnFuj74M1vgtG16Prvp78/b+wHvnlDAAAAEBDmjMAAAAADWnOAAAAADSkOQMAAADQkOYMAAAAQEOaMwAAAAANac4AAAAANKQ5AwAAANDQ4vQpBtOn6A4q5Jgn0TXr63pk5nkajG8nctxPxEwr81kyMbNyb/iz9lsv4DuI1popuzX2al+1+bg9E9F1XUrkOJWIORuMb8QpXg322g8Ty4h8UuEYMVfm6ZmoUQNmRY3rnrkemXmiZzzzfEcya61xfzPPb/ROy1yz3WD8MJEj4v+b4cXInMF3gvG9RI5ZObfW+M4RXY9vp5IBAAAANKQ5AwAAANCQ5gwAAABAQ5ozAAAAAA1pzgAAAAA0pDkDAAAA0JDmDAAAAEBDmjMAAAAADWnOAAAAADSkOQMAAADQkOYMAAAAQEOaMwAAAAANac4AAAAANKQ5AwAAANCQ5gwAAABAQ5ozAAAAAA0tTp/iYPoUx85sXLOF4UthzPMfny2Or50ahDm2frleHJ+Mt8IcXfc0GB8ncmwnYvYSMfSvQqnqTfxM1MkRxdRYR43nIa53C8OVMOb5j8vja6d2whxbv1wujk/Gu2GO+PPUqiFRzbsbp7j/Rnn8WYXnamt/+hxzpcZzVWOevtYxK/qpiQvDl8OY5z8uPzdrp+I6svXLcs3LnYtqnCUz9Sp6xg8r5KjxWY7bM8FRN19no3KOrjuTyLGUiIlkalr0fTC+ZrHoenw7v5wBAAAAaEhzBgAAAKAhzRkAAACAhjRnAAAAABrSnAEAAABoSHMGAAAAoCHNGQAAAICGFsvDB/2sopd5go96xCwMV8KYS1fuhTHrgzAizLF55WRx/JPrF8IcXfcgEROJ/q59LftHZI5ZEm7Erk4dycwzbY7MHDViMjUv2kdLiRx7xdGF4ekww6UrfxrGrA8OE2sp27zypDj+yfXh1HPklK/ZN54G418mcuyUh7deTeSI3E/E/NUK88yKGjWir3n6WmskU5ujtWZqUTnHwvCVMMOlK1+HMbNTi6J11DonRPevxrt3Vt7f0I/c97S7YUw/9ShzVohqdKamrSViIo8q5Mic0SJnvve/9MsZAAAAgIY0ZwAAAAAa0pwBAAAAaEhzBgAAAKAhzRkAAACAhjRnAAAAABrSnAEAAABoSHMGAAAAoKHF6VMcTJ/iSNnvaZ7yrRtfPhlmWB8sJOYZFkev3/03wwwb5yblgJPBeNd13fMoYDvO0d1PxPR1//huBnM0T5QjM0cmZikYz5T3qD+/l8hRXmuuFh0m5im7fvdiGLOx+jiIyKyjRo3IvDejmMy9iWrenUSOyG6FHPPkKNWijFk542U+y3JxdHz5pTDD+uBpYp5yDbh+90KYYWP1wVRzfCN69jL3LlNH+lBjn/X1bML0xpejM9wsnY0yz9aZYHwtkePVREwNUd2rUUvi+/tt/HIGAAAAoCHNGQAAAICGNGcAAAAAGtKcAQAAAGhIcwYAAACgIc0ZAAAAgIY0ZwAAAAAaWiwPH/SzilDm743X+JvkGdE83//vmn8XC8PTxfFrG6NElrUw4ue3frc4vjKK98h4faEc8HKYous2XwoCziSSLCdidoLxWXkmONqC0tzbPHthhoXhSnH82sZniXXE9fvnty4Xx1dGD8Mc4+5kYi3TqlUjdqcc58Xo6x0QzVPjzHO03mdxLcqci+La+/NbbxfHV0b3whzjbj+IiMa7Lr5/cf3O7YHMWiJ9vNOO1n4+fjLn+PVEzBsVckQ2w4iF4ePi+LWNDyqsI1OPHoQ5xlVWQi1+OQMAAADQkOYMAAAAQEOaMwAAAAANac4AAAAANKQ5AwAAANCQ5gwAAABAQ5ozAAAAAA1pzgAAAAA0tFgeHvSzil5kPstSIuZUML6cyBHZCSPGl88EEV+EOW6MLocxy6OnQcSrYY7UZZ3aMBHzUiJmb8rxruu6gynHM47Ss5mRuWY1rmskc91rrGM3EVNjDxxOnWF8+WQQEbxmuq67MfqdMGZ59DiIyNSAeC2x/WA8UyMyMX2IPktGjWs6T/qoM7XUWOus1N54jvHlSRARP3c3Rm+EMcujr4KITF2Nnr1Zue4ZNWpAjffZrFwPvp/1RMxfmz7kr2fWEvj522HIeGstiNgMc9wYxXt6efQgjJle5tmK6uujCuvIyMwTrbVGLfn+5zy/nAEAAABoSHMGAAAAoCHNGQAAAICGNGcAAAAAGtKcAQAAAGhIcwYAAACgIc0ZAAAAgIY0ZwAAAAAaWmy9gP4MEjHLiZiXgvFhIkfZw
nASxlzb+OPi+K+enQtzLN96mFhNnCd0MhjP3JrTwfizM4kkmXuzG4zv9JQjslQhxzw5OGLz9GFv6gwLw+jB67prG6Pi+K+evRLmWL71OLGa6BnP1O9TiZjIdjBe67Ua3b+jtFfnyXG77rPxeXO16E+K47lz0VeJ1fRxTWbjun+jRk2LDnqZgyBH23oc8tcSaf6z8rvzb7/yz3PLKfg//8p/EsZc++PgbPR//26YY/nWH6XX9GJl6lF0NsqY/tyayxGttUb9/f7Xwy9nAAAAABrSnAEAAABoSHMGAAAAoCHNGQAAAICGNGcAAAAAGtKcAQAAAGhIcwYAAACgocXpUwymT1Hl74nXcCoRMwzGX516FePLmeuxXBx9MDobZljqnifmCbZI5vYvBeOZHOWP23XPziSSxNek6/YSMdOqMUdmrx43+xVyRCVxVmpVP8aXo4e366Jr9mC0FmZY6rYS80RFIPN8RzkyooL1NJFjt8I8GdF+rXAEOHYyNWBWalGNPZRRoy6Wc4wvZ/4vsfxufTCKzwlL3YPEPLNiVt5HmX0WxfS1V5ld63HIvxuH/O1X/nlx/D+9878k1/Pttv7WWhzz6r9dHH/wT383zLHU/VF2SS9Y5p22E4xnvvv01VOIYmq8w6Pr8e38cgYAAACgIc0ZAAAAgIY0ZwAAAAAa0pwBAAAAaEhzBgAAAKAhzRkAAACAhjRnAAAAABrSnAEAAABoaLE8vJRIcVBhGdE8g0SOKOZMIsfZRMxyMB5c0oSN1SdhzOZB+fMujR4mZspck73ycOb2Rx/nZCLHThQwSSSpIbMXaWP6Zy9WoxbVmicyfW3eWL0fxmwelJ+9XC3KvGsimWtWY54oR2aOGvsok6PG+5nZ1FedyeznUxVylG2sPg9jNg+G5VWMHiRmOkrPzFH6LDA7Xl99FMa8f1D+/UPubJQ51+4nYvoQrWNW1tmX7/95/XIGAAAAoCHNGQAAAICGNGcAAAAAGtKcAQAAAGhIcwYAAACgIc0ZAAAAgIY0ZwAAAAAaCv6A+plEiu0Ky4jmGVbIsZzIkfl78ofB+JMww8LwZHH8J6c/D3PcGG0Ux5e7T8McXbebiHkUjN+NU3xZXmtOtNZHiRxPEzF7U4533TR/2z6vjzlmyVIiJnNvpp0ns45BMJ6pM5n7ezDleNctDFeK4z85/UWY48bo7eL4cncvzJFZaxyzk8gR3ZtoPLOOjMw+qiGz1/huMnukj3ky64j2WeZc9HIi5pVgfC3MENeiX4U5bowuF8eXu4/DHLkzbY0aMCs5arAOatiMQ/7opTDk//i9/6A4fuJC9D0urkdfbl0Jc3z0v/2gOL7cfRDmyKnxnj9u3ylmm1/OAAAAADSkOQMAAADQkOYMAAAAQEOaMwAAAAANac4AAAAANKQ5AwAAANCQ5gwAAABAQ5ozAAAAAA0tloeHiRS7FZYRzfNqIse5YDzTh9qpEDMOM2y/eTqIeBrmGDw7FUQchDm6bi8R8zARE4muWebeRGvN3LvM592ukCOKydybaec4apYSMTWuazRP9Nx1XdctV1hHpq4eTp1j+83o80ZzdN3gWRwTq1GvBokcmX0UqbHPgldvtRzRNclcs0iN6zFPalyzGvNk9nJUr15K5Hg9EbNRHk4c4bb/0stBxNthjsGz6Jp8FS+k20/ERHu+xvu5r+cq83kjmVrUx+c5brXoqNmMQ97/nUSech343//6fxRm2HmnXI/eff+zMMfgehTxdZgj976ZlTNJjVpC1/nlDAAAAEBTmjMAAAAADWnOAAAAADSkOQMAAADQkOYMAAAAQEOaMwAAAAANac4AAAAANBT8YfMziRT3KywjmudcIsdahXVk/s77TjD+NMxwcu1JEDEOc5w4f6o4vn/+dJgjp/x36w+/ehZmWBrdCiIy172vPuJhML6byHEw5XhGjRzzZFApZtocy4kcUT2r9Vm2EzFlJ9f2ps5x4nx5HblalPm85Zp3+NVKmGFpFF2zTC0q18ScGnugfD1yOZYSOSLT76H5krlmNa5JNE/m/kc5VhM5NuKQC8H4pTjFySvBu/XrOMeJx68Wx/fPn4+TdJNETHQuijMsje4m5plWjVpVa56oth63Mw1/3mYi5mYc8n5Qs95fD1OcvBLs19XPwxwnXvuyOL7/WvR9suu6Lj7XRA6/it9ZS6N7U88TP+N91aP555czAAAAAA1pzgAAAAA0pDkDAAAA0JDmDAAAAEBDmjMAAAAADWnOAAAAADSkOQMAAADQkOYMAAAAQEOLrRcwW/YTMbvB+F4ix/Opc1xd/bzCOipYHcQx75SHr9/9YZhieLt8bybjZ/E6usRaQweVYqbVxxy8GLN076Zfy9XVLyqsI/MqWi4Pr96PU7xzpjicq0U7xfHJeCteR29q1LxZmIMXY5ZqUSCxza6ufhlE3EtM9DARU64B3Wri/PVOOeb63TfDFMPb5XNPrhZlzr3Qh+1EzO0KMZnzxkowHp97rma+H/VhNRETfk+7EKYY3i5/t1WP8vxyBgAAAKAhzRkAAACAhjRnAAAAABrSnAEAAABoSHMGAAAAoCHNGQAAAICGNGcAAAAAGtKcAQAAAGhosTz8NJHioMIyonnuVZgj04d6kIjZCcb3EzlevOt3L4YxJx8thDEnzpfv79XVLxKrGRRHr22Mwgyb55aK45/e3AhzTMbPwphYjf3Od7eXiKlxb6J5oue/lt1ETLTWzPU4mYiJlF8j1+/+IF7Fo+BV1HXdifPlmKurD8McXVeuI9c2vgwzbJ4rX9dPb66FOSbjzLsmkrm/0fsovu7Tz3EclfdZHTWu+zgRczcOuRO8fzNlcy0Yz5TEbrs4mjsXvRzGnDhfPrNeXb0d5ohc2/jTMGbzXPlc++nNc2GOyfhJek3TKZ8DoT+Z2lmjvpbf0bP1Pa3s2sadMGYzKDe572mPs0s60vxyBgAAAKAhzRkAAACAhjRnAAAAABrSnAEAAABoSHMGAAAAoCHNGQAAAICGNGcAAAAAGlosD+8kUpT/vnpONM/9RI69HtbRdV23G4wfJnIEl72ClQ8e1kk0Kq/15vCtMMWlK/eK4+uD+JpFMR+9eSrMsXzrSRjDrMrUmRq1KMqxncix38M6MjGZmngyGM98lnKNWPngUSLHUhwyOlscvjm8FKa4dKVcA9YH8XWPYj56M/4sy7cy74no2tfIwYsxqJAj2ouZGlHjXJQRnJ3uvxKn2Fwoj68+SqyjXJ9XPlhO5HgWh4zKz9XN4eUwxaUrnxXH1xNbaH1QXsdHb54Jcyzf2oonqqLGMwGzYvrzZu5slDAqD98cvhGmuHTlbnG8v7NRGHIs+OUMAAAAQEOaMwAAAAANac4AAAAANKQ5AwAAANCQ5gwAAABAQ5ozAAAAAA1pzgAAAAA0pDkDAAAA0NBiefhpIsVBhWVE8+wmcjwKxoOP2nVd1+0nYmqI1jJ9z2xhuBrGTMaPp55nMn4WxnxyvbyWD3+2Fua4tvFpcfzdix+HOd5/eCGMObH5KIyZDTWeu3myl4jp6/mNZNZaQ4098OL30cLwpTBm
Mp4kMg2CHM/DDJ9cf6U4nqtFHxXH3714O8zx/sPXwpgTm5tBRGafla8ZL0rmvBGJns3Msxvtkcwe2k7E3AvGlxI5omfifiJHeZ6F4athhsk4MU2YYyeMiWvRmTDHtY1PiuPvXvxtmCN3LoqufabOqEXwr1oYngpjJuOtRKby+yaT45Prw+J47mw0Ko6/e/GzMEeuHj0MY+adX84AAAAANKQ5AwAAANCQ5gwAAABAQ5ozAAAAAA1pzgAAAAA0pDkDAAAA0JDmDAAAAEBD5T+O3h0kUmRiprVTIcegUsz0OZ4/Cv62/ephmOO9x28UxxfHT8Ic4e3vuq7ONSlb+eBRGHPjnbeL4+9e/DjMsb+8EMYshRE11LimfTx3s2R/jubpa62R+Pl+/ijY8avxLO89fr28ivFWnKTKkzf9+2rlg/thhhvvvFUcf/fih2GO/eXnYcxStxdEZOrIcasTx0mN81mmVu0mYiKZWhTs59X42Xzv8e+VVzF+GOaYleeqzrnoVphjfzn+P9qlHs6BzkXMkvh7WpyjztloNuTqUXQ2+izMMTvf09ryyxkAAACAhjRnAAAAABrSnAEAAABoSHMGAAAAoCHNGQAAAICGNGcAAAAAGtKcAQAAAGhIcwYAAACgocXWC8gZzEiOjIMwYvnWV8XxzQtxz+zq6hfF8ZvDjTDHZLwVxsTiz1vDybXdICLeyos7mV5kX/tkWvOyTv682Sm7y7ceFsc3L8T77Orq58Xxm8O3whyT8fMwput2gvFMLYrqSOzk2kIQsRfmWNzZn3oddWpvjTrSzztgdvSzz+J5Zum6T1/Tlm99XRzfvBDnuLr6cXH85vB3whyT8TieKLy/cQ2IY+IacXJtOzFP2eLOYSIq2mvOIxwty7ceFMdz39O+LI7fHL4e5piM+3iXdF2NZ/jkWlTT4nUs7kymXsdR4JczAAAAAA1pzgAAAAA0pDkDAAAA0JDmDAAAAEBDmjMAAAAADWnOAAAAADSkOQMAAADQ0GLrBdQT/Y32zN9wz/wt+Cgmk6Psw3uvhzHXNj4pjl+6cjfM8enNjTBmMn4Wxkxr/8pKGHN19cvi+PW7l8IcK5uPEqtZCsZr7BHm1xEqmQm5WnSnOH7pyhdhjk9vvhHGTMbbQcRumKPrThVHc7Xo0+L49bvnwxwrm/fDmDp1JHrvqVXf3XF7B8xGzfvw3noYc23jq+L4pSt/Eub49OabYUxciw7DHFG92r9yOsxwdfXz4vj1uxfCHLla1Iej9Mxw1H14760wJj4bxc9e7nvaVhgTKz9/ubNRVI8uhjlWNh+GMceBX84AAAAANKQ5AwAAANCQ5gwAAABAQ5ozAAAAAA1pzgAAAAA0pDkDAAAA0JDmDAAAAEBDmjMAAAAADS2WhweJFAcVlhHNk1lHjRw1Psv0OYa3n4Uxm+fKn2d9kFjHlXthyEd33iqOH56ehDl+eu5OcXx98CTM8d7jC8Xx4e3DMMcktQf60Mczc9QEpYoXYnj7eRizea48vj7Yiye68nkYEtei+P8a4loUP5vvPX6jOD68vRXmmHS7YUws80xEn6dGHalRz+bJrHzeGuuYpbpa/jyZ52rzXPm5Wh8knrsrO2HIR3fKNSB3Ltosjq8P4vPZe49fL45nzpLxSmtRizg6hrfjc038PS0xUep7Wo169FlxfH3wIMwR16P4mvVXj2abX84AAAAANKQ5AwAAANCQ5gwAAABAQ5ozAAAAAA1pzgAAAAA0pDkDAAAA0JDmDAAAAEBDmjMAAAAADS2Whw/6WUU4zyCRI4qp9Vlq5Nkvjk7Gj8MMv/74reL4b87vhTmurt4NY9YvfhbGTOvG6O0wZvnWk+L4pFtKzJTZR5G+ngmOtxo1b/r9Phk/D2N+/fHbxfHfnN8Jc1xd/TyMWb94O4yZVq4WPSiOTyqtJVZ+j3wjeMWrZ1RR491aNhlvhTG//viN4njuXPRFGLN+8TdhzLRytehhcby/WpSpIy9+j0BfcvUo+p4WPze5evRZEDH9e36+zkbzzy9nAAAAABrSnAEAAABoSHMGAAAAoCHNGQAAAICGNGcAAAAAGtKcAQAAAGhIcwYAAACgocXWC8jJ/I326f+Oe50c/VgaPSwHjOIcv+iGdRYzpeUu+Cxd13XdIBifn3vH9xHd/77U2GeZz7JUIeZMIsd+ML4Xr2K0VQ4Y7YQ5ftG9FMb0Ybl70HoJ38GcvL6PnFmpRRmzUq9qXLP4syyNnpQDRnGOX3Sr2QW9ULlz0Tzp44zmHMjsmJ3vadE5LzZfZ6P555czAAAAAA1pzgAAAAA0pDkDAAAA0JDmDAAAAEBDmjMAAAAADWnOAAAAADSkOQMAAADQkOYMAAAAQEOL5eGDRIr9KgspGyRiMmtlNs3TvcustY/PM0/XrC+ZOhGJrmuNOTI5ziRiloPxs4kc0efdTuR4GozvJXL08R6ZN8HrmUYyz28ftaivd0CNzxvVqoxMHYliZuX9XUuNfTYr5um6z5sa7xLv6O/ONfuzZulMM9v3xi9nAAAAABrSnAEAAABoSHMGAAAAoCHNGQAAAICGNGcAAAAAGtKcAQAAAGhIcwYAAACgoVn6o+MFB0dsnuiyZ/7++mz/jfbvJrMNa9ybTI5BD+vgu4vuS1/z1Lj/mc+ylIg5O+V4LbvBeF/3bp7MyauX76nGno9qTWaOvurVqSnHMw4TMTU+76ycrWqci2ap9jo7vRh9vUtqfG+BSB/vztnnlzMAAAAADWnOAAAAADSkOQMAAADQkOYMAAAAQEOaMwAAAAANac4AAAAANKQ5AwAAANCQ5gwAAABAQ5ozAAAAAA1pzgAAAAA0pDkDAAAA0JDmDAAAAEBDmjMAAAAADWnOAAAAADSkOQMAAADQkOYMAAAAQEOLrReQM6gUU8NBhRz7FXLMya2rJrq/R22P8GdlrmmN+9vHvcvMsZeIeTrtQrp4LduJHNFaPQ9/XuYdcNxq/FEyK3Wkr3l2g/Ea/w+YeWZqXJN5eu76OtPUME9rnSd9vUtqfG+BiPNi1/nlDAAAAEBTmjMAAAAADWnOAAAAADSkOQMAAADQkOYMAAAAQEOaMwAAAAANac4AAAAANKQ5AwAAANDQwmQyab0GAAAAgGPLL2cAAAAAGtKcAQAAAGhIcwYAAACgIc0ZAAAAgIY0ZwAAAAAa0pwBAAAAaEhzBgAAAKAhzRkAAACAhjRnAAAAABrSnAEAAABoSHMGAAAAoCHNGQAAAICGNGcAAAAAGtKcAQAAAGhIcwYAAACgIc0ZAAAAgIY0ZwAAAAAa0pwBAAAAaEhzBgAAAKAhzRkAAACAhjRnAAAAABrSnAEAAABoSHMGAAAAoCHNGQAAAICGNGcAAAAAGtKcAQAAAGhIcwYAAACgIc0ZAAAAgIY0ZwAAAAAa0pwBAAAAaEhzBgAAAKAhzRkAAACAhjRnAAAAABrSnAEAAABoSHMGAAAAoCHNGQAAAICGNGcAAAAAGtKcAQAAAGhIcwYAAACgIc0ZAAAAgIY0ZwAAAAAa0pwBAAAAaEhzBgAAAKAhzRkAAACAhjRnAAA
AABrSnAEAAABoSHMGAAAAoKHF0uBrC38w6WshZQdHbJ7jZr/1AioqPjIz5evJP1povYZajl8tWkrEDKYcz8h83ihmL5Ejs9YoJnPNajy/u8F45vPWuK7zU1e/nvzjI1SL/kGiFu1UmGk5GD+VyBE9EzVqREZmv/dRzzLPf+a52g7GM/e/xh6J7m+mJp6pME/mutZYaySuvV9P/s6RqUVd13WvLfw3iXpU49xS4/nsq97UeM9HdSBzTaP9WOMc0HX93Ju+7l0Nta7ri/f15B/+hfXIL2cAAAAAGtKcAQAAAGhIcwYAAACgIc0ZAAAAgIY0ZwAAAAAa0pwBAAAAaEhzBgAAAKAhzRkAAACAhhbLw4NEikxM5GBGctSw33oBcARlnu8+atGs1Jmuiz/vUoU5DhMxu8F45r7UuK47iZhoLX29i2ZpH/Hd9HUuqvF8n5lyjq7ruu1ETFQD9hI5IjU+byZHJiZ6fjOft0YtqvG+ytzfGvfP2fjF6OtdMitno0zNqrFfI309n/x5R/+6+eUMAAAAQEOaMwAAAAANac4AAAAANKQ5AwAAANCQ5gwAAABAQ5ozAAAAAA1pzgAAAAA0pDkDAAAA0JDmDAAAAEBDmjMAAAAADWnOAAAAADSkOQMAAADQkOYMAAAAQEOaMwAAAAANac4AAAAANKQ5AwAAANDQYnn4IJEiEzOtPuaoJbikM2RhuBLGPP9xeXzt1HaYY+uXS8XxyXgrzMFxNzhi89QQ1cW9HubIxMQ5FoYvhTHPf1y+N2un4nm2fnm2OJ6rRbvBeI3r3nXz9d47Tvo6F/XxfNeqd6eC8cy5KFpLjbXG12xhGH2Wrnv+4/LnWTs1CXNs/fJ0cTxXi2pcszOJmPIZLnd/oxzReEat2jtPMve4Rj2qsdf6Ol/V+B62H4xnrun0+3FhWK4TXZc5G8Xr2Ppl+fvgZPwszNGfaB/N/9nJL2cAAAAAGtKcAQAAAGhIcwYAAACgIc0ZAAAAgIY0ZwAAAAAa0pwBAAAAaEhzBgAAAKCh4I/Bz9PfCp+ntdZQ/jvvC8Py36zvuq67dOVuGLM+2Euv6NtsXpkUxz+5Ppx6jnqCR4Jjrvzc1VOjnmVyvPh56tWiU0HEUphj80r5/yNytajG/2nsVMhBG9O/E/ubZz8Yz7zv4ucqjjmTyBHV1kyt2i2OLgzjz3vpyudhzPpgPNU6uq7rNq+UP0+uFkXXrMa967r4/p1N5Ig+TyZH5GmFHPMmcyap8Z6P5smsI9prs3T+jt7zmfocfU87HWbInY2i+xtf180rW8XxT66vhjmOlj6emW/nlzMAAAAADWnOAAAAADSkOQMAAADQkOYMAAAAQEOaMwAAAAANac4AAAAANKQ5AwAAANCQ5gwAAABAQ4vl4YN+VsG/ZjB1hvHl02HM+mBh6rVcv3shzLCx+iCIOEyso4Zgu/dmv0KOWfksx830z2ZOpvbuVcjx4o0vx/8HsD7I1ICoFv2lMMPGajTPJLGOSOazRPeu67pud9qF8EL09VxF8+xUmGM5EbOUiDkVjA8TOaJ32nYiR/majS/Hz/f6YDz1PLlz0deJeSLR+yi6L13XdWcSMWeD8VcSOV4qD68kUkS2Mp/3qOnrTBLNk6kT0V6bpXNtjXN6VI8y39Omf99cv/uDMGZjNVP3+pDZz7Nxtn2R/HIGAAAAoCHNGQAAAICGNGcAAAAAGtKcAQAAAGhIcwYAAACgIc0ZAAAAgIY0ZwAAAAAaCv6ofI2/8z4rgo/aq8zfcS9bGL5UHL+28dup5+i6rvv5rbeL4yuje2GOcXdYZS2zYVaeiVlZR18yz8xBD/NM/+zm1PgsmT0S1cX48y4MXy6OX9vYTKxjNYz4+a2/XBxfGU3CHONuLYjIXLMae2A3EbNXYZ5IjToyS+/W46RGjcjsscwe6aNexXMsDMt78drG7cQ64mfz57d+UBxfGX0d5hh3Ub2ap3cNZN4D05836uhnz8ff0z5LZImva/w97XGYY5xYSazG/evrPDHbdc8vZwAAAAAa0pwBAAAAaEhzBgAAAKAhzRkAAACAhjRnAAAAABrSnAEAAABoSHMGAAAAoCHNGQAAAICGFlsvoD/7iZj5uRzjyyenznFj9HYYszz6eup5ZkdmDzCbBpVijpNMPYuu2TDMML58PohYDnPcGP00jFkeRf+XcCbM0a0E43txiu55NE9mH24nYqLFHCRy9MFz10Zf1z3zUGT2cyT6PPEc48vRsxl/lty56F4YE6tx/6IasFMhR0bmbBVc+62zFdbxtEIOXpwaZ/AaOTJ7Poo5DDPE39PiM9qN0YUwZnn0JIioUWsyOaLPk/k9SObcGuXJvLOiz1OjLn7/6+6XMwAAAAANac4AAAAANKQ5AwAAANCQ5gwAAABAQ5ozAAAAAA1pzgAAAAA0pDkDAAAA0JDmDAAAAEBDi60XwJ+3MFwJY65t/LY4/qtna2GO5VsPskuCxgatF/AdHLRewHewVBxdGJ4PM1zb2CmO/+rZT8Mcy7eGYUwXlcWX4xTdWjD+PJHjkzNBjnOJJPcTMeXrmttnUUyNvTpPz+ZRUuO6Z3Jk9sh2ML6fyFG2MDwZxlzb+HVx/FfPgme367rlW/cSq5mVGh+tYzeRI3NvopjMPFE9e5TIMe0cx9VRqtGZZy/aj4eJHOU9X+d72qthjuVbW2FMP/c30y6Ifu+RyVE+k36jxueN9lFf79e/mF/OAAAAADSkOQMAAADQkOYMAAAAQEOaMwAAAAANac4AAAAANKQ5AwAAANCQ5gwAAABAQ5k/Ok515b+vPr58cuoZHoxeCWOWuntTzwP9mJVStZ+IGQTj5ec/lyObZ7p5xpdfS+Q4LI4+uPNWmGHp9YfxNK8G4y/HKbpzwfg4kWMnGP/tWiLJ2URMtJjdRI5oj2T2GX/WbDyb/eXIfJa9YDx6aGLjy5lnZrs4+mC0HmZY6hK1qIoa9ya67rVEa80UzijHqeRaSjI1kdmVOV/VqEfTPzfjyy9NnSNXjx5MPU90RsvJ/JYjOqcvJXJk6kC0B2qdsaf1/efwyxkAAACAhjRnAAAAABrSnAEAAABoSHMGAAAAoCHNGQAAAICGNGcAAAAAGtKcAQAAAGhIcwYAAACgocXWCzieBsXRjdWtMMPmwcni+NLoSWIdS4mYgynHuy76vLkc+4kYjrcavebDYHyWSmaN56psY/VxGLN5sFocXxo9jCdaTywmujWZj7s35RypmEkiSV9e/B6J56CdGvd3NnLkalH5TFPvXDQrez4qaJnrvlthHTXeizsVctTYq3w/mTN6jecmM0+N7y3lmI3V+2GGzYPymXRp9CCxjhoyZ+PM4SfSV12M5pmV+vz9+eUMAAAAQEOaMwAAAAANac4AAAAANKQ5AwAAANCQ5gwAAABAQ5ozAAAAAA1pzgAAAAA0tNh6AfNn+r+fvjB8qTj+k9P3wxw3Ru8Ux5e7re+0pm+3F4zvV8hxkMgRXfdMjsxamV/RPsuISuJSIkeNvZr5LFGeuFYtDE8Xx39y+k6Y48ZovTi+fPAkzNF9Wa6JXdd13b
1gPJEijHmeyBFeks1EkqeJmN1gPLOPophMjmnnOGr6+rzzdF2nX+vCcKU4/pPTD8Mc8blonFjJqURMDVF9zrwDopjtRI6+zk7Tn51nY455NC/XJbMXD1/4KrouU4/i9/yN0e8Wx5e7x4mVzNPX9OnPpHXqUY13Z9tnxi9nAAAAABrSnAEAAABoSHMGAAAAoCHNGQAAAICGNGcAAAAAGtKcAQAAAGhIcwYAAACgIc0ZAAAAgIYWWy/g6BmEEdtvng0insazPFsOIiZhjpylYHwvkSPaZruJHJl5IvsVctBGpo98UGGeaL/Hz3fXnaqwjozMc1O2/Wb0bN4PcwyebQYRmecuqold1z0flsfvn4lzhDGZtUbXJLoeXdd124mYaD9n9nuNZ2IW5pglx+3z1hA/V9tvRmea+JkZPAtqRHcyzBG/A7ou9x6YVubME50VM/UsM4+zEzXMT+3cfjNTB8oGz6LnJvPsZa5ZjXrURzug1v2P8sx/vfLLGQAAAICGNGcAAAAAGtKcAQAAAGhIcwYAAACgIc0ZAAAAgIY0ZwAAAAAa0pwBAAAAaKjCHzav8bfRZ+Vvkmf+VnwUE+c4uRZ93qUwx4nzC8Xx/fMbYY6cveLo4VdxhqXRn1ZaS0l5nd/I7NVZ2Yt8dwc9zJGpEcsV5snsw1PBePxMnFx7GkRE41134vyd4vj++biedd3jRMyZ4ujhV2fDDEujZ0FEZg9tB+M7FXJ0XXz/aux39e67y1yzo3Qu6sfJtWg/Z85F5fq8f/6NxErKdSbj8Kt4rUujh0HEbmKm6JplcmTeaTX2cySzjkgfZwD+YoeJmBo1LZNj+n1wci16/8brOHF+XM5w/vR3WFFJud4cfhXXtKXRkwrrqHF/Z+UZrlGPvj+/nAEAAABoSHMGAAAAoCHNGQAAAICGNGcAAAAAGtKcAQAAAGhIcwYAAACgIc0ZAAAAgIY0ZwAAAAAaWmy9gPkzmHK867ruTDB+KsxwdXU5iDibWEfG0/Lw6v04xTvPisPX774Sphje3imOT8YP43Wk7k0f9ivkOG6P7mEi5qCneSI11pHJEe2jTI6FYHwvzHB19XZinkjm2Qzq4mri/xreKee4fvcHYYrh7efF8cl4O15H4rrW2Uc1as0szAFxjbi6uhlEZJ6p4MyTsZqY553y8PW762GK4e3y2Sp3LoJZUeP8NTuurn4RRNQ6xwdnn9XEPO+U6+v1uz8MU8Rno/L3uG9krkl05pj/feSXMwAAAAANac4AAAAANKQ5AwAAANCQ5gwAAABAQ5ozAAAAAA1pzgAAAAA0pDkDAAAA0JDmDAAAAEBDi60X0J/MRx1UiMnkWAvGX4lTvL5QHL6+djFMcfJROUfXdd2Jx7vF8avbn4c5uu5RcfTaxidhhs1zk+L4pzc3whyT8VYY03UHU45nY6aV2WdHyf6MzLPXyyq6rvzcfaOPfZZR3ovX71aqRefL41dX74c5orVe2/jTMMPmufIe+fTmuTDHZNzXPqK+vo5N0Tx91cSMPq5Jpt6Vn6vrd18LM5x8dDKMOXF+uzh+dfVOmCO6f9c2vgwzbJ4rr+PTm2fDHJPxrLxHZmUdfD+ZelSjptXYJzVyZGpe+fcPubNRph4F39NW41oSXZPc97Ty5/305pthjsl4J4w5DvxyBgAAAKAhzRkAAACAhjRnAAAAABrSnAEAAABoSHMGAAAAoCHNGQAAAICGNGcAAAAAGgr+UHvm77jXEM2z38sqcgbB+FIix2p5+NWFOMW/VR5eefwoznEuDul+Xh6++f++Faa49JPfFMfXB7thjvVBeY989OaZMMfyrcMwpuuimHitXbeXiJlWtA+PmoNETI1rEs2zncgR1avMPqyxzzLXLKq901/TlQ8eJaIS84zKtfXm8PUwxaUr5bWsD+Jrtj4o1+eP3ozfAcu3wpBK+ni39nVOmCd91KLjdt0z9axcN1c++DKRI1OLyrX35nA5THHpyhfF8fXBTphjfVBex0dvng1zLN/KXFfm26ycF/v6Ljcre7pco1c+yJwnE0blz5s7G0X1qMb3tMzZKK57x4FfzgAAAAA0pDkDAAAA0JDmDAAAAEBDmjMAAAAADWnOAAAAADSkOQMAAADQkOYMAAAAQEOaMwAAAAANLbZewGwZJGKiS5a5pGvl4R+O4xR/pTz8OzujMMXB+FkY8+mTd4rjk0+3wxyfXH+lOP7hz+Ic1za+Lo6/e/GLMMf7D38UxpzYfBRFhDliOxVyLFXIMU8OZmSezDr2elhHNqa9heFLYcwkUYtq5PjkenktH/7s5TDHtY0/KY6/e/GzMMf7Dy+EMSc27wcR83H/j57MOaGPeWbp/vdxTaavvQvDhTDDZPx06nkm48y5qJzjw58NwxzXNsrnvHcvPgpzvP/wYhgTn4tgVs5otUQ1LfNdoJxjYbgaZpiMtxLzTJ/jk+vLxfEPf7YW5ri2cac4/u7F22GO9x/+MIw5sfkkjJl3fjkDAAAA0JDmDAAAAEBDmjMAAAAADWnOAAAAADSkOQMAAADQkOYMAAAAQEOaMwAAAAANLZaHo7/zno2J1Pi79fsVctSYZynM8PxRcNkTl+O9e68Xxy9PNuN1JNba7U05ngha+eB+mOHGOxeK4+9ejD/v/vJCGJO4IjRRo0b0pcZa+6mJzx+9VA5YjWd57/EbxfHF8VacpMp7JOOwOJqrRW8Xx9+9eCvMsb8c/79IP7UoeBdBT54/OlUOWI1r4nuPN4rji+Ovv8uSCqKDz87UOVY+eBpmuPFOufa+e/GzMMf+crkmdp1zEX3p65wXnzfiehS/O997XP6etjjO1IkaMte1HFPnbHQnzJH7nhbVrL76AS+OX84AAAAANKQ5AwAAANCQ5gwAAABAQ5ozAAAAAA1pzgAAAAA0pDkDAAAA0JDmDAAAAEBDmjMAAAAADS2Wh5cTKU5VWMZuML5XYY6Mgwox8VqXb/1JcXzzjzfCHFdf+rI4fvPZz8Ick8fbYUz3i2D8t3GKrkvMEzi5thNExNd9cSfaZ11XZy9m9tG0+phjlmQ+76CneeZhjpzlW0+K45sX4mt6dfVucfzm8I0wx2T8LIyJ9XNdT65NX88Wdw4TUfOyn2F6y7ceFMc3L0zCHFdXPyuO3xxeCHNMxlthTH9n0rKTa9F5Ja4hizv+j5a+9PU+mv7duXxrXBzfvBA/N1dXg+9pw9fDHLl6FF3Xvs5G0fe02OLO80TU/tTzzMo1+zaqMgAAAEBDmjMAAAAADWnOAAAAADSkOQMAAADQkOYMAAAAQEOaMwAAAAANac4AAAAANLRYHj6VSLFcZSFlff298cw8exXmuV8c/XD0dpjh2t5WcfzShbthjk9vboQxkw+3g4jfhjm6blwc3b+yEma4uvp5cfz63R+FOVY2vwhjYpk90sd+7euZmCezck1mZR1BeU/48N6FMObaxp3i+KUrmVr0RhgzGT8LIqa/7rla9GVx/Prdi2GOlc1H2SUVDCrkqLFXa6xjnmSu2azcmxpqfJbpc3x47/UwJq5F5XNE1yXPReOdIKJGLVoNY66ul
s8081WLeHGmPwt03X4w3tfZOLPXopga9Sh+tq5tlL8fXboSfyfJ1aMnYcy06pyNLoU5VjbL34+/Ed2/+f+e5pczAAAAAA1pzgAAAAA0pDkDAAAA0JDmDAAAAEBDmjMAAAAADWnOAAAAADSkOQMAAADQkOYMAAAAQEOL5eHlRIqzVRZStp+I2QvGd2ssJGEnEXO/ODq8/csww+a5p8Xx9d+eiZex/i/DkI/eebk4fnj6SZjjp+e+LC9jEF+z9x6/URwf3t4Kc0xS96YPB60XMIcy12zQ0zzHx/D2YRizea583dcHC/FEV+6GIR/d+UFx/PB0vNa4Fj0Ic8S16HmYYxJGdF28n+3V2eXe1JZ5rjbPla/7+iA6J3Zdd+XzMOSjO28Vxw9Px2fWn567UxxfHzwOc8xOLYKMqC7W2mdRnqWpZxjejp+czXPlmHr1KDobxXXgp+dGxfHc2ahcF4e3n4U5cvUo2kc13r81cnz//eyXMwAAAAANac4AAAAANKQ5AwAAANCQ5gwAAABAQ5ozAAAAAA1pzgAAAAA0pDkDAAAA0JDmDAAAAEBDi60XMFv2e5pnXBydjMvjXdd1v/745eL4b84/CHNcXf0ijFm/GEUMwhyRG6PLYczyrYfF8Ul3MPU65stx+7wZrkltk/GTMObXH79VHP/N+fi+XF39MoxZv/hZGDOtG6O3w5jlW4+L45PUTNPXzdlxlD4L38+L3wOT8VYYE9eivTDH1dXPw5j1i7fDmFj5mtU5F02/DmZd5txT46teH+erzBxLiZjo855K5CibjHfCmF9/fKk4/pvzT8McV1fvhDHrFz8OY2Lla5+rR+Xz4iRVa9SjrvPLGQAAAICmNGcAAAAAGtKcAQAAAGhIcwYAAACgIc0ZAAAAgIY0ZwAAAAAa0pwBAAAAaCj4Y/C7iRTT/734eJ7y318/jpZGD8sBozjHL7pXEjPV+Jvz5RzL3VZP67CPoLal0b1ywCh+dn/RrVRazXSWu68TUdHnqVGrmG/eV39W5npEMfH1WBo9KQfM1LmobLkLPsvcOUr7eZZkrmvmu9y089S4vzXqRNd13VIwXuN7a2IVYT3aDnP8onupwkqmvze5elTjbNTX+Wm265FfzgAAAAA0pDkDAAAA0JDmDAAAAEBDmjMAAAAADWnOAAAAADSkOQMAAADQkOYMAAAAQEOaMwAAAAANLZaH9xIptissI5rnIJEjE3OUDHrKEcVkcpwKxvcTOaL7m9mrmT0SrSV4ZGBmZJ6rSI39Pk+1uUZdnaV5+nCUPkvGrOyRWXquapwT5mkfzdNaIzX20SztxeOmr2sfzdPXOjJnkqUpx7sufsYz56vo9w+ZOlLjus7T98VZ+Y7V9rr75QwAAABAQ5ozAAAAAA1pzgAAAAA0pDkDAAAA0JDmDAAAAEBDmjMAAAAADWnOAAAAADQU/EHxvX5WEc5T4++N1xL9bfsaf6N9Vv4mfSbmVCLH0pTjXdd128F4jWvGfIuezYwaz+9RUuOaHjVqDZEae6TGuafGeaWv88g81d55qQGzdHbmxcjc43nZB5nnKvObgqiWLCdyRDLfffr6Dh3pq15F82RqfOb7YA2z/Uz45QwAAABAQ5ozAAAAAA1pzgAAAAA0pDkDAAAA0JDmDAAAAEBDmjMAAAAADWnOAAAAADSkOQMAAADQkOYMAAAAQEOaMwAAAAANac4AAAAANKQ5AwAAANCQ5gwAAABAQ5ozAAAAAA1pzgAAAAA0pDkDAAAA0NBieXgpkeJMlYWU7fUwR1Zwyao4SMQMKuSoYbdCjv1ETPR5+vq8zK4+ns3j5rhd06iuQsasvI9qPL+zch7J5OhrnlkxT2vlxci8s2q81/o4g2dyHCZiou8UO4kc0TXLfPfJfLfpQ417k9lDs1KPZuW6f39+OQMAAADQkOYMAAAAQEOaMwAAAAANac4AAAAANKQ5AwAAANCQ5gwAAABAQ5ozAAAAAA1pzgAAAAA0tDCZTFqvAQAAAODY8ssZAAAAgIY0ZwAAAAAa0pwBAAAAaEhzBgAAAKAhzRkAAACAhjRnAAAAABrSnAEAAABoSHMGAAAAoCHNGQAAAICGNGcAAAAAGtKcAQAAAGhIcwYAAACgIc0ZAAAAgIY0ZwAAAAAa0pwBAAAAaEhzBgAAAKAhzRkAAACAhjRnAAAAABrSnAEAAABoSHMGAAAAoCHNGQAAAICGNGcAAAAAGtKcAQAAAGhIcwYAAACgIc0ZAAAAgIY0ZwAAAAAa0pwBAAAAaEhzBgAAAKAhzRkAAACAhjRnAAAAABrSnAEAAABoSHMGAAAAoCHNGQAAAICGNGcAAAAAGtKcAQAAAGhIcwYAAACgIc0ZAAAAgIY0ZwAAAAAa0pwBAAAAaEhzBgAAAKAhzRkAAACAhjRnAAAAABrSnAEAAABoSHMGAAAAoCHNGQAAAICGNGcAAAAAGtKcAQAAAGhosTT42sIfTPpayIs3SMQsVYg5lcgR2U3E7ATjexXW0XXxdctc1xrXpIb9CjkOKsRkckTi6/715O8vVJhoJry28A8StaiP65rZ78WympSpAdHnrVUDIjWuWY15+pqjxjugRk3crhATvUcylsOIryd/7wjVov96jmpR5kzTh37eebOjxjmh1jx95Khxb2rkiD/L15P/9sjUoq7rutcWfv8IfU+D4+XryT/+C+uRX84AAAAANKQ5AwAAANCQ5gwAAABAQ5ozAAAAAA1pzgAAAAA0pDkDAAAA0JDmDAAAAEBDmjMAAAAADS2Whw/6WUWoxjqWKuTouvCSdacqzRM5DMZr3btBMJ75vDWu/V4wvl8hR1+ia8r3U+P+Lgfj0fOfyVHLbg9zZPZqFNPXfu9rnp0px7Nq1PAoR405an3eeZHZZzWua43nqq9nIvq8s1IDMvU7o4/a28fz36c+9oCzFTD//HIGAAAAoCHNGQAAAICGNGcAAAAAGtKcAQAAAGhIcwYAAACgIc0ZAAAAgIY0ZwAAAAAa0pwBAAAAaEhzBgAAAKAhzRkAAACAhjRnAAAAABrSnAEAAABoSHMGAAAAoCHNGQAAAICGNGcAAAAAGtKcAQAAAGhosTw86GcVoRrrqPVZ9oPx3QpzZHJEMQdhhoXhShjz/Mfl67Z2ai/MsfXL8jyT8VaYIxZs5Wri6xrHZHJEZuXZnCVLPcwRPf9d13U7FebJ1IAa+2gW5qhVi+LrvvXL5eL4ZPwszNF15RxddyqRIxMT2a4QU2OvRtfjqOnnmagzT19rjfT1zpv+3ZurReXzxtqpp2GOOuei6PPUuGa11FjrtHPAfMnVo/J4nbNRje9pZPnlDAAAAEBDmjMAAAAADWnOAAAAADSkOQMAAADQkOYMAAAAQEOaMwAAAAANac4AAAAANLRYHh70s4pe1Posh8H4boU59hIxB8XRheHpMMOlK1+EMeuD8jzhFuq6bvPKw+L4J9dXwxzx/YvX0XVnEjH7wXh0/7su3gPRNc04Ss9mRubzzso1qVEDMnukxj568erVounXsnmlnOST668kskR15GwixzARE3lcIUfmXRPJ1FXa
mI8akTP9Z1kYroQxdc5Fsc0r5fFPrmdqRFQUM+uclffmUdqrEMvVo7thzPog872kbPPKk+J4rh5Ri1/OAAAAADSkOQMAAADQkOYMAAAAQEOaMwAAAAANac4AAAAANKQ5AwAAANCQ5gwAAABAQ5ozAAAAAA0tloeX+lnFXNnvYY69RMxBcXR8+VSYYX2Qmafs+t2LYczG6uMgIrOO5WA802fM7OcoZjuRgzZq1KtoL5afu3o55klUi+Jnc31wOPUqrt/9QRizsfpk6nm6Lqqta4kc6xXWEby+u66L9+LTCus4UyHHPOnr+T1qdaK9XC16PvU8dWrR9DURmF3jy/GZtc7ZqMb3NPWoT345AwAAANCQ5gwAAABAQ5ozAAAAAA1pzgAAAAA0pDkDAAAA0JDmDAAAAEBDmjMAAAAADS22XsDxdBCMD8IMC8OV4vi1jTuJdZwKI35+60JxfGX0MMwxDj/vUpgjltnKNbZ7Jkd0/6LrkRHvEV6EzL2LYmrc/1qm30dxLfrTqefouq77+a23i+MrowdhjnGVlcyT6P7WqCPHrRYdt887K+K6GdeiTxLzxO/4n9/6YXF8ZbQV5hh35bXm7AfjNd5X2RjgXxXXo1GVeZyNjh6/nAEAAABoSHMGAAAAoCHNGQAAAICGNGcAAAAAGtKcAQAAAGhIcwYAAACgIc0ZAAAAgIY0ZwAAAAAaWiwPH/SziipqrHXQU44oJrgtXdeNL78SRGyHOW6M3gpjlkdfBRFLYY6u20vERKL7u5/IkYmpkSNaa419Nk/PZg2Zz9vHPqshc//n5/6OL58OIuL/A7gxejuMWR7dyy1oKpnrHtXWp4kcNWrAo0RMtNYa+yx+1xwtmXdeH2apRszGWsaXT06d48boh2HM8mg3iFhLzHQms5xA9M6L1tl1XbeTiInOPbNx/2GWjC9P/67InY0eTD0Ps8UvZwAAAAAa0pwBAAAAaEhzBgAAAKAhzRkAAACAhjRnAAAAABrSnAEAAABoSHMGAAAAoCHNGQAAAICGFsvDB4kUgwrLiObJrKMvNT7vUnF0YfhKmOHaxv3i+K+eXQpzLN86DGO67mwwvpPIMUv3j/nU1x6alVrUR12NLQxfCmOubdwpjv/q2bkwx/KtJ4nVzMq7ZjcYf5TIsZeIiWwnYqK11tjP0RxHTWYfzsbzOzv1bHoLw5Uw5trGqDj+q2drYY5cLYryxPPEMZk9NA7GnyZyZPTxjGfOoxH/30w/+qtHD7JL4ghRyQAAAAAa0pwBAAAAaEhzBgAAAKAhzRkAAACAhjRnAAAAABrSnAEAAABoSHMGAAAAoKHF8vBBP6sI58msI4oZJNcSqZGnfNnHl4eJHHvF0QejH4YZlrqvE/NEMvcmumZ97TPmV40a0Jejs9/Hl09OnePB6NUwZql7MPU8/V3XnWC8XJu/sV1hHZl5+rgm0fU4ak4lYpYqzBPd391EjlmpNdOvo04tWgtjlrrniUxngvF4nq5bD8YXEjmifVbr/1+PzjsNahhfnr7G5+pRjbMR88YvZwAAAAAa0pwBAAAAaEhzBgAAAKAhzRkAAACAhjRnAAAAABrSnAEAAABoSHMGAAAAoCHNGQAAAICGFlsvoD8HiZjBC19FxsbqOIzZPCj31ZZGD2stZ04cJmL2epqH+jLP76yI6kimzszG591YfRLGbB4sFceXRg8qrWY26nN8bzL3rsb9nZUctJF5Hmo8M7OxRzZW4zqyeVD+vLlz0TC5olkQHeEzR/zMHjkVjGf2yH6FHJFZeUdw1G2sPg5j4u9ptc5GHDV+OQMAAADQkOYMAAAAQEOaMwAAAAANac4AAAAANKQ5AwAAANCQ5gwAAABAQ5ozAAAAAA0tTp/iYPoUVQxmJEd8SReGK8Xxn5zeDHPcGP1ucXy5exTmyN277WB8L5Gjhmit+72sos48NfYZf16NWnS87k2dWvTD4vhyt/Wd1vRizcr97Wsd0TMxK9djnszTu2YpGM/UzFoxZQvD08Xxn5z+IsxxY/R2cXy5e5hYSeZME52LHiVyRE5VWEdf57OMaI/sVpgjc80gFp+NvgxzxPXowXdZEseIX84AAAAANKQ5AwAAANCQ5gwAAABAQ5ozAAAAAA1pzgAAAAA0pDkDAAAA0JDmDAAAAEBDmjMAAAAADS2Whw8SKQYVlpGZZ1o11lknz/abUY7deBXPngYRDxMr2U/EHAbj8Vr7kdlDfewz6Mv0+zmuRbHBs6lTVFKrxvchePX2pkZNnKfrXsOsvGsy1z3aZ5l9WOMdH1+P7TeXpp6lTi3K3LudYPx+IsdeMD799agn2ms1nonorJnhjEcd22+emjrH7JyNmDd+OQMAAADQkOYMAAAAQEOaMwAAAAANac4AAAAANKQ5AwAAANCQ5gwAAABAQ5ozAAAAAA0tTp/iYPoUoUFPOWrME/e7Tq7tBhE78Szn7xfH989n7sv09+7wq2EYszR6HERkrnu01j72Ydf1txcjfX3eWdHX543mOVr37uTa3tQ5Tpwv59g/vzL1HBmHX50JY5ZGD3tYSUbm/0UOX/gqeFGmf666bqlCjhr//1bj/RzrpxadSmTJHIvLz+bhV/F1XxptRhGJdUQxcU2ss88y9oPx6Fyc4f+bqSP+nhY7cb6co7+zUVz3lkYPelgJWSoZAAAAQEOaMwAAAAANac4AAAAANKQ5AwAAANCQ5gwAAABAQ5ozAAAAAA1pzgAAAAA0pDkDAAAA0NBieXjQzyqOlMMKMQdhhqurnwcRe4l1xPOEe2A12EJd13XvlHuA1+9eDFMMbz8rjk/GW/E6Up+3Bs9NfZlrWuP+9nHv+vos+xVyxK6ubgYRmc9b4bqvJmLeKQ/natHz4vhkvJNYSEb0fyeJ2ttbzTtOMte0xnWPcnjP/Ouurn4RRFQ482RiqpyLfhSmGN4+WRyfjLfjdaRE1203kWP6c+/0c0B/rq5+2XoJ3+jtbFT+3pn7nkbX+eUMAAAAQFOaMwAAAAANac4AAAAANKQ5AwAAANCQ5gwAAABAQ5ozAAAAAA1pzgAAAAA0pDkDAAAA0NBieXgpkWJQYRkHRyhHxv4Ln+H63R+GMScfxb25E+d3i+NXV++m1/Rtrm2MwpjNc+XxT28GAV3XTcZb2SUVZPZ7FFPjmYFIUN5TMv378n6+fvdHYYaTj+K1nji/XRy/uhrXkajGX9v4kzDD5rmTxfFPb74Z5piMn4cxfbwn+D76OidE82TWsdfDOrIx7eXORQthzInz5c97dXUzvaZvc23jt2HM5rnyGf3Tm2+FOSbjp+k1fbvyOfEbfdQzNZOMzNmoxvmp7Prdi2FMrh5F39O+TK/p2+S+p5XPi5/e3Ahz1PmeNv/8cgYAAACgIc0ZAAAAgIY0ZwAAAAAa0pwBAAAAaEhzBgAAAKAhzRkAAACAhjRnAAAAABoK/pD7qUSKpQrL2AvG9xM5DiqsIyOaJ7OOkxXWUb51Kx9sV5ij67rgT9vfHL4Vprh05Yvi+Pogvmbrg8Pi+Edvxvtw+dZWGNOP4LFL6Wu/82cNWi+gsujzZPZqOUe
uFiWu66ic5+bw9TDFpSufF8dztag8nqtFz8OY+NpH782MGnXkqD0T82JWzjyZmNl4X6188KhOolF5z98cXghTXLpyrzieq0XlGvDRm3GO5VtPw5hYjT1SQ42ayPyrccaOZN575T2/8sHDOksJv6dlzkZ3i+PRd7BMzEdvxj2F2fme1pZfzgAAAAA0pDkDAAAA0JDmDAAAAEBDmjMAAAAADWnOAAAAADSkOQMAAADQkOYMAAAAQEOaMwAAAAANLZaHlxMpTlVYxiAY30nkOAjG95Jr6UO01v1EjvKtWxi+FGaYjLcS80yf45Prq8XxD392NsxxbePT4vi7Fz8Lc7z/8GIYc2LzURgTi/ZzjZ5oNAftRPcmev4zOTIy89SwVBxdGL4SZsjVonLNy9Wi8jstV4u+LI7natGPwpg6tYj6+nquaswzKzlmw8JwJYyZjJ8lMpXrc64WDYvjH/7s5TDHtY3bxfF3L/7LMMf7D38QxpzYvB9E1HhfzdM7j3aCr60pmb324vd0rh719T2tRj0aFcfrfU97GMbMO7+cAQAAAGhIcwYAAACgIc0ZAAAAgIY0ZwAAAAAa0pwBAAAAaEhzBgAAAKAhzRkAAACAhir8wfgafwt+nhxMneH5o+CarcY53nv8enF8MfF37ftTvmYrHzwKM9x45+3i+LsXPw5z7C8fhjFLYQSza1Zq0fQ1ok6O2PNHwY5fja/pe4/fKI4vjp8lVtLXveujFn0Z5thfXghj1CLK+qkRs1OL4hxxLcqci2alFt0LM9x45wfF8dy56HkYs1RlD8zK+5nZdTYR81Yi5u1gfC3M8PzRy+WA1X8R5njv8W5xfHF8K8wxK1Y+eBjGxGejz8Iczkbf8MsZAAAAgIY0ZwAAAAAa0pwBAAAAaEhzBgAAAKAhzRkAAACAhjRnAAAAABrSnAEAAABoSHMGAAAAoKHF8vBBIsVuhWVE8+xXyFFjHXUs33pYHN+8EOe4uvp5cfzm8I0wx2T8LJ4ovCb9XPeTa9tTz7K4M5k6R+7zRjF7Pa2D+o7WdV++9aQ4vnkh7t9fXf2iOH5z+HaYYzLeCmPi90A/9+bk2vTP7+JO5p0WxfT1XpyFOWbJcfu8/YjPRYMwR1yL+joX9ePk2vTn79y5KLr28b2pk4Oj7a045NW/HMf8zfLw0t96GqZY6soxm89eCXNc/b8Oi+M3PwpTdJPx/xMHzYj+6tHR55czAAAAAA1pzgAAAAA0pDkDAAAA0JDmDAAAAEBDmjMAAAAADWnOAAAAADSkOQMAAADQ0GJ5eD+RYlBhGZl5IgdTjs+OD++9HsZc2xgVxy9d+SLM8enNjTBmMt4JY6a1f+V0GHN19fPi+PW7F8McK5uPEquJ9nNmH+1NOUfG/OxnWpl+n31470IYc23jq+J4vVq0HcbEytckV4vKn+f63R+FOVY274cxwP8vV4uic9G9MEeuFm0FEdO/n/evrIQx/Z2L+uBcRPzdp/ubccjv/Yf/ojj+d7t/kltOwf/w7/wXcdDg5eLwpc349xGf3ozrQFyPpperR18Wx3P16GF6TUeZX84AAAAANKQ5AwAAANCQ5gwAAABAQ5ozAAAAAA1pzgAAAAA0pDkDAAAA0JDmDAAAAEBDmjMAAAAADS2Whw8TKQ4qLCOap8Yc82N4+1kYs3muPL4+2IknuvJ5GPLRnbeK44en98McPz1Xnmd9EO+z9x6X1zG8/TzMMQkjui7ea4MKOWo4Xs9E7vNm7k2NeWZFjc9bVqcW7cUTXfkiDPnozkZx/PD06TDHT8/dLo6vD56EOd57fKE4nrlmky6um7Eae7VGjhe/D2dLjXvXl+CIN1PK+2h4Oz4nbJ5bKo6vDxL7/crdMOSjO28Uxw9Px6eNn567UxxfHzwIc/R3Loqe8UwN6KNOHLdadNSU3/Fd13Un/v34/fp3un9aHP/RPxylV/Rt/vP/7n8OY/7Hf+/3i+Pr/+tr8USpevSD4niuHkXf0zJno9eL48Pb8VkwV4+OPr+cAQAAAGhIcwYAAACgIc0ZAAAAgIY0ZwAAAAAa0pwBAAAAaEhzBgAAAKAhzRkAAACAhjRnAAAAABpaLA/v97OKcJ6DRI4oJpNjNkzGW2HMrz9+qzj+m/PbYY6rq1+GMesXb4cx07oxuhzGLN96UhyfdIPETMPkikpq7KMaOZYq5Jgny4mYzB6I1KgjNe5vjc+SyVGOmYyfhRniWhS/R+rUot0wR9cdFkdvjN4OMyzfelwcz9WiGve3hllZB21k7n/mXRPFTP++moz3wphff/xOcfw355+GOa6ufh7GrF+MYqZ/B+Rq0cPieK4WAd9V6nva7fLZ6NbjhTDH1dXymaXrum794mdhzLRy9ehBcXxSaS3HgV/OAAAAADSkOQMAAADQkOYMAAAAQEOaMwAAAAANac4AAAAANKQ5AwAAANCQ5gwAAABAQ4vl4b1EioMKy4hy1JjjaFkaPSwHjOIcv+heqbOYKS13W4moYTB+JpHjbCImsp2I2amQI5L5vEfJq4mYUxXm2Z1yvOviulmrng0q5ZnO0ujrckCqFp1OzHQyGM/kKF/75S74LF3X1bnumT0QzeO92EZwbOq6ruv2e5pnWkuJmEztXauQI/IojFgaBTGjwzBH7lz04p+95S4443Vd1987YF5qzbysk7/Y3TDi8A83wph/8h//3eL43/2Df5Je0bfO0ZXn6LquO/k/Bd8Fbsaf9xfdS9klvVDL3YPWSzhW/HIGAAAAoCHNGQAAAICGNGcAAAAAGtKcAQAAAGhIcwYAAACgIc0ZAAAAgIY0ZwAAAAAa0pwBAAAAaGixPHyQSJGJmVYfcxw1g9YL+A6WEjFng/G1RI5XEzGRRxVi9qdfRjeskGOerCVizlSYZzsY36mQo8b978thIib6PLXeIzXeA7OSY1bM03tingRHq95E9zdTMzPvzbfKwycr1Obn0Rmg6+LPG9Xmruu6vURMDVEd6evZzNSzaC2zUhNnZR18P1/GIX+4EYb8uvs3iuP/1d/477ML+laHf3g6DvrDKODO1OvgaPLLGQAAAICGNGcAAAAAGtKcAQAAAGhIcwYAAACgIc0ZAAAAgIY0ZwAAAAAa0pwBAAAAaGixPLzXzyqOlP1ETHDZUwYzkqOGU4mYs8H4q4kcG4mYSOaaRc/NuMI6zlTIMU8y9zfaIxlPpxzvuniP7CbX0odor2beAQc95Jgns/RZor1Y4x0wS5/3OKlx75YSMWtxyMngffSjzFoCnyTeec/OBQH3ExPtZFYTyDwT0f2bpecqWktmL/bxeWbpmvHdfR6HPEzc4392oTh8+M9qfBe4m4i5E4x/UWEdHEV+OQMAAADQkOYMAAAAQEOaMwAAAAANac4AAAAANKQ5AwAAANCQ5gwAAABAQ5ozAAAAAA1pzgAAAAA0pDkDAAAA0JDmDAAAAEBDmjMAAAAADWnOAAAAADSkOQMAAADQkOYMAAAAQEOaMwAAAAANac4AAAAANLRYHl7qZxWhgxnJkRFc0mqizzOokKMvu4mYp8F45vPW8CgRsx2M71dYRzTHUX
[... two base64-encoded PNG figure outputs omitted ...]
@@ -812,7 +781,7 @@
     "\n",
     "for i in range(12):\n",
     "    ax[i//4, i%4].imshow(image)\n",
-    "    ax[i//4, i%4].imshow(maps[i], alpha=0.7, cmap='jet', vmin = 0, vmax = 0.9)\n",
+    "    ax[i//4, i%4].imshow(maps[i], alpha=0.7, cmap='jet', vmin = 0, vmax = 0.4)\n",
     "    ax[i//4, i%4].axis('off')"
    ]
   }
@@ -838,10 +807,10 @@
   "orig_nbformat": 4,
   "vscode": {
    "interpreter": {
-    "hash": "a44da721a5827f98cc9179544fef0a80b8a9b4f8cdc93722922a5386f263ab84"
+    "hash": "1a7f8d9ad56e90590fcee41b0180e1f5be02ee2520f1975e08f7f16dd529d162"
    }
   }
  },
  "nbformat": 4,
  "nbformat_minor": 2
-}
\ No newline at end of file
+}
" ] @@ -812,7 +781,7 @@ "\n", "for i in range(12):\n", " ax[i//4, i%4].imshow(image)\n", - " ax[i//4, i%4].imshow(maps[i], alpha=0.7, cmap='jet', vmin = 0, vmax = 0.9)\n", + " ax[i//4, i%4].imshow(maps[i], alpha=0.7, cmap='jet', vmin = 0, vmax = 0.4)\n", " ax[i//4, i%4].axis('off')" ] } @@ -838,10 +807,10 @@ "orig_nbformat": 4, "vscode": { "interpreter": { - "hash": "a44da721a5827f98cc9179544fef0a80b8a9b4f8cdc93722922a5386f263ab84" + "hash": "1a7f8d9ad56e90590fcee41b0180e1f5be02ee2520f1975e08f7f16dd529d162" } } }, "nbformat": 4, "nbformat_minor": 2 -} \ No newline at end of file +} diff --git a/examples/tutorials/distinguishing_particles_in_brightfield_tutorial.ipynb b/examples/tutorials/distinguishing_particles_in_brightfield_tutorial.ipynb index 2c4c21003..e255d7a9d 100644 --- a/examples/tutorials/distinguishing_particles_in_brightfield_tutorial.ipynb +++ b/examples/tutorials/distinguishing_particles_in_brightfield_tutorial.ipynb @@ -87,7 +87,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { @@ -1389,4 +1389,4 @@ }, "nbformat": 4, "nbformat_minor": 2 -} \ No newline at end of file +} diff --git a/examples/tutorials/tracking_multiple_particles_unet_tutorial.ipynb b/examples/tutorials/tracking_multiple_particles_unet_tutorial.ipynb index 58e9b0bbe..2d90e32fc 100644 --- a/examples/tutorials/tracking_multiple_particles_unet_tutorial.ipynb +++ b/examples/tutorials/tracking_multiple_particles_unet_tutorial.ipynb @@ -87,7 +87,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { @@ -900,4 +900,4 @@ }, "nbformat": 4, "nbformat_minor": 2 -} \ No newline at end of file +} diff --git a/examples/tutorials/tracking_particle_cnn_tutorial.ipynb b/examples/tutorials/tracking_particle_cnn_tutorial.ipynb index 3c3444811..35100b871 100644 --- a/examples/tutorials/tracking_particle_cnn_tutorial.ipynb +++ b/examples/tutorials/tracking_particle_cnn_tutorial.ipynb @@ -87,7 +87,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { @@ -819,4 +819,4 @@ }, "nbformat": 4, "nbformat_minor": 2 -} \ No newline at end of file +} diff --git a/examples/tutorials/tracking_particle_using_pytorch_tutorial.ipynb b/examples/tutorials/tracking_particle_using_pytorch_tutorial.ipynb index 1cca6436f..b197639da 100644 --- a/examples/tutorials/tracking_particle_using_pytorch_tutorial.ipynb +++ b/examples/tutorials/tracking_particle_using_pytorch_tutorial.ipynb @@ -14,7 +14,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { @@ -682,4 +682,4 @@ }, "nbformat": 4, "nbformat_minor": 2 -} \ No newline at end of file +} diff --git a/examples/tutorials/train_label_free_particle_tracker.ipynb b/examples/tutorials/train_label_free_particle_tracker.ipynb index 2bb158ac0..b761ec8a9 100644 --- a/examples/tutorials/train_label_free_particle_tracker.ipynb +++ b/examples/tutorials/train_label_free_particle_tracker.ipynb @@ -89,7 +89,7 @@ "id": "fdd77bb2", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { @@ -573,4 +573,4 @@ }, "nbformat": 4, "nbformat_minor": 5 -} \ No newline at end of file +} diff --git a/examples/tutorials/trajectory_analysis_Transformer_tutorial.ipynb b/examples/tutorials/trajectory_analysis_Transformer_tutorial.ipynb new file mode 100644 index 000000000..d776f57e6 --- /dev/null +++ b/examples/tutorials/trajectory_analysis_Transformer_tutorial.ipynb @@ -0,0 +1,497 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ 
+ "# %matplotlib inline\n", + "# !pip install deeptrack" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Example 1. Single-level trajectory analysis using Transformers\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## 1. Setup\n", + "\n", + "Imports the objects needed for this example.\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "import deeptrack as dt\n", + "from deeptrack.extras import datasets\n", + "\n", + "import tensorflow as tf\n", + "\n", + "import numpy as np\n", + "import matplotlib.pyplot as plt\n", + "import matplotlib as mpl\n", + "\n", + "import scipy.sparse\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## 2. Overview\n", + "\n", + "In this example, [...]\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## 2. Defining the dataset\n", + "\n", + "### 2.1 Defining the training set\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Download the STrajCh dataset\n", + "datasets.load(\"STrajCh\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "TRAINING_PATH = \"datasets/STrajCh/training/{file}.npz\"\n", + "\n", + "# read training data\n", + "train_data = ()\n", + "for file in (\"data\", \"indices\", \"labels\"):\n", + " train_data += (\n", + " scipy.sparse.load_npz(TRAINING_PATH.format(file=file)).toarray(),\n", + " )\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "def splitter(randset):\n", + " def inner(inputs):\n", + " data, indices, labels = inputs\n", + "\n", + " # Convert to numpy array\n", + " data = data._value\n", + "\n", + " # get indices of the rows belonging to randset\n", + " idx = np.where(indices == randset)[0]\n", + "\n", + " sdata = data[idx][:, :2]\n", + " sdata = np.concatenate(\n", + " [\n", + " sdata,\n", + " np.array((0, *np.linalg.norm(np.diff(sdata, axis=0), axis=1)))[\n", + " :, np.newaxis\n", + " ],\n", + " data[idx][:, 2:],\n", + " ],\n", + " axis=1,\n", + " )\n", + "\n", + " labels = labels[idx]\n", + "\n", + " return sdata, labels\n", + "\n", + " return inner\n", + "\n", + "\n", + "nsamples = np.max(train_data[1])\n", + "train_set = dt.Value(lambda: tuple(train_data)) >> dt.Lambda(\n", + " splitter, randset=lambda: np.random.randint(0, nsamples + 1)\n", + ")\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "#### 2.x Visualizing the dataset\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "fig, axs = plt.subplots(3, 3, figsize=((10, 10)), sharex=True, sharey=True)\n", + "\n", + "cmap = plt.cm.ScalarMappable(\n", + " norm=mpl.colors.Normalize(vmin=0.01, vmax=1.4), cmap=plt.cm.Oranges_r\n", + ")\n", + "\n", + "for i in range(9):\n", + " data, labels = train_set.update()()\n", + "\n", + " data = data[:, :2]\n", + "\n", + " # extract changepoints\n", + " diff = np.array(labels[1:] - labels[:-1])\n", + " cp = (0, *np.where(diff != 0)[0] + 1, labels.shape[0])\n", + "\n", + " for idxi, idxj in zip(cp[:-1], cp[1:]):\n", + " axs[i // 3, i % 3].plot(\n", + " data[idxi : idxj + 1, 0],\n", + " data[idxi : idxj + 1, 1],\n", + " c=cmap.to_rgba(labels[idxi])[0],\n", + " zorder=0,\n", + " )\n", + " axs[i // 3, i % 3].scatter(\n", + " data[idxi, 0], data[idxi, 1], c=\"g\", zorder=1, 
s=20\n", + " )\n", + "\n", + " # set axis\n", + " axs[i // 3, i % 3].set_xlim([-0.6, 0.6])\n", + " axs[i // 3, i % 3].set_ylim([-0.6, 0.6])\n", + " axs[i // 3, i % 3].set_yticks([-0.5, 0, 0.5])\n", + " axs[i // 3, i % 3].set_xticks([-0.5, 0, 0.5])\n", + "\n", + "# set axis labels\n", + "plt.setp(axs[:, 0], ylabel=\"y-centroid\")\n", + "plt.setp(axs[-1, :], xlabel=\"x-centroid\")\n", + "\n", + "plt.subplots_adjust(wspace=0.05, hspace=0.05)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "data, labels = train_set.update()()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "#### 2.x Augment trajectories\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "def AugmentTrajectories(rotate, translate, flip_x, flip_y):\n", + " \"\"\"\n", + " Returns a function that augments the input trajectories by applying\n", + " a random rotation, translation, and flip on the centroid coordinates.\n", + " \"\"\"\n", + "\n", + " def inner(inputs):\n", + " data, labels = inputs\n", + "\n", + " # Apply rotation and translation\n", + " centroids = data[:, :2]\n", + " centroids_x = (\n", + " centroids[:, 0] * np.cos(rotate)\n", + " + centroids[:, 1] * np.sin(rotate)\n", + " + translate[0]\n", + " )\n", + " centroids_y = (\n", + " centroids[:, 1] * np.cos(rotate)\n", + " - centroids[:, 0] * np.sin(rotate)\n", + " + translate[1]\n", + " )\n", + "\n", + " # Apply flip\n", + " if flip_x:\n", + " centroids_x *= -1\n", + " if flip_y:\n", + " centroids_y *= -1\n", + "\n", + " data[:, 0] = centroids_x\n", + " data[:, 1] = centroids_y\n", + "\n", + " return data, labels\n", + "\n", + " return inner" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "augmented_train_set = train_set >> dt.Lambda(\n", + " AugmentTrajectories,\n", + " rotate=lambda: np.random.rand() * 2 * np.pi,\n", + " translate=lambda: np.random.randn(2) * 0.05,\n", + " flip_x=lambda: np.random.randint(2),\n", + " flip_y=lambda: np.random.randint(2),\n", + ")\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "#### 2.x Pad trajectories\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "def pad(pad_to):\n", + " def inner(inputs):\n", + " data, labels = inputs\n", + "\n", + " tlen = int(np.shape(data)[0])\n", + "\n", + " # create mask\n", + " indices = np.arange(tlen)\n", + " mask = np.stack(\n", + " [np.repeat(indices, tlen), np.tile(indices, tlen)], axis=1\n", + " )\n", + "\n", + " # pad data\n", + " data = np.pad(data, ((0, pad_to - tlen), (0, 0)), mode=\"constant\")\n", + " labels = np.pad(labels, ((0, pad_to - tlen), (0, 0)), mode=\"constant\")\n", + "\n", + " # pad mask\n", + "\n", + " mask = np.pad(\n", + " mask,\n", + " ((0, pad_to ** 2 - np.shape(mask)[0]), (0, 0)),\n", + " mode=\"constant\",\n", + " )\n", + "\n", + " return (data, mask), labels\n", + "\n", + " return inner" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "pad_to = np.unique(\n", + " train_data[1], return_counts=True\n", + ")[1].max()\n", + "\n", + "padded_train_set = augmented_train_set >> dt.Lambda(pad, pad_to=pad_to)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "#### 3.x Defining data generator\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + 
"outputs": [], + "source": [ + "generator = dt.generators.ContinuousGenerator(\n", + " padded_train_set,\n", + " batch_size=8,\n", + " min_data_size=1024,\n", + " max_data_size=1025,\n", + " use_multi_inputs=True,\n", + ")\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "#### 3.x Defining the network\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "import tensorflow_addons as tfa\n", + "\n", + "model = dt.models.Transformer(\n", + " number_of_node_features=4,\n", + " dense_layer_dimensions=(32, 64, 96),\n", + " number_of_transformer_layers=3,\n", + " base_fwd_mlp_dimensions=256,\n", + " number_of_node_outputs=1,\n", + " node_output_activation=\"linear\",\n", + ")\n", + "model.summary()\n", + "\n", + "\n", + "class mae(tf.keras.losses.Loss):\n", + " def call(self, y_true, y_pred):\n", + " return tf.reduce_sum(tf.abs(y_true - y_pred)) / tf.math.count_nonzero(\n", + " y_true, dtype=tf.float32\n", + " )\n", + "\n", + "\n", + "# Compile model\n", + "model.compile(\n", + " tf.keras.optimizers.Adam(learning_rate=0.0001),\n", + " loss=mae(),\n", + ")\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## 4. Training the network\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "with generator:\n", + " model.fit(generator, epochs=150)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## 5. Evaluating the network" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "VALIDATION_PATH = \"datasets/STrajCh/validation/{file}.npz\"\n", + "\n", + "# read validation data\n", + "val_data = ()\n", + "for file in (\"data\", \"indices\", \"labels\"):\n", + " val_data += (\n", + " scipy.sparse.load_npz(VALIDATION_PATH.format(file=file)).toarray(),\n", + " )\n", + "\n", + "val_data, idxs , labels= val_data\n", + "val_data = val_data[:, 1:]" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# sample index\n", + "idx = 100\n", + "\n", + "# get indices of the rows belonging to randset\n", + "indices = np.where(idxs == idx)[0]\n", + "\n", + "val_sdata = val_data[indices][:, :2]\n", + "val_sdata = np.concatenate(\n", + " [\n", + " val_sdata,\n", + " np.array((0, *np.linalg.norm(np.diff(val_sdata, axis=0), axis=1)))[\n", + " :, np.newaxis\n", + " ],\n", + " val_data[indices][:, 2:],\n", + " ],\n", + " axis=1,\n", + ")\n", + "\n", + "gt = labels[indices]" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "import itertools\n", + "\n", + "# Compute predictions\n", + "edges = np.array(\n", + " list(itertools.product(*(np.arange(val_sdata.shape[0]),) * 2))\n", + ")\n", + "pred = model([val_sdata[np.newaxis], edges[np.newaxis]])\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "plt.plot(gt[:, 0])\n", + "plt.plot(pred.numpy()[0, :, 0])" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3.8.6 64-bit", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.8.6" + }, + "orig_nbformat": 4, + "vscode": { + 
"interpreter": { + "hash": "1a7f8d9ad56e90590fcee41b0180e1f5be02ee2520f1975e08f7f16dd529d162" + } + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/requirements.txt b/requirements.txt index fe434b07b..e136ef78d 100644 --- a/requirements.txt +++ b/requirements.txt @@ -7,6 +7,7 @@ pydata-sphinx-theme numpydoc scikit-image tensorflow-probability +tensorflow-datasets pydeepimagej more_itertools pint diff --git a/setup.py b/setup.py index 4d2bbc84f..6f54ac41a 100644 --- a/setup.py +++ b/setup.py @@ -28,7 +28,7 @@ setuptools.setup( name="deeptrack", # Replace with your own username - version="1.2.1", + version="1.4.0a8", author="Benjamin Midtvedt", author_email="benjamin.midtvedt@physics.gu.se", description="A deep learning oriented microscopy image simulation package",