diff --git a/.circleci/config.yml b/.circleci/config.yml new file mode 100644 index 000000000..d6a37fa6a --- /dev/null +++ b/.circleci/config.yml @@ -0,0 +1,109 @@ +version: 2.1 + + +# ------------------------------------------------------------------------------------- +# Executors +# ------------------------------------------------------------------------------------- +executors: + gpu: + environment: + CUDA_VERSION: "10.2" + PYTHONUNBUFFERED: 1 + machine: + image: ubuntu-1604:201903-01 + resource_class: gpu.medium # tesla m60 + +# ------------------------------------------------------------------------------------- +# Re-usable commands +# ------------------------------------------------------------------------------------- +install_nox: &install_nox + - run: + name: "Preparing environment" + command: | + sudo apt-get install -y expect + sudo pip install nox==2020.8.22 + +install_suitesparse: &install_suitesparse + - run: + name: "Preparing environment" + command: | + sudo apt-get install -y libsuitesparse-dev + +setupcuda: &setupcuda + - run: + name: Setup CUDA + working_directory: ~/ + command: | + # download and install nvidia drivers, cuda, etc + wget --quiet --no-clobber -P ~/nvidia-downloads 'https://s3.amazonaws.com/ossci-linux/nvidia_driver/NVIDIA-Linux-x86_64-440.82.run' + time sudo /bin/bash ~/nvidia-downloads/NVIDIA-Linux-x86_64-440.82.run --no-drm -q --ui=none + echo "Done installing NVIDIA drivers." 
+ pyenv versions + nvidia-smi + pyenv global 3.7.0 + +# ------------------------------------------------------------------------------------- +# Jobs +# ------------------------------------------------------------------------------------- +jobs: + py37_linux: + docker: + - image: circleci/python:3.7 + steps: + - checkout + - <<: *install_suitesparse + - <<: *install_nox + - run: + name: "Testing theseus" + command: | + export NOX_PYTHON_VERSIONS=3.7 + pip install nox==2020.8.22 + nox + + py38_linux: + docker: + - image: circleci/python:3.8 + steps: + - checkout + - <<: *install_suitesparse + - <<: *install_nox + - run: + name: "Testing theseus" + command: | + export NOX_PYTHON_VERSIONS=3.8 + pip install nox==2020.8.22 + nox + + unittests_gpu17: + executor: gpu + steps: + - checkout + - <<: *install_suitesparse + - <<: *setupcuda + - run: + name: Installs basic dependencies + command: | + pyenv versions + pyenv global 3.7.0 + python -m venv ~/theseus_venv + echo ". ~/theseus_venv/bin/activate" >> $BASH_ENV + . ~/theseus_venv/bin/activate + pip install --progress-bar off --upgrade pip + pip install --progress-bar off --upgrade setuptools + pip install --progress-bar off torch==1.9.0+cu102 torchvision torchaudio -f https://download.pytorch.org/whl/torch_stable.html + python -c 'import torch; print("Torch version:", torch.__version__); assert torch.cuda.is_available()' + pip install --progress-bar off -e . 
+ install_cuda: true + - run: + name: Run GPU tests + command: | + pytest theseus/tests/test_theseus_layer.py -s + + +workflows: + version: 2 + build: + jobs: + - py37_linux + - py38_linux + - unittests_gpu17 diff --git a/.github/ISSUE_TEMPLATE/BUG_REPORT.md b/.github/ISSUE_TEMPLATE/BUG_REPORT.md new file mode 100644 index 000000000..517867085 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/BUG_REPORT.md @@ -0,0 +1,34 @@ +--- +name: "\U0001F41B Bug Report" +about: Submit a bug report to help us improve Theseus +--- + +## 🐛 Bug + + + +## Steps to Reproduce + +Steps to reproduce the behavior: + + + + + +## Expected behavior + + + +## Additional context + + + +## System Info + + +- OS (e.g., Linux): +- Python version: +- GPU models and configuration: +- CUDA version: +- pip/conda dependencies packages versions: +- Any other relevant information: diff --git a/.github/ISSUE_TEMPLATE/FEATURE_REQUEST.md b/.github/ISSUE_TEMPLATE/FEATURE_REQUEST.md new file mode 100644 index 000000000..10f526e57 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/FEATURE_REQUEST.md @@ -0,0 +1,26 @@ +--- +name: "\U0001F680 Feature Request" +about: Submit a proposal or request for a new Theseus feature +--- + +## 🚀 Feature + + +## Motivation + + + + + +## Pitch + + + +## Alternatives + + + + +## Additional context + + diff --git a/.github/ISSUE_TEMPLATE/QUESTIONS_HELP_SUPPORT.md b/.github/ISSUE_TEMPLATE/QUESTIONS_HELP_SUPPORT.md new file mode 100644 index 000000000..aff47d92a --- /dev/null +++ b/.github/ISSUE_TEMPLATE/QUESTIONS_HELP_SUPPORT.md @@ -0,0 +1,6 @@ +--- +name: "❓ Questions/Help/Support" +about: Do you need support? 
+--- + +## ❓ Questions and Help diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md new file mode 100644 index 000000000..ce7f4b1ac --- /dev/null +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -0,0 +1,30 @@ +## Motivation and Context + + + + + +## How Has This Been Tested + + + +## Types of changes + + + +- [ ] Docs change / refactoring / dependency upgrade +- [ ] Bug fix (non-breaking change which fixes an issue) +- [ ] New feature (non-breaking change which adds functionality) +- [ ] Breaking change (fix or feature that would cause existing functionality to change) + +## Checklist + + + +- [ ] My code follows the code style of this project. +- [ ] My change requires a change to the documentation. +- [ ] I have updated the documentation accordingly. +- [ ] I have read the **CONTRIBUTING** document. +- [ ] I have completed my CLA (see **CONTRIBUTING**) +- [ ] I have added tests to cover my changes. +- [ ] All new and existing tests passed. diff --git a/.gitignore b/.gitignore new file mode 100644 index 000000000..e091373b1 --- /dev/null +++ b/.gitignore @@ -0,0 +1,14 @@ +.idea +.DS_Store +.vscode +*.code-workspace +build/ +.mypy_cache/ +.pytest_cache/ +__pycache__/ +*.egg-info/ +expts/ +*.ipynb_checkpoints +examples/*.ipynb_checkpoints +outputs/ +examples/outputs/ \ No newline at end of file diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 000000000..cd68e9eb4 --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,26 @@ +repos: + - repo: https://github.com/psf/black + rev: 20.8b1 + hooks: + - id: black + language_version: python3.7 + + - repo: https://gitlab.com/pycqa/flake8 + rev: 3.7.9 + hooks: + - id: flake8 + additional_dependencies: [-e, "git+git://github.com/pycqa/pyflakes.git@1911c20#egg=pyflakes"] + + - repo: https://github.com/pre-commit/mirrors-mypy + rev: v0.910 + hooks: + - id: mypy + additional_dependencies: [torch==1.9.0, tokenize-rt==3.2.0, types-PyYAML] + args: [--no-strict-optional, 
--ignore-missing-imports] + exclude: setup.py + + - repo: https://github.com/pycqa/isort + rev: 5.6.4 + hooks: + - id: isort + files: 'theseus/.*' diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md new file mode 100644 index 000000000..83f431e8f --- /dev/null +++ b/CODE_OF_CONDUCT.md @@ -0,0 +1,80 @@ +# Code of Conduct + +## Our Pledge + +In the interest of fostering an open and welcoming environment, we as +contributors and maintainers pledge to make participation in our project and +our community a harassment-free experience for everyone, regardless of age, body +size, disability, ethnicity, sex characteristics, gender identity and expression, +level of experience, education, socio-economic status, nationality, personal +appearance, race, religion, or sexual identity and orientation. + +## Our Standards + +Examples of behavior that contributes to creating a positive environment +include: + +* Using welcoming and inclusive language +* Being respectful of differing viewpoints and experiences +* Gracefully accepting constructive criticism +* Focusing on what is best for the community +* Showing empathy towards other community members + +Examples of unacceptable behavior by participants include: + +* The use of sexualized language or imagery and unwelcome sexual attention or +advances +* Trolling, insulting/derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or electronic +address, without explicit permission +* Other conduct which could reasonably be considered inappropriate in a +professional setting + +## Our Responsibilities + +Project maintainers are responsible for clarifying the standards of acceptable +behavior and are expected to take appropriate and fair corrective action in +response to any instances of unacceptable behavior. 
+ +Project maintainers have the right and responsibility to remove, edit, or +reject comments, commits, code, wiki edits, issues, and other contributions +that are not aligned to this Code of Conduct, or to ban temporarily or +permanently any contributor for other behaviors that they deem inappropriate, +threatening, offensive, or harmful. + +## Scope + +This Code of Conduct applies within all project spaces, and it also applies when +an individual is representing the project or its community in public spaces. +Examples of representing a project or community include using an official +project e-mail address, posting via an official social media account, or acting +as an appointed representative at an online or offline event. Representation of +a project may be further defined and clarified by project maintainers. + +This Code of Conduct also applies outside the project spaces when there is a +reasonable belief that an individual's behavior may have a negative impact on +the project or its community. + +## Enforcement + +Instances of abusive, harassing, or otherwise unacceptable behavior may be +reported by contacting the project team at . All +complaints will be reviewed and investigated and will result in a response that +is deemed necessary and appropriate to the circumstances. The project team is +obligated to maintain confidentiality with regard to the reporter of an incident. +Further details of specific enforcement policies may be posted separately. + +Project maintainers who do not follow or enforce the Code of Conduct in good +faith may face temporary or permanent repercussions as determined by other +members of the project's leadership. 
+ 
## Attribution + +This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, +available at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html + +[homepage]: https://www.contributor-covenant.org + +For answers to common questions about this code of conduct, see +https://www.contributor-covenant.org/faq diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100644 index 000000000..f70f2bfc2 --- /dev/null +++ b/CONTRIBUTING.md @@ -0,0 +1,56 @@ +# Contributing to Theseus + +We want to make contributing to Theseus as easy and transparent as possible. + +### Developer Guide + +- Fork the repo and make a branch from `develop`. See the hybrid [workflow model](#workflow-model) we follow. + ```bash + git checkout -b . develop + ```` +- Make small, independent, and well documented commits. If you've changed APIs, update the documentation. Add or modify unit tests as appropriate. Follow this [code style](#code-style). +- See [pull requests](#pull-requests) guide when you are ready to have your code reviewed to be merged into `develop`. It will be included in the next release on `main` following this [versioning](#versioning). +- See [issues](#issues) for questions, suggestions, and bugs. +- If you haven't already, complete the [Contributor License Agreement](#contributor-license-agreement) and see [license](#license). + +--- + +## Workflow Model + +We follow a hybrid between [Gitflow](https://www.atlassian.com/git/tutorials/comparing-workflows/gitflow-workflow) and [Trunk-based](https://www.atlassian.com/continuous-delivery/continuous-integration/trunk-based-development) models. From the former we adopt hosting latest stable release on `main` branch and feature development on `develop` branch, and from the latter we adopt small and frequent merges of new features into `develop`. 
+ +## Code Style + +For Python we use `black` and `isort` for linting and code style, and use [typing](https://docs.python.org/3/library/typing.html). We also use pre-commit hooks to ensure linting and style enforcement. +```bash +pip install pre-commit && pre-commit install && pre-commit run --all-files +``` + +## Pull Requests + +- We encourage more smaller and focused PRs rather than big PRs with many independent changes. +- Use this [PR template](.github/PULL_REQUEST_TEMPLATE.md) to submit your code for review. Consider using the [draft PR](https://github.blog/2019-02-14-introducing-draft-pull-requests/) option to gather early feedback. +- Add yourself to the `Assignees`, add at least one core Theseus team member to `Reviewers`, link to any open issues that can be closed when the PR is merged, and add appropriate `Labels` and `Milestone`. +- We expect the PR is ready for final review only if Continuous Integration tests are passing. +- Keep your branch up-to-date with `develop` by rebasing as necessary. +- We employ `squash-and-merge` for incorporating PRs. Add a brief change summary to the commit message. +- After PR is approved and merged into `develop`, delete the branch to reduce clutter. + +## Versioning + +We use [semantic versioning](https://semver.org/). For core Theseus team member, to prepare a release: +- Update [version](version.txt) file. +- Make sure all tests are passing. +- Create a release tag with changelog summary using the github release interface. + +## Issues + +We use [GitHub issues](https://github.com/facebookresearch/theseus/issues) to track bugs. You can also reach out to us there with questions or suggestions. Please ensure your description is clear by following our [templates](https://github.com/facebookresearch/theseus/issues/new/choose). + +## Contributor License Agreement + +In order to accept your pull request, we need you to submit a Contributor License Agreement (CLA). 
You only need to do this once to work on any of Meta's open source projects. Complete your CLA here: + +## License + +By contributing to Theseus, you agree that your contributions will be licensed under the [LICENSE](LICENSE) file in the root directory of this source tree. diff --git a/LICENSE b/LICENSE new file mode 100644 index 000000000..b93be9051 --- /dev/null +++ b/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) Meta Platforms, Inc. and affiliates. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
diff --git a/README.md b/README.md new file mode 100644 index 000000000..76419e0e4 --- /dev/null +++ b/README.md @@ -0,0 +1,41 @@ +[![CircleCI](https://circleci.com/gh/facebookresearch/theseus/tree/main.svg?style=svg)](https://circleci.com/gh/facebookresearch/theseus/tree/main) +[![License](https://img.shields.io/badge/license-MIT-blue.svg)](https://github.com/facebookresearch/habitat-sim/blob/main/LICENSE) +[![python 3.7, 3.8](https://img.shields.io/badge/python-3.7%20%7C%203.8-blue.svg)](https://www.python.org/downloads/release/) +[![pre-commit](https://img.shields.io/badge/pre--commit-enabled-green?logo=pre-commit&logoColor=white)](https://github.com/pre-commit/pre-commit) +[![Code style: black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black) + + +# Theseus + +A library for differentiable nonlinear optimization built on PyTorch to support constructing various problems in robotics and vision as end-to-end differentiable architectures. + +The current focus is on nonlinear least squares with support for sparsity, batching, GPU, and backward modes for unrolling, truncated and implicit, and sampling based differentiation. This library is in beta with expected full release in mid 2022. + + +## Getting Started +- Prerequisites + - We *strongly* recommend you install `theseus` in a venv or conda environment. + - Theseus requires `torch` installation. To install for your particular CPU/CUDA configuration, follow the instructions in the PyTorch [website](https://pytorch.org/get-started/locally/). + +- Installing + ```bash + git clone https://github.com/facebookresearch/theseus.git && cd theseus + pip install -e . + ``` +- Running unit tests + ```bash + pytest theseus + ``` +- See [tutorials](tutorials/) and [examples](examples/) to learn about the API and usage. + + +## Additional Information + +- Use [Github issues](https://github.com/facebookresearch/theseus/issues/new/choose) for questions, suggestions, and bugs. 
+- See [CONTRIBUTING](CONTRIBUTING.md) if interested in helping out. +- Theseus is being developed with the help of many contributors, see [THANKS](THANKS.md). + + +## License + +Theseus is MIT licensed. See the [LICENSE](LICENSE) for details. diff --git a/THANKS.md b/THANKS.md new file mode 100644 index 000000000..f4b3eb84a --- /dev/null +++ b/THANKS.md @@ -0,0 +1,11 @@ +Theseus is made possible by the contributions of the following people. + +Core Theseus team +- [Mustafa Mukadam](https://github.com/mhmukadam) +- [Luis Pineda](https://github.com/luisenp) +- [Shobha Venkataraman](https://github.com/vshobha) +- [Maurizio Monge](https://github.com/maurimo) +- [Brandon Amos](https://github.com/bamos) +- [Paloma Sodhi](https://github.com/psodhi) +- [Jing Dong](https://github.com/dongjing3309) +- [Stuart Anderson](https://github.com/stuart-fb) diff --git a/examples/README.md b/examples/README.md new file mode 100644 index 000000000..3a40dff8e --- /dev/null +++ b/examples/README.md @@ -0,0 +1,23 @@ +This folder contains examples of how to use Theseus for several applications: + +- state_estimation_2d.py: Is an example of how to do 2D pose estimation from simulated +noisy GPS and odometry sensors. In this example the noise is state dependent, and we +learn the cost weight as a function of pose. +- motion_planning_2d.py: Is an example of how to set up a differentiable motion planning +problem, inspired by [Bhardwaj et al. 2020](https://arxiv.org/pdf/1907.09591.pdf). +- tactile_pose_estimation.py: Is an example of how to set up learning models for +tactile pose estimation, as described in [Sodhi et al. 2021](https://arxiv.org/abs/1705.10664) + +These can be run from your root `theseus` directory by doing + + python examples/state_estimation_2d.py + python examples/motion_planning_2d.py + python examples/tactile_pose_estimation.py + +The motion planning and tactile estimation examples require `hydra` installation which you can obtain +by running. 
+ + pip install hydra-core + +Any outputs generated by these scripts will be saved under `examples/outputs`. You can +change this directory by passing the CLI option `hydra.run.dir=` \ No newline at end of file diff --git a/examples/configs/motion_planning_2d.yaml b/examples/configs/motion_planning_2d.yaml new file mode 100644 index 000000000..7e4918d02 --- /dev/null +++ b/examples/configs/motion_planning_2d.yaml @@ -0,0 +1,42 @@ +seed: 0 +device: cuda:0 +verbose: true +plot_trajectories: true +map_type: tarpit +batch_size: 2 +img_size: 128 +num_time_steps: 100 +train_data_size: 2 +num_images: 1000 + +do_learning: true +num_epochs: 20 +shuffle_each_epoch: false +model_type: initial_trajectory_model +model_lr: 0.01 +model_wd: 0 +amsgrad: false +use_mean_objective_as_loss: true +include_imitation_loss: true +clip_grad_norm: 0 +obj_loss_weight: 0.0005 +gp_loss_weight: 0 +collision_loss_weight: 0 + +total_time: 10 +robot_radius: 0.4 +gp_params: + Qc_inv: [[1.0, 0.0], [0.0, 1.0]] +obs_params: + weight: 100.0 + safety_dist: 0.4 +optim_params: + method: levenberg_marquardt + max_iters: 2 + step_size: 0.3 + kwargs: + damping: 0.1 + +hydra: + run: + dir: examples/outputs \ No newline at end of file diff --git a/examples/configs/tactile_pose_estimation.yaml b/examples/configs/tactile_pose_estimation.yaml new file mode 100644 index 000000000..4fb7e9722 --- /dev/null +++ b/examples/configs/tactile_pose_estimation.yaml @@ -0,0 +1,40 @@ +seed: 0 + +dataset_name: "rectangle-pushing-corners-keypoints" +sdf_name: "rect" + +episode: 0 +max_steps: 100 + +# 0: disc, 1: rect-edges, 2: rect-corners, 3: ellip +class_label: 2 +num_classes: 4 + +shape: + rect_len_x: 0.2363 + rect_len_y: 0.1579 + +tactile_cost: + # window size (min, max, step) over which to add pairwise costs + min_win_mf: 10 + max_win_mf: 40 + step_win_mf: 5 + + init_pretrained_model: True + +train: + # options: "weights_only" or "weights_and_measurement_nn" + mode: "weights_only" + + batch_size: 1 + num_batches: 1 + + 
num_epochs: 100 + eps_tracking_loss: 1e-5 + +options: + vis_traj: True + +hydra: + run: + dir: examples/outputs \ No newline at end of file diff --git a/examples/motion_planning_2d.py b/examples/motion_planning_2d.py new file mode 100644 index 000000000..a6d0c7cfb --- /dev/null +++ b/examples/motion_planning_2d.py @@ -0,0 +1,240 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +import pathlib +import random +from typing import Dict + +import hydra +import matplotlib.pyplot as plt +import numpy as np +import torch +import torch.nn.functional as F +import torch.utils.data + +import theseus as th +import theseus.utils.examples as theg + + +# To run this example, you will need a motion planning dataset available at +# https://dl.fbaipublicfiles.com/theseus/motion_planning_dataset.tar.gz +# +# The steps below should let you run the example. +# From the root project folder do: +# mkdir expts +# cd expts +# wget https://dl.fbaipublicfiles.com/theseus/motion_planning_dataset.tar.gz +# tar -xzvf motion_planning_dataset.tar.gz +# cd .. 
+# python examples/motion_planning_2d.py +DATASET_DIR = pathlib.Path.cwd() / "expts" / "motion-planning-2d" / "dataset" + + +def plot_and_save_trajectories( + epoch: int, + batch: Dict[str, torch.Tensor], + initial_trajectory: torch.Tensor, + solution_trajectory: torch.Tensor, + robot_radius: float, + include_expert: bool = False, +): + sdf = th.eb.SignedDistanceField2D( + th.Variable(batch["sdf_origin"]), + th.Variable(batch["cell_size"]), + th.Variable(batch["sdf_data"]), + ) + + trajectories = [initial_trajectory.cpu(), solution_trajectory.cpu()] + if include_expert: + trajectories.append(batch["expert_trajectory"]) + figures = theg.generate_trajectory_figs( + batch["map_tensor"], sdf, trajectories, robot_radius=robot_radius + ) + + dir_name = pathlib.Path.cwd() / f"output_maps/{epoch:02d}" + dir_name.mkdir(exist_ok=True, parents=True) + for fig_idx, fig in enumerate(figures): + map_id = batch["file_id"][fig_idx] + fig.savefig(dir_name / f"output_{map_id}_im.png") + plt.close(fig) + + +def run_learning_loop(cfg): + dataset = theg.TrajectoryDataset( + True, cfg.train_data_size, DATASET_DIR, cfg.map_type + ) + data_loader = torch.utils.data.DataLoader( + dataset, cfg.batch_size, shuffle=cfg.shuffle_each_epoch + ) + + motion_planner = theg.MotionPlanner( + cfg.img_size, + cfg.obs_params.safety_dist + cfg.robot_radius, + cfg.total_time, + cfg.obs_params.weight, + cfg.gp_params.Qc_inv, + cfg.num_time_steps, + cfg.optim_params.method, + cfg.optim_params.max_iters, + cfg.optim_params.step_size, + use_single_collision_weight=True, + device=cfg.device, + ) + + if cfg.model_type == "scalar_collision_weight": + learnable_model = theg.ScalarCollisionWeightModel() + elif cfg.model_type == "scalar_collision_weight_and_cost_eps": + learnable_model = theg.ScalarCollisionWeightAndCostEpstModel(cfg.robot_radius) + elif cfg.model_type == "initial_trajectory_model": + learnable_model = theg.InitialTrajectoryModel(motion_planner) + + learnable_model.to(cfg.device) + model_optimizer 
= torch.optim.Adam( + learnable_model.parameters(), + lr=cfg.model_lr, + amsgrad=cfg.amsgrad, + weight_decay=cfg.model_wd, + ) + + if not cfg.do_learning: + cfg.num_epochs = 1 + + for epoch in range(cfg.num_epochs): + print(f"Starting epoch {epoch}") + epoch_total_loss = 0 + epoch_mean_objective_loss = 0 + epoch_gp_loss = 0 + epoch_collision_loss = 0 + epoch_imitation_loss = 0 + epoch_grad_norm = 0 + epoch_params_norm = 0 + for batch in data_loader: + + model_optimizer.zero_grad() + + start = batch["expert_trajectory"][:, :2, 0] + goal = batch["expert_trajectory"][:, :2, -1] + planner_inputs = { + "sdf_origin": batch["sdf_origin"].to(cfg.device), + "start": start.to(cfg.device), + "goal": goal.to(cfg.device), + "cell_size": batch["cell_size"].to(cfg.device), + "sdf_data": batch["sdf_data"].to(cfg.device), + } + motion_planner.objective.update(planner_inputs) + + if not cfg.do_learning or cfg.model_type != "initial_trajectory_model": + # This method updates the dictionary so that optimization variables + # are initialized with a straight line trajectory + planner_inputs.update( + motion_planner.get_variable_values_from_straight_line( + planner_inputs["start"], planner_inputs["goal"] + ) + ) + + # This returns a dictionary of auxiliary variable names to torch tensors, + # with values learned by a model upstream of the motion planner layer + if cfg.do_learning: + learnable_inputs = learnable_model.forward(batch) + planner_inputs.update(learnable_inputs) + + # Get the initial trajectory (to use for logging) + motion_planner.objective.update(planner_inputs) + initial_trajectory = motion_planner.get_trajectory() + with torch.no_grad(): + batch_error = motion_planner.objective.error_squared_norm().mean() / 2 + print(f"Planner MSE optim first: {batch_error.item() : 10.2f}") + + _, info = motion_planner.layer.forward( + planner_inputs, + track_best_solution=True, + verbose=cfg.verbose, + **cfg.optim_params.kwargs, + ) + if cfg.do_learning and cfg.include_imitation_loss: + 
solution_trajectory = motion_planner.get_trajectory() + else: + solution_trajectory = motion_planner.get_trajectory( + values_dict=info.best_solution + ) + + with torch.no_grad(): + batch_error = motion_planner.objective.error_squared_norm().mean() / 2 + print(f"Planner MSE optim final: {batch_error.item() : 10.2f}") + + if cfg.do_learning: + gp_error, collision_error = motion_planner.get_total_squared_errors() + loss = 0 + if cfg.use_mean_objective_as_loss: + loss = motion_planner.objective.error_squared_norm().mean() + loss /= motion_planner.objective.dim() + loss *= cfg.obj_loss_weight + epoch_mean_objective_loss += loss.item() + else: + loss = ( + cfg.gp_loss_weight * gp_error + + cfg.collision_loss_weight * collision_error + ) + if cfg.include_imitation_loss: + imitation_loss = F.mse_loss( + solution_trajectory, batch["expert_trajectory"].to(cfg.device) + ) + loss += imitation_loss + epoch_imitation_loss += imitation_loss.item() + loss.backward() + + with torch.no_grad(): + grad_norm = 0.0 + params_norm = 0.0 + for p in learnable_model.parameters(): + grad_norm += p.grad.data.norm(2).item() + params_norm += p.data.norm(float("inf")).item() + epoch_grad_norm += grad_norm / cfg.num_epochs + epoch_params_norm += params_norm / cfg.num_epochs + if cfg.clip_grad_norm > 0: + torch.nn.utils.clip_grad_norm_( + learnable_model.parameters(), cfg.clip_grad_norm + ) + model_optimizer.step() + + epoch_total_loss += loss.item() + epoch_gp_loss += gp_error.item() + epoch_collision_loss += collision_error.item() + + if cfg.plot_trajectories: + plot_and_save_trajectories( + epoch, + batch, + initial_trajectory.cpu().detach().clone(), + solution_trajectory.cpu().detach().clone(), + cfg.robot_radius, + include_expert=cfg.include_imitation_loss, + ) + + collision_w = ( + motion_planner.objective.aux_vars["collision_w"].data.mean().item() + ) + cost_eps = motion_planner.objective.aux_vars["cost_eps"].data.mean().item() + print("collision weight", collision_w) + print("cost_eps", 
cost_eps) + print("OBJECTIVE MEAN LOSS", epoch_mean_objective_loss) + print("GP LOSS: ", epoch_gp_loss) + print("COLLISION LOSS: ", epoch_collision_loss) + print("IMITATION LOSS: ", epoch_imitation_loss) + print("TOTAL LOSS: ", epoch_total_loss) + print(f" ---------------- END EPOCH {epoch} -------------- ") + + +@hydra.main(config_path="./configs/", config_name="motion_planning_2d") +def main(cfg): + torch.set_default_dtype(torch.double) + torch.manual_seed(cfg.seed) + random.seed(cfg.seed) + np.random.seed(cfg.seed) + run_learning_loop(cfg) + + +if __name__ == "__main__": + main() diff --git a/examples/state_estimation_2d.py b/examples/state_estimation_2d.py new file mode 100644 index 000000000..5f99e83ac --- /dev/null +++ b/examples/state_estimation_2d.py @@ -0,0 +1,276 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +from typing import List + +import torch +import torch.nn as nn +import torch.nn.functional as F + +import theseus as th + +device = "cpu" +torch.manual_seed(0) +path_length = 50 +state_size = 2 +batch_size = 4 + + +# --------------------------------------------------- # +# --------------------- Utilities ------------------- # +# --------------------------------------------------- # +def generate_path_data( + batch_size_, + num_measurements_, + generator=None, +): + vel_ = torch.ones(batch_size, 2) + path_ = [torch.zeros(batch_size, 2)] + for _ in range(1, num_measurements_): + new_state_ = path_[-1] + vel_ + path_.append(new_state_) + vel_ += 0.75 * torch.randn(batch_size, 2, generator=generator) + return path_ + + +class SimpleNN(nn.Module): + def __init__(self, in_size, out_size, hid_size=30, use_offset=False): + super().__init__() + self.fc = nn.Sequential( + nn.Linear(in_size, hid_size), + nn.ReLU(), + nn.Linear(hid_size, hid_size), + nn.ReLU(), + nn.Linear(hid_size, out_size), + ) + + def forward(self, 
state_): + return self.fc(state_) + + +def run_model( + mode_, + cost_weights_model_, + current_inputs_, + path_length_, + print_stuff=False, +): + weights_ = get_weights_dict_from_model( + mode_, + cost_weights_model_, + current_inputs_, + path_length_, + print_stuff=print_stuff, + ) + theseus_inputs_ = {} + theseus_inputs_.update(current_inputs_) + theseus_inputs_.update(weights_) + + return theseus_inputs_ + + +def get_weights_dict_from_model( + mode_, cost_weights_model_, values_, path_length_, print_stuff=False +): + if mode_ == "constant": + weights_dict = {} + unique_weight_ = cost_weights_model_() + for i in range(path_length_): + weights_dict[f"scale_gps_{i}"] = unique_weight_[0, 0].view(1, 1) + if i < path_length_ - 1: + weights_dict[f"scale_between_{i}"] = unique_weight_[0, 1].view(1, 1) + else: + + weights_dict = {} + all_states_ = [] + # will compute weight for all cost weights in the path + for i in range(path_length_): + all_states_.append(values_[f"pose_{i}"]) + model_input_ = torch.cat(all_states_, dim=0) + + weights_ = cost_weights_model_(model_input_) + for i in range(path_length_): + weights_dict[f"scale_gps_{i}"] = weights_[i - 1, 0].view(1, 1) + if i < path_length_ - 1: + weights_dict[f"scale_between_{i}"] = weights_[i - 1, 1].view(1, 1) + + if print_stuff: + with torch.no_grad(): + print("scale5", weights_dict["scale_gps_5"].item()) + print("scale45", weights_dict["scale_gps_45"].item()) + print("btwn5", weights_dict["scale_between_5"].item()) + print("btwn45", weights_dict["scale_between_45"].item()) + + return weights_dict + + +def get_initial_inputs(gps_targets_): + inputs_ = {} + for i, _ in enumerate(gps_targets_): + inputs_[f"pose_{i}"] = gps_targets_[i] + torch.randn(1) + return inputs_ + + +def get_path_from_values(batch_size_, values_, path_length_): + path = torch.empty(batch_size_, path_length_, 2, device=device) + for i in range(path_length_): + path[:, i, :2] = values_[f"pose_{i}"] + return path + + +# 
------------------------------------------------------------- # +# --------------------------- Learning ------------------------ # +# ------------------------------------------------------------- # +def run_learning(mode_, path_data_, gps_targets_, measurements_): + + # first input is scale for GPS costs, second is scale for Between costs + if mode_ == "constant": + model_params = nn.Parameter(10 * torch.rand(1, 2, device=device)) + + def cost_weights_model(): + return model_params * torch.ones(1) + + model_optimizer = torch.optim.Adam([model_params], lr=3e-2) + else: + cost_weights_model = SimpleNN(state_size, 2, hid_size=100, use_offset=False).to( + device + ) + model_optimizer = torch.optim.Adam( + cost_weights_model.parameters(), + lr=7e-5, + ) + + # GPS and Between cost weights + gps_cost_weights = [] + between_cost_weights = [] + for i in range(path_length): + gps_cost_weights.append( + th.ScaleCostWeight(th.Variable(torch.ones(1, 1), name=f"scale_gps_{i}")) + ) + if i < path_length - 1: + between_cost_weights.append( + th.ScaleCostWeight( + th.Variable(torch.ones(1, 1), name=f"scale_between_{i}") + ) + ) + + # ## Variables + poses = [] + for i in range(path_length): + poses.append(th.Point2(name=f"pose_{i}")) + + # ## Cost functions + cost_functions: List[th.CostFunction] = [] + + # ### GPS and between cost functions + for i in range(path_length): + cost_functions.append( + th.eb.VariableDifference( + poses[i], + gps_cost_weights[i], + th.Point2(data=gps_targets_[i]), + name=f"gps_{i}", + ) + ) + if i < path_length - 1: + cost_functions.append( + ( + th.eb.Between( + poses[i], + poses[i + 1], + between_cost_weights[i], + th.Point2(data=measurements_[i]), + name=f"between_{i}", + ) + ) + ) + + # # Create Theseus layer and initial values for variables + objective = th.Objective() + for cost_function in cost_functions: + objective.add(cost_function) + optimizer = th.GaussNewton( + objective, + th.CholeskyDenseSolver, + max_iterations=1, + step_size=0.9, + ) + 
state_estimator = th.TheseusLayer(optimizer) + state_estimator.to(device) + + # ## Learning loop + path_tensor = torch.stack(path_data_).permute(1, 0, 2) + best_loss = 1000.0 + best_solution = None + losses = [] + for epoch in range(200): + model_optimizer.zero_grad() + + inner_loop_iters = 3 + theseus_inputs = get_initial_inputs(gps_targets_) + theseus_inputs = run_model( + mode_, + cost_weights_model, + theseus_inputs, + path_length, + print_stuff=False, + ) + objective.update(theseus_inputs) + with torch.no_grad(): + if epoch % 10 == 0: + print("Initial error:", objective.error_squared_norm().mean().item()) + + for i in range(inner_loop_iters): + theseus_inputs, info = state_estimator.forward( + theseus_inputs, + track_best_solution=True, + verbose=epoch % 10 == 0, + ) + theseus_inputs = run_model( + mode_, + cost_weights_model, + theseus_inputs, + path_length, + print_stuff=epoch % 10 == 0 and i == 0, + ) + + solution_path = get_path_from_values( + objective.batch_size, theseus_inputs, path_length + ) + + loss = F.mse_loss(solution_path, path_tensor) + loss.backward() + model_optimizer.step() + loss_value = loss.item() + losses.append(loss_value) + if loss_value < best_loss: + best_loss = loss_value + best_solution = solution_path.detach() + + if epoch % 10 == 0: + print("TOTAL LOSS: ", loss.item()) + print(f" ---------------- END EPOCH {epoch} -------------- ") + + return best_solution, losses + + +path_data = generate_path_data(batch_size, 50) +gps_targets = [] +measurements = [] +for i in range(path_length): + gps_noise = 0.075 * path_data[i][1].abs() * torch.randn(batch_size, 2) + gps_target = (path_data[i] + gps_noise).view(batch_size, 2) + gps_targets.append(gps_target) + + if i < path_length - 1: + measurement = (path_data[i + 1] - path_data[i]).view(batch_size, 2) + measurement_noise = 0.005 * torch.randn(batch_size, 2).view(batch_size, 2) + measurements.append(measurement + measurement_noise) + +mlp_solution, mlp_losses = run_learning("mlp", path_data, 
gps_targets, measurements) +print(" -------------------------------------------------------------- ") +constant_solution, constant_losses = run_learning( + "constant", path_data, gps_targets, measurements +) diff --git a/examples/tactile_pose_estimation.py b/examples/tactile_pose_estimation.py new file mode 100644 index 000000000..9c0045ac1 --- /dev/null +++ b/examples/tactile_pose_estimation.py @@ -0,0 +1,392 @@ +import pathlib +import random + +import hydra +import matplotlib.pyplot as plt +import numpy as np +import torch +import torch.nn as nn +import torch.nn.functional as F +import torch.optim as optim + +import theseus as th +import theseus.utils.examples as theg + +# To run this example, you will need a tactile pushing dataset available at +# https://dl.fbaipublicfiles.com/theseus/tactile_pushing_data.tar.gz +# +# The steps below should let you run the example. +# From the root project folder do: +# mkdir expts +# cd expts +# wget https://dl.fbaipublicfiles.com/theseus/tactile_pushing_data.tar.gz +# tar -xzvf tactile_pushing_data.tar.gz +# cd .. +# python examples/tactile_pose_estimation.py +EXP_PATH = pathlib.Path.cwd() / "expts" / "tactile-pushing" +torch.set_default_dtype(torch.double) + +device = torch.device("cuda" if torch.cuda.is_available() else "cpu") +seed = 0 +torch.random.manual_seed(seed) +random.seed(seed) +np.random.seed(seed) + +plt.ion() + + +# In this example, the goal is to track 2D object poses (x,y,theta) during planar +# pushing from tactile image measurements for single episode. +# This can solved as an optimization problem where the variables being estimated +# are object poses over time. +# +# We formulate the optimization using following cost terms: +# * Quasi-static pushing planar: Penalizes deviation from quasi-static dynamics. +# Uses velocity-only quasi-static model for sticking contact. +# * Tactile measurements: Penalizes deviations from predicted relative pose +# from tactile image feature pairs. 
+# * Object-effector intersection: Penalizes intersections between object and +# end-effector. +# * End-effector priors: Penalizes deviation from end-effector poses captured from +# motion capture. +# * Boundary conditions: Penalizes deviation of first object pose from a global +# pose prior. +# +# Based on the method described in, +# Sodhi et al. Learning Tactile Models for Factor Graph-based Estimation, +# 2021 (https://arxiv.org/abs/1705.10664) + + +def run_learning_loop(cfg): + dataset_file = EXP_PATH / "datasets" / f"{cfg.dataset_name}.json" + dataset = theg.load_tactile_dataset_from_file( + filename=dataset_file, device=device, episode=cfg.episode + ) + + time_steps = np.minimum(cfg.max_steps, len(dataset["obj_poses"])) + + min_win_mf, max_win_mf, step_win_mf = ( + cfg.tactile_cost.min_win_mf, + cfg.tactile_cost.max_win_mf, + cfg.tactile_cost.step_win_mf, + ) + + # -------------------------------------------------------------------- # + # Creating optimization variables + # -------------------------------------------------------------------- # + # The optimization variables for this problem are SE2 object and end effector + # poses over time. 
+ obj_poses, eff_poses = [], [] + for i in range(time_steps): + obj_poses.append(th.SE2(name=f"obj_pose_{i}", dtype=torch.double)) + eff_poses.append(th.SE2(name=f"eff_pose_{i}", dtype=torch.double)) + + # -------------------------------------------------------------------- # + # Creating auxiliary variables + # -------------------------------------------------------------------- # + # - obj_start_pose: target for boundary cost functions + # - motion_captures: priors on the end-effector poses + # - nn_measurements: tactile measurement prediction from image features + # - sdf_data, sdf_cell_size, sdf_origin: signed distance field data, + # cell_size and origin + obj_start_pose = th.SE2( + x_y_theta=dataset["obj_poses"][0].unsqueeze(0), name="obj_start_pose" + ) + + motion_captures = [] + for i in range(time_steps): + motion_captures.append( + th.SE2( + x_y_theta=dataset["eff_poses"][i].unsqueeze(0), + name=f"motion_capture_{i}", + ) + ) + + nn_measurements = [] + for i in range(min_win_mf, time_steps): + for offset in range(min_win_mf, np.minimum(i, max_win_mf), step_win_mf): + nn_measurements.append(th.SE2(name=f"nn_measurement_{i-offset}_{i}")) + + sdf_tensor, sdf_cell_size, sdf_origin = theg.load_tactile_sdf_from_file( + filename=EXP_PATH / "sdfs" / f"{cfg.sdf_name}.json", + device=device, + ) + + sdf_data = th.Variable(sdf_tensor, name="sdf_data") + sdf_cell_size = th.Variable(sdf_cell_size, name="sdf_cell_size") + sdf_origin = th.Variable(sdf_origin, name="sdf_origin") + eff_radius = th.Variable(torch.zeros(1, 1), name="eff_radius") + + # -------------------------------------------------------------------- # + # Creating cost weights + # -------------------------------------------------------------------- # + # - qsp_weight: diagonal cost weight shared across all quasi-static cost + # functions. + # - mf_between_weight: diagonal cost weight shared across all moving factor + # cost functions. 
+ # - intersect_weight: scalar cost weight shared across all object-effector + # intersection cost functions. + # - motion_capture_weight: diagonal cost weight shared across all end-effector + # priors cost functions. + qsp_weight = th.DiagonalCostWeight(th.Variable(torch.ones(1, 3), name="qsp_weight")) + mf_between_weight = th.DiagonalCostWeight( + th.Variable(torch.ones(1, 3), name="mf_between_weight") + ) + intersect_weight = th.ScaleCostWeight( + th.Variable(torch.ones(1, 1), name="intersect_weight") + ) + motion_capture_weight = th.DiagonalCostWeight( + th.Variable(torch.ones(1, 3), name="mc_weight") + ) + + # -------------------------------------------------------------------- # + # Creating cost functions + # -------------------------------------------------------------------- # + # - VariableDifference: Penalizes deviation between first object pose from + # a global pose prior. + # - QuasiStaticPushingPlanar: Penalizes deviation from velocity-only + # quasi-static dynamics model QuasiStaticPushingPlanar + # - MovingFrameBetween: Penalizes deviation between relative end effector poses + # in object frame against a measurement target. Measurement target + # `nn_measurements` is obtained from a network prediction. + # - EffectorObjectContactPlanar: Penalizes intersections between object and end + # effector based on the object sdf. 
+ # - VariableDifference: Penalizes deviations of end-effector poses from motion + # capture readings + + # Loop over and add all cost functions, + # cost weights, and their auxiliary variables + objective = th.Objective() + nn_meas_idx = 0 + c_square = (np.sqrt(cfg.shape.rect_len_x ** 2 + cfg.shape.rect_len_y ** 2)) ** 2 + for i in range(time_steps): + if i == 0: + objective.add( + th.eb.VariableDifference( + obj_poses[i], + motion_capture_weight, + obj_start_pose, + name=f"obj_priors_{i}", + ) + ) + + if i < time_steps - 1: + objective.add( + th.eb.QuasiStaticPushingPlanar( + obj_poses[i], + obj_poses[i + 1], + eff_poses[i], + eff_poses[i + 1], + qsp_weight, + c_square, + name=f"qsp_{i}", + ) + ) + if i >= min_win_mf: + for offset in range(min_win_mf, np.minimum(i, max_win_mf), step_win_mf): + objective.add( + th.eb.MovingFrameBetween( + obj_poses[i - offset], + obj_poses[i], + eff_poses[i - offset], + eff_poses[i], + mf_between_weight, + nn_measurements[nn_meas_idx], + name=f"mf_between_{i - offset}_{i}", + ) + ) + nn_meas_idx = nn_meas_idx + 1 + + objective.add( + th.eb.EffectorObjectContactPlanar( + obj_poses[i], + eff_poses[i], + intersect_weight, + sdf_origin, + sdf_data, + sdf_cell_size, + eff_radius, + name=f"intersect_{i}", + ) + ) + + objective.add( + th.eb.VariableDifference( + eff_poses[i], + motion_capture_weight, + motion_captures[i], + name=f"eff_priors_{i}", + ) + ) + + # -------------------------------------------------------------------- # + # Creating TheseusLayer + # -------------------------------------------------------------------- # + # Wrap the objective and inner-loop optimizer into a `TheseusLayer`. + # Inner-loop optimizer here is the Levenberg-Marquardt nonlinear optimizer + # coupled with a dense linear solver based on Cholesky decomposition. 
+ nl_optimizer = th.LevenbergMarquardt( + objective, th.CholeskyDenseSolver, max_iterations=3 + ) + theseus_layer = th.TheseusLayer(nl_optimizer) + theseus_layer.to(device=device, dtype=torch.double) + + # -------------------------------------------------------------------- # + # Creating parameters to learn + # -------------------------------------------------------------------- # + # Use theseus_layer in an outer learning loop to learn different cost + # function parameters: + if cfg.train.mode == "weights_only": + qsp_model = theg.TactileWeightModel( + device, wt_init=torch.tensor([[50.0, 50.0, 50.0]]) + ).to(device) + mf_between_model = theg.TactileWeightModel( + device, wt_init=torch.tensor([[0.0, 0.0, 10.0]]) + ).to(device) + measurements_model = None + + learning_rate = 5 + learnable_params = list(qsp_model.parameters()) + list( + mf_between_model.parameters() + ) + elif cfg.train.mode == "weights_and_measurement_nn": + qsp_model = theg.TactileWeightModel( + device, wt_init=torch.tensor([[5.0, 5.0, 5.0]]) + ).to(device) + mf_between_model = theg.TactileWeightModel( + device, wt_init=torch.tensor([[0.0, 0.0, 5.0]]) + ).to(device) + measurements_model = theg.TactileMeasModel(2 * 2 * 4, 4) + if cfg.tactile_cost.init_pretrained_model is True: + measurements_model = theg.init_tactile_model_from_file( + model=measurements_model, + filename=EXP_PATH / "models" / "transform_prediction_keypoints.pt", + ) + measurements_model.to(device) + + learning_rate = 1e-3 + cfg.train.eps_tracking_loss = 5e-4 # early stopping + learnable_params = ( + list(measurements_model.parameters()) + + list(qsp_model.parameters()) + + list(mf_between_model.parameters()) + ) + else: + print("Learning mode {cfg.train.mode} not found") + + outer_optim = optim.Adam(learnable_params, lr=learning_rate) + batch_size = cfg.train.batch_size + num_batches = cfg.train.num_batches + measurements = [ + ( + dataset["img_feats"][0:time_steps].unsqueeze(0).repeat(batch_size, 1, 1), + 
dataset["eff_poses"][0:time_steps].unsqueeze(0).repeat(batch_size, 1, 1), + dataset["obj_poses"][0:time_steps].unsqueeze(0).repeat(batch_size, 1, 1), + ) + for _ in range(num_batches) + ] + + obj_poses_gt = dataset["obj_poses"][0:time_steps, :].clone().requires_grad_(True) + eff_poses_gt = dataset["eff_poses"][0:time_steps, :].clone().requires_grad_(True) + + theseus_inputs = {} + for epoch in range(cfg.train.num_epochs): + losses = [] + for batch_idx, batch in enumerate(measurements): + theseus_inputs.update( + theg.get_tactile_nn_measurements_inputs( + batch=batch, + device=device, + class_label=cfg.class_label, + num_classes=cfg.num_classes, + min_win_mf=min_win_mf, + max_win_mf=max_win_mf, + step_win_mf=step_win_mf, + time_steps=time_steps, + model=measurements_model, + ) + ) + theseus_inputs.update( + theg.get_tactile_motion_capture_inputs(batch, device, time_steps) + ) + theseus_inputs.update( + theg.get_tactile_cost_weight_inputs(qsp_model, mf_between_model) + ) + theseus_inputs.update( + theg.get_tactile_initial_optim_vars(batch, device, time_steps) + ) + + theseus_inputs["sdf_data"] = ( + (sdf_tensor.data).repeat(batch_size, 1, 1).to(device) + ) + + theseus_inputs, _ = theseus_layer.forward(theseus_inputs, verbose=True) + + obj_poses_opt = theg.get_tactile_poses_from_values( + batch_size=batch_size, + values=theseus_inputs, + time_steps=time_steps, + device=device, + key="obj_pose", + ) + eff_poses_opt = theg.get_tactile_poses_from_values( + batch_size=batch_size, + values=theseus_inputs, + time_steps=time_steps, + device=device, + key="eff_pose", + ) + + loss = F.mse_loss(obj_poses_opt[batch_idx, :], obj_poses_gt) + loss.backward() + + nn.utils.clip_grad_norm_(qsp_model.parameters(), 100, norm_type=2) + nn.utils.clip_grad_norm_(mf_between_model.parameters(), 100, norm_type=2) + + with torch.no_grad(): + for name, param in qsp_model.named_parameters(): + print(name, param.data) + for name, param in mf_between_model.named_parameters(): + print(name, 
param.data) + + print(" grad qsp", qsp_model.param.grad.norm().item()) + print(" grad mfb", mf_between_model.param.grad.norm().item()) + + outer_optim.step() + + with torch.no_grad(): + for param in qsp_model.parameters(): + param.data.clamp_(0) + for param in mf_between_model.parameters(): + param.data.clamp_(0) + + losses.append(loss.item()) + + if cfg.options.vis_traj: + theg.visualize_tactile_push2d( + obj_poses=obj_poses_opt[0, :], + eff_poses=eff_poses_opt[0, :], + obj_poses_gt=obj_poses_gt, + eff_poses_gt=eff_poses_gt, + rect_len_x=cfg.shape.rect_len_x, + rect_len_y=cfg.shape.rect_len_y, + ) + + print(f"AVG. LOSS: {np.mean(losses)}") + + if np.mean(losses) < cfg.train.eps_tracking_loss: + break + + +@hydra.main(config_path="./configs/", config_name="tactile_pose_estimation") +def main(cfg): + torch.set_default_dtype(torch.double) + torch.manual_seed(cfg.seed) + random.seed(cfg.seed) + np.random.seed(cfg.seed) + + run_learning_loop(cfg) + + +if __name__ == "__main__": + main() diff --git a/noxfile.py b/noxfile.py new file mode 100644 index 000000000..99ffc7d8e --- /dev/null +++ b/noxfile.py @@ -0,0 +1,24 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. 
+ +import nox + + +@nox.session() +def lint(session): + session.install("--upgrade", "setuptools", "pip") + session.install("-r", "requirements/dev.txt") + session.run("flake8", "theseus") + session.run("black", "--check", "theseus") + + +@nox.session() +def mypy_and_tests(session): + session.install("--upgrade", "setuptools", "pip") + session.install("torch") + session.install("-r", "requirements/dev.txt") + session.run("mypy", "theseus") + session.install("-e", ".") + session.run("pytest", "theseus") diff --git a/requirements/dev.txt b/requirements/dev.txt new file mode 100644 index 000000000..3776d7f55 --- /dev/null +++ b/requirements/dev.txt @@ -0,0 +1,7 @@ +black>=20.8b1 +flake8>=3.8.4 +mypy>=0.910 +nox==2020.8.22 +pre-commit>=2.9.2 +isort>=5.6.4 +types-PyYAML==5.4.3 diff --git a/requirements/main.txt b/requirements/main.txt new file mode 100644 index 000000000..09557abbb --- /dev/null +++ b/requirements/main.txt @@ -0,0 +1,5 @@ +numpy>=1.19.2 +scipy>=1.5.3 +scikit-sparse>=0.4.5 +# torch>=1.7.1 will do separate install instructions for now (CUDA dependent) +pytest>=6.2.1 diff --git a/setup.cfg b/setup.cfg new file mode 100644 index 000000000..c86dccd17 --- /dev/null +++ b/setup.cfg @@ -0,0 +1,18 @@ +[flake8] +max-line-length = 100 +# E203: whitespace before ":", incompatible with black +# W503: line break before binary operator (black also) +# F401: imported but unused +ignore=E203, W503 +per-file-ignores = + *__init__.py:F401 + +[mypy] +python_version = 3.7 +ignore_missing_imports = True +show_error_codes = True +strict_optional = False + +[isort] +profile=black +skip=theseus/__init__.py diff --git a/setup.py b/setup.py new file mode 100644 index 000000000..7b8e9a8d7 --- /dev/null +++ b/setup.py @@ -0,0 +1,47 @@ +#!/usr/bin/env python +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. 
+ +from pathlib import Path +import setuptools + + +def parse_requirements_file(path): + with open(path) as f: + reqs = [] + for line in f: + line = line.strip() + reqs.append(line.split("==")[0]) + return reqs + + +reqs_main = parse_requirements_file("requirements/main.txt") +reqs_dev = parse_requirements_file("requirements/dev.txt") +root_dir = Path(__file__).parent +version = (root_dir / "version.txt").read_text().strip() + +with open("README.md", "r") as fh: + long_description = fh.read() + +setuptools.setup( + name="theseus", + version=version, + author="Meta Research", + description="A library for differentiable nonlinear optimization.", + long_description=long_description, + long_description_content_type="text/markdown", + url="https://github.com/facebookresearch/theseus", + keywords="differentiable optimization, nonlinear least squares, factor graphs", + packages=setuptools.find_packages(), + classifiers=[ + "Programming Language :: Python :: 3", + "License :: OSI Approved :: MIT License", + "Intended Audience :: Science/Research", + "Topic :: Scientific/Engineering :: Artificial Intelligence", + ], + python_requires=">=3.7", + install_requires=reqs_main, + extras_require={"dev": reqs_main + reqs_dev}, +) diff --git a/theseus/__init__.py b/theseus/__init__.py new file mode 100644 index 000000000..c44f63e99 --- /dev/null +++ b/theseus/__init__.py @@ -0,0 +1,47 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. 
+ +from .core import ( + CostFunction, + CostWeight, + DiagonalCostWeight, + AutoDiffCostFunction, + Objective, + ScaleCostWeight, + Variable, +) +from .geometry import ( + SE2, + SO2, + LieGroup, + Manifold, + Point2, + Point3, + Vector, + local, + retract, + compose, + inverse, + log_map, + exp_map, +) +from .optimizer import DenseLinearization, SparseLinearization, VariableOrdering +from .optimizer.linear import ( + CholeskyDenseSolver, + CholmodSparseSolver, + DenseSolver, + LinearOptimizer, + LUDenseSolver, +) +from .optimizer.nonlinear import ( + GaussNewton, + LevenbergMarquardt, + NonlinearLeastSquares, + NonlinearOptimizerParams, + NonlinearOptimizerStatus, +) +from .theseus_layer import TheseusLayer + +import theseus.embodied as eb diff --git a/theseus/constants.py b/theseus/constants.py new file mode 100644 index 000000000..27e4bf947 --- /dev/null +++ b/theseus/constants.py @@ -0,0 +1,6 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +EPS = 1e-10 diff --git a/theseus/core/__init__.py b/theseus/core/__init__.py new file mode 100644 index 000000000..f4d95c81e --- /dev/null +++ b/theseus/core/__init__.py @@ -0,0 +1,9 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +from .cost_function import AutoDiffCostFunction, CostFunction, ErrFnType +from .cost_weight import CostWeight, DiagonalCostWeight, ScaleCostWeight +from .objective import Objective +from .variable import Variable diff --git a/theseus/core/cost_function.py b/theseus/core/cost_function.py new file mode 100644 index 000000000..8fe909149 --- /dev/null +++ b/theseus/core/cost_function.py @@ -0,0 +1,189 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. 
+# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +import abc +from typing import Any, List, Optional, Tuple, cast + +import torch +import torch.autograd.functional as autogradF +from typing_extensions import Protocol + +from .cost_weight import CostWeight, ScaleCostWeight +from .theseus_function import TheseusFunction +from .variable import Variable + + +# A cost function is defined by the variables interacting in it, +# and a cost weight for weighting errors and jacobians +# This is an abstract class for cost functions of different types. +# Each concrete function must implement an error method and the +# jacobian of the error, by implementing abstract methods +# `error` and `jacobians`, respectively. +class CostFunction(TheseusFunction, abc.ABC): + def __init__( + self, + cost_weight: CostWeight, + *args: Any, + name: Optional[str] = None, + **kwargs: Any, + ): + super().__init__(name=name) + self.weight = cost_weight + + @abc.abstractmethod + def error(self) -> torch.Tensor: + pass + + # Returns (jacobians, error) + @abc.abstractmethod + def jacobians(self) -> Tuple[List[torch.Tensor], torch.Tensor]: + pass + + @abc.abstractmethod + def dim(self) -> int: + pass + + def weighted_error(self) -> torch.Tensor: + error = self.error() + return self.weight.weight_error(error) + + def weighted_jacobians_error( + self, + ) -> Tuple[List[torch.Tensor], torch.Tensor]: + jacobian, err = self.jacobians() + return self.weight.weight_jacobians_and_error(jacobian, err) + + # Must copy everything + @abc.abstractmethod + def _copy_impl(self, new_name: Optional[str] = None) -> "CostFunction": + pass + + # Here for mypy compatibility downstream + def copy( + self, new_name: Optional[str] = None, keep_variable_names: bool = False + ) -> "CostFunction": + return cast( + CostFunction, + super().copy(new_name=new_name, keep_variable_names=keep_variable_names), + ) + + # calls to() on the cost weight, 
variables and any internal tensors + def to(self, *args, **kwargs): + super().to(*args, **kwargs) + self.weight.to(*args, **kwargs) + + +# Function protocol for learnable cost functions. `optim_vars` and `aux_vars` are +# separated to facilitate internal autograd handling of optimization variables +class ErrFnType(Protocol): + def __call__( + self, optim_vars: Tuple[Variable, ...], aux_vars: Tuple[Variable, ...] + ) -> torch.Tensor: + ... + + +# The error function is assumed to receive variables in the format +# err_fn( +# optim_vars=(optim_vars[0].data, ..., optim_vars[N - 1].data), +# aux_vars=(aux_vars[0].data, ..., aux_vars[M -1].data) +# ) +# +# The user also needs to explicitly specify the output's dimension +class AutoDiffCostFunction(CostFunction): + def __init__( + self, + optim_vars: List[Variable], + err_fn: ErrFnType, + dim: int, + cost_weight: CostWeight = ScaleCostWeight(1.0), + aux_vars: Optional[List[Variable]] = None, + name: Optional[str] = None, + autograd_strict: bool = False, + autograd_vectorize: bool = False, + ): + super().__init__(cost_weight, name=name) + # this avoids doing aux_vars=[], which is a bad default since [] is mutable + aux_vars = aux_vars or [] + + def _register_vars_in_list(var_list_, is_optim=False): + for var_ in var_list_: + if hasattr(self, var_.name): + raise RuntimeError(f"Variable name {var_.name} is not allowed.") + setattr(self, var_.name, var_) + if is_optim: + self.register_optim_var(var_.name) + else: + self.register_aux_var(var_.name) + + if len(optim_vars) < 1: + raise ValueError( + "AutodiffCostFunction must receive at least one optimization variable." 
+ ) + _register_vars_in_list(optim_vars, is_optim=True) + _register_vars_in_list(aux_vars, is_optim=False) + + self._err_fn = err_fn + self._dim = dim + self._autograd_strict = autograd_strict + self._autograd_vectorize = autograd_vectorize + + # The following are auxiliary Variable objects to hold tensor data + # during jacobian computation without modifying the original Variable objects + self._tmp_optim_vars = tuple(Variable(data=v.data) for v in optim_vars) + + def _compute_error( + self, + ) -> Tuple[torch.Tensor, Tuple[Variable, ...], Tuple[Variable, ...]]: + optim_vars = tuple(v for v in self.optim_vars) + aux_vars = tuple(v for v in self.aux_vars) + err = self._err_fn(optim_vars=optim_vars, aux_vars=aux_vars) + if err.shape[1] != self.dim(): + raise ValueError( + "Output dimension of given error function doesn't match self.dim()." + ) + return err, optim_vars, aux_vars + + def error(self) -> torch.Tensor: + return self._compute_error()[0] + + # Returns (jacobians, error) + def jacobians(self) -> Tuple[List[torch.Tensor], torch.Tensor]: + err, optim_vars, aux_vars = self._compute_error() + + # this receives a list of torch tensors with data to set for tmp_optim_vars + def jac_fn(*optim_vars_data_): + assert len(optim_vars_data_) == len(self._tmp_optim_vars) + for i, tensor in enumerate(optim_vars_data_): + self._tmp_optim_vars[i].update(tensor) + + return self._err_fn(optim_vars=self._tmp_optim_vars, aux_vars=aux_vars) + + jacobians_full = autogradF.jacobian( + jac_fn, + tuple(v.data for v in optim_vars), + create_graph=True, + strict=self._autograd_strict, + vectorize=self._autograd_vectorize, + ) + aux_idx = torch.arange(err.shape[0]) # batch_size + + # torch autograd returns shape (batch_size, dim, batch_size, var_dim), which + # includes derivatives of batches against each other. 
+ # this indexing recovers only the derivatives wrt the same batch + jacobians = list(jac[aux_idx, :, aux_idx, :] for jac in jacobians_full) + return jacobians, err + + def dim(self) -> int: + return self._dim + + def _copy_impl(self, new_name: Optional[str] = None) -> "AutoDiffCostFunction": + return AutoDiffCostFunction( + [v.copy() for v in self.optim_vars], + self._err_fn, + self._dim, + aux_vars=[v.copy() for v in self.aux_vars], + cost_weight=self.weight.copy(), + name=new_name, + ) diff --git a/theseus/core/cost_weight.py b/theseus/core/cost_weight.py new file mode 100644 index 000000000..4b81f1aa3 --- /dev/null +++ b/theseus/core/cost_weight.py @@ -0,0 +1,125 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +import abc +from typing import List, Optional, Sequence, Tuple, Union, cast + +import torch + +from .theseus_function import TheseusFunction +from .variable import Variable + + +# Abstract class for representing cost weights (aka, precisions, inverse covariance) +# Concrete classes must implement two methods: +# - `weight_error`: return an error tensor weighted by the cost weight +# - `weightJacobiansError`: returns jacobians an errors weighted by the cost weight +class CostWeight(TheseusFunction, abc.ABC): + def __init__( + self, + name: Optional[str] = None, + ): + super().__init__(name=name) + + @abc.abstractmethod + def weight_error(self, error: torch.Tensor) -> torch.Tensor: + pass + + @abc.abstractmethod + def weight_jacobians_and_error( + self, + jacobians: List[torch.Tensor], + error: torch.Tensor, + ) -> Tuple[List[torch.Tensor], torch.Tensor]: + pass + + # Must copy everything + @abc.abstractmethod + def _copy_impl(self, new_name: Optional[str] = None) -> "TheseusFunction": + pass + + def copy( + self, new_name: Optional[str] = None, keep_variable_names: bool = False + ) -> "CostWeight": + return cast( + 
CostWeight, + super().copy(new_name=new_name, keep_variable_names=keep_variable_names), + ) + + +# Besides passing a theseus Variable, can also get a float and it will create the +# Variable with a default name for it +class ScaleCostWeight(CostWeight): + def __init__( + self, + scale: Union[float, torch.Tensor, Variable], + name: Optional[str] = None, + ): + super().__init__(name=name) + if not isinstance(scale, Variable): + if not isinstance(scale, torch.Tensor): + scale = torch.tensor(scale) + self.scale = Variable(scale) + else: + self.scale = scale + if not self.scale.data.squeeze().ndim == 0: + raise ValueError("ScaleCostWeight only accepts 0-dim data.") + self.scale.data = self.scale.data.view(1, 1) + self.register_aux_vars(["scale"]) + + def weight_error(self, error: torch.Tensor) -> torch.Tensor: + return error * self.scale.data + + def weight_jacobians_and_error( + self, + jacobians: List[torch.Tensor], + error: torch.Tensor, + ) -> Tuple[List[torch.Tensor], torch.Tensor]: + error = error * self.scale.data + new_jacobians = [] + for jac in jacobians: + new_jacobians.append(jac * self.scale.data.view(-1, 1, 1)) + return new_jacobians, error + + def _copy_impl(self, new_name: Optional[str] = None) -> "ScaleCostWeight": + return ScaleCostWeight(self.scale.copy(), name=new_name) + + +# Besides passing a theseus Variable, can also get any float sequence and it will create the +# Variable with a default name for it +class DiagonalCostWeight(CostWeight): + def __init__( + self, + diagonal: Union[Sequence[float], Variable], + name: Optional[str] = None, + ): + super().__init__(name=name) + if not isinstance(diagonal, Variable): + self.diagonal = Variable(torch.tensor(diagonal)) + else: + self.diagonal = diagonal + if not self.diagonal.data.squeeze().ndim < 2: + raise ValueError("DiagonalCostWeight only accepts 1-D data.") + self.diagonal.data = self.diagonal.data.view(1, -1) + self.register_aux_vars(["diagonal"]) + + def weight_error(self, error: torch.Tensor) -> 
torch.Tensor: + return error * self.diagonal.data + + def weight_jacobians_and_error( + self, + jacobians: List[torch.Tensor], + error: torch.Tensor, + ) -> Tuple[List[torch.Tensor], torch.Tensor]: + error = error * self.diagonal.data.view(1, -1) + new_jacobians = [] + for jac in jacobians: + # Jacobian is batch_size x cost_fuction_dim x var_dim + # This left multiplies the weights (inv cov.) to jacobian + new_jacobians.append(jac * self.diagonal.data.view(1, -1, 1)) + return new_jacobians, error + + def _copy_impl(self, new_name: Optional[str] = None) -> "DiagonalCostWeight": + return DiagonalCostWeight(self.diagonal.copy(), name=new_name) diff --git a/theseus/core/objective.py b/theseus/core/objective.py new file mode 100644 index 000000000..77c8bbda2 --- /dev/null +++ b/theseus/core/objective.py @@ -0,0 +1,465 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +import warnings +from collections import OrderedDict +from typing import Dict, List, Optional, Sequence, Union + +import torch + +from theseus.core.theseus_function import TheseusFunction +from theseus.geometry.manifold import Manifold + +from .cost_function import CostFunction +from .cost_weight import CostWeight +from .variable import Variable + + +# If dtype is None, uses torch.get_default_dtype() +class Objective: + def __init__(self, dtype: Optional[torch.dtype] = None): + # maps variable names to the variable objects + self.optim_vars: OrderedDict[str, Manifold] = OrderedDict() + + # maps variable names to variables objects, for optimization variables + # that were registered when adding cost weights. + self.cost_weight_optim_vars: OrderedDict[str, Manifold] = OrderedDict() + + # maps aux. 
variable names to the container objects + self.aux_vars: OrderedDict[str, Variable] = OrderedDict() + + # maps variable name to variable, for any kind of variable added + self._all_variables: OrderedDict[str, Variable] = OrderedDict() + + # maps cost function names to the cost function objects + self.cost_functions: OrderedDict[str, CostFunction] = OrderedDict() + + # maps cost weights to the cost functions that use them + # this is used when deleting cost function to check if the cost weight + # variables can be deleted as well (when no other function uses them) + self.cost_functions_for_weights: Dict[CostWeight, List[CostFunction]] = {} + + # ---- The following two methods are used just to get info from + # ---- the objective, they don't affect the optimization logic. + # a map from optimization variables to list of theseus functions it's + # connected to + self.functions_for_optim_vars: Dict[Manifold, List[TheseusFunction]] = {} + + # a map from all aux. variables to list of theseus functions it's connected to + self.functions_for_aux_vars: Dict[Variable, List[TheseusFunction]] = {} + + self._batch_size: Optional[int] = None + + self.device: torch.device = torch.device("cpu") + + self.dtype: Optional[torch.dtype] = dtype or torch.get_default_dtype() + + # this increases after every add/erase operation, and it's used to avoid + # an optimizer to run on a stale version of the objective (since changing the + # objective structure might break optimizer initialization). 
+ self.current_version = 0 + + def _add_function_variables( + self, + function: TheseusFunction, + optim_vars: bool = True, + is_cost_weight: bool = False, + ): + + if optim_vars: + function_vars = function.optim_vars + self_var_to_fn_map = self.functions_for_optim_vars + self_vars_of_this_type = ( + self.cost_weight_optim_vars if is_cost_weight else self.optim_vars + ) + else: + function_vars = function.aux_vars # type: ignore + self_var_to_fn_map = self.functions_for_aux_vars # type: ignore + self_vars_of_this_type = self.aux_vars # type: ignore + for variable in function_vars: + # Check that variables have name and correct dtype + if variable.name is None: + raise ValueError( + f"Variables added to an objective must be named, but " + f"{function.name} has an unnamed variable." + ) + if variable.dtype != self.dtype: + raise ValueError( + f"Tried to add variable {variable.name} with data type " + f"{variable.dtype} but objective's data type is {self.dtype}." + ) + # Check that names are unique + if variable.name in self._all_variables: + if variable is not self._all_variables[variable.name]: + raise ValueError( + f"Two different variable objects with the " + f"same name ({variable.name}) are not allowed " + "in the same objective." + ) + else: + self._all_variables[variable.name] = variable + assert variable not in self_var_to_fn_map + self_var_to_fn_map[variable] = [] + + # add to either self.optim_vars, + # self.cost_weight_optim_vars or self.aux_vars + self_vars_of_this_type[variable.name] = variable + + # add to list of functions connected to this variable + self_var_to_fn_map[variable].append(function) + + # Adds a cost function to the objective + # Also adds its optimization variables if they haven't been previously added + # Throws an error if a new variable has the same name of a previously added + # variable that is not the same object. 
+ # Does the same for the cost function's auxiliary variables + # Then does the same with the cost weight's auxiliary variables + # + # For now, cost weight "optimization variables" are **NOT** added to the + # set of objective's variables, they are kept in a separate container. + # Update method will check if any of these are not registered as + # cost function variables, and throw a warning. + def add(self, cost_function: CostFunction): + # adds the cost function if not already present + if cost_function.name in self.cost_functions: + if cost_function is not self.cost_functions[cost_function.name]: + raise ValueError( + f"Two different cost function objects with the " + f"same name ({cost_function.name}) are not allowed " + "in the same objective." + ) + else: + warnings.warn( + "This cost function has already been added to the objective, " + "nothing to be done." + ) + else: + self.cost_functions[cost_function.name] = cost_function + + self.current_version += 1 + # ----- Book-keeping for the cost function ------- # + # adds information about the optimization variables in this cost function + self._add_function_variables(cost_function, optim_vars=True) + + # adds information about the auxiliary variables in this cost function + self._add_function_variables(cost_function, optim_vars=False) + + if cost_function.weight not in self.cost_functions_for_weights: + # ----- Book-keeping for the cost weight ------- # + # adds information about the variables in this cost function's weight + self._add_function_variables( + cost_function.weight, optim_vars=True, is_cost_weight=True + ) + # adds information about the auxiliary variables in this cost function's weight + self._add_function_variables( + cost_function.weight, optim_vars=False, is_cost_weight=True + ) + + self.cost_functions_for_weights[cost_function.weight] = [] + + if cost_function.weight.num_optim_vars() > 0: + warnings.warn( + f"The cost weight associated to {cost_function.name} receives one " + "or more 
optimization variables. Differentiating cost " + "weights with respect to optimization variables is not currently " + "supported, thus jacobians computed by our optimizers will be " + "incorrect. You may want to consider moving the weight computation " + "inside the cost function, so that the cost weight only receives " + "auxiliary variables.", + RuntimeWarning, + ) + + self.cost_functions_for_weights[cost_function.weight].append(cost_function) + + if self.optim_vars.keys() & self.aux_vars.keys(): + raise ValueError( + "Objective does not support a variable being both " + "an optimization variable and an auxiliary variable." + ) + + # returns a reference to the cost function with the given name + def get_cost_function(self, name: str) -> CostFunction: + return self.cost_functions.get(name, None) + + # checks if the cost function with the given name is in the objective + def has_cost_function(self, name: str) -> bool: + return name in self.cost_functions + + # checks if the optimization variable with the given name is in the objective + def has_optim_var(self, name: str) -> bool: + return name in self.optim_vars + + # returns a reference to the optimization variable with the given name + def get_optim_var(self, name: str) -> Manifold: + return self.optim_vars.get(name, None) + + # checks if the aux. variable with the given name is in the objective + def has_aux_var(self, name: str) -> bool: + return name in self.aux_vars + + # returns a reference to the aux. 
variable with the given name + def get_aux_var(self, name: str) -> Variable: + return self.aux_vars.get(name, None) + + @property + def batch_size(self) -> int: + return self._batch_size + + def _erase_function_variables( + self, + function: TheseusFunction, + optim_vars: bool = True, + is_cost_weight: bool = False, + ): + if optim_vars: + fn_var_list = function.optim_vars + self_vars_of_this_type = ( + self.cost_weight_optim_vars if is_cost_weight else self.optim_vars + ) + self_var_to_fn_map = self.functions_for_optim_vars + else: + fn_var_list = function.aux_vars # type: ignore + self_vars_of_this_type = self.aux_vars # type: ignore + self_var_to_fn_map = self.functions_for_aux_vars # type: ignore + + for variable in fn_var_list: + cost_fn_idx = self_var_to_fn_map[variable].index(function) + # remove function from the variable's list of connected cost functions + del self_var_to_fn_map[variable][cost_fn_idx] + # if the variable has no other functions, remove it also + if not self_var_to_fn_map[variable]: + del self_var_to_fn_map[variable] + del self_vars_of_this_type[variable.name] + + # Removes a cost function from the objective given its name + # Also removes any of its variables that are no longer associated to other + # functions (either cost functions, or cost weights). 
+ # Does the same for the cost weight, but only if the weight is not associated to + # any other cost function + def erase(self, name: str): + self.current_version += 1 + if name in self.cost_functions: + cost_function = self.cost_functions[name] + # erase variables associated to this cost function (if needed) + self._erase_function_variables(cost_function, optim_vars=True) + self._erase_function_variables(cost_function, optim_vars=False) + + # delete cost function from list of cost functions connected to its weight + cost_weight = cost_function.weight + cost_fn_idx = self.cost_functions_for_weights[cost_weight].index( + cost_function + ) + del self.cost_functions_for_weights[cost_weight][cost_fn_idx] + + # No more cost functions associated to this weight, so can also delete + if len(self.cost_functions_for_weights[cost_weight]) == 0: + # erase its variables (if needed) + self._erase_function_variables( + cost_weight, optim_vars=True, is_cost_weight=True + ) + self._erase_function_variables( + cost_weight, optim_vars=False, is_cost_weight=True + ) + del self.cost_functions_for_weights[cost_weight] + + # finally, delete the cost function + del self.cost_functions[name] + else: + warnings.warn( + "This cost function is not in the objective, nothing to be done." + ) + + # gets the name associated with a cost function object (or None if not present) + def get_cost_function_name(self, cost_function: CostFunction) -> Optional[str]: + for name in self.cost_functions: + if id(cost_function) == id(cost_function): + return name + return None + + @staticmethod + def _get_functions_connected_to_var( + variable: Union[str, Variable], + objectives_var_container_dict: "OrderedDict[str, Variable]", + var_to_cost_fn_map: Dict[Variable, List[TheseusFunction]], + variable_type: str, + ) -> List[TheseusFunction]: + if isinstance(variable, str): + if variable not in objectives_var_container_dict: + raise ValueError( + f"{variable_type} named {variable} is not in the objective." 
+ ) + variable = objectives_var_container_dict[variable] + if variable not in var_to_cost_fn_map: + raise ValueError( + f"{variable_type} {variable.name} is not in the objective." + ) + return var_to_cost_fn_map[variable] + + def get_functions_connected_to_optim_var( + self, variable: Union[Manifold, str] + ) -> List[TheseusFunction]: + return Objective._get_functions_connected_to_var( + variable, + self.optim_vars, # type: ignore + self.functions_for_optim_vars, # type: ignore + "Optimization Variable", + ) + + def get_functions_connected_to_aux_var( + self, aux_var: Union[Variable, str] + ) -> List[TheseusFunction]: + return Objective._get_functions_connected_to_var( + aux_var, self.aux_vars, self.functions_for_aux_vars, "Auxiliary Variable" + ) + + # sum of cost function dimensions + def dim(self) -> int: + err_dim = 0 + for cost_function in self.cost_functions.values(): + err_dim += cost_function.dim() + return err_dim + + # number of (cost functions, variables) + def size(self) -> tuple: + return len(self.cost_functions), len(self.optim_vars) + + # number of cost functions + def size_cost_functions(self) -> int: + return len(self.cost_functions) + + # number of variables + def size_variables(self) -> int: + return len(self.optim_vars) + + # number of auxiliary variables + def size_aux_vars(self) -> int: + return len(self.aux_vars) + + def error(self) -> torch.Tensor: + error_vector = torch.zeros(self.batch_size, self.dim()).to( + device=self.device, dtype=self.dtype + ) + pos = 0 + for cost_function in self.cost_functions.values(): + error_vector[ + :, pos : pos + cost_function.dim() + ] = cost_function.weighted_error() + pos += cost_function.dim() + return error_vector + + def error_squared_norm(self) -> torch.Tensor: + return (self.error() ** 2).sum(dim=1) + + def copy(self) -> "Objective": + new_objective = Objective() + + # First copy all individual cost weights + old_to_new_cost_weight_map: Dict[CostWeight, CostWeight] = {} + for cost_weight in 
self.cost_functions_for_weights: + new_cost_weight = cost_weight.copy( + new_name=cost_weight.name, keep_variable_names=True + ) + old_to_new_cost_weight_map[cost_weight] = new_cost_weight + + # Now copy the cost functions and assign the corresponding cost weight copy + new_cost_functions: List[CostFunction] = [] + for cost_function in self.cost_functions.values(): + new_cost_function = cost_function.copy( + new_name=cost_function.name, keep_variable_names=True + ) + # we assign the allocated weight copies to avoid saving duplicates + new_cost_function.weight = old_to_new_cost_weight_map[cost_function.weight] + new_cost_functions.append(new_cost_function) + + # Handle case where a variable is copied in 2+ cost functions or cost weights, + # since only a single copy should be maintained by objective + for cost_function in new_cost_functions: + # CostFunction + for i, var in enumerate(cost_function.optim_vars): + if new_objective.has_optim_var(var.name): + cost_function.set_optim_var_at( + i, new_objective.optim_vars[var.name] + ) + for i, aux_var in enumerate(cost_function.aux_vars): + if new_objective.has_aux_var(aux_var.name): + cost_function.set_aux_var_at( + i, new_objective.aux_vars[aux_var.name] + ) + # CostWeight + for i, var in enumerate(cost_function.weight.optim_vars): + if var.name in new_objective.cost_weight_optim_vars: + cost_function.weight.set_optim_var_at( + i, new_objective.cost_weight_optim_vars[var.name] + ) + for i, aux_var in enumerate(cost_function.weight.aux_vars): + if new_objective.has_aux_var(aux_var.name): + cost_function.weight.set_aux_var_at( + i, new_objective.aux_vars[aux_var.name] + ) + new_objective.add(cost_function) + return new_objective + + def __deepcopy__(self, memo): + if id(self) in memo: + return memo[id(self)] + the_copy = self.copy() + memo[id(self)] = the_copy + return the_copy + + def update(self, input_data: Optional[Dict[str, torch.Tensor]] = None): + self._batch_size = None + + def _get_batch_size(batch_sizes: 
Sequence[int]) -> int: + unique_batch_sizes = set(batch_sizes) + if len(unique_batch_sizes) == 1: + return batch_sizes[0] + if len(unique_batch_sizes) == 2: + min_bs = min(unique_batch_sizes) + max_bs = max(unique_batch_sizes) + if min_bs == 1: + return max_bs + raise ValueError("Provided data tensors must be broadcastable.") + + for var_name, data in input_data.items(): + if data.ndim < 2: + raise ValueError( + f"Input data tensors must have a batch dimension and " + f"one ore more data dimensions, but data.ndim={data.ndim} for " + f"tensor with name {var_name}." + ) + if var_name in self.optim_vars: + self.optim_vars[var_name].update(data) + elif var_name in self.aux_vars: + self.aux_vars[var_name].update(data) + elif var_name in self.cost_weight_optim_vars: + self.cost_weight_optim_vars[var_name].update(data) + warnings.warn( + "Updated a variable declared as optimization, but it is " + "only associated to cost weights and not to any cost functions. " + "Theseus optimizers will only update optimization variables " + "that are associated to one or more cost functions." + ) + else: + warnings.warn( + f"Attempted to update a tensor with name {var_name}, " + "which is not associated to any variable in the objective." 
+ ) + + # Check that the batch size of all data is consistent after update + batch_sizes = [v.data.shape[0] for v in self.optim_vars.values()] + batch_sizes.extend([v.data.shape[0] for v in self.aux_vars.values()]) + self._batch_size = _get_batch_size(batch_sizes) + + # iterates over cost functions + def __iter__(self): + return iter([f for f in self.cost_functions.values()]) + + # Applies to() with given args to all tensors in the objective + def to(self, *args, **kwargs): + for cost_function in self.cost_functions.values(): + cost_function.to(*args, **kwargs) + device, dtype, *_ = torch._C._nn._parse_to(*args, **kwargs) + self.device = device + self.dtype = dtype diff --git a/theseus/core/tests/__init__.py b/theseus/core/tests/__init__.py new file mode 100644 index 000000000..7bec24cb1 --- /dev/null +++ b/theseus/core/tests/__init__.py @@ -0,0 +1,4 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. diff --git a/theseus/core/tests/common.py b/theseus/core/tests/common.py new file mode 100644 index 000000000..b9319836b --- /dev/null +++ b/theseus/core/tests/common.py @@ -0,0 +1,201 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. 
+ +import copy + +import torch + +import theseus as th + + +class MockVar(th.Manifold): + def __init__(self, length, data=None, name=None): + super().__init__(length, data=data, name=name) + + @staticmethod + def _init_data(length): + return torch.empty(1, length) + + def dof(self): + return 0 + + def _local_impl(self, variable2): + pass + + def _local_jacobian(self, var2): + pass + + def _retract_impl(delta): + pass + + def _copy_impl(self, new_name=None): + return MockVar(self.data.shape[1], data=self.data.clone(), name=new_name) + + +class MockCostWeight(th.CostWeight): + def __init__( + self, the_data, name=None, add_dummy_var_with_name=None, add_optim_var=None + ): + super().__init__(name=name) + self.the_data = the_data + self.register_aux_var("the_data") + if add_dummy_var_with_name: + var = MockVar(1, name=add_dummy_var_with_name) + setattr(self, add_dummy_var_with_name, var) + self.register_optim_var(add_dummy_var_with_name) + if add_optim_var: + setattr(self, add_optim_var.name, add_optim_var) + self.register_optim_var(add_optim_var.name) + + def weight_error(self, error): + return self.the_data * error + + def weight_jacobians_and_error(self, jacobians, error): + raise NotImplementedError( + "weight_jacobians_and_error is not implemented for MockCostWeight." 
+ ) + + def _copy_impl(self, new_name=None): + return MockCostWeight(self.the_data.copy(), name=new_name) + + +class NullCostWeight(th.CostWeight): + def __init__(self): + super().__init__(name="null_cost_weight") + + def _init_data(self): + pass + + def weight_error(self, error): + return error + + def weight_jacobians_and_error(self, jacobians, error): + return jacobians, error + + def _copy_impl(self, new_name=None): + return NullCostWeight() + + +class MockCostFunction(th.CostFunction): + def __init__(self, optim_vars, aux_vars, cost_weight, name=None): + super().__init__(cost_weight, name=name) + for i, var in enumerate(optim_vars): + attr_name = f"optim_var_{i}" + setattr(self, attr_name, var) + self.register_optim_var(attr_name) + for i, aux in enumerate(aux_vars): + attr_name = f"aux_var_{i}" + setattr(self, attr_name, aux) + self.register_aux_var(attr_name) + self._dim = 2 + + def error(self): + mu = torch.stack([v.data for v in self.optim_vars]).sum() + return mu * torch.ones(self._dim) + + def jacobians(self): + return [self.error()] * len(self._optim_vars_attr_names) + + def dim(self) -> int: + return self._dim + + def _copy_impl(self, new_name=None): + return MockCostFunction( + [v.copy() for v in self.optim_vars], + [aux.copy() for aux in self.aux_vars], + self.weight.copy(), + name=new_name, + ) + + +def create_mock_cost_functions(data=None, cost_weight=NullCostWeight()): + len_data = 1 if data is None else data.shape[1] + var1 = MockVar(len_data, data=data, name="var1") + var2 = MockVar(len_data, data=data, name="var2") + var3 = MockVar(len_data, data=data, name="var3") + aux1 = MockVar(len_data, data=data, name="aux1") + aux2 = MockVar(len_data, data=data, name="aux2") + names = [ + "MockCostFunction.var1.var2", + "MockCostFunction.var1.var3", + "MockCostFunction.var2.var3", + ] + cost_function_1_2 = MockCostFunction( + [var1, var2], [aux1, aux2], cost_weight, name=names[0] + ) + cost_function_1_3 = MockCostFunction( + [var1, var3], [aux1], 
cost_weight, name=names[1] + ) + cost_function_2_3 = MockCostFunction( + [var2, var3], [aux2], cost_weight, name=names[2] + ) + + var_to_cost_functions = { + var1: [cost_function_1_2, cost_function_1_3], + var2: [cost_function_1_2, cost_function_2_3], + var3: [cost_function_2_3, cost_function_1_3], + } + aux_to_cost_functions = { + aux1: [cost_function_1_2, cost_function_1_3], + aux2: [cost_function_1_2, cost_function_2_3], + } + return ( + [cost_function_1_2, cost_function_1_3, cost_function_2_3], + names, + var_to_cost_functions, + aux_to_cost_functions, + ) + + +def create_objective_with_mock_cost_functions(data=None, cost_weight=NullCostWeight()): + ( + cost_functions, + names, + var_to_cost_functions, + aux_to_cost_functions, + ) = create_mock_cost_functions(data=data, cost_weight=cost_weight) + + objective = th.Objective() + for cost_function in cost_functions: + objective.add(cost_function) + + return ( + objective, + cost_functions, + names, + var_to_cost_functions, + aux_to_cost_functions, + ) + + +def check_copy_var(var): + var.name = "old" + new_var = var.copy(new_name="new") + assert var is not new_var + assert var.data is not new_var.data + assert torch.allclose(var.data, new_var.data) + assert new_var.name == "new" + new_var_no_name = copy.deepcopy(var) + assert new_var_no_name.name == f"{var.name}_copy" + + +def check_another_theseus_tensor_is_copy(var, other_var): + assert isinstance(var, other_var.__class__) + assert var is not other_var + check_another_torch_tensor_is_copy(var.data, other_var.data) + + +def check_another_torch_tensor_is_copy(tensor, other_tensor): + assert tensor is not other_tensor + assert torch.allclose(tensor, other_tensor) + + +def check_another_theseus_function_is_copy(fn, other_fn, new_name): + assert fn is not other_fn + assert other_fn.name == new_name + for var, new_var in zip(fn.optim_vars, other_fn.optim_vars): + check_another_theseus_tensor_is_copy(var, new_var) + for aux, new_aux in zip(fn.aux_vars, 
other_fn.aux_vars): + check_another_theseus_tensor_is_copy(aux, new_aux) diff --git a/theseus/core/tests/test_cost_function.py b/theseus/core/tests/test_cost_function.py new file mode 100644 index 000000000..bcc9cf491 --- /dev/null +++ b/theseus/core/tests/test_cost_function.py @@ -0,0 +1,187 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +import copy + +import numpy as np +import pytest # noqa: F401 +import torch + +import theseus as th + +from .common import ( + MockCostFunction, + MockCostWeight, + MockVar, + check_another_theseus_function_is_copy, + create_mock_cost_functions, +) + + +def test_copy(): + cost_weight = MockCostWeight(th.Variable(torch.ones(1))) + data = torch.ones(1, 1) + cost_functions, *_ = create_mock_cost_functions(data=data, cost_weight=cost_weight) + + for cost_function in cost_functions: + cost_function.weight = cost_weight + + def _check_new_cost_function(new_cost_function): + check_another_theseus_function_is_copy( + cost_function, new_cost_function, new_name=f"{cost_function.name}_copy" + ) + check_another_theseus_function_is_copy( + cost_weight, + new_cost_function.weight, + new_name=f"{cost_weight.name}_copy", + ) + + _check_new_cost_function(cost_function.copy()) + _check_new_cost_function(copy.deepcopy(cost_function)) + + +def test_default_name_and_ids(): + reps = 100 + seen_ids = set() + for i in range(reps): + cost_function = MockCostFunction([], [], MockCostWeight(torch.ones(1))) + cost_function_name = f"MockCostFunction__{cost_function._id}" + seen_ids.add(cost_function._id) + assert cost_function.name == cost_function_name + assert len(seen_ids) == reps + + +def test_autodiff_cost_function_error_and_jacobians_shape(): + for i in range(100): + num_optim_vars = np.random.randint(0, 5) + num_aux_vars = np.random.randint(0, 5) + batch_size = np.random.randint(1, 10) + err_dim = 
np.random.randint(1, 5) + optim_vars = [] + aux_vars = [] + variable_values = torch.randn(num_optim_vars + num_aux_vars) + idx = 0 + for i in range(num_optim_vars): + optim_vars.append( + MockVar( + idx + 1, + data=torch.ones(batch_size, idx + 1) * variable_values[idx], + name=f"optim_var_{i}", + ) + ) + idx += 1 + for i in range(num_aux_vars): + aux_vars.append( + MockVar( + idx + 1, + data=torch.ones(batch_size, idx + 1) * variable_values[idx], + name=f"aux_var_{i}", + ) + ) + idx += 1 + cost_weight = MockCostWeight(torch.ones(1, 1)) + + # checks that the right number of optimization variables is passed + # checks that the variable values are correct + # returns the sum of the first elements of each tensor, which should be the + # same as the sum of variables_values + def error_fn(optim_vars, aux_vars): + assert isinstance(optim_vars, tuple) + assert len(optim_vars) == num_optim_vars + assert len(aux_vars) == num_aux_vars + ret_val = torch.zeros(batch_size, err_dim) + + all_vars = optim_vars + aux_vars + + vals = [] + for i, arg in enumerate(all_vars): + assert isinstance(arg, th.Variable) + assert arg.shape == (batch_size, i + 1) + assert arg.data.allclose(variable_values[i] * torch.ones_like(arg.data)) + vals.append(arg[0, 0]) + return ret_val + torch.Tensor(vals).sum() + + # this checks that 0 optimization variables is not allowed + if len(optim_vars) < 1: + with pytest.raises(ValueError): + th.AutoDiffCostFunction( + optim_vars, + error_fn, + 1, + cost_weight=cost_weight, + aux_vars=aux_vars, + ) + else: + # check that the error function returns the correct value + cost_function = th.AutoDiffCostFunction( + optim_vars, + error_fn, + err_dim, + cost_weight=cost_weight, + aux_vars=aux_vars, + ) + err = cost_function.error() + assert err.allclose(variable_values.sum() * torch.ones(batch_size, err_dim)) + + # Now checking the jacobians + jacobians, err_jac = cost_function.jacobians() + assert err_jac.allclose(err) + assert len(jacobians) == num_optim_vars + for i 
in range(num_optim_vars): + # variable dim is i + 1 (see MockVar creation line) + assert jacobians[i].shape == (batch_size, err_dim, i + 1) + + +def test_autodiff_cost_function_cost_weight(): + batch_size = 10 + optim_vars = [] + aux_vars = [] + + for i in range(5): + optim_vars.append( + MockVar( + 1, + data=torch.ones(batch_size, 1) * torch.randn(1), + name=f"optim_var_{i}", + ) + ) + aux_vars.append( + MockVar( + 1, + data=torch.ones(batch_size, 1) * torch.randn(1), + name=f"aux_var_{i}", + ) + ) + + def error_fn(optim_vars, aux_vars): + return torch.ones(batch_size, 1) + + # test verifying default CostWeight + cost_function = th.AutoDiffCostFunction( + optim_vars, + error_fn, + 1, + aux_vars=aux_vars, + ) + assert type(cost_function.weight).__name__ == "ScaleCostWeight" + assert torch.allclose(cost_function.weight.scale.data, torch.ones(1, 1)) + weighted_error = cost_function.weighted_error() + assert torch.allclose(weighted_error, torch.ones(batch_size, 1)) + + # test overriding default CostWeight + for i in range(10): + cost_weight_value = torch.randn(1, 1) + cost_weight = MockCostWeight(cost_weight_value) + cost_function = th.AutoDiffCostFunction( + optim_vars, + error_fn, + 1, + cost_weight=cost_weight, + aux_vars=aux_vars, + ) + assert torch.allclose(cost_function.weight.the_data, cost_weight_value) + weighted_error = cost_function.weighted_error() + direct_error_computation = cost_weight_value * torch.ones(batch_size, 1) + assert torch.allclose(weighted_error, direct_error_computation) diff --git a/theseus/core/tests/test_manifold.py b/theseus/core/tests/test_manifold.py new file mode 100644 index 000000000..d55b050f0 --- /dev/null +++ b/theseus/core/tests/test_manifold.py @@ -0,0 +1,72 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. 
+ +import pytest # noqa: F401 +import torch + +import theseus as th + +from .common import MockVar, check_copy_var + + +def test_copy(): + for i in range(100): + size = torch.randint(low=1, high=21, size=(1,)).item() + data = torch.rand(size=(1,) + (size,)) + var = MockVar(size, data=data, name="var") + check_copy_var(var) + + +def test_var_shape(): + for sz in range(100): + data = torch.ones(1, sz) + var = MockVar(sz, data=data) + assert data.shape == var.shape + + +def test_update_shape_check(): + for sz in range(2, 100): + data = torch.ones(sz) + var = MockVar(sz) + with pytest.raises(ValueError): + var.update(data) # no batch dimension + with pytest.raises(ValueError): + var.update(data.view(-1, 1)) # wrong dimension + var.update(data.view(1, -1)) + assert torch.isclose(var.data.squeeze(), data).all() + + +class MockVarNoArgs(th.Manifold): + def __init__(self, data=None, name=None): + super().__init__(data=data, name=name) + + @staticmethod + def _init_data(): + return torch.ones(1, 1) + + def dof(self): + return 0 + + def _local_impl(self, variable2): + pass + + def _local_jacobian(self, var2): + pass + + def _retract_impl(self, delta): + pass + + def _copy_impl(self): + return MockVarNoArgs() + + +def test_variable_no_args_init(): + var = MockVarNoArgs(name="mock") + assert var.data.allclose(torch.ones(1, 1)) + assert var.name == "mock" + var = MockVarNoArgs(data=torch.ones(2, 1)) + assert var.data.allclose(torch.ones(2, 1)) + var.update(torch.ones(3, 1)) + assert var.data.allclose(torch.ones(3, 1)) diff --git a/theseus/core/tests/test_objective.py b/theseus/core/tests/test_objective.py new file mode 100644 index 000000000..ab873f1b4 --- /dev/null +++ b/theseus/core/tests/test_objective.py @@ -0,0 +1,469 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. 
+ +import numpy as np +import pytest # noqa: F401 +import torch + +import theseus as th + +from .common import ( + MockCostFunction, + MockCostWeight, + MockVar, + NullCostWeight, + check_another_theseus_tensor_is_copy, + create_mock_cost_functions, + create_objective_with_mock_cost_functions, +) + + +def test_add(): + objective, cost_functions, names, *_ = create_objective_with_mock_cost_functions() + + for cost_function, name in zip(cost_functions, names): + assert name in objective.cost_functions + assert objective.cost_functions[name] == cost_function + + # --------- All the code that follows test for handling of errors --------- + def _create_cost_function_with_n_vars_and_m_aux( + cost_function_name, var_names, aux_names, cost_weight + ): + the_vars = [MockVar(1, name=name_) for name_ in var_names] + the_aux = [MockVar(1, name=name_) for name_ in aux_names] + return MockCostFunction( + the_vars, + the_aux, + cost_weight, + name=cost_function_name, + ) + + # ### Testing no duplicate names allowed ### + # duplicate cost function name + new_cost_function = _create_cost_function_with_n_vars_and_m_aux( + cost_functions[0].name, ["none1", "none2"], [], NullCostWeight() + ) + with pytest.raises(ValueError): + objective.add(new_cost_function) + # duplicate variable name + new_cost_function = _create_cost_function_with_n_vars_and_m_aux( + "new", [cost_functions[0].optim_var_0.name], [], NullCostWeight() + ) + with pytest.raises(ValueError): + objective.add(new_cost_function) + + # This for loop tests two failure cases: + # i) duplicate aux. variable name and + # ii) same name used both for optimization var and aux. 
variable + for aux_name in [ + cost_functions[0].aux_var_0.name, + cost_functions[0].optim_var_0.name, + ]: + new_cost_function = _create_cost_function_with_n_vars_and_m_aux( + f"another_{aux_name}", ["some_name"], [aux_name], NullCostWeight() + ) + with pytest.raises(ValueError): + objective.add(new_cost_function) + + # -------------- Test similar stuff but for cost weight -------- + cost_weight = MockCostWeight( + th.Variable(torch.ones(1), name="cost_weight_aux"), + add_dummy_var_with_name="cost_weight_var_1", + ) + yet_another_cost_function = _create_cost_function_with_n_vars_and_m_aux( + "yet_another", ["yet_var_1"], ["yet_aux_1"], cost_weight + ) + with pytest.warns(RuntimeWarning): # optim var associated to weight + objective.add(yet_another_cost_function) # no conflict here + + cost_weight_with_conflict_in_aux_var = MockCostWeight( + th.Variable(torch.ones(1), name="cost_weight_aux") + ) + cost_function_with_cost_weight_conflict = ( + _create_cost_function_with_n_vars_and_m_aux( + "yet_another_2", + ["yet_var_2"], + "yet_aux_2", + cost_weight_with_conflict_in_aux_var, + ) + ) + with pytest.raises(ValueError): + objective.add(cost_function_with_cost_weight_conflict) + + cost_weight_with_conflict_in_var = MockCostWeight( + th.Variable(torch.ones(1), name="cost_weight_aux"), + add_dummy_var_with_name="cost_weight_var_1", + ) + another_cost_function_with_cost_weight_conflict = ( + _create_cost_function_with_n_vars_and_m_aux( + "yet_another_3", + ["yet_var_3"], + "yet_aux_3", + cost_weight_with_conflict_in_var, + ) + ) + with pytest.raises(ValueError): + objective.add(another_cost_function_with_cost_weight_conflict) + + +def test_getters(): + ( + objective, + cost_functions, + names, + var_to_cost_functions, + aux_to_cost_functions, + ) = create_objective_with_mock_cost_functions() + for cost_function, name in zip(cost_functions, names): + assert id(objective.get_cost_function(name)) == id(cost_function) + + for var in var_to_cost_functions: + assert id(var) == 
id(objective.get_optim_var(var.name)) + + for aux in aux_to_cost_functions: + assert id(aux) == id(objective.get_aux_var(aux.name)) + + assert objective.get_cost_function("bad-name") is None + assert objective.get_optim_var("bad-name") is None + assert objective.get_aux_var("bad-name") is None + + +def test_has_cost_function_and_has_var(): + objective, cost_functions, names, *_ = create_objective_with_mock_cost_functions() + + for name in names: + assert objective.has_cost_function(name) + + for cost_function in cost_functions: + for var in cost_function.optim_vars: + assert objective.has_optim_var(var.name) + + for cost_function in cost_functions: + for aux in cost_function.aux_vars: + assert objective.has_aux_var(aux.name) + + +def test_add_and_erase_step_by_step(): + var1 = MockVar(1, data=None, name="var1") + var2 = MockVar(1, data=None, name="var2") + var3 = MockVar(1, data=None, name="var3") + aux1 = MockVar(1, data=None, name="aux1") + aux2 = MockVar(1, data=None, name="aux2") + cw1 = MockCostWeight(aux1, name="cw1", add_dummy_var_with_name="ignored_optim_var") + cw2 = MockCostWeight(aux2, name="cw2", add_optim_var=var1) + + cf1 = MockCostFunction([var1, var2], [aux1, aux2], cw1, name="cf1") + cf2 = MockCostFunction([var1, var3], [aux1], cw2, name="cf2") + cf3 = MockCostFunction([var2, var3], [aux2], cw2, name="cf3") + + objective = th.Objective() + for cost_function in [cf1, cf2, cf3]: + if cost_function is not cf3: + with pytest.warns(RuntimeWarning): + # a warning should emit the first time cw1/cw2 are added + objective.add(cost_function) + else: + objective.add(cost_function) + + for name in ["var1", "ignored_optim_var"]: + assert name in objective.cost_weight_optim_vars + for name in ["var1", "var2", "var2"]: + assert name in objective.optim_vars + for name in ["aux1", "aux2"]: + assert name in objective.aux_vars + + # Checks that the objective is maintaining variable -> fn correctly, against + # a list of expected functions + def 
_check_funs_for_variable( + var_, expected_fn_list_, optim_var=True, is_cost_weight_optim=False + ): + # If var should have been deleted, check that no trace of it remains + if not expected_fn_list_: + if optim_var: + assert var_ not in objective.functions_for_optim_vars + if is_cost_weight_optim: + assert var_ not in objective.cost_weight_optim_vars + else: + assert var_ not in objective.optim_vars + else: + assert var_ not in objective.functions_for_aux_vars + assert var_ not in objective.aux_vars + return + + # Otherwise check that list of connected functions match the expected + if optim_var: + obj_container = objective.functions_for_optim_vars[var_] + else: + obj_container = objective.functions_for_aux_vars[var_] + assert len(obj_container) == len(expected_fn_list_) + for fun_ in expected_fn_list_: + assert fun_ in obj_container + + # Runs the above check for all variables in this graph, given expected lists + # for each + def _check_all_vars(v1_lis_, v2_lis_, v3_lis_, cw1_opt_lis_, a1_lis_, a2_lis_): + _check_funs_for_variable(var1, v1_lis_) + _check_funs_for_variable(var2, v2_lis_) + _check_funs_for_variable(var3, v3_lis_) + _check_funs_for_variable( + cw1.optim_var_at(0), cw1_opt_lis_, is_cost_weight_optim=True + ) + _check_funs_for_variable(aux1, a1_lis_, optim_var=False) + _check_funs_for_variable(aux2, a2_lis_, optim_var=False) + + v1_lis = [cw2, cf1, cf2] + v2_lis = [cf1, cf3] + v3_lis = [cf2, cf3] + cw1o_lis = [cw1] + a1_lis = [cw1, cf1, cf2] + a2_lis = [cw2, cf1, cf3] + + _check_all_vars(v1_lis, v2_lis, v3_lis, cw1o_lis, a1_lis, a2_lis) + + objective.erase("cf1") + v1_lis = [cw2, cf2] + v2_lis = [cf3] + cw1o_lis = [] + a1_lis = [cf2] # cf1 and cw1 are deleted, since cw1 not used by any other cost fn + a2_lis = [cw2, cf3] + _check_all_vars(v1_lis, v2_lis, v3_lis, cw1o_lis, a1_lis, a2_lis) + assert cw1 not in objective.cost_functions_for_weights + + objective.erase("cf2") + v1_lis = [cw2] # cw2 still used by cf3 + v3_lis = [cf3] + a1_lis = [] + 
_check_all_vars(v1_lis, v2_lis, v3_lis, cw1o_lis, a1_lis, a2_lis) + assert cw2 in objective.cost_functions_for_weights + + objective.erase("cf3") + v1_lis = [] + v2_lis = [] + v3_lis = [] + a2_lis = [] + _check_all_vars(v1_lis, v2_lis, v3_lis, cw1o_lis, a1_lis, a2_lis) + assert cw2 not in objective.cost_functions_for_weights + + +def test_objective_error(): + vars = [] + # Create var_i with data = i, for i in {0, 1, 2} + for i in range(3): + data = i * torch.ones(1, 1) + var = MockVar(1, data=data, name=f"var{i}") + var.update(data) + vars.append(var) + + # Adds all three possible cost functions with var_i and var_j to the objective + cov = NullCostWeight() + objective = th.Objective() + expected_objective_error = [] + for i, v1 in enumerate(vars): + for j, v2 in enumerate(vars): + if i == j: + continue + cost_function = MockCostFunction([v1, v2], [], cov) + objective.add(cost_function) + cost_function_error = cost_function.error().numpy() + expected_objective_error.append(cost_function_error) + # Mock cost function computes error as (i + j)[1, 1] for var_i, var_j + np.testing.assert_almost_equal(cost_function_error, (i + j) * np.ones(2)) + # Given these cost functionss, + # error is just the concatenation of the cost function errors above + expected_objective_error = np.concatenate(expected_objective_error) + + # batch_size is set by update, but for this test we just want to check + # that the error method works properly (no pass through update) + objective._batch_size = 1 + error = objective.error().squeeze().numpy() + np.testing.assert_almost_equal(error, expected_objective_error) + + # Test the squared error function + squared_error = np.sum(expected_objective_error ** 2) + np.testing.assert_almost_equal( + objective.error_squared_norm().numpy(), squared_error + ) + + +def test_get_cost_functions_connected_to_vars(): + ( + objective, + _, + _, + optim_var_to_cost_functions, + aux_to_cost_functions, + ) = create_objective_with_mock_cost_functions() + + def 
_check_connections(var_to_cost_fns, objective_get_var_method): + for variable, expected_cost_functions in var_to_cost_fns.items(): + + def _check_cost_functions(cost_functions): + assert len(cost_functions) == len(expected_cost_functions) + for cost_function in cost_functions: + assert cost_function in expected_cost_functions + + _check_cost_functions(objective_get_var_method(variable)) + _check_cost_functions(objective_get_var_method(variable.name)) + + with pytest.raises(ValueError): + objective_get_var_method("var_not_in_objective") + objective_get_var_method(MockVar(None, name="none")) + + _check_connections( + optim_var_to_cost_functions, objective.get_functions_connected_to_optim_var + ) + _check_connections( + aux_to_cost_functions, objective.get_functions_connected_to_aux_var + ) + + +def test_copy(): + objective, cost_functions, *_ = create_objective_with_mock_cost_functions( + torch.ones(1, 1), MockCostWeight(th.Variable(torch.ones(1))) + ) + new_objective = objective.copy() + assert new_objective is not objective + + assert objective.size_cost_functions() == new_objective.size_cost_functions() + for cost_function, new_cost_function in zip( + cost_functions, new_objective.cost_functions.values() + ): + assert cost_function.num_optim_vars() == new_cost_function.num_optim_vars() + assert cost_function.num_aux_vars() == new_cost_function.num_aux_vars() + assert cost_function.name == new_cost_function.name + assert cost_function is not new_objective + for var, new_var in zip(cost_function.optim_vars, new_cost_function.optim_vars): + check_another_theseus_tensor_is_copy(var, new_var) + for aux, new_aux in zip(cost_function.aux_vars, new_cost_function.aux_vars): + check_another_theseus_tensor_is_copy(aux, new_aux) + + +def test_copy_no_duplicate_cost_weights(): + objective = th.Objective() + v1 = MockVar(1, name="v1") + cw1 = th.ScaleCostWeight(1.0) + cw2 = th.ScaleCostWeight(2.0) + objective.add(MockCostFunction([v1], [], cw1, "cf1")) + 
objective.add(MockCostFunction([v1], [], cw1, "cf2")) + objective.add(MockCostFunction([v1], [], cw2, "cf3")) + objective.add(MockCostFunction([v1], [], cw2, "cf4")) + objective.add(MockCostFunction([v1], [], cw2, "cf5")) + + objective_copy = objective.copy() + seen_cw1 = set() + seen_cw2 = set() + for cf in objective_copy.cost_functions.values(): + if cf.name in ["cf1", "cf2"]: + scale = 1.0 + original_weight = cw1 + set_to_add = seen_cw1 + else: + scale = 2.0 + original_weight = cw2 + set_to_add = seen_cw2 + + assert isinstance(cf.weight, th.ScaleCostWeight) + assert cf.weight.scale.data.item() == scale + assert cf.weight is not original_weight + set_to_add.add(cf.weight) + assert len(seen_cw1) == 1 + assert len(seen_cw2) == 1 + + +def test_update_updates_properly(): + ( + objective, + _, + _, + var_to_cost_functions, + aux_to_cost_functions, + ) = create_objective_with_mock_cost_functions( + torch.ones(1, 1), + MockCostWeight(th.Variable(torch.ones(1), name="cost_weight_aux")), + ) + + input_data = {} + for var in var_to_cost_functions: + input_data[var.name] = 2 * var.data.clone() + for aux in aux_to_cost_functions: + input_data[aux.name] = 2 * aux.data.clone() + + objective.update(input_data=input_data) + assert objective.batch_size == 1 + + for var_name, data in input_data.items(): + if var_name in [v.name for v in var_to_cost_functions]: + var_ = objective.get_optim_var(var_name) + if var_name in [aux.name for aux in aux_to_cost_functions]: + var_ = objective.get_aux_var(var_name) + assert data is var_.data + + +def test_update_raises_batch_size_error(): + ( + objective, + _, + _, + var_to_cost_functions, + aux_to_cost_functions, + ) = create_objective_with_mock_cost_functions( + torch.ones(1, 1), + MockCostWeight(th.Variable(torch.ones(1), name="cost_weight_aux")), + ) + + input_data = {} + batch_size = 2 + # first check that we can change the current batch size (doubling the size)s + for var in var_to_cost_functions: + new_data = torch.ones(batch_size, 1) 
+ input_data[var.name] = new_data + for aux in aux_to_cost_functions: + new_data = torch.ones(batch_size, 1) + input_data[aux.name] = new_data + objective.update(input_data=input_data) + assert objective.batch_size == batch_size + + # change one of the variables, no error since batch_size = 1 is broadcastable + input_data["var1"] = torch.ones(1, 1) + objective.update(input_data=input_data) + assert objective.batch_size == batch_size + + # change another variable, this time throws errors since found batch size 2 and 3 + input_data["var2"] = torch.ones(batch_size + 1, 1) + with pytest.raises(ValueError): + objective.update(input_data=input_data) + + # change back before testing the aux. variable + input_data["var2"] = torch.ones(batch_size, 1) + objective.update(input_data=input_data) # shouldn't throw error + + # auxiliary variables should also throw error + input_data["aux1"] = torch.ones(batch_size + 1, 1) + with pytest.raises(ValueError): + objective.update(input_data=input_data) + + +def test_iterator(): + cost_functions, *_ = create_mock_cost_functions() + + objective = th.Objective() + for f in cost_functions: + objective.add(f) + + idx = 0 + for f in objective: + assert f == cost_functions[idx] + idx += 1 + + +def test_to_dtype(): + objective, *_ = create_objective_with_mock_cost_functions() + for dtype in [torch.float32, torch.float64, torch.long]: + objective.to(dtype=dtype) + for _, cf in objective.cost_functions.items(): + for var in cf.optim_vars: + assert var.dtype == dtype + for aux in cf.aux_vars: + assert aux.dtype == dtype diff --git a/theseus/core/tests/test_precision.py b/theseus/core/tests/test_precision.py new file mode 100644 index 000000000..464418154 --- /dev/null +++ b/theseus/core/tests/test_precision.py @@ -0,0 +1,67 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. 
+ +import copy + +import pytest # noqa: F401 +import torch + +import theseus as th + +from .common import ( + check_another_theseus_function_is_copy, + check_another_theseus_tensor_is_copy, + create_mock_cost_functions, +) + + +def test_copy_scale_cost_weight(): + scale = th.Variable(torch.tensor(1.0)) + p1 = th.ScaleCostWeight(scale, name="scale_cost_weight") + for the_copy in [p1.copy(), copy.deepcopy(p1)]: + check_another_theseus_function_is_copy(p1, the_copy, new_name=f"{p1.name}_copy") + check_another_theseus_tensor_is_copy(scale, the_copy.scale) + + +def test_copy_diagonal_cost_weight(): + diagonal = th.Variable(torch.ones(3)) + p1 = th.DiagonalCostWeight(diagonal, name="diagonal_cost_weight") + for the_copy in [p1.copy(), copy.deepcopy(p1)]: + check_another_theseus_function_is_copy(p1, the_copy, new_name=f"{p1.name}_copy") + check_another_theseus_tensor_is_copy(diagonal, the_copy.diagonal) + + +def test_weight_error_scale_and_diagonal_cost_weight(): + def _check(f1_, f2_, scale_): + j1, e1 = f1_.weighted_jacobians_error() + j2, e2 = f2_.weighted_jacobians_error() + assert torch.allclose(e1 * scale_, e2) + for i in range(len(j1)): + assert torch.allclose(j1[i] * scale_, j2[i]) + + for _ in range(10): + scale = torch.rand(1) + cost_functions, *_ = create_mock_cost_functions( + data=torch.ones(1, 10), + cost_weight=th.ScaleCostWeight(th.Variable(torch.tensor(1.0))), + ) + cost_functions_x_scale, *_ = create_mock_cost_functions( + data=torch.ones(1, 10), + cost_weight=th.ScaleCostWeight(th.Variable(scale.squeeze())), + ) + for f1, f2 in zip(cost_functions, cost_functions_x_scale): + _check(f1, f2, scale) + + diagonal = torch.ones(2) + cost_functions, *_ = create_mock_cost_functions( + data=torch.ones(1, 10), + cost_weight=th.DiagonalCostWeight(th.Variable(diagonal)), + ) + cost_functions_x_scale, *_ = create_mock_cost_functions( + data=torch.ones(1, 10), + cost_weight=th.DiagonalCostWeight(th.Variable(diagonal * scale)), + ) + for f1, f2 in zip(cost_functions, 
cost_functions_x_scale): + _check(f1, f2, scale) diff --git a/theseus/core/tests/test_variable.py b/theseus/core/tests/test_variable.py new file mode 100644 index 000000000..2cd2295d8 --- /dev/null +++ b/theseus/core/tests/test_variable.py @@ -0,0 +1,83 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +import numpy as np +import pytest # noqa: F401 +import torch + +import theseus as th + +from .common import MockVar + + +def test_variable_init(): + all_ids = [] + for i in range(100): + if np.random.random() < 0.5: + name = f"name_{i}" + else: + name = None + data = torch.rand(1, 1) + t = th.Variable(data, name=name) + all_ids.append(t._id) + if name is not None: + assert name == t.name + else: + assert t.name == f"Variable__{t._id}" + assert t.data.allclose(data) + + assert len(set(all_ids)) == len(all_ids) + + +def test_properties(): + for i in range(100): + length = np.random.randint(2, 5) + batch_size = np.random.randint(1, 100) + dtype = torch.float if np.random.random() < 0.5 else torch.long + data = torch.ones(batch_size, length, dtype=dtype) + t = th.Variable(data) + assert t.shape == data.shape + assert t.ndim == data.ndim + assert t.dtype == dtype + + +def test_update(): + for _ in range(10): + for length in range(1, 10): + var = MockVar(length) + batch_size = np.random.randint(1, 10) + # check update from torch tensor + new_data_good = torch.rand(batch_size, length) + var.update(new_data_good) + assert var.data is new_data_good + # check update from variable + new_data_good_wrapped = torch.rand(batch_size, length) + another_var = MockVar(length, data=new_data_good_wrapped) + var.update(another_var) + # check raises error on shape + new_data_bad = torch.rand(batch_size, length + 1) + with pytest.raises(ValueError): + var.update(new_data_bad) + # check raises error on dtype + new_data_bad = torch.rand(batch_size, length 
+ 1).double() + with pytest.raises(ValueError): + var.update(new_data_bad) + # check batch indices to ignore + how_many = np.random.randint(1, batch_size + 1) + ignore_indices = np.random.choice(batch_size, size=how_many) + ignore_mask = torch.zeros(batch_size).bool() + ignore_mask[ignore_indices] = 1 + old_data = var.data.clone() + new_data_some_ignored = torch.randn(batch_size, length) + if ignore_indices[0] % 2 == 0: # randomly wrap into a variable to also test + new_data_some_ignored = MockVar(length, new_data_some_ignored) + var.update(new_data_some_ignored, batch_ignore_mask=ignore_mask) + for i in range(batch_size): + if ignore_mask[i] == 1: + assert torch.allclose(var[i], old_data[i]) + else: + if isinstance(new_data_some_ignored, th.Variable): + new_data_some_ignored = new_data_some_ignored.data + assert torch.allclose(var[i], new_data_some_ignored[i]) diff --git a/theseus/core/tests/theseus_function.py b/theseus/core/tests/theseus_function.py new file mode 100644 index 000000000..dccc9ea12 --- /dev/null +++ b/theseus/core/tests/theseus_function.py @@ -0,0 +1,31 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. 
+ +import numpy as np +import pytest # noqa: F401 +import torch + +import theseus.core + +from .common import MockCostFunction, MockCostWeight + + +def test_theseus_function_init(): + all_ids = [] + variables = [theseus.core.Variable(torch.ones(1, 1), name="var_1")] + aux_vars = [theseus.core.Variable(torch.ones(1, 1), name="aux_1")] + for i in range(100): + cost_weight = MockCostWeight(torch.ones(1, 1), name=f"cost_weight_{i}") + if np.random.random() < 0.5: + name = f"name_{i}" + else: + name = None + cost_function = MockCostFunction(variables, aux_vars, cost_weight, name=name) + all_ids.append(cost_function._id) + all_ids.append(cost_weight._id) + if name is not None: + assert name == cost_function.name + + assert len(set(all_ids)) == len(all_ids) diff --git a/theseus/core/theseus_function.py b/theseus/core/theseus_function.py new file mode 100644 index 000000000..d08c43006 --- /dev/null +++ b/theseus/core/theseus_function.py @@ -0,0 +1,144 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +import abc +from itertools import count +from typing import Generator, List, Optional, Sequence + +from theseus.geometry import Manifold + +from .variable import Variable + + +# Base class that provides objective-tracking functionality for functions of tensors +# such as cost fuctions and cost weights. Variables are classified as either +# optimization variables (of the objective), or auxiliary variables. +# +# Subclasses must register their optimization variables and auxiliary variables +# with methods `register_optim_vars` and `register_aux_vars`, respectively. 
+class TheseusFunction(abc.ABC): + _ids = count(0) + + def __init__(self, name: Optional[str] = None): + self._id = next(self._ids) + if name: + self.name = name + else: + self.name = self.get_default_name() + self._optim_vars_attr_names: List[str] = [] + self._aux_vars_attr_names: List[str] = [] + + def get_default_name(self) -> str: + return f"{self.__class__.__name__}__{self._id}" + + def register_optim_var(self, attr_name: str): + if attr_name in self._optim_vars_attr_names: + raise ValueError( + "Tried to register an optimization variable that was previously " + "registered." + ) + if attr_name in self._aux_vars_attr_names: + raise ValueError( + "An optimization variable cannot also be an auxiliary variable." + ) + self._optim_vars_attr_names.append(attr_name) + + def register_aux_var(self, attr_name: str): + if attr_name in self._aux_vars_attr_names: + raise ValueError( + "Tried to register an auxiliary variable that was previously registered." + ) + if attr_name in self._optim_vars_attr_names: + raise ValueError( + "An auxiliary variable cannot also be an optimization variable." 
+ ) + self._aux_vars_attr_names.append(attr_name) + + def register_optim_vars(self, variable_names: Sequence[str]): + for name in variable_names: + self.register_optim_var(name) + + def register_aux_vars(self, aux_var_names: Sequence[str]): + for name in aux_var_names: + self.register_aux_var(name) + + # Must copy everything + @abc.abstractmethod + def _copy_impl(self, new_name: Optional[str] = None) -> "TheseusFunction": + pass + + def copy( + self, new_name: Optional[str] = None, keep_variable_names: bool = False + ) -> "TheseusFunction": + if not new_name: + new_name = f"{self.name}_copy" + new_fn = self._copy_impl(new_name=new_name) + if keep_variable_names: + for old_var, new_var in zip(self.optim_vars, new_fn.optim_vars): + new_var.name = old_var.name + for old_aux, new_aux in zip(self.aux_vars, new_fn.aux_vars): + new_aux.name = old_aux.name + return new_fn + + def __deepcopy__(self, memo): + if id(self) in memo: + return memo[id(self)] + the_copy = self.copy() + memo[id(self)] = the_copy + return the_copy + + @property + def optim_vars(self) -> Generator[Manifold, None, None]: + return (getattr(self, attr) for attr in self._optim_vars_attr_names) + + @property + def aux_vars(self) -> Generator[Variable, None, None]: + return (getattr(self, attr) for attr in self._aux_vars_attr_names) + + def optim_var_at(self, index: int) -> Manifold: + return getattr(self, self._optim_vars_attr_names[index]) + + def aux_var_at(self, index: int) -> Variable: + return getattr(self, self._aux_vars_attr_names[index]) + + # This method is only used when copying the full objective. + # It replaces the variable at the given index in the interal list of + # variables with the variable passed as argument. + # This is used by methods `set_optim_var_at()` and `set_aux_var_at()`. 
+ def _set_variable_at( + self, + index: int, + variable: Variable, + variable_attrs_array: List, + variable_type: str, + ): + if index >= len(variable_attrs_array): + raise ValueError(f"{variable_type} index out of range.") + setattr(self, variable_attrs_array[index], variable) + + # see `_set_variable_at()` + def set_optim_var_at(self, index: int, variable: Manifold): + self._set_variable_at( + index, variable, self._optim_vars_attr_names, "Optimization variable" + ) + + # see `_set_variable_at()` + def set_aux_var_at(self, index: int, variable: Variable): + self._set_variable_at( + index, variable, self._aux_vars_attr_names, "Auxiliary variable" + ) + + def num_optim_vars(self): + return len(self._optim_vars_attr_names) + + def num_aux_vars(self): + return len(self._aux_vars_attr_names) + + # calls to() on the cost weight, optimization variables and any internal tensors + def to(self, *args, **kwargs): + for var in self.optim_vars: + var.to(*args, **kwargs) + for aux in self.aux_vars: + aux.to(*args, **kwargs) diff --git a/theseus/core/variable.py b/theseus/core/variable.py new file mode 100644 index 000000000..6eaeed176 --- /dev/null +++ b/theseus/core/variable.py @@ -0,0 +1,94 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. 
+ +from itertools import count +from typing import Optional, Union + +import torch + + +class Variable: + _ids = count(0) + + def __init__(self, data: torch.Tensor, name: Optional[str] = None): + self._id = next(Variable._ids) + if name: + self.name = name + else: + self.name = f"{self.__class__.__name__}__{self._id}" + self.data = data + + def copy(self, new_name: Optional[str] = None) -> "Variable": + if not new_name: + new_name = f"{self.name}_copy" + return Variable(self.data.clone(), name=new_name) + + def __deepcopy__(self, memo): + if id(self) in memo: + return memo[id(self)] + the_copy = self.copy() + memo[id(self)] = the_copy + return the_copy + + # batch_ignore_mask is a boolean list where batch_ignore_mask[i] = 1 means + # variable[i] will *not* be updated + def update( + self, + data: Union[torch.Tensor, "Variable"], + batch_ignore_mask: Optional[torch.Tensor] = None, + ): + if isinstance(data, Variable): + data = data.data + if ( + len(data.shape) != len(self.data.shape) + or data.shape[1:] != self.data.shape[1:] + ): + raise ValueError( + f"Tried to update tensor {self.name} with data " + f"incompatible with original tensor shape. Given {data.shape[1:]}. " + f"Expected: {self.data.shape[1:]}" + ) + if data.dtype != self.dtype: + raise ValueError( + f"Tried to update used tensor of dtype {data.dtype} but Variable has " + f"type {self.dtype}." 
+ ) + if batch_ignore_mask is not None and batch_ignore_mask.any(): + good_indices = ~batch_ignore_mask + self[good_indices] = data[good_indices] + else: + self.data = data + + def __repr__(self) -> str: + return f"{self.__class__.__name__}(data={self.data}, name={self.name})" + + def __str__(self) -> str: + return repr(self) + + # calls to() on the internal tensors + def to(self, *args, **kwargs): + self.data = self.data.to(*args, **kwargs) + + @property + def shape(self) -> torch.Size: + return self.data.shape + + @property + def device(self) -> torch.device: + return self.data.device + + @property + def dtype(self) -> torch.dtype: + return self.data.dtype + + @property + def ndim(self) -> int: + return self.data.ndim + + def __getitem__(self, item): + return self.data[item] + + def __setitem__(self, item, value): + self.data[item] = value diff --git a/theseus/embodied/__init__.py b/theseus/embodied/__init__.py new file mode 100644 index 000000000..a6bf2b014 --- /dev/null +++ b/theseus/embodied/__init__.py @@ -0,0 +1,14 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +from .collision import Collision2D, EffectorObjectContactPlanar, SignedDistanceField2D +from .measurements import Between, MovingFrameBetween +from .misc import VariableDifference +from .motionmodel import ( + DoubleIntegrator, + GPCostWeight, + GPMotionModel, + QuasiStaticPushingPlanar, +) diff --git a/theseus/embodied/collision/__init__.py b/theseus/embodied/collision/__init__.py new file mode 100644 index 000000000..b27992657 --- /dev/null +++ b/theseus/embodied/collision/__init__.py @@ -0,0 +1,8 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. 
+ +from .collision import Collision2D +from .eff_obj_contact import EffectorObjectContactPlanar +from .signed_distance_field import SignedDistanceField2D diff --git a/theseus/embodied/collision/collision.py b/theseus/embodied/collision/collision.py new file mode 100644 index 000000000..39eb14e74 --- /dev/null +++ b/theseus/embodied/collision/collision.py @@ -0,0 +1,73 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +from typing import List, Optional, Tuple + +import torch + +from theseus.core import CostFunction, CostWeight, Variable +from theseus.embodied.kinematics import IdentityModel, RobotModel +from theseus.geometry import Point2 + +from .signed_distance_field import SignedDistanceField2D + + +class Collision2D(CostFunction): + def __init__( + self, + pose: Point2, + cost_weight: CostWeight, + sdf_origin: Variable, + sdf_data: Variable, + sdf_cell_size: Variable, + cost_eps: Variable, + name: Optional[str] = None, + ): + if not isinstance(pose, Point2): + raise ValueError("Collision2D only accepts 2D poses as inputs.") + super().__init__(cost_weight, name=name) + self.pose = pose + self.sdf_origin = sdf_origin + self.sdf_data = sdf_data + self.sdf_cell_size = sdf_cell_size + self.cost_eps = cost_eps + self.register_optim_vars(["pose"]) + self.register_aux_vars(["sdf_origin", "sdf_data", "sdf_cell_size", "cost_eps"]) + self.robot: RobotModel = IdentityModel() + self.sdf = SignedDistanceField2D(sdf_origin, sdf_cell_size, sdf_data) + + def _compute_distances_and_jacobians( + self, + ) -> Tuple[torch.Tensor, torch.Tensor]: + robot_state = self.robot.forward_kinematics(self.pose) + return self.sdf.signed_distance(robot_state) + + def _error_from_distances(self, distances: torch.Tensor): + return (self.cost_eps.data - distances).clamp(min=0) + + def error(self) -> torch.Tensor: + distances, _ = 
self._compute_distances_and_jacobians() + return self._error_from_distances(distances) + + def jacobians(self) -> Tuple[List[torch.Tensor], torch.Tensor]: + distances, jacobian = self._compute_distances_and_jacobians() + error = self._error_from_distances(distances) + faraway_idx = distances > self.cost_eps.data + jacobian[faraway_idx] = 0.0 + return [-jacobian], error + + def _copy_impl(self, new_name: Optional[str] = None) -> "Collision2D": + return Collision2D( + self.pose.copy(), + self.weight.copy(), + self.sdf_origin.copy(), + self.sdf_data.copy(), + self.sdf_cell_size.copy(), + self.cost_eps.copy(), + name=new_name, + ) + + def dim(self) -> int: + return self.robot.dim() diff --git a/theseus/embodied/collision/eff_obj_contact.py b/theseus/embodied/collision/eff_obj_contact.py new file mode 100644 index 000000000..f92b231df --- /dev/null +++ b/theseus/embodied/collision/eff_obj_contact.py @@ -0,0 +1,116 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. 
+ +from typing import List, Optional, Tuple + +import torch + +from theseus.core import CostFunction, CostWeight, Variable +from theseus.embodied.kinematics import IdentityModel +from theseus.geometry import SE2 + +from .signed_distance_field import SignedDistanceField2D + + +class EffectorObjectContactPlanar(CostFunction): + def __init__( + self, + obj: SE2, + eff: SE2, + cost_weight: CostWeight, + sdf_origin: Variable, + sdf_data: Variable, + sdf_cell_size: Variable, + eff_radius: Variable, + name: Optional[str] = None, + use_huber_loss: bool = False, + ): + super().__init__(cost_weight, name=name) + self.obj = obj + self.eff = eff + self.sdf_origin = sdf_origin + self.sdf_data = sdf_data + self.sdf_cell_size = sdf_cell_size + self.eff_radius = eff_radius + self.register_optim_vars(["obj", "eff"]) + self.register_aux_vars( + ["sdf_origin", "sdf_data", "sdf_cell_size", "eff_radius"] + ) + self.robot = IdentityModel() + self.sdf = SignedDistanceField2D(sdf_origin, sdf_cell_size, sdf_data) + self._use_huber = use_huber_loss + + if use_huber_loss: + raise NotImplementedError( + "Jacobians for huber loss are not yet implemented." 
+ ) + + def _compute_distances_and_jacobians( + self, + ) -> Tuple[torch.Tensor, Tuple[torch.Tensor, torch.Tensor]]: + J_transf: List[torch.Tensor] = [] + J_xy: List[torch.Tensor] = [] + eff__obj = self.obj.transform_to( + self.eff.xy(jacobians=J_xy), jacobians=J_transf + ) + J_transf_obj = J_transf[0] + J_transf_eff = J_transf[1].matmul(J_xy[0]) + robot_state = self.robot.forward_kinematics(eff__obj) + dist, J_dist = self.sdf.signed_distance(robot_state) + J_out = (J_dist.matmul(J_transf_obj), J_dist.matmul(J_transf_eff)) + return dist, J_out + + def _error_from_distances(self, distances: torch.Tensor): + if self._use_huber: + eff_rad = self.eff_radius.data + err = distances.clone() + # linear (two-sided, otherwise this would be 0) + gt_r_idx = distances >= 2 * eff_rad + err[gt_r_idx] = distances[gt_r_idx] - 0.5 * eff_rad + # quadratic + lt_r_pos_idx = (distances < 2 * eff_rad).logical_or(distances > 0) + err[lt_r_pos_idx] = ( + 0.5 * (distances[lt_r_pos_idx] - eff_rad).square() / eff_rad + ) + # linear + neg_idx = distances < 0 + err[neg_idx] = -distances[neg_idx] + 0.5 * eff_rad + return err + else: + return (distances - self.eff_radius.data).abs() + + def error(self) -> torch.Tensor: + distances, _ = self._compute_distances_and_jacobians() + return self._error_from_distances(distances) + + def jacobians(self) -> Tuple[List[torch.Tensor], torch.Tensor]: + distances, jacobians = self._compute_distances_and_jacobians() + error = self._error_from_distances(distances) + if self._use_huber: + raise NotImplementedError( + "Jacobians for huber loss are not yet implemented." 
+ ) + else: + lt_idx = distances < self.eff_radius.data + jacobians[0][lt_idx] *= -1.0 + jacobians[1][lt_idx] *= -1.0 + return [jacobians[0], jacobians[1]], error + + def _copy_impl( + self, new_name: Optional[str] = None + ) -> "EffectorObjectContactPlanar": + return EffectorObjectContactPlanar( + self.obj.copy(), + self.eff.copy(), + self.weight.copy(), + self.sdf_origin.copy(), + self.sdf_data.copy(), + self.sdf_cell_size.copy(), + self.eff_radius.copy(), + name=new_name, + ) + + def dim(self) -> int: + return self.robot.dim() diff --git a/theseus/embodied/collision/signed_distance_field.py b/theseus/embodied/collision/signed_distance_field.py new file mode 100644 index 000000000..9af337c47 --- /dev/null +++ b/theseus/embodied/collision/signed_distance_field.py @@ -0,0 +1,184 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +from typing import Optional, Tuple + +import torch +from scipy import ndimage + +from theseus.core import Variable +from theseus.utils import gather_from_rows_cols + + +class SignedDistanceField2D: + # origin shape is: batch_size x 2 + # sdf_data shape is: batch_size x field_height x field_width + def __init__( + self, + origin: Variable, + cell_size: Variable, + sdf_data: Optional[Variable] = None, + occupancy_map: Optional[Variable] = None, + ): + if occupancy_map is not None: + if sdf_data is not None: + raise ValueError( + "Only one of arguments sdf_data and occupancy_map should be provided." + ) + sdf_data = self._compute_sdf_data_from_map(occupancy_map, cell_size) + else: + if sdf_data is None: + raise ValueError( + "Either argument sdf_data or argument occupancy_map should be provided." 
+ ) + self.update_data(origin, sdf_data, cell_size) + self._num_rows = sdf_data.shape[1] + self._num_cols = sdf_data.shape[2] + + def _compute_sdf_data_from_map( + self, occupancy_map_batch: Variable, cell_size: Variable + ) -> Variable: + # Code from https://github.com/gtrll/gpmp2/ + if occupancy_map_batch.ndim != 3: + raise ValueError( + "Argument occupancy_map to SignedDistanceField2D must be a batch of matrices." + ) + num_maps = occupancy_map_batch.data.size(0) + all_sdf_data = [] + + for i in range(num_maps): + occupancy_map = occupancy_map_batch[i] + + cur_map = occupancy_map > 0.75 + cur_map = cur_map.int() + + if torch.max(cur_map) == 0: + map_x, map_y = occupancy_map.size(0), occupancy_map.size(1) + max_map_size = 2 * cell_size[i].item() * max(map_x, map_y) + sdf_data = ( + torch.ones(occupancy_map.shape, dtype=cell_size.dtype) + * max_map_size + ) + else: + # inverse map + inv_map = 1 - cur_map + # get signed distance from map and inverse map + # since bwdist(foo) = ndimage.distance_transform_edt(1-foo) + map_dist = ndimage.distance_transform_edt(inv_map.numpy()) + inv_map_dist = ndimage.distance_transform_edt(cur_map.numpy()) + + sdf_data = map_dist - inv_map_dist + # metric + sdf_data = torch.tensor(sdf_data, dtype=cell_size.dtype) * cell_size[i] + + all_sdf_data.append(sdf_data) + + sdf_data_var = Variable(torch.stack(all_sdf_data)) + + return sdf_data_var + + def update_data(self, origin: Variable, sdf_data: Variable, cell_size: Variable): + if sdf_data.ndim != 3: + raise ValueError( + "Argument sdf_data to SignedDistanceField2D must be a batch of matrices." + ) + if not (origin.ndim == 2 or (origin.ndim == 3 and origin.shape[2] == 1)): + raise ValueError( + "Argument origin to SignedDistanceField2D must be a batch of 2-D tensors." + ) + if not ( + cell_size.ndim == 1 or (cell_size.ndim == 2 and cell_size.shape[1] == 1) + ): + raise ValueError( + "Argument cell_size must be a batch of 0-D or 1-D tensors." 
+ ) + if ( + origin.shape[0] != sdf_data.shape[0] + or origin.shape[0] != cell_size.shape[0] + ): + raise ValueError("Incompatible batch size between input arguments.") + self.origin = origin + self.sdf_data = sdf_data + self.cell_size = cell_size + + def convert_points_to_cell( + self, points: torch.Tensor + ) -> Tuple[torch.Tensor, torch.Tensor, torch.Tensor]: + origin = ( + self.origin.data.unsqueeze(-1) + if self.origin.ndim == 2 + else self.origin.data + ) + cell_size = ( + self.cell_size.data + if self.cell_size.ndim == 2 + else self.cell_size.data.unsqueeze(-1) + ) + px = points[:, 0] + py = points[:, 1] + + out_of_bounds_idx = ( + (px < origin[:, 0]) + .logical_or(px > (origin[:, 0] + (self._num_cols - 1.0) * cell_size)) + .logical_or(py < origin[:, 1]) + .logical_or(py > (origin[:, 1] + (self._num_rows - 1.0) * cell_size)) + ) + + col = (px - origin[:, 0]) / cell_size + row = (py - origin[:, 1]) / cell_size + return row, col, out_of_bounds_idx + + # Points shape must be (batch_size, 2, num_points) + # Computed distances for each point, shape (batch_size, num_points) + # Jacobian tensor, shape (batch_size, num_points, 2) + def signed_distance( + self, points: torch.Tensor + ) -> Tuple[torch.Tensor, torch.Tensor]: + rows, cols, out_of_bounds_idx = self.convert_points_to_cell(points) + + lr = torch.floor(rows) + lc = torch.floor(cols) + hr = lr + 1.0 + hc = lc + 1.0 + lri = lr.long().clamp(0, self._num_rows - 1) + lci = lc.long().clamp(0, self._num_cols - 1) + hri = hr.long().clamp(0, self._num_rows - 1) + hci = hc.long().clamp(0, self._num_cols - 1) + + def gather_sdf(r_, c_): + return gather_from_rows_cols(self.sdf_data.data, r_, c_) + + # Compute the distance + hrdiff = hr - rows + hcdiff = hc - cols + lrdiff = rows - lr + lcdiff = cols - lc + dist = ( + hrdiff * hcdiff * gather_sdf(lri, lci) + + lrdiff * hcdiff * gather_sdf(hri, lci) + + hrdiff * lcdiff * gather_sdf(lri, hci) + + lrdiff * lcdiff * gather_sdf(hri, hci) + ) + dist[out_of_bounds_idx] = 0 + + 
# Compute the jacobians + + cell_size = ( + self.cell_size.data + if self.cell_size.ndim == 2 + else self.cell_size.data.unsqueeze(-1) + ) + + jac1 = ( + hrdiff * (gather_sdf(lri, hci) - gather_sdf(lri, lci)) + + lrdiff * (gather_sdf(hri, hci) - gather_sdf(hri, lci)) + ) / cell_size + jac2 = ( + hcdiff * (gather_sdf(hri, lci) - gather_sdf(lri, lci)) + + lcdiff * (gather_sdf(hri, hci) - gather_sdf(lri, hci)) + ) / cell_size + jac1[out_of_bounds_idx] = 0 + jac2[out_of_bounds_idx] = 0 + return dist, torch.stack([jac1, jac2], dim=2) diff --git a/theseus/embodied/collision/tests/sdf_data.csv b/theseus/embodied/collision/tests/sdf_data.csv new file mode 100644 index 000000000..e99a74a68 --- /dev/null +++ b/theseus/embodied/collision/tests/sdf_data.csv @@ -0,0 +1,5 @@ +7.242617953772866324e-01,5.420863142275129665e-01,6.164356593150067676e-01,9.513976977985337813e-01,6.967734809070457169e-01,2.110625457620858292e-01,6.893651422463076273e-02,8.234228454321479429e-02,6.680288647193592944e-01,9.376783822325435169e-01,7.792607086309264464e-01,4.920495787814479494e-01,2.526882179835965303e-01,2.639737458005025905e-01,6.224870857593659546e-01,3.493470758073778937e-01,1.638660394677782239e-01,1.980630616768148666e-01,5.735859356397290387e-01,9.533208089228849547e-01,9.257653991555085948e-01,1.744396401695424981e-01,9.902844210735993746e-01,4.629933713748677349e-01,9.008162954774492359e-01,8.088952698850015999e-02,2.637905218879607050e-01,7.407237850077919017e-01,1.275483224898635859e-01,2.191777647224503189e-01,6.739352119887553894e-01,8.683214004136002862e-01,8.643800893798035956e-01,2.306816402738054439e-01,5.692462601805867717e-01,1.665852194843163714e-01,6.548826115237472711e-01,1.913600832197046575e-01,1.108807123245655335e-01,7.265101762149124287e-01,7.835292744481094918e-01,3.131608109695896980e-01,1.590734747743438593e-01,9.824988280597488632e-01,7.227634665451997398e-01,5.853913690039460604e-01,8.710702280596138714e-01,8.138361521013769417e-01,6.162587748587498027e-01,1.
192891142156408124e-01,8.479749404298145832e-01,8.596397155356584330e-01,1.348596175452652091e-01,1.439792364857082063e-01,5.055160119042401146e-01,6.425957221320953705e-01,7.699236945627607742e-01,8.447044958928316971e-01,7.241212073949554462e-02,5.107076353929290713e-01,7.262568398087436705e-01,9.808490026528465799e-01,9.711339673958702123e-01,5.728787430875725839e-02,1.239935376287525370e-01,3.309957923699636817e-01,2.363467066445490428e-01,3.644889205972723367e-01,2.719177804014771827e-01,5.495409191042585073e-01,9.774926014761960991e-01,9.965263429171724585e-01,9.076035921859225697e-01,1.702203227494216708e-01,6.093479116764753956e-01,9.783132602912589793e-01,2.329973623568807328e-01,8.078386653456822719e-01,3.512838067532725317e-01,4.140535091686015079e-01,6.152389966285385370e-01,8.261666731573544808e-01,6.668134987053371265e-01,8.776692268073761349e-01,7.431718875340043162e-02,3.266123818255290967e-01,1.133337518704518043e-01,2.414334631253297614e-01,6.703899038505659602e-01,1.953231429267899832e-01,8.166987653299357941e-01,4.665131036499412520e-01,7.528122844910511979e-01,7.866376910563011027e-01,5.690356883007741118e-02,3.444027461002241441e-02,6.993902170951071806e-01,2.584431152847634916e-01,6.613344058216602539e-01,7.290044838328421095e-01 
+2.242297566259743302e-01,6.838847235625579168e-01,7.096066944616399308e-01,8.926142678211678172e-01,9.081288196728281026e-01,5.614290559432092431e-01,9.923369178427866588e-01,8.184625308547754940e-01,3.032800771879086144e-01,3.496375125815492924e-01,9.590129249262541178e-01,8.738161951103520231e-01,6.412422127510374326e-01,1.785931539229854703e-01,8.002938613863666006e-01,5.775815381859028541e-01,3.386210201661654651e-02,6.714038189617490548e-01,9.692791097352837504e-01,5.690187980571842497e-01,3.682299948687988156e-01,3.224769689992739163e-01,7.652997053273635775e-01,1.211112496873744115e-01,6.590045587937202187e-01,4.871217755549089867e-01,4.300143269690204217e-01,4.638720787340948615e-01,8.525030235863523265e-01,5.531501912441626478e-01,3.245179222628953308e-01,7.062727704755908942e-01,8.825066821808681405e-01,3.999027748649726099e-01,5.686320961329432988e-01,7.616746941709381380e-01,1.351555326041475880e-02,2.837539025365156675e-01,7.682901015108434262e-01,7.554824968592288581e-01,8.778480741382673758e-01,9.860031933415062122e-01,9.469214270973769221e-02,8.486287091374334901e-01,5.227324912966139214e-01,5.552710215960255935e-01,6.270114653085988587e-01,9.916581111443992791e-01,3.288923010019840776e-01,2.464321712941222042e-01,3.318700435991538411e-01,6.518846518960426106e-01,6.453691421922151950e-01,7.090565132483003241e-01,6.817883584132089503e-02,3.496175226565875205e-01,5.054457247761242611e-01,3.990863389211837253e-01,6.991070941998900334e-02,1.165447266633649059e-01,2.097256845151180826e-01,9.484287270241679169e-01,6.740613900208205500e-01,7.840549981466499174e-01,6.211616715319188131e-01,5.127575176763180531e-01,3.122311699853993483e-01,8.555653867951180658e-01,8.843507380429611597e-01,5.068519215891253982e-01,5.909241884124012412e-01,4.302631142244059959e-01,5.340870565284758786e-01,9.793912466136375228e-01,8.332701576691321499e-01,7.275224861781306451e-01,3.723386296100226200e-01,9.907316083595644196e-01,2.902213524332543093e-01,3.507138360591699477e-02
,5.559125328035072755e-01,9.182143203484818272e-01,2.072135305671973793e-01,4.097929228785046618e-01,8.007092041082652933e-01,9.667288407861340804e-02,5.917266971025586519e-01,7.639907267609875596e-01,4.718136380932380636e-02,7.727980647988451635e-01,6.960711526048036779e-01,5.344910807359649629e-01,5.760524943747873383e-01,6.080614776081783024e-02,5.591149428846081681e-01,6.617682395999456846e-01,4.521516895126382884e-01,9.130392621426212107e-01,8.714059906619986418e-02,3.077458966666797302e-01 +4.502189289546081330e-01,4.940300106604776920e-01,9.899256660016931830e-01,3.057194868038761726e-01,6.840372124015140498e-01,3.303610642669997377e-01,1.605815080164901820e-01,9.614389173424638946e-01,5.213649355259681872e-01,6.965013159315084357e-01,1.332900411434233545e-01,5.346311163644248410e-01,4.999659911182513516e-01,9.894840919788872036e-01,4.094998805454016511e-01,2.523265373548114754e-01,3.621803269918678625e-01,3.734691081891960973e-01,3.917029948547826601e-01,6.581463199428178035e-01,7.664058614616539034e-01,6.058549480774155782e-02,5.807128299078933020e-01,6.264036307794521585e-01,6.811397667776012410e-01,9.857353801292085826e-01,6.805736846638803517e-01,3.728613634313261427e-01,9.867531186672080512e-01,7.506961358028704234e-01,7.604201227738406343e-01,3.992460368058037279e-01,1.981109484499921081e-01,9.273326924719595965e-01,5.135544354796167665e-01,2.074480687621336195e-01,5.198827542459167894e-01,9.294371988998583189e-01,1.446175251773212933e-01,1.417664919686941749e-01,7.916072661645743302e-01,5.336600429110402999e-01,6.905139413421812478e-02,4.652655623025504505e-01,6.549569306153493198e-01,4.293303274060020769e-01,1.417121605035839282e-01,7.795611225283284318e-01,6.810790612775933228e-01,5.793221655512248436e-01,7.177157534441449549e-01,9.194775485226424072e-01,4.686264195144911326e-01,2.085833614418121096e-01,5.158149323488022686e-01,3.682703963580568063e-01,7.514124642162844925e-02,7.660394958011857991e-01,3.331804690086752618e-01,5.460747760747873825e-0
1,2.159157142563375986e-01,5.357434608030940959e-01,6.388432442582282489e-01,6.695861285768376359e-01,2.305153582929140299e-01,5.415458810596169847e-01,6.762883145650263605e-01,2.655679508682543544e-01,5.527584442690395283e-01,6.285465754408446903e-01,4.362357981954053665e-01,5.086587617424053187e-01,1.460367691221240438e-01,6.117282232915181028e-01,7.258310165527606106e-01,5.559823658646212419e-02,7.359983000237112494e-01,1.271981537835531473e-01,2.107328918478513780e-01,7.240538837217006529e-01,8.117405187912419828e-01,9.596214281595449869e-01,6.099483356473155427e-01,2.699566559727818449e-01,1.119316332945766712e-01,4.473041573408774729e-01,4.551751458997304045e-01,7.946624949046560138e-01,8.516651673663583333e-01,3.480087254744036462e-01,1.630467987183674694e-01,1.241157606234932764e-02,4.025233883280084912e-01,8.386803656293407849e-01,2.147261296713871559e-01,5.042589380222395334e-03,1.434290859747522351e-01,6.916521759998084384e-02,6.850136298876667862e-01,2.547561707692588273e-01 
+2.301601792506119404e-01,9.318884566495397870e-01,8.217028520941754843e-01,4.374470808267663102e-01,2.704477358140390253e-01,3.853786084798371814e-02,4.112906755533088932e-01,2.741893288861977362e-01,9.892228431575935677e-01,2.753127132780874842e-02,1.303743791394518237e-02,4.606774261412213134e-01,7.615887425433620628e-02,1.655931545978173158e-01,5.571020939987234577e-01,7.072355189252992336e-01,6.286075085011066932e-02,5.800095042753692365e-01,2.543044788052446492e-01,9.848969190375811422e-04,3.460207215382610224e-01,3.305002810763224552e-01,7.821926053349514962e-02,5.851728687325324918e-01,5.809240504676644878e-01,2.350103777274396011e-01,2.776028231973902693e-01,9.207714207533442830e-01,4.432641902671852208e-02,5.582023934332535076e-01,8.128755463393273484e-01,8.823963630717793771e-01,1.518539686911026809e-01,3.812829143813064592e-01,4.179139412710684010e-01,9.318481792543842346e-02,5.598553187254915464e-01,6.695777899202747241e-01,9.757680455652028018e-01,2.452478481959654566e-01,2.693430627576844927e-01,4.439375375701238857e-01,9.721482123815674559e-01,2.099556450046446976e-02,5.975082855317471742e-01,6.303854906206000086e-01,2.262386364127275673e-01,3.276314593390963203e-01,8.825879361840456294e-01,7.914454115143287893e-01,4.795239743754157180e-01,2.808661543720766263e-01,7.278210125004526132e-01,5.316028269358669123e-01,6.599162276167824803e-01,6.222194874155938527e-01,7.763311105207245655e-01,2.341476374411661787e-01,2.682308144173420894e-01,3.889462145994232456e-01,4.548031861712094059e-01,8.228135665207817473e-01,9.771247385075233494e-01,9.251626310936026387e-01,5.814119148148600980e-01,5.918200121039925987e-01,7.136629427962304373e-02,7.799537792012743997e-01,2.712926624099020190e-01,8.996029082417030809e-01,8.621209421579486554e-01,2.184818608621819092e-01,8.363126594193300845e-01,9.985314328301061471e-01,9.925793334654757771e-01,6.752869699816196336e-01,2.414359948479614104e-01,7.840630916101067882e-01,1.031268770322602002e-02,8.443282836875497521e-01
,7.737239371970622281e-01,3.864833966060728931e-01,4.365806687771413008e-01,9.806097747938118392e-01,7.281181286000926178e-02,4.287774110436241015e-01,4.025167488905142532e-01,3.901232308379980473e-01,9.368555864619085849e-01,4.429099973916340582e-01,3.784172084606265418e-01,1.327251995710096999e-01,7.313966781093594882e-02,9.947478869257958545e-01,1.502356722279253320e-01,2.775114411618714305e-01,7.477522925857394975e-01,2.042461821794911980e-01,5.442628934988721356e-01,4.582667144650375279e-01 +4.873890214399004828e-01,6.968389659442970618e-01,4.106653019659228798e-01,9.937731874213612260e-01,4.179252573529819159e-01,7.877837125559521425e-01,9.037611447313179669e-01,8.109370051458667517e-01,9.760494724326505001e-01,4.745522769324304058e-01,2.201288315374891891e-01,1.577287785902176109e-01,4.923465805444393606e-01,5.746959836660657794e-01,4.827787825427665158e-01,7.634119159237391372e-01,6.578684013679538811e-01,6.610161304932172799e-01,8.056423134048379397e-01,5.896329048900457170e-01,7.854579596406990882e-01,3.835134220621727597e-01,3.424834749957188507e-01,3.434189576517625575e-01,1.075630662843481966e-01,2.775296991944390390e-01,8.993563373852179188e-01,2.734735757319443072e-01,4.269712099814876316e-01,8.414974588964709845e-01,9.655216745566668202e-01,4.093372663723515714e-01,5.842254993520935580e-01,7.981261202127556409e-01,4.989326250831908061e-01,4.679275356832229837e-01,2.731606972958956492e-01,4.223318328739730232e-01,5.612053663707898821e-01,9.121710242330443030e-01,2.019408163119407185e-03,9.441207151187747115e-01,3.434288009998659463e-01,5.132820974856509011e-01,4.289119629972332204e-01,8.718894287471327109e-01,8.121655576520210307e-01,3.248249489920647104e-01,8.565360074413913560e-01,3.378116892815056227e-01,2.345582160203846245e-01,9.039332221443256987e-01,6.661627582288512928e-01,6.682656624607866691e-01,7.150008954487733215e-01,3.630674285504482679e-01,6.411571473505894847e-02,1.047348786009109078e-01,7.836466577876068929e-01,3.824131181295553628e-0
2,2.973183338709434187e-01,8.113409469899578452e-01,3.300783065011025919e-03,6.316374705825645863e-01,4.620819633281786354e-01,7.647690290736138818e-01,5.411716904566818975e-01,9.095632293211556307e-01,3.397510846534343987e-01,8.241786789925356072e-01,1.660673054776069524e-01,5.835087955935673865e-01,6.013505463599211343e-01,1.237403073353537941e-01,3.582090560812564162e-01,2.993240649142179244e-01,4.191253401416916713e-01,3.154113312889127441e-01,4.293166988195616929e-01,9.113014669767303122e-01,2.564310418657156365e-02,5.641981275882210767e-01,1.083091978462576987e-01,3.137110960431676032e-01,3.547053651452664047e-01,9.207524384295223685e-01,4.104509864835069743e-01,9.779730492235478856e-01,9.422070014987190545e-01,5.946960072593270308e-01,6.137243265055400965e-01,6.921653957214635167e-01,3.890324514563414660e-01,9.345056707226486736e-01,6.769967340378213461e-01,5.408995359097653610e-01,6.896089021698508237e-01,4.235642995880810613e-01,8.132920159148109285e-01,1.369622645770934843e-01 diff --git a/theseus/embodied/collision/tests/test_collision_factor.py b/theseus/embodied/collision/tests/test_collision_factor.py new file mode 100644 index 000000000..8c42767ec --- /dev/null +++ b/theseus/embodied/collision/tests/test_collision_factor.py @@ -0,0 +1,108 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. 
+ +import pytest # noqa +import torch + +import theseus as th +from theseus.core.tests.common import ( + check_another_theseus_function_is_copy, + check_another_theseus_tensor_is_copy, + check_another_torch_tensor_is_copy, +) +from theseus.utils import numeric_jacobian + + +def test_collision2d_error_shapes(): + generator = torch.Generator() + generator.manual_seed(0) + cost_weight = th.ScaleCostWeight(1.0) + for batch_size in [1, 10, 100]: + for field_widht in [1, 10, 100]: + for field_height in [1, 10, 100]: + pose = th.Point2(data=torch.randn(batch_size, 2).double()) + origin = torch.randn(batch_size, 2) + sdf_data = torch.randn(batch_size, field_widht, field_height) + cell_size = torch.randn(batch_size, 1) + cost_function = th.eb.Collision2D( + pose, + cost_weight, + th.Variable(origin), + th.Variable(sdf_data), + th.Variable(cell_size), + th.Variable(torch.ones(1)), + name="cost_function", + ) + error = cost_function.error() + jacobians, jac_error = cost_function.jacobians() + assert error.shape == jac_error.shape + assert error.shape == (batch_size, 1) + assert jacobians[0].shape == (batch_size, 1, 2) + + +def test_collision2d_copy(): + batch_size = 20 + cost_weight = th.ScaleCostWeight(1.0) + pose = th.Point2(data=torch.randn(batch_size, 2).double()) + origin = torch.ones(batch_size, 2) + sdf_data = torch.ones(batch_size, 1, 1) + cell_size = torch.ones(batch_size, 1) + cost_function = th.eb.Collision2D( + pose, + cost_weight, + th.Variable(origin), + th.Variable(sdf_data), + th.Variable(cell_size), + th.Variable(torch.ones(1)), + name="name", + ) + cost_function2 = cost_function.copy(new_name="new_name") + assert cost_function is not cost_function2 + check_another_theseus_tensor_is_copy(cost_function2.pose, pose) + for attr_name in ["origin", "sdf_data", "cell_size"]: + check_another_theseus_tensor_is_copy( + getattr(cost_function.sdf, attr_name), + getattr(cost_function2.sdf, attr_name), + ) + check_another_theseus_function_is_copy( + cost_function.weight, 
+ cost_function2.weight, + new_name=f"{cost_function.weight.name}_copy", + ) + check_another_torch_tensor_is_copy( + cost_function.cost_eps.data, cost_function2.cost_eps.data + ) + assert cost_function2.name == "new_name" + + +def test_collision2d_jacobians(): + for _ in range(10): + for batch_size in [1, 10, 100, 1000]: + cost_weight = th.ScaleCostWeight(torch.ones(1).squeeze().double()) + pose = th.Point2(data=torch.randn(batch_size, 2).double()) + origin = th.Variable(torch.ones(batch_size, 2).double()) + sdf_data = th.Variable(torch.randn(batch_size, 10, 10).double()) + cell_size = th.Variable(torch.rand(batch_size, 1).double()) + cost_eps = th.Variable(torch.rand(1).double()) + cost_function = th.eb.Collision2D( + pose, cost_weight, origin, sdf_data, cell_size, cost_eps + ) + + def new_error_fn(vars): + new_cost_function = th.eb.Collision2D( + vars[0], cost_weight, origin, sdf_data, cell_size, cost_eps + ) + return th.Vector(data=new_cost_function.error()) + + expected_jacs = numeric_jacobian( + new_error_fn, + [pose], + function_dim=1, + delta_mag=1e-6, + ) + jacobians, error_jac = cost_function.jacobians() + error = cost_function.error() + assert torch.allclose(error_jac, error) + assert torch.allclose(jacobians[0], expected_jacs[0], atol=1e-6) diff --git a/theseus/embodied/collision/tests/test_eff_obj_contact.py b/theseus/embodied/collision/tests/test_eff_obj_contact.py new file mode 100644 index 000000000..fb692b731 --- /dev/null +++ b/theseus/embodied/collision/tests/test_eff_obj_contact.py @@ -0,0 +1,151 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. 
+ +import numpy as np +import pytest # noqa +import torch + +import theseus as th +from theseus.geometry.tests.test_se2 import create_random_se2 +from theseus.utils import numeric_jacobian + + +def test_eff_obj_interesect_jacobians(): + rng = torch.Generator() + rng.manual_seed(0) + for batch_size in [1, 10, 100]: + obj = create_random_se2(batch_size, rng) + eff = create_random_se2(batch_size, rng) + origin = th.Variable(torch.randn(batch_size, 2).double()) + sdf_data = th.Variable(torch.randn(batch_size, 10, 10).double()) + cell_size = th.Variable(torch.rand(batch_size, 1).double()) + eff_radius = th.Variable(torch.rand(batch_size, 1).double()) + cost_weight = th.ScaleCostWeight(1.0) + cost_function = th.eb.EffectorObjectContactPlanar( + obj, eff, cost_weight, origin, sdf_data, cell_size, eff_radius + ) + jacobians, _ = cost_function.jacobians() + + def new_error_fn(groups): + new_cost_function = th.eb.EffectorObjectContactPlanar( + groups[0], + groups[1], + cost_weight, + origin, + sdf_data, + cell_size, + eff_radius, + ) + return th.Vector(data=new_cost_function.error()) + + expected_jacs = numeric_jacobian( + new_error_fn, [obj, eff], function_dim=1, delta_mag=1e-6 + ) + + def _check_jacobian(actual_, expected_): + # This makes failures more explicit than torch.allclose() + diff = (expected_ - actual_).norm(p=float("inf")) + assert diff < 1e-5 + + for i in range(len(expected_jacs)): + _check_jacobian(jacobians[i], expected_jacs[i]) + + +def _load_sdf_data_from_file(filename): + + with open(filename) as f: + import json + + sdf_data = json.load(f) + sdf_data_vec = sdf_data["grid_data"] + + sdf_data_mat = np.zeros((sdf_data["grid_size_y"], sdf_data["grid_size_x"])) + for i in range(sdf_data_mat.shape[0]): + for j in range(sdf_data_mat.shape[1]): + sdf_data_mat[i, j] = sdf_data_vec[i][j] + + sdf_data_mat = torch.Tensor(sdf_data_mat).unsqueeze(0) + cell_size = torch.Tensor([sdf_data["grid_res"]]).unsqueeze(0) + origin = torch.Tensor( + [sdf_data["grid_origin_x"], 
sdf_data["grid_origin_y"]] + ).unsqueeze(0) + + return sdf_data_mat, cell_size, origin + + +def _create_sdf_data(sdf_idx=0): + + cell_size = 0.01 + origin_x, origin_y = 0.0, 0.0 + sdf_data = np.loadtxt( + open("theseus/embodied/collision/tests/sdf_data.csv", "rb"), + delimiter=",", + skiprows=0, + ) + sdf_data_mat = (sdf_data[sdf_idx, :]).reshape(10, 10) + + sdf_data_mat = torch.Tensor(sdf_data_mat).unsqueeze(0) + cell_size = torch.Tensor([cell_size]).unsqueeze(0) + origin = torch.Tensor([origin_x, origin_y]).unsqueeze(0) + + return sdf_data_mat, cell_size, origin + + +def test_eff_obj_interesect_errors(): + + eff_radius = torch.Tensor([0.0]) + cost_weight = th.ScaleCostWeight(1.0) + + inputs = { + "obj": torch.DoubleTensor( + [ + [0.0, 0.0, 0.0], + [0.0, 0.0, 0.0], + [0.0, 0.0, 0.0], + [0.02, 0.01, 0.0], + [0.02, 0.03, 0.0], + ] + ), + "eff": torch.DoubleTensor( + [ + [0.0, 0.0, 0.0], + [0.0, 0.0, np.pi / 2], + [0.01, 0.02, 0.0], + [0.01, 0.02, np.pi / 4], + [0.02, 0.06, 0.0], + ] + ), + } + outputs = { + "error": torch.DoubleTensor( + [ + [[0.72426180], [0.72426180], [0.17443964], [0.0], [0.67393521]], + [[0.22422976], [0.22422976], [0.32247697], [0.0], [0.32451792]], + [[0.45021893], [0.45021893], [0.06058549], [0.0], [0.76042012]], + [[0.23016018], [0.23016018], [0.33050028], [0.0], [0.81287555]], + [[0.48738902], [0.48738902], [0.38351342], [0.0], [0.96552167]], + ] + ) + } + + for sdf_idx in range(0, 5): + sdf_data, cell_size, origin = _create_sdf_data(sdf_idx) + + obj = th.SE2(x_y_theta=inputs["obj"]) + eff = th.SE2(x_y_theta=inputs["eff"]) + + cost_fn = th.eb.EffectorObjectContactPlanar( + obj, + eff, + cost_weight, + origin.repeat(5, 1), + sdf_data.repeat(5, 1, 1), + cell_size.repeat(5, 1), + eff_radius, + ) + + actual = cost_fn.error() + expected = outputs["error"][sdf_idx, :] + assert torch.allclose(actual, expected) diff --git a/theseus/embodied/collision/tests/test_signed_distance_field.py 
b/theseus/embodied/collision/tests/test_signed_distance_field.py new file mode 100644 index 000000000..84591abbf --- /dev/null +++ b/theseus/embodied/collision/tests/test_signed_distance_field.py @@ -0,0 +1,116 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +import pytest # noqa +import torch + +import theseus as th +from theseus.utils import numeric_jacobian + + +def test_sdf_2d_shapes(): + generator = torch.Generator() + generator.manual_seed(0) + for batch_size in [1, 10, 100]: + for field_width in [1, 10, 100]: + for field_height in [1, 10, 100]: + for num_points in [1, 10, 100]: + origin = torch.randn(batch_size, 2) + sdf_data = torch.randn(batch_size, field_width, field_height) + points = torch.randn(batch_size, 2, num_points) + cell_size = torch.randn(batch_size, 1) + sdf = th.eb.SignedDistanceField2D(origin, cell_size, sdf_data) + dist, jac = sdf.signed_distance(points) + assert dist.shape == (batch_size, num_points) + assert jac.shape == (batch_size, num_points, 2) + + +def test_signed_distance_2d(): + data = torch.tensor( + [ + [1.7321, 1.4142, 1.4142, 1.4142, 1.7321], + [1.4142, 1, 1, 1, 1.4142], + [1.4142, 1, 1, 1, 1.4142], + [1.4142, 1, 1, 1, 1.4142], + [1.7321, 1.4142, 1.4142, 1.4142, 1.7321], + ] + ).view(1, 5, 5) + sdf = th.eb.SignedDistanceField2D( + -0.2 * torch.ones(1, 2), 0.1 * torch.ones(1, 1), data + ) + + points = torch.tensor([[0, 0], [0.18, -0.17]]) + rows, cols, _ = sdf.convert_points_to_cell(points) + assert torch.allclose(rows, torch.tensor([[2.0, 0.3]])) + assert torch.allclose(cols, torch.tensor([[2.0, 3.8]])) + + dist, _ = sdf.signed_distance(points) + assert torch.allclose(dist, torch.tensor([1.0, 1.567372]).view(1, 2)) + + +def test_sdf_2d_creation(): + map1 = torch.tensor( + [ + [0, 1, 1, 1, 1], + [0, 0, 1, 1, 1], + [0, 1, 1, 1, 1], + [0, 1, 1, 1, 0], + [0, 1, 1, 0, 0], + ] + ) + map2 = 
torch.zeros(5, 5) + data_maps = th.Variable(torch.stack([map1, map2])) + sdf_batch = th.eb.SignedDistanceField2D( + -0.2 * torch.ones(2, 2), 0.1 * torch.ones(2, 1), occupancy_map=data_maps + ) + # generate verification data for map1 + import numpy as np + + s2, s5 = np.sqrt(2), np.sqrt(5) + sdf_map1_verify = 0.1 * torch.tensor( + [ + [1, -1, -s2, -s5, -3], + [s2, 1, -1, -2, -2], + [1, -1, -s2, -s2, -1], + [1, -1, -s2, -1, 1], + [1, -1, -1, 1, s2], + ] + ) + if sdf_batch.sdf_data.data.dtype == torch.float32: + sdf_map1_verify = sdf_map1_verify.float() + assert torch.allclose( + sdf_batch.sdf_data[0], sdf_map1_verify + ), "Failed conversion of map with obstacle." + assert torch.allclose( + sdf_batch.sdf_data[1], torch.tensor(1.0) + ), "Failed conversion of map with no obstacle." + + +def test_signed_distance_2d_jacobian(): + for batch_size in [1, 10, 100]: + origin = th.Variable(torch.randn(batch_size, 2).double()) + sdf_data = th.Variable(torch.randn(batch_size, 10, 10).double()) + cell_size = th.Variable(torch.rand(batch_size, 1).double()) + sdf = th.eb.SignedDistanceField2D(origin, cell_size, sdf_data) + for num_points in [1, 10]: + points = torch.randn(batch_size, 2, num_points).double() + _, jacobian = sdf.signed_distance(points) + + for p_index in range(num_points): + x = th.Vector(data=points[:, :1, p_index].double()) + y = th.Vector(data=points[:, 1:, p_index].double()) + + def new_distance_fn(vars): + new_points = torch.stack([vars[0].data, vars[1].data], dim=1) + new_dist = sdf.signed_distance(new_points)[0] + return th.Vector(data=new_dist) + + expected_jacs = numeric_jacobian( + new_distance_fn, [x, y], function_dim=1, delta_mag=1e-7 + ) + expected_jacobian = torch.cat(expected_jacs, dim=2).squeeze(1) + # This makes failures more explicit than torch.allclose() + diff = (expected_jacobian - jacobian[:, p_index]).norm(p=float("inf")) + assert diff < 1e-5 diff --git a/theseus/embodied/kinematics/__init__.py b/theseus/embodied/kinematics/__init__.py new file 
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.

import abc

import torch

from theseus.geometry import SE2, LieGroup, Point2, Vector


class RobotModel(abc.ABC):
    """Abstract interface mapping a robot pose to workspace point(s) as a tensor."""

    def __init__(self):
        pass

    @abc.abstractmethod
    def forward_kinematics(self, robot_pose: LieGroup) -> torch.Tensor:
        """Returns the robot's workspace point(s) for the given pose."""
        pass

    @abc.abstractmethod
    def dim(self) -> int:
        """Returns the number of points produced by this model."""
        pass


class IdentityModel(RobotModel):
    """Trivial kinematic model: the robot is represented by a single planar point.

    Supports SE2 poses (translation part is used) and 2-dof Point2/Vector poses
    (used as-is). Output shape is (batch_size, 2, 1).
    """

    def __init__(self):
        super().__init__()

    def forward_kinematics(self, robot_pose: LieGroup) -> torch.Tensor:
        # SE2 poses contribute only their translation component.
        if isinstance(robot_pose, SE2):
            return robot_pose.translation.data.view(-1, 2, 1)
        # Point2 is a Vector subclass; either way a 2-dof value is expected.
        if isinstance(robot_pose, (Point2, Vector)):
            assert robot_pose.dof() == 2
            return robot_pose.data.view(-1, 2, 1)
        raise NotImplementedError(
            f"IdentityModel not implemented for pose with type {type(robot_pose)}."
        )

    def dim(self) -> int:
        # A single point per pose.
        return 1
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.

from typing import List, Optional, Tuple

import torch

from theseus.core import CostFunction, CostWeight
from theseus.geometry import LieGroup, between


class Between(CostFunction):
    """Relative measurement cost between two variables of the same Lie group.

    The error is the tangent-space difference between the measured relative
    transform and the current relative transform ``between(v0, v1)``.

    Args:
        v0: first optimization variable.
        v1: second optimization variable (same group type as ``v0``).
        cost_weight: weight applied to the error.
        measurement: measured relative transform (same group type as ``v0``).
        name: optional name for this cost function.

    Raises:
        ValueError: if ``v0``, ``v1`` and ``measurement`` are not all of the
            same type.
    """

    def __init__(
        self,
        v0: LieGroup,
        v1: LieGroup,
        cost_weight: CostWeight,
        measurement: LieGroup,
        name: Optional[str] = None,
    ):
        # Validate before registering anything, so a bad call cannot leave a
        # partially-initialized cost function behind. (Previously this check
        # ran only after the optim/aux variables had already been registered.)
        if not isinstance(v0, v1.__class__) or not isinstance(
            v0, measurement.__class__
        ):
            raise ValueError("Inconsistent types between variables and measurement.")
        super().__init__(cost_weight, name=name)
        self.v0 = v0
        self.v1 = v1
        self.register_optim_vars(["v0", "v1"])
        self.measurement = measurement
        self.register_aux_vars(["measurement"])

    def error(self) -> torch.Tensor:
        # Deviation of the current relative transform from the measurement,
        # expressed in the measurement's tangent space.
        var_diff = between(self.v0, self.v1)
        return self.measurement.local(var_diff)

    def jacobians(self) -> Tuple[List[torch.Tensor], torch.Tensor]:
        # `between` fills Jlist with the jacobians of the relative transform
        # w.r.t. v0 and v1; the error is computed alongside to avoid recomputing.
        Jlist: List[torch.Tensor] = []
        var_diff = between(self.v0, self.v1, jacobians=Jlist)
        error = self.measurement.local(var_diff)
        return Jlist, error

    def dim(self) -> int:
        return self.v0.dof()

    def _copy_impl(self, new_name: Optional[str] = None) -> "Between":
        return Between(
            self.v0.copy(),
            self.v1.copy(),
            self.weight.copy(),
            self.measurement.copy(),
            name=new_name,
        )
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.

from typing import List, Optional, Tuple

import torch

from theseus.core import CostFunction, CostWeight
from theseus.geometry import LieGroup, between


class MovingFrameBetween(CostFunction):
    """Relative measurement between two poses, each expressed in its own frame.

    The error compares the measurement against the relative transform between
    ``pose1`` (in ``frame1``) and ``pose2`` (in ``frame2``), i.e. the "between"
    of the two frame-local poses.
    """

    def __init__(
        self,
        frame1: LieGroup,
        frame2: LieGroup,
        pose1: LieGroup,
        pose2: LieGroup,
        cost_weight: CostWeight,
        measurement: LieGroup,
        name: Optional[str] = None,
    ):
        # All five group-valued inputs must share a single concrete type.
        variables = (frame1, frame2, pose1, pose2, measurement)
        if len({type(v).__name__ for v in variables}) > 1:
            raise ValueError("Inconsistent types between input variables.")

        super().__init__(cost_weight, name=name)
        self.frame1 = frame1
        self.frame2 = frame2
        self.pose1 = pose1
        self.pose2 = pose2
        self.register_optim_vars(["frame1", "frame2", "pose1", "pose2"])
        self.measurement = measurement
        self.register_aux_vars(["measurement"])

    def error(self) -> torch.Tensor:
        # Express each pose in its own frame, then take their relative transform.
        rel1 = between(self.frame1, self.pose1)
        rel2 = between(self.frame2, self.pose2)
        diff = between(rel1, rel2)
        return self.measurement.local(diff)

    def jacobians(self) -> Tuple[List[torch.Tensor], torch.Tensor]:
        # Inner "between" jacobians: [d rel / d frame, d rel / d pose].
        J_rel1: List[torch.Tensor] = []
        rel1 = between(self.frame1, self.pose1, jacobians=J_rel1)
        J_rel2: List[torch.Tensor] = []
        rel2 = between(self.frame2, self.pose2, jacobians=J_rel2)
        # Outer "between" jacobians: [d diff / d rel1, d diff / d rel2].
        J_diff: List[torch.Tensor] = []
        diff = between(rel1, rel2, jacobians=J_diff)
        err = self.measurement.local(diff)

        J_diff_rel1, J_diff_rel2 = J_diff
        # Chain rule through the nested "between" calls.
        J_f1 = torch.matmul(J_diff_rel1, J_rel1[0])
        J_p1 = torch.matmul(J_diff_rel1, J_rel1[1])
        J_f2 = torch.matmul(J_diff_rel2, J_rel2[0])
        J_p2 = torch.matmul(J_diff_rel2, J_rel2[1])

        # Order matches the registration order of the optim vars.
        return [J_f1, J_f2, J_p1, J_p2], err

    def dim(self) -> int:
        return self.frame1.dof()

    def _copy_impl(self, new_name: Optional[str] = None) -> "MovingFrameBetween":
        return MovingFrameBetween(
            self.frame1.copy(),
            self.frame2.copy(),
            self.pose1.copy(),
            self.pose2.copy(),
            self.weight.copy(),
            self.measurement.copy(),
            name=new_name,
        )
import numpy as np
import torch

import theseus as th
from theseus.core.tests.common import (
    check_another_theseus_function_is_copy,
    check_another_theseus_tensor_is_copy,
)
from theseus.geometry.tests.test_se2 import create_random_se2
from theseus.utils import numeric_jacobian


def test_copy_between():
    # Copying must deep-copy both optim vars, the measurement, and the weight.
    a = th.SE2()
    b = th.SE2()
    meas = th.SE2()
    original = th.eb.Between(a, b, th.ScaleCostWeight(1.0), meas, name="name")
    duplicate = original.copy(new_name="new_name")
    check_another_theseus_function_is_copy(original, duplicate, new_name="new_name")
    for copied, source in ((duplicate.v0, a), (duplicate.v1, b), (duplicate.measurement, meas)):
        check_another_theseus_tensor_is_copy(copied, source)
    check_another_theseus_function_is_copy(
        original.weight, duplicate.weight, new_name=f"{original.weight.name}_copy"
    )
    assert duplicate.name == "new_name"


def test_jacobian_between():
    # Analytic jacobians must agree with numeric differentiation on random SE2s.
    gen = torch.Generator()
    gen.manual_seed(0)
    weight = th.ScaleCostWeight(1)
    for n in (1, 10, 100):
        g0 = create_random_se2(n, gen)
        g1 = create_random_se2(n, gen)
        meas = create_random_se2(n, gen)
        cf = th.eb.Between(g0, g1, weight, meas)

        def wrapped_error(groups):
            inner = th.eb.Between(groups[0], groups[1], weight, meas)
            return inner.measurement.retract(inner.error())

        numeric = numeric_jacobian(wrapped_error, [g0, g1])
        analytic, err_from_jac = cf.jacobians()
        assert torch.allclose(err_from_jac, cf.error())
        for got, want in zip(analytic, numeric):
            assert torch.allclose(got, want, atol=1e-8)


def test_error_between_point2():
    # For Point2, the "between" error reduces to (p2 - p1) - measurement.
    gen = torch.Generator()
    gen.manual_seed(0)
    weight = th.ScaleCostWeight(1)
    for n in (1, 10, 100):
        a = th.Point2(torch.randn(n, 2, generator=gen))
        b = th.Point2(torch.randn(n, 2, generator=gen))
        meas = th.Point2(torch.randn(n, 2, generator=gen))
        cf = th.eb.Between(a, b, weight, meas)
        want = (b - a) - meas
        assert torch.allclose(want.data, cf.error().data)


def test_error_between_so2():
    angles = torch.DoubleTensor([-np.pi, -np.pi / 2, 0.0, np.pi / 2, np.pi]).unsqueeze(1)
    expected = np.load("theseus/embodied/measurements/tests/between_errors_so2.npy")
    # `expected` was generated from all ordered 3-tuples of the 5 angles above,
    # in the same nesting order as the loops below.
    idx = 0
    count = len(angles)
    for a in range(count):
        for b in range(count):
            for c in range(count):
                meas = th.SO2(theta=angles[a, :1])
                rot1 = th.SO2(theta=angles[b, :1])
                rot2 = th.SO2(theta=angles[c, :1])
                cf = th.eb.Between(rot1, rot2, th.ScaleCostWeight(1.0), measurement=meas)
                assert np.allclose(cf.error().item(), expected[idx])
                idx += 1


def test_error_between_se2():
    poses = torch.DoubleTensor(
        [
            [-1.1, 0.0, -np.pi],
            [0.0, 1.1, -np.pi / 2],
            [1.1, -1.1, 0.0],
            [0.0, 0.0, np.pi / 2],
            [-1.1, 1.1, np.pi],
        ]
    )
    expected = np.load("theseus/embodied/measurements/tests/between_errors_se2.npy")
    # `expected` was generated from all ordered 3-tuples of the 5 poses above,
    # in the same nesting order as the loops below.
    idx = 0
    count = len(poses)
    for a in range(count):
        for b in range(count):
            for c in range(count):
                meas = th.SE2(x_y_theta=poses[a, :].unsqueeze(0))
                se2_1 = th.SE2(x_y_theta=poses[b, :].unsqueeze(0))
                se2_2 = th.SE2(x_y_theta=poses[c, :].unsqueeze(0))
                cf = th.eb.Between(se2_1, se2_2, th.ScaleCostWeight(1.0), measurement=meas)
                assert np.allclose(cf.error().squeeze().numpy(), expected[idx])
                idx += 1
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.

import numpy as np
import torch

import theseus.core as thcore
import theseus.embodied as thembod
import theseus.geometry as thgeom
from theseus.core.tests.common import (
    check_another_theseus_function_is_copy,
    check_another_theseus_tensor_is_copy,
)
from theseus.geometry.tests.test_se2 import create_random_se2
from theseus.utils import numeric_jacobian


def test_copy_moving_frame_between():
    # Copying must deep-copy all four optim vars, the measurement, and the weight.
    frame_a = thgeom.SE2()
    frame_b = thgeom.SE2()
    pose_a = thgeom.SE2()
    pose_b = thgeom.SE2()
    meas = thgeom.SE2()
    original = thembod.MovingFrameBetween(
        frame_a, frame_b, pose_a, pose_b, thcore.ScaleCostWeight(1.0), meas, name="name"
    )
    duplicate = original.copy(new_name="new_name")
    check_another_theseus_function_is_copy(original, duplicate, new_name="new_name")
    for copied, source in (
        (duplicate.frame1, frame_a),
        (duplicate.frame2, frame_b),
        (duplicate.pose1, pose_a),
        (duplicate.pose2, pose_b),
        (duplicate.measurement, meas),
    ):
        check_another_theseus_tensor_is_copy(copied, source)
    check_another_theseus_function_is_copy(
        original.weight, duplicate.weight, new_name=f"{original.weight.name}_copy"
    )
    assert duplicate.name == "new_name"


def test_jacobian_moving_frame_between():
    # Analytic jacobians must agree with numeric differentiation on random SE2s.
    gen = torch.Generator()
    gen.manual_seed(0)
    weight = thcore.ScaleCostWeight(1)
    for n in (1, 10, 100):
        frames_and_poses = [create_random_se2(n, gen) for _ in range(4)]
        meas = create_random_se2(n, gen)
        cf = thembod.MovingFrameBetween(*frames_and_poses, weight, meas)

        def wrapped_error(groups):
            inner = thembod.MovingFrameBetween(
                groups[0], groups[1], groups[2], groups[3], weight, meas
            )
            return inner.measurement.retract(inner.error())

        numeric = numeric_jacobian(wrapped_error, frames_and_poses)
        analytic, err_from_jac = cf.jacobians()
        assert torch.allclose(err_from_jac, cf.error())
        for got, want in zip(analytic, numeric):
            assert torch.allclose(got, want, atol=1e-8)


def test_error_moving_frame_between_se2():
    # Hand-computed error values for a fixed measurement and a small table of
    # frame/pose combinations (one test case per table row).
    measurement = thgeom.SE2(
        x_y_theta=torch.DoubleTensor([0.0, 0.0, np.pi / 6]).unsqueeze(0)
    )
    weight = thcore.ScaleCostWeight(1)

    inputs = {
        "f1": torch.DoubleTensor(
            [
                [0.0, 0.0, 0.0],
                [0.0, 0.0, 0.0],
                [0.0, 0.0, 0.0],
                [0.0, 0.0, 0.0],
                [1.0, 1.0, np.pi / 2],
            ]
        ),
        "f2": torch.DoubleTensor(
            [
                [0.0, 0.0, 0.0],
                [1.0, 1.0, np.pi / 2],
                [1.0, 1.0, np.pi / 4],
                [1.0, 1.0, np.pi / 4],
                [0.0, 0.0, 0.0],
            ]
        ),
        "p1": torch.DoubleTensor(
            [
                [0.0, 0.0, 0.0],
                [0.0, 0.0, np.pi / 2],
                [1.0, 1.0, np.pi / 4],
                [2.0, 2.0, np.pi / 4],
                [2.0, 2.0, np.pi / 4],
            ]
        ),
        "p2": torch.DoubleTensor(
            [
                [0.0, 0.0, 0.0],
                [1.0, 1.0, np.pi],
                [2.0, 2.0, np.pi / 2],
                [3.0, 3.0, np.pi / 2],
                [3.0, 3.0, np.pi / 2],
            ]
        ),
    }

    expected_errors = torch.DoubleTensor(
        [
            [0.0, 0.0, -0.52359878],
            [0.0, 0.0, -0.52359878],
            [-0.66650618, -0.86860776, -0.52359878],
            [-1.33301235, -1.73721552, -0.52359878],
            [4.64499601, 2.25899128, 1.83259571],
        ]
    )

    for row in range(expected_errors.shape[0]):
        frame_a = thgeom.SE2(x_y_theta=(inputs["f1"][row, :]).unsqueeze(0))
        frame_b = thgeom.SE2(x_y_theta=(inputs["f2"][row, :]).unsqueeze(0))
        pose_a = thgeom.SE2(x_y_theta=(inputs["p1"][row, :]).unsqueeze(0))
        pose_b = thgeom.SE2(x_y_theta=(inputs["p2"][row, :]).unsqueeze(0))

        cf = thembod.MovingFrameBetween(
            frame_a, frame_b, pose_a, pose_b, weight, measurement
        )
        got = cf.error()
        want = expected_errors[row, :]
        assert np.allclose(got.squeeze().numpy(), want.squeeze().numpy())
import numpy as np
import torch

import theseus as th
from theseus.core.tests.common import (
    check_another_theseus_function_is_copy,
    check_another_theseus_tensor_is_copy,
)
from theseus.geometry.tests.test_se2 import create_random_se2
from theseus.utils import numeric_jacobian


def test_copy_variable_difference():
    # Copying must deep-copy the optim var, the target, and the cost weight.
    var = th.Vector(data=torch.zeros(1, 1))
    goal = th.Vector(data=torch.ones(1, 1))
    original = th.eb.VariableDifference(var, th.ScaleCostWeight(1.0), goal, name="name")
    duplicate = original.copy(new_name="new_name")
    check_another_theseus_function_is_copy(original, duplicate, new_name="new_name")
    check_another_theseus_tensor_is_copy(duplicate.var, var)
    check_another_theseus_tensor_is_copy(duplicate.target, goal)
    check_another_theseus_function_is_copy(
        original.weight, duplicate.weight, new_name=f"{original.weight.name}_copy"
    )
    assert duplicate.name == "new_name"


def test_jacobian_variable_difference():
    # Analytic jacobians must agree with numeric differentiation on random SE2s.
    gen = torch.Generator()
    gen.manual_seed(0)
    weight = th.ScaleCostWeight(1)
    for n in (1, 10, 100):
        pose = create_random_se2(n, gen)
        goal = create_random_se2(n, gen)
        cf = th.eb.VariableDifference(pose, weight, goal)

        def wrapped_error(groups):
            inner = th.eb.VariableDifference(groups[0], weight, goal)
            return inner.target.retract(inner.error())

        numeric = numeric_jacobian(wrapped_error, [pose])
        analytic, err_from_jac = cf.jacobians()
        assert torch.allclose(err_from_jac, cf.error())
        assert torch.allclose(analytic[0], numeric[0], atol=1e-8)


def test_error_variable_difference_point2():
    # For Point2, the error reduces to var - target.
    gen = torch.Generator()
    gen.manual_seed(0)
    weight = th.ScaleCostWeight(1)
    for n in (1, 10, 100):
        pt = th.Point2(torch.randn(n, 2, generator=gen))
        goal = th.Point2(torch.randn(n, 2, generator=gen))
        cf = th.eb.VariableDifference(pt, weight, goal)
        want = pt - goal
        assert torch.allclose(want.data, cf.error().data)


def test_error_variable_difference_so2():
    angles = torch.DoubleTensor([-np.pi, -np.pi / 2, 0.0, np.pi / 2, np.pi]).unsqueeze(1)
    expected = np.load("theseus/embodied/misc/tests/sq_dist_errors_so2.npy")
    # `expected` was generated from all ordered pairs of the 5 angles above,
    # in the same nesting order as the loops below.
    idx = 0
    count = len(angles)
    for a in range(count):
        for b in range(count):
            goal = th.SO2(theta=angles[a, :1])
            rot = th.SO2(theta=angles[b, :1])
            cf = th.eb.VariableDifference(rot, th.ScaleCostWeight(1.0), target=goal)
            assert np.allclose(cf.error().squeeze().item(), expected[idx])
            idx += 1


def test_error_variable_difference_se2():
    poses = torch.DoubleTensor(
        [
            [-1.1, 0.0, -np.pi],
            [0.0, 1.1, -np.pi / 2],
            [1.1, -1.1, 0.0],
            [0.0, 0.0, np.pi / 2],
            [-1.1, 1.1, np.pi],
        ]
    )
    expected = np.load("theseus/embodied/misc/tests/sq_dist_errors_se2.npy")
    # `expected` was generated from all ordered pairs of the 5 poses above,
    # in the same nesting order as the loops below.
    idx = 0
    count = len(poses)
    for a in range(count):
        for b in range(count):
            goal = th.SE2(x_y_theta=poses[a, :].unsqueeze(0))
            pose = th.SE2(x_y_theta=poses[b, :].unsqueeze(0))
            cf = th.eb.VariableDifference(pose, th.ScaleCostWeight(1.0), target=goal)
            assert np.allclose(cf.error().squeeze().numpy(), expected[idx])
            idx += 1
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.

from typing import List, Optional, Tuple

import torch

from theseus import CostFunction, CostWeight
from theseus.geometry import LieGroup


class VariableDifference(CostFunction):
    """Cost penalizing the difference between a variable and a fixed target.

    The error is ``target.local(var)``, i.e. the tangent-space difference
    between the optimization variable and the (auxiliary, non-optimized)
    target value.

    Args:
        var: the optimization variable.
        cost_weight: weight applied to the error.
        target: target value (same type and dof as ``var``).
        name: optional name for this cost function.

    Raises:
        ValueError: if ``var`` and ``target`` differ in type or dof.
    """

    def __init__(
        self,
        var: LieGroup,
        cost_weight: CostWeight,
        target: LieGroup,
        name: Optional[str] = None,
    ):
        super().__init__(cost_weight, name=name)
        if not isinstance(var, target.__class__):
            raise ValueError(
                "Variable for the VariableDifference inconsistent with the given target."
            )
        if not var.dof() == target.dof():
            raise ValueError(
                "Variable and target in the VariableDifference must have identical dof."
            )
        self.var = var
        self.target = target
        self.register_optim_vars(["var"])
        self.register_aux_vars(["target"])

    def error(self) -> torch.Tensor:
        return self.target.local(self.var)

    def jacobians(self) -> Tuple[List[torch.Tensor], torch.Tensor]:
        # Construct the identity directly with the variable's dtype and device,
        # instead of allocating on CPU and then transferring with `.to(...)`.
        # NOTE(review): the identity jacobian assumes d(target.local(var))/d var
        # is the identity under theseus's tangent-space conventions; the numeric
        # jacobian test in this package is consistent with that — confirm for
        # any newly added group types.
        return [
            torch.eye(self.dim(), dtype=self.var.dtype, device=self.var.device).repeat(
                self.var.shape[0], 1, 1
            )
        ], self.error()

    def dim(self) -> int:
        return self.var.dof()

    def _copy_impl(self, new_name: Optional[str] = None) -> "VariableDifference":
        return VariableDifference(  # type: ignore
            self.var.copy(), self.weight.copy(), self.target.copy(), name=new_name
        )
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.

from typing import List, Optional, Tuple, Union, cast

import torch

from theseus.core import CostFunction, CostWeight, Variable
from theseus.geometry import LieGroup, Vector


class DoubleIntegrator(CostFunction):
    """Constant-velocity (double-integrator) motion model cost.

    The error stacks two dof-sized residuals:
      - position residual: ``pose1.local(pose2) - dt * vel1``
      - velocity residual: ``vel2 - vel1``

    Args:
        pose1: pose at the earlier time step.
        vel1: velocity at the earlier time step (same dof as ``pose1``).
        pose2: pose at the later time step (same dof as ``pose1``).
        vel2: velocity at the later time step (same dof as ``pose1``).
        dt: time step; its data must be a 0-D or 1-D tensor with numel in
            {1, batch_size}.
        cost_weight: weight applied to the error.
        name: optional name for this cost function.

    Raises:
        ValueError: if the variables' dof disagree or ``dt`` has a bad shape.
    """

    def __init__(
        self,
        pose1: LieGroup,
        vel1: Vector,
        pose2: LieGroup,
        vel2: Vector,
        dt: Variable,
        cost_weight: CostWeight,
        name: Optional[str] = None,
    ):
        super().__init__(cost_weight, name=name)
        dof = pose1.dof()
        if not (vel1.dof() == pose2.dof() == vel2.dof() == dof):
            raise ValueError(
                "All variables for a DoubleIntegrator must have the same dimension."
            )
        if dt.data.squeeze().ndim > 1:
            raise ValueError(
                "dt data must be a 0-D or 1-D tensor with numel in {1, batch_size}."
            )
        self.pose1 = pose1
        self.vel1 = vel1
        self.pose2 = pose2
        self.vel2 = vel2
        self.register_optim_vars(["pose1", "vel1", "pose2", "vel2"])
        self.dt = dt
        self.register_aux_vars(["dt"])
        # NOTE: CostFunction.__init__ already stores cost_weight as self.weight
        # (sibling cost functions such as Between rely on that), so the previous
        # redundant `self.weight = cost_weight` assignment was removed.

    def dim(self) -> int:
        # Position residual + velocity residual.
        return 2 * self.pose1.dof()

    def _new_pose_diff(
        self, jacobians: Optional[List[torch.Tensor]] = None
    ) -> torch.Tensor:
        # Tangent-space difference between consecutive poses.
        return self.pose1.local(self.pose2, jacobians=jacobians)

    def _error_from_pose_diff(self, pose_diff: torch.Tensor) -> torch.Tensor:
        # Position residual uses the constant-velocity prediction dt * vel1.
        pose_diff_err = pose_diff - self.dt.data.view(-1, 1) * self.vel1.data
        vel_diff = self.vel2.data - self.vel1.data
        return torch.cat([pose_diff_err, vel_diff], dim=1)

    def error(self) -> torch.Tensor:
        return self._error_from_pose_diff(self._new_pose_diff())

    def jacobians(self) -> Tuple[List[torch.Tensor], torch.Tensor]:
        # Pre-allocate jacobian tensors of shape (batch, 2*dof, dof).
        batch_size = self.pose1.shape[0]
        dof = self.pose1.dof()
        dtype = self.pose1.dtype
        device = self.pose1.device
        Jerr_pose1 = torch.zeros(batch_size, 2 * dof, dof, dtype=dtype, device=device)
        Jerr_vel1 = torch.zeros_like(Jerr_pose1)
        Jerr_pose2 = torch.zeros_like(Jerr_pose1)
        Jerr_vel2 = torch.zeros_like(Jerr_pose1)

        # Jlocal holds d(pose1.local(pose2)) / d pose1 and / d pose2.
        Jlocal: List[torch.Tensor] = []
        error = self._error_from_pose_diff(self._new_pose_diff(Jlocal))
        Jerr_pose1[:, :dof, :] = Jlocal[0]
        identity = torch.eye(dof, dtype=dtype, device=device).repeat(batch_size, 1, 1)
        # vel1 enters the position residual scaled by -dt and the velocity
        # residual with -I; vel2 enters only the velocity residual with +I.
        Jerr_vel1[:, :dof, :] = -self.dt.data.view(-1, 1, 1) * identity
        Jerr_vel1[:, dof:, :] = -identity
        Jerr_pose2[:, :dof, :] = Jlocal[1]
        Jerr_vel2[:, dof:, :] = identity
        return [Jerr_pose1, Jerr_vel1, Jerr_pose2, Jerr_vel2], error

    def _copy_impl(self, new_name: Optional[str] = None) -> "DoubleIntegrator":
        return DoubleIntegrator(
            self.pose1.copy(),
            self.vel1.copy(),
            self.pose2.copy(),
            self.vel2.copy(),
            self.dt.copy(),
            self.weight.copy(),
            name=new_name,
        )


class GPCostWeight(CostWeight):
    """Gaussian-process motion-prior weight (upper Cholesky factor of Q^-1).

    Qc_inv is either a single square matrix or a batch of square matrices;
    dt is either a single element or a 1-D batch.
    """

    def __init__(
        self,
        Qc_inv: Union[Variable, torch.Tensor],
        dt: Union[Variable, torch.Tensor],
        name: Optional[str] = None,
    ):
        super().__init__(name=name)
        if not isinstance(dt, Variable):
            dt = Variable(dt)
        if dt.data.squeeze().ndim > 1:
            raise ValueError("dt must be a 0-D or 1-D tensor.")
        self.dt = dt
        # Reshape so dt broadcasts against (batch, dof, dof) matrices.
        self.dt.data = self.dt.data.view(-1, 1, 1)

        if not isinstance(Qc_inv, Variable):
            Qc_inv = Variable(Qc_inv)
        if Qc_inv.ndim not in [2, 3]:
            raise ValueError("Qc_inv must be a single matrix or a batch of matrices.")
        if not Qc_inv.shape[-2] == Qc_inv.shape[-1]:
            raise ValueError("Qc_inv must contain square matrices.")
        self.Qc_inv = Qc_inv
        # Normalize to a batched (1, dof, dof) representation.
        self.Qc_inv.data = Qc_inv.data if Qc_inv.ndim == 3 else Qc_inv.data.unsqueeze(0)
        self.register_aux_vars(["Qc_inv", "dt"])

    def _compute_cost_weight(self) -> torch.Tensor:
        # Assemble the 2x2 block GP precision matrix
        #   [12/dt^3 * Qc_inv   -6/dt^2 * Qc_inv]
        #   [-6/dt^2 * Qc_inv    4/dt   * Qc_inv]
        # then return its upper-triangular Cholesky-like factor.
        batch_size, dof, _ = self.Qc_inv.shape
        cost_weight = torch.empty(
            batch_size,
            2 * dof,
            2 * dof,
            dtype=self.Qc_inv.dtype,
            device=self.Qc_inv.device,
        )
        Q11 = 12.0 * self.dt.data.pow(-3.0) * self.Qc_inv.data
        Q12 = -6.0 * self.dt.data.pow(-2.0) * self.Qc_inv.data
        Q22 = 4.0 * self.dt.data.reciprocal() * self.Qc_inv.data
        cost_weight[:, :dof, :dof] = Q11
        # (fixed stray double-colon slice `[:, :dof:, dof:]` — same semantics,
        # but the extra colon read like a typo)
        cost_weight[:, :dof, dof:] = Q12
        cost_weight[:, dof:, :dof] = Q12
        cost_weight[:, dof:, dof:] = Q22
        # cholesky(A^H)^H yields an upper-triangular factor of A.
        return (
            torch.linalg.cholesky(cost_weight.transpose(-2, -1).conj())
            .transpose(-2, -1)
            .conj()
        )

    def weight_error(self, error: torch.Tensor) -> torch.Tensor:
        weights = self._compute_cost_weight()
        return torch.matmul(weights, error.unsqueeze(2)).squeeze(2)

    def weight_jacobians_and_error(
        self,
        jacobians: List[torch.Tensor],
        error: torch.Tensor,
    ) -> Tuple[List[torch.Tensor], torch.Tensor]:
        cost_weight = self._compute_cost_weight()
        error = torch.matmul(cost_weight, error.unsqueeze(2)).squeeze(2)
        new_jacobians = []
        for jac in jacobians:
            # Jacobian is batch_size x cost_function_dim x var_dim.
            # This left multiplies the weights (inv cov.) to jacobian.
            new_jacobians.append(torch.matmul(cost_weight, jac))
        return new_jacobians, error

    def _copy_impl(self, new_name: Optional[str] = None) -> "GPCostWeight":
        # need to pass data, since it could be Parameter(self.Qc_inv) in which
        # case Qc_inv won't be up to date.
        # This will change with the "learning happens outside API"
        return GPCostWeight(self.Qc_inv.copy(), self.dt.copy(), name=new_name)


class GPMotionModel(DoubleIntegrator):
    """DoubleIntegrator specialized to a GP motion prior (requires GPCostWeight)."""

    def __init__(
        self,
        pose1: LieGroup,
        vel1: Vector,
        pose2: LieGroup,
        vel2: Vector,
        dt: Variable,
        cost_weight: GPCostWeight,
        name: Optional[str] = None,
    ):
        if not isinstance(cost_weight, GPCostWeight):
            raise ValueError(
                "GPMotionModel only accepts cost weights of type GPCostWeight. "
                "For other weight types, consider using DoubleIntegrator instead."
            )
        super().__init__(pose1, vel1, pose2, vel2, dt, cost_weight, name=name)

    def _copy_impl(self, new_name: Optional[str] = None) -> "GPMotionModel":
        return cast(GPMotionModel, super().copy(new_name=new_name))
A Fast Stochastic Contact Model for Planar Pushing and Grasping: +# Theory and Experimental Validation, 2017 +# https://arxiv.org/abs/1705.10664 +class QuasiStaticPushingPlanar(CostFunction): + def __init__( + self, + obj1: SE2, + obj2: SE2, + eff1: SE2, + eff2: SE2, + cost_weight: CostWeight, + c_square: Union[Variable, torch.Tensor, float], + name: Optional[str] = None, + ): + super().__init__(cost_weight, name=name) + self.obj1 = obj1 + self.obj2 = obj2 + self.eff1 = eff1 + self.eff2 = eff2 + self.register_optim_vars(["obj1", "obj2", "eff1", "eff2"]) + + if not isinstance(c_square, Variable): + if isinstance(c_square, float): + c_square = torch.tensor(c_square, dtype=self.obj1.dtype).view(1, 1) + c_square = Variable(c_square, name=f"csquare_{name}") + if c_square.data.squeeze().ndim > 1: + raise ValueError("dt must be a 0-D or 1-D tensor.") + self.c_square = c_square + self.register_aux_vars(["c_square"]) + + def _get_tensor_info(self) -> Tuple[int, torch.device, torch.dtype]: + return self.obj1.shape[0], self.obj1.device, self.obj1.dtype + + # See Eqs. 3-7 in Zhou et al. 
+ def _compute_D( + self, + contact_point_2: torch.Tensor, + J_cp2_e2: OptionalJacobians, + get_jacobians: bool = False, + ) -> Tuple[torch.Tensor, OptionalJacobians]: + batch_size, device, dtype = self._get_tensor_info() + + # Setting up intermediate jacobians, if needed + J_cp2o: OptionalJacobians = [] if get_jacobians else None + + # Current contact point in object frame + contact_point_2__obj = self.obj2.transform_to(contact_point_2, jacobians=J_cp2o) + px = contact_point_2__obj.data[:, 0] + py = contact_point_2__obj.data[:, 1] + + # Putting D together + D = ( + torch.eye(3, 3, device=device, dtype=dtype) + .unsqueeze(0) + .repeat(batch_size, 1, 1) + ) + D[:, 0, 2] = D[:, 2, 0] = -py + D[:, 1, 2] = D[:, 2, 1] = px + D[:, 2, 2] = -self.c_square.data.view(-1) + + if not get_jacobians: + return D, None + + # Computing jacobians + J_cp2o_o2, J_cp2o_cp2 = J_cp2o + J_cp2o_e2 = J_cp2o_cp2.matmul(J_cp2_e2[0]) + + def _get_J_D_var(J_cp2o_var_): + # For matmul batching, this is set up with dimensions: + # batch_size x se2_dim x 3 x 3, + # so that J_D_var_[b, d] is the (3, 3) matrix derivative + # of the b-th batch D wrt to the d-th dimention of var + J_D_var_ = torch.zeros(batch_size, 3, 3, 3, device=device, dtype=dtype) + J_D_var_[..., 0, 2] = J_D_var_[..., 2, 0] = -J_cp2o_var_[:, 1] + J_D_var_[..., 1, 2] = J_D_var_[..., 2, 1] = J_cp2o_var_[:, 0] + return J_D_var_ + + J_D_o2 = _get_J_D_var(J_cp2o_o2) + J_D_e2 = _get_J_D_var(J_cp2o_e2) + + return D, [J_D_o2, J_D_e2] + + # See Eqs. 3-7 in Zhou et al. 
+ def _compute_V( + self, + obj2_angle: SO2, + J_o2angle_o2: OptionalJacobians, + get_jacobians: bool = False, + ) -> Tuple[torch.Tensor, OptionalJacobians]: + batch_size, device, dtype = self._get_tensor_info() + + # Setting up intermediate jacobians, if needed + J_o2xy_o2: OptionalJacobians = [] if get_jacobians else None + J_o1xy_o1: OptionalJacobians = [] if get_jacobians else None + J_vxyoo: OptionalJacobians = [] if get_jacobians else None + J_odw: OptionalJacobians = [] if get_jacobians else None + J_omega_odw: OptionalJacobians = [] if get_jacobians else None + + # Compute object velocities using consecutive object poses + vel_xy_obj__world = self.obj2.xy(jacobians=J_o2xy_o2) - self.obj1.xy( + jacobians=J_o1xy_o1 + ) + vel_xy_obj__obj = obj2_angle.unrotate(vel_xy_obj__world, jacobians=J_vxyoo) + + # Putting V together + obj_diff__world = cast(SE2, self.obj1.between(self.obj2, jacobians=J_odw)) + vx = vel_xy_obj__obj.data[:, 0] + vy = vel_xy_obj__obj.data[:, 1] + omega = obj_diff__world.theta(jacobians=J_omega_odw) + V = torch.stack([vx, vy, omega.squeeze(1)], dim=1) + + if not get_jacobians: + return V, None + + # Computing jacobians + def _get_J_V_var(J_vxyoo_var_, J_omega_var_): + # For matmul batching, this is set up with dimensions: + # batch_size x se2_dim x 3 x 1, + # so that J_V_var_[b, d] is the (3, 1) vector derivative + # of the b-th batch V wrt to the d-th dimention of var + J_V_var_ = torch.zeros(batch_size, 3, 3, device=device, dtype=dtype) + J_V_var_[..., 0] = J_vxyoo_var_[:, 0] + J_V_var_[..., 1] = J_vxyoo_var_[:, 1] + J_V_var_[..., 2] = J_omega_var_[:, 0] + return J_V_var_.unsqueeze(3) + + # Computing velocity derivates wrt to objects 1, 2 variables + J_vxyow_o2 = J_o2xy_o2[0] + J_vxyow_o1 = -J_o1xy_o1[0] + J_vxyoo_o2angle, J_vxyoo_vxyow = J_vxyoo + J_vxyoo_o1 = J_vxyoo_vxyow.matmul(J_vxyow_o1) + J_vxyoo_o2 = J_vxyoo_o2angle.matmul(J_o2angle_o2[0]) + J_vxyoo_vxyow.matmul( + J_vxyow_o2 + ) + + J_odw_o1, J_odw_o2 = J_odw + J_omega_o1 = 
J_omega_odw[0].matmul(J_odw_o1) + J_omega_o2 = J_omega_odw[0].matmul(J_odw_o2) + + J_V_o1 = _get_J_V_var(J_vxyoo_o1, J_omega_o1) + J_V_o2 = _get_J_V_var(J_vxyoo_o2, J_omega_o2) + + return V, [J_V_o1, J_V_o2] + + # See Eqs. 3-7 in Zhou et al. + def _compute_Vp( + self, + obj2_angle: SO2, + J_o2angle_o2: OptionalJacobians, + contact_point_2: torch.Tensor, + J_cp2_e2: OptionalJacobians, + get_jacobians: bool = False, + ) -> Tuple[torch.Tensor, List[torch.Tensor]]: + batch_size, device, dtype = self._get_tensor_info() + + # Setting up intermediate jacobians, if needed + J_cp1_e1: OptionalJacobians = [] if get_jacobians else None + J_vxyco: OptionalJacobians = [] if get_jacobians else None + + # Compute contact point velocities using consecutive end effector poses + contact_point_1 = self.eff1.xy(jacobians=J_cp1_e1) + vel_xy_contact__world = cast(Point2, contact_point_2 - contact_point_1) + + # Transform velocity to object's 2 frame + vel_xy_contact__obj = obj2_angle.unrotate( + vel_xy_contact__world, jacobians=J_vxyco + ) + + # Putting Vp together + v_px = vel_xy_contact__obj.data[:, 0] + v_py = vel_xy_contact__obj.data[:, 1] + Vp = torch.stack([v_px, v_py, torch.zeros_like(v_px)], dim=1) + + if not get_jacobians: + return Vp, None + + # Computing jacobians + def _get_J_Vp_var(J_vxyco_var_): + # For matmul batching, this is set up with dimensions: + # batch_size x se2_dim x 3 x 1, + # so that J_Vp_var_[b, d] is the (3, 1) vector derivative + # of the b-th batch Vp wrt to the d-th dimention of var + J_Vp_var_ = torch.zeros(batch_size, 3, 3, device=device, dtype=dtype) + J_Vp_var_[..., 0] = J_vxyco_var_[:, 0] # batch_size x se2_dim + J_Vp_var_[..., 1] = J_vxyco_var_[:, 1] # batch_size x se2_dim + return J_Vp_var_.unsqueeze(3) + + J_vxyco_o2angle, J_vxyco_vxycw = J_vxyco + J_vxyco_o2 = J_vxyco_o2angle.matmul(J_o2angle_o2[0]) + J_vxyco_e1 = J_vxyco_vxycw.matmul(-J_cp1_e1[0]) + J_vxyco_e2 = J_vxyco_vxycw.matmul(J_cp2_e2[0]) + + J_Vp_o2 = _get_J_Vp_var(J_vxyco_o2) + J_Vp_e1 
= _get_J_Vp_var(J_vxyco_e1) + J_Vp_e2 = _get_J_Vp_var(J_vxyco_e2) + + return Vp, [J_Vp_o2, J_Vp_e1, J_Vp_e2] + + def _error_and_jacobians_impl( + self, get_jacobians: bool + ) -> Tuple[torch.Tensor, OptionalJacobians]: + J_o2angle_o2: OptionalJacobians = [] if get_jacobians else None + J_cp2_e2: OptionalJacobians = [] if get_jacobians else None + + # These quantities are needed by two or more of D, V, Vp, so we compute once. + obj2_angle = self.obj2.rotation + self.obj2.theta(jacobians=J_o2angle_o2) + contact_point_2 = self.eff2.xy(jacobians=J_cp2_e2) + + # D * V = Vp, See Zhou et al. + # A Fast Stochastic Contact Model for Planar Pushing and Grasping: + # Theory and Experimental Validation, 2017 + # https://arxiv.org/abs/1705.10664 + D, J_D = self._compute_D(contact_point_2, J_cp2_e2, get_jacobians=get_jacobians) + V, J_V = self._compute_V(obj2_angle, J_o2angle_o2, get_jacobians=get_jacobians) + Vp, J_Vp = self._compute_Vp( + obj2_angle, + J_o2angle_o2, + contact_point_2, + J_cp2_e2, + get_jacobians=get_jacobians, + ) + + error = torch.bmm(D, V.unsqueeze(2)).squeeze(2) - Vp + + if not get_jacobians: + return error, None + + # Computing jacobians + V_exp = V.view(-1, 1, 3, 1) + D_exp = D.view(-1, 1, 3, 3) + sum_str = "bdij,bdjk->bdik" + + J_D_o2, J_D_e2 = J_D + J_V_o1, J_V_o2 = J_V + J_Vp_o2, J_Vp_e1, J_Vp_e2 = J_Vp + + def _get_J_err_var_(J_D_var_, J_V_var_, J_Vp_var_): + # After einsum, each result is a tensor with size: + # batch_size x se2_dim x 3 x 1 + # so that after all terms are summed, + # J_err_var_[b, d] is a (3, 1) vector derivative of + # the b-th batch element of the 3-D error wrt to the + # d-th dimension of the optimization variable. + # At return, we squeeze the trailing dimension, and permute, so that the + # output has shape (batch_size, 3, se2_dim), which is the expected jacobian + # shape. 
+ J_err_var_ = torch.zeros_like(J_V_o1) + if J_D_var_ is not None: + J_err_var_ += torch.einsum(sum_str, J_D_var_, V_exp) + if J_V_var_ is not None: + J_err_var_ += torch.einsum(sum_str, D_exp, J_V_var_) + if J_Vp_var_ is not None: + J_err_var_ += -J_Vp_var_ + return J_err_var_.squeeze(3).permute(0, 2, 1) + + J_err_o1 = _get_J_err_var_(None, J_V_o1, None) + J_err_o2 = _get_J_err_var_(J_D_o2, J_V_o2, J_Vp_o2) + J_err_e1 = _get_J_err_var_(None, None, J_Vp_e1) + J_err_e2 = _get_J_err_var_(J_D_e2, None, J_Vp_e2) + + return error, [J_err_o1, J_err_o2, J_err_e1, J_err_e2] + + def error(self) -> torch.Tensor: + return self._error_and_jacobians_impl(get_jacobians=False)[0] + + def jacobians(self) -> Tuple[List[torch.Tensor], torch.Tensor]: + error, jacobians = self._error_and_jacobians_impl(get_jacobians=True) + return jacobians, error + + def dim(self) -> int: + return 3 + + def _copy_impl(self, new_name: Optional[str] = None) -> "QuasiStaticPushingPlanar": + return QuasiStaticPushingPlanar( + self.obj1.copy(), + self.obj2.copy(), + self.eff1.copy(), + self.eff2.copy(), + self.weight.copy(), + self.c_square.copy(), + name=new_name, + ) diff --git a/theseus/embodied/motionmodel/tests/test_double_integrator.py b/theseus/embodied/motionmodel/tests/test_double_integrator.py new file mode 100644 index 000000000..403884fad --- /dev/null +++ b/theseus/embodied/motionmodel/tests/test_double_integrator.py @@ -0,0 +1,95 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. 
+ +import copy + +import pytest # noqa: F401 +import torch + +import theseus as th +from theseus.core.tests.common import check_another_theseus_function_is_copy +from theseus.utils import numeric_jacobian + + +def test_gp_motion_model_cost_weight_weights(): + for dof in range(1, 10): + for batch_size in [1, 10, 100]: + aux = torch.randn(batch_size, dof, dof).double() + q_inv = aux.transpose(-2, -1).bmm(aux) + dt = torch.rand(1).double() + cost_weight = th.eb.GPCostWeight(q_inv, dt) + sqrt_weights = cost_weight._compute_cost_weight() + a = 12 * (dt.item() ** -3) + b = -6 * (dt.item() ** -2) + c = 4 * (dt.item() ** -1) + weights = sqrt_weights.transpose(-2, -1).bmm(sqrt_weights) + assert torch.allclose(weights[:, :dof, :dof], q_inv * a) + assert torch.allclose(weights[:, :dof, dof:], q_inv * b) + assert torch.allclose(weights[:, dof:, :dof], q_inv * b) + assert torch.allclose(weights[:, dof:, dof:], q_inv * c) + + error = torch.randn(batch_size, 2 * dof).double() + weighted_error = cost_weight.weight_error(error) + assert torch.allclose( + sqrt_weights @ error.view(batch_size, 2 * dof, 1), + weighted_error.view(batch_size, 2 * dof, 1), + ) + + jacobians = [ + torch.randn(batch_size, 2 * dof, nvar).double() for nvar in range(1, 4) + ] + weighted_jacs, weighted_err_2 = cost_weight.weight_jacobians_and_error( + jacobians, error + ) + for i, jac in enumerate(jacobians): + assert torch.allclose(weighted_jacs[i], sqrt_weights @ jacobians[i]) + assert torch.allclose(weighted_err_2, weighted_error) + + +def test_gp_motion_model_cost_weight_copy(): + q_inv = torch.randn(10, 2, 2) + dt = torch.rand(1) + cost_weight = th.eb.GPCostWeight(q_inv, dt, name="gp") + check_another_theseus_function_is_copy( + cost_weight, cost_weight.copy(new_name="new_name"), new_name="new_name" + ) + check_another_theseus_function_is_copy( + cost_weight, copy.deepcopy(cost_weight), new_name="gp_copy" + ) + + +def test_gp_motion_model_cost_function_error_vector_vars(): + for batch_size in [1, 10, 
100]: + for dof in range(1, 10): + vars = [ + th.Vector(data=torch.randn(batch_size, dof).double()) for _ in range(4) + ] + dt = th.Variable(torch.rand(1).double()) + + q_inv = torch.randn(batch_size, dof, dof).double() + # won't be used for the test, but it's required by cost_function's constructor + cost_weight = th.eb.GPCostWeight(q_inv, dt) + cost_function = th.eb.GPMotionModel( + vars[0], vars[1], vars[2], vars[3], dt, cost_weight + ) + + error = cost_function.error() + assert torch.allclose( + error[:, :dof], vars[2].data - (vars[0].data + vars[1].data * dt.data) + ) + assert torch.allclose(error[:, dof:], vars[3].data - vars[1].data) + + def new_error_fn(new_vars): + new_cost_function = th.eb.GPMotionModel( + new_vars[0], new_vars[1], new_vars[2], new_vars[3], dt, cost_weight + ) + return th.Vector(data=new_cost_function.error()) + + expected_jacs = numeric_jacobian(new_error_fn, vars, function_dim=2 * dof) + jacobians, error_jac = cost_function.jacobians() + error = cost_function.error() + assert torch.allclose(error_jac, error) + for i in range(4): + assert torch.allclose(jacobians[i], expected_jacs[i], atol=1e-8) diff --git a/theseus/embodied/motionmodel/tests/test_quasi_static_pushing_planar.py b/theseus/embodied/motionmodel/tests/test_quasi_static_pushing_planar.py new file mode 100644 index 000000000..472a97258 --- /dev/null +++ b/theseus/embodied/motionmodel/tests/test_quasi_static_pushing_planar.py @@ -0,0 +1,127 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. 
+ +import numpy as np +import torch + +import theseus.core as thcore +import theseus.embodied as thembod +import theseus.geometry as thgeom +from theseus.geometry.tests.test_se2 import create_random_se2 +from theseus.utils import numeric_jacobian + + +def test_error_quasi_static_pushing_planar_se2(): + + # c_square is c**2, c = max_torque / max_force is a hyper param dependent on object + c_square = torch.Tensor([1.0]) + cost_weight = thcore.ScaleCostWeight(1) + + inputs = { + "obj1": torch.DoubleTensor( + [ + [0.0, 0.0, 0.0], + [0.0, 0.0, 0.0], + [0.0, 0.0, 0.0], + [0.0, 0.0, 0.0], + [1.0, 1.0, np.pi / 2], + ] + ), + "obj2": torch.DoubleTensor( + [ + [0.0, 0.0, 0.0], + [1.0, 1.0, np.pi / 2], + [1.0, 1.0, np.pi / 4], + [1.0, 1.0, np.pi / 4], + [0.0, 0.0, 0.0], + ] + ), + "eff1": torch.DoubleTensor( + [ + [0.0, 0.0, 0.0], + [0.0, 0.0, np.pi / 2], + [0.0, 0.0, np.pi / 4], + [2.0, 2.0, np.pi / 4], + [2.0, 2.0, np.pi / 4], + ] + ), + "eff2": torch.DoubleTensor( + [ + [0.0, 0.0, 0.0], + [1.0, 1.0, np.pi], + [1.0, 1.0, np.pi / 2], + [3.0, 3.0, np.pi / 2], + [3.0, 3.0, np.pi / 2], + ] + ), + } + + outputs = { + "error": torch.DoubleTensor( + [ + [0.0, 0.0, 0.0], + [0.0, 0.0, -1.57079633], + [0.0, 0.0, -0.78539816], + [0.0, 2.22144147, -0.785398163], + [2.71238898, -6.71238898, 1.57079633], + ] + ) + } + n_tests = outputs["error"].shape[0] + for i in range(0, n_tests): + obj1 = thgeom.SE2(x_y_theta=(inputs["obj1"][i, :]).unsqueeze(0)) + obj2 = thgeom.SE2(x_y_theta=(inputs["obj2"][i, :]).unsqueeze(0)) + eff1 = thgeom.SE2(x_y_theta=(inputs["eff1"][i, :]).unsqueeze(0)) + eff2 = thgeom.SE2(x_y_theta=(inputs["eff2"][i, :]).unsqueeze(0)) + + cost_fn = thembod.QuasiStaticPushingPlanar( + obj1, obj2, eff1, eff2, cost_weight, c_square + ) + + actual = cost_fn.error() + _, actual2 = cost_fn.jacobians() + expected = outputs["error"][i, :] + + print( + f"actual: {actual.squeeze().numpy()}, expected: {expected.squeeze().numpy()}" + ) + assert np.allclose(actual.squeeze().numpy(), 
expected.squeeze().numpy()) + assert torch.allclose(actual, actual2) + + +def test_quasi_static_pushing_planar_jacobians(): + rng = torch.Generator() + rng.manual_seed(0) + for _ in range(10): # repeat a bunch of times + for batch_size in [1, 10, 100]: + obj1 = create_random_se2(batch_size, rng) + obj2 = create_random_se2(batch_size, rng) + eff1 = create_random_se2(batch_size, rng) + eff2 = create_random_se2(batch_size, rng) + c_square = torch.Tensor([1.0]) + cost_weight = thcore.ScaleCostWeight(1) + + cost_fn = thembod.QuasiStaticPushingPlanar( + obj1, obj2, eff1, eff2, cost_weight, c_square + ) + jacobians, _ = cost_fn.jacobians() + + def new_error_fn(groups): + new_cost_fn = thembod.QuasiStaticPushingPlanar( + groups[0], groups[1], groups[2], groups[3], cost_weight, c_square + ) + return thgeom.Vector(data=new_cost_fn.error()) + + expected_jacs = numeric_jacobian( + new_error_fn, [obj1, obj2, eff1, eff2], delta_mag=1e-6 + ) + + def _check_jacobian(actual_, expected_): + # This makes failures more explicit than torch.allclose() + diff = (expected_ - actual_).norm(p=float("inf")) + assert diff < 1e-5 + + for i in range(len(expected_jacs)): + _check_jacobian(jacobians[i], expected_jacs[i]) diff --git a/theseus/geometry/__init__.py b/theseus/geometry/__init__.py new file mode 100644 index 000000000..5af6f16f6 --- /dev/null +++ b/theseus/geometry/__init__.py @@ -0,0 +1,11 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. 
+ +from .lie_group import LieGroup, adjoint, between, compose, exp_map, inverse, log_map +from .manifold import Manifold, OptionalJacobians, local, retract +from .point_types import Point2, Point3 +from .se2 import SE2 +from .so2 import SO2 +from .vector import Vector diff --git a/theseus/geometry/lie_group.py b/theseus/geometry/lie_group.py new file mode 100644 index 000000000..aba122411 --- /dev/null +++ b/theseus/geometry/lie_group.py @@ -0,0 +1,176 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +import abc +from typing import Any, List, Optional, Tuple, cast + +import torch + +from theseus.geometry.manifold import Manifold + + +# Abstract class to represent Lie groups. +# Concrete classes must implement the following methods: +# - `exp_map` +# - `_log_map` +# - `_adjoint` +# _ `_compose` +# _ `_inverse` +# +# Constructor can optionally provide an initial data value as a keyword argument. 
+class LieGroup(Manifold): + def __init__( + self, + *args: Any, + data: Optional[torch.Tensor] = None, + name: Optional[str] = None, + dtype: torch.dtype = torch.float, + ): + super().__init__(*args, data=data, name=name, dtype=dtype) + + @staticmethod + def _check_jacobians_list(jacobians: List[torch.Tensor]): + if len(jacobians) != 0: + raise ValueError("jacobians list to be populated must be empty.") + + @staticmethod + @abc.abstractmethod + def _init_data(*args: Any) -> torch.Tensor: + pass + + @abc.abstractmethod + def dof(self) -> int: + pass + + def __str__(self) -> str: + return repr(self) + + @staticmethod + @abc.abstractmethod + def exp_map(tangent_vector: torch.Tensor) -> "LieGroup": + pass + + @abc.abstractmethod + def _log_map_impl(self) -> torch.Tensor: + pass + + def log_map(self) -> torch.Tensor: + return self._log_map_impl() + + @abc.abstractmethod + def _adjoint_impl(self) -> torch.Tensor: + pass + + def adjoint(self) -> torch.Tensor: + return self._adjoint_impl() + + def between( + self, variable2: "LieGroup", jacobians: Optional[List[torch.Tensor]] = None + ) -> "LieGroup": + v1_inverse = self._inverse_impl() + between = v1_inverse._compose_impl(variable2) + if jacobians is not None: + LieGroup._check_jacobians_list(jacobians) + Jinv = LieGroup._inverse_jacobian(self) + Jcmp0, Jcmp1 = v1_inverse._compose_jacobian(variable2) + Jbetween0 = torch.matmul(Jcmp0, Jinv) + jacobians.extend([Jbetween0, Jcmp1]) + return between + + @abc.abstractmethod + def _compose_impl(self, variable2: "LieGroup") -> "LieGroup": + pass + + def compose( + self, variable2: "LieGroup", jacobians: Optional[List[torch.Tensor]] = None + ) -> "LieGroup": + composition = self._compose_impl(variable2) + if jacobians is not None: + LieGroup._check_jacobians_list(jacobians) + jacobians.extend(self._compose_jacobian(variable2)) + return composition + + @abc.abstractmethod + def _inverse_impl(self) -> "LieGroup": + pass + + def inverse(self, jacobian: Optional[List[torch.Tensor]] = 
None) -> "LieGroup": + the_inverse = self._inverse_impl() + if jacobian is not None: + LieGroup._check_jacobians_list(jacobian) + jacobian.append(self._inverse_jacobian(self)) + return the_inverse + + def _compose_jacobian( + self, group2: "LieGroup" + ) -> Tuple[torch.Tensor, torch.Tensor]: + if not type(self) is type(group2): + raise ValueError("Lie groups for compose must be of the same type.") + g2_inverse = group2._inverse_impl() + jac1 = g2_inverse.adjoint() + jac2 = ( + torch.eye(group2.dof(), dtype=self.dtype) + .repeat(group2.shape[0], 1, 1) + .to(group2.device) + ) + return jac1, jac2 + + @staticmethod + def _inverse_jacobian(group: "LieGroup") -> torch.Tensor: + return -group.adjoint() + + def _local_impl(self, variable2: Manifold) -> torch.Tensor: + variable2 = cast(LieGroup, variable2) + return self.between(variable2).log_map() + + def _local_jacobian(self, variable2: Manifold) -> Tuple[torch.Tensor, torch.Tensor]: + # Need the log_map derivative which is not yet implemented + raise NotImplementedError + + def _retract_impl(self, delta: torch.Tensor) -> "LieGroup": + return self.compose(self.exp_map(delta)) + + # added to avoid casting downstream + def copy(self, new_name: Optional[str] = None) -> "LieGroup": + return cast(LieGroup, super().copy(new_name=new_name)) + + +# Alias for LieGroup.adjoint() +def adjoint(variable: LieGroup) -> torch.Tensor: + return variable.adjoint() + + +def between( + variable1: LieGroup, + variable2: LieGroup, + jacobians: Optional[List[torch.Tensor]] = None, +) -> LieGroup: + return variable1.between(variable2, jacobians=jacobians) + + +# Alias for LieGroup.compose() +def compose( + variable1: LieGroup, + variable2: LieGroup, + jacobians: Optional[List[torch.Tensor]] = None, +) -> LieGroup: + return variable1.compose(variable2, jacobians=jacobians) + + +# Alias for LieGroup.inverse() +def inverse( + variable1: LieGroup, jacobian: Optional[List[torch.Tensor]] = None +) -> LieGroup: + return 
variable1.inverse(jacobian=jacobian) + + +# Alias for LieGroup.log_map() +def log_map(variable: LieGroup) -> torch.Tensor: + return variable.log_map() + + +# Alias for LieGroup.exp_map() +def exp_map(variable: LieGroup, tangent_vector: torch.Tensor) -> LieGroup: + return variable.__class__.exp_map(tangent_vector) diff --git a/theseus/geometry/manifold.py b/theseus/geometry/manifold.py new file mode 100644 index 000000000..ff6217f53 --- /dev/null +++ b/theseus/geometry/manifold.py @@ -0,0 +1,121 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +import abc +import warnings +from typing import Any, List, Optional, Tuple + +import torch + +from theseus.core.variable import Variable + +OptionalJacobians = Optional[List[torch.Tensor]] + + +# Abstract class to represent Manifold-type variables in the objective. +# Concrete classes must implement the following methods: +# - `_init_data`: initializes the underlying data tensor given constructor arguments. +# The provided tensor must have a batch dimension. +# -`_local`: given two close Manifolds gives distance in tangent space +# - `_retract`: returns Manifold close by delta to given Manifold +# - `update`: replaces the data tensor with the provided one (and checks that +# the provided has the right format). +# - `dof`: # of degrees of freedom of the variable +# +# Constructor can optionally provide an initial data value as a keyword argument. 
+class Manifold(Variable, abc.ABC): + def __init__( + self, + *args: Any, + data: Optional[torch.Tensor] = None, + name: Optional[str] = None, + dtype: Optional[torch.dtype] = None, + ): + # If nothing specified, use torch's default dtype + # else data.dtype takes precedence + if data is None and dtype is None: + dtype = torch.get_default_dtype() + if data is not None: + if dtype is not None and data.dtype != dtype: + warnings.warn( + f"data.dtype {data.dtype} does not match given dtype {dtype}, " + "data.dtype will take precendence." + ) + dtype = data.dtype + + super().__init__(self.__class__._init_data(*args).to(dtype=dtype), name=name) + if data is not None: + self.update(data) + + # This method should return a tensor with the manifold's data representation + # as a function of the given args + @staticmethod + @abc.abstractmethod + def _init_data(*args: Any) -> torch.Tensor: + pass + + @abc.abstractmethod + def dof(self) -> int: + pass + + @abc.abstractmethod + def _local_impl(self, variable2: "Manifold") -> torch.Tensor: + pass + + @abc.abstractmethod + def _local_jacobian( + self, variable2: "Manifold" + ) -> Tuple[torch.Tensor, torch.Tensor]: + pass + + @abc.abstractmethod + def _retract_impl(self, delta: torch.Tensor) -> "Manifold": + pass + + def local( + self, + variable2: "Manifold", + jacobians: Optional[List[torch.Tensor]] = None, + ) -> torch.Tensor: + local_out = self._local_impl(variable2) + if jacobians is not None: + assert len(jacobians) == 0 + jacobians.extend(self._local_jacobian(variable2)) + return local_out + + def retract(self, delta: torch.Tensor) -> "Manifold": + return self._retract_impl(delta) + + # Must copy everything + @abc.abstractmethod + def _copy_impl(self, new_name: Optional[str] = None) -> "Manifold": + pass + + def copy(self, new_name: Optional[str] = None) -> "Manifold": + if not new_name: + new_name = f"{self.name}_copy" + var_copy = self._copy_impl(new_name=new_name) + return var_copy + + def __deepcopy__(self, memo): + if 
id(self) in memo: + return memo[id(self)] + the_copy = self.copy() + memo[id(self)] = the_copy + return the_copy + + +# Alias for Manifold.local() +def local( + variable1: Manifold, + variable2: Manifold, + jacobians: Optional[List[torch.Tensor]] = None, +) -> torch.Tensor: + return variable1.local(variable2, jacobians=jacobians) + + +# Alias for Manifold.retract() +def retract(variable: Manifold, delta: torch.Tensor) -> Manifold: + return variable.retract(delta) diff --git a/theseus/geometry/point_types.py b/theseus/geometry/point_types.py new file mode 100644 index 000000000..53b579ceb --- /dev/null +++ b/theseus/geometry/point_types.py @@ -0,0 +1,80 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +from typing import Optional, Tuple, cast + +import torch + +from .lie_group import LieGroup +from .vector import Vector + + +def _prepare_dof_and_data( + expected_dof: int, data: Optional[torch.Tensor] +) -> Tuple[Optional[int], Optional[torch.Tensor]]: + dof = None + if data is None: + dof = expected_dof + else: + if data.ndim == 1: + data = data.view(1, -1) + if data.shape[1] != expected_dof: + raise ValueError( + f"Provied data tensor must have shape (batch_size, {expected_dof})." 
+ ) + return dof, data + + +class Point2(Vector): + def __init__( + self, + data: Optional[torch.Tensor] = None, + name: Optional[str] = None, + dtype: Optional[torch.dtype] = None, + ): + dof, data = _prepare_dof_and_data(2, data) + super().__init__(dof=dof, data=data, name=name, dtype=dtype) + + def x(self) -> torch.Tensor: + return self[:, 0] + + def y(self) -> torch.Tensor: + return self[:, 1] + + @staticmethod + def exp_map(tangent_vector: torch.Tensor) -> LieGroup: + return Point2(data=tangent_vector.clone()) + + # added to avoid casting downstream + def copy(self, new_name: Optional[str] = None) -> "Point2": + return cast(Point2, super().copy(new_name=new_name)) + + +class Point3(Vector): + def __init__( + self, + data: Optional[torch.Tensor] = None, + name: Optional[str] = None, + dtype: Optional[torch.dtype] = None, + ): + dof, data = _prepare_dof_and_data(3, data) + super().__init__(dof=dof, data=data, name=name, dtype=dtype) + + def x(self) -> torch.Tensor: + return self[:, 0] + + def y(self) -> torch.Tensor: + return self[:, 1] + + def z(self) -> torch.Tensor: + return self[:, 2] + + @staticmethod + def exp_map(tangent_vector: torch.Tensor) -> LieGroup: + return Point3(data=tangent_vector.clone()) + + # added to avoid casting downstream + def copy(self, new_name: Optional[str] = None) -> "Point3": + return cast(Point3, super().copy(new_name=new_name)) diff --git a/theseus/geometry/se2.py b/theseus/geometry/se2.py new file mode 100644 index 000000000..aaa7fdb9b --- /dev/null +++ b/theseus/geometry/se2.py @@ -0,0 +1,263 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. 
+ +from typing import List, Optional, Union, cast + +import torch + +import theseus.constants + +from .lie_group import LieGroup +from .point_types import Point2 +from .so2 import SO2 + + +# If data is passed, must be x, y, cos, sin +# If x_y_theta is passed, must be tensor with shape batch_size x 3 +class SE2(LieGroup): + def __init__( + self, + x_y_theta: Optional[torch.Tensor] = None, + data: Optional[torch.Tensor] = None, + name: Optional[str] = None, + dtype: Optional[torch.dtype] = None, + ): + if x_y_theta is not None and data is not None: + raise ValueError("Please provide only one of x_y_theta or data.") + if x_y_theta is not None: + dtype = x_y_theta.dtype + super().__init__(data=data, name=name, dtype=dtype) + if x_y_theta is not None: + rotation = SO2(theta=x_y_theta[:, 2:]) + translation = Point2(data=x_y_theta[:, :2]) + self.update_from_rot_and_trans(rotation, translation) + + @staticmethod + def _init_data() -> torch.Tensor: # type: ignore + return torch.empty(1, 4) # x, y, cos and sin + + def dof(self) -> int: + return 3 + + def __repr__(self) -> str: + return f"SE2(data={self.data}, name={self.name})" + + def __str__(self) -> str: + with torch.no_grad(): + theta = torch.atan2(self[:, 3:], self[:, 2:3]) + xytheta = torch.cat([self[:, :2], theta], dim=1) + return f"SE2(xytheta={xytheta}, name={self.name})" + + @property + def rotation(self) -> SO2: + return SO2(data=self[:, 2:]) + + def theta(self, jacobians: Optional[List[torch.Tensor]] = None) -> torch.Tensor: + if jacobians is not None: + self._check_jacobians_list(jacobians) + J_out = torch.zeros( + self.shape[0], 1, 3, device=self.device, dtype=self.dtype + ) + J_out[:, 0, 2] = 1 + jacobians.append(J_out) + return self.rotation.theta() + + @property + def translation(self) -> Point2: + return Point2(data=self[:, :2]) + + def xy(self, jacobians: Optional[List[torch.Tensor]] = None) -> torch.Tensor: + if jacobians is not None: + self._check_jacobians_list(jacobians) + rotation = self.rotation + 
J_out = torch.zeros( + self.shape[0], 2, 3, device=self.device, dtype=self.dtype + ) + J_out[:, :2, :2] = rotation.to_matrix() + jacobians.append(J_out) + return self.translation.data + + def update_from_rot_and_trans(self, rotation: SO2, translation: Point2): + batch_size = rotation.shape[0] + self.data = torch.empty(batch_size, 4).to( + device=rotation.device, dtype=rotation.dtype + ) + self[:, :2] = translation.data + cosine, sine = rotation.to_cos_sin() + self[:, 2] = cosine + self[:, 3] = sine + + # From https://github.com/strasdat/Sophus/blob/master/sophus/se2.hpp#L160 + def _log_map_impl(self) -> torch.Tensor: + rotation = self.rotation + + theta = rotation.log_map() + half_theta = 0.5 * theta.view(-1) + + cosine, sine = rotation.to_cos_sin() + cos_minus_one = cosine - 1 + halftheta_by_tan_of_halftheta = torch.zeros_like(cos_minus_one) + + # Compute halftheta_by_tan_of_halftheta when theta is not near zero + idx_regular_vals = cos_minus_one.abs() > theseus.constants.EPS + halftheta_by_tan_of_halftheta[idx_regular_vals] = ( + -(half_theta * sine)[idx_regular_vals] / cos_minus_one[idx_regular_vals] + ) + # Same as above three lines but for small values + idx_small_vals = cos_minus_one.abs() < theseus.constants.EPS + if idx_small_vals.any(): + theta_sq_at_idx = theta[idx_small_vals] ** 2 + halftheta_by_tan_of_halftheta[idx_small_vals] = ( + -theta_sq_at_idx.view(-1) / 12 + 1 + ) + + v_inv = torch.empty(self.shape[0], 2, 2).to( + device=self.device, dtype=self.dtype + ) + v_inv[:, 0, 0] = halftheta_by_tan_of_halftheta + v_inv[:, 0, 1] = half_theta + v_inv[:, 1, 0] = -half_theta + v_inv[:, 1, 1] = halftheta_by_tan_of_halftheta + tangent_translation = torch.matmul(v_inv, self[:, :2].unsqueeze(-1)) + + return torch.cat([tangent_translation.view(-1, 2), theta], dim=1) + + # From https://github.com/strasdat/Sophus/blob/master/sophus/se2.hpp#L558 + @staticmethod + def exp_map(tangent_vector: torch.Tensor) -> LieGroup: + u = tangent_vector[:, :2] + theta = 
tangent_vector[:, 2] + rotation = SO2(theta=theta) + + cosine, sine = rotation.to_cos_sin() + + sin_theta_by_theta = torch.zeros_like(sine) + one_minus_cos_theta_by_theta = torch.zeros_like(sine) + + # Compute above quantities when theta is not near zero + idx_regular_thetas = theta.abs() > theseus.constants.EPS + if idx_regular_thetas.any(): + sin_theta_by_theta[idx_regular_thetas] = ( + sine[idx_regular_thetas] / theta[idx_regular_thetas] + ) + one_minus_cos_theta_by_theta[idx_regular_thetas] = ( + -cosine[idx_regular_thetas] + 1 + ) / theta[idx_regular_thetas] + + # Same as above three lines but for small angles + idx_small_thetas = theta.abs() < theseus.constants.EPS + if idx_small_thetas.any(): + small_theta = theta[idx_small_thetas] + small_theta_sq = small_theta ** 2 + sin_theta_by_theta[idx_small_thetas] = -small_theta_sq / 6 + 1 + one_minus_cos_theta_by_theta[idx_small_thetas] = ( + 0.5 * small_theta - small_theta / 24 * small_theta_sq + ) + + new_x = sin_theta_by_theta * u[:, 0] - one_minus_cos_theta_by_theta * u[:, 1] + new_y = one_minus_cos_theta_by_theta * u[:, 0] + sin_theta_by_theta * u[:, 1] + translation = Point2(data=torch.stack([new_x, new_y], dim=1)) + + se2 = SE2(dtype=tangent_vector.dtype) + se2.update_from_rot_and_trans(rotation, translation) + return se2 + + def _adjoint_impl(self) -> torch.Tensor: + ret = torch.zeros(self.shape[0], 3, 3).to(device=self.device, dtype=self.dtype) + ret[:, :2, :2] = self.rotation.to_matrix() + ret[:, 0, 2] = self[:, 1] + ret[:, 1, 2] = -self[:, 0] + ret[:, 2, 0] = 0 + ret[:, 2, 2] = 1 + return ret + + def _compose_impl(self, se2_2: LieGroup) -> "SE2": + se2_2 = cast(SE2, se2_2) + rotation_1 = self.rotation + rotation_2 = se2_2.rotation + translation_1 = cast(Point2, self.translation) + translation_2 = cast(Point2, se2_2.translation) + new_rotation = rotation_1.compose(rotation_2) + new_translation = cast( + Point2, + translation_1.compose(rotation_1.rotate(translation_2)), + ) + return 
SE2(data=torch.cat([new_translation.data, new_rotation.data], dim=1)) + + def _inverse_impl(self) -> "SE2": + inverse_rotation = self.rotation._inverse_impl() + inverse_translation = inverse_rotation.rotate(cast(Point2, -self.translation)) + se2_inverse = SE2(dtype=self.dtype) + se2_inverse.update_from_rot_and_trans(inverse_rotation, inverse_translation) + return se2_inverse + + def to_matrix(self) -> torch.Tensor: + matrix = torch.zeros(self.shape[0], 3, 3).to( + device=self.device, dtype=self.dtype + ) + matrix[:, :2, :2] = self.rotation.to_matrix() + matrix[:, :2, 2] = self[:, :2] + matrix[:, 2, 2] = 1.0 + return matrix + + @staticmethod + def hat(tangent_vector: torch.Tensor) -> torch.Tensor: + theta = tangent_vector[:, 2] + u = tangent_vector[:, :2] + matrix = torch.zeros(tangent_vector.shape[0], 3, 3).to( + device=tangent_vector.device, dtype=tangent_vector.dtype + ) + matrix[:, 0, 1] = -theta + matrix[:, 1, 0] = theta + matrix[:, :2, 2] = u + return matrix + + @staticmethod + def vee(matrix: torch.Tensor) -> torch.Tensor: + _check = matrix.ndim == 3 and matrix.shape[1:] == (3, 3) + _check &= torch.allclose(matrix[:, 0, 1], -matrix[:, 1, 0]) + _check &= matrix[:, 0, 0].abs().max().item() < theseus.constants.EPS + _check &= matrix[:, 1, 1].abs().max().item() < theseus.constants.EPS + _check &= matrix[:, 2, 2].abs().max().item() < theseus.constants.EPS + if not _check: + raise ValueError("Invalid hat matrix for SE2.") + batch_size = matrix.shape[0] + tangent_vector = torch.zeros(batch_size, 3).to( + device=matrix.device, dtype=matrix.dtype + ) + tangent_vector[:, 2] = matrix[:, 1, 0] + tangent_vector[:, :2] = matrix[:, :2, 2] + return tangent_vector + + def transform_to( + self, + point: Union[torch.Tensor, Point2], + jacobians: Optional[List[torch.Tensor]] = None, + ) -> Point2: + point_data = point if isinstance(point, torch.Tensor) else point.data + translation_rel = point_data - self.xy() + J_rot: Optional[List[torch.Tensor]] = None + if jacobians is not 
None: + self._check_jacobians_list(jacobians) + J_rot = [] + transform = self.rotation.unrotate(translation_rel, jacobians=J_rot) + if jacobians is not None: + J_rot_pose, J_rot_point = J_rot + J_out_pose = torch.zeros( + self.shape[0], 2, 3, device=self.device, dtype=self.dtype + ) + J_out_pose[:, :2, :2] = -torch.eye( + 2, device=self.device, dtype=self.dtype + ).unsqueeze(0) + J_out_pose[:, :, 2:] = J_rot_pose + jacobians.extend([J_out_pose, J_rot_point]) + return transform + + def _copy_impl(self, new_name: Optional[str] = None) -> "SE2": + return SE2(data=self.data.clone(), name=new_name) + + # only added to avoid casting downstream + def copy(self, new_name: Optional[str] = None) -> "SE2": + return cast(SE2, super().copy(new_name=new_name)) diff --git a/theseus/geometry/so2.py b/theseus/geometry/so2.py new file mode 100644 index 000000000..ce251ad13 --- /dev/null +++ b/theseus/geometry/so2.py @@ -0,0 +1,193 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +from typing import List, Optional, Tuple, Union, cast + +import torch + +import theseus.constants + +from .lie_group import LieGroup +from .point_types import Point2 + + +class SO2(LieGroup): + def __init__( + self, + theta: Optional[torch.Tensor] = None, + data: Optional[torch.Tensor] = None, + name: Optional[str] = None, + dtype: Optional[torch.dtype] = None, + ): + if theta is not None and data is not None: + raise ValueError("Please provide only one of theta or data.") + if theta is not None: + dtype = theta.dtype + super().__init__(data=data, name=name, dtype=dtype) + if theta is not None: + if theta.ndim == 1: + theta = theta.unsqueeze(1) + if theta.ndim != 2 or theta.shape[1] != 1: + raise ValueError( + "Argument theta must be have ndim = 1, or ndim=2 and shape[1] = 1." 
+ ) + self.update_from_angle(theta) + + @staticmethod + def _init_data() -> torch.Tensor: # type: ignore + return torch.empty(1, 2) # cos and sin + + def update_from_angle(self, theta: torch.Tensor): + self.update(torch.cat([theta.cos(), theta.sin()], dim=1)) + + def dof(self) -> int: + return 1 + + def __repr__(self) -> str: + return f"SO2(data={self.data}, name={self.name})" + + def __str__(self) -> str: + with torch.no_grad(): + theta = torch.atan2(self[:, 1:], self[:, 0:1]) + return f"SO2(theta={theta}, name={self.name})" + + def theta(self) -> torch.Tensor: + return self.log_map() + + def _adjoint_impl(self) -> torch.Tensor: + return torch.ones(self.shape[0], 1, 1, device=self.device, dtype=self.dtype) + + @staticmethod + def exp_map(tangent_vector: torch.Tensor) -> LieGroup: + so2 = SO2(dtype=tangent_vector.dtype) + so2.update_from_angle(tangent_vector) + return so2 + + def _log_map_impl(self) -> torch.Tensor: + cosine, sine = self.to_cos_sin() + return torch.atan2(sine, cosine).unsqueeze(1) + + def _compose_impl(self, so2_2: LieGroup) -> "SO2": + so2_2 = cast(SO2, so2_2) + cos_1, sin_1 = self.to_cos_sin() + cos_2, sin_2 = so2_2.to_cos_sin() + new_cos = cos_1 * cos_2 - sin_1 * sin_2 + new_sin = sin_1 * cos_2 + cos_1 * sin_2 + return SO2(data=torch.stack([new_cos, new_sin], dim=1)) + + def _inverse_impl(self, get_jacobian: bool = False) -> "SO2": + cosine, sine = self.to_cos_sin() + return SO2(data=torch.stack([cosine, -sine], dim=1)) + + def _rotate_shape_check(self, point: Union[Point2, torch.Tensor]): + err_msg = "SO2 can only rotate 2-D vectors." + if isinstance(point, torch.Tensor): + if not point.ndim == 2 or point.shape[1] != 2: + raise ValueError(err_msg) + elif point.dof() != 2: + raise ValueError(err_msg) + if ( + point.shape[0] != self.shape[0] + and point.shape[0] != 1 + and self.shape[0] != 1 + ): + raise ValueError( + "Input point batch size is not broadcastable with group batch size." 
+ ) + + @staticmethod + def _rotate_from_cos_sin( + point: Union[Point2, torch.Tensor], + cosine: torch.Tensor, + sine: torch.Tensor, + ) -> Point2: + batch_size = max(point.shape[0], cosine.shape[0]) + if isinstance(point, torch.Tensor): + if point.ndim != 2 or point.shape[1] != 2: + raise ValueError( + f"Point tensor must have shape batch_size x 2, " + f"but received {point.shape}." + ) + point_data = point + else: + point_data = point.data + px, py = point_data[:, 0], point_data[:, 1] + new_point_data = torch.empty( + batch_size, 2, device=cosine.device, dtype=cosine.dtype + ) + new_point_data[:, 0] = cosine * px - sine * py + new_point_data[:, 1] = sine * px + cosine * py + return Point2(data=new_point_data) + + def rotate( + self, + point: Union[Point2, torch.Tensor], + jacobians: Optional[List[torch.Tensor]] = None, + ) -> Point2: + self._rotate_shape_check(point) + cosine, sine = self.to_cos_sin() + rotation = SO2._rotate_from_cos_sin(point, cosine, sine) + if jacobians is not None: + self._check_jacobians_list(jacobians) + J1 = torch.stack([-rotation.y(), rotation.x()], dim=1).view(-1, 2, 1) + J2 = self.to_matrix() + jacobians.extend([J1, J2]) + return rotation + + def unrotate( + self, + point: Union[Point2, torch.Tensor], + jacobians: Optional[List[torch.Tensor]] = None, + ) -> Point2: + self._rotate_shape_check(point) + cosine, sine = self.to_cos_sin() + rotation = SO2._rotate_from_cos_sin(point, cosine, -sine) + if jacobians is not None: + self._check_jacobians_list(jacobians) + J1 = torch.stack([rotation.y(), -rotation.x()], dim=1).view(-1, 2, 1) + J2 = self.to_matrix().transpose(2, 1) + jacobians.extend([J1, J2]) + return rotation + + def to_cos_sin(self) -> Tuple[torch.Tensor, torch.Tensor]: + return self[:, 0], self[:, 1] + + def to_matrix(self) -> torch.Tensor: + matrix = torch.empty(self.shape[0], 2, 2).to( + device=self.device, dtype=self.dtype + ) + cosine, sine = self.to_cos_sin() + matrix[:, 0, 0] = cosine + matrix[:, 0, 1] = -sine + matrix[:, 
1, 0] = sine + matrix[:, 1, 1] = cosine + return matrix + + @staticmethod + def hat(tangent_vector: torch.Tensor) -> torch.Tensor: + matrix = torch.zeros(tangent_vector.shape[0], 2, 2).to( + dtype=tangent_vector.dtype, + device=tangent_vector.device, + ) + matrix[:, 0, 1] = -tangent_vector.view(-1) + matrix[:, 1, 0] = tangent_vector.view(-1) + return matrix + + @staticmethod + def vee(matrix: torch.Tensor) -> torch.Tensor: + _check = matrix.ndim == 3 and matrix.shape[1:] == (2, 2) + _check &= matrix[:, 0, 0].abs().max().item() < theseus.constants.EPS + _check &= matrix[:, 1, 1].abs().max().item() < theseus.constants.EPS + _check &= torch.allclose(matrix[:, 0, 1], -matrix[:, 1, 0]) + if not _check: + raise ValueError("Invalid hat matrix for SO2.") + return matrix[:, 1, 0].clone().view(-1, 1) + + def _copy_impl(self, new_name: Optional[str] = None) -> "SO2": + return SO2(data=self.data.clone(), name=new_name) + + # only added to avoid casting downstream + def copy(self, new_name: Optional[str] = None) -> "SO2": + return cast(SO2, super().copy(new_name=new_name)) diff --git a/theseus/geometry/tests/__init__.py b/theseus/geometry/tests/__init__.py new file mode 100644 index 000000000..7bec24cb1 --- /dev/null +++ b/theseus/geometry/tests/__init__.py @@ -0,0 +1,4 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. diff --git a/theseus/geometry/tests/common.py b/theseus/geometry/tests/common.py new file mode 100644 index 000000000..fc9c4ca5f --- /dev/null +++ b/theseus/geometry/tests/common.py @@ -0,0 +1,58 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. 
+ +import torch + +from theseus.constants import EPS +from theseus.utils import numeric_jacobian + + +def check_exp_map(tangent_vector, group_cls): + group = group_cls.exp_map(tangent_vector) + assert torch.allclose( + group_cls.hat(tangent_vector).matrix_exp(), + group.to_matrix(), + atol=EPS, + ) + + +def check_log_map(tangent_vector, group_cls): + assert torch.allclose( + tangent_vector, group_cls.exp_map(tangent_vector).log_map(), atol=EPS + ) + + +def check_compose(group_1, group_2): + Jcmp = [] + composition = group_1.compose(group_2, jacobians=Jcmp) + expected_matrix = group_1.to_matrix() @ group_2.to_matrix() + expected_jacs = numeric_jacobian( + lambda groups: groups[0].compose(groups[1]), + [group_1, group_2], + ) + assert torch.allclose(composition.to_matrix(), expected_matrix, atol=EPS) + assert torch.allclose(Jcmp[0], expected_jacs[0]) + assert torch.allclose(Jcmp[1], expected_jacs[1]) + + +def check_inverse(group): + tangent_vector = group.log_map() + inverse_group = group.exp_map(-tangent_vector) + jac = [] + inverse_result = group.inverse(jacobian=jac) + expected_jac = numeric_jacobian(lambda groups: groups[0].inverse(), [group]) + assert torch.allclose( + inverse_group.to_matrix(), inverse_result.to_matrix(), atol=EPS + ) + assert torch.allclose(jac[0], expected_jac[0]) + + +def check_adjoint(group, tangent_vector): + group_matrix = group.to_matrix() + tangent_left = group.__class__.adjoint(group) @ tangent_vector.unsqueeze(2) + tangent_right = group.__class__.vee( + group_matrix @ group.hat(tangent_vector) @ group.inverse().to_matrix() + ) + assert torch.allclose(tangent_left.squeeze(2), tangent_right, atol=EPS) diff --git a/theseus/geometry/tests/test_point_types.py b/theseus/geometry/tests/test_point_types.py new file mode 100644 index 000000000..1c3960813 --- /dev/null +++ b/theseus/geometry/tests/test_point_types.py @@ -0,0 +1,49 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. 
+# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +import pytest # noqa: F401 +import torch + +import theseus as th + + +def test_xy_point2(): + for _ in range(100): + for batch_size in [1, 10, 100]: + point = th.Point2(data=torch.randn(batch_size, 2)) + assert point.x().allclose(point.data[:, 0]) + assert point.y().allclose(point.data[:, 1]) + + +def test_xyz_point3(): + for _ in range(100): + for batch_size in [1, 10, 100]: + point = th.Point3(data=torch.randn(batch_size, 3)) + assert point.x().allclose(point.data[:, 0]) + assert point.y().allclose(point.data[:, 1]) + assert point.z().allclose(point.data[:, 2]) + + +def test_point_operations_return_correct_type(): + for point_cls in [th.Point2, th.Point3]: + p1 = point_cls() + p2 = point_cls() + + assert isinstance(p1 + p2, point_cls) + assert isinstance(p1 - p2, point_cls) + assert isinstance(p1 * p2, point_cls) + assert isinstance(p1 / p2, point_cls) + assert isinstance(p1.abs(), point_cls) + assert isinstance(-p1, point_cls) + assert isinstance(p1.compose(p2), point_cls) + assert isinstance(p1.retract(p2.data), point_cls) + + # for these, test result also since this method was overridden + p1_copy = p1.copy() + assert isinstance(p1_copy, point_cls) + assert p1_copy.allclose(p1) + exp_map = point_cls.exp_map(p2.data) + assert isinstance(exp_map, point_cls) + assert exp_map.allclose(p2) diff --git a/theseus/geometry/tests/test_se2.py b/theseus/geometry/tests/test_se2.py new file mode 100644 index 000000000..3961612be --- /dev/null +++ b/theseus/geometry/tests/test_se2.py @@ -0,0 +1,122 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. 
+ +import numpy as np +import pytest # noqa: F401 +import torch + +import theseus as th +from theseus.core.tests.common import check_copy_var +from theseus.utils import numeric_jacobian + +from .common import ( + check_adjoint, + check_compose, + check_exp_map, + check_inverse, + check_log_map, +) + + +def create_random_se2(batch_size, rng): + theta = torch.rand(batch_size, 1, generator=rng) * 2 * np.pi - np.pi + u = torch.randn(batch_size, 2) + tangent_vector = torch.cat([u, theta], dim=1) + return th.SE2.exp_map(tangent_vector.double()) + + +def test_exp_map(): + for batch_size in [1, 20, 100]: + theta = torch.from_numpy(np.linspace(-np.pi, np.pi, batch_size)) + u = torch.randn(batch_size, 2) + tangent_vector = torch.cat([u, theta.unsqueeze(1)], dim=1) + check_exp_map(tangent_vector.double(), th.SE2) + + +def test_log_map(): + for batch_size in [1, 20, 100]: + theta = torch.from_numpy(np.linspace(-np.pi, np.pi, batch_size)) + u = torch.randn(batch_size, 2) + tangent_vector = torch.cat([u, theta.unsqueeze(1)], dim=1) + check_log_map(tangent_vector, th.SE2) + + +def test_compose(): + rng = torch.Generator() + rng.manual_seed(0) + for batch_size in [1, 20, 100]: + se2_1 = create_random_se2(batch_size, rng) + se2_2 = create_random_se2(batch_size, rng) + check_compose(se2_1, se2_2) + + +def test_inverse(): + rng = torch.Generator() + rng.manual_seed(0) + for batch_size in [1, 20, 100]: + se2 = create_random_se2(batch_size, rng) + check_inverse(se2) + + +def test_adjoint(): + rng = torch.Generator() + rng.manual_seed(0) + for batch_size in [1, 20, 100]: + se2 = create_random_se2(batch_size, rng) + tangent = torch.randn(batch_size, 3).double() + check_adjoint(se2, tangent) + + +def test_copy(): + rng = torch.Generator() + se2 = create_random_se2(1, rng) + check_copy_var(se2) + + +def test_transform_to(): + pose = th.SE2(x_y_theta=torch.DoubleTensor([1, 2, np.pi / 2.0]).view(1, -1)) + point = th.Point2(data=torch.DoubleTensor([-1, 4]).view(1, -1)) + expected = 
th.Point2(data=torch.DoubleTensor([2, 2]).view(1, -1)) + jacobians = [] + actual = pose.transform_to(point, jacobians=jacobians) + jacobians_from_tensor = [] + actual_from_tensor = pose.transform_to(point.data, jacobians=jacobians_from_tensor) + + expected_jac = numeric_jacobian( + lambda groups: groups[0].transform_to(groups[1]), + [pose, point], + function_dim=2, + ) + assert expected.data.allclose(actual.data) + assert expected.data.allclose(actual_from_tensor.data) + for i in range(2): + assert torch.allclose(jacobians[i], expected_jac[i]) + assert torch.allclose(jacobians_from_tensor[i], expected_jac[i]) + + +def test_xy_jacobian(): + rng = torch.Generator() + rng.manual_seed(0) + for batch_size in [1, 20, 100]: + se2 = create_random_se2(batch_size, rng) + jacobian = [] + se2.xy(jacobians=jacobian) + expected_jac = numeric_jacobian( + lambda groups: th.Point2(groups[0].xy()), [se2], function_dim=2 + ) + torch.allclose(jacobian[0], expected_jac[0]) + + +def test_theta_jacobian(): + rng = torch.Generator() + rng.manual_seed(0) + for batch_size in [1, 20, 100]: + se2 = create_random_se2(batch_size, rng) + jacobian = [] + se2.theta(jacobians=jacobian) + expected_jac = numeric_jacobian( + lambda groups: th.Vector(data=groups[0].theta()), [se2], function_dim=1 + ) + torch.allclose(jacobian[0], expected_jac[0]) diff --git a/theseus/geometry/tests/test_so2.py b/theseus/geometry/tests/test_so2.py new file mode 100644 index 000000000..89bbe2da9 --- /dev/null +++ b/theseus/geometry/tests/test_so2.py @@ -0,0 +1,110 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. 
+ +import numpy as np +import pytest # noqa: F401 +import torch + +import theseus as th +from theseus.constants import EPS +from theseus.core.tests.common import check_copy_var +from theseus.utils import numeric_jacobian + +from .common import ( + check_adjoint, + check_compose, + check_exp_map, + check_inverse, + check_log_map, +) + + +def _create_random_so2(batch_size, rng): + theta = torch.rand(batch_size, 1, generator=rng) * 2 * np.pi - np.pi + return th.SO2(theta=theta.double()) + + +def test_exp_map(): + for batch_size in [1, 20, 100]: + theta = torch.from_numpy(np.linspace(-np.pi, np.pi, batch_size)).view(-1, 1) + check_exp_map(theta, th.SO2) + + +def test_log_map(): + for batch_size in [1, 2, 100]: + theta = torch.from_numpy(np.linspace(-np.pi, np.pi, batch_size)).view(-1, 1) + check_log_map(theta, th.SO2) + + +def test_compose(): + rng = torch.Generator() + rng.manual_seed(0) + for batch_size in [1, 20, 100]: + so2_1 = _create_random_so2(batch_size, rng) + so2_2 = _create_random_so2(batch_size, rng) + check_compose(so2_1, so2_2) + + +def test_inverse(): + rng = torch.Generator() + rng.manual_seed(0) + for batch_size in [1, 20, 100]: + so2 = _create_random_so2(batch_size, rng) + check_inverse(so2) + + +def test_rotate_and_unrotate(): + rng = torch.Generator() + rng.manual_seed(0) + for _ in range(10): # repeat a few times + for batch_size in [1, 20, 100]: + so2 = _create_random_so2(batch_size, rng) + # Tests that rotate works from tensor. 
unrotate() would work similarly), but + # it's also tested indirectly by test_transform_to() for SE2 + point_tensor = torch.randn(batch_size, 2).double() + jacobians_rotate = [] + rotated_point = so2.rotate(point_tensor, jacobians=jacobians_rotate) + expected_rotated_data = so2.to_matrix() @ point_tensor.unsqueeze(2) + jacobians_unrotate = [] + unrotated_point = so2.unrotate(rotated_point, jacobians_unrotate) + + # Check the operation result + assert torch.allclose( + expected_rotated_data.squeeze(2), rotated_point.data, atol=EPS + ) + assert torch.allclose(point_tensor, unrotated_point.data, atol=EPS) + + # Check the jacobians + # function_dim = 2 because rotate(theta, (x, y)) --> (x_new, y_new) + expected_jac = numeric_jacobian( + lambda groups: groups[0].rotate(groups[1]), + [so2, th.Point2(point_tensor)], + function_dim=2, + ) + assert torch.allclose(jacobians_rotate[0], expected_jac[0]) + assert torch.allclose(jacobians_rotate[1], expected_jac[1]) + expected_jac = numeric_jacobian( + lambda groups: groups[0].unrotate(groups[1]), + [so2, rotated_point], + function_dim=2, + ) + assert torch.allclose(jacobians_unrotate[0], expected_jac[0]) + assert torch.allclose(jacobians_unrotate[1], expected_jac[1]) + + +def test_adjoint(): + rng = torch.Generator() + rng.manual_seed(0) + for batch_size in [1, 20, 100]: + so2 = _create_random_so2(batch_size, rng) + tangent = torch.randn(batch_size, 1).double() + check_adjoint(so2, tangent) + + +def test_copy(): + rng = torch.Generator() + rng.manual_seed(0) + so2 = _create_random_so2(1, rng) + check_copy_var(so2) diff --git a/theseus/geometry/tests/test_vector.py b/theseus/geometry/tests/test_vector.py new file mode 100644 index 000000000..52f10e9fe --- /dev/null +++ b/theseus/geometry/tests/test_vector.py @@ -0,0 +1,178 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. 
+ +import numpy as np +import pytest # noqa: F401 +import torch + +import theseus as th +from theseus.core.tests.common import check_copy_var + +torch.manual_seed(0) + + +def test_item(): + for i in range(1, 4): + for j in range(1, 5): + t1 = torch.rand(i, j) + v1 = th.Vector(data=t1.clone(), name="v1") + assert torch.allclose(v1.data, t1) + v1[0, 0] = 11.1 + assert not torch.allclose(v1.data, t1) + t1[0, 0] = 11.1 + assert torch.allclose(v1.data, t1) + + +def test_add(): + for i in range(1, 4): + for j in range(1, 5): + t1 = torch.rand(i, j) + v1 = th.Vector(data=t1.clone(), name="v1") + t2 = torch.rand(i, j) + v2 = th.Vector(data=t2.clone(), name="v2") + vsum = th.Vector(data=t1 + t2) + assert (v1 + v2).allclose(vsum) + assert v1.compose(v2).allclose(vsum) + + +def test_sub(): + for i in range(1, 4): + for j in range(1, 5): + t1 = torch.rand(i, j) + v1 = th.Vector(data=t1.clone(), name="v1") + t2 = torch.rand(i, j) + v2 = th.Vector(data=t2.clone(), name="v2") + assert (v1 - v2).allclose(th.Vector(data=t1 - t2)) + v2 = -v2 + assert (v1 + v2).allclose(th.Vector(data=t1 - t2)) + + +def test_mul(): + for i in range(1, 4): + for j in range(1, 5): + t1 = torch.rand(i, j) + v1 = th.Vector(data=t1.clone(), name="v1") + assert (v1 * torch.tensor(2.1)).allclose(th.Vector(data=t1 * 2.1)) + assert (torch.tensor(1.1) * v1).allclose(th.Vector(data=t1 * 1.1)) + + +def test_div(): + for i in range(1, 4): + for j in range(1, 5): + t1 = torch.rand(i, j) + v1 = th.Vector(data=t1.clone(), name="v1") + assert (v1 / torch.tensor(2.1)).allclose(th.Vector(data=t1 / 2.1)) + + +def test_matmul(): + for i in range(1, 4): + for j in range(1, 4): + for k in range(1, 4): + t = torch.rand(i, j, k) + t1 = torch.rand(i, j) + v1 = th.Vector(data=t1.clone(), name="v1") + v1t = v1 @ t + assert v1t.allclose((t1.unsqueeze(1) @ t).squeeze(1)) + assert v1t.shape == (i, k) + t2 = torch.rand(i, k) + v2 = th.Vector(data=t2.clone(), name="v2") + tv2 = t @ v2 + assert tv2.allclose((t @ 
t2.unsqueeze(2)).squeeze(2)) + assert tv2.shape == (i, j) + + +def test_dot(): + for i in range(1, 4): + for j in range(1, 5): + t1 = torch.rand(i, j) + v1 = th.Vector(data=t1.clone(), name="v1") + t2 = torch.rand(i, j) + v2 = th.Vector(data=t2.clone(), name="v2") + assert torch.allclose(v1.dot(v2), torch.mul(t1, t2).sum(-1)) + assert torch.allclose(v1.inner(v2), torch.mul(t1, t2).sum(-1)) + + +def test_outer(): + for i in range(1, 4): + for j in range(1, 5): + t1 = torch.rand(i, j) + v1 = th.Vector(data=t1.clone(), name="v1") + t2 = torch.rand(i, j) + v2 = th.Vector(data=t2.clone(), name="v2") + assert torch.allclose( + v1.outer(v2), torch.matmul(t1.unsqueeze(2), t2.unsqueeze(1)) + ) + + +def test_abs(): + for i in range(1, 4): + for j in range(1, 5): + t1 = torch.rand(i, j) + v1 = th.Vector(data=t1.clone(), name="v1") + assert v1.abs().allclose(th.Vector(data=t1.abs())) + + +def test_norm(): + for i in range(1, 4): + for j in range(1, 5): + t1 = torch.rand(i, j) + v1 = th.Vector(data=t1.clone(), name="v1") + assert v1.norm() == t1.norm() + assert v1.norm(p="fro") == torch.norm(t1, p="fro") + + +def test_cat(): + for i in range(1, 4): + for j in range(1, 5): + t1 = torch.rand(i, j) + v1 = th.Vector(data=t1.clone(), name="v1") + t2 = torch.rand(i, j) + v2 = th.Vector(data=t2.clone(), name="v2") + t3 = torch.rand(i, j) + v3 = th.Vector(data=t3.clone(), name="v3") + assert v1.cat(v2).allclose(th.Vector(data=torch.cat((t1, t2), 1))) + assert v1.cat((v2, v3)).allclose(th.Vector(data=torch.cat((t1, t2, t3), 1))) + + +def test_local(): + for i in range(1, 4): + for j in range(1, 5): + t1 = torch.rand(i, j) + v1 = th.Vector(data=t1.clone(), name="v1") + t2 = torch.rand(i, j) + v2 = th.Vector(data=t2.clone(), name="v2") + assert torch.allclose(v1._local_impl(v2), t2 - t1) + assert torch.allclose(v1.local(v2), t2 - t1) + assert torch.allclose(th.local(v1, v2), t2 - t1) + + +def test_retract(): + for i in range(1, 4): + for j in range(1, 5): + t1 = torch.rand(i, j) + v1 = 
th.Vector(data=t1.clone(), name="v1") + d = torch.rand(i, j) + assert v1._retract_impl(d).allclose(th.Vector(data=t1 + d)) + assert v1.retract(d).allclose(th.Vector(data=t1 + d)) + assert th.retract(v1, d).allclose(th.Vector(data=t1 + d)) + + +def test_update(): + for dof in range(1, 21): + v = th.Vector(dof) + for _ in range(100): + rng = np.random.default_rng() + batch_size = rng.integers(low=1, high=100) + data = torch.rand(batch_size, dof) + v.update(data) + assert torch.allclose(data, v.data) + + +def test_copy(): + for i in range(1, 4): + for j in range(1, 5): + t1 = torch.rand(i, j) + v = th.Vector(data=t1.clone(), name="v") + check_copy_var(v) diff --git a/theseus/geometry/vector.py b/theseus/geometry/vector.py new file mode 100644 index 000000000..d743c85bf --- /dev/null +++ b/theseus/geometry/vector.py @@ -0,0 +1,175 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. 
+ +from typing import List, Optional, Tuple, Union, cast + +import torch + +from theseus.geometry.manifold import Manifold + +from .lie_group import LieGroup + + +class Vector(LieGroup): + def __init__( + self, + dof: Optional[int] = None, + data: Optional[torch.Tensor] = None, + name: Optional[str] = None, + dtype: Optional[torch.dtype] = None, + ): + if dof is not None and data is not None: + raise ValueError("Please provide only one of dof or data.") + if dof is None: + if data is not None and data.ndim == 1: + data = data.view(1, -1) + if data is None or data.ndim != 2: + raise ValueError( + "If dof not provided, data must " + "be a tensor of shape (batch_size, dof)" + ) + dof = data.shape[1] + + super().__init__(dof, data=data, name=name, dtype=dtype) + + # Vector variables are of shape [batch_size, dof] + @staticmethod + def _init_data(dof: int) -> torch.Tensor: # type: ignore + return torch.zeros(1, dof) + + def dof(self) -> int: + return self.data.shape[1] + + def __repr__(self) -> str: + return ( + f"{self.__class__.__name__}" + f"(dof={self.data.shape[1]}, data={self.data}, name={self.name})" + ) + + def allclose(self, other: "Vector", *args, **kwargs) -> bool: + return torch.allclose(self.data, other.data, *args, **kwargs) + + def __add__(self, other: "Vector") -> "Vector": + return self.__class__._compose_impl(self, other) + + def __sub__(self, other: "Vector") -> "Vector": + return self.__class__(data=torch.sub(self.data, other.data)) + + def __neg__(self) -> "Vector": + return self.__class__._inverse_impl(self) + + def __mul__(self, other: Union["Vector", torch.Tensor]) -> "Vector": + if isinstance(other, Vector): + return self.__class__(data=torch.mul(self.data, other.data)) + elif isinstance(other, torch.Tensor): + return self.__class__(data=torch.mul(self.data, other)) + else: + raise ValueError( + f"expected type 'Vector' or 'Tensor', got {type(other).__name__}" + ) + + __rmul__ = __mul__ + + def __matmul__(self, other: torch.Tensor) -> 
torch.Tensor: + if not isinstance(other, torch.Tensor): + raise ValueError("Vector matmul only accepts torch tensors.") + if other.ndim != 3: + raise ValueError( + f"Vector matmul only accepts tensors with ndim=3 " + f"but tensor has ndim={other.ndim}." + ) + return torch.bmm(self.data.unsqueeze(1), other.data).squeeze(1) + + def __rmatmul__(self, other: torch.Tensor) -> torch.Tensor: + if isinstance(other, Vector): + raise ValueError("Vector matmul only accepts torch tensors.") + if other.ndim != 3: + raise ValueError( + f"Vector matmul only accepts tensors with ndim=3 but " + f"tensor has ndim={other.ndim}." + ) + return torch.bmm(other.data, self.data.unsqueeze(2)).squeeze(2) + + def __truediv__(self, other: Union["Vector", torch.Tensor]) -> "Vector": + if isinstance(other, Vector): + return self.__class__(data=torch.div(self.data, other.data)) + elif isinstance(other, torch.Tensor): + return self.__class__(data=torch.div(self.data, other)) + else: + raise ValueError( + f"expected type 'Vector' or 'Tensor', got {type(other).__name__}" + ) + + def dot(self, other: "Vector") -> torch.Tensor: + return torch.mul(self.data, other.data).sum(-1) + + inner = dot + + def abs(self) -> "Vector": + return self.__class__(data=torch.abs(self.data)) + + def outer(self, other: "Vector") -> torch.Tensor: + return torch.matmul(self.data.unsqueeze(2), other.data.unsqueeze(1)) + + def norm(self, *args, **kwargs) -> torch.Tensor: + return torch.norm(self.data, *args, **kwargs) + + def cat(self, vecs: Union["Vector", Tuple["Vector"], List["Vector"]]) -> "Vector": + if isinstance(vecs, Vector): + result = torch.cat([self.data, vecs.data], 1) + else: + result = torch.cat([self.data] + [vec.data for vec in vecs], 1) + return Vector(data=result) + + def _local_impl(self, vec2: Manifold) -> torch.Tensor: + if not isinstance(vec2, Vector): + raise ValueError("Non-vector inputs for Vector.local()") + else: + return vec2.data - self.data + + def _local_jacobian(self, _: Manifold) -> 
Tuple[torch.Tensor, torch.Tensor]: + identity = torch.eye(self.dof(), dtype=self.dtype, device=self.device).repeat( + self.shape[0], 1, 1 + ) + return -identity, identity + + def _retract_impl(self, delta: torch.Tensor) -> "Vector": + return self.__class__(data=torch.add(self.data, delta)) + + def _compose_impl(self, vec2: LieGroup) -> "Vector": + return self.__class__(data=torch.add(self.data, vec2.data)) + + def _compose_jacobian(self, _: LieGroup) -> Tuple[torch.Tensor, torch.Tensor]: + identity = torch.eye(self.dof(), dtype=self.dtype, device=self.device).repeat( + self.shape[0], 1, 1 + ) + return (identity, identity.clone()) + + def _inverse_impl(self) -> "Vector": + return self.__class__(data=-self.data) + + def _adjoint_impl(self) -> torch.Tensor: + return ( + torch.eye(self.dof(), dtype=self.dtype, device=self.device) + .repeat(self.shape[0], 1, 1) + .to(self.device) + ) + + @staticmethod + def exp_map(tangent_vector: torch.Tensor) -> LieGroup: + return Vector(data=tangent_vector.clone()) + + def _log_map_impl(self) -> torch.Tensor: + return self.data.clone() + + def __hash__(self): + return id(self) + + def _copy_impl(self, new_name: Optional[str] = None) -> "Vector": + return self.__class__(data=self.data.clone(), name=new_name) + + # added to avoid casting downstream + def copy(self, new_name: Optional[str] = None) -> "Vector": + return cast(Vector, super().copy(new_name=new_name)) diff --git a/theseus/optimizer/__init__.py b/theseus/optimizer/__init__.py new file mode 100644 index 000000000..8dd61ba40 --- /dev/null +++ b/theseus/optimizer/__init__.py @@ -0,0 +1,10 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. 
+ +from .dense_linearization import DenseLinearization +from .linearization import Linearization +from .optimizer import Optimizer, OptimizerInfo +from .sparse_linearization import SparseLinearization +from .variable_ordering import VariableOrdering diff --git a/theseus/optimizer/autograd/__init__.py b/theseus/optimizer/autograd/__init__.py new file mode 100644 index 000000000..401423146 --- /dev/null +++ b/theseus/optimizer/autograd/__init__.py @@ -0,0 +1,10 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +from .sparse_autograd import CholmodSolveFunction + +__all__ = [ + "CholmodSolveFunction", +] diff --git a/theseus/optimizer/autograd/sparse_autograd.py b/theseus/optimizer/autograd/sparse_autograd.py new file mode 100644 index 000000000..578832f5c --- /dev/null +++ b/theseus/optimizer/autograd/sparse_autograd.py @@ -0,0 +1,130 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. 
+ +import torch +from sksparse.cholmod import Factor as CholeskyDecomposition + +from ..linear_system import SparseStructure + + +class CholmodSolveFunction(torch.autograd.Function): + @staticmethod + def forward(ctx, *args, **kwargs): + At_val: torch.Tensor = args[0] + b: torch.Tensor = args[1] + sparse_structure: SparseStructure = args[2] + symbolic_decomposition: CholeskyDecomposition = args[3] + damping: float = args[4] + + At_val_cpu = At_val.cpu() + b_cpu = b.cpu() + batch_size = At_val.shape[0] + targs = {"dtype": At_val.dtype, "device": "cpu"} + x_cpu = torch.empty(size=(batch_size, sparse_structure.num_cols), **targs) + cholesky_decompositions = [] + + for i in range(batch_size): + + # compute decomposition from symbolic decomposition + At_i = sparse_structure.csc_transpose(At_val_cpu[i, :]) + cholesky_decomposition = symbolic_decomposition.cholesky_AAt(At_i, damping) + + # solve + Atb_i = At_i @ b_cpu[i, :] + x_cpu[i, :] = torch.Tensor(cholesky_decomposition(Atb_i)) + cholesky_decompositions.append(cholesky_decomposition) + + ctx.b_cpu = b_cpu + ctx.x_cpu = x_cpu + ctx.At_val_cpu = At_val_cpu + ctx.sparse_structure = sparse_structure + ctx.cholesky_decompositions = cholesky_decompositions + + return x_cpu.to(device=At_val.device) + + # Let v row vector, and w column vector of dimension n, m, and + # A an nxm matrix. Then + # v * A * w = Sum(v_i * A_ij * w_j) = [v (X) w] . A + # Where by [v (X) w] we mean the nxm matrix which is the + # tensor product of v and w, and "." is the componentwise + # dot product of the two nxm matrices. + # + # Now, we have + # At * A * x = At * b + # + # Therefore if A, b, x are parametrized (eg. by u) we have deriving + # + # (i) At'*A*x + At*A'*x + At*A*x' = At'*b + At*b' + # + # indicating A'=dA/du, b'=db/du, x'=dx/du + # + # Now, assume we have a function f of x, and G = df/dx be the + # gradient that we consider a row vector, so G*x' is df/du. 
+ # + # To compute df/db and df/dA, make x' explicit in the (i): + # + # x' = (At * A)^{-1} (At*b' + At'*b - At'*A*x - At*A'*x) + # + # So multiplying by the row vector G we have + # + # G*x' = G*(At*A)^{-1}*At * b' + G*(At*A)^{-1} * (At'*b - At'*A*x - At*A'*x) + # = H * At * b' + H * At' * (b - A*x) - H * At * A' * x + # = (H*At) * b' + [H (X) (b-A*x)] . At' - [H*At (X) x] . A' + # = (H*At) * b' + [(b-A*x) (X) H - H*At (X) x] . A' + # after putting H = G*(At*A)^{-1} for convenience. + # + # Therefore after switching to column vectors we have + # df/db = A*H + # (where H = (At*A)^{-1}*G), while + # df/dA = (b-A*x) (X) H - A*H (X) x + # The two tensor products means that to compute the gradient of + # a block of A we have to multiply entries taken from (b-A*x) and H, + # and blocks taken from A*H and x. + # + # Here we assume we are provided x and H after the linear solver + # has been applied to Atb and the gradient G. + + # NOTE: in the torch docs the backward is also marked as "staticmethod", I think it makes sense + @staticmethod + def backward(ctx, grad_output): + + batch_size = grad_output.shape[0] + targs = {"dtype": grad_output.dtype, "device": "cpu"} # grad_output.device} + H = torch.empty(size=(batch_size, ctx.sparse_structure.num_cols), **targs) + AH = torch.empty(size=(batch_size, ctx.sparse_structure.num_rows), **targs) + b_Ax = ctx.b_cpu.clone() + grad_output_cpu = grad_output.cpu() + + for i in range(batch_size): + + H[i, :] = torch.Tensor( + ctx.cholesky_decompositions[i](grad_output_cpu[i, :]) + ) + + A_i = ctx.sparse_structure.csr_straight(ctx.At_val_cpu[i, :]) + AH[i, :] = torch.Tensor(A_i @ H[i, :]) + b_Ax[i, :] -= torch.Tensor(A_i @ ctx.x_cpu[i, :]) + + # now we fill values of a matrix with structure identical to A with + # selected entries from the difference of tensor products: + # b_Ax (X) H - AH (X) x + # NOTE: this row-wise manipulation can be much faster in C++ or Cython + A_col_ind = ctx.sparse_structure.col_ind + A_row_ptr = 
ctx.sparse_structure.row_ptr + batch_size = grad_output.shape[0] + A_grad = torch.empty( + size=(batch_size, len(A_col_ind)), + device="cpu", + ) # return value, A's grad + for r in range(len(A_row_ptr) - 1): + start, end = A_row_ptr[r], A_row_ptr[r + 1] + columns = A_col_ind[start:end] # col indices, for this row + A_grad[:, start:end] = ( + b_Ax[:, r].unsqueeze(1) * H[:, columns] + - AH[:, r].unsqueeze(1) * ctx.x_cpu[:, columns] + ) + + dev = grad_output.device + return A_grad.to(device=dev), AH.to(device=dev), None, None, None diff --git a/theseus/optimizer/autograd/tests/test_sparse_backward.py b/theseus/optimizer/autograd/tests/test_sparse_backward.py new file mode 100644 index 000000000..4eccf3c35 --- /dev/null +++ b/theseus/optimizer/autograd/tests/test_sparse_backward.py @@ -0,0 +1,61 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +import pytest # noqa: F401 +import torch +from sksparse.cholmod import analyze_AAt +from torch.autograd import gradcheck + +import theseus as th +from theseus.optimizer.autograd import CholmodSolveFunction + + +def _build_sparse_mat(batch_size): + torch.manual_seed(37) + all_cols = list(range(10)) + col_ind = [] + row_ptr = [0] + for i in range(12): + start = max(0, i - 2) + end = min(i + 1, 10) + col_ind += all_cols[start:end] + row_ptr.append(len(col_ind)) + data = torch.randn(size=(batch_size, len(col_ind)), dtype=torch.double) + return 12, 10, data, col_ind, row_ptr + + +def test_sparse_backward_step(): + void_objective = th.Objective() + void_ordering = th.VariableOrdering(void_objective, default_order=False) + solver = th.CholmodSparseSolver( + void_objective, linearization_kwargs={"ordering": void_ordering}, damping=0.01 + ) + linearization = solver.linearization + + batch_size = 4 + void_objective._batch_size = batch_size + num_rows, num_cols, data, col_ind, row_ptr = 
_build_sparse_mat(batch_size) + linearization.num_rows = num_rows + linearization.num_cols = num_cols + linearization.A_val = data + linearization.A_col_ind = col_ind + linearization.A_row_ptr = row_ptr + linearization.b = torch.randn(size=(batch_size, num_rows), dtype=torch.double) + + linearization.A_val.requires_grad = True + linearization.b.requires_grad = True + # Only need this line for the test since the objective is a mock + solver._symbolic_cholesky_decomposition = analyze_AAt( + linearization.structure().mock_csc_transpose() + ) + inputs = ( + linearization.A_val, + linearization.b, + linearization.structure(), + solver._symbolic_cholesky_decomposition, + solver._damping, + ) + + assert gradcheck(CholmodSolveFunction.apply, inputs, eps=3e-4, atol=1e-3) diff --git a/theseus/optimizer/dense_linearization.py b/theseus/optimizer/dense_linearization.py new file mode 100644 index 000000000..ebfb920a5 --- /dev/null +++ b/theseus/optimizer/dense_linearization.py @@ -0,0 +1,62 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. 
+ +from typing import Optional + +import torch + +from theseus.core import Objective + +from .linearization import Linearization +from .variable_ordering import VariableOrdering + + +class DenseLinearization(Linearization): + def __init__( + self, + objective: Objective, + ordering: Optional[VariableOrdering] = None, + **kwargs + ): + super().__init__(objective, ordering) + self.A: torch.Tensor = None + self.AtA: torch.Tensor = None + self.b: torch.Tensor = None + self.Atb: torch.Tensor = None + + def _linearize_jacobian_impl(self): + err_row_idx = 0 + self.A = torch.zeros( + (self.objective.batch_size, self.num_rows, self.num_cols), + device=self.objective.device, + dtype=self.objective.dtype, + ) + self.b = torch.zeros( + (self.objective.batch_size, self.num_rows), + device=self.objective.device, + dtype=self.objective.dtype, + ) + for cost_function in self.objective: + jacobians, error = cost_function.weighted_jacobians_error() + num_rows = cost_function.dim() + for var_idx_in_cost_function, var_jacobian in enumerate(jacobians): + var_idx_in_order = self.ordering.index_of( + cost_function.optim_var_at(var_idx_in_cost_function).name + ) + var_start_col = self.var_start_cols[var_idx_in_order] + + num_cols = var_jacobian.shape[2] + row_slice = slice(err_row_idx, err_row_idx + num_rows) + col_slice = slice(var_start_col, var_start_col + num_cols) + self.A[:, row_slice, col_slice] = var_jacobian + + self.b[:, row_slice] = -error + err_row_idx += cost_function.dim() + + def _linearize_hessian_impl(self): + self._linearize_jacobian_impl() + At = self.A.transpose(1, 2) + self.AtA = At.bmm(self.A) + self.Atb = At.bmm(self.b.unsqueeze(2)) diff --git a/theseus/optimizer/linear/__init__.py b/theseus/optimizer/linear/__init__.py new file mode 100644 index 000000000..2e8377d76 --- /dev/null +++ b/theseus/optimizer/linear/__init__.py @@ -0,0 +1,9 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. 
+# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +from .dense_solver import CholeskyDenseSolver, DenseSolver, LUDenseSolver +from .linear_optimizer import LinearOptimizer +from .linear_solver import LinearSolver +from .sparse_solver import CholmodSparseSolver diff --git a/theseus/optimizer/linear/dense_solver.py b/theseus/optimizer/linear/dense_solver.py new file mode 100644 index 000000000..bdf9294b9 --- /dev/null +++ b/theseus/optimizer/linear/dense_solver.py @@ -0,0 +1,158 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +import abc +import warnings +from typing import Any, Dict, Optional, Type + +import torch +import torch.linalg + +from theseus.core import Objective +from theseus.optimizer import DenseLinearization, Linearization + +from .linear_solver import LinearSolver + + +class DenseSolver(LinearSolver): + def __init__( + self, + objective: Objective, + linearization_cls: Optional[Type[Linearization]] = None, + linearization_kwargs: Optional[Dict[str, Any]] = None, + check_singular: bool = False, + ): + linearization_cls = linearization_cls or DenseLinearization + if linearization_cls != DenseLinearization: + raise RuntimeError( + "DenseSolver only works with theseus.nonlinear.DenseLinearization, " + f"but {linearization_cls} was provided." + ) + super().__init__(objective, linearization_cls, linearization_kwargs) + self.linearization: DenseLinearization = self.linearization + self._check_singular = check_singular + + @staticmethod + def _apply_damping( + matrix: torch.Tensor, + damping: float, + ellipsoidal: bool = True, + eps: float = 1e-8, + ) -> torch.Tensor: + if matrix.ndim != 3: + raise ValueError( + "Matrix must have a 3 dimensions, the first one being a batch dimension." 
+ ) + _, n, m = matrix.shape + if n != m: + raise ValueError("Matrix must be square.") + + # See Nocedal and Wright, Numerical Optimization, pp. 260 and 261 + # https://www.csie.ntu.edu.tw/~r97002/temp/num_optimization.pdf + if ellipsoidal: + # Add eps to guard against ill-conditioned matrix + damped_D = torch.diag_embed(damping * matrix.diagonal(dim1=1, dim2=2) + eps) + else: + damped_D = damping * torch.eye( + n, device=matrix.device, dtype=matrix.dtype + ).unsqueeze(0) + return matrix + damped_D + + def _apply_damping_and_solve( + self, + Atb: torch.Tensor, + AtA: torch.Tensor, + damping: Optional[float] = None, + ellipsoidal_damping: bool = True, + damping_eps: float = 1e-8, + ) -> torch.Tensor: + if damping: + AtA = DenseSolver._apply_damping( + AtA, damping, ellipsoidal=ellipsoidal_damping, eps=damping_eps + ) + return self._solve_sytem(Atb, AtA) + + @abc.abstractmethod + def _solve_sytem(self, Atb: torch.Tensor, AtA: torch.Tensor) -> torch.Tensor: + pass + + def solve( + self, + damping: Optional[float] = None, + ellipsoidal_damping: bool = True, + damping_eps: float = 1e-8, + **kwargs, + ) -> torch.Tensor: + if self._check_singular: + AtA = self.linearization.AtA + Atb = self.linearization.Atb + with torch.no_grad(): + output = torch.zeros(AtA.shape[0], AtA.shape[1]).to(AtA.device) + _, _, infos = torch.lu(AtA, get_infos=True) + good_idx = infos.bool().logical_not() + if not good_idx.all(): + warnings.warn( + "Singular matrix found in batch, solution will be set " + "to all 0 for all singular matrices.", + RuntimeWarning, + ) + AtA = AtA[good_idx] + Atb = Atb[good_idx] + solution = self._apply_damping_and_solve( + Atb, + AtA, + damping=damping, + ellipsoidal_damping=ellipsoidal_damping, + damping_eps=damping_eps, + ) + output[good_idx] = solution + return output + else: + return self._apply_damping_and_solve( + self.linearization.Atb, + self.linearization.AtA, + damping=damping, + ellipsoidal_damping=ellipsoidal_damping, + damping_eps=damping_eps, + ) + + 
+class LUDenseSolver(DenseSolver): + def __init__( + self, + objective: Objective, + linearization_cls: Optional[Type[Linearization]] = DenseLinearization, + linearization_kwargs: Optional[Dict[str, Any]] = None, + check_singular: bool = False, + ): + super().__init__( + objective, + linearization_cls, + linearization_kwargs, + check_singular=check_singular, + ) + + def _solve_sytem(self, Atb: torch.Tensor, AtA: torch.Tensor) -> torch.Tensor: + return torch.linalg.solve(AtA, Atb).squeeze(2) + + +class CholeskyDenseSolver(DenseSolver): + def __init__( + self, + objective: Objective, + linearization_cls: Optional[Type[Linearization]] = DenseLinearization, + linearization_kwargs: Optional[Dict[str, Any]] = None, + check_singular: bool = False, + ): + super().__init__( + objective, + linearization_cls, + linearization_kwargs, + check_singular=check_singular, + ) + + def _solve_sytem(self, Atb: torch.Tensor, AtA: torch.Tensor) -> torch.Tensor: + lower = torch.linalg.cholesky(AtA) + return torch.cholesky_solve(Atb, lower).squeeze(2) diff --git a/theseus/optimizer/linear/linear_optimizer.py b/theseus/optimizer/linear/linear_optimizer.py new file mode 100644 index 000000000..0e75164ac --- /dev/null +++ b/theseus/optimizer/linear/linear_optimizer.py @@ -0,0 +1,85 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. 
+ +import warnings +from enum import Enum +from typing import Any, Dict, Optional, Type + +import numpy as np +import torch + +from theseus.core import Objective +from theseus.optimizer import Linearization, Optimizer, OptimizerInfo + +from .linear_solver import LinearSolver + + +class LinearOptimizerStatus(Enum): + START = 0 + CONVERGED = 1 + FAIL = -1 + + +class LinearOptimizer(Optimizer): + def __init__( + self, + objective: Objective, + linear_solver_cls: Type[LinearSolver], + *args, + linearization_cls: Optional[Type[Linearization]] = None, + linearization_kwargs: Optional[Dict[str, Any]] = None, + linear_solver_kwargs: Optional[Dict[str, Any]] = None, + **kwargs, + ): + super().__init__(objective) + linearization_kwargs = linearization_kwargs or {} + linear_solver_kwargs = linear_solver_kwargs or {} + self.linear_solver = linear_solver_cls( + objective, + linearization_cls=linearization_cls, + linearization_kwargs=linearization_kwargs, + **linear_solver_kwargs, + ) + + def _optimize_impl( + self, + **kwargs, + ) -> OptimizerInfo: + + info = OptimizerInfo( + best_solution={}, + status=np.array([LinearOptimizerStatus.START] * self.objective.batch_size), + ) + try: + self.linear_solver.linearization.linearize() + delta = self.linear_solver.solve() + except RuntimeError as run_err: + msg = ( + f"There was an error while running the linear optimizer. " + f"Original error message: {run_err}." + ) + if torch.is_grad_enabled(): + raise RuntimeError( + msg + " Backward pass will not work. 
To obtain " + "the best solution seen before the error, run with torch.no_grad()" + ) + else: + warnings.warn(msg, RuntimeWarning) + info.status[:] = LinearOptimizerStatus.FAIL + return info + self.retract_and_update_variables(delta) + info.status[:] = LinearOptimizerStatus.CONVERGED + for var in self.linear_solver.linearization.ordering: + info.best_solution[var.name] = var.data.clone().cpu() + return info + + # retracts all variables in the given order and updates their values + # with the result + def retract_and_update_variables(self, delta: torch.Tensor): + var_idx = 0 + for var in self.linear_solver.linearization.ordering: + new_var = var.retract(delta[:, var_idx : var_idx + var.dof()]) + var.update(new_var.data) + var_idx += var.dof() diff --git a/theseus/optimizer/linear/linear_solver.py b/theseus/optimizer/linear/linear_solver.py new file mode 100644 index 000000000..647769196 --- /dev/null +++ b/theseus/optimizer/linear/linear_solver.py @@ -0,0 +1,31 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. 
+ +import abc +from typing import Any, Dict, Optional, Type + +import torch + +from theseus.core import Objective +from theseus.optimizer import Linearization + + +class LinearSolver(abc.ABC): + # linearization_cls is optional, since every linear solver will have a default + def __init__( + self, + objective: Objective, + linearization_cls: Optional[Type[Linearization]] = None, + linearization_kwargs: Optional[Dict[str, Any]] = None, + **kwargs + ): + linearization_kwargs = linearization_kwargs or {} + self.linearization: Linearization = linearization_cls( + objective, **linearization_kwargs + ) + + @abc.abstractmethod + def solve(self, damping: Optional[float] = None, **kwargs) -> torch.Tensor: + pass diff --git a/theseus/optimizer/linear/sparse_solver.py b/theseus/optimizer/linear/sparse_solver.py new file mode 100644 index 000000000..2d92255a0 --- /dev/null +++ b/theseus/optimizer/linear/sparse_solver.py @@ -0,0 +1,63 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. 
+ +from typing import Any, Dict, Optional, Type + +import torch + +# rename Cholesky `Factor` to CholeskyDecomposition to +# prevent confusion with the factors of the factor graph, +# when using the probabilistic naming convention +from sksparse.cholmod import Factor as CholeskyDecomposition +from sksparse.cholmod import analyze_AAt + +from theseus.core import Objective +from theseus.optimizer import Linearization, SparseLinearization +from theseus.optimizer.autograd import CholmodSolveFunction + +from .linear_solver import LinearSolver + + +class CholmodSparseSolver(LinearSolver): + def __init__( + self, + objective: Objective, + linearization_cls: Optional[Type[Linearization]] = None, + linearization_kwargs: Optional[Dict[str, Any]] = None, + damping: float = 1e-6, + **kwargs, + ): + linearization_cls = linearization_cls or SparseLinearization + if not linearization_cls == SparseLinearization: + raise RuntimeError( + "CholmodSparseSolver only works with theseus.optimizer.SparseLinearization," + + f" got {type(self.linearization)}" + ) + + super().__init__(objective, linearization_cls, linearization_kwargs, **kwargs) + self.linearization: SparseLinearization = self.linearization + + # symbolic decomposition depending on the sparse structure, done with mock data + self._symbolic_cholesky_decomposition: CholeskyDecomposition = analyze_AAt( + self.linearization.structure().mock_csc_transpose() + ) + + # the `damping` has the purpose of (optionally) improving conditioning + self._damping: float = damping + + def solve(self, damping: Optional[float] = None, **kwargs) -> torch.Tensor: + damping = damping or self._damping + if not isinstance(self.linearization, SparseLinearization): + raise RuntimeError( + "CholmodSparseSolver only works with theseus.optimizer.SparseLinearization." 
+ ) + + return CholmodSolveFunction.apply( + self.linearization.A_val, + self.linearization.b, + self.linearization.structure(), + self._symbolic_cholesky_decomposition, + damping, + ) diff --git a/theseus/optimizer/linear/tests/test_dense_solver.py b/theseus/optimizer/linear/tests/test_dense_solver.py new file mode 100644 index 000000000..c48a062b9 --- /dev/null +++ b/theseus/optimizer/linear/tests/test_dense_solver.py @@ -0,0 +1,93 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +import pytest # noqa: F401 +import torch + +import theseus as th + + +def _create_linear_system(batch_size=32, matrix_size=10): + A = torch.randn((batch_size, matrix_size, matrix_size)) + AtA = torch.empty((batch_size, matrix_size, matrix_size)) + Atb = torch.empty((batch_size, matrix_size, 1)) + x = torch.randn((batch_size, matrix_size)) + for i in range(batch_size): + AtA[i] = A[i].t() @ A[i] + 0.1 * torch.eye(matrix_size, matrix_size) + Atb[i] = AtA[i] @ x[i].unsqueeze(1) + return AtA, Atb, x + + +def test_dense_solvers(): + void_objective = th.Objective() + void_ordering = th.VariableOrdering(void_objective, default_order=False) + for solver_cls in [th.LUDenseSolver, th.CholeskyDenseSolver]: + solver = solver_cls( + void_objective, linearization_kwargs={"ordering": void_ordering} + ) + solver.linearization.AtA, solver.linearization.Atb, x = _create_linear_system() + solved_x = solver.solve().squeeze() + error = torch.norm(x - solved_x, p=float("inf")) + assert error < 1e-4 + + # Test damping + damping = 0.2 + damping_eps = 1e-3 + solved_x_with_ellipsoidal_damp = solver.solve( + damping=damping, ellipsoidal_damping=True, damping_eps=damping_eps + ).squeeze() + solved_x_with_spherical_damp = solver.solve( + damping=damping, ellipsoidal_damping=False + ).squeeze() + AtA_ellipsoidal_damp = solver.linearization.AtA.clone() + AtA_spherical_damp = 
solver.linearization.AtA.clone() + batch_size, n, _ = AtA_ellipsoidal_damp.shape + # Add damping (using loop to make it more visually explicit) + for i in range(batch_size): + for j in range(n): + AtA_ellipsoidal_damp[i, j, j] *= 1 + damping + AtA_ellipsoidal_damp[i, j, j] += damping_eps + AtA_spherical_damp[i, j, j] += damping + # AtA_ellipsoidal_damp += damping_eps + Atb_check_ellipsoidal = AtA_ellipsoidal_damp.bmm( + solved_x_with_ellipsoidal_damp.unsqueeze(-1) + ) + Atb_check_spherical = AtA_spherical_damp.bmm( + solved_x_with_spherical_damp.unsqueeze(-1) + ) + error = torch.norm( + solver.linearization.Atb - Atb_check_ellipsoidal, p=float("inf") + ) + assert error < 1e-4 + error = torch.norm( + solver.linearization.Atb - Atb_check_spherical, p=float("inf") + ) + assert error < 1e-4 + + +def test_handle_singular(): + void_objective = th.Objective() + void_ordering = th.VariableOrdering(void_objective, default_order=False) + batch_size = 32 + matrix_size = 10 + mid_point = batch_size // 2 + + for solver_cls in [th.LUDenseSolver, th.CholeskyDenseSolver]: + with pytest.warns(RuntimeWarning): + solver = solver_cls( + void_objective, + linearization_kwargs={"ordering": void_ordering}, + check_singular=True, + ) + ( + solver.linearization.AtA, + solver.linearization.Atb, + x, + ) = _create_linear_system(batch_size=batch_size, matrix_size=10) + solver.linearization.AtA[:mid_point] = torch.zeros(matrix_size, matrix_size) + solved_x = solver.solve().squeeze() + error = (x - solved_x) ** 2 + assert error[mid_point:].mean().item() < 1e-6 + assert solved_x[:mid_point].abs().max() == 0 diff --git a/theseus/optimizer/linear/tests/test_sparse_solver.py b/theseus/optimizer/linear/tests/test_sparse_solver.py new file mode 100644 index 000000000..cb6b61327 --- /dev/null +++ b/theseus/optimizer/linear/tests/test_sparse_solver.py @@ -0,0 +1,72 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. 
+# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +import pytest # noqa: F401 +import torch +from sksparse.cholmod import analyze_AAt + +import theseus as th + + +def _build_sparse_mat(batch_size): + all_cols = list(range(10)) + col_ind = [] + row_ptr = [0] + for i in range(12): + start = max(0, i - 2) + end = min(i + 1, 10) + col_ind += all_cols[start:end] + row_ptr.append(len(col_ind)) + data = torch.randn((batch_size, len(col_ind))) + return 12, 10, data, col_ind, row_ptr + + +def test_sparse_solver(): + + void_objective = th.Objective() + void_ordering = th.VariableOrdering(void_objective, default_order=False) + damping = 0.2 # set big value for checking + solver = th.CholmodSparseSolver( + void_objective, + linearization_kwargs={"ordering": void_ordering}, + damping=damping, + ) + linearization = solver.linearization + + batch_size = 4 + void_objective._batch_size = batch_size + num_rows, num_cols, data, col_ind, row_ptr = _build_sparse_mat(batch_size) + linearization.num_rows = num_rows + linearization.num_cols = num_cols + linearization.A_val = data + linearization.A_col_ind = col_ind + linearization.A_row_ptr = row_ptr + linearization.b = torch.randn((batch_size, num_rows)) + # Only need this line for the test since the objective is a mock + solver._symbolic_cholesky_decomposition = analyze_AAt( + linearization.structure().mock_csc_transpose() + ) + + solved_x = solver.solve() + + # also check that damping is being overridden via kwargs correctly + other_damping = 0.3 + solved_x_other_damping = solver.solve(damping=other_damping) + + for i in range(batch_size): + csrAi = linearization.structure().csr_straight(linearization.A_val[i, :]) + Ai = torch.Tensor(csrAi.todense()) + ata = Ai.T @ Ai + b = linearization.b[i] + atb = torch.Tensor(csrAi.transpose() @ b) + + def _check_correctness(solved_x_, damping_): + # the linear system solved is with matrix (AtA + damping*I) + atb_check 
= ata @ solved_x_[i] + damping_ * solved_x_[i] + max_offset = torch.norm(atb - atb_check, p=float("inf")) + assert max_offset < 1e-4 + + _check_correctness(solved_x, damping) + _check_correctness(solved_x_other_damping, other_damping) diff --git a/theseus/optimizer/linear_system.py b/theseus/optimizer/linear_system.py new file mode 100644 index 000000000..a03f52028 --- /dev/null +++ b/theseus/optimizer/linear_system.py @@ -0,0 +1,46 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +import abc + +import numpy as np +from scipy.sparse import csc_matrix, csr_matrix + + +class SparseStructure(abc.ABC): + def __init__( + self, + col_ind: np.ndarray, + row_ptr: np.ndarray, + num_rows: int, + num_cols: int, + dtype: np.dtype = np.float_, + ): + self.col_ind = col_ind + self.row_ptr = row_ptr + self.num_rows = num_rows + self.num_cols = num_cols + self.dtype = dtype + + def csr_straight(self, val): + return csr_matrix( + (val, self.col_ind, self.row_ptr), + (self.num_rows, self.num_cols), + dtype=self.dtype, + ) + + def csc_transpose(self, val): + return csc_matrix( + (val, self.col_ind, self.row_ptr), + (self.num_cols, self.num_rows), + dtype=self.dtype, + ) + + def mock_csc_transpose(self): + return csc_matrix( + (np.zeros(len(self.col_ind), dtype=self.dtype), self.col_ind, self.row_ptr), + (self.num_cols, self.num_rows), + dtype=self.dtype, + ) diff --git a/theseus/optimizer/linearization.py b/theseus/optimizer/linearization.py new file mode 100644 index 000000000..ee4e583db --- /dev/null +++ b/theseus/optimizer/linearization.py @@ -0,0 +1,54 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. 
+ +import abc +from typing import List, Optional + +from theseus.core import Objective + +from .variable_ordering import VariableOrdering + + +class Linearization(abc.ABC): + # if ordering is None, this will generate a default ordering + def __init__( + self, + objective: Objective, + ordering: Optional[VariableOrdering] = None, + **kwargs + ): + self.objective = objective + if ordering is None: + ordering = VariableOrdering(objective, default_order=True) + self.ordering = ordering + if not self.ordering.complete: + raise ValueError("Given variable ordering is not complete.") + + self.var_dims: List[int] = [] + self.var_start_cols: List[int] = [] + col_counter = 0 + for var in ordering: + v_dim = var.dof() + self.var_start_cols.append(col_counter) + self.var_dims.append(v_dim) + col_counter += v_dim + + self.num_cols = col_counter + self.num_rows = self.objective.dim() + + @abc.abstractmethod + def _linearize_jacobian_impl(self): + pass + + @abc.abstractmethod + def _linearize_hessian_impl(self): + pass + + def linearize(self): + if not self.ordering.complete: + raise RuntimeError( + "Attempted to linearize an objective with an incomplete variable order." + ) + self._linearize_hessian_impl() diff --git a/theseus/optimizer/nonlinear/__init__.py b/theseus/optimizer/nonlinear/__init__.py new file mode 100644 index 000000000..61c1e0303 --- /dev/null +++ b/theseus/optimizer/nonlinear/__init__.py @@ -0,0 +1,13 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. 
+ +from .gauss_newton import GaussNewton +from .levenberg_marquardt import LevenbergMarquardt +from .nonlinear_least_squares import NonlinearLeastSquares +from .nonlinear_optimizer import ( + NonlinearOptimizer, + NonlinearOptimizerParams, + NonlinearOptimizerStatus, +) diff --git a/theseus/optimizer/nonlinear/gauss_newton.py b/theseus/optimizer/nonlinear/gauss_newton.py new file mode 100644 index 000000000..eeaabd982 --- /dev/null +++ b/theseus/optimizer/nonlinear/gauss_newton.py @@ -0,0 +1,43 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +from typing import Any, Dict, Optional, Type + +import torch + +from theseus.core import Objective +from theseus.optimizer import Linearization +from theseus.optimizer.linear import LinearSolver + +from .nonlinear_least_squares import NonlinearLeastSquares + + +class GaussNewton(NonlinearLeastSquares): + def __init__( + self, + objective: Objective, + linear_solver_cls: Optional[Type[LinearSolver]] = None, + linearization_cls: Optional[Type[Linearization]] = None, + linearization_kwargs: Optional[Dict[str, Any]] = None, + linear_solver_kwargs: Optional[Dict[str, Any]] = None, + abs_err_tolerance: float = 1e-10, + rel_err_tolerance: float = 1e-8, + max_iterations: int = 20, + step_size: float = 1.0, + ): + super().__init__( + objective, + linear_solver_cls=linear_solver_cls, + linearization_cls=linearization_cls, + linearization_kwargs=linearization_kwargs, + linear_solver_kwargs=linear_solver_kwargs, + abs_err_tolerance=abs_err_tolerance, + rel_err_tolerance=rel_err_tolerance, + max_iterations=max_iterations, + step_size=step_size, + ) + + def compute_delta(self, **kwargs) -> torch.Tensor: + return self.linear_solver.solve() diff --git a/theseus/optimizer/nonlinear/levenberg_marquardt.py b/theseus/optimizer/nonlinear/levenberg_marquardt.py new file mode 100644 index 000000000..f9ed54aab 
--- /dev/null +++ b/theseus/optimizer/nonlinear/levenberg_marquardt.py @@ -0,0 +1,67 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +from typing import Any, Dict, Optional, Type + +import torch + +from theseus.core import Objective +from theseus.optimizer import Linearization +from theseus.optimizer.linear import DenseSolver, LinearSolver + +from .nonlinear_least_squares import NonlinearLeastSquares + + +# See Nocedal and Wright, Numerical Optimization, pp. 258 - 261 +# https://www.csie.ntu.edu.tw/~r97002/temp/num_optimization.pdf +class LevenbergMarquardt(NonlinearLeastSquares): + def __init__( + self, + objective: Objective, + linear_solver_cls: Optional[Type[LinearSolver]] = None, + linearization_cls: Optional[Type[Linearization]] = None, + linearization_kwargs: Optional[Dict[str, Any]] = None, + linear_solver_kwargs: Optional[Dict[str, Any]] = None, + abs_err_tolerance: float = 1e-10, + rel_err_tolerance: float = 1e-8, + max_iterations: int = 20, + step_size: float = 1.0, + ): + super().__init__( + objective, + linear_solver_cls=linear_solver_cls, + linearization_cls=linearization_cls, + linearization_kwargs=linearization_kwargs, + linear_solver_kwargs=linear_solver_kwargs, + abs_err_tolerance=abs_err_tolerance, + rel_err_tolerance=rel_err_tolerance, + max_iterations=max_iterations, + step_size=step_size, + ) + + def compute_delta( + self, + damping: float = 1e-3, + ellipsoidal_damping: bool = False, + damping_eps: Optional[float] = None, + **kwargs, + ) -> torch.Tensor: + if ellipsoidal_damping: + raise NotImplementedError("Ellipsoidal damping is not currently supported.") + if ellipsoidal_damping and not isinstance(self.linear_solver, DenseSolver): + raise NotImplementedError( + "Ellipsoidal damping is only supported when using DenseSolver." 
+ ) + if damping_eps and not isinstance(self.linear_solver, DenseSolver): + raise NotImplementedError( + "damping eps is only supported when using DenseSolver." + ) + damping_eps = damping_eps or 1e-8 + + return self.linear_solver.solve( + damping=damping, + ellipsoidal_damping=ellipsoidal_damping, + damping_eps=damping_eps, + ) diff --git a/theseus/optimizer/nonlinear/nonlinear_least_squares.py b/theseus/optimizer/nonlinear/nonlinear_least_squares.py new file mode 100644 index 000000000..27203a0a1 --- /dev/null +++ b/theseus/optimizer/nonlinear/nonlinear_least_squares.py @@ -0,0 +1,49 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +import abc +from typing import Any, Dict, Optional, Type + +import torch + +from theseus.core import Objective +from theseus.optimizer import Linearization +from theseus.optimizer.linear import LinearSolver +from theseus.optimizer.linear.dense_solver import CholeskyDenseSolver + +from .nonlinear_optimizer import NonlinearOptimizer + + +class NonlinearLeastSquares(NonlinearOptimizer, abc.ABC): + def __init__( + self, + objective: Objective, + *args, + linear_solver_cls: Optional[Type[LinearSolver]] = None, + linearization_cls: Optional[Type[Linearization]] = None, + linearization_kwargs: Optional[Dict[str, Any]] = None, + linear_solver_kwargs: Optional[Dict[str, Any]] = None, + abs_err_tolerance: float = 1e-10, + rel_err_tolerance: float = 1e-8, + max_iterations: int = 20, + step_size: float = 1.0, + **kwargs, + ): + linear_solver_cls = linear_solver_cls or CholeskyDenseSolver + super().__init__( + objective, + linear_solver_cls=linear_solver_cls, + linearization_cls=linearization_cls, + linearization_kwargs=linearization_kwargs, + linear_solver_kwargs=linear_solver_kwargs, + abs_err_tolerance=abs_err_tolerance, + rel_err_tolerance=rel_err_tolerance, + max_iterations=max_iterations, + 
step_size=step_size, + ) + + @abc.abstractmethod + def compute_delta(self, **kwargs) -> torch.Tensor: + pass diff --git a/theseus/optimizer/nonlinear/nonlinear_optimizer.py b/theseus/optimizer/nonlinear/nonlinear_optimizer.py new file mode 100644 index 000000000..9138cbcfb --- /dev/null +++ b/theseus/optimizer/nonlinear/nonlinear_optimizer.py @@ -0,0 +1,233 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +import abc +import math +import warnings +from dataclasses import dataclass +from enum import Enum +from typing import Any, Dict, Optional, Type + +import numpy as np +import torch + +import theseus.constants +from theseus.core import Objective +from theseus.optimizer import Linearization, Optimizer, OptimizerInfo +from theseus.optimizer.linear import LinearSolver + + +@dataclass +class NonlinearOptimizerParams: + abs_err_tolerance: float + rel_err_tolerance: float + max_iterations: int + step_size: float + + def update(self, params_dict): + for param, value in params_dict.items(): + if hasattr(self, param): + setattr(self, param, value) + else: + raise ValueError(f"Invalid nonlinear least squares parameter {param}.") + + +class NonlinearOptimizerStatus(Enum): + START = 0 + CONVERGED = 1 + MAX_ITERATIONS = 2 + FAIL = -1 + + +# All info information is batched +@dataclass +class NonlinearOptimizerInfo(OptimizerInfo): + converged_iter: torch.Tensor + best_iter: torch.Tensor + err_history: Optional[torch.Tensor] + + +class NonlinearOptimizer(Optimizer, abc.ABC): + def __init__( + self, + objective: Objective, + linear_solver_cls: Type[LinearSolver], + *args, + linearization_cls: Optional[Type[Linearization]] = None, + linearization_kwargs: Optional[Dict[str, Any]] = None, + linear_solver_kwargs: Optional[Dict[str, Any]] = None, + abs_err_tolerance: float = 1e-10, + rel_err_tolerance: float = 1e-8, + max_iterations: int = 20, + 
step_size: float = 1.0, + **kwargs, + ): + super().__init__(objective) + linear_solver_kwargs = linear_solver_kwargs or {} + self.linear_solver = linear_solver_cls( + objective, + linearization_cls=linearization_cls, + linearization_kwargs=linearization_kwargs, + **linear_solver_kwargs, + ) + self.params = NonlinearOptimizerParams( + abs_err_tolerance, rel_err_tolerance, max_iterations, step_size + ) + + def set_params(self, **kwargs): + self.params.update(kwargs) + + def _check_convergence(self, err: torch.Tensor, last_err: torch.Tensor): + assert not torch.is_grad_enabled() + if err.abs().mean() < theseus.constants.EPS: + return torch.ones_like(err).bool() + + abs_error = (last_err - err).abs() + rel_error = abs_error / last_err + return (abs_error < self.params.abs_err_tolerance).logical_or( + rel_error < self.params.rel_err_tolerance + ) + + def _maybe_init_best_solution( + self, do_init: bool = False + ) -> Optional[Dict[str, torch.Tensor]]: + if not do_init: + return None + solution_dict = {} + for var in self.linear_solver.linearization.ordering: + solution_dict[var.name] = var.data.detach().clone().cpu() + return solution_dict + + def _init_info( + self, last_err: torch.Tensor, track_best_solution: bool, verbose: bool + ) -> NonlinearOptimizerInfo: + if verbose: + err_history = ( + torch.ones(self.objective.batch_size, self.params.max_iterations + 1) + * math.inf + ) + assert last_err.grad_fn is None + err_history[:, 0] = last_err.clone().cpu() + else: + err_history = None + return NonlinearOptimizerInfo( + best_solution=self._maybe_init_best_solution(do_init=track_best_solution), + status=np.array( + [NonlinearOptimizerStatus.START] * self.objective.batch_size + ), + converged_iter=torch.zeros_like(last_err, dtype=torch.long), + best_iter=torch.zeros_like(last_err, dtype=torch.long), + err_history=err_history, + ) + + # Only copy best solution if needed (None means track_best_solution=False) + + def _update_info( + self, + info: NonlinearOptimizerInfo, + 
current_iter: int, + best_err: Optional[torch.Tensor], + err: torch.Tensor, + converged_indices: torch.Tensor, + ) -> torch.Tensor: + info.converged_iter += 1 - converged_indices.long() + if info.err_history is not None: + assert err.grad_fn is None + info.err_history[:, current_iter + 1] = err.clone().cpu() + if info.best_solution is None: + return best_err + # Only copy best solution if needed (None means track_best_solution=False) + assert best_err is not None + good_indices = err < best_err + info.best_iter[good_indices] = current_iter + for var in self.linear_solver.linearization.ordering: + info.best_solution[var.name][good_indices] = ( + var.data.detach().clone()[good_indices].cpu() + ) + return torch.minimum(best_err, err) + + # `track_best_solution` keeps a **detached** copy (as in no gradient info) + # of the best variables found, but it is optional to avoid unnecessary copying + # if this is not needed + # + # if verbose, info will also keep track of the full error history + def _optimize_impl( + self, + track_best_solution: bool = False, + verbose: bool = False, + **kwargs, + ) -> OptimizerInfo: + # All errors are only used for stopping conditions, so they are outside + # compute graph + last_err = self.objective.error_squared_norm().detach() / 2 + + if verbose: + print( + f"Nonlinear optimizer. Iteration: {0}. Error: {last_err.mean().item()}" + ) + + best_err = last_err.clone() if track_best_solution else None + converged_indices = torch.zeros_like(last_err).bool() + info = self._init_info(last_err, track_best_solution, verbose) + for it_ in range(self.params.max_iterations): + # do optimizer step + self.linear_solver.linearization.linearize() + try: + delta = self.compute_delta(**kwargs) + except RuntimeError as run_err: + msg = ( + f"There was an error while running the linear optimizer. " + f"Original error message: {run_err}." + ) + if torch.is_grad_enabled(): + raise RuntimeError( + msg + " Backward pass will not work. 
To obtain " + "the best solution seen before the error, run with " + "torch.no_grad()" + ) + else: + warnings.warn(msg, RuntimeWarning) + info.status[:] = NonlinearOptimizerStatus.FAIL + return info + self.retract_and_update_variables(delta, converged_indices) + + # check for convergence + with torch.no_grad(): + err = self.objective.error_squared_norm().detach() / 2 + best_err = self._update_info( + info, it_, best_err, err, converged_indices + ) + if verbose: + print( + f"Nonlinear optimizer. Iteration: {it_ + 1}. " + f"Error: {err.mean().item()}" + ) + converged_indices = self._check_convergence(err, last_err) + info.status[ + converged_indices.cpu().numpy() + ] = NonlinearOptimizerStatus.CONVERGED + if converged_indices.all(): + break # nothing else will happen at this point + last_err = err + info.status[ + info.status == NonlinearOptimizerStatus.START + ] = NonlinearOptimizerStatus.MAX_ITERATIONS + return info + + @abc.abstractmethod + def compute_delta(self, **kwargs) -> torch.Tensor: + pass + + # retracts all variables in the given order and updates their values + # with the result + def retract_and_update_variables( + self, delta: torch.Tensor, converged_indices: torch.Tensor + ): + var_idx = 0 + delta = self.params.step_size * delta + for var in self.linear_solver.linearization.ordering: + new_var = var.retract(delta[:, var_idx : var_idx + var.dof()]) + var.update(new_var.data, batch_ignore_mask=converged_indices) + var_idx += var.dof() diff --git a/theseus/optimizer/nonlinear/tests/__init__.py b/theseus/optimizer/nonlinear/tests/__init__.py new file mode 100644 index 000000000..7bec24cb1 --- /dev/null +++ b/theseus/optimizer/nonlinear/tests/__init__.py @@ -0,0 +1,4 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. 
# --- new file: theseus/optimizer/nonlinear/tests/common.py ---
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.

import pytest  # noqa: F401
import torch

import theseus as th


class ResidualCostFunction(th.CostFunction):
    """Cost function computing h(B*x) - h(Btrue*x) with h(z) = z^2 for one
    fixed data point; used by the shared nonlinear least-squares checks."""

    def __init__(
        self,
        optim_vars,
        cost_weight,
        name=None,
        true_coeffs=None,
        point=None,
        multivar=False,
        noise_mag=0,
    ):
        super().__init__(cost_weight, name=name)
        # multivar mode: many 1-d variables; otherwise one len_vars-dim Vector.
        len_vars = len(optim_vars) if multivar else optim_vars[0].dof()
        assert true_coeffs.ndim == 1 and true_coeffs.numel() == len_vars
        if point.ndim == 1:
            point = point.unsqueeze(0)
        # point carries the non-constant features; a ones column is appended
        # below so the last coefficient acts as the bias term b_0.
        assert point.ndim == 2 and point.shape[1] == len_vars - 1
        batch_size = point.shape[0]
        for i, var in enumerate(optim_vars):
            attr_name = f"optim_var_{i}"
            setattr(self, attr_name, var)
            self.register_optim_var(attr_name)

        self.point = torch.cat([point, torch.ones(batch_size, 1)], dim=1)
        # Target is h(Btrue * x) = (point . true_coeffs)^2, optionally noisy.
        self.target = (self.point * true_coeffs.unsqueeze(0)).sum(1, keepdim=True) ** 2
        if noise_mag:
            self.target += noise_mag * torch.randn(size=self.target.shape)
        self.multivar = multivar

    def _eval_coeffs(self):
        # Returns point . B with shape (batch_size, 1), gathering the
        # coefficient vector either from one Vector or from many 1-d ones.
        if self.multivar:
            coeffs = torch.cat([v.data for v in self.optim_vars], axis=1)
        else:
            coeffs = self.optim_var_0.data
        return (self.point * coeffs).sum(1, keepdim=True)

    def error(self):
        # h(B * x) - h(Btrue * x)
        return self._eval_coeffs() ** 2 - self.target

    def jacobians(self):
        # d h(z)/dB = 2 z * x, with z = point . B.
        dhdz = 2 * self._eval_coeffs()
        grad = self.point * dhdz
        # NOTE(review): a single (batch, 1, len_vars) jacobian is returned even
        # in multivar mode (several 1-d variables); confirm the consuming
        # linearization accepts this stacked form rather than one jacobian per
        # optimization variable, as other cost functions in this change return.
        return [grad.unsqueeze(1)], self.error()

    def dim(self):
        # Scalar residual.
        return 1

    def _copy_impl(self, new_name=None):
        # Copying is not needed by these tests.
        raise NotImplementedError


def _check_info(info, batch_size, max_iterations, initial_error, objective):
    """Sanity-check the optimizer info: error history shape, initial entry,
    best-iteration bookkeeping, and the error at the last converged step."""
    assert info.err_history.shape == (batch_size, max_iterations + 1)
    assert info.err_history[:, 0].allclose(initial_error)
    # err_history column 0 is the initial error, so best_iter is offset by 1.
    assert info.err_history.argmin(dim=1).allclose(info.best_iter + 1)
    last_error = objective.error_squared_norm() / 2
    last_convergence_idx = info.converged_iter.max().item()
    assert info.err_history[:, last_convergence_idx].allclose(last_error)


# This test uses least-squares regression to find the coefficients b_j of
#
#   y = f(x, B) = h(sum_i b_i * x_i + b_0) , for i in [1, nvars-1]
#
# where h(z) = z^2.
#
# There is only one nvars-dimensional Vector for all coefficients, and
# one cost function per data-point (generated at random), which computes residual
#
#   (f(p, Btrue) - f(p, B))
#
# for some data point p specific to this cost function. There is no noise.
def _check_nonlinear_least_squares_fit(
    nonlinear_optim_cls,
    optimize_kwargs,
    points,
    nvars,
    npoints,
    batch_size,
):
    true_coeffs = torch.ones(nvars)
    variables = [th.Vector(nvars, name="coefficients")]
    cost_weight = th.ScaleCostWeight(1.0)
    objective = th.Objective()
    for i in range(npoints):
        objective.add(
            ResidualCostFunction(
                variables,
                cost_weight,
                true_coeffs=true_coeffs,
                point=points[i],
                name=f"residual_point_{i}",
            )
        )

    # Initial value is B = [0, 1, ..., nvars - 1]
    values = {"coefficients": torch.arange(nvars).repeat(batch_size, 1).float()}
    objective.update(values)
    initial_error = objective.error_squared_norm() / 2
    max_iterations = 20
    optimizer = nonlinear_optim_cls(objective)
    assert isinstance(optimizer.linear_solver, th.CholeskyDenseSolver)
    optimizer.set_params(max_iterations=max_iterations)
    info = optimizer.optimize(track_best_solution=True, verbose=True, **optimize_kwargs)
    # Solution must now match the true coefficients
    assert variables[0].data.allclose(true_coeffs.repeat(batch_size, 1), atol=1e-6)
    _check_info(info, batch_size, max_iterations, initial_error, objective)


# Same test as above
# but now each cost function has m 1-d variables instead
# of a single m-dim variable
def _check_nonlinear_least_squares_fit_multivar(
    nonlinear_optim_cls,
    optimize_kwargs,
    points,
    nvars,
    npoints,
    batch_size,
):
    true_coeffs = torch.ones(nvars)
    variables = [th.Vector(1, name=f"coeff{i}") for i in range(nvars)]
    cost_weight = th.ScaleCostWeight(1.0)
    objective = th.Objective()
    for i in range(npoints):
        objective.add(
            ResidualCostFunction(
                variables,
                cost_weight,
                true_coeffs=true_coeffs,
                point=points[i],
                name=f"residual_point_{i}",
                multivar=True,
            )
        )
    # Initial value is B = [0, 1, ..., nvars - 1]
    values = dict((f"coeff{i}", i * torch.ones(batch_size, 1)) for i in range(nvars))
    objective.update(values)
    initial_error = objective.error_squared_norm() / 2

    max_iterations = 20
    optimizer = nonlinear_optim_cls(objective)
    assert isinstance(optimizer.linear_solver, th.CholeskyDenseSolver)
    optimizer.set_params(max_iterations=max_iterations)
    info = optimizer.optimize(track_best_solution=True, verbose=True, **optimize_kwargs)

    # Solution must now match the true coefficients
    for i in range(nvars):
        assert variables[i].data.allclose(true_coeffs[i].repeat(batch_size, 1))

    _check_info(info, batch_size, max_iterations, initial_error, objective)


def _check_optimizer_returns_fail_status_on_singular(
    nonlinear_optim_cls, optimize_kwargs
):
    """Force a singular (all-zero) hessian and check the optimizer raises under
    grad mode but warns and reports FAIL status under torch.no_grad()."""

    class BadLinearization(th.optimizer.Linearization):
        # Produces AtA == 0 so any direct solve of AtA x = Atb must fail.
        def __init__(self, objective):
            super().__init__(objective)

        def _linearize_jacobian_impl(self):
            pass

        def _linearize_hessian_impl(self):
            self.AtA = torch.zeros(
                self.objective.batch_size, self.num_rows, self.num_cols
            )
            self.Atb = torch.ones(self.objective.batch_size, self.num_cols)

    class MockCostFunction(th.CostFunction):
        # Minimal cost function; its jacobians are never consumed because
        # BadLinearization overrides the hessian computation directly.
        def __init__(self, optim_vars, cost_weight):
            super().__init__(cost_weight)
            for var in optim_vars:
                setattr(self, var.name, var)
                self.register_optim_var(var.name)

        def error(self):
            return torch.ones(1)

        def jacobians(self):
            # Fix: return a *list* of jacobians, matching the contract used by
            # every other cost function in this change (one entry per variable).
            # The original returned the bare tensor.
            return [torch.ones(1)], self.error()

        def dim(self):
            return 1

        # Fix: signature made consistent with the `_copy_impl(self, new_name=None)`
        # convention used by ResidualCostFunction and MockCostWeight.
        def _copy_impl(self, new_name=None):
            raise NotImplementedError

    objective = th.Objective()
    variables = [th.Vector(1, name="dummy")]
    objective.add(MockCostFunction(variables, th.ScaleCostWeight(1.0)))
    values = {"dummy": torch.zeros(1, 1)}
    objective.update(values)

    optimizer = nonlinear_optim_cls(objective)
    assert isinstance(optimizer.linear_solver, th.CholeskyDenseSolver)
    optimizer.set_params(max_iterations=30)
    optimizer.linear_solver.linearization = BadLinearization(objective)
    # Under grad mode the solve failure must surface as a RuntimeError ...
    with pytest.raises(RuntimeError):
        optimizer.optimize(track_best_solution=True, verbose=True, **optimize_kwargs)

    # ... while under no_grad it degrades to a warning plus FAIL status.
    with pytest.warns(RuntimeWarning):
        with torch.no_grad():
            info = optimizer.optimize(
                track_best_solution=True, verbose=True, **optimize_kwargs
            )
    assert (info.status == th.NonlinearOptimizerStatus.FAIL).all()


def run_nonlinear_least_squares_check(
    nonlinear_optim_cls, optimize_kwargs, singular_check=True
):
    """Run the shared single-variable, multi-variable, and (optionally)
    singular-system checks for a NonlinearLeastSquares subclass."""
    rng = torch.Generator()
    rng.manual_seed(0)
    nvars = 5
    npoints = 50
    batch_size = 32
    points = [torch.randn(batch_size, nvars - 1, generator=rng) for _ in range(npoints)]

    print("----- Check single variable formulation -----")
    _check_nonlinear_least_squares_fit(
        nonlinear_optim_cls,
        optimize_kwargs,
        points,
        nvars,
        npoints,
        batch_size,
    )
    print("----- Check multiple variable formulation -----")
    _check_nonlinear_least_squares_fit_multivar(
        nonlinear_optim_cls,
        optimize_kwargs,
        points,
        nvars,
        npoints,
        batch_size,
    )
    if singular_check:
        print("----- Check failure status on singular system formulation -----")
        _check_optimizer_returns_fail_status_on_singular(
            nonlinear_optim_cls, optimize_kwargs
        )


# --- new file: theseus/optimizer/nonlinear/tests/test_gauss_newton.py ---
# (only the diff header lies on this source line; the file body continues
# on the next source line)
# --- new file: theseus/optimizer/nonlinear/tests/test_gauss_newton.py (continued) ---
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.

import pytest  # noqa: F401

import theseus as th

from .common import run_nonlinear_least_squares_check


def test_gauss_newton():
    """Gauss-Newton must pass the shared NLS regression checks with defaults."""
    run_nonlinear_least_squares_check(th.GaussNewton, {})


# --- new file: theseus/optimizer/nonlinear/tests/test_levenberg_marquardt.py ---
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.

import pytest  # noqa: F401

import theseus as th

from .common import run_nonlinear_least_squares_check


# NOTE(review): function name has a typo ("marquartd"); kept as-is since
# renaming a collected test changes the public test surface.
def test_levenberg_marquartd():
    """LM must pass the shared checks across a sweep of damping values."""
    damping_values = [0, 0.001, 0.01, 0.1]
    # Only non-ellipsoidal damping is exercised (the other mode raises).
    for ellipsoidal_damping in [False]:
        for damping in damping_values:
            run_nonlinear_least_squares_check(
                th.LevenbergMarquardt,
                {
                    "damping": damping,
                    "ellipsoidal_damping": ellipsoidal_damping,
                    "damping_eps": 0.0,
                },
                singular_check=damping < 0.001,
            )


# --- new file: theseus/optimizer/optimizer.py ---
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.

import abc
from dataclasses import dataclass
from typing import Dict, Optional

import numpy as np
import torch

from theseus.core import Objective


# All info information is batched
@dataclass
class OptimizerInfo:
    """Summary returned by Optimizer.optimize(), one entry per batch element."""

    best_solution: Optional[Dict[str, torch.Tensor]]
    # status for each element
    status: np.ndarray


class Optimizer(abc.ABC):
    """Abstract optimizer base: records the objective and guards optimize()
    against structural modifications made after construction."""

    def __init__(self, objective: Objective, *args, **kwargs):
        self.objective = objective
        # Version snapshot; compared on every optimize() call.
        self._objectives_version = objective.current_version

    @abc.abstractmethod
    def _optimize_impl(self, **kwargs) -> OptimizerInfo:
        pass

    def optimize(self, **kwargs) -> OptimizerInfo:
        # Refuse to run if the objective changed since this optimizer was built.
        if self.objective.current_version != self._objectives_version:
            raise RuntimeError(
                "The objective was modified after optimizer construction, which is "
                "currently not supported."
            )
        return self._optimize_impl(**kwargs)


# --- new file: theseus/optimizer/sparse_linearization.py ---
# (file body continues on the next source line; header reproduced unchanged)
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
+ +from typing import List, Optional + +import numpy as np +import torch + +from theseus.core import Objective + +from .linear_system import SparseStructure +from .linearization import Linearization +from .variable_ordering import VariableOrdering + + +class SparseLinearization(Linearization): + def __init__( + self, + objective: Objective, + ordering: Optional[VariableOrdering] = None, + **kwargs + ): + super().__init__(objective, ordering) + + # we prepare the indices for At as csc matrix (or A as csr, same thing) + # for similarity with dense_linearization code we build A as csr, then + # actually we have At as csc and can feed it to `cholesky_AAt` routine + # we want a unique set of rowPtr/colInd indices for A for all batches + # we also save pointers to the data block, so that we can later quickly + # write the data blocks + A_col_ind: List[int] = [] + A_row_ptr: List[int] = [0] + + # ptr to data block (stride = sum of variable.dim() + cost_function_block_pointers = [] + cost_function_row_block_starts = [] # where data start for this row block + cost_function_stride = [] # total jacobian cols + + for _, cost_function in enumerate(self.objective): + num_rows = cost_function.dim() + col_slices_indices = [] + for var_idx_in_cost_function, variable in enumerate( + cost_function.optim_vars + ): + var_idx_in_order = self.ordering.index_of( + cost_function.optim_var_at(var_idx_in_cost_function).name + ) + var_start_col = self.var_start_cols[var_idx_in_order] + num_cols = variable.dof() + col_slice = slice(var_start_col, var_start_col + num_cols) + col_slices_indices.append((col_slice, var_idx_in_cost_function)) + + # sort according to how they will be written inside A + col_slices_indices.sort() + sorted_block_sizes = [(s.stop - s.start) for s, _ in col_slices_indices] + sorted_block_pointers = np.cumsum([0] + sorted_block_sizes)[:-1] + sorted_indices = np.array([i for _, i in col_slices_indices]) + block_pointers: np.ndarray = np.ndarray( + 
(len(col_slices_indices),), dtype=int + ) + block_pointers[sorted_indices] = sorted_block_pointers + cost_function_block_pointers.append(block_pointers) + + cost_function_row_block_starts.append(len(A_col_ind)) + col_ind = [c for s, _ in col_slices_indices for c in range(s.start, s.stop)] + cost_function_stride.append(len(col_ind)) + + for _ in range(num_rows): + A_col_ind += col_ind + A_row_ptr.append(len(A_col_ind)) + + # not batched, these data are the same across batches + self.cost_function_block_pointers = cost_function_block_pointers + self.cost_function_row_block_starts: np.ndarray = np.array( + cost_function_row_block_starts, dtype=int + ) + self.cost_function_stride: np.ndarray = np.array( + cost_function_stride, dtype=int + ) + self.A_row_ptr: np.ndarray = np.array(A_row_ptr, dtype=int) + self.A_col_ind: np.ndarray = np.array(A_col_ind, dtype=int) + + # batched data + self.A_val: torch.Tensor = None + self.b: torch.Tensor = None + self.Atb: torch.Tensor = None + + def _linearize_jacobian_impl(self): + # those will be fully overwritten, no need to zero: + self.A_val = torch.empty( + size=(self.objective.batch_size, len(self.A_col_ind)), + device=self.objective.device, + ) + self.b = torch.empty( + size=(self.objective.batch_size, self.num_rows), + device=self.objective.device, + ) + + err_row_idx = 0 + for f_idx, cost_function in enumerate(self.objective): + jacobians, error = cost_function.weighted_jacobians_error() + num_rows = cost_function.dim() + row_slice = slice(err_row_idx, err_row_idx + num_rows) + + # we will view the blocks of rows inside `A_val` as `num_rows` x `stride` matrix + block_start = self.cost_function_row_block_starts[f_idx] + stride = self.cost_function_stride[f_idx] + block = self.A_val[:, block_start : block_start + stride * num_rows].view( + -1, num_rows, stride + ) + block_pointers = self.cost_function_block_pointers[f_idx] + + for var_idx_in_cost_function, var_jacobian in enumerate(jacobians): + + # the proper block is written, 
using the precomputed index in `block_pointers` + num_cols = var_jacobian.shape[2] + pointer = block_pointers[var_idx_in_cost_function] + block[:, :, pointer : pointer + num_cols] = var_jacobian + + self.b[:, row_slice] = -error + err_row_idx += cost_function.dim() + + def structure(self): + return SparseStructure( + self.A_col_ind, + self.A_row_ptr, + self.num_rows, + self.num_cols, + dtype=np.float64 if self.objective.dtype == torch.double else np.float32, + ) + + def _linearize_hessian_impl(self): + self._linearize_jacobian_impl() diff --git a/theseus/optimizer/tests/linearization_test_utils.py b/theseus/optimizer/tests/linearization_test_utils.py new file mode 100644 index 000000000..01bcc921c --- /dev/null +++ b/theseus/optimizer/tests/linearization_test_utils.py @@ -0,0 +1,177 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +import torch # noqa: F401 + +import theseus as th + + +class MockVector(th.Manifold): + def __init__(self, value, length, name=None): + super().__init__(value, length, name=name) + + def _init_data(value, length): + return value * torch.ones(1, length) + + def dof(self): + return self.data.shape[1] + + def _local_impl(self, variable2): + return torch.zeros(1) + + def _local_jacobian(self, vec2): + pass + + def _retract_impl(self, delta): + return self + + def _copy_impl(self): + raise NotImplementedError + + +class MockCostFunction(th.CostFunction): + def __init__(self, optim_vars, cost_weight, dim, name=None): + super().__init__(cost_weight, name=name) + for i, var in enumerate(optim_vars): + attr_name = f"optim_var_{i}" + setattr(self, attr_name, var) + self.register_optim_var(attr_name) + self.the_dim = dim + + # Error function for this cost function is (for each batch element) + # ei = (i + 1) * sum_j (j + 1) * sum_k [var_j]_k + def error(self): + batch_size = self.optim_var_0.data.shape[0] + 
error = torch.zeros(batch_size, self.the_dim) + for batch_idx in range(batch_size): + value = torch.sum( + torch.tensor( + [ + (j + 1) * v[batch_idx].sum() + for j, v in enumerate(self.optim_vars) + ] + ) + ) + for i in range(self.the_dim): + error[batch_idx, i] = (i + 1) * value + return error + + # Jacobian_var_i = (i + 1) * jv + # where jv is a matrix with self.the_dim rows such that: + # jv[j,k] = (j + 1) * [1, 1, 1, ..., 1] (#vars times, i.e., v.dim()) + def jacobians(self): + batch_size = self.optim_var_0.data.shape[0] + jacs = [] + for i, v in enumerate(self.optim_vars): + jv = torch.ones(batch_size, self.the_dim, v.dof()) + jv = jv * torch.arange(1, self.the_dim + 1).view(1, -1, 1) + jacs.append(jv * (i + 1)) + return jacs, self.error() + + def dim(self): + return self.the_dim + + def _copy_impl(self): + raise NotImplementedError + + +# This is just an identity matrix times some multiplier +class MockCostWeight(th.CostWeight): + def __init__(self, dim, mult, name=None): + super().__init__(name=name) + self.sqrt = th.Variable(torch.eye(dim).unsqueeze(0) * mult) + self.register_aux_var("sqrt") + + def weight_error(self, error): + return NotImplemented + + def weight_jacobians_and_error(self, jacobians, error): + batch_size = error.shape[0] + sqrt = self.sqrt.data.repeat((batch_size, 1, 1)) + wjs = [] + for jac in jacobians: + wjs.append(torch.matmul(self.sqrt.data, jac)) + werr = torch.matmul(sqrt, error.unsqueeze(2)).squeeze(2) + return wjs, werr + + def _copy_impl(self, new_name=None): + raise NotImplementedError + + +def build_test_objective_and_linear_system(): + # This function creates the an objective that results in the + # following Ax = b, with A = + # + # | v4 | v3 | v2 | v1 | + # f1 [- - - - | - - - | 2 2 | 1 ] + # f2 [- - - - | 2 2 2 | - - | 1 ] * 2 * 1 + # f2 [- - - - | 2 2 2 | - - | 1 ] * 2 * 2 + # f3 [2 2 2 2 | - - - | 1 1 | _ ] * 3 * 1 + # f3 [2 2 2 2 | - - - | 1 1 | _ ] * 3 * 2 + # f3 [2 2 2 2 | - - - | 1 1 | _ ] * 3 * 3 + # + # (the numbers 
at the right of the matrix are (cov) * cost_function_dim) + # + # and b = -[9, 38, 76, 108, 216, 324] + # + # Then checks that torch.linearization produces the same result + var1 = MockVector(1, 1, name="v1") + var2 = MockVector(2, 2, name="v2") + var3 = MockVector(3, 3, name="v3") + var4 = MockVector(4, 4, name="v4") + + cost_function1 = MockCostFunction( + [var1, var2], MockCostWeight(1, 1, name="cov1"), 1, name="f1" + ) + cost_function2 = MockCostFunction( + [var1, var3], MockCostWeight(2, 2, name="cov2"), 2, name="f2" + ) + cost_function3 = MockCostFunction( + [var2, var4], MockCostWeight(3, 3, name="cov3"), 3, name="f3" + ) + + objective = th.Objective() + objective.add(cost_function1) + objective.add(cost_function2) + objective.add(cost_function3) + + ordering = th.VariableOrdering(objective, default_order=False) + ordering.extend([var4, var3, var2, var1]) + + batch_size = 4 + objective.update( + { + "v1": torch.ones(batch_size, 1), + "v2": 2 * torch.ones(batch_size, 2), + "v3": 3 * torch.ones(batch_size, 3), + "v4": 4 * torch.ones(batch_size, 4), + } + ) + + # if cost_function_k(vi, vj) then error is i ** 2 + 2 * (j ** 2) + # and weight is k + wer1 = 1.0 * (1 + 2 * 4) + wer2 = 2.0 * (1 + 2 * 9) + wer3 = 3.0 * (4 + 2 * 16) + + b = -torch.tensor([wer1, wer2, 2 * wer2, wer3, 2 * wer3, 3 * wer3]) + b = b.unsqueeze(0).repeat(batch_size, 1) + + # --- The following puts together the matrix shown above + # weighted jacobians for all errors + var1j = torch.ones(1) + var2j = torch.ones(2) + var3j = torch.ones(3) + var4j = torch.ones(4) + + A1 = torch.cat([torch.zeros(7), 2 * var2j, var1j]).view(1, -1) + A2p = torch.cat([torch.zeros(4), 2 * var3j, torch.zeros(2), var1j]) + A2 = 2 * torch.stack([A2p, 2 * A2p]) + A3p = torch.cat([2 * var4j, torch.zeros(3), var2j, torch.zeros(1)]) + A3 = 3 * torch.stack([A3p, 2 * A3p, 3 * A3p]) + A = torch.cat([A1, A2, A3]) + A = A.unsqueeze(0).repeat(batch_size, 1, 1) + + return objective, ordering, A, b diff --git 
a/theseus/optimizer/tests/test_dense_linearization.py b/theseus/optimizer/tests/test_dense_linearization.py new file mode 100644 index 000000000..bed9c1938 --- /dev/null +++ b/theseus/optimizer/tests/test_dense_linearization.py @@ -0,0 +1,31 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +import pytest # noqa: F401 +import torch # noqa: F401 + +import theseus as th +from theseus.optimizer.tests.linearization_test_utils import ( + build_test_objective_and_linear_system, +) + + +def test_dense_linearization(): + objective, ordering, A, b = build_test_objective_and_linear_system() + + linearization = th.DenseLinearization(objective, ordering=ordering) + linearization.linearize() + + assert b.isclose(linearization.b).all() + + assert A.isclose(linearization.A).all() + + batch_size = A.shape[0] + + for i in range(batch_size): + ata = A[i].t() @ A[i] + atb = (A[i].t() @ b[i]).unsqueeze(1) + assert ata.isclose(linearization.AtA[i]).all() + assert atb.isclose(linearization.Atb[i]).all() diff --git a/theseus/optimizer/tests/test_sparse_linearization.py b/theseus/optimizer/tests/test_sparse_linearization.py new file mode 100644 index 000000000..4e15fbf30 --- /dev/null +++ b/theseus/optimizer/tests/test_sparse_linearization.py @@ -0,0 +1,31 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. 
+ +import pytest # noqa: F401 +import torch # noqa: F401 + +import theseus as th +from theseus.optimizer.tests.linearization_test_utils import ( + build_test_objective_and_linear_system, +) + + +def test_sparse_linearization(): + + objective, ordering, A, b = build_test_objective_and_linear_system() + + linearization = th.SparseLinearization(objective, ordering=ordering) + linearization.linearize() + + assert b.isclose(linearization.b).all() + + batch_size = A.shape[0] + + for i in range(batch_size): + csrAi = linearization.structure().csr_straight(linearization.A_val[i, :]) + assert A[i, :, :].isclose(torch.Tensor(csrAi.todense())).all() + + for i in range(batch_size): + assert b[i].isclose(linearization.b[i]).all() diff --git a/theseus/optimizer/tests/test_variable_ordering.py b/theseus/optimizer/tests/test_variable_ordering.py new file mode 100644 index 000000000..672d5b5a6 --- /dev/null +++ b/theseus/optimizer/tests/test_variable_ordering.py @@ -0,0 +1,82 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. 
+ +import itertools +import random + +import pytest # noqa: F401 +import torch + +import theseus as th +from theseus.core.tests.common import MockCostFunction, MockCostWeight, MockVar + + +def test_default_variable_ordering(): + # repeat test a few times with different inputs + for _ in range(5): + # vary the number of variables to have in the objective + for num_variables in range(2, 11): + # generate all possible 2-variable cost functions, then shuffle their add order + variables = [] + for i in range(num_variables): + variables.append(MockVar(1, data=None, name=f"var{i}")) + variable_pairs = [c for c in itertools.combinations(variables, 2)] + random.shuffle(variable_pairs) + + # add the cost function to the objective and store the order of variable addition + expected_variable_order = [] + objective = th.Objective() + cost_weight = MockCostWeight( + th.Variable(torch.ones(1), name="cost_weight_aux") + ) + for var1, var2 in variable_pairs: + cost_function = MockCostFunction([var1, var2], [], cost_weight) + if var1 not in expected_variable_order: + expected_variable_order.append(var1) + if var2 not in expected_variable_order: + expected_variable_order.append(var2) + objective.add(cost_function) + + # check the the default variable order matches the expected order + default_order = th.VariableOrdering(objective) + for i, var in enumerate(expected_variable_order): + assert i == default_order.index_of(var.name) + + +def test_variable_ordering_append_and_remove(): + variables = [MockVar(1, data=None, name=f"var{i}") for i in range(50)] + mock_objective = th.Objective() + mock_objective.optim_vars = dict([(var.name, var) for var in variables]) + # repeat a bunch of times with different order + for _ in range(100): + random.shuffle(variables) + order = th.VariableOrdering(mock_objective, default_order=False) + for v in variables: + order.append(v) + for i, v in enumerate(variables): + assert i == order.index_of(v.name) + assert v == order[i] + assert order.complete + + 
random.shuffle(variables) + for v in variables: + order.remove(v) + assert not order.complete + assert v not in order._var_order + assert v.name not in order._var_name_to_index + + +def test_variable_ordering_iterator(): + variables = [MockVar(1, data=None, name=f"var{i}") for i in range(50)] + mock_objective = th.Objective() + mock_objective.optim_vars = dict([(var.name, var) for var in variables]) + order = th.VariableOrdering(mock_objective, default_order=False) + for v in variables: + order.append(v) + + i = 0 + for v in order: + assert v == variables[i] + i += 1 diff --git a/theseus/optimizer/variable_ordering.py b/theseus/optimizer/variable_ordering.py new file mode 100644 index 000000000..101601db2 --- /dev/null +++ b/theseus/optimizer/variable_ordering.py @@ -0,0 +1,60 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +from typing import Dict, List, Sequence + +from theseus.core import Objective, Variable + + +class VariableOrdering: + def __init__(self, objective: Objective, default_order: bool = True): + self.objective = objective + self._var_order: List[Variable] = [] + self._var_name_to_index: Dict[str, int] = {} + if default_order: + self._compute_default_order(objective) + + def _compute_default_order(self, objective: Objective): + assert not self._var_order and not self._var_name_to_index + cur_idx = 0 + for variable_name, variable in objective.optim_vars.items(): + if variable_name in self._var_name_to_index: + continue + self._var_order.append(variable) + self._var_name_to_index[variable_name] = cur_idx + cur_idx += 1 + + def index_of(self, key: str) -> int: + return self._var_name_to_index[key] + + def __getitem__(self, index) -> Variable: + return self._var_order[index] + + def __iter__(self): + return iter(self._var_order) + + def append(self, var: Variable): + if var in self._var_order: + raise ValueError( + 
f"Variable {var.name} has already been added to the order." + ) + if var.name not in self.objective.optim_vars: + raise ValueError( + f"Variable {var.name} is not an optimization variable for the objective." + ) + self._var_order.append(var) + self._var_name_to_index[var.name] = len(self._var_order) - 1 + + def remove(self, var: Variable): + self._var_order.remove(var) + del self._var_name_to_index[var.name] + + def extend(self, variables: Sequence[Variable]): + for var in variables: + self.append(var) + + @property + def complete(self): + return len(self._var_order) == self.objective.size_variables() diff --git a/theseus/tests/test_theseus_layer.py b/theseus/tests/test_theseus_layer.py new file mode 100644 index 000000000..b8fc10d13 --- /dev/null +++ b/theseus/tests/test_theseus_layer.py @@ -0,0 +1,389 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +import math + +import pytest # noqa: F401 +import torch +import torch.nn as nn +import torch.nn.functional as F + +import theseus as th +import theseus.utils as thutils +from theseus.core.tests.common import ( + MockCostFunction, + MockCostWeight, + MockVar, + create_objective_with_mock_cost_functions, +) +from theseus.theseus_layer import TheseusLayer + +device = "cuda:0" if torch.cuda.is_available() else "cpu" + + +def model(x, b): + return (b[..., :1] * x) ** 2 + b[..., 1:] + + +def model_grad(x, b): + g1 = 2 * (b[..., :1] * x) * x + g2 = b[..., 1:] * torch.ones_like(x) + return g1, g2 + + +# This is a cost function of two variables that tries to fit +# f(x;b) = A * x[0]^ 2 + B to the dataset +# given by xs and ys. Here the variables for the cost function +# are A, B, and the goal is to minimize MSE over the +# dataset. We will pass the variables as a single +# variable object of dimension 2. 
+class QuadraticFitCostFunction(th.CostFunction): + def __init__(self, optim_vars, cost_weight, xs=None, ys=None): + super().__init__(cost_weight, name="qf_cost_function") + assert len(optim_vars) == 1 and optim_vars[0].dof() == 2 + for i, var in enumerate(optim_vars): + setattr(self, f"optim_var_{i}", var) + self.register_optim_var(f"optim_var_{i}") + self.xs = xs + self.ys = ys + + def error_from_tensors(self, optim_var_0_data): + pred_y = model(self.xs, optim_var_0_data) + return self.ys - pred_y + + # err = y - f(x:b), where b are the current variable values + def error(self): + return self.error_from_tensors(self.optim_var_0.data) + + def jacobians(self): + g1, g2 = model_grad(self.xs, self.optim_var_0.data) + return [-torch.stack([g1, g2], axis=2)], self.error() + + def dim(self): + return self.xs.shape[1] + + def to(self, *args, **kwargs): + super().to(*args, **kwargs) + self.xs = self.xs.to(*args, **kwargs) + self.ys = self.ys.to(*args, **kwargs) + + def _copy_impl(self): + raise NotImplementedError + + +def create_qf_theseus_layer( + xs, + ys, + cost_weight=th.ScaleCostWeight(1.0), + nonlinear_optimizer_cls=th.GaussNewton, + linear_solver_cls=th.CholeskyDenseSolver, + max_iterations=10, + use_learnable_error=False, +): + variables = [th.Vector(2, name="coefficients")] + objective = th.Objective() + cost_function = QuadraticFitCostFunction(variables, cost_weight, xs=xs, ys=ys) + + if use_learnable_error: + # For learnable error we embed the original cost weight as an auxiliary + # variable that's part of the error function, and now becomes a learnable + # parameter of the error + def error_fn(optim_vars, aux_vars): + # aux_vars is the learned weight + # note that this is a hybrid cost function since, part of the function + # follows the structure of QuadraticFitCostFunction, only the error weight + # factor (aux_vars[0]) is learned + return aux_vars[0].data * cost_function.error_from_tensors( + optim_vars[0].data + ) + + if isinstance(cost_weight, 
th.ScaleCostWeight): + # this case only hits with the reference layer, for which weight + # is not learned (just a scalar value of 1) + cost_weight_dim = None # Vector infers dimension from given cw_data + cw_data = torch.ones(1, 1) + elif isinstance(cost_weight, th.DiagonalCostWeight): + # cw_data is None, since no need to pass data to aux variable, + # because it will be replaced during forward pass of learned layer + cost_weight_dim = cost_function.weight.diagonal.shape[1] + cw_data = None + + # in this case the cost weight is a scalar constant of 1.0 + learnable_cost_function = th.AutoDiffCostFunction( + variables, + error_fn, + cost_function.dim(), + aux_vars=[ + th.Vector(cost_weight_dim, name="learnable_err_param", data=cw_data) + ], + autograd_vectorize=True, + ) + objective.add(learnable_cost_function) + else: + objective.add(cost_function) + + optimizer = nonlinear_optimizer_cls( + objective, + linear_solver_cls=linear_solver_cls, + max_iterations=max_iterations, + ) + assert isinstance(optimizer.linear_solver, linear_solver_cls) + theseus_layer = th.TheseusLayer(optimizer) + return theseus_layer + + +def test_layer_solver_constructor(): + dummy = torch.ones(1, 1) + for linear_solver_cls in [th.LUDenseSolver, th.CholeskyDenseSolver]: + layer = create_qf_theseus_layer( + dummy, dummy, linear_solver_cls=linear_solver_cls + ) + assert isinstance( + layer.optimizer.linear_solver.linearization, th.DenseLinearization + ) + assert isinstance(layer.optimizer.linear_solver, linear_solver_cls) + assert isinstance(layer.optimizer, th.GaussNewton) + + +def _run_optimizer_test( + nonlinear_optimizer_cls, + linear_solver_cls, + optimizer_kwargs, + cost_weight_model, + use_learnable_error=False, + verbose=True, +): + device = "cuda:0" if torch.cuda.is_available() else "cpu" + print(f"_run_test_for: {device}") + print( + f"testing for optimizer {nonlinear_optimizer_cls.__name__}, " + f"cost weight modeled as {cost_weight_model}, " + f"linear solver 
{linear_solver_cls.__name__}" + ) + + rng = torch.Generator(device=device) + rng.manual_seed(0) + + torch.manual_seed(0) # fix global seed for mlp + + # Create the dataset to fit, model(x) is the true data generation process + batch_size = 16 + num_points = 10 + xs = torch.linspace(0, 10, num_points).repeat(batch_size, 1).to(device) + xs += 0.1 * torch.randn(batch_size, num_points, generator=rng, device=device) + + ys = model(xs, torch.ones(batch_size, 2, device=device)) + # Shift the y values a bit so there is no perfect fit and changing the + # cost weight results in a different parameter fit + fake_noise = torch.logspace(-4, 4, num_points, base=math.e).unsqueeze(0).to(device) + ys -= fake_noise + + # First we create a quadratic fit problem with unit cost weight to see what + # its solution is and use this solution as the target + layer_ref = create_qf_theseus_layer( + xs, + ys, + nonlinear_optimizer_cls=nonlinear_optimizer_cls, + linear_solver_cls=linear_solver_cls, + use_learnable_error=use_learnable_error, + ) + layer_ref.to(device) + with torch.no_grad(): + input_values = {"coefficients": torch.ones(batch_size, 2, device=device) * 0.75} + target_vars, _ = layer_ref.forward( + input_values, verbose=verbose, **optimizer_kwargs + ) + + # Now create another that starts with a random cost weight and use backpropagation to + # find the cost weight whose solution matches the above target + # To do this, we create a diagonal cost weight with an auxiliary variable called + # "cost_weight_values", which will get updated by the forward method of Theseus + # layer. + + # Note: interestingly, if we pass a torch.Parameter parameter as the data to the + # auxiliary variable of the cost weight, we don't even + # need to pass updated values through "objective.update". 
I'm doing it this way + # to check that update works properly + cost_weight = th.DiagonalCostWeight( + th.Variable(torch.empty(num_points), name="cost_weight_values") + ) + + # Here we create the outer loop models and optimizers for the cost weight + if cost_weight_model == "softmax": + + cost_weight_params = nn.Parameter( + torch.randn(num_points, generator=rng, device=device) + ) + + def cost_weight_fn(): + return F.softmax(cost_weight_params, dim=0).view(1, -1) + + optimizer = torch.optim.Adam([cost_weight_params], lr=0.075) + + elif cost_weight_model == "mlp": + mlp = thutils.build_mlp(num_points, 20, num_points, 2).to(device) + dummy_input = torch.ones(1, num_points, device=device) + + def cost_weight_fn(): + return mlp(dummy_input) + + optimizer = torch.optim.Adam(mlp.parameters(), lr=0.075) + + layer_to_learn = create_qf_theseus_layer( + xs, + ys, + cost_weight=cost_weight, + nonlinear_optimizer_cls=nonlinear_optimizer_cls, + linear_solver_cls=linear_solver_cls, + use_learnable_error=use_learnable_error, + ) + layer_to_learn.to(device) + + # Check the initial solution quality to check how much has loss improved later + + # When using learnable error function, we don't update the cost weight directly but do it + # through the parameters of the learnable error + cost_weight_param_name = ( + "learnable_err_param" if use_learnable_error else "cost_weight_values" + ) + input_values = { + "coefficients": torch.ones(batch_size, 2, device=device) * 0.75, + cost_weight_param_name: cost_weight_fn(), + } + + with torch.no_grad(): + pred_vars, info = layer_to_learn.forward(input_values, **optimizer_kwargs) + loss0 = F.mse_loss( + pred_vars["coefficients"], target_vars["coefficients"] + ).item() + assert not ( + (info.status == th.NonlinearOptimizerStatus.START) + | (info.status == th.NonlinearOptimizerStatus.FAIL) + ).all() + + print("Initial loss: ", loss0) + # --------- Learning happens here ---------# + solved = False + for i in range(200): + optimizer.zero_grad() + 
input_values = { + "coefficients": torch.ones(batch_size, 2, device=device) * 0.75, + cost_weight_param_name: cost_weight_fn(), + } + pred_vars, info = layer_to_learn.forward( + input_values, verbose=verbose, **optimizer_kwargs + ) + assert not ( + (info.status == th.NonlinearOptimizerStatus.START) + | (info.status == th.NonlinearOptimizerStatus.FAIL) + ).all() + + loss = F.mse_loss(pred_vars["coefficients"], target_vars["coefficients"]) + loss.backward() + print(i, loss.item(), loss.item() / loss0) + optimizer.step() + if loss.item() / loss0 < 5e-3: + solved = True + break + assert solved + + +def test_backward_gauss_newton(): + for use_learnable_error in [True, False]: + for linear_solver_cls in [th.CholeskyDenseSolver, th.LUDenseSolver]: + for cost_weight_model in ["softmax", "mlp"]: + _run_optimizer_test( + th.GaussNewton, + linear_solver_cls, + {}, + cost_weight_model, + use_learnable_error=use_learnable_error, + ) + + +def test_backward_gauss_newton_choleskysparse(): + for use_learnable_error in [True, False]: + for cost_weight_model in ["softmax", "mlp"]: + _run_optimizer_test( + th.GaussNewton, + th.CholmodSparseSolver, + {}, + cost_weight_model, + use_learnable_error=use_learnable_error, + ) + + +def test_backward_levenberg_marquardt(): + for use_learnable_error in [True, False]: + for linear_solver_cls in [th.CholeskyDenseSolver, th.LUDenseSolver]: + for cost_weight_model in ["softmax", "mlp"]: + _run_optimizer_test( + th.LevenbergMarquardt, + linear_solver_cls, + {"damping": 0.01}, + cost_weight_model, + use_learnable_error=use_learnable_error, + ) + + +def test_backward_levenberg_marquardt_choleskysparse(): + for use_learnable_error in [True, False]: + for cost_weight_model in ["softmax", "mlp"]: + _run_optimizer_test( + th.LevenbergMarquardt, + th.CholmodSparseSolver, + {"damping": 0.01, "ellipsoidal_damping": False}, + cost_weight_model, + use_learnable_error=use_learnable_error, + ) + + +def test_send_to_device(): + device = "cuda:0" if 
torch.cuda.is_available() else "cpu" + print(f"test_send_to_device: {device}") + + # Create the dataset to fit, model(x) is the true data generation process + batch_size = 16 + num_points = 10 + xs = torch.linspace(0, 10, num_points).repeat(batch_size, 1) + ys = model(xs, torch.ones(batch_size, 2)) + + objective = create_qf_theseus_layer(xs, ys) + input_values = {"coefficients": torch.ones(batch_size, 2, device=device) * 0.5} + with torch.no_grad(): + if device != "cpu": + with pytest.raises(RuntimeError): + objective.forward(input_values) + objective.to(device) + output_values, _ = objective.forward(input_values) + for k, v in output_values.items(): + assert v.device == input_values[k].device + + +def test_check_objective_consistency(): + objective, *_ = create_objective_with_mock_cost_functions() + optimizer = th.GaussNewton(objective, th.CholeskyDenseSolver) + + def _do_check(layer_, optimizer_): + with pytest.raises(RuntimeError): + layer_.forward({}) + with pytest.raises(RuntimeError): + optimizer_.optimize() + + # Check for adding a factor + new_cost = MockCostFunction( + [MockVar(1, name="dummy")], + [], + MockCostWeight(MockVar(1, name="weight_aux")), + ) + layer = TheseusLayer(optimizer) + objective.add(new_cost) + _do_check(layer, optimizer) + + # Now check erasing a factor + objective, cost_functions, *_ = create_objective_with_mock_cost_functions() + optimizer = th.GaussNewton(objective, th.CholeskyDenseSolver) + objective.erase(cost_functions[0].name) + _do_check(layer, optimizer) diff --git a/theseus/theseus_layer.py b/theseus/theseus_layer.py new file mode 100644 index 000000000..66542811b --- /dev/null +++ b/theseus/theseus_layer.py @@ -0,0 +1,59 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. 
+ +from typing import Dict, Optional, Tuple + +import torch +import torch.nn as nn + +from theseus.optimizer import Optimizer, OptimizerInfo + + +class TheseusLayer(nn.Module): + def __init__( + self, + optimizer: Optimizer, + ): + super().__init__() + self.objective = optimizer.objective + self.optimizer = optimizer + self._objectives_version = optimizer.objective.current_version + + def forward( + self, + input_data: Optional[Dict[str, torch.Tensor]] = None, + track_best_solution: bool = False, + verbose: bool = False, + **optimizer_kwargs + ) -> Tuple[Dict[str, torch.Tensor], OptimizerInfo]: + if self._objectives_version != self.objective.current_version: + raise RuntimeError( + "The objective was modified after the layer's construction, which is " + "currently not supported." + ) + self.objective.update(input_data) + info = self.optimizer.optimize( + track_best_solution=track_best_solution, verbose=verbose, **optimizer_kwargs + ) + values = dict( + [ + (var_name, var.data) + for var_name, var in self.objective.optim_vars.items() + ] + ) + return values, info + + # Applies to() with given args to all tensors in the objective + def to(self, *args, **kwargs): + super().to(*args, **kwargs) + self.objective.to(*args, **kwargs) + + @property + def device(self) -> torch.device: + return self.objective.device + + @property + def dtype(self) -> torch.dtype: + return self.objective.dtype diff --git a/theseus/utils/__init__.py b/theseus/utils/__init__.py new file mode 100644 index 000000000..7cd52a654 --- /dev/null +++ b/theseus/utils/__init__.py @@ -0,0 +1,6 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. 
+ +from .utils import build_mlp, gather_from_rows_cols, numeric_jacobian diff --git a/theseus/utils/examples/__init__.py b/theseus/utils/examples/__init__.py new file mode 100644 index 000000000..dcf9deca5 --- /dev/null +++ b/theseus/utils/examples/__init__.py @@ -0,0 +1,26 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +from .motion_planning import ( + InitialTrajectoryModel, + MotionPlanner, + ScalarCollisionWeightAndCostEpstModel, + ScalarCollisionWeightModel, + TrajectoryDataset, + generate_trajectory_figs, +) +from .tactile_pose_estimation import ( + TactileMeasModel, + TactileWeightModel, + get_tactile_cost_weight_inputs, + get_tactile_initial_optim_vars, + get_tactile_motion_capture_inputs, + get_tactile_nn_measurements_inputs, + get_tactile_poses_from_values, + init_tactile_model_from_file, + load_tactile_dataset_from_file, + load_tactile_sdf_from_file, + visualize_tactile_push2d, +) diff --git a/theseus/utils/examples/motion_planning/__init__.py b/theseus/utils/examples/motion_planning/__init__.py new file mode 100644 index 000000000..a40e1366c --- /dev/null +++ b/theseus/utils/examples/motion_planning/__init__.py @@ -0,0 +1,12 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +from .misc import TrajectoryDataset, generate_trajectory_figs +from .models import ( + InitialTrajectoryModel, + ScalarCollisionWeightAndCostEpstModel, + ScalarCollisionWeightModel, +) +from .motion_planner import MotionPlanner diff --git a/theseus/utils/examples/motion_planning/misc.py b/theseus/utils/examples/motion_planning/misc.py new file mode 100644 index 000000000..1f7083bde --- /dev/null +++ b/theseus/utils/examples/motion_planning/misc.py @@ -0,0 +1,194 @@ +# Copyright (c) Meta Platforms, Inc. 
and affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +import os +import pathlib +import random +from typing import Dict, List, Optional, Tuple + +import matplotlib as mpl +import matplotlib.pyplot as plt +import numpy as np +import torch +import torch.utils.data +import yaml + +import theseus as th + +# ----------------------------------------------------------------------------------- # +# ------------------------------------ Data loading --------------------------------- # +# ----------------------------------------------------------------------------------- # +FileInfo = Tuple[pathlib.Path, pathlib.Path, pathlib.Path, str] + + +class TrajectoryDataset(torch.utils.data.Dataset): + def __init__( + self, + train: bool, + num_images: int, + dataset_dir: str, + map_type: str, + val_ratio: float = 0, + filter_collision_maps: bool = True, + ): + self.dataset_dir = pathlib.Path(dataset_dir) + self.map_type = map_type + + with open(self.dataset_dir / "meta.yaml") as f: + self.cfg = yaml.safe_load(f) + + self.collision_maps = set() + collision_fname = self.dataset_dir / "collision_maps.txt" + if collision_fname.is_file() and filter_collision_maps: + with open(collision_fname, "r") as f: + self.collision_maps.update(f.read().splitlines()) + + files_per_type = self.get_all_files() + all_train_files: List[FileInfo] = [] + all_val_files: List[FileInfo] = [] + num_train = int((1 - val_ratio) * self.cfg["num_envs"]) + for type_ in files_per_type: + if map_type == "mixed" or map_type == type_: + all_train_files.extend(files_per_type[type_][:num_train]) + all_val_files.extend(files_per_type[type_][num_train:]) + + random.shuffle(all_train_files) + random.shuffle(all_val_files) + self.files = ( + all_train_files[:num_images] if train else all_val_files[:num_images] + ) + + def get_all_files(self) -> Dict[str, List[FileInfo]]: + files: Dict[str, List[FileInfo]] = dict((k, []) for k in 
self.cfg["map_types"]) + for map_type in self.cfg["map_types"]: + for idx in range(self.cfg["num_envs"]): + if f"{map_type}_{idx}" in self.collision_maps: + continue + img_fname = self.dataset_dir / "im_sdf" / map_type / f"{idx}_im.png" + sdf_fname = self.dataset_dir / "im_sdf" / map_type / f"{idx}_sdf.npy" + traj_fname = ( + self.dataset_dir + / "opt_trajs_gpmp2" + / map_type + / f"env_{idx}_prob_0.npz" + ) + for f in [img_fname, sdf_fname, traj_fname]: + assert os.path.isfile(f) + files[map_type].append( + (img_fname, sdf_fname, traj_fname, f"{map_type}_{idx}") + ) + return files + + def __getitem__(self, idx: int): + img_file, sdf_file, traj_file, file_id = self.files[idx] + + env_params = self.cfg["env_params"] + + # SDF Data + cells_per_unit = self.cfg["im_size"] / ( + env_params["x_lims"][1] - env_params["x_lims"][0] + ) + cell_size = torch.tensor(cells_per_unit).reciprocal().view(1) + origin = torch.tensor([env_params["x_lims"][0], env_params["y_lims"][0]]) + sdf_data = torch.from_numpy(np.load(sdf_file)) + + # Image + tmp_map = plt.imread(img_file) + if tmp_map.ndim == 3: + the_map = tmp_map[..., 0] + + # Trajectory + traj_data = np.load(traj_file) + trajectory = torch.from_numpy(traj_data["th_opt"]).permute(1, 0) + # next two lines re-orient the dgpmp2 trajectory to theseus coordinate system + trajectory[1] *= -1.0 + trajectory[3] *= -1.0 + return { + "map_tensor": the_map, + "sdf_origin": origin.double(), + "cell_size": cell_size.double(), + "sdf_data": sdf_data.double(), + "expert_trajectory": trajectory.double(), + "file_id": file_id, + } + + def __len__(self): + return len(self.files) + + +# ----------------------------------------------------------------------------------- # +# ------------------------------- Plotting utilities -------------------------------- # +# ----------------------------------------------------------------------------------- # +def _create_line_from_trajectory( + x_list, y_list, linestyle="-", linewidth=2, alpha=1.0, color="red" 
+): + line = plt.Line2D(x_list, y_list) + line.set_linestyle(linestyle) + line.set_linewidth(linewidth) + line.set_color(color) + line.set_alpha(alpha) + return line + + +def _add_robot_to_trajectory(x_list, y_list, radius, color="magenta", alpha=0.05): + patches = [] + for i in range(x_list.shape[0]): + patch = mpl.patches.Circle((x_list[i], y_list[i]), radius) + patches.append(patch) + patch_collection = mpl.collections.PatchCollection( + patches, alpha=alpha, color=color + ) + return patch_collection + + +def generate_trajectory_figs( + map_tensor: torch.Tensor, + sdf: th.eb.SignedDistanceField2D, + trajectories: List[torch.Tensor], + robot_radius: float, + max_num_figures: int = 20, + labels: Optional[List[str]] = None, + fig_idx_robot: int = 1, + figsize: Tuple[int, int] = (8, 8), +) -> List[plt.Figure]: + # cell rows/cols for each batch of trajectories + traj_rows = [] + traj_cols = [] + for trajectory in trajectories: + row, col, _ = sdf.convert_points_to_cell(trajectory[:, :2, :]) + traj_rows.append(np.clip(row, 0, map_tensor.shape[1] - 1)) + traj_cols.append(np.clip(col, 0, map_tensor.shape[1] - 1)) + + # Generate a separate figure for each batch index + colors = ["green", "blue", "red"] + if not labels: + labels = ["initial_solution", "best_solution", "expert"] + figures: List[plt.Figure] = [] + for map_idx in range(map_tensor.shape[0]): + if map_idx >= max_num_figures: + continue + fig, axs = plt.subplots(1, 1, figsize=figsize) + map_data = map_tensor[map_idx].clone().cpu().numpy() + map_data = np.tile(map_data, (3, 1, 1)).transpose((1, 2, 0)) + axs.imshow(map_data) + cell_size = sdf.cell_size.data + patches = [] + for t_idx, trajectory in enumerate(trajectories): + row = traj_rows[t_idx][map_idx] + col = traj_cols[t_idx][map_idx] + line = _create_line_from_trajectory(col, row, color=colors[t_idx]) + axs.add_line(line) + if t_idx == fig_idx_robot: # solution trajectory + radius = robot_radius / cell_size[map_idx][0] + patch_coll = 
_add_robot_to_trajectory(col, row, radius) + axs.add_collection(patch_coll) + patches.append(mpl.patches.Patch(color=colors[t_idx], label=labels[t_idx])) + patches.append( + mpl.patches.Patch(color="magenta", label=f"radius = {robot_radius}") + ) + axs.legend(handles=patches, fontsize=10) + fig.tight_layout() + figures.append(fig) + return figures diff --git a/theseus/utils/examples/motion_planning/models.py b/theseus/utils/examples/motion_planning/models.py new file mode 100644 index 000000000..6a994f8e1 --- /dev/null +++ b/theseus/utils/examples/motion_planning/models.py @@ -0,0 +1,242 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +from typing import Any, Dict, cast + +import numpy as np +import torch +import torch.nn as nn + +import theseus as th + +from .motion_planner import MotionPlanner + + +class _ScalarModel(nn.Module): + def __init__(self, hidden_size: int): + super().__init__() + self.layers = nn.Sequential( + nn.Linear(1, hidden_size), + nn.ReLU(), + nn.Linear(hidden_size, 1), + ) + + def forward(self): + dummy = torch.ones(1, 1).to(self.layers[0].weight.device) + return self.layers(dummy) + + +class _OrderOfMagnitudeModel(nn.Module): + def __init__(self, hidden_size: int, max_order: int): + super().__init__() + self.layers = nn.Sequential( + nn.Linear(1, hidden_size), + nn.ReLU(), + nn.Linear(hidden_size, max_order), + nn.ReLU(), + ) + self.register_buffer("magnitudes", (10 ** torch.arange(max_order)).unsqueeze(0)) + + def forward(self): + dummy = torch.ones(1, 1).to(self.layers[0].weight.device) + mag_weights = self.layers(dummy).softmax(dim=1) + return (mag_weights * self.magnitudes).sum(dim=1, keepdim=True) + + +# ------------------------------------------------------------------------------------ # +# All public models in this module receive a batch of map data and return one or more +# torch tensors. 
+# ------------------------------------------------------------------------------------ # + + +class ScalarCollisionWeightModel(nn.Module): + def __init__(self): + super().__init__() + self.model = _OrderOfMagnitudeModel(10, 5) + + def forward(self, _: Dict[str, torch.Tensor]): + return {"collision_w": self.model()} + + +class ScalarCollisionWeightAndCostEpstModel(nn.Module): + def __init__(self, robot_radius: float): + super().__init__() + self.collision_weight_model = _OrderOfMagnitudeModel(200, 5) + self.safety_dist_model = _ScalarModel(100) + self.robot_radius = robot_radius + + def forward(self, _: Dict[str, torch.Tensor]): + collision_w = self.collision_weight_model() + safety_dist = self.safety_dist_model().sigmoid() + return {"collision_w": collision_w, "cost_eps": safety_dist + self.robot_radius} + + +# High level idea: the trajectory is generated by the following sequence of steps: +# +# 1. Generate a parabola whose focus is the middle point between start and goal, +# and whose distance between focus and vertex is the output of an MLP. Thus, +# this part of the model learns how much it should bend outward from the straight +# line path (and towards which side). +# +# 2. Add more precise curvatures to the trajectory by constructing a "sample" around +# the parabola, using the Gaussian Process formulation of the original motion +# planning problem (but ignoring obstacles). This relies on the fact that sampling +# from the GP is equivalent to: +# +# sample = Cholesky_Low(inverse(GP_covariance)) @ u, +# +# where u is a multivariate vector sampled from a standard multivariate Normal +# in the space of the GP variables. +# +# Thus, we can generate arbitrary samples by learning the vector u to use, since +# each value of the vector will be associated to a different trajectory. We learn the +# vector u with another MLP, and use it to compute a fake "sample" around the +# mean trajectory specified by the parabola. 
+# +# Note: This basic model is included here for illustration purposes, and it +# uses the map's file ID as input, so it has no ability to generalize. A more +# practical extension would use a CNN encoder upstream to produce an input for +# each individual map image. +class InitialTrajectoryModel(nn.Module): + def __init__( + self, + planner: MotionPlanner, + max_num_images: int = 1000, + hid_size: int = 200, + ): + super().__init__() + + # This planner is only used to generate initial trajectories from a + # multivariate gaussian around a curved trajectory, + # and it's not part of the motion planning optimization that generates + # the final trajectory + self.aux_motion_planner = planner.copy(collision_weight=0.0) + + # Learns the vector u to use for generating the trajectory "sample", as + # explained in Step 2 above. + self.layers_u = nn.Sequential( + nn.Linear(2 * max_num_images, hid_size), + nn.ReLU(), + nn.Linear(hid_size, hid_size), + nn.ReLU(), + nn.Linear(hid_size, 4 * (planner.num_time_steps + 1)), + ) + + # Learns a quadratic offset in normal direction to bend the mean trajectory. + # This is the distance/direction between the parabola's focus and vertex, + # mentioned in the Step 1 above. 
+ self.bend_factor = nn.Sequential( + nn.Linear(2 * max_num_images, hid_size), + nn.ReLU(), + nn.Linear(hid_size, 1), + nn.Tanh(), + ) + + def init_weights(m_): + if isinstance(m_, nn.Linear): + torch.nn.init.normal_(m_.weight) + torch.nn.init.normal_(m_.bias) + + self.bend_factor.apply(init_weights) + + self.dt = planner.total_time / planner.num_time_steps + + self.num_images = max_num_images + + def forward(self, batch: Dict[str, Any]): + device = self.aux_motion_planner.objective.device + start = batch["expert_trajectory"][:, :2, 0].to(device) + goal = batch["expert_trajectory"][:, :2, -1].to(device) + + one_hot_dummy = torch.zeros(start.shape[0], self.num_images * 2).to(device) + file_ids = batch["file_id"] + for batch_idx, fi in enumerate(file_ids): + idx = int(fi.split("_")[1]) + int("forest" in fi) * self.num_images + one_hot_dummy[batch_idx, idx] = 1 + + # Compute straight line positions to use as mean of initial trajectory + trajectory_len = self.aux_motion_planner.trajectory_len + dist_vec = goal - start + pos_incr_per_step = dist_vec / (trajectory_len - 1) + trajectory = torch.zeros(start.shape[0], 4 * trajectory_len).to( + device=device, dtype=start.dtype + ) + trajectory[:, :2] = start + for t_step in range(1, trajectory_len): + idx = 4 * t_step + trajectory[:, idx : idx + 2] = ( + trajectory[:, idx - 4 : idx - 2] + pos_incr_per_step + ) + + # Add the parabola bend + bend_factor = self.bend_factor(one_hot_dummy) + start_goal_dist = dist_vec.norm(dim=1) + cur_t = torch.zeros_like(start_goal_dist) - start_goal_dist / 2 + c = (start_goal_dist / 2) ** 2 + angle = th.SO2( + theta=torch.ones(dist_vec.shape[0], 1).to(device=device) * np.pi / 2 + ) + normal_vector = angle.rotate(th.Point2(data=dist_vec)).data.to(device=device) + normal_vector /= normal_vector.norm(dim=1, keepdim=True) + for t_step in range(1, trajectory_len): + idx = 4 * t_step + cur_t += start_goal_dist / (trajectory_len - 1) + add = 2 * bend_factor * ((cur_t ** 2 - c) / c).view(-1, 1) + 
trajectory[:, idx : idx + 2] += normal_vector * add + + # Compute resulting velocities + for t_step in range(1, trajectory_len): + idx = 4 * t_step + trajectory[:, idx + 2 : idx + 4] = ( + trajectory[:, idx : idx + 2] - trajectory[:, idx - 4 : idx - 2] + ) / self.dt + + # Finally, the final initial trajectory is further curved by a learned + # "sample" from the GP describing the motion planning problem (ignoring + # obstacles) + # First, compute the covariance matrix + with torch.no_grad(): + planner_inputs = { + "sdf_origin": batch["sdf_origin"].to(device), + "start": start.to(device), + "goal": goal.to(device), + "cell_size": batch["cell_size"].to(device), + "sdf_data": batch["sdf_data"].to(device), + } + self.aux_motion_planner.objective.update(planner_inputs) + + motion_optimizer = cast( + th.NonlinearLeastSquares, self.aux_motion_planner.layer.optimizer + ) + linearization = cast( + th.DenseLinearization, motion_optimizer.linear_solver.linearization + ) + + # Assign current trajectory as the mean of linearization + for var in linearization.ordering: + var_type, time_idx = var.name.split("_") + assert var_type in ["pose", "vel"] + if var_type == "pose": + traj_idx = int(time_idx) * 4 + if var_type == "vel": + traj_idx = int(time_idx) * 4 + 2 + var.update(trajectory[:, traj_idx : traj_idx + 2]) + + linearization.linearize() + cov_matrix = torch.inverse(linearization.AtA) + lower_cov = torch.linalg.cholesky(cov_matrix) + + # Compute the u vector to generate the "sample" + u = self.layers_u(one_hot_dummy).unsqueeze(2) + initial_trajectory = trajectory.unsqueeze(2) + torch.matmul(lower_cov, u) + + # Construct the variable values dictionary to return + values: Dict[str, torch.Tensor] = {} + for t_step in range(trajectory_len): + idx = 4 * t_step + values[f"pose_{t_step}"] = initial_trajectory[:, idx : idx + 2, 0] + values[f"vel_{t_step}"] = initial_trajectory[:, idx + 2 : idx + 4, 0] + + return values diff --git 
a/theseus/utils/examples/motion_planning/motion_planner.py b/theseus/utils/examples/motion_planning/motion_planner.py new file mode 100644 index 000000000..21ee13bd9 --- /dev/null +++ b/theseus/utils/examples/motion_planning/motion_planner.py @@ -0,0 +1,327 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +import copy +from typing import Dict, List, Optional, Tuple + +import torch + +import theseus as th + + +class MotionPlanner: + def __init__( + self, + map_size: int, + epsilon_dist: float, + total_time: float, + collision_weight: float, + Qc_inv: List[List[int]], + num_time_steps: int, + optim_method: str, + max_optim_iters: int, + step_size: float = 1.0, + use_single_collision_weight: bool = True, + device: str = "cpu", + dtype: torch.dtype = torch.double, + ): + self.map_size = map_size + self.epsilon_dist = epsilon_dist + self.total_time = total_time + self.collision_weight = collision_weight + self.Qc_inv = copy.deepcopy(Qc_inv) + self.num_time_steps = num_time_steps + self.optim_method = optim_method + self.max_optim_iters = max_optim_iters + self.step_size = step_size + self.use_single_collision_weight = use_single_collision_weight + self.device = device + self.dtype = dtype + + self.trajectory_len = num_time_steps + 1 + + # --------------------------------------------------------------------------- # + # ---------------------------- Auxiliary variables -------------------------- # + # --------------------------------------------------------------------------- # + # First we create auxiliary variables for all the cost functions we are going + # to need. Auxiliary variables are variables whose value won't be changed by + # the motion planner's optimizer, but that are required to compute cost + # functions. + # By giving them names, we can update for each batch (if needed), + # via the motion planner's forward method. 
+ sdf_origin = th.Point2(name="sdf_origin") + start_point = th.Point2(name="start") + goal_point = th.Point2(name="goal") + cell_size_tensor = th.Variable(torch.empty(1, 1), name="cell_size") + sdf_data_tensor = th.Variable( + torch.empty(1, map_size, map_size), name="sdf_data" + ) + cost_eps = th.Variable(torch.tensor(epsilon_dist).view(1, 1), name="cost_eps") + dt = th.Variable( + torch.tensor(total_time / num_time_steps).view(1, 1), name="dt" + ) + + # --------------------------------------------------------------------------- # + # ------------------------------- Cost weights ------------------------------ # + # --------------------------------------------------------------------------- # + # For the GP cost functions, we create a single GPCost weight + gp_cost_weight = th.eb.GPCostWeight(torch.tensor(Qc_inv), dt) + + # Now we create cost weights for the collision-avoidance cost functions + # Each of this is a scalar cost weight with a named auxiliary variable. + # Before running the motion planner, each cost weight value can be updated + # by passing {name: new_cost_weight_tensor} to the forward method + collision_cost_weights = [] + if use_single_collision_weight: + collision_cost_weights.append( + th.ScaleCostWeight( + th.Variable(torch.tensor(collision_weight), name="collision_w") + ) + ) + else: + for i in range(1, self.trajectory_len): + collision_cost_weights.append( + th.ScaleCostWeight( + th.Variable( + torch.tensor(collision_weight), name=f"collision_w_{i}" + ) + ) + ) + + # For hard-constraints (end points pos/vel) we use a single scalar weight + # with high value + boundary_cost_weight = th.ScaleCostWeight(torch.tensor(100.0)) + + # --------------------------------------------------------------------------- # + # -------------------------- Optimization variables ------------------------- # + # --------------------------------------------------------------------------- # + # The optimization variables for the motion planer are 2-D positions and + # 
velocities for each of the discrete time steps + poses = [] + velocities = [] + for i in range(self.trajectory_len): + poses.append(th.Point2(name=f"pose_{i}", dtype=torch.double)) + velocities.append(th.Point2(name=f"vel_{i}", dtype=torch.double)) + + # --------------------------------------------------------------------------- # + # ------------------------------ Cost functions ----------------------------- # + # --------------------------------------------------------------------------- # + # Create a Theseus objective for adding the cost functions + objective = th.Objective(dtype=torch.double) + + # First create the cost functions for the end point positions and velocities + # which are hard constraints, and can be implemented via VariableDifference cost + # functions. + objective.add( + th.eb.VariableDifference( + poses[0], boundary_cost_weight, start_point, name="pose_0" + ) + ) + objective.add( + th.eb.VariableDifference( + velocities[0], + boundary_cost_weight, + th.Point2(data=torch.zeros(1, 2)), + name="vel_0", + ) + ) + objective.add( + th.eb.VariableDifference( + poses[-1], boundary_cost_weight, goal_point, name="pose_N" + ) + ) + objective.add( + th.eb.VariableDifference( + velocities[-1], + boundary_cost_weight, + th.Point2(data=torch.zeros(1, 2)), + name="vel_N", + ) + ) + + # Next we add 2-D collisions and GP cost functions, and associate them with the + # cost weights created above. 
We need a separate cost function for each time + # step + for i in range(1, self.trajectory_len): + objective.add( + th.eb.Collision2D( + poses[i], + collision_cost_weights[0] + if use_single_collision_weight + else collision_cost_weights[i - 1], + sdf_origin, + sdf_data_tensor, + cell_size_tensor, + cost_eps, + name=f"collision_{i}", + ) + ) + objective.add( + ( + th.eb.GPMotionModel( + poses[i - 1], + velocities[i - 1], + poses[i], + velocities[i], + dt, + gp_cost_weight, + name=f"gp_{i}", + ) + ) + ) + + # Finally, create the Nonlinear Least Squares optimizer for this objective + # and wrap both into a TheseusLayer + optimizer: th.NonlinearLeastSquares + if optim_method == "gauss_newton": + optimizer = th.GaussNewton( + objective, + th.CholeskyDenseSolver, + max_iterations=max_optim_iters, + step_size=step_size, + ) + elif optim_method == "levenberg_marquardt": + optimizer = th.LevenbergMarquardt( + objective, + th.CholeskyDenseSolver, + max_iterations=max_optim_iters, + step_size=step_size, + ) + + self.objective = objective + self.layer = th.TheseusLayer(optimizer) + self.layer.to(device=device, dtype=dtype) + + # A call to motion_planner.layer.forward(input_dict) will run the NLLS optimizer + # to solve for a trajectory given the input data. The input dictionary, supports + # the keys and values illustrated below, where ':' represents a batch dimension. + # + # input_dict = { + # "cell_size": tensor(:, 1), + # "cost_eps": tensor(:, 1), + # "dt": tensor(:, 1), + # "sdf_origin": tensor (:, 2), + # "start": tensor (:, 2), + # "goal": tensor(:, 2), + # "sdf_data": tensor(:, map_size, map_size), + # "collision_w_0": tensor(:, 1), + # "collision_w_1": tensor(:, 1), + # ... + # "pose_0": tensor(:, 2), + # "pose_1": tensor(:, 2), + # ... + # "pose_N": tensor(:, 2), + # "vel_0": tensor(:, 2), + # "vel_1": tensor(:, 2), + # ... 
+ # "vel_N": tensor(:, 2), + # } + # + # TheseusLayer will match the names to their corresponding variables and update + # their data before calling the optimizer. + # + # **Important**, all of these keys are + # optional, so that only variables that need to change from call to call need to + # have their names passed. Those not updated explictly here will retain their + # previous internal data. + # + # When running + # output, info = motion_planner.forward(input_dict) + # + # The motion planner will only modify optimization variables, and all other + # variables are not modified. For convenience, the output is a dictionary of + # (str, tensor) mapping variable names to optimized variable data tensors. + + def get_variable_values_from_straight_line( + self, start: torch.Tensor, goal: torch.Tensor + ) -> Dict[str, torch.Tensor]: + # Returns a dictionary of variable names to values that represent a straight + # line trajectory from start to goal. + start_goal_dist = goal - start + avg_vel = start_goal_dist / self.total_time + unit_trajectory_len = start_goal_dist / (self.trajectory_len - 1) + input_dict: Dict[str, torch.Tensor] = {} + for i in range(self.trajectory_len): + input_dict[f"pose_{i}"] = start + unit_trajectory_len * i + input_dict[f"vel_{i}"] = avg_vel + return input_dict + + def get_random_variable_values( + self, start: torch.Tensor + ) -> Dict[str, torch.Tensor]: + # Returns a dictionary of variable names with random initial poses. + input_dict: Dict[str, torch.Tensor] = {} + for i in range(self.trajectory_len): + input_dict[f"pose_{i}"] = torch.randn_like(start) + input_dict[f"vel_{i}"] = torch.randn_like(start) + return input_dict + + def get_variable_values_from_trajectory( + self, trajectory: torch.Tensor + ) -> Dict[str, torch.Tensor]: + # Returns a dictionary of variable names to values, so that values + # are assigned with the data from the given trajectory. Trajectory should be a + # tensor of shape (batch_size, 4, planner.trajectory_len). 
+ assert trajectory.shape[1:] == (4, self.trajectory_len) + input_dict: Dict[str, torch.Tensor] = {} + for i in range(self.trajectory_len): + input_dict[f"pose_{i}"] = trajectory[:, :2, i] + input_dict[f"vel_{i}"] = trajectory[:, :2, i] + return input_dict + + def error(self) -> float: + # Returns the current MSE of the optimization problem + with torch.no_grad(): + return self.objective.error_squared_norm().mean().item() + + def get_trajectory( + self, + values_dict: Optional[Dict[str, torch.Tensor]] = None, + detach: bool = False, + ) -> torch.Tensor: + # Returns the a tensor with the trajectory that the given variable + # values represent. If no dictionary is passed, it will used the latest + # values stored in the objective's variables. + trajectory = torch.empty( + self.objective.batch_size, + 4, + self.trajectory_len, + device=self.objective.device, + ) + variables = self.objective.optim_vars + for i in range(self.trajectory_len): + if values_dict is None: + trajectory[:, :2, i] = variables[f"pose_{i}"].data.clone() + trajectory[:, 2:, i] = variables[f"vel_{i}"].data.clone() + else: + trajectory[:, :2, i] = values_dict[f"pose_{i}"] + trajectory[:, 2:, i] = values_dict[f"vel_{i}"] + return trajectory.detach() if detach else trajectory + + def get_total_squared_errors(self) -> Tuple[torch.Tensor, torch.Tensor]: + gp_error: torch.Tensor = 0 # type: ignore + collision_error: torch.Tensor = 0 # type: ignore + for name, cf in self.objective.cost_functions.items(): + if "gp" in name: + gp_error += cf.error().square().mean() + if "collision" in name: + collision_error += cf.error().square().mean() + return gp_error, collision_error + + def copy(self, collision_weight: Optional[float] = None) -> "MotionPlanner": + return MotionPlanner( + self.map_size, + self.epsilon_dist, + self.total_time, + collision_weight or self.collision_weight, + self.Qc_inv, + self.num_time_steps, + self.optim_method, + self.max_optim_iters, + step_size=self.step_size, + 
use_single_collision_weight=self.use_single_collision_weight, + device=self.device, + dtype=self.dtype, + ) diff --git a/theseus/utils/examples/tactile_pose_estimation/__init__.py b/theseus/utils/examples/tactile_pose_estimation/__init__.py new file mode 100644 index 000000000..08c4ab692 --- /dev/null +++ b/theseus/utils/examples/tactile_pose_estimation/__init__.py @@ -0,0 +1,15 @@ +from .misc import ( + load_tactile_dataset_from_file, + load_tactile_sdf_from_file, + visualize_tactile_push2d, +) +from .models import ( + TactileMeasModel, + TactileWeightModel, + get_tactile_cost_weight_inputs, + get_tactile_initial_optim_vars, + get_tactile_motion_capture_inputs, + get_tactile_nn_measurements_inputs, + get_tactile_poses_from_values, + init_tactile_model_from_file, +) diff --git a/theseus/utils/examples/tactile_pose_estimation/misc.py b/theseus/utils/examples/tactile_pose_estimation/misc.py new file mode 100644 index 000000000..1f24b72a6 --- /dev/null +++ b/theseus/utils/examples/tactile_pose_estimation/misc.py @@ -0,0 +1,163 @@ +import matplotlib.patches as mpatches +import matplotlib.pyplot as plt +import numpy as np +import torch + +# ----------------------------------------------------------------------------------- # +# ------------------------------------ Data loading --------------------------------- # +# ----------------------------------------------------------------------------------- # + + +def load_tactile_dataset_from_file(filename, episode, device=None): + + with open(filename) as f: + import json + + data_from_file = json.load(f) + + dataset_all = {} + dataset_all["obj_poses"] = torch.tensor( + data_from_file["obj_poses_2d"], device=device + ) + dataset_all["eff_poses"] = torch.tensor( + data_from_file["ee_poses_2d"], device=device + ) + dataset_all["img_feats"] = torch.tensor(data_from_file["img_feats"], device=device) + dataset_all["contact_episode"] = torch.tensor( + data_from_file["contact_episode"], device=device + ) + dataset_all["contact_flag"] 
= torch.tensor( + data_from_file["contact_flag"], device=device + ) + + dataset = {} + ds_idxs = torch.nonzero(dataset_all["contact_episode"] == episode).squeeze() + for key, val in dataset_all.items(): + dataset[key] = val[ds_idxs] + + return dataset + + +def load_tactile_sdf_from_file(filename, device=None): + + with open(filename) as f: + import json + + sdf_from_file = json.load(f) + sdf_data_vec = sdf_from_file["grid_data"] + + sdf_data_mat = np.zeros( + (sdf_from_file["grid_size_y"], sdf_from_file["grid_size_x"]) + ) + for i in range(sdf_data_mat.shape[0]): + for j in range(sdf_data_mat.shape[1]): + sdf_data_mat[i, j] = sdf_data_vec[i][j] + + sdf_data_mat = torch.tensor(sdf_data_mat, device=device).unsqueeze(0) + cell_size = torch.tensor([sdf_from_file["grid_res"]], device=device).unsqueeze(0) + origin = torch.tensor( + [sdf_from_file["grid_origin_x"], sdf_from_file["grid_origin_y"]], device=device + ).unsqueeze(0) + + return sdf_data_mat, cell_size, origin + + +# ----------------------------------------------------------------------------------- # +# ---------------------------------- Visualization ---------------------------------- # +# ----------------------------------------------------------------------------------- # +def _draw_tactile_effector(poses: np.ndarray, label: str = "groundtruth"): + + linestyle = "-" + color = "tab:orange" + if label == "groundtruth": + linestyle = "--" + color = "tab:gray" + + # plot contact point and normal + plt.plot(poses[-1][0], poses[-1][1], "k*", linestyle=linestyle) + ori = poses[-1][2] + (dx, dy) = (0.03 * -np.sin(ori), 0.03 * np.cos(ori)) + plt.arrow( + poses[-1][0], + poses[-1][1], + dx, + dy, + linewidth=2, + head_width=0.001, + color=color, + head_length=0.01, + fc=color, + ec=color, + ) + + eff_radius = 0.0075 + circle = mpatches.Circle( + (poses[-1][0], poses[-1][1]), color=color, radius=eff_radius + ) + plt.gca().add_patch(circle) + + +def _draw_tactile_object( + poses: np.ndarray, rect_len_x: float, 
rect_len_y: float, label: str = "optimizer" +): + + linestyle = "-" + color = "tab:orange" + if label == "groundtruth": + linestyle = "--" + color = "tab:gray" + + plt.plot( + poses[:, 0], + poses[:, 1], + color=color, + linestyle=linestyle, + label=label, + linewidth=2, + alpha=0.9, + ) + + # shape: rect + yaw = poses[-1][2] + R = np.array([[np.cos(yaw), -np.sin(yaw)], [np.sin(yaw), np.cos(yaw)]]) + offset = np.matmul(R, np.array([[0.5 * rect_len_x], [0.5 * rect_len_y]])) + xb, yb = poses[-1][0] - offset[0], poses[-1][1] - offset[1] + rect = mpatches.Rectangle( + (xb, yb), + rect_len_x, + rect_len_y, + angle=(np.rad2deg(yaw)), + facecolor="None", + edgecolor=color, + linestyle=linestyle, + linewidth=2, + ) + plt.gca().add_patch(rect) + + +def visualize_tactile_push2d( + obj_poses: torch.Tensor, + eff_poses: torch.Tensor, + obj_poses_gt: torch.Tensor, + eff_poses_gt: torch.Tensor, + rect_len_x: float, + rect_len_y: float, +): + + plt.cla() + plt.xlim((-2, 2)) + plt.ylim((-2, 2)) + plt.gca().axis("equal") + plt.axis("off") + + _draw_tactile_object( + obj_poses.cpu().detach().numpy(), rect_len_x, rect_len_y, label="optimizer" + ) + _draw_tactile_effector(eff_poses.cpu().detach().numpy(), label="optimizer") + _draw_tactile_object( + obj_poses_gt.cpu().detach().numpy(), rect_len_x, rect_len_y, label="groundtruth" + ) + _draw_tactile_effector(eff_poses_gt.cpu().detach().numpy(), label="groundtruth") + + plt.show() + plt.pause(1e-9) diff --git a/theseus/utils/examples/tactile_pose_estimation/models.py b/theseus/utils/examples/tactile_pose_estimation/models.py new file mode 100644 index 000000000..b12599028 --- /dev/null +++ b/theseus/utils/examples/tactile_pose_estimation/models.py @@ -0,0 +1,220 @@ +import collections +from typing import Dict, List, Optional, Tuple, cast + +import numpy as np +import torch +import torch.nn as nn + +import theseus as th + + +class TactileMeasModel(nn.Module): + def __init__(self, input_size: int, output_size: int): + super().__init__() + 
+ self.fc1 = nn.Linear(input_size, output_size) + + def forward(self, x1: torch.Tensor, x2: torch.Tensor, k: torch.Tensor): + x = torch.cat([x1, x2], dim=1) + + k1_ = k.unsqueeze(1) # b x 1 x cl + x1_ = x.unsqueeze(-1) # b x dim x 1 + x = torch.mul(x1_, k1_) # b x dim x cl + + x = x.view(x.shape[0], -1) + x = self.fc1(x) + + return x + + +def init_tactile_model_from_file(model: nn.Module, filename: str): + + model_saved = torch.jit.load(filename) + state_dict_saved = model_saved.state_dict() + state_dict_new = collections.OrderedDict() + state_dict_new["fc1.weight"] = state_dict_saved["model.fc1.weight"] + state_dict_new["fc1.bias"] = state_dict_saved["model.fc1.bias"] + + model.load_state_dict(state_dict_new) + + return model + + +# Set some parameters for the cost weights +class TactileWeightModel(nn.Module): + def __init__( + self, device: str, dim: int = 3, wt_init: Optional[torch.Tensor] = None + ): + super().__init__() + + wt_init_ = torch.rand(1, dim) + if wt_init is not None: + wt_init_ = wt_init + self.param = nn.Parameter(wt_init_.to(device)) + + def forward(self): + return self.param.clone() + + +# ----------------------------------------------------------------------------------- # +# ------------------------------- Theseus Model Interface --------------------------- # +# ----------------------------------------------------------------------------------- # + + +def get_tactile_nn_measurements_inputs( + batch: Tuple[torch.Tensor, torch.Tensor, torch.Tensor], + device: str, + class_label: int, + num_classes: int, + min_win_mf: int, + max_win_mf: int, + step_win_mf: int, + time_steps: int, + model: Optional[nn.Module] = None, +): + inputs = {} + + if model is not None: + images_feat_meas = batch[0].to(device) + class_label_vec = ( + nn.functional.one_hot(torch.tensor(class_label), torch.tensor(num_classes)) + .view(1, -1) + .to(device) + ) + + meas_model_input_1_list: List[torch.Tensor] = [] + meas_model_input_2_list: List[torch.Tensor] = [] + for i in 
range(min_win_mf, time_steps): + for offset in range(min_win_mf, np.minimum(i, max_win_mf), step_win_mf): + meas_model_input_1_list.append(images_feat_meas[:, i - offset, :]) + meas_model_input_2_list.append(images_feat_meas[:, i, :]) + + meas_model_input_1 = torch.cat(meas_model_input_1_list, dim=0) + meas_model_input_2 = torch.cat(meas_model_input_2_list, dim=0) + num_measurements = meas_model_input_1.shape[0] + model_measurements = model.forward( + meas_model_input_1, meas_model_input_2, class_label_vec + ).reshape( + -1, num_measurements, 4 + ) # data format (x, y, cos, sin) + else: # use oracle model + model_measurements = [] + for i in range(min_win_mf, time_steps): + for offset in range(min_win_mf, np.minimum(i, max_win_mf), step_win_mf): + eff_pose_1 = th.SE2(x_y_theta=batch[1][:, i - offset]) + obj_pose_1 = th.SE2(x_y_theta=batch[2][:, i - offset]) + eff_pose_1__obj = obj_pose_1.between(eff_pose_1) + + eff_pose_2 = th.SE2(x_y_theta=batch[1][:, i]) + obj_pose_2 = th.SE2(x_y_theta=batch[2][:, i]) + eff_pose_2__obj = obj_pose_2.between(eff_pose_2) + + meas_pose_rel = cast(th.SE2, eff_pose_1__obj.between(eff_pose_2__obj)) + model_measurements.append( + torch.cat( + ( + meas_pose_rel.xy(), + meas_pose_rel.theta().cos(), + meas_pose_rel.theta().sin(), + ), + dim=1, + ) + ) # data format (x, y, cos, sin) + + num_measurements = len(model_measurements) + model_measurements = torch.cat(model_measurements, dim=0).reshape( + -1, num_measurements, 4 + ) + model_measurements = model_measurements.to(device) + + # set MovingFrameBetween measurements from the NN output + nn_meas_idx = 0 + for i in range(min_win_mf, time_steps): + for offset in range(min_win_mf, np.minimum(i, max_win_mf), step_win_mf): + meas_xycs_ = torch.stack( + [ + model_measurements[:, nn_meas_idx, 0], + model_measurements[:, nn_meas_idx, 1], + model_measurements[:, nn_meas_idx, 2], + model_measurements[:, nn_meas_idx, 3], + ], + dim=1, + ) + inputs[f"nn_measurement_{i-offset}_{i}"] = meas_xycs_ + 
nn_meas_idx = nn_meas_idx + 1 + + return inputs + + +def get_tactile_motion_capture_inputs( + batch: Tuple[torch.Tensor, torch.Tensor, torch.Tensor], device: str, time_steps: int +): + inputs = {} + captures = batch[1].to(device) + for step in range(time_steps): + capture = captures[:, step, :] + cature_xycs = torch.stack( + [capture[:, 0], capture[:, 1], capture[:, 2].cos(), capture[:, 2].sin()], + dim=1, + ) + inputs[f"motion_capture_{step}"] = cature_xycs + return inputs + + +def get_tactile_cost_weight_inputs(qsp_model, mf_between_model): + return {"qsp_weight": qsp_model(), "mf_between_weight": mf_between_model()} + + +def get_tactile_initial_optim_vars( + batch: Tuple[torch.Tensor, torch.Tensor, torch.Tensor], device: str, time_steps: int +): + inputs_ = {} + eff_captures_ = batch[1].to(device) + obj_captures_ = batch[2].to(device) + + for step in range(time_steps): + eff_xyth_ = eff_captures_[:, step, :] + eff_xycs_ = torch.stack( + [ + eff_xyth_[:, 0], + eff_xyth_[:, 1], + eff_xyth_[:, 2].cos(), + eff_xyth_[:, 2].sin(), + ], + dim=1, + ) + + obj_xyth_ = obj_captures_[:, step, :] + obj_xycs_ = torch.stack( + [ + obj_xyth_[:, 0], + obj_xyth_[:, 1], + obj_xyth_[:, 2].cos(), + obj_xyth_[:, 2].sin(), + ], + dim=1, + ) + + # layer will route this to the optimization variables with the given name + inputs_[f"obj_pose_{step}"] = obj_xycs_.clone() + 0.0 * torch.cat( + [torch.randn((1, 2)), torch.zeros((1, 2))], dim=1 + ).to(device) + inputs_[f"eff_pose_{step}"] = eff_xycs_.clone() + + return inputs_ + + +def get_tactile_poses_from_values( + batch_size: int, + values: Dict[str, torch.Tensor], + time_steps, + device: str, + key: str = "pose", +): + poses = torch.empty(batch_size, time_steps, 3, device=device) + for t_ in range(time_steps): + poses[:, t_, :2] = values[f"{key}_{t_}"][:, 0:2] + poses[:, t_, 2] = torch.atan2( + values[f"{key}_{t_}"][:, 3], values[f"{key}_{t_}"][:, 2] + ) + return poses diff --git a/theseus/utils/tests/test_utils.py 
b/theseus/utils/tests/test_utils.py new file mode 100644 index 000000000..dfa7fff32 --- /dev/null +++ b/theseus/utils/tests/test_utils.py @@ -0,0 +1,95 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +import numpy as np +import pytest # noqa: F401 +import torch +import torch.nn as nn + +import theseus.utils as thutils + + +def test_build_mlp(): + # set seed for mlp + torch.manual_seed(0) + # create name to class map for activation function + act_name_to_cls_map = {"relu": nn.ReLU, "elu": nn.ELU, "tanh": nn.Tanh} + + # set test parameters + test_hidden_depth = [0, 1, 2] + input_dim = 3 + hidden_dim = 4 + output_dim = 5 + batch_size = 16 + + for hidden_depth in test_hidden_depth: + for act_name in act_name_to_cls_map.keys(): + sample_mlp = thutils.build_mlp( + input_dim, hidden_dim, output_dim, hidden_depth, act_name + ) + # check depth by counting linear modules + layer_count = 0 + for curr_mod in sample_mlp: + if isinstance(curr_mod, nn.Linear): + layer_count += 1 + created_depth = layer_count - 1 + assert ( + created_depth == hidden_depth + ), f"Incorrect number of layers: {created_depth} should be {hidden_depth}" + + # check input and output sizes + x = torch.rand(batch_size, input_dim) + y = sample_mlp(x) # automatically tests input size + assert ( + y.size(0) == batch_size and y.size(1) == output_dim + ), "Incorrect output tensor created" + + # check size of each layer in mlp + def _get_layer_sizes(layer_id): + layer_input_dim, layer_output_dim = hidden_dim, hidden_dim + if layer_id == 0: + layer_input_dim = input_dim + if layer_id == created_depth: + layer_output_dim = output_dim + return layer_input_dim, layer_output_dim + + layer_id = 0 + for curr_mod in sample_mlp: + if isinstance(curr_mod, nn.Linear): + layer_input_dim, layer_output_dim = _get_layer_sizes(layer_id) + x = torch.rand(batch_size, layer_input_dim) + y = 
curr_mod(x) + assert ( + y.size(0) == batch_size and y.size(1) == layer_output_dim + ), f"Incorrect hidden layer dimensions at layer {layer_id}" + layer_id += 1 + + # check activation function + # assume all non Linear layers must be activation layers + act_cls = act_name_to_cls_map[act_name] + for curr_mod in sample_mlp: + if not isinstance(curr_mod, nn.Linear): + assert isinstance( + curr_mod, act_cls + ), f"Incorrect activation class: {curr_mod} should be {act_cls}" + + +def test_gather_from_rows_cols(): + rng = np.random.default_rng(0) + generator = torch.Generator() + generator.manual_seed(0) + for _ in range(100): + batch_size = rng.integers(1, 8) + num_rows = rng.integers(1, 20) + num_cols = rng.integers(1, 20) + num_points = rng.integers(1, 100) + matrix = torch.randn((batch_size, num_rows, num_cols)) + rows = torch.randint(0, num_rows, size=(batch_size, num_points)) + cols = torch.randint(0, num_cols, size=(batch_size, num_points)) + res = thutils.gather_from_rows_cols(matrix, rows, cols) + assert res.shape == (batch_size, num_points) + for i in range(batch_size): + for j in range(num_points): + assert torch.allclose(res[i, j], matrix[i, rows[i, j], cols[i, j]]) diff --git a/theseus/utils/utils.py b/theseus/utils/utils.py new file mode 100644 index 000000000..dd69bf4a5 --- /dev/null +++ b/theseus/utils/utils.py @@ -0,0 +1,107 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. 
+ +from typing import Callable, List, Optional, Type + +import torch +import torch.nn as nn + +import theseus.geometry as th + + +def build_mlp( + input_dim: int, + hidden_dim: int, + output_dim: int, + hidden_depth: int, + act_name: str = "relu", +) -> torch.nn.Sequential: + # assign types + act: Type[nn.Module] + mods: List[nn.Module] + + # find activation + if act_name == "relu": + act = nn.ReLU + elif act_name == "elu": + act = nn.ELU + elif act_name == "tanh": + act = nn.Tanh + else: + raise NotImplementedError() + + # construct sequential list of modules + if hidden_depth == 0: + mods = [nn.Linear(input_dim, output_dim)] + else: + mods = [nn.Linear(input_dim, hidden_dim), act()] + for i in range(hidden_depth - 1): + mods += [nn.Linear(hidden_dim, hidden_dim), act()] + mods.append(nn.Linear(hidden_dim, output_dim)) + trunk = nn.Sequential(*mods) + return trunk + + +# Inputs are a batched matrix and batched rows/cols for indexing +# Sizes are: +# matrix -> batch_size x num_rows x num_cols +# rows/cols -> batch_size x num_points +# +# Returns: +# data -> batch_size x num_points +# where +# data[i, j] = matrix[i, rows[i, j], cols[i, j]] +def gather_from_rows_cols(matrix: torch.Tensor, rows: torch.Tensor, cols: torch.Tensor): + assert matrix.ndim == 3 and rows.ndim == 2 and rows.ndim == 2 + assert matrix.shape[0] == rows.shape[0] and matrix.shape[0] == cols.shape[0] + assert rows.shape[1] == cols.shape[1] + assert rows.min() >= 0 and rows.max() < matrix.shape[1] + assert cols.min() >= 0 and cols.max() < matrix.shape[2] + + aux_idx = torch.arange(matrix.shape[0]).unsqueeze(-1).to(matrix.device) + return matrix[aux_idx, rows, cols] + + +# Computes jacobians of a function of LieGroup arguments using finite differences +# function: a callable representing the function whose jacobians will be computed +# group_args: the group values at which the jacobians should be computed +# function_dim: the dimension of the function. 
If None, it uses the dimension of the +# group args. +# delta_mag: magnitude of the differences used to compute the jacobian +def numeric_jacobian( + function: Callable[[List[th.LieGroup]], th.LieGroup], + group_args: List[th.LieGroup], + function_dim: Optional[int] = None, + delta_mag: float = 1e-3, +): + batch_size = group_args[0].shape[0] + + def _compute(group_idx): + dof = group_args[group_idx].dof() + function_dim_ = function_dim or dof + jac = torch.zeros( + batch_size, function_dim_, dof, dtype=group_args[group_idx].dtype + ) + for d in range(dof): + delta = torch.zeros(1, dof).to( + device=group_args[0].device, dtype=group_args[group_idx].dtype + ) + delta[:, d] = delta_mag + + group_plus = group_args[group_idx].retract(delta) + group_minus = group_args[group_idx].retract(-delta) + group_plus_args = [g for g in group_args] + group_plus_args[group_idx] = group_plus + group_minus_args = [g for g in group_args] + group_minus_args[group_idx] = group_minus + + diff = function(group_minus_args).local(function(group_plus_args)) + jac[:, :, d] = diff / (2 * delta_mag) + return jac + + jacs = [] + for group_idx in range(len(group_args)): + jacs.append(_compute(group_idx)) + return jacs diff --git a/tutorials/00_introduction.ipynb b/tutorials/00_introduction.ipynb new file mode 100644 index 000000000..28257d77a --- /dev/null +++ b/tutorials/00_introduction.ipynb @@ -0,0 +1,572 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "997d6803", + "metadata": {}, + "source": [ + "# Theseus\n", + "\n", + "Theseus is a library for differentiable nonlinear optimization built on PyTorch. \n", + "\n", + "Theseus is motivated by problems in robotics and computer vision that can be formulated as differentiable nonlinear least squares optimization problems, such as Simultaneous Localization and Mapping (SLAM), motion planning, and bundle adjustment. 
These problems can be broadly categorized as doing structured learning, where neural components can be modularly mixed with known priors to get the benefit of deep learning in a way that adds value over classical methods. While interest in this area is rapidly increasing, existing work is fragmented and built using application-specific codebases. Theseus fills this gap by providing a problem-agnostic platform for structured learning, letting the user easily combine neural networks with priors represented as differentiable blocks of nonlinear optimization problems, and do end-to-end training over these. \n", + "\n", + "This tutorial introduces the basic building blocks for solving such optimization problems in Theseus; in the following tutorials, we will show how to put together these building blocks to solve optimization problems with various aspects and increasing complexity. We cover six conceptual building blocks in this tutorial: \n", + "* **Variables:** named wrappers for torch tensors that form the fundamental data type for defining optimization problems in Theseus. (Section 1)\n", + "* **Cost functions:** computes an error term as a function of one or more variables, and which are the functions to be minimized by Theseus optimizers. (Section 2)\n", + "* **Cost weights:** computes a weight that modifies the contribution of one or more cost functions to the overall objective. (Section 3)\n", + "* **Objective:** compiles multiple cost functions and weights to define the structure of an optimization problem. (Section 4)\n", + "* **Optimizer:** implements an optimization algorithm (e.g., Gauss-Newton, LevenbergMarquardt) that can be used to minimize an Objective. (Section 5)\n", + "* **TheseusLayer:** groups an objective and optimizer and serves as the interface between torch modules upstream/downstream and differentiable optimization problems. (Section 6)" + ] + }, + { + "cell_type": "markdown", + "id": "6f59f53e", + "metadata": {}, + "source": [ + "## 1. 
Variables\n", + "\n", + "Optimization objectives in Theseus are a function of `th.Variable` objects, which are `torch.tensor` wrappers of different types (e.g., 2D points, rotation groups, etc.) that can be, optionally, associated with a name. In Theseus, we require the first data dimension of all variables to be a batch dimension (similar to the convention in PyTorch modules). We describe here two main operations common to all `Variables`: (1) Creating variables and (2) Updating `Variables`.\n", + "\n", + "### 1.1 Creating variables\n", + "Variables can be created using the generic `th.Variable` interface, or through a sub-class with custom functionality. Many `Variables` used in Theseus applications are manifolds; therefore, Theseus provides several `Variable` sub-classes supporting commonly used manifolds, such as vectors, 2-D/3-D points, 2-D rotations and 2-D rigid transformations. We show some example usage below:" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "id": "de642d29", + "metadata": {}, + "outputs": [], + "source": [ + "import torch\n", + "import theseus as th" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "id": "83c82605", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "x: Named variable with 3-D data of batch size 2:\n", + " Variable(data=tensor([[-0.1380, 1.6347, 0.2730],\n", + " [-1.0099, -1.5770, -0.4740]]), name=x)\n", + "\n", + "y: Un-named variable:\n", + " Variable(data=tensor([[0.]]), name=Variable__2)\n", + "\n", + "z: Named SE2 variable:\n", + " SE2(xytheta=tensor([[0., 0., 0.],\n", + " [0., 0., 0.]], dtype=torch.float64), name=se2_1)\n" + ] + } + ], + "source": [ + "# Create a variable with 3-D random data of batch size = 2 and name \"x\"\n", + "x = th.Variable(torch.randn(2, 3), name=\"x\")\n", + "print(f\"x: Named variable with 3-D data of batch size 2:\\n {x}\\n\")\n", + "\n", + "# Create an unnamed variable. 
A default name will be created for it\n", + "y = th.Variable(torch.zeros(1, 1))\n", + "print(f\"y: Un-named variable:\\n {y}\\n\")\n", + "\n", + "# Create a named SE2 (2D rigid transformation) specifying data (batch_size=2)\n", + "z = th.SE2(x_y_theta=torch.zeros(2, 3).double(), name=\"se2_1\")\n", + "print(f\"z: Named SE2 variable:\\n {z}\")" + ] + }, + { + "cell_type": "markdown", + "id": "f5046aea", + "metadata": {}, + "source": [ + "### 1.2 Updating variables\n", + "\n", + "After creating a variable, its value can be updated via `update()` method. Below we show a few examples and possible errors to avoid when updating variables." + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "id": "9c7a282a", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Example usage of `update`: \n", + " Original variable: Variable(data=tensor([[-0.1380, 1.6347, 0.2730],\n", + " [-1.0099, -1.5770, -0.4740]]), name=x)\n", + " Updated variable: Variable(data=tensor([[1., 1., 1.],\n", + " [1., 1., 1.]]), name=x)\n", + "\n", + "Error inputs for a Variable `update`:\n", + " Mismatched internal data format:\n", + " Tried to update tensor x with data incompatible with original tensor shape. Given torch.Size([4]). Expected: torch.Size([3])\n", + " Missing batch dimension: \n", + " Tried to update tensor x with data incompatible with original tensor shape. Given torch.Size([]). 
Expected: torch.Size([3])\n", + "\n", + "Change variable batch size via `update`:\n", + " New shape: torch.Size([4, 3])\n" + ] + } + ], + "source": [ + "# Example usage of `update`\n", + "print(\"Example usage of `update`: \")\n", + "print(f\" Original variable: {x}\")\n", + "x.update(torch.ones(2, 3))\n", + "print(f\" Updated variable: {x}\\n\")\n", + "\n", + "# The following inputs don't work\n", + "print(\"Error inputs for a Variable `update`:\")\n", + "try:\n", + " # `update` expects input tensor to respect the internal data format\n", + " x.update(torch.zeros(2, 4))\n", + "except ValueError as e:\n", + " print(f\" Mismatched internal data format:\")\n", + " print(f\" {e}\")\n", + "try:\n", + " # `update` expects a batch dimension\n", + " x.update(torch.zeros(3))\n", + "except ValueError as e:\n", + " print(f\" Missing batch dimension: \")\n", + " print(f\" {e}\\n\")\n", + " \n", + "# However the batch size can be changed via `update`\n", + "print(\"Change variable batch size via `update`:\")\n", + "x.update(torch.ones(4, 3))\n", + "print(f\" New shape: {x.shape}\")" + ] + }, + { + "cell_type": "markdown", + "id": "fadd5770", + "metadata": {}, + "source": [ + "Over the next few sections, we will see the different ways that `Variable`s are used in optimization problems in Theseus." + ] + }, + { + "cell_type": "markdown", + "id": "9fdc9dd3", + "metadata": {}, + "source": [ + "## 2. Cost functions\n", + "\n", + "A Theseus cost function represents an error function of one or more Theseus variables. Thus, cost functions capture the core quantities being optimized in Theseus.\n", + "\n", + "For this reason, a cost function needs to know which variables can be optimized, and which variables are not allowed to be optimized. In Theseus, we represent this concept by having two kinds of variables: \n", + "* *optimization variables*: variables that can be modified by Theseus optimizers for minimizing the objective. 
\n", + "* *auxiliary variables*: variables that are required to compute the objective, but that remain constant to Theseus optimizers.\n", + " \n", + "In Theseus, a `Variable` becomes an optimization variable if it is defined as such in the creation of a cost function. All optimization variables must be sub-classes of `th.Manifold`.\n", + "\n", + "A cost function thus needs to be created with its optimization (required) and auxiliary variables (optional) declared. The core operations provided by a cost function are the computation of the error and the error's Jacobian using the latest values of its variables. The `th.CostFunction` class is an abstract class, and to instantiate it, one needs to implement the error computation and the Jacobian. A cost function must return a `torch` tensor as its error. \n", + "\n", + "As a simple example, we will show how to use the `th.eb.VariableDifference` cost function, which is a concrete sub-class of `th.CostFunction`. Below, we instantiate this cost function with two `Vector` variables, one optimization and one auxiliary. \n", + "\n", + "We then show a few useful operations on the cost function: how the cost function can access its optimization and auxiliary variables; the computation of its error, which is defined as `optim_var - target` for the `th.eb.VariableDifference` c); how the error changes when an underlying `Variable` is updated. Lastly, we show the computation of its jacobians: this returns a list of jacobians, with one entry per *optimization* variable." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 4, + "id": "0845cabf", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Retrieving the optimization and auxiliary variables from the cost function:\n", + " Optimization variables: [Vector(dof=2, data=tensor([[1., 1.]]), name=x1)]\n", + " Auxiliary variables: [Vector(dof=2, data=tensor([[0., 0.]]), name=target)]\n", + "\n", + "Original cost function (unweighted) error:\n", + " tensor([[1., 1.]]) of shape torch.Size([1, 2])\n", + "\n", + "Updating optimization variables by factor of 2: \n", + " Updated variables: Vector(dof=2, data=tensor([[2., 2.]]), name=x1)\n", + " Updated (unweighted) error: tensor([[2., 2.]])\n", + "\n", + "Computing cost function's (unweighted) jacobians:\n", + " Jacobians: of length 1\n", + " tensor([[[1., 0.],\n", + " [0., 1.]]])\n", + " Shape of 0-th Jacobian: torch.Size([1, 2, 2])\n" + ] + } + ], + "source": [ + "# Note: CostWeight is a weighting quantity required for constructing a cost function.\n", + "# We explain it in Section 3; for this example, we simply create it but we do not use it.\n", + "w1 = th.ScaleCostWeight(2.0)\n", + "\n", + "# Create a VariableDifference cost function\n", + "optim_var = th.Vector(data=torch.ones(1, 2), name=\"x1\")\n", + "target = th.Vector(data=torch.zeros(1, 2), name=\"target\")\n", + "cf = th.eb.VariableDifference(optim_var, w1, target)\n", + "\n", + "# A cost function can retrieve its optimization and auxiliary variables \n", + "print (\"Retrieving the optimization and auxiliary variables from the cost function:\")\n", + "print(\" Optimization variables: \", list(cf.optim_vars))\n", + "print(\" Auxiliary variables: \", list(cf.aux_vars))\n", + "print(\"\")\n", + "\n", + "# Cost functions compute the error using the values of the variables.\n", + "error = cf.error()\n", + "print(f\"Original cost function (unweighted) error:\\n {error} of shape {error.shape}\\n\")\n", + "\n", + "# Cost functions use 
the _latest_ values of the variables,\n", + "# as shown by the error values after the variable is updated.\n", + "print(\"Updating optimization variables by factor of 2: \")\n", + "optim_var.update(2 * torch.ones(1, 2))\n", + "print(f\" Updated variables: {optim_var}\")\n", + "# Error is now twice as large as the one printed above\n", + "print(f\" Updated (unweighted) error: {cf.error()}\\n\")\n", + "\n", + "# Compute the (unweighted) jacobians and error\n", + "# This returns a list of jacobians, with one entry per _optimization_ variable.\n", + "print(\"Computing cost function's (unweighted) jacobians:\")\n", + "jacobians, error = cf.jacobians() # Note cf.jacobians also returns error \n", + "print(f\" Jacobians: {type(jacobians)} of length {len(jacobians)}\")\n", + "print(f\" {jacobians[0]}\")\n", + "# The i-th jacobian has shape (batch_size, cf.dim(), i-th_optim_var.dof())\n", + "print(f\" Shape of 0-th Jacobian: {jacobians[0].shape}\")" + ] + }, + { + "cell_type": "markdown", + "id": "ddcb3327", + "metadata": {}, + "source": [ + "In Tutorial 3, we will delve into the internals of a cost function and show how to construct custom cost functions.\n", + "\n", + "## 3. Cost weights\n", + "\n", + "The Theseus *cost weight* is a weighting function applied to cost functions: it computes a weight as a function of one or more variables, and applies it to the error of one or more cost functions. The cost weights are thus a way of modifying the error of a cost function in the optimization problem. Cost weights add another layer of abstraction that help trade-off between different cost functions in an objective.\n", + "\n", + "The `th.CostWeight` class is abstract, as any function of `Variable`s can be used to create `CostWeight`. 
Theseus provides a number of concrete `CostWeight` sub-classes currently: \n", + "- `ScaleCostWeight`, where the weighting function is a scalar real number, \n", + "- `DiagonalCostWeight`, where the the weighting function is a diagonal matrix,\n", + "- `th.eb.GPCostWeight`, where the weighting function represents the inverse covariance function of an [exactly sparse Gaussian process](http://roboticsproceedings.org/rss10/p01.pdf). \n", + "\n", + "The main use of the `CostWeight` is to support the `weighted_error` and `weighted_jacobians_and_error` functions of the cost functions; so these sub-classes implement their (defined) weighting functions.\n", + "\n", + "The `Variable`s used in a `CostWeight` may be named or unnamed; however, using a named `Variable` allows us to update the value of the `CostWeight` directly; this is especially useful in updating the `Objective` or the `TheseusLayer` whenever the cost weight is computed by some external function (e.g., a `torch.nn.Module`). \n", + "\n", + "We show an example of `CostWeight` usage below with the `ScaleCostWeight` class. 
" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "id": "c9edd8f5", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Scale cost weight creation:\n", + " w1 (default variable): Variable(data=tensor([[10.]]), name=Variable__9)\n", + " w2 (named variable): Variable(data=tensor([[2.]]), name=scale)\n", + "\n", + "Weighting errors/jacobian directly with a ScaleCostWeight:\n", + " jacobians:\n", + " weighted: [tensor([[[10., 0.],\n", + " [ 0., 10.]]])]\n", + " original: [tensor([[[1., 0.],\n", + " [0., 1.]]])]\n", + " error:\n", + " weighted: tensor([[20., 20.]])\n", + " original: tensor([[2., 2.]])\n", + "\n", + "Using the `weighted_error` function of the previous cost function:\n", + " weighted cost function error: tensor([[4., 4.]]) vs unweighted error: tensor([[2., 2.]])\n" + ] + } + ], + "source": [ + "print(\"Scale cost weight creation:\")\n", + "# Create a scale cost weight from a float\n", + "w1 = th.ScaleCostWeight(10.0)\n", + "# The weight is wrapped into a default variable\n", + "print(f\" w1 (default variable): {w1.scale}\")\n", + "\n", + "# A theseus variable can be passed directly\n", + "w2 = th.ScaleCostWeight(th.Variable(2 * torch.ones(1, 1), name=\"scale\"))\n", + "print(f\" w2 (named variable): {w2.scale}\\n\")\n", + "\n", + "# Weighting errors and jacobians with a ScaleCostWeight\n", + "print(\"Weighting errors/jacobian directly with a ScaleCostWeight:\")\n", + "weighted_jacobians, weighted_error = w1.weight_jacobians_and_error(jacobians, error)\n", + "print(f\" jacobians:\\n weighted: {weighted_jacobians}\\n original: {jacobians}\")\n", + "print(f\" error:\\n weighted: {weighted_error}\\n original: {error}\\n\")\n", + "\n", + "# If the ScaleCostWeight is included in the cost function, we can directly\n", + "# use the `weight_errors` and `weight_jacobians_and_error` of the cost function.\n", + "print(\"Using the `weighted_error` function of the previous cost function:\") \n", + "print(f\" 
weighted cost function error: {cf.weighted_error()} vs unweighted error: {cf.error()}\")" + ] + }, + { + "cell_type": "markdown", + "id": "5e5bfa8e", + "metadata": {}, + "source": [ + "## 4. Objective" + ] + }, + { + "cell_type": "markdown", + "id": "666218ec", + "metadata": {}, + "source": [ + "A `th.Objective` defines the structure of an optimization problem, by adding one or more cost functions to it, each with associated cost weights and variables. The `th.Objective` will combine them into a global error function, with an internal structure that can be used by a Theseus optimizer to minimize the global error via changes in the optimization variables. \n", + "\n", + "Currently, `th.Objective` supports nonlinear sum of squares objectives, where the global error is the sum of the squares of each of its cost function errors, weighted by their corresponding cost weights. We plan to extend to other optimization structures in the future. A critical point in the creation of the objective is that **Theseus assumes that cost weights provided will also be squared in the final the objective.** Formally, we currently support objectives of the form\n", + "\n", + "

\n", + " \"Theseus\n", + "

\n", + "\n", + "where **v** represents the set of variables, *f*i is a cost function error, and *w*i its associated cost weight.\n", + "\n", + "Below we show a simple example of creating an objective. We will want to minimize the following function (x - a)2 + 4(y - b)2, where *a* and *b* as constants, *x* and *y* as variables. Below, we first create (1) the optimization and auxiliary variables, (2) cost weights, (3) cost functions, (4) objective.\n", + "\n", + "Then, to evaluate the `Objective`, we will use its `error_squared_norm` function. Before we can evaluate it, however, we must use the `Objective.update` function at least once (so that the internal data structures are correctly set up). In general, the `update` function is used to easily change the values of all variables registered with the `Objective`. This function receives a dictionary that maps variable names to torch tensors to which the corresponding variables should be updated. \n", + "\n", + "We finally show that the current objective is computed correctly for this function. (In the next section, we optimize the objective to its minimum value)" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "id": "d1c4bb95", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Error term 1: unweighted: tensor([[-0.5000]]) weighted: tensor([[-0.5000]])\n", + "Error term 2: unweighted: tensor([[1.]]) weighted: tensor([[2.]])\n", + "Objective value: tensor([4.2500])\n" + ] + } + ], + "source": [ + "# Step 1: Construct optimization and auxiliary variables.\n", + "# Construct variables of the function: these the optimization variables of the cost functions. 
\n", + "x = th.Vector(1, name=\"x\")\n", + "y = th.Vector(1, name=\"y\")\n", + "\n", + "# Construct auxiliary variables for the constants of the function.\n", + "a = th.Vector(data=torch.randn(1,1), name=\"a\")\n", + "b = th.Vector(data=torch.randn(1,1), name=\"b\")\n", + "\n", + "# Step 2: Construct cost weights\n", + "# For w1, let's use a named variable\n", + "w1 = th.ScaleCostWeight(th.Variable(data=torch.ones(1, 1), name=\"w1_sqrt\"))\n", + "w2 = th.ScaleCostWeight(2.0) # we provide 2, as sqrt of 4 for the (y-b)^2 term\n", + "\n", + "# Step 3: Construct cost functions representing each error term\n", + "# First term\n", + "cf1 = th.eb.VariableDifference(x, w1, a, name=\"term_1\")\n", + "# Second term\n", + "cf2 = th.eb.VariableDifference(y, w2, b, name=\"term_2\")\n", + "\n", + "# Step 4: Create the objective function and add the error terms\n", + "objective = th.Objective()\n", + "objective.add(cf1)\n", + "objective.add(cf2)\n", + "\n", + "# Step 5: Evaluate objective under current values\n", + "# Note this needs to be preceded by a call to `objective.update`\n", + "# Here we use the update function to set values of all variables\n", + "objective.update({\"a\": torch.ones(1,1), \"b\": 2 * torch.ones(1, 1), \n", + " \"x\": 0.5 * torch.ones(1,1), \"y\": 3 * torch.ones(1, 1)})\n", + "# Weighted error should be: cost_weight * weighted_error \n", + "print(f\"Error term 1: unweighted: {cf1.error()} weighted: {cf1.weighted_error()}\")\n", + "print(f\"Error term 2: unweighted: {cf2.error()} weighted: {cf2.weighted_error()}\")\n", + "# Objective value should be: (error1)^2 + (error2)^2 \n", + "print(f\"Objective value: {objective.error_squared_norm()}\")" + ] + }, + { + "cell_type": "markdown", + "id": "efa8e8e7", + "metadata": {}, + "source": [ + "Adding cost functions to the objective registers all of its optimization and auxiliary variables (and those of its cost weights, if present). 
`th.Objective` also checks that names are not overloaded by different variable or cost function objects" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "id": "d2be52d5", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Two different cost function objects with the same name (term_1) are not allowed in the same objective.\n", + "Two different variable objects with the same name (x) are not allowed in the same objective.\n" + ] + } + ], + "source": [ + "try:\n", + " objective.add(th.eb.VariableDifference(y, w2, b, name=\"term_1\"))\n", + "except ValueError as e:\n", + " print(e)\n", + " \n", + "try:\n", + " obj2 = th.Objective()\n", + " obj2.add(th.eb.VariableDifference(x, w1, a, name=\"term_1\"))\n", + " fake_x1 = th.Vector(1, name=\"x\")\n", + " obj2.add(th.eb.VariableDifference(fake_x1, w2, b, name=\"fake_term\"))\n", + "except ValueError as e:\n", + " print(e)" + ] + }, + { + "cell_type": "markdown", + "id": "41760de5", + "metadata": {}, + "source": [ + "## 5. Optimizers\n", + "\n", + "Theseus provides a set of linear and nonlinear optimizers for minimizing problems described as `th.Objective`. \n", + "The objective can be solved by calling `optimizer.optimize()`, which will change the values of optimization \n", + "variables to minimize its associated objective. `optimize` leaves the optimization variables at the final values found, \n", + "and returns an info object about the optimization (which contains the best solution and optimization statistics)." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 8, + "id": "44c27658", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "x: tensor([[1.0000]]) vs a: tensor([[1.]])\n", + "y: tensor([[2.0000]]) vs b: tensor([[2.]])\n", + "Objective after optimization: tensor([7.1054e-14])\n" + ] + } + ], + "source": [ + "# Recall that our objective is (x - a)^2 + 4 (y - b)^2\n", + "# which is minimized at x = a and y = b\n", + "# Let's start by assigning random values to them\n", + "objective.update({\n", + " \"x\": torch.randn(1, 1),\n", + " \"y\": torch.randn(1, 1)\n", + "})\n", + "\n", + "# Now let's use the optimizer. Because this problem is minimizing a\n", + "# quadratic form, a linear optimizer can solve for the optimal solution\n", + "optimizer = th.LinearOptimizer(objective, th.CholeskyDenseSolver)\n", + "info = optimizer.optimize()\n", + "\n", + "# Now let's check the values of x and y \n", + "# Here we print only the Vectors' data attributes for ease of understanding\n", + "print(f\"x: {x.data} vs a: {a.data}\") # Matches a = 1\n", + "print(f\"y: {y.data} vs b: {b.data}\") # Matches b = 2\n", + "print(f\"Objective after optimization: {objective.error_squared_norm()}\")" + ] + }, + { + "cell_type": "markdown", + "id": "3b2d58f0", + "metadata": {}, + "source": [ + "## 6. TheseusLayer\n", + "\n", + "The `TheseusLayer` provides an interface between `torch` code upstream/downstream, and Theseus objectives and optimizers. The `forward()` method combines the functionality of `Objective.update()` and `Optimizer.optimize()` into a single call. It receives an update dictionary as input, and returns a dictionary with the torch data of optimization variables after optimization, as well as the optimizer's output info."
+ ] + }, + { + "cell_type": "code", + "execution_count": 9, + "id": "2a6f1dea", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "After calling TheseusLayer's forward():\n", + " Values: {'x': tensor([[1.]]), 'y': tensor([[2.]])}\n", + " Info: OptimizerInfo(best_solution={'x': tensor([[1.]]), 'y': tensor([[2.]])}, status=array([], dtype=object))\n", + " Optimized objective: tensor([0.])\n" + ] + } + ], + "source": [ + "layer = th.TheseusLayer(optimizer)\n", + "values, info = layer.forward({\n", + " \"x\": torch.randn(1, 1),\n", + " \"y\": torch.randn(1, 1),\n", + " \"a\": torch.ones(1, 1),\n", + " \"b\": 2 * torch.ones(1, 1),\n", + " \"w1_sqrt\": torch.ones(1, 1)\n", + "})\n", + "print(f\"After calling TheseusLayer's forward():\")\n", + "print(f\" Values: {values}\")\n", + "print(f\" Info: {info}\")\n", + "print(f\" Optimized objective: {objective.error_squared_norm()}\")" + ] + }, + { + "cell_type": "markdown", + "id": "d9cc32be", + "metadata": {}, + "source": [ + "The `TheseusLayer` allows for backpropagation, and is semantically similar to a layer in a PyTorch neural network. Backpropagating through the `TheseusLayer` allows for learning of any necessary quantities of the problem, e.g., `CostWeight`, `Variable`, etc. The following tutorials will illustrate several applications for learning with a `TheseusLayer`.\n", + "\n", + "To distinguish between the optimization done by the Theseus optimizers, and those done outside the Theseus optimizers (e.g., by PyTorch's autograd during learning), we will refer to them as *inner loop optimization* and *outer loop optimization* respectively. Note that the inner loop optimization optimizes only the optimization variables, and the outer loop optimization can optimize only (selected) auxiliary variables provided to the PyTorch autograd optimizers. 
A call to `TheseusLayer` `forward()` performs only inner loop optimization; typically the PyTorch autograd learning steps will perform the outer loop optimizations. We will see examples of this in the following tutorials.\n", + "\n", + "Any updates to the auxiliary variables during the learning loop are best done via the `forward` method of the `TheseusLayer`. While variables and objectives can be updated independently without going through the `TheseusLayer`, this may result in an error during optimization, depending on the states of the internal data structures. Therefore, we recommend that any updates during learning be performed only via the `TheseusLayer`." + ] + } + ], + "metadata": { + "interpreter": { + "hash": "cc5406e9a0deef8e8d80dfeae7f152b84172dd1229ee5c42b512f2c6ec6850e3" + }, + "kernelspec": { + "display_name": "Theseus", + "language": "python", + "name": "theseus" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.7.8" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/tutorials/01_least_squares_optimization.ipynb b/tutorials/01_least_squares_optimization.ipynb new file mode 100644 index 000000000..66cb5da86 --- /dev/null +++ b/tutorials/01_least_squares_optimization.ipynb @@ -0,0 +1,301 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "

Least-squares Optimization with Theseus

\n", + "This tutorial demonstrates how to solve a curve-fitting problem with Theseus. The examples in this tutorial are inspired by the [Ceres](https://ceres-solver.org/) [tutorial](http://ceres-solver.org/nnls_tutorial.html), and structured like the [curve-fitting example](http://ceres-solver.org/nnls_tutorial.html#curve-fitting) and [robust curve-fitting example](http://ceres-solver.org/nnls_tutorial.html#robust-curve-fitting) in Ceres.\n", + "\n", + "

Quadratic curve-fitting

\n", + "In this tutorial, we will show how we can fit a quadratic function: y = ax2 + b\n", + "\n", + "

Step 0: Generating Data

\n", + "We first generate data by sampling points from the quadratic function x2 + 0.5. To this, we add Gaussian noise with σ = 0.01." + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [], + "source": [ + "import torch\n", + "\n", + "torch.manual_seed(0)\n", + "\n", + "def generate_data(num_points=100, a=1, b=0.5, noise_factor=0.01):\n", + " # Generate data: 100 points sampled from the quadratic curve listed above\n", + " data_x = torch.rand((1, num_points))\n", + " noise = torch.randn((1, num_points)) * noise_factor\n", + " data_y = a * data_x.square() + b + noise\n", + " return data_x, data_y\n", + "\n", + "data_x, data_y = generate_data()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "We demonstrate how to use Theseus to solve this curve-fitting problem in 3 steps:\n", + "
    \n", + "
  • Step 1: Represent data and variables\n", + "
  • Step 2: Set up optimization\n", + "
  • Step 3: Run optimization\n", + "
\n", + "\n", + "

Step 1: Represent data and variables in Theseus

\n", + "As we described in Tutorial 0, Theseus Variables are semantically divided into two main classes:\n", + "
    \n", + "
  • optimization variables: those that will be modified by our non-linear least-squares optimizers to minimize the total cost function\n", + "
  • auxiliary variables: other variables required by the cost functions to carry out the optimization, but which will not be optimized by the non-linear least-squares optimizers, e.g., application data in this example (we will see more examples of auxiliary variables in later tutorials)\n", + "
\n", + "\n", + "Our first step is to represent the data (x, y) and the optimization variables (a and b) in Theseus data structures.\n", + "The optimization variables must be of type `Manifold`. For this example, we choose its `Vector` sub-class to represent a and b. Because they are one-dimensional quantities, we require only 1 degree-of-freedom in initializing these `Vector` objects. (Alternately, we could also represent both variables as a single 2-dimensional `Vector` object; however, this would change how the error functions are written.) The (auxiliary) data variables may be an instance of any `Variable` type. For this example, the type `Variable` itself suffices." + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [], + "source": [ + "import theseus as th\n", + "\n", + "# data is of type Variable\n", + "x = th.Variable(data_x, name=\"x\")\n", + "y = th.Variable(data_y, name=\"y\")\n", + "\n", + "# optimization variables are of type Vector with 1 degree of freedom (dof)\n", + "a = th.Vector(1, name=\"a\")\n", + "b = th.Vector(1, name=\"b\")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "

Step 2: Set up optimization

\n", + " \n", + "The residual errors of the least-squares fit are captured in a `CostFunction`. In this example, we will use the `AutoDiffCostFunction` provided by Theseus, which provides an easy-to-use way to capture arbitrary cost functions. The `AutoDiffCostFunction` only requires that we define the optimization variables and the auxiliary variables, and provide an error function that computes the residual errors. From there, it uses the PyTorch autograd to compute the Jacobians for the optimization variables via automatic differentiation. \n", + "\n", + "In the example below, the `quad_error_fn` captures the least-squares error of the quadratic function fitted with the two 1-dimensional `Vector` objects `a`, `b`. \n", + "\n", + "The total least-squares error can be captured by either one 100-dimensional `AutoDiffCostFunction` (where each dimension represents the error of one data point), or a set of 100 one-dimensional `AutoDiffCostFunction` (where instead each cost function captures the error of one data point). We use the former (i.e., one 100-dimensional `AutoDiffCostFunction`) in this example, but we will see examples of the latter in Tutorials 4 & 5.\n", + "\n", + "Finally, we combine the cost functions into a Theseus optimization problem:\n", + "- The optimization criterion is represented by the `Objective`. 
This is constructed by adding all the cost functions to it.\n", + "- We can then choose an optimizer and set some of its default configuration (e.g., `GaussNewton` with `max_iterations=15` in the example below).\n", + "- The objective and its associated optimizer are then used to construct the `TheseusLayer`, which represents one layer of optimization" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [], + "source": [ + "def quad_error_fn(optim_vars, aux_vars):\n", + " a, b = optim_vars \n", + " x, y = aux_vars\n", + " est = a.data * x.data.square() + b.data\n", + " err = y.data - est\n", + " return err\n", + "\n", + "optim_vars = a, b\n", + "aux_vars = x, y\n", + "cost_function = th.AutoDiffCostFunction(\n", + " optim_vars, quad_error_fn, 100, aux_vars=aux_vars, name=\"quadratic_cost_fn\"\n", + ")\n", + "objective = th.Objective()\n", + "objective.add(cost_function)\n", + "optimizer = th.GaussNewton(\n", + " objective,\n", + " max_iterations=15,\n", + " step_size=0.5,\n", + ")\n", + "theseus_optim = th.TheseusLayer(optimizer)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "

Step 3: Run optimization

\n", + "Running the optimization problem now only requires that we provide the input data and initial values, and call the forward function on the `TheseusLayer`.\n", + "\n", + "The input is provided as a dictionary, where the keys represent either the optimization variables (which are paired with their initial values), or the auxiliary variables (which are paired with their data). The dictionary `theseus_inputs` shows an example of this.\n", + "\n", + "With this input, we can now run the least squares optimization in Theseus. We do this by calling the `forward` function on the `TheseusLayer`. Two quantities are returned after each call to the `forward` function:\n", + "1. The `updated_inputs` object, which holds the final values for the optimized variables, along with unchanged auxiliary variable values. This allows us to use the `updated_inputs` as input to downstream functions or Theseus layers (e.g., for problems that require multiple forward passes, as we will see in Tutorial 2.)\n", + "2. The `info` object, which can track the best solution if necessary, and holds other useful information about the optimization. The best solution is useful to track because the optimization algorithm does not stop if the error increases from an earlier iteration. (The best solution is not as useful when backpropagation is carried out, because backpropagation uses the entire optimization sequence; see Tutorial 2.)" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Nonlinear optimizer. Iteration: 0. Error: 38.42743682861328\n", + "Nonlinear optimizer. Iteration: 1. Error: 9.609882354736328\n", + "Nonlinear optimizer. Iteration: 2. Error: 2.4054908752441406\n", + "Nonlinear optimizer. Iteration: 3. Error: 0.6043919324874878\n", + "Nonlinear optimizer. Iteration: 4. Error: 0.15411755442619324\n", + "Nonlinear optimizer. Iteration: 5. 
Error: 0.04154859483242035\n", + "Nonlinear optimizer. Iteration: 6. Error: 0.013406438753008842\n", + "Nonlinear optimizer. Iteration: 7. Error: 0.006370890885591507\n", + "Nonlinear optimizer. Iteration: 8. Error: 0.0046120136976242065\n", + "Nonlinear optimizer. Iteration: 9. Error: 0.0041722883470356464\n", + "Nonlinear optimizer. Iteration: 10. Error: 0.004062363877892494\n", + "Nonlinear optimizer. Iteration: 11. Error: 0.004034876357764006\n", + "Nonlinear optimizer. Iteration: 12. Error: 0.004028005059808493\n", + "Nonlinear optimizer. Iteration: 13. Error: 0.004026289563626051\n", + "Nonlinear optimizer. Iteration: 14. Error: 0.004025860223919153\n", + "Nonlinear optimizer. Iteration: 15. Error: 0.00402575358748436\n", + "Best solution: {'a': tensor([[0.9945]]), 'b': tensor([[0.5018]])}\n" + ] + } + ], + "source": [ + "theseus_inputs = {\n", + "\"x\": data_x,\n", + "\"y\": data_y,\n", + "\"a\": 2 * torch.ones((1, 1)),\n", + "\"b\": torch.ones((1, 1))\n", + "}\n", + "with torch.no_grad():\n", + " updated_inputs, info = theseus_optim.forward(theseus_inputs, track_best_solution=True, verbose=True)\n", + "print(\"Best solution:\", info.best_solution)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "We observe that we have recovered almost exactly the original a, b values used in the quadratic function we sampled from.\n", + "\n", + "

Robust Quadratic curve-fitting

\n", + "\n", + "This example can also be adapted for a problem where the errors are weighted, e.g., with a Cauchy loss that reduces the weight of data points with extremely high errors. This is similar to the [robust curve-fitting example](http://ceres-solver.org/nnls_tutorial.html#robust-curve-fitting) in the Ceres solver.\n", + "\n", + "In this tutorial, we make a simple modification to add a Cauchy-loss weighting to the error function: we replace the `quad_error_fn` above in the `AutoDiffCostFunction` by creating the following `cauchy_loss_quad_error_fn` that weights it." + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [], + "source": [ + "def cauchy_fn(x):\n", + " return torch.sqrt(0.5 * torch.log(1 + x ** 2))\n", + "\n", + "def cauchy_loss_quad_error_fn(optim_vars, aux_vars):\n", + " err = quad_error_fn(optim_vars, aux_vars)\n", + " return cauchy_fn(err)\n", + "\n", + "wt_cost_function = th.AutoDiffCostFunction(\n", + " optim_vars, cauchy_loss_quad_error_fn, 100, aux_vars=aux_vars, name=\"cauchy_quad_cost_fn\"\n", + ")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Similar to the example above, we can now construct the Theseus optimization problem with this weighted cost function: create `Objective`, an optimizer, and a `TheseusLayer`, and run the optimization." + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Nonlinear optimizer. Iteration: 0. Error: 13.170896530151367\n", + "Nonlinear optimizer. Iteration: 1. Error: 5.595991134643555\n", + "Nonlinear optimizer. Iteration: 2. Error: 2.604552984237671\n", + "Nonlinear optimizer. Iteration: 3. Error: 1.2485320568084717\n", + "Nonlinear optimizer. Iteration: 4. Error: 0.6063637733459473\n", + "Nonlinear optimizer. Iteration: 5. Error: 0.2966576814651489\n", + "Nonlinear optimizer. Iteration: 6. 
Error: 0.14604422450065613\n", + "Nonlinear optimizer. Iteration: 7. Error: 0.0725097507238388\n", + "Nonlinear optimizer. Iteration: 8. Error: 0.03653936833143234\n", + "Nonlinear optimizer. Iteration: 9. Error: 0.018927372992038727\n", + "Nonlinear optimizer. Iteration: 10. Error: 0.01030135527253151\n", + "Nonlinear optimizer. Iteration: 11. Error: 0.0060737887397408485\n", + "Best solution: {'a': tensor([[1.0039]]), 'b': tensor([[0.5112]])}\n" + ] + } + ], + "source": [ + "objective = th.Objective()\n", + "objective.add(wt_cost_function)\n", + "optimizer = th.GaussNewton(\n", + " objective,\n", + " max_iterations=20,\n", + " step_size=0.3,\n", + ")\n", + "theseus_optim = th.TheseusLayer(optimizer)\n", + "theseus_inputs = {\n", + "\"x\": data_x,\n", + "\"y\": data_y,\n", + "\"a\": 2 * torch.ones((1, 1)),\n", + "\"b\": torch.ones((1, 1))\n", + "}\n", + "\n", + "# We suppress warnings in this optimization call, because we observed that with this data, Cauchy \n", + "# loss often results in singular systems with numerical computations as it approaches optimality. \n", + "# Please note: getting a singular system during the forward optimization will throw\n", + "# an error if torch's gradient tracking is enabled.\n", + "import warnings\n", + "warnings.simplefilter(\"ignore\") \n", + "\n", + "with torch.no_grad():\n", + " _, info = theseus_optim.forward(theseus_inputs, track_best_solution=True, verbose=True)\n", + "print(\"Best solution:\", info.best_solution)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "For a more efficient solution to the curve-fitting problems, one can also write a custom `CostFunction` with closed-form Jacobians, rather than use the `AutoDiffCostFunction` with torch's numerically-computed Jacobians. We show an example of this in Tutorial 3." 
+ ] + } + ], + "metadata": { + "interpreter": { + "hash": "cc5406e9a0deef8e8d80dfeae7f152b84172dd1229ee5c42b512f2c6ec6850e3" + }, + "kernelspec": { + "display_name": "Python 3.8.8 64-bit (conda)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.8.8" + }, + "orig_nbformat": 4 + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/tutorials/02_differentiable_nlls.ipynb b/tutorials/02_differentiable_nlls.ipynb new file mode 100644 index 000000000..39fb87dac --- /dev/null +++ b/tutorials/02_differentiable_nlls.ipynb @@ -0,0 +1,546 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "

Differentiating Through Nonlinear Optimization

\n", + "\n", + "This tutorial shows how we can differentiate through a least-squares optimization problem using Theseus. \n", + "\n", + "The optimization problems of Tutorial 1 are done with one application (each) of the Theseus non-linear least squares optimizers, as they are straightforward curve-fitting problems. Theseus can also be used to solve more complex optimization problems, e.g., with dependencies between the quantities being optimized. In this tutorial, we will solve a set of curve-fitting problems that share one common parameter. As in Tutorial 1, we choose quadratic functions for simplicity: we wish to fit y = ax2 + b, where a is fixed for all problems, and b is different for each problem. \n", + "\n", + "At a high-level, we solve this problem by using torch automatic differentiation to optimize the value of a, and optimize b for a given a with the non-linear least squares optimizers in Theseus. The rest of this notebook works through the necessary steps in detail. " + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "

Step 0: Data Generation

\n", + "\n", + "As before, we first generate data by sampling points from a set of quadratic functions 3x2 + b, where b = 3, 5, ..., 21. To this, we add Gaussian noise with σ = 0.01." + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [], + "source": [ + "import torch\n", + "\n", + "torch.manual_seed(0)\n", + "\n", + "def generate_data(num_points=100, a=1, b=0.5, noise_factor=0.01):\n", + " # Generate data: 100 points sampled from the quadratic curve listed above\n", + " data_x = torch.rand((1, num_points))\n", + " noise = torch.randn((1, num_points)) * noise_factor\n", + " data_y = a * data_x.square() + b + noise\n", + " return data_x, data_y\n", + "\n", + "def generate_learning_data(num_points, num_models):\n", + " a, b = 3, 1\n", + " data_batches = []\n", + " for i in range(num_models):\n", + " b = b + 2\n", + " data = generate_data(num_points, a, b)\n", + " data_batches.append(data)\n", + " return data_batches\n", + "\n", + "num_models = 10\n", + "data_batches = generate_learning_data(100, num_models)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "

Step 1: Set up Theseus Optimization

\n", + "\n", + "Next, we set up the Theseus optimization problem similar to Tutorial 1, but with one key change: a is no longer an optimization variable for the Theseus NLLS optimizer; instead, its an auxiliary variable whose value will be optimized by PyTorch through backpropagation. b remains the only optimization variable for the Theseus NLLS optimizer. The code below illustrates this." + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [], + "source": [ + "import theseus as th\n", + "\n", + "data_x, data_y = data_batches[0]\n", + "x = th.Variable(data_x, name=\"x\")\n", + "y = th.Variable(data_y, name=\"y\")\n", + "a = th.Vector(1, name=\"a\")\n", + "b = th.Vector(1, name=\"b\")\n", + "\n", + "# Note 'b' is the only optim_var, and 'a' is part of aux_vars\n", + "optim_vars = [b]\n", + "aux_vars = a, x, y\n", + "\n", + "# updated error function reflects change in 'a'\n", + "def quad_error_fn2(optim_vars, aux_vars):\n", + " [b] = optim_vars \n", + " a, x, y = aux_vars\n", + " est = a.data * x.data.square() + b.data\n", + " err = y.data - est\n", + " return err\n", + "\n", + "cost_function = th.AutoDiffCostFunction(\n", + " optim_vars, quad_error_fn2, 100, aux_vars=aux_vars, name=\"quadratic_cost_fn\"\n", + ")\n", + "objective = th.Objective()\n", + "objective.add(cost_function)\n", + "optimizer = th.GaussNewton(\n", + " objective,\n", + " max_iterations=50,\n", + " step_size=0.5,\n", + ")\n", + "theseus_optim = th.TheseusLayer(optimizer)\n", + "\n", + "# The value for Variable 'a' is optimized by PyTorch backpropagation\n", + "a_tensor = torch.nn.Parameter(torch.rand(1, 1))\n", + "model_optimizer = torch.optim.Adam([a_tensor], lr=0.1)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "

Step 2: Run Optimization and Learning

\n", + "\n", + "Finally, we compute a and b by using a learning loop around the Theseus NLLS optimizer, with each model's data taken as a single batch. The Theseus NLLS optimizer optimizes `b` for the current `a` (referred to as the inner loop optimization), while the PyTorch backpropagation learns the correct value to use for `a` across the batches (referred to as the outer loop optimization). \n", + "\n", + "For clarity, we describe the high-level steps required:\n", + "- Step 2.1: As in Tutorial 1, we begin by creating a dictionary of inputs, and pass it to the `forward` function of the `TheseusLayer`. Note that the `forward` function here does not track the best solution in this example, as the entire NLLS optimization sequence needs to be used for backpropagation. \n", + "After the optimization completes in the `TheseusLayer`, we get a dictionary `updated_inputs` with latest values of the optimization variables (and all other dictionary values unchanged). \n", + "- Step 2.2: We then update the `objective` with the `updated_inputs` dictionary, and use it to compute the loss. This kind of use of the Theseus optimization variables is the reason the `forward` returns a dictionary of inputs.\n", + "- Step 2.3: This loss is used for backpropagation. The PyTorch learning optimizer (i.e. Adam here) will now take one optimization step for the learning parameters (i.e., `a`'s value in this example).\n", + "- Iterate: We now make a new call to `forward`, repeating Steps 2.1-2.3. \n", + "\n", + "We illustrate these steps in the code below." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Initial a value: 0.8671661019325256\n", + "Epoch: 0 Loss: 2.550027549266815\n", + "Epoch: 1 Loss: 0.5708533525466919\n", + "Epoch: 2 Loss: 0.0265085999853909\n", + "Epoch: 3 Loss: 0.04149620106909424\n", + "Epoch: 4 Loss: 0.03596132283564657\n", + " ---------------- Solutions at Epoch 04 -------------- \n", + " a value: 3.1208860874176025\n", + " b values: [2.9216177463531494, 4.930838584899902, 6.922538757324219, 8.923272132873535, 10.926830291748047, 12.952813148498535, 14.932782173156738, 16.949243545532227, 18.946279525756836, 20.954736709594727]\n", + " ----------------------------------------------------- \n", + "Epoch: 5 Loss: 0.005245754415227566\n", + "Epoch: 6 Loss: 0.002350384122109972\n", + "Epoch: 7 Loss: 0.0025651332980487496\n", + "Epoch: 8 Loss: 0.0010936586622847244\n", + "Epoch: 9 Loss: 0.001083762341295369\n", + " ---------------- Solutions at Epoch 09 -------------- \n", + " a value: 3.01115083694458\n", + " b values: [2.99800181388855, 4.999911308288574, 6.998246669769287, 8.995952606201172, 10.996501922607422, 12.998129844665527, 14.996124267578125, 16.99513053894043, 18.9946346282959, 20.996692657470703]\n", + " ----------------------------------------------------- \n", + "Epoch: 10 Loss: 0.0010451501148054376\n", + "Epoch: 11 Loss: 0.0009841886785579845\n", + "Epoch: 12 Loss: 0.0009838700643740594\n", + "Epoch: 13 Loss: 0.0009751142497407272\n", + "Epoch: 14 Loss: 0.0009768658710527234\n", + " ---------------- Solutions at Epoch 14 -------------- \n", + " a value: 3.0001351833343506\n", + " b values: [2.99999737739563, 5.002256870269775, 7.0013861656188965, 8.999483108520508, 11.000371932983398, 13.000956535339355, 15.000521659851074, 16.998645782470703, 18.998701095581055, 21.00054931640625]\n", + " ----------------------------------------------------- \n", + "Epoch: 15 Loss: 
0.00097720912162913\n", + "Epoch: 16 Loss: 0.0009765262075234205\n", + "Epoch: 17 Loss: 0.0009760940811247565\n", + "Epoch: 18 Loss: 0.0009761785477166995\n", + "Epoch: 19 Loss: 0.0009763608395587653\n", + " ---------------- Solutions at Epoch 19 -------------- \n", + " a value: 2.999293088912964\n", + " b values: [3.000174045562744, 5.00246524810791, 7.0016632080078125, 8.999787330627441, 11.000700950622559, 13.00119400024414, 15.000886917114258, 16.99893569946289, 18.999032974243164, 21.000858306884766]\n", + " ----------------------------------------------------- \n" + ] + } + ], + "source": [ + "num_batches = len(data_batches)\n", + "num_epochs = 20\n", + "\n", + "print(f\"Initial a value: {a_tensor.item()}\")\n", + "\n", + "for epoch in range(num_epochs):\n", + " epoch_loss = 0.\n", + " epoch_b = [] # keep track of the current b values for each model in this epoch\n", + " for i in range(num_batches):\n", + " model_optimizer.zero_grad()\n", + " data_x, data_y = data_batches[i]\n", + " # Step 2.1: Create input dictionary for TheseusLayer, pass to forward function\n", + " # The value for variable `a` is the updated `a_tensor` by Adam\n", + " # Since we are always passing the same tensor, this update is technically redundant, \n", + " # we include it to explicitly illustrate where variable values come from.\n", + " # An alternative way to do this (try it!)\n", + " # would be to call `a.update(a_tensor)` before the learning loop starts\n", + " # and just let Adam change its value under the hood (i.e., update only `x`, `y`, and `b`\n", + " # inside the loop)\n", + " theseus_inputs = {\n", + " \"a\": a_tensor,\n", + " \"x\": data_x,\n", + " \"y\": data_y,\n", + " \"b\": torch.ones((1, 1)),\n", + " }\n", + " updated_inputs, info = theseus_optim.forward(theseus_inputs)\n", + " epoch_b.append(updated_inputs[\"b\"].item()) # add the optimized \"b\" of the current batch to `epoch_b` list. 
\n", + " # Note: here, we do not track the best solution, as we \n", + " # backpropagate through the entire optimization sequence.\n", + " # Step 2.2: Update objective function with updated inputs\n", + " objective.update(updated_inputs)\n", + " loss = cost_function.error().square().mean()\n", + " # Step 2.3: PyTorch backpropagation\n", + " loss.backward()\n", + " model_optimizer.step()\n", + "\n", + " loss_value = loss.item()\n", + " epoch_loss += loss_value\n", + " print(f\"Epoch: {epoch} Loss: {epoch_loss}\")\n", + " if epoch % 5 == 4:\n", + " print(f\" ---------------- Solutions at Epoch {epoch:02d} -------------- \")\n", + " print(\" a value:\", a.data.item())\n", + " print(\" b values: \", epoch_b)\n", + " print(f\" ----------------------------------------------------- \")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "We observe that we are able to recover `a` and `b` very close to the values we sampled from. \n", + "\n", + "

Step 3 (Optional): Solving all Optimization Problems Simultaneously

\n", + "\n", + "The above is only one of many ways to model our problem with Theseus. Theseus also supports solving multiple optimization problems simultaneously, so we could also solve all of the 10 quadratic-fit problems simultaneously. We can do this in two natural ways:\n", + "- Version A: by creating 10 `AutoDiffCostFunction`s, one for each optimization problem. Here, we need each `AutoDiffCostFunction` to have separate `b, x, y` variables (e.g., `[b1, x1, y1]`, `[b2, x2, y2]` etc). All cost functions are added to the objective, and they can be optimized jointly with one `forward`, and differentiated through jointly with the following `backward`. \n", + "- Version B: by changing the `b, x, y` variables to be batched, and having the error function above `quad_error_fn2` to support batches, we can use a single `AutoDiffCostFunction` to capture the cost of their fit. However, because the error may now be batched, the loss has to be computed as an aggregate of the evaluated objective. \n", + "\n", + "Version A is more commonly used in situations where each variable and cost-function has a different semantic interpretation (e.g., different time-steps in the motion-planning problem of Tutorial 4 & 5), while Version B is commonly used for multiple instances of the same problem (e.g., different maps in Tutorials 4 & 5). \n", + "\n", + "We show next the complete code for each version, and see that both versions find very similar `a` and `b` values. \n", + "\n", + "Because both versions need a common learning routine, we first create a subroutine `optimize_and_learn_models_jointly` for readability. 
" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [], + "source": [ + "# Sub-routine to optimize and learn models simultaneously\n", + "\n", + "def optimize_and_learn_models_jointly(theseus_optim, model_optimizer, num_epochs=20):\n", + " # again, assume a_tensor is initialized outside this loop\n", + " print(f\"Initial a value: {a_tensor.item()}\")\n", + " \n", + " for epoch in range(num_epochs):\n", + " model_optimizer.zero_grad()\n", + " # Step 2.1: Create input dictionary for TheseusLayer, pass to forward function\n", + " theseus_inputs = construct_theseus_layer_inputs()\n", + " updated_inputs, _ = theseus_optim.forward(theseus_inputs)\n", + "\n", + " # Step 2.2: Update objective function with updated inputs\n", + " objective.update(updated_inputs)\n", + " loss = objective.error_squared_norm() \n", + " loss = loss.sum() # Note `loss` now needs a final aggregation (for version B)\n", + "\n", + " # Step 2.3: PyTorch backpropagation\n", + " loss.backward()\n", + " model_optimizer.step()\n", + "\n", + " if epoch == 0:\n", + " min_loss = loss\n", + " if loss <= min_loss:\n", + " min_loss = loss\n", + " best_model_a = a.data.item()\n", + " best_model_b = [b.data.item() for b in all_b]\n", + "\n", + " print(f\"Epoch: {epoch} Loss: {loss.item()}\")\n", + " if epoch % 10 == 9:\n", + " print(f\" ---------------- Solutions at Epoch {epoch:02d} -------------- \")\n", + " print(\" a value:\", a.data.item())\n", + " print(\" b values: \", [b.data.item() for b in all_b])\n", + " print(f\" ----------------------------------------------------- \")\n", + "\n", + " return best_model_a, best_model_b" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "

Step 3.1: Example Version A

\n", + "\n", + "Now we show Version A. This version makes the following changes from the original code snippet:\n", + "- create 10 `x`, `y` and `b` variables.\n", + "- create 10 `AutoDiffCostFunctions`, each using the same `a` but the corresponding `b_i`, `x_i` and `y_i`. All cost functions are added to the objective.\n", + "- construct the `theseus_inputs` dictionary with the `b_i`, `x_i` and `y_i` mapped to the correct data batch.\n", + "\n", + "Note that `a` and its setup the PyTorch optimizer remain the same. Once the optimization problem is setup, we call `optimize_and_learn_models_jointly` sub-routine to compute the `a` and `b` values. " + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Initial a value: 0.4854246973991394\n", + "Epoch: 0 Loss: 548.7742919921875\n", + "Epoch: 1 Loss: 485.24896240234375\n", + "Epoch: 2 Loss: 425.75042724609375\n", + "Epoch: 3 Loss: 370.3395690917969\n", + "Epoch: 4 Loss: 319.0608215332031\n", + "Epoch: 5 Loss: 271.9399108886719\n", + "Epoch: 6 Loss: 228.9813232421875\n", + "Epoch: 7 Loss: 190.1656951904297\n", + "Epoch: 8 Loss: 155.44735717773438\n", + "Epoch: 9 Loss: 124.75199890136719\n", + " ---------------- Solutions at Epoch 09 -------------- \n", + " a value: 1.9375165700912476\n", + " b values: [3.388939619064331, 5.365548133850098, 7.41563081741333, 9.417536735534668, 11.4259614944458, 13.298116683959961, 15.449816703796387, 17.354942321777344, 19.413862228393555, 21.403715133666992]\n", + " ----------------------------------------------------- \n", + "Epoch: 10 Loss: 97.9747314453125\n", + "Epoch: 11 Loss: 74.97842407226562\n", + "Epoch: 12 Loss: 55.59276580810547\n", + "Epoch: 13 Loss: 39.614349365234375\n", + "Epoch: 14 Loss: 26.807687759399414\n", + "Epoch: 15 Loss: 16.907485961914062\n", + "Epoch: 16 Loss: 9.62230396270752\n", + "Epoch: 17 Loss: 4.639813423156738\n", + "Epoch: 18 Loss: 1.6331729888916016\n", 
+ "Epoch: 19 Loss: 0.2687584459781647\n", + " ---------------- Solutions at Epoch 19 -------------- \n", + " a value: 3.0359597206115723\n", + " b values: [3.0146169662475586, 5.015951633453369, 7.017027378082275, 9.015321731567383, 11.016518592834473, 13.012236595153809, 15.01756763458252, 17.01212501525879, 19.014381408691406, 21.015764236450195]\n", + " ----------------------------------------------------- \n", + " ---------------- Final Solutions -------------- \n", + " a value: 3.0359597206115723\n", + " b values: [3.0146169662475586, 5.015951633453369, 7.017027378082275, 9.015321731567383, 11.016518592834473, 13.012236595153809, 15.01756763458252, 17.01212501525879, 19.014381408691406, 21.015764236450195]\n", + " ----------------------------------------------- \n" + ] + } + ], + "source": [ + "# Version A\n", + "\n", + "# replace x and y with 10 different variables x0 ... x9, y0 ... y9\n", + "all_x, all_y = [], []\n", + "for i in range(num_models): \n", + " all_x.append(th.Variable(data_x, name=f\"x{i}\"))\n", + " all_y.append(th.Variable(data_y, name=f\"y{i}\"))\n", + "\n", + "# replace b with 10 different variables b0 ... 
b9\n", + "all_b = []\n", + "for i in range(num_models):\n", + " all_b.append(th.Vector(1, name=f\"b{i}\"))\n", + "\n", + "# a remains the same\n", + "a = th.Vector(1, name=\"a\")\n", + "\n", + "# objective now has 10 different cost functions\n", + "objective = th.Objective()\n", + "for i in range(num_models):\n", + " # each cost function i uses b_i as optim_var and x_i, y_i and a as aux_var\n", + " optim_vars = [all_b[i]]\n", + " aux_vars = a, all_x[i], all_y[i]\n", + " cost_function = th.AutoDiffCostFunction(\n", + " optim_vars, quad_error_fn2, 100, aux_vars=aux_vars, name=f\"quadratic_cost_fn_{i}\"\n", + " )\n", + " objective.add(cost_function)\n", + "\n", + "# optimizer, TheseusLayer and model optimizer remains the same\n", + "optimizer = th.GaussNewton(\n", + " objective, max_iterations=50, step_size=0.4,\n", + ")\n", + "theseus_optim = th.TheseusLayer(optimizer)\n", + "a_tensor = torch.nn.Parameter(torch.rand(1, 1))\n", + "model_optimizer = torch.optim.Adam([a_tensor], lr=0.15)\n", + "\n", + "# TheseusLayer dictionary now needs to construct b0 ... b9, x0 ... x9, y0 ... y9\n", + "def construct_theseus_layer_inputs():\n", + " theseus_inputs = {}\n", + " for i in range(num_models):\n", + " data_x, data_y = data_batches[i]\n", + " theseus_inputs.update({\n", + " f\"x{i}\": data_x,\n", + " f\"y{i}\": data_y,\n", + " f\"b{i}\": torch.ones((1, 1)),\n", + " })\n", + " theseus_inputs.update({\"a\": a_tensor})\n", + " return theseus_inputs\n", + "\n", + "# Run Theseus optimization and learning\n", + "best_model = optimize_and_learn_models_jointly(theseus_optim, model_optimizer)\n", + "\n", + "print(f\" ---------------- Final Solutions -------------- \")\n", + "print(\" a value:\", best_model[0])\n", + "print(\" b values: \", best_model[1])\n", + "print(f\" ----------------------------------------------- \")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "

Step 3.2: Example Version B

\n", + "\n", + "Now we show Version B. This version makes the following changes from the original code snippet:\n", + "- create `x`, `y` variables of sizes `[num_models, 100]` instead of `[1, 100]`\n", + "- create `b` variable of size `[num_models, 1]` instead of `[1, 1]`\n", + "- construct the `theseus_inputs` dictionary with `b`, `x` and `y` data batched\n", + "\n", + "In this example, we do not need to change the error function, because PyTorch tensors handle the broadcasting for the batched version to work. \n", + "\n", + "As before, `a` and its setup for the PyTorch optimizer remain the same. Once the optimization problem is setup, we call `optimize_and_learn_models_jointly` sub-routine to solve the problem." + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Initial a value: 0.9839857220649719\n", + "Epoch: 0 Loss: 352.720947265625\n", + "Epoch: 1 Loss: 286.2037658691406\n", + "Epoch: 2 Loss: 226.8613739013672\n", + "Epoch: 3 Loss: 174.77915954589844\n", + "Epoch: 4 Loss: 129.97665405273438\n", + "Epoch: 5 Loss: 92.39059448242188\n", + "Epoch: 6 Loss: 61.85683822631836\n", + "Epoch: 7 Loss: 38.09175109863281\n", + "Epoch: 8 Loss: 20.675710678100586\n", + "Epoch: 9 Loss: 9.0426025390625\n", + " ---------------- Solutions at Epoch 09 -------------- \n", + " a value: 2.8336267471313477\n", + " b values: [3.0146169662475586, 5.015951633453369, 7.017027378082275, 9.015321731567383, 11.016518592834473, 13.012236595153809, 15.01756763458252, 17.01212501525879, 19.014381408691406, 21.015764236450195]\n", + " ----------------------------------------------------- \n", + "Epoch: 10 Loss: 2.4795351028442383\n", + "Epoch: 11 Loss: 0.1416916847229004\n", + "Epoch: 12 Loss: 1.0855286121368408\n", + "Epoch: 13 Loss: 4.320140838623047\n", + "Epoch: 14 Loss: 8.871736526489258\n", + "Epoch: 15 Loss: 13.85158634185791\n", + "Epoch: 16 Loss: 18.51568603515625\n", + "Epoch: 
17 Loss: 22.30596923828125\n", + "Epoch: 18 Loss: 24.867490768432617\n", + "Epoch: 19 Loss: 26.042295455932617\n", + " ---------------- Solutions at Epoch 19 -------------- \n", + " a value: 3.5438120365142822\n", + " b values: [3.0146169662475586, 5.015951633453369, 7.017027378082275, 9.015321731567383, 11.016518592834473, 13.012236595153809, 15.01756763458252, 17.01212501525879, 19.014381408691406, 21.015764236450195]\n", + " ----------------------------------------------------- \n", + " ---------------- Final Solutions -------------- \n", + " a value: 3.1059281826019287\n", + " b values: [3.0146169662475586, 5.015951633453369, 7.017027378082275, 9.015321731567383, 11.016518592834473, 13.012236595153809, 15.01756763458252, 17.01212501525879, 19.014381408691406, 21.015764236450195]\n", + " ----------------------------------------------- \n" + ] + } + ], + "source": [ + "# Version B\n", + "\n", + "# convert data_x, data_y into torch.tensors of shape [num_models, 100]\n", + "data_x = torch.stack([data_x.squeeze() for data_x, _ in data_batches])\n", + "data_y = torch.stack([data_y.squeeze() for _, data_y in data_batches])\n", + "\n", + "# construct one variable each of x, y of shape [num_models, 100]\n", + "x = th.Variable(data_x, name=\"x\")\n", + "y = th.Variable(data_y, name=\"y\")\n", + "\n", + "# construct a as before\n", + "a = th.Vector(1, name=\"a\")\n", + "\n", + "# construct one variable b, now of shape [num_models, 1]\n", + "b = th.Vector(data=torch.rand(num_models, 1), name=\"b\")\n", + "\n", + "# Again, 'b' is the only optim_var, and 'a' is part of aux_vars along with x, y\n", + "optim_vars = [b]\n", + "aux_vars = a, x, y\n", + "\n", + "# cost function constructed as before \n", + "cost_function = th.AutoDiffCostFunction(\n", + " optim_vars, quad_error_fn2, 100, aux_vars=aux_vars, name=\"quadratic_cost_fn\"\n", + ")\n", + "\n", + "# objective, optimizer and theseus layer constructed as before\n", + "objective = th.Objective()\n", + 
"objective.add(cost_function)\n", + "optimizer = th.GaussNewton(\n", + " objective, max_iterations=50, step_size=0.5,\n", + ")\n", + "theseus_optim = th.TheseusLayer(optimizer)\n", + "\n", + "# As before, 'a' is optimized by PyTorch backpropagation\n", + "# model_optimizer constructed the same way \n", + "a_tensor = torch.nn.Parameter(torch.rand(1, 1))\n", + "model_optimizer = torch.optim.Adam([a_tensor], lr=0.2)\n", + "\n", + "# The theseus_inputs dictionary is also constructed similarly to before,\n", + "# but with data matching the new shapes of the variables\n", + "def construct_theseus_layer_inputs():\n", + " theseus_inputs = {}\n", + " theseus_inputs.update({\n", + " \"x\": data_x,\n", + " \"y\": data_y,\n", + " \"b\": torch.ones((num_models, 1)),\n", + " \"a\": a_tensor,\n", + " })\n", + " return theseus_inputs\n", + "\n", + "# Run Theseus optimization and learning\n", + "best_model = optimize_and_learn_models_jointly(theseus_optim, model_optimizer)\n", + "print(f\" ---------------- Final Solutions -------------- \")\n", + "print(\" a value:\", best_model[0])\n", + "print(\" b values: \", best_model[1])\n", + "print(f\" ----------------------------------------------- \")" + ] + } + ], + "metadata": { + "interpreter": { + "hash": "cc5406e9a0deef8e8d80dfeae7f152b84172dd1229ee5c42b512f2c6ec6850e3" + }, + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.7.8" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/tutorials/03_custom_cost_functions.ipynb b/tutorials/03_custom_cost_functions.ipynb new file mode 100644 index 000000000..e061ba29b --- /dev/null +++ b/tutorials/03_custom_cost_functions.ipynb @@ -0,0 +1,202 @@ +{ + "cells": [ + { + "cell_type": "markdown", + 
"metadata": {}, + "source": [ + "

Creating Custom Cost Functions

\n", + "\n", + "In this tutorial, we show how to create a custom cost function that might be needed for an application. While we can always use the `AutoDiffCostFunction` by simply writing an error function, it is often more efficient for compute-intensive applications to derive a new `CostFunction` subclass and use closed-form Jacobians. \n", + "\n", + "We will show how to write a custom `VectorDifference` cost function in this tutorial. This cost function provides the difference between two `Vector`s as the error. \n", + "\n", + "Note: `VectorDifference` is a simplified version of the `VariableDifference` cost function already provided in the Theseus library, and shown in Tutorial 0. `VariableDifference` can be used on any LieGroup, while `VectorDifference` can only be used on Vectors." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "

Initialization

\n", + "\n", + "Any `CostFunction` subclass should be initialized with a `CostWeight` and all arguments needed to compute the cost function. In this example, we set up `__init__` function for `VectorDifference` to require as input the two `Vector`s whose difference we wish to compute: the `Vector` to be optimized, `var`, and the `Vector` that is the reference for comparison, `target`. \n", + "\n", + "In addition, the `__init__` function also needs to register the optimization variables and all the auxiliary variables. In this example, optimization variable `var` is registered with `register_optim_vars`. The other input necessary to evaluate the cost, `target` is registered with `register_aux_vars`. This is required for the nonlinear optimizers to work correctly: these functions register the optimization and auxiliary variables into internal lists, and then are easily used by the relevant `Objective` to add them, ensure no name collisions, and to update them with new values.\n", + "\n", + "The `CostWeight` is used to weight the errors and jacobians, and is required by every `CostFunction` sub-class (the error and jacobian weighting functions are inherited from the parent `CostFunction` class.)" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [], + "source": [ + "from typing import List, Optional, Tuple\n", + "import theseus as th\n", + "\n", + "class VectorDifference(th.CostFunction):\n", + " def __init__(\n", + " self,\n", + " cost_weight: th.CostWeight,\n", + " var: th.Vector,\n", + " target: th.Vector,\n", + " name: Optional[str] = None,\n", + " ):\n", + " super().__init__(cost_weight, name=name) \n", + "\n", + " # add checks to ensure the input arguments are of the same class and dof:\n", + " if not isinstance(var, target.__class__):\n", + " raise ValueError(\n", + " \"Variable for the VectorDifference inconsistent with the given target.\"\n", + " )\n", + " if not var.dof() == target.dof():\n", + " raise ValueError(\n", 
+ " \"Variable and target in the VectorDifference must have identical dof.\"\n", + " )\n", + "\n", + " self.var = var\n", + " self.target = target\n", + "\n", + " # register variable and target\n", + " self.register_optim_vars([\"var\"])\n", + " self.register_aux_vars([\"target\"])" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "

Implement abstract functions

\n", + "\n", + "Next, we need to implement the abstract functions of `CostFunction`: `dim`, `error`, `jacobians`, and `_copy_impl`:\n", + "- `dim`: returns the degrees of freedom (`dof`) of the error; in this case, this is the `dof` of the optimization variable `var`\n", + "- `error`: returns the difference of Vectors i.e. `var` - `target`\n", + "- `jacobian`: returns the Jacobian of the error with respect to the `var`\n", + "- `_copy_impl`: creates a deep copy of the internal class members\n", + "\n", + "We illustrate these below (including once again the `__init__` function from above, so the class is fully defined.)" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [], + "source": [ + "import torch \n", + "\n", + "class VectorDifference(th.CostFunction):\n", + " def __init__(\n", + " self,\n", + " cost_weight: th.CostWeight,\n", + " var: th.Vector,\n", + " target: th.Vector,\n", + " name: Optional[str] = None,\n", + " ):\n", + " super().__init__(cost_weight, name=name) \n", + " self.var = var\n", + " self.target = target\n", + " # to improve readability, we have skipped the data checks from code block above\n", + " self.register_optim_vars([\"var\"])\n", + " self.register_aux_vars([\"target\"])\n", + "\n", + " def error(self) -> torch.Tensor:\n", + " return (self.var - self.target).data\n", + "\n", + " def jacobians(self) -> Tuple[List[torch.Tensor], torch.Tensor]:\n", + " return [\n", + " # jacobian of error function wrt var is identity matrix I\n", + " torch.eye(self.dim(), dtype=self.var.dtype) \n", + " # repeat jacobian across each element in the batch\n", + " .repeat(self.var.shape[0], 1, 1) \n", + " # send to variable device\n", + " .to(self.var.device) \n", + " ], self.error()\n", + "\n", + " def dim(self) -> int:\n", + " return self.var.dof()\n", + "\n", + " def _copy_impl(self, new_name: Optional[str] = None) -> \"VectorDifference\":\n", + " return VectorDifference( # type: ignore\n", + " self.var.copy(), 
self.weight.copy(), self.target.copy(), name=new_name\n", + " )" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "

Usage

\n", + "\n", + "We show now that the `VectorDifference` cost function works as expected. \n", + "\n", + "For this, we create a set of `VectorDifference` cost functions each over a pair of `Vector`s a_i and b_i, and add them to an `Objective`. We then create the data for each `Vector` a_i and b_i of the `VectorDifference` cost functions, and `update` the `Objective` with it. The code snippet below shows that the `Objective` error is correctly computed.\n", + "\n", + "We use a `ScaleCostWeight` as the input `CostWeight` here: this is a scalar real-valued `CostWeight` used to weight the `CostFunction`; for simplicity we use a fixed value of 1. in this example." + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Sample error squared norm: 20.0\n" + ] + } + ], + "source": [ + "cost_weight = th.ScaleCostWeight(1.0)\n", + "\n", + "# construct cost functions and add to objective\n", + "objective = th.Objective()\n", + "num_test_fns = 10\n", + "for i in range(num_test_fns):\n", + " a = th.Vector(2, name=f\"a_{i}\")\n", + " b = th.Vector(2, name=f\"b_{i}\")\n", + " cost_fn = VectorDifference(cost_weight, a, b)\n", + " objective.add(cost_fn)\n", + " \n", + "# create data for adding to the objective\n", + "theseus_inputs = {}\n", + "for i in range(num_test_fns):\n", + " # each pair of var/target has a difference of [1, 1]\n", + " theseus_inputs.update({f\"a_{i}\": torch.ones((1,2)), f\"b_{i}\": 2 * torch.ones((1,2))})\n", + "\n", + "objective.update(theseus_inputs)\n", + "# sum of squares of errors [1, 1] for 10 cost fns: the result should be 20\n", + "error_sq = objective.error_squared_norm()\n", + "print(f\"Sample error squared norm: {error_sq.item()}\")" + ] + } + ], + "metadata": { + "interpreter": { + "hash": "d06c554396a5eba28728833f301e794fa84669cf40517768fd940f3df56e77b3" + }, + "kernelspec": { + "display_name": "Python 3.9.2 64-bit ('my_env': conda)", + 
"language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.8.8" + }, + "orig_nbformat": 4 + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/tutorials/04_motion_planning.ipynb b/tutorials/04_motion_planning.ipynb new file mode 100644 index 000000000..15869b98c --- /dev/null +++ b/tutorials/04_motion_planning.ipynb @@ -0,0 +1,765 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "c939a04e", + "metadata": {}, + "source": [ + "# Motion Planning Part 1: motion planning as nonlinear least squares optimization" + ] + }, + { + "cell_type": "markdown", + "id": "2b2d06e2", + "metadata": {}, + "source": [ + "In this tutorial, we will learn how to implement the [GPMP2](https://journals.sagepub.com/doi/pdf/10.1177/0278364918790369) (Mukadam et al, 2018) motion planning algorithm, for a 2D robot in a planar environment.\n", + "\n", + "The goal is to find the trajectory (pose and velocity) of the robot given a start and goal pose and some representation of the environment. This can be solved as an optimization problem where the variables to be optimized for are the 2D pose and 2D velocity of the robot along a trajectory of some total time steps (at some fixed time interval). In this example, we formulate the objective of the optimization with the following cost terms that are balanced by their respective weights:\n", + "* **Boundary condition**: the trajectory should begin at the start pose with zero velocity and end at the goal pose with zero velocity.\n", + "* **Collision avoidance**: the trajectory should avoid colliding with obstacles in the environment (we use a signed distance fields).\n", + "* **Smoothness**: the trajectory should be smooth (we use a zero acceleration prior)." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 1, + "id": "89832f6d", + "metadata": {}, + "outputs": [], + "source": [ + "import random\n", + "\n", + "import matplotlib as mpl\n", + "import numpy as np\n", + "import torch\n", + "import torch.utils.data\n", + "\n", + "import theseus as th\n", + "import theseus.utils.examples as theg\n", + "\n", + "%load_ext autoreload\n", + "%autoreload 2\n", + "\n", + "torch.set_default_dtype(torch.double)\n", + "\n", + "device = \"cuda:0\" if torch.cuda.is_available else \"cpu\"\n", + "seed = 0\n", + "torch.random.manual_seed(seed)\n", + "random.seed(seed)\n", + "np.random.seed(seed)\n", + "\n", + "mpl.rcParams[\"figure.facecolor\"] = \"white\"\n", + "mpl.rcParams[\"font.size\"] = 16" + ] + }, + { + "cell_type": "markdown", + "id": "5f234c78", + "metadata": {}, + "source": [ + "## 1. Loading and visualizing the trajectory data" + ] + }, + { + "cell_type": "markdown", + "id": "ff061123", + "metadata": {}, + "source": [ + "First, let's load some motion planning problems from a dataset of maps and trajectories generated using the code in [dgpmp2](https://github.com/mhmukadam/dgpmp2)." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 2, + "id": "d96aa120", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "data/motion_planning_2d/im_sdf/tarpit/0_im.png\n", + "data/motion_planning_2d/im_sdf/tarpit/0_sdf.npy\n", + "data/motion_planning_2d/opt_trajs_gpmp2/tarpit/env_0_prob_0.npz\n", + "data/motion_planning_2d/im_sdf/tarpit/1_im.png\n", + "data/motion_planning_2d/im_sdf/tarpit/1_sdf.npy\n", + "data/motion_planning_2d/opt_trajs_gpmp2/tarpit/env_1_prob_0.npz\n" + ] + } + ], + "source": [ + "dataset_dir = \"data/motion_planning_2d\"\n", + "dataset = theg.TrajectoryDataset(True, 2, dataset_dir, map_type=\"tarpit\")\n", + "data_loader = torch.utils.data.DataLoader(dataset, 2)\n", + "\n", + "batch = next(iter(data_loader))" + ] + }, + { + "cell_type": "markdown", + "id": "0a64c151", + "metadata": {}, + "source": [ + "The batch is a dictionary of strings to `torch.Tensor` containing the following keys:" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "id": "286f4b08", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "map_tensor : torch.Size([2, 128, 128])\n", + "sdf_origin : torch.Size([2, 2])\n", + "cell_size : torch.Size([2, 1])\n", + "sdf_data : torch.Size([2, 128, 128])\n", + "expert_trajectory : torch.Size([2, 4, 101])\n" + ] + } + ], + "source": [ + "for k, v in batch.items():\n", + " if k != \"file_id\":\n", + " print(f\"{k:20s}: {v.shape}\")" + ] + }, + { + "cell_type": "markdown", + "id": "f66ac05c", + "metadata": {}, + "source": [ + "Let's plot the maps and trajectories loaded. `th.eb.SignedDistanceField2D` is a signed distance field object, which includes a function to convert *x,y*-coordinates to map cells that we use here for plotting. For completeness, we show the expert trajectories loaded, although we won't use them in this example (we will do so in Part 2 of this tutorial)." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 4, + "id": "683848c9", + "metadata": { + "scrolled": false + }, + "outputs": [ + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAQ4AAAEECAYAAADZKtrDAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8vihELAAAACXBIWXMAAAsTAAALEwEAmpwYAABBk0lEQVR4nO2dd3iUVdqH7ynJTHoj1AAJIi2BAAIfESEKLuJqwoIgCKwFJbCuuqJmBUFEpCyKrqICGywsC4sIFoqLWABdUVeiIEIUpYReAgRC6rT3++PMmTchARKYSePc1zXXzLwtZyZ5f3nOc55i0DRNQ6FQKKqAsaYHoFAo6h5KOBQKRZVRwqFQKKqMEg6FQlFllHAoFIoqo4RDoVBUmRoTjoMHDzJkyBDCwsIIDQ1l8ODBHDhwoKaGo1AoqoChJuI4CgsLSUxMxGKxMH36dAwGA5MnT6awsJDt27cTFBRU3UNSKBRVwFwTP3ThwoXs3buXXbt20bp1awA6derEtddeyz/+8Q8ee+yxmhiWQqGoJDVicfTr14/i4mI2b95cZntycjIAX3zxRXUPSaFQVIEa8XHs3LmThISEctvj4+PJysqqgREpFIqqUCNTldOnTxMREVFue2RkJLm5uZc8v0GDBsTGxvpgZAqFojTZ2dmcPHmy3PYaEY7LISMjg4yMDACCgoLIzMys4REpFPWfbt26Vbi9RqYqERERFVoWF7JEANLS0sjMzCQzM5Po6GhfD1GhUFyEGhGO+Ph4du7cWW57VlYWHTp0qIERKRSKqlAjwpGamsq3337L3r17Pduys7PZvHkzqampNTEkhUJRBWpkObagoIDExEQCAgI8AWBPP/00586dY/v27QQHB1/0/G7duikfRw1gt9s5dOgQxcXFNT0UhZexWq3ExMTg5+dXZvuF7rUacY4GBQWxYcMGxo8fzx//+Ec0TaNfv368/PLLlxQNRc1x6NAhQkJCiI2NxWAw1PRwFF5C0zROnTrFoUOHiIuLq9Q5Nbaq0qJFC957772a+vGKy6C4uFiJRj3EYDAQFRVFTk5Opc9R2bGKKqFEo35S1d+rEg6F4iIsWrSII0eOVHnfxViwYAGLFy+u8nlnzpxh3rx5VT7PF9SZADBF7aPxnMYcLzjutes1CmrEsSeOee16V4rT6WTRokUkJCTQtGnTcvsvts/pdGIymSq87rhx4y5rPFI4HnzwwUqf43A4MJu9f5sri0Nx2XhTNCp7vSVLltCjRw86d+7M2LFjcTqdbNmyhU6dOlFcXExBQQHx8fHs2LGDTZs20adPH2677Tbatm3LuHHjcLlcAHzyySckJSXRtWtXhg4dSn5+PgCxsbE8+eSTdO3alWXLlpGZmcnIkSPp3LkzRUVFnnGsXLmy3L7S565YsYKFCxfSvXt3EhMTueOOOygsLARg6tSpzJkzB4A9e/YwYMAArrvuOnr37s0vv/wivovjxxk0aBCJiYkkJiby9ddfM2HCBPbs2UPnzp1JT09H0zTS09NJSEigY8eOLF++HIBNmzbRu3dvUlNT6dChA1OmTOHll1/2jH3SpEm88sorV/S7UhaHos7w888/s3z5cjZv3oyfnx8PPvggS5cu5e677yY1NZXJkydTVFTEqFGjSEhIYNOmTXz33XdkZWXRsmVLBgwYwPvvv8+NN97I9OnT+eyzzwgKCmL27Nm89NJLTJkyBYCoqCh++OEHAN544w3mzJlTLvR6yJAhvPbaa+X2lT731KlTjBkzBoDJkyfz5ptv8vDDD5e5TlpaGgsWLODaa6/lf//7Hw8++CAbNmzgkUceITk5mQ8++ACn00l+fj5/
+9vf2LFjB9u2bQPgvffeY9u2bfz444+cPHmS7t2706dPHwB++OEHduzYQVxcHNnZ2QwePJhHH30Ul8vFO++8w3fffXdFvwslHIo6w+eff873339P9+7dASgqKqJhw4YATJkyhe7du2O1Wpk7d67nnB49etCqVSsA7rrrLr766iusVitZWVn06tULAJvNRlJSkuecYcOGXfYYS5+7Y8cOJk+ezJkzZ8jPz+eWW24pc2x+fj5ff/01Q4cO9WwrKSkBYMOGDR4/iMlkIiwsrFyaxldffcVdd92FyWSiUaNGJCcns2XLFkJDQ+nRo4dnaTU2NpaoqCi2bt3K8ePH6dKlC1FRUZf9GUEJh6IOoWka99xzD7NmzSq379SpU+Tn52O32ykuLvZUkTt/tcBgMKBpGr/73e9YtmxZhT/nSirQlT733nvv5cMPPyQxMZFFixaxadOmMse6XC7Cw8M9FoQ3Of8zPPDAAyxatIhjx44xevToK76+8nEo6gz9+vVj5cqVnDhxAhBJkfv37wdg7NixPPfcc4wcOZInn3zSc853333Hvn37cLlcLF++nBtuuIGePXuyefNmdu/eDYhI5l9//bXCnxkSEsK5c+eqvA/g3LlzNGnSBLvdztKlS8vtDw0NJS4ujhUrVgBCGH/88UfPZ50/fz4gHK1nz54t9/N69+7N8uXLcTqd5OTk8OWXX9KjR48KxzJo0CA+/vhjtmzZUs7yuRyUcCjqDB06dGD69On079+fTp068bvf/Y6jR4+yePFi/Pz8GDFiBBMmTGDLli1s2LABgO7du/PQQw/Rvn174uLiGDRoENHR0SxatIi77rqLTp06kZSU5HFKns+9997LuHHjyjlHL7UP4LnnnuP//u//6NWrF+3atSuzT1pCS5cu5c033yQxMZH4+HhWrVoFwCuvvMLGjRvp2LEj1113HVlZWURFRdGrVy8SEhJIT09n0KBBdOrUicTERPr27cvzzz9P48aNK/wc/v7+3HTTTdx5550XXO2pCjWSq3KlqFyVmuHnn3+mffv2nve1fTl206ZNzJkzh7Vr13rtmt7g4YcfpmvXrtx3333V9jNdLpdntefaa6+t8Jjzf79Qy3JVFPWD2hRzUVd4+umn+d///sfUqVOr7WdmZWVx++23M2jQoAuKRlVRFoei0lT0H0lRf6iKxaF8HAqFosoo4VAoFFVGCYdCoagySjgUCkWVUcKhqPds2rSJ22+/HYDVq1fzt7/9rdrHMGvWLFq3bk3btm1Zv379RY995JFHan0lPLUcq7h8GgPeTJBtBFRyhVfTNDRNw2is2v++1NTUai+InZWVxTvvvMPOnTs5cuQIN998M7/++muFgViZmZmVakpW0yiLQ3H5eDer/pLXy87Opm3bttx9990kJCRw8OBB/vSnP9GtWzfi4+N55plnPMd+/PHHtGvXjq5du/L+++97ti9atIiHHnoIEJGfK1eu9OyT/+WPHj1Knz596Ny5MwkJCfz3v/+9oo+1atUqhg8fjsViIS4ujtatW1eYnep0OklPT+f555+/op9XHSiLQ1Gn+O233/jnP/9Jz549AZgxYwaRkZE4nU769evH9u3badOmDWPGjGHDhg20bt26ytmu//73v7nllluYNGkSTqfTU0ejNOPHj2fjxo3ltg8fPpwJEyaU2Xb48GHPeAFiYmI4fPhwuXNfe+01UlNTadKkSZXGWxMo4VDUKVq2bFnmJnz33XfJyMjA4XBw9OhRsrKycLlcxMXFeaIkR40a5WkfWhm6d+/O6NGjsdvt/OEPf6Bz587ljvn73/9+xZ+lNEeOHGHFihXlMmhrK2qqoqhTlE4X37dvH3PmzOHzzz9n+/bt3HbbbVXq+WI2mz0VwVwuFzabDYA+ffrw5Zdf0qxZM+69994K64OOHz+ezp07l3tU5Hht1qwZBw8e9Lw/dOgQzZo1K3PM1q1b2b17N61btyY2NpbCwkJat25d6c9S3SiLQ1FnycvLIygo
iLCwMI4fP866deu48cYbadeuHdnZ2ezZs4drrrnmgnU3YmNj+f7777nzzjtZvXo1drsdgP379xMTE8OYMWMoKSnhhx9+4O677y5zblUsjtTUVEaMGMFjjz3GkSNH+O2338qlv992220cO6Z7hoODgz1p/7URJRyKOktiYiJdunShXbt2NG/e3FPRy2q1kpGRwW233UZgYCC9e/eusG7GmDFjGDhwIImJiQwYMMBjzWzatIkXXngBPz8/goODL6sieWni4+O588476dChA2azmddff92zovL73/+eN954o8KCx7UZleSmqDTlkqBqcDlW4X1UWr2ielA3+VWLco4qFIoqo4RDoVBUGSUciipRB11iikpQ1d+rEg5FpbFarZw6dUqJRz1D0zROnTqF1Wqt9DledY6uXLnS0zbvxIkTtGjRgsGDB/PUU08REhICiHwD2SjmfHJzcwkPD/fmkBReJCYmhkOHDpGTk1PTQ1F4GavVSkxMTKWP96pwzJkzhxYtWjBz5kxiYmLYunUrU6dOZePGjXz99ddlMhknTpxYLktRiouiduLn53dB0a8KR44cYerUqRXmgHiDli1bMmXKFCwWi0+ur/CycKxZs4bo6GjP++TkZCIjI7nnnnvYtGkTffv29exr1apVmZwDxdVDXl4e7777LmfPnvXJ9RMTE3nqqaeUcPgQr/o4SouGRPb5rCgbUKFQ1E187hz94osvAMpFpE2cOBGz2UxYWBipqan89NNPvh6KQqHwEj6NHD18+DBTpkzh5ptvplu3bgBYLBbGjh1L//79iY6O5pdffmHmzJlcf/31fPfdd6pvh0JRB/CZcOTn5zNw4EDMZjNvv/22Z3uTJk1YsGCB533v3r0ZMGAA8fHxzJgxgyVLllR4vYyMDE9NBeXVVyhqFp9MVYqKikhJSWHv3r2sX7/+kss8zZs354YbbmDLli0XPCYtLY3MzEwyMzMr9KUoFIrqw+sWh91uZ8iQIWRmZvLpp5/SsWPHSp8rO3grFIrajVeFw+VyMXLkSDZs2MDatWsrvdx64MABvvrqK/7whz94cziKy8Rms7Fv3z6cTqdPru/LayuqB68Kx5///GdWrFjBpEmTCAoK4ttvv/Xsi4mJISYmhscffxyXy0VSUhLR0dHs2rWLWbNmYTQamTRpkjeHo7hMjh49yq233srJkyd9cn2Xy0VBQYFPrq2oJjQv0rJlSw2o8PHMM89omqZpb775ptatWzctPDxcM5vNWqNGjbS77rpL++WXXyr9c6677jpvDltxHvv27dMaNGhwwd9lbX8kJiZq+fn5Nf011gsudK951eLIzs6+5DGjR49m9OjR3vyxCoWimlHZsQqFosoo4VAoFFVGCYdCoagyqlhxNeFyufjf//7HmTNnfHJ9o9FIz549CQsL88n1FYrSKOGoJhwOB+np6XzzzTc+uX5AQAD//e9/6dKli0+ur1CURglHNeJyuTwtB31xbU2V9FNUE8rHoVAoqowSDoVCUWXUVEWhqGmcgJzBGgFTDY6lkijhUChqAheQB2QDpxDiAeCH6MkbAwQDtTRhXAmHQlHdnAJ2AEcBO0IsQFgbRuAMsBfRhLsNEFr9Q7wUSjgU9Y5aXdflELAJYWE4ERZF6YU2k/thR1gjuUAXoEF1DvLSKOFQVDtNmzZl9uzZBAYG+uT6YWFhtbM1wlHgY8Dhfu9Ez+k1IO5GU6mHH8L6+B7oCdSi2D4lHIpqJyQkhNtvv/3q6tpnBz5BCIEZXTRAiIbRfYwZ3UHqQrdMfgKup9asgyrhUCiqg9+AXQh/hRNhdZw/ozK59xnR/R5SXA4D56g1VocSDoWiOvgMIRZ2hBg4EAIhhcGIbmFIa0P6PjT36/1Ap+ob8sWoJYaPQlGPyUZMNfyAEqAIISAlgE28zrZn47K5xPvSD3mMC9iHvmxbwyjhUCh8zQ8IsXAghMBe9uG0O7nBdAPXGK7hkO2Q2C6tE5v7tQMoRAmHQnFVUAT8
iLjTzrci3NbGOtZx2HgYEyaaOZt5tnsEQ4pIMfrUpoZRPg6FwpfsRkSIOtCnHKWdogaYFzQPgLTiNAwOQ9l/5wb3uRoQQK35V6+EQ1EOf39/OnXq5LOiQ7GxsZhMdSAh40pxANvR77Ji9BvfLR6/mX9jnWUdFs3C6MLRejyHEd2BakdMU1qgr7bUMEo4FOVo3Lgxa9eu9Vl9D6PRWDsDtLzNQeA4YAGsiOXUoLKHzA2dC8DIwpE0sDUQwiADxPwQloYNKABqUT92JRyKchiNRgICAmp6GHUbFyIfxYVYXrUgwsft7v0GyPXL5e0g0ZD9L2f/ootE6dwVOb2xAtdU2+gviRKOasRoNPosj8KX11ZcBieAY+7XRsAf4aMocD8bYEHQAgqMBfyu8Hd0snXSpykyOMzlfn0cGIYQj1qCEo5qwmw2M3v2bHJzc31yfZPJRKtWrXxybUUVcSF8G3b0IC8/IASxonIOigOLeSX8FQDST6cLgTChPzvd5+chsmSTq/kzXAIlHNWE0WikV69eNT0MRXVwCmFtSANQOkT9ECHnefB20NscNx+nS1EXbj53c9niPS7EMq4LITa/cz/XIpRwKBTeRAN2InwTIG5+DeHjcCez2Y12ZjeZDcDEIxMx5BuEcBjRpyvhCLFoDPSozg9QOZRwKBTe5AQiIc2EbjWcF3vxr7B/sd9vP+3s7bjDeIdIXJNFfKRoBCH8Il2AiOoafOVRwqFQeAsXIgNWWhmSUvU27EY7M6wzAJhUNAljiFH4M2TshhUhGhZE6cDO1Tb6KqGEo5pwOBy8/PLL7N2712vX7NixI+PGjVOrKbWF4+grKTIWw4AejwH8y/Av9hr30sbVhruMd4l9gQixkfEeVoS1cS0QWW2jrxJeFY5NmzZx0003ldseFhZWJgoxNzeX9PR0PvzwQ4qKikhKSuLvf/87HTt29OZwahUul4v333/fq53cUlJSGDt2rBKO2oADYW3IFHgNsSoil1Q1sBltPGd+DoApjimYzCYhEBq6WAS4n6W1UUtCzM/HJxbH3Llz6d69u/5DzPqP0TSNlJQUsrOzefXVV4mIiGDWrFncdNNNbNu2jZiYGF8MSaHwLceAk5SdpkifhTuQ6w3HG2STTQetA8P9hwsHqiwX6O9+lvkoLYGoav4MVcAnwtG+fXt69uxZ4b7Vq1ezefNmNmzY4LFOkpKSiIuL4/nnn2fu3Lm+GJJC4TtKgF/cr2X8ha3UexsUuAp4DmFtPGd4DpPTJKYoDsQUxYiwMiyI6UsCtdbagBoY2urVq2natGmZKU1YWBgpKSmsWrWquoejUFwZGnAAEU4uK3iBHvlpBCzwiukVjnGMbnRjkP8gIRAgLAwz+hTFhEhmq2VVzc/HJ8IxcuRITCYTUVFRjBgxggMHDnj27dy5k4SEhHLnxMfHc+DAAfLz830xJIXCNxQiKnPJLFYZiyGdogY46TjJbIeI2/ib9W8YnAaxr/RDCkcQwtqo5W4rr05VwsLCePzxx0lOTiY0NJStW7cyc+ZMkpKS2Lp1Kw0bNuT06dPExsaWOzcyUriPc3NzCQ4OLrc/IyODjIwMAHJycrw5bIXi8nAh6m3IeA3cr23o1cnNMK1kGnnkMcAwgH70EyLhQDyDcIxKX0cLamXcxvl4VTi6dOlCly5dPO+Tk5Pp06cPPXr0YO7cuUyfPv2yr52WlkZaWhoA3bp1u+KxKhRXzGlEr5RSKyeY0YvvOODXgl+Zb5+PAQOzA2frTZgCEUJhdr82I6Yv11KrfRsSn8dxdO3alTZt2rBlyxYAIiIiKkz0On36tGe/4urg3LlznDp1ymvXMxgMNG7cuHpqfTgQ1oaz1KMYMV2RDlIjPOF6AgcO7jfdTydTJ2FllG776M6UxQDEIaJG6wDVFgAmYw3i4+P55JNPyu3PysqiRYsWFU5TFPWTNWvW8Oijj3rtelarldWrV9O5c2evXfOCHEYks0lLQ/ZCMSJWWczw6blPWWNf
QzDBTA+frtcSlQFhcjXFihCQ1tR634bE58KRmZnJrl27GDJkCACpqam8/fbbfPHFFyQni1zhvLw81qxZw4gRI3w9HEUtoqioyKv+KqvVisPhuPSBV0ohoseJnJLYENYG+rPdYefRkkcBeMr6FI1tjYVAyOVXKRwy+S2OctXBajNeFY6RI0cSFxdH165dCQ8PZ+vWrcyaNYtmzZrxyCOPAEI4kpKSGDVqFC+88IInAEzTNP761796czgKhfdxITrJ51M2A9YfIRqBQAnMK55HljOLawzXMD5kvG6VBKNHisoesaFALHXG2gAvC0dCQgLLli3j1VdfpbCwkMaNGzN48GCeffZZGjQQC9NGo5G1a9fyxBNP8OCDD1JcXExSUhIbN26kefPm3hyOQuF9coAj7tfSr2FATE9cQAEcMx5jSsEUAP4e8nesWIWwSKGR0xr3ci1t0FdY6gheFY6JEycyceLESx4XGRnJW2+9xVtvveXNH69Q+JYShENUBnrJ0HKZn+IO5krPSSdPy+M2/9tICUzRHaaB7uNldKgZkcTWpBo/g5dQ2bEKRWXQEH6NAvSm0TZ0Z6gJyION2kaWlCzBipW5gXPF/hD38dLHISNETUA76uRdWAdWjBWKWsBpxEoK6MutIMTDvZxaElrCuLPjAHgq5ClahbUSx5a4j7EjRMMfcec1p9amzV+KOqh1CkU1Y0NMUUoQFoOcppjc24xAEczKn8Wvzl9pZ27HX/3/KvbJuI1A93n+iLvOinCI1tF/3XV02ApFNSGnKGcRVoOGEAQnYlnWLF5nWbKYWTATgH+E/QNLhEUPI5ch5dK3AWL5NbQaP4eXUcKhUFyM04iObC6E5SHLAEoBcIpu8/cfvR87dtJC0+jj30fkrMiGSmb3s7Q2IoBm1fw5vIyaqigUF6IY2ON+7cCz3OrpuGYFSmCuYy7f2r+lqbEps4NnC4FwIgK6pFPUhb6Sci3imDqMEo5qJCYmhmuu8V4fv8aNG3vtWorzcCHS5fMQIiBvfLkaYgeK4DfHb0w6NQmAf0T/g3BzeNmer1JEAtznNabOOkRLo4SjmvDz82PhwoVeDYn29/dX9UZ9xTFEqwMj+hJsPkJASoBAcBY5ue/EfRRpRYwKHMXtAbfrqyhB7nNkeLmsuXEN9cJBoISjmjAYDISFhdX0MBSV4RzC2nCgL7vKMn/ueA3y4cX8F9lcspkmpibMjZyrJ6zZ0VPm5dKrBrSizkWIXgglHApFaWwIv0YJ4mYvQl9BkWUBg2H72e08nfs0AG82fJMIa4Q41oDe+1WKhhERHdqoGj+Hj1HCofDgcrl46aWX2LFjh9eu2a5dO9LT0zGZTJc+uKZxIeqHniz1Xi6j5iOmG3lQXFLMyJyR2LAxLngctxpvFceFIaySEPRgLyPCymhJ2f6wdRwlHAoPmqbx6aefVlgv5XK58cYbeeKJJ7x2PZ+Sg6joZUAvB1iAnj7vFocJByaww7aDNn5tmBMzRxyXj/BryGpgoE9LYql1TaOvFCUcCgUIv8VexI0vi+3YEGnwRQjrIQ8+sn/EK3mvYMbM0oilBBUGiUAuGeglfdUW97WiESsp9Yx64N9VKK6QEoRo2NyvixAxHOfQywEa4EjwEe49eS8AMxrMoFujbkJgiikb4yHT5gMRDtF6+O+5Hn4khaIKOBArKKfRrQUZryGtDxM4zjgYcXoEJ10nuTnwZp7wf0IIhhXhCA1FCI+s6CWretXTSphKOBRXLxoi4/U0eu3QIvdzHkIE3LEYz9qe5YviL2hsbMySFkswmo3COpHXcVslnojQRtT6pkpXgpqqKK5ONIQz9DB6DoosNiy7q7kjRtcdW8f0k9MxYmRpo6U0KmokhEZOSYLQRUOWB4yjXv9brscfTaG4CHmIKYodYTnITNcC9I5sTthv3s+oU6MAmBY1jb4N+pZtb2BHryMqw8pjEVOYeowSDkWNER4eTtu2bb12PYvFgtVaiTs2H1FfowBxo8uI
Txdi2TQfMIgq7IOODuK06zS/D/49E0Mn6kV5bAi/hqzHgfu5GfV6iiJRwqGoMVJSUujXr5/XrmcwGAgKukSPgRKEpSFXS2zoEZ8FCOvBAJpRY9zZcWy1baWVuRVLYpZgNBnFcRb0tHkrej5KGBBDnapWfrko4VDUGP7+/vj7V2N+uR0hGmcR1oWsqyFXQ+wIQdHg5WMvs/jMYgINgXzQ9AMitAh9lcXsPs+JXqk8CLH06sdVgRKOWoymaZc+qIpctdm0TvRwctkUugBxB8hG0e7eKJ+UfMITp0W066Imi+gU0kmIRrD7OBNlCw7L+qH1dOm1IpRw1FJsNhvTp09nz549lz64kvTo0YNHHnnk6hMPJ6KK1wnEaogBcbPLFrNBiGAvF/xc9DNDDw7FhYtJUZMYGjpUDzmXEaSl4zXMCNFoWJ0fqOZRwlFLcblcrF+/nu+++85r1ywpKfF01Ltq0BC1NY6hR4aWrjzurq2BP+TYcrj9yO3kufK4I/gOpkVP01s1ynT5QPez9GtEAk25KvwapVFxHIr6iwuRtHYQYS2Y0OMtzOhJaIVQZC9i4IGB7LXv5Trrdfyz2T8xWoy601Q2XXK5zzEhnKFxXDV+jdIoi0NRPzk/wEt2ipd+DZnNGgDOQid3H72bb4q/obm5OWti1hCkuVdnwhCiIyuUS4vDhEiVryeFeaqKEg5F/UND+DP2owdzgb4aYsbj19DMGuNzxrMybyWhxlD+0/w/NAlpInwaheg1NGSkqAshInFAeHV9oNqHEg5F/UJDrJwcQPgvZCUvKQLuIsMEAhaYnTObV0+/ir/Bnw9jPiTBP0GPBpWl/yzoOShm9DyUq8yvURrl41DUH0pbGrJ7vPRJyHBw2VmtABaeWcjEExMxYGBx08XcFHWTsERseOI5PB3mpZM0inpXzety8Kpw3HjjjRgMhgofAwYMACA7O/uCx5w5c8abw1FcTbgQPo396HVCpV+jECEk0skZDCvOrmDcYdHn9bWmrzEsaJg4zg8RSu4vjiMQITbyfQuuetEAL09V5s2bR15eXplt33zzDY899hipqalltk+cOLHctpCQelZfTVE9aMBx4BBCLGTzJFkwWGa+2oBzsLZkLSOOjMCFi2kNpvFgowd1h2kx+pRGrpZIS6UV9T55rbJ4VTg6dOhQbtvChQvx9/dn+PDhZba3atWKnj17evPHK65GXIgYjUPoN72s+VmMsKkLEVOQQPjk+CcMOTQEBw7SG6YzOWKyEIwI9MjPQITIBKI3Yop1X0MB+Ng5WlhYyIoVK0hJSSEysh60r1LULlwIwTiBvuQKQjAC0UPDXUAefO78nIGHBlKilfDn8D8zu/FsDAEGMYUpQe/xail1/QCEaERU02eqI/jUOfrBBx9w7tw57rnnnnL7Jk6ciNlsJiwsjNTUVH766SdfDkVR37AjAruOUza13YC44QsRvo5CwAKf2z8nJTuFYq2YsQ3GMrfJXAyFBjGdCURYFSEIqyMAfSWlGfWiZaO38anFsXjxYho2bMitt97q2WaxWBg7diz9+/cnOjqaX375hZkzZ3L99dfz3Xff0b59e18OSXERjEYjkyZN4oEHHvDaNaOjozEavfz/yQFkI0r+lSAsDNnSIAA969UAnIVP8j5h4IGBFGvF3B96P/OazsMYYBTnOdFL/0nfiAshJs0QOShX8bLrhfCZcBw5coTPPvuMv/zlL5jN+o9p0qQJCxYs8Lzv3bs3AwYMID4+nhkzZrBkyZIKr5eRkUFGRgYAOTk5vhr2VY3BYKBPnz41PYyLU4yI0chFz2iVwVmyOrms6hUMa51rGbJ/CCVaCWnRacxvNB9jgVGP0UAc58lZkY2UohHxGipgoUJ89rUsWbIEl8tV4TTlfJo3b84NN9zAli1bLnhMWloamZmZZGZmEh0d7c2hKuoKhYj2jKfQy/3luR+yWLBs7myHFUdWMCh7ECVaCQ+GPcj8JvMxBhvFlEROaaRvRDZSsiKC
u5qjll0vgs+E45///CeJiYkkJiZW+pyrLt1bUXnOIsr95SFucNnCwOp+LnFvzwMM8FbJWww/NByH5iC9cTqvxb6GsdBdmdxfHEMoYqVExmv4IwK8VKzGJfGJcGRmZpKVlVUpawPgwIEDfPXVV/To0cMXw1HUZTSEL+M3RM0MmUNS6H5fiPgrlrEWDnhp/0vcf+B+XLh4NvJZZke7V09CEFMUaWnY0at6WRCWRktUIkYl8MlXtHjxYsxmMyNHjiy37/HHH8flcpGUlER0dDS7du1i1qxZHsecQuFBpsUfQ3dcyopbfu5HqemGFqIxMW8is4/PBmBui7k8HPWwnhFrQgiRLDIchB7oFYGYnijRqBRe/5rsdjvLli1jwIABNGxYvixSfHw88+fPZ9GiReTn5xMVFUXfvn155plnvFrxWlHHsQNH0JdbQbcuzrn3g6ctgb3YTtqvaSw6uwgzZt5u9DajwkfpMRlylUWWCXSgWxqRiFgNJRqVxutflZ+f30VXPUaPHs3o0aO9/WMV9YlixHJrrvu9LPcni/DI1RB33dD80HyGHh/Kx2c/JtAQyMo2K7k1+FYREWp2n+fOUcGMmLLI1ZMIhE9DiUaVUF+XovagIayJ/eiNnEEXjnx0v4QGWOFo/lFu33E7P5T8QANzA9Y2Xsv/Wf9P77Am+6UUovd5tblfR6JE4zJRX5midlC6Ylc+YkpS4H52oGeoylyUAtjpv5PfH/g9B2wHuMbvGj7u8DGt/VrrtTek4zPY/T4Y3dKIQvk0rgD1tSlqHju6E1RWIDegL4nKpVan+70VPj39KUN2DyHPlUfPoJ6sbriaaC1aX6INRC9KbEevrRGIEI0Y1JLrFaCEQ1GzlCD8GWcpW4Fchn7LBtB+iOlLHiywLeChAw/hxMmQkCEsbr+YAFeAHjbuRLy2IqwLK0I4TEBjRESoEo0rQglHLcXPz4/Zs2eTm5t76YMrSdOmTb12rStGQwRr7adsD1cnYqXDifB3OPGkyTssDh4/8DhzT88FYGKTiUwPno6xxKj7MgIpm3NiQE9kk6KhwsivGCUctRSTycSNN95Y08PwDU70uqDyvfRrFLm3yYhQdyGe3NO5DDs1jE/zPsUPPzKaZHBvy3uFSEgrQz6bEOIjLRUDwp8RhUpY8xJKOBTVix1hZeSiJ6PJwCwzekNnmbBmgJ+1n0k9nMpu226izdF80OYDerl6if0WhGURhB6GLjusyQbRLRFtDpRoeA0lHIrqQUNYFYfwtFv0rJiAHpgly/W5LY01R9Yw8vhIzrnOkWhJZFXzVbQMbSmOleHjMpir1Hme5dgWiLgNhVdRsz2F73EhqnT9il5D4yzCtyFjNKyIv0Z3cWGX3cXUE1NJPZrKOdc57oy8k80Jm2lpdouG7HFSuviODA6TRXliUaLhI5TFofAtJQgr4yT6vymZZ2JCz3bV8AR1nfE7wx9/+SNrC9ZiwMDM6Jk82ehJDH4GTz8UT68Um/s16NOThojlVhlhqvA6SjgUvkFGgWYjxEPWAy1y73MX2vEEdTmBPNhu287gPYPZU7KHCGMEy9os45aQW4RIyHgMB8KSMKInqsnryBgN9ZftU9TXq/A+TkSC2gmEAxP0Kl0m96MYMVWRqyFWWFy0mHGHxlGkFdE5sDPvNX6PVpZW+uqKO23eE9Ql/Rtm97VjEJW71ATc5yjhUHiXIkTY+En0ICtZYasYvQI5iOmKEYpzi3nk8CMszFkIwL1h9zKv7Tw9qMuOEBgXwhdiQYiIjNMIQIhGJGrlpJpQwqHwDi7EEut+9LT1PPTqXPKGl3EX7lJ/v7l+Y+jhofxY/CMWg4XXY19ntP9oDDaDvtQqa4LKFRgjer3RYEQDaClGimpBCYfiypEO0DPoYd+ycI4/wsqQAV4uPNOOdw++ywM5D3DOdY5r/K5hZfOVdG7YWV81KR3U5YcQElnER0P4M5qh19xQVBtKOBSXj4ZYVi3dGd6AHkJ+DmERyIAsgHwo
0ooYf3Q8/zjxDwCGRg7ljbg3CC0IFdeQlb1CEFMfGTauoU9PGgFNUTknNYQSDsXl4UBktB5FFwzZtNmIsCrM6FMMDQiELGMWw38ezk+2n7AYLLzU8CX+1PBPYqlVNkKSiW6lO8w7EdMdP/QmScoJWmMo4VBUDRkBegBhbchIT2lVuJdVPc5LC2AC7ZTGwtyFPHr4UYpcRbTxa8PydsvpbO2sx2NIx2YI+tRElg30R/gxWqD3eFXUGEo4FJXHgVgtOeh+76RsXIa0NCwIEXD7O05bTzPm5BjeP/s+APdF38fciLkEG4P1hLQA9B4n0oeBe7sRsczaDBXUVUtQwqGoHOcQgpGLXhSndBq8XG4t1UWNINh4aCN/zPkjh+2HCTGGsKDRAka0HCFESLYocCKEJhA91kO+tiAiQRui/Bm1CCUciosjfRnH0StwGdEtggL0aYYsJmyDkrwSJp+dzItHX0RDIykoiaWtlxJXEqdntfohBKbQfW0nerMk2aIxDtHOQFGrUMKhqBgNIQr70RshGdyvjYibPxg958SOmLaYYQc7GLV3FD/afsSEickNJjM5ejLmAHPZgj0yhFzGYzjxtG+kAWLVJKC6PrCiKijhUJTHgaj/eQQhBib3sxQJP/f7QnTrwQKuIhcv73+ZiScmYtNstDK3Ysm1S0gKSRKCI2tvGNAtDdnDtXSflBigCWrVpBajhEOhI8v5yRUTf/SaGWaESEgBkTUvXMA5yA7I5t5D9/JF/hcAjIkew0sRLxFsChbXkJGj0tKQuSsW9z4zwrpohiq6UwdQwqEQ2BB+jMOltsmCO4Xo2acyTqNIPGt+Gm+ee5Pxe8aT78qnkbkRC6MXktIiRZ/u2BCCIQVE+jJkWT8zEI4o76eiQOsESjiudlwI6yIbEZ8BHlHw9Ff1Q196dSLiLILg8OnDjMkdw7q8dQAMCR/C/FbzaVDcQIhNIHqrgmL0XBPZ80T6L5ojfBpq1aTOoITjaqYQscR60v1eFvqV+SDS4pCdzwKBU6DlaywuXMyjBx/ljOsMEaYIXmv2Gnf53yUiQGXEaGkHqJWyEaAO9NJ+QaipSR1DCcfViKwyvh+9poUs21eC3i5RBnQ5EFZJOBz2P8zYfWP5qPAjAG4LvI2MVhk0DWoqLBZ5ruzRKq2X0vVEXYgVkyaltinqFEo4riakz+EAIpPVhl5UR+aTSN+D2+kp64FqxRpvHXqLx048Rp4zjzBjGK+0fIW7Q+7GUOQu6SdT242lrinricq4jdIOULVqUmdRwnG1YEcsrx5FWAVyOiHT3EEIiWzsHIpYMs2HbL9s0o6k8Wn+pwCkhKWwIHwBTcOblk1KkwFggei5JtIB6odwgDZDxWbUAyql+YcOHeLhhx8mKSmJwMBADAYD2dnZ5Y4rLi4mPT2dJk2aEBAQQFJSEl9++WW541wuF7NmzSI2Nhar1UpiYiLvvffeFX8YRQW4ENOS7cA+9CjPYoQlYEdYIbJrmvRDFIDT5GTu2bkk/JzAp/mfEmWKYmmTpaxqs4qmkU31cyyIaUkAet6KTIWXqzEtgVYo0agnVEo4du/ezbvvvktERAS9e/e+4HH3338/CxcuZNq0aaxdu5YmTZpwyy23sG3btjLHPf3000ydOpWHHnqIdevW0bNnT4YOHcp//vOfK/owilLIYsG7gCz0YKtihJhIsbAjrAGze5sdCIAdhTvo9XMv/nLiLxRoBQwLG0ZWfBYjgkdgsLurcwW5f5YstmNGWCpW97M/wsrogFg1UVOTeoNB0zTtUge5XC6MRvFbf+ONNxgzZgz79u0jNjbWc8yPP/5I586deeutt7jvvvsAcDgcxMfH07ZtW1avXg3AiRMnaN68ORMmTODZZ5/1nN+vXz9ycnLYvn37JQfdrVs3MjMzq/RBrypk9/fD6GnppYvsWNzHWBBJa7LmRSEUa8VMz53O7BOzceCgqV9T5jWax8CggUIMpO8jxP06FF18HOjp9NKXoZLT6jQX
utcq9T9AisbFWL16NX5+fgwbNsyzzWw2M3z4cNavX09JifgLXr9+PTabjVGjRpU5f9SoUfz000/s27evMkNSVIQGnEJMS/YgbmgbepFgudwqa2oUoU8vimBj0UY67evEjBMzcOBgXPg4sq7JYmCTgXr2q+yQJgXCXuq6gQgrowHQHtHkWYlGvcRrxuPOnTuJi4sjMLBs1dj4+HhsNhu7d+/2HGexWGjdunW54wCysrK8NaSri3zgZ+BH9LyQQnRLQwZ05aPX8HQXDT7pOMl9R++j7/6+/Gb7jfb+7fmq9VfMbzafMGeYuL7MUJXh4rJbWiBihcQfMUVpClyLis2o53htVeX06dNERESU2x4ZGenZL5/Dw8MxGAwXPU5RSWyIQsGH0UO7QU97Bz27NRB9+VUDDY1FZxeRfiydU65TWAwWJkdP5q8Rf8Xf6C9ufidCgAIoGwUq63E4EKIRgqrOdRVRZ5ZjMzIyyMjIACAnJ6eGR1MLcCIaHu1Fj5mQN7Is6luI3gBJCoDb/7Dz7E4ePP4gXxaLVa++AX2Z33Q+bQLa6LkoToR4yPobTvQoUlm5y4DeCKnO/DUprhSvTVUiIiLIzc0tt11aENKiiIiI4MyZM5zvkz3/uPNJS0sjMzOTzMxMoqOjvTXsuoeGcGhuB3ai55e4EDe77GfiQM8JKcITr1FgK2DCwQl03t+ZL4u/JNoUzb+a/ovP4j6jjV8b3YKQDlR5nQDECkkAejm/EHRfhhKNqwqvCUd8fDz79u2jsLCwzPasrCz8/f09Po34+HhKSkrYs2dPueMAOnTo4K0h1T8KgV+ArQhrQ1oBMgM1HyEQoBfWcUeDavkaH+R+QPvD7ZmdOxsnTsaGjWVXy12MihiFQTPoLRZL+zCkSMjery73+2uANujtDxRXFV4TjpSUFOx2OytWrPBsczgcLF++nP79+2OxiHzpAQMG4Ofnx9KlS8ucv2TJEhISEoiLi/PWkOoPNkT26hbE1ER2QpM1PqVgGNBXUWQUZz7sLtnNbWduY/ChwRx0HKSLfxe+af4NC5ovIMI/Qm/kbEYIgawhKnuZyLgMP4TzMwHR10StmFy1VNrAXLlyJQDff/89AOvWrSM6Opro6GiSk5Pp0qULw4YN49FHH8VutxMXF8f8+fPZt29fGZFo2LAhjz32GLNmzSIkJISuXbuyfPlyNmzY4In1ULhxICyL3QgHp5x6yKVVmVAmjy1ArHBYgDwoCC5gVt4sXjj0AjZshBnCmN5wOn+K/BOmElPZIsEy+EtGe5pLPUvxaIZKf1cAlQwAA8qtgkiSk5PZtGkTAEVFRUyaNIl///vfnDlzhsTERGbPns2NN95Y5hyn08msWbNYuHAhx44do23btkyZMoUhQ4ZUatD1PgBM9mH9DRGXIS0C6fyUVbhk4FUeen9VF2gBGitPreTx3Mc56BS9DO4Ju4fno56nobNh2aCtIvTphryG2f2QVks0YsVEFdm56rjQvVZp4ahN1FvhkKX79iIS0mTvVLmvGPHf3oa4ic+hN3I2iH07CnfwyJlH2Fi8EYAuli68Fvka1/tfL6wRudKiIQTE4b5GEXpymhSOQPQq48qPcVVyoXtN+cJrC4WIJLRshECAsDxkvQzZ9V02LtIQN7N76TWXXJ45/Qzzzs3DiZNIYyQzwmYwJmwMphCTqKch+7KWrsilua8dhl4a0IQeLq4aICkqQAlHTVOCsDCyEU5OE/rKhoYeUyHL98kb3gLYwGFysLBgIU+feZpT2imMGPlzyJ+ZFjGNyMBIPYNVWhMyPsMfEaPhQrcynOi1P0Oq4bMr6ixKOGoKO6Js3y8Ia0D6L1wIwZBTk9K+jBLEDe8Srz/ncx499Cg7nDsAuNF6I6+Ev0KnoE76ykogevk+WT9UXtNB2TJ+LVFZrIpKoYSjunEiKnDtQtTJkDkkoDtBpaUhK3Q50HuSFMGvxl9JP5HOartYhYo1x/JC+Avc4X+HqPlpQG/YLJPS7OhT
Hz+E30K2OGiJKOOnnJ+KSqKEo7qQrRS3u58lMohLxmHIxkRyalIqqey0/TTTiqbxevHrOHAQbAjmqbCnGG8ajzXUqlsrdvQkM9laUYqJbLFoQkxHYt3PyspQVAElHL7GiVghyUR0R5MrIFIwpHVhR1/tAD3fxAY2k435Z+bzrO1ZcrVcDBgYHTia6f7TaRLRRG/ebEQIhnwvu8Bb0H/TsgZoC4TzU/0FKC4D9WfjK+SU5BvEaom0KGQLRBkaLqcOpWtluAO9NKPG+4XvM8Exgd0uUZagr7kvLwa9SOfgzuL8QvRuaEUIwZC5JvJ6UqjMiNUSFZOhuEKUcHgbJyL57CPEaom8kaU1YETvXWJECEcgenKZuwTfN8XfkE46m52bAWhrbMsL1he4PeR2DEaDcH6Gojdwls5PC7o1IwXEibAuWqJiMhReQQmHt3AhIj0/RoSIa+gd2IsRId1n0NsghqC3U7Th8WPscuxiUtEk3nOJ4s3RRDPVbypjwsbgZ/fz1AT1OD9l/xIzugPUihANDSFKsahQcYVXUcLhDZyIKclXiJu5EXpzoxL0uhUOxNQiFz22wr3vmPEYz9qeZaFhIU6XkwACeNz4OOlB6YQaQ8V1ZHE1WRPDWuq6ZoTFIetv+COmJTGoaYnC6yjhuFJciKzVH9ArbMl+JdKPIR2e0tpwIULLnXDWdJY5ljm8ZHmJQkMhRozcr93Ps9ZnaUYzvXKXbMUY7L6O9JPIQK7SDaEbI6wMVb5P4SOUcFwp+xBBXIHoUZkyFkPetOc/W6DYVMzr1teZFTmLU6ZTAAx0DmQWs2jv116IhLzxSzs8JX7orRbltCQcUScjArW8qvApSjiuBDuimriM+pQ+hopuWnf5PTt2FoUt4tmwZznsdxiA3rbe/K34b1wfeL2+yiKT1+SKicSM7sOQKzRBiGZHjVC/UUW1oP7MroTTiOhPGXgFZacGBv290+TkHfM7PNPgGfb4iepnicWJzDo8iwHWARgCDXqKuww5D0Dv4ypjMmSNDNDjMVpSVlwUCh+jhONKyHY/X8jCMIDL4OI963tMtU4lyyzKI7axt+HZM89yZ+6dGIuMQhiC0BPNLOjTEplvIptByw7yTYHWKD+GokZQwnG5yDaK8maXvUvccRoup4v3Te8zLWgaP5l+AqClsyVT8qZwd/7dmJ1mvbqWH7pPxIreqLl0A2eZwdoQaIfwZyg/hqKGUMJxuZReKZFTEhMeITljPMN9hvvIN+QT44phUtEkRheNxt/hX/Zbl1Gj0q8hl1NlrIYUjShEceAmKMFQ1DhKOC6X0haGAV0A3NsjjZFMc0zD4rBwv+1+LE6L2GdGL8xDqXPN6L1KZMFgAyLSswMiHkMV1VHUEpRwXC5GxE2di/gWbejC4I7hGO8YrwuMzBWROSmll2iDEaIQjJiaSL9GO8TyakD1fCSForIo4bgSWiJaL8q8EBn0JS0Rf4Sg+J13XunsWGlhyLBxC0IwElDtFBW1FiUcV0I4EAnkoFchlw5PEKLhj15jQya4yTqiDoTVYkVYFR2AREQAl1opUdRilHBcCSZEaPcp9HgLKNvw2U7ZgjoydFwKRxTCuuiJCBVXgqGoAyjhuFKaAG0RYeelw8xBX2mRtURt6Mu4BUB34DbElEetlCjqEEo4rhQjcC1CIHYiLAzZzV12d5c1Qx2IquNBwJ1AN1Squ6JOooTDG8gmzFGIpLej6B3jZQlAENbJ7YgO74HlL6NQ1BWUcHgLA8KpGYHIOTmJmI64K3oRgWo9oKg3KOHwBQGIpkYKRT1F/f9TKBRVRgmHQqGoMko4FApFlamUcBw6dIiHH36YpKQkAgMDMRgMZGdnlzkmMzOTtLQ02rVrR2BgIC1atGDkyJHs27ev3PViY2MxGAzlHh9++KE3PpNCofAxlXKO7t69m3fffZfrrruO3r1788knn5Q75p133mHnzp088sgjxMfHc/jwYZ577jm6devGtm3baN68rLfwlltu
YerUqWW2tW3b9vI/iUKhqDYqJRx9+vTh+PHjALzxxhsVCseTTz5JdHR0mW29evUiLi6OhQsXMm3atDL7GjRoQM+ePS933AqFogap1FTFaLz0YeeLBkDLli2Jjo7m8OHDVR+ZQqGotfjUOfrzzz9z4sQJ2rdvX27fmjVrCAwMxGKx0LNnT+XfUCjqED4TDofDwbhx44iOjub+++8vsy8lJYVXX32V9evXs3TpUqxWK4MGDWLJkiW+Go5CofAiPoscfeihh/j666/56KOPiIiIKLPv1VdfLfN+0KBB9OzZk4kTJzJq1KgKr5eRkUFGRgYAOTk5vhm0QqGoFD6xOCZMmEBGRgZvvfUW/fv3v+TxJpOJoUOHcujQIY4ePVrhMWlpaWRmZpKZmVmhP0WhUFQfXrc4ZsyYwezZs3n11Vf54x//WOXzDQZVyUahqO141eKYO3cukydPZsaMGTz00EOVPs/hcLB8+XJatGhB48aNvTkkhULhAyptcaxcuRKA77//HoB169YRHR1NdHQ0ycnJvPPOOzz66KMMGDCAvn378u2333rODQ0NpUOHDgAsW7aMVatW8fvf/57mzZtz/PhxXn/9dX744QeWLVtWqbFkZ2d7lnoV3iMnJ0d9pz6gLn+v50eIe9AqCXpH0zKP5ORkTdM07Z577rnkMZqmad9884120003aQ0bNtTMZrMWFham9evXT/v4448rOxRN0zTtuuuuq9LxikujvlPfUB+/V4OmaVrFklK76datG5mZmTU9jHqF+k59Q338XlV2rEKhqDJ1VjjS0tJqegj1DvWd+ob6+L3W2amKQqGoOeqsxaFQKGqOOiMcBw8eZMiQIYSFhREaGsrgwYM5cOBATQ+rzrBp06YKiyeFh4eXOS43N5cHHniABg0aEBQUxM0338xPP/1UM4OuZVSmoBVAcXEx6enpNGnShICAAJKSkvjyyy/LHedyuZg1axaxsbFYrVYSExN57733quGTeIGaXdSpHAUFBVrr1q21+Ph47YMPPtA+/PBDLSEhQWvVqpWWn59f08OrE2zcuFEDtLlz52rffPON57FlyxbPMS6XS+vVq5fWrFkz7d///re2bt06rU+fPlpUVJR28ODBGhx97WDjxo1aw4YNtVtvvVXr37+/Bmj79u0rd9yIESO0sLAwLSMjQ/vss8+0QYMGaVarVdu6dWuZ45566inN399fe+GFF7QNGzZoaWlpmsFg0D766KPq+UBXQJ0QjpdfflkzGo3ab7/95tm2d+9ezWQyaS+++GINjqzuIIXj008/veAxH374oQZoGzZs8Gw7c+aMFhERoT388MPVMcxajdPp9LxeuHBhhcKxbds2DdDeeustzza73a61adNGS0lJ8Ww7fvy45u/vr02ZMqXM+X379tU6duzomw/gRerEVGX16tX07NmT1q1be7bFxcXRq1cvVq1aVYMjq1+sXr2apk2bctNNN3m2hYWFkZKSor5nKlfQavXq1fj5+TFs2DDPNrPZzPDhw1m/fj0lJSUArF+/HpvNVi4bfNSoUfz0008V1uqtTdQJ4di5cycJCQnltsfHx5OVlVUDI6q7jBw5EpPJRFRUFCNGjCjjJ7rY93zgwAHy8/Orc6h1kp07dxIXF0dgYNken/Hx8dhsNnbv3u05zmKxlPlnKI8Dav3fdZ3o5Hb69OlyNT0AIiMjyc3NrYER1T3CwsJ4/PHHSU5OJjQ0lK1btzJz5kySkpLYunUrDRs25PTp08TGxpY7NzIyEhCO0+Dg4Goeed3iYn+rcr98Dg8PL5cNfv5xtZU6IRyKK6dLly506dLF8z45OZk+ffrQo0cP5s6dy/Tp02twdIq6Rp2YqkRERFRoWVxI3RWVo2vXrrRp04YtW7YAF/+e5X7FxbnUdygtioiICM6cOYN2Xvzl+cfVVuqEcMTHx7Nz585y27Oysjzp+orLR5rLF/ueW7RooaYplSA+Pp59+/ZRWFhYZntWVhb+/v4en0Z8fDwl
JSXs2bOn3HFArf+7rhPCkZqayrfffsvevXs927Kzs9m8eTOpqak1OLK6TWZmJrt27aJHjx6A+J4PHz7MF1984TkmLy+PNWvWqO+5kqSkpGC321mxYoVnmyxU1b9/fywWCwADBgzAz8+PpUuXljl/yZIlJCQkEBcXV63jrjI1vR5cGfLz87VrrrlGS0hI0D788ENt1apVWqdOnbS4uDjt3LlzNT28OsGIESO0SZMmae+99572+eefa3PmzNGioqK05s2bazk5OZqmiTiFpKQkLSYmRlu2bJn28ccfa8nJyVpERIR24MCBGv4EtYMVK1ZoK1as0MaNG6cB2rx587QVK1ZomzZt8hwzbNgwLTw8XFu4cKH22WefaXfccYdmsVi077//vsy1nnzySc1isWgvvviitnHjRm3cuHGawWDQ1qxZU90fq8rUCeHQNE3bv3+/NnjwYC0kJEQLDg7WBg4cWGHUnqJiZs6cqXXs2FELDQ3VzGazFhMTo40ZM0Y7cuRImeNOnTql3XfffVpERIQWEBCg9e3bV9u2bVsNjbr2QSWKVRUWFmrjx4/XGjVqpFksFq1Hjx7axo0by13L4XBozz33nNaiRQvN399f69ixo7ZixYrq+zBXgMqOVSgUVaZO+DgUCkXtQgmHQqGoMko4FApFlVHCoVAoqowSDoVCUWWUcCgUiiqjhEOhUFQZJRwKhaLKKOFQKBRV5v8BjD18V4v6/R0AAAAASUVORK5CYII=\n", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAQ4AAAEECAYAAADZKtrDAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8vihELAAAACXBIWXMAAAsTAAALEwEAmpwYAABFeklEQVR4nO2deXyU1fX/37NP9oQQEAmYILIlGEBAIoUIKlCQWKgr0KIo6NevWJdapVLkpyBubS1a9RusRYsiolXQFq0KaAURwqJAFBQJ+56E7Mlk5vn9cZ87d0KCJjiTScJ985pXhmebO8/M/cy555x7rsUwDAONRqNpBNZwN0Cj0bQ8tHBoNJpGo4VDo9E0Gi0cGo2m0Wjh0Gg0jUYLh0ajaTRhE459+/Zx9dVXExcXR2xsLOPHj2fv3r3hao5Go2kElnDkcZSXl5ORkYHL5WLOnDlYLBZmzpxJeXk5X331FVFRUU3dJI1G0wjs4XjRBQsW8P3337Njxw66du0KwIUXXsgFF1zA//3f/3HPPfeEo1kajaaBhMXiuOyyy6isrGTNmjW1tmdlZQHwySefNHWTNBpNIwiLj2P79u2kp6fX2Z6WlkZeXl4YWqTRaBpDWIYqBQUFJCQk1Nnepk0bCgsLf/T8tm3bkpKSEoKWaTSaQPLz8zl+/Hid7WERjjMhJyeHnJwcAKKiosjNzQ1zizSa1k///v3r3R6WoUpCQkK9lsXpLBGAadOmkZubS25uLklJSaFuokaj+QHCIhxpaWls3769zva8vDx69eoVhhZpNJrGEBbhyM7OZt26dXz//ff+bfn5+axZs4bs7OxwNEmj0TSCsAjH1KlTSUlJ4aqrrmLZsmUsX76cq666ik6dOnHrrbeGo0kajaYRhEU4oqKiWLlyJd26deNXv/oVEydOJDU1lZUrVxIdHR2OJmk0mkYQtqhK586deeutt8L18hqN5iegZ8dqNJpGo4VDo9E0Gi0cGo2m0Wjh0Gg0jUYLh0ajaTRaODQaTaPRwqHRaBqNFg6NRtNotHBoNJpGo4VDo9E0Gi0cGo2m0Wjh0Gg0jUYLh0ajaTRaODThxzAf9T3/sfNOfd7ki32cnbSYYsWaFooB+MxHDVANeIBK86/X3If53Irq/BbzIZ/7Av5vnLLNGrDNgvhmuwGH+ddpbpPna34SWjg0P4y0AHyoDl0TsL8mYF+gEFSZz6vMY6pQAmIgOjTmMXbzGC+iYwdaHYEd3UDZyEbAsaCEw2Ze2xtwvg0hHE4gAYgBXGgR+Qlo4ThbMQIesvN7ERaB7MSVKJHwoYTBYv6tQXRSD0oQrOY1rAHXlJaB3TzWjhCSCiAKKDXPiTWfW4AIoMT8a5jHRwNl5vlOoNg8R4pTjNnmMiASJVAV5iMSOGH+jQDama8vRUzTYLRwtHZk55W//tISqEB1eCkY8hfYG3CuHWVByF/pckRnsyI6L4hfdWl1SAuiHNFJK81jPebfcvNYN1BkXssJHEWIgwEcQ1gHxea1I8xtUihKzecnzHa5gOPmcVaUANUAceY1C83jTpptOY4Qm3PM19K9ocHoW9UaMFDi4EH9wkpfghQFS8BzadYbqF/jGvO5K+CaUlyc5rUqER2x/JTnVaghQrV5jhvRsaPN/5eax8tr2szXlK9lMV9fWi6l5vvzIqwIm7lf+kQqzL8ealtPVoS1Em+2+whKkLzmdQrM93zSfJ0I4Fwg0dyv+UG0cLQkZMeQHboc0aGk01FaAt6A46W5Lq2NSFTHxTxWDgtqEJ2tFCEK8eY1Zcf0IDq3tDLcCCshCtHZTiA6nhw6SP9Cpfl6Eea5VoQ4FZrnGuZ2+XqGub0I0eGltRRjtlNaKCfNc6SgxZj3w2q+z0ARkvdOWio21JDLap6/x3zNjub52gdyWr
RwNEekM7IGZTWUIjqPFIxAq0GKSZS5z4voXDXm/iLzurHmcx+iwxWfcg0QnVF28iKUv+Gkec1IxK+1G2WxSJFwIjqu13xeguj41Wbb3eZzB8pnEvhe5V9pGQQ6XqsDzrGjLCzfKcdWovwbJxEWjrQwogPuixSRGJQoVZv3pBRIBtqiExZOgxaOcBM4JPAgvriVAQ/ZueQnVYUQCGmmV5vnRyJ8ABaEQJw090ehrAs57ncgrIPAX247SiwcCIugGiUO0uKQVofHvKbNbBMosXCjOjuoDi7FSXZcG0oQpGUioyRVKCeq3F8VcI6MxlSirIbqeq55ashXOk9jAt5btdnuwFDuEfP1OqLFox60cDQlUiSqUV/WMoQIyM4ghxYyMgHql7YCIQqFKKef9ANIK8Ri7pdhyQKU87AQ0akjzOvL15KvK30b0tKRry0FAETHkx1ORi9k53Whfv1l281O7DE87C3Zy67yXXxX/R3feL5hR9UOCr2FlPvKqaYaK1bsFjtW85/FYsFiBIwXAsOvgGExMAwDn18VwGqxEmeJI8meRCdHJy5xX8KQqCGcYztHtBdUxKcEZQF5zPdSbt4f+b6tQAe0eJyCFo5QESgSlQhLogzxxZR5DaDMdiu1hxZOREepNJ/LY6QlEYEQAjmel+dEIjqEEyUK8tcblKkfGFKV7Q2MrAQ+rwCfxcdJ30kKigs46TzJSe9JiouLKa0opbSqlJLKEkoKSyipKKHYV0whhRRWF3Lcd5zDnsMc9x7HCENa59M8DcBA10B+Ffsrrk+8nraVbVUeiLzPVShLKjBsfMT8277Jm96s0cIRLKRVUI6wAqS3XjoGpd8B1LjehujkMnxpR4iL/OUOjEKUAW1QURIpCh7El18OJeSvpNwnf6VrTnluUlNZw6HqQ+yt2cu+gn0crD7IQe9BDlYd5HDNYY7UHOFo9VEKvAW1ftkbiwULneydON95Puc7z6eHswfdXd1pZ2tHJJE4nU58VT5qrDX48GFYDHyGz29hGBhY7BZlHYmLCssECxafRVgfLh9FRUUcdR1lZ8VO/lv2X9ZUrWF91XrWH1vPb4//lsmRk7m74930MHooZ6zPvO+g/Cfl5mdyCDHk04sM+tHCcabIoUMxwl9wEhUClQ5HqJ0KLcfo0iyWUQUZBo2ldvhT5kzI65WgHI9lAdsrqW0pEPB6wEnfSb498S3fVX7HrppdfF/xPburdrPbs5t9Vfvw+sMwP0ysLZY21jbEO+KJtcUS440hNiKWGEsM0TXRxETFEOONIcaIISEmgYSqBBKdiZwTcQ5JVUnY3Xblb3CihkNy6CAjLDLHoxzlL5EOzFLzXFAOV3nPZBQnyjzWDrSFcl85y08s55WqV1hRtoKcshwW7FzA1MipzO04l7autkpUPShnM6hEuL1Ad3So1sRiGEaLmxbUv39/cnNzm+4FaxACUYBwQB5HfJmqUH4CG8rbL6MU0pKQnUT+lY5HO+rLKiMiDnN7CeLLL0OtMivShwo1WsxrVYDX6iXfyCfvZB47qncIH0LZDnZW7+RozdEffHvn2M+hs6szneydSHYmc27EuXQwOtDB1YF2Ee1o52lHYkQiDpdD3IdIs41FZrss5vMEVLQiwXwPmO+twDwWVEi03Hw/MQHXkh013jxHJm+Vm9uKUBGdk+bryG2BWa4O8zOTIVzTittRsYM/lv+Rv5/8OzXUkGBL4B9J/2BMzBj1ecWaf93me4sxr3kBZ92Q5XR9TVscP0Qx8B2QBxxE/MpLi8KN+FK7EHdRikaEeYwbZWFIcZDWQWBkwIX65ZThRPnFDYwmRAMW8FX72GPdw9airWyr2MZ273a2l21nR9UOKo3Ket9GhDWCrs6udHV15fzI8+li60IXVxdSo1PpTGfcNrdyEMpf7cA8jMAhlnQcyvCvjGjI55jvSXZe6ZuRmabSySjvmcxOtQbcK2ltybCqxbw/XlRnxmyLjII4Au6zfC9y6CFfowq6u7uT48
zh7qi7ubPoTj4q/4jsw9n8mT9zZ9s7VVvke5fZpwbC6ogPuO5ZjBaO+qgCtgM7URGKeETnCIyI7Ed8iWIQX3L5hZOdSYZO5ZyKIsSXXWY/yvRrmZUZmNNgg9KKUrYaW9lycgtfFX3FlxVfsq18GyWG/CmvTUdnR3q5etHD2YMeMT3obu9Od2d3zo0+F6vXqmaRSqtI5kg4UaFMu9k+2UFl6NJntlXmPXjM5/K9yHkkFvO59A9IB69M5JK/6NL5KMOosQH3Td4jmdBlN88vMo+Tn0GMeW/lcE++ngzjSlGWQiL9TF7o6ezJf5L+w8OFDzO7eDa/Ofwbuli6cGXslWq4IkUj2rxWJeIHJLXe239WoYXjVCqBjQhvuhWVwCQtBIk010sC9kejfulKUROs5HVlpMWBCvuViGudqD7BpvJNbPJsYvPhzWyu2My31d/WG4lob29P78jepFvSSU9MJ82VRk9fT+Li4pQPIfDXXJrwblTqthth6keY7ZGOWJt5jLQQygKuJfNJAv01MpdCDtdkqrncL5PH7OZ9kUlhMnwcZe6PNM+Rlod0/krR8iKcw9LHI4cm0QGvK62+GpSY2MxryrT6Svzfeoth4aHEh4hwRXD/sfu55cgtbI3dSpI1SYmFFSXsLuAwkMRZ7ygNqnC8+eabLF68mNzcXI4ePUrnzp0ZP348v//974mJEQPc/Px8UlPrl+zCwkLi4+OD2aTGUQ18hfBhgPKuB04tl4EFmasgf4mOU3sKtzT77aecXwMFFLCxYiO51bnkluaysXoje2r21GmOHTtpEWlkODPIcGWQkZhBb09v2kW3Ex2nGDWpS3YYmekpHabSnyLDszIrNHB4IDu1TM+WQwRQlgIIYZFmeyLKT5OA6JjyuuWoDl2K6PA1ZpsSUfNhpGUQjxquyWGSDEGDEi5p0cmhitXcV42aawO1fUzSqpORKsznUhAd8Fv3b1nhWsHqqtX8/ujvWZC0oPYEPnl/5XyX/UA3zurcjqAKx1NPPUXnzp159NFHSU5OZvPmzcyePZtVq1axdu1arFZ1p2fMmEF2dnat86W4hI0jCIecE/FlA2UlBAqG3C7H3zKnohLl5TedoRXeCjZXbBbhwPL1rK9Zz66aXXVeOtISSR9XH/pF9KOvuy99Y/rSy9sLV7xLDRPkhDL52jIMK0OJVQHbZV6CHP+D8huA6ghyYpl0QpahnJwlqI5+qoNWCo38RZdCIOeZyKn0CaghiHwuBUIO5UAMQaSVYUdFn6TfQ/pM5JBK7pdDGynUgbN+QYmHtKKkn0mKZZlIGnuh4wv0/r43fyv8G7dG3kp/e39l0QSGzuVnLR2zZylBFY53332XpKQk//+zsrJo06YNkydPZvXq1QwfPty/r0uXLgwaNCiYL//T8CLGr3LC06lzJeSvOdQtU2ea8EapwS7LLtY51rGuZh3rqtbxpfdLampVvgG3xU1fZ1/6u/vTP6o/F/kuokfbHthqbOo15CQvUMMDOVSQyVqRqCiEjG7IaIsMY0pHrbQEIhHCIiMWcmiC+d6lr8FA1KuoQPl4ZGQoMKQs74+0UmRoVTpCZTp8oHUjhxpSQKT4SCE2Tnku74m8LvXsl0IFKu/lOCJULsO6MplOCqrMoYmE7tXduSvqLp4se5I7C+9kTcwaLE6LKgcgBU4O/Q6gQr5nIUF924GiIRkwYAAABw4cCOZLBZ9ChGkqY/iBX15O+WvmDFRQQa4llzXWNayNXcvnts85bj2uskIRCUq9bb0Z6B7IxY6LGeAcQFpEGg6HQ3whA8OSsgNJ/4AcWoDytQTOKZEdUfoAbIhhQbn5PA4VqpQT1GRWZHtU0Z44ahfiCfQXyEI3cogjf7XlnBfpcA18Lr9VtoDn1oDzQj3rNAJxXzsg7uVh4FtU7ou81xGonBgPzDx3Jq/sfoXPyz/ntaLXmJgwUc3mlSIo/TLFiO9M3a/8WUHI9fKTTz4BoGfPnrW2z5gxg9tuu42oqCiysr
KYO3cuvXv3DnVzTk8h6pdIzuMI9EsaUGAUsKZmDZ/6PuUz4zM2WjbicXhqXSbJl0SmNZNBjkFkWjPpH9OfaFu0crLJa0qzW5r90jKQ/hGZryF/8WRehOx8gU7DRFSnsKOmuVcgLAU5LIgwX1uO9+XrScEKrM0p/xKwX/oYIPSdP1i4gPMQIrIeFQmTRqAU3CiI9cUyL2EeU45N4XfHf8cvEn5BlC1K5ZfIBDX5kR9ECNRZGJ4NqXAcOHCAWbNmcfnll9O/f38AXC4Xt956KyNGjCApKYlvvvmGRx99lEsuuYT169fXEZgmIzBVW/7ySixwRdkVfFzzca0oh8Vi4ULfhQxmMJdUX0JmZSZdLF2wJFjUl0mKkfy1kmY+iI5chfjFl53SQFgNMgFMZipWm8cHRibkr7fcJ8OqMiIi22BDRQkcAdeRVkBTWQLhxAkMQLznIoTQRiPEVX7uPpicMJnnSp8jtyKXx489zsMpD6sCy/JzlE7SKkRCYHJTv5nwE7LM0dLSUi699FIOHjzI+vXrSU4+/d3dt28faWlpZGdns2jRonqPycnJIScnB4Bjx46xZ0/dKMRPIg8xJ0HOUahG5ShUwdUFV/Oe5z0GWgcyxDqEITVDyLRmElcVJ873onIKolDFYmSUQiKf+xCCIeemSCdmGSp6Ib/QslaEHDLIZDEZ2pXDBukojEQJhJPaw4SznWOIHB2ZKyfvv6x7YoM1FWv4Wf7PcFvc7EjdQWdrZ/F5goqaGagEwK4oH0gr43SZoyERjoqKCkaPHs2XX37JJ5980qAhyOjRo9m1axc7duz40WNDknL+LcKMlUVnZC0M8wt1yHeIhKoE3LjVUEb6MqRP4gSik8cjhCAeZb3IL5aBqpspRUWKhBw/V6E6vBzSSAepGzWsiDKP0wLRcLzADkQEzUBZHMXUmlF8w64beL3kdW6IvYHXur+mZi/LKIvMoHUgnMjn0Srv++n6WtAj0R6Ph6uvvprc3Fz+/e9/N8pvYbGE8c63oXbHC0w9dkAHWwfcFnftyWvSvLdRO4wYiSqgE4X6kkWikqNkpW2Zxh2J+FLWoJyYbvP5uUAXoAfQE+iFmDfREZWMpNcNaRg2RCeXwzY5Yc6FCmOXwmPtH8NtcbO4eDGfl3yuHOcliOey6LMsilTOWUVQfRw+n4+JEyeycuVK3nvvvQaHW/fu3ctnn33GL37xi2A2p3HEoUr1S+ehHP/LcKz0IcgvmEz+klaBC9GRZZ2HUxOuZEJWYLKVfA05tIhGDXdcAftbKYZhcODAAYqLi3/84AbicrlITU2tlTdUi0hEZfN9iHteTu25NnFwnv087i27l7mH5nLP7ntY220tFpdFCTuoeUcOxDA3lVb9WQUSVOH43//9X5YuXcqDDz5IVFQU69at8+9LTk4mOTmZe++9F5/PR2ZmJklJSezYsYN58+ZhtVp58MEHg9mcxmFHfJl2mv+XHVaGO6UIBM54lWIh09FltmQUKndC/rIFJj45EEMa6Qs5S0TidMyaNYs333wzaNfr3bs3H374IZGRkfUfYEFYa0WoYWdg6ryZD3O/+35etL/Iuqp1LK1cyrXOa1VZRTlXR35ehYghSyxnBUEVjhUrVgAwd+5c5s6dW2vfQw89xOzZs0lLS+P5559n4cKFlJaWkpiYyPDhw3nooYfo3r17MJvTeNojnGcFKMGQogG1Q3FyFqassCWtDVkQ12ZuizAfCYgvlZwQJ49pIVRXV7Ns2TIKCwuDds3u3bszdOhQKioqKCmpf+LemVBWVsaPuu5ciBDtbsTnIUPWMnICxLSN4WEe5tbdt/LAnge4qsdVuHCpgs6BUTCoXfG9lRNU4cjPz//RY6ZMmcKUKVOC+bLBw4HwI+QhxEN+9wInjcmkMBnxkMd1QFWJSkJ8mZJQ1kcLd1pWVFQwa9Ysvvnmm6Bdc8qUKQwdOjRo12s0SYjPT/ospDUondPAlLgp/MX1F/Kq8ni+4HnuOvcu8XnHBB
wr0/Bl2cfEJn0XYeEsnqZzGiKAdIQpKy0NmRglhx0WhGl61Dz+YmAYMNp8DDGv0R4hJNpp2TyxI3IwpO9JOrQDsnjtFjuPdXoMgDmH51BUVaR8VtWI74YMp3sRFuuZV1hsMWjhqA8XInIxGOiNCKvKMKj0YVwC3AJMB8YihKIdqriMpmUQg1g/RVqSPsSQUkbC7HBl/JUMjR7KiZoTPHn0SSEYJxERlkqE1RJYqKmg6d9GU3OWTtFpIJGIMOh5qOnkMlR7FoxjzwqsiHB3EWqekA81I9gBFruFxzs/TmZeJk8feZrp7aeL5RbkJDeZVSoTAI+hcnlaKdriaAjS0SnNUi0arYsIhF9CWg2xiM+6jfncCoPcg7gq/irKfeXMOTBHOUHlD4rMCZE1SEqb/m00JVo4NBoLwrktE/Hk2rdeREp6qThmbse5WLCQczyH/OJ8MVwpMo87ibA4ZA7PEVQErhWihUOjAWFhSF+HHWV1gL/ebFpiGhMTJ+IxPDyy7xEhEvEov5asPl+CWs6zlaKFQ6ORJKJqmVpQ9WNldK0MHmr/EDZsvHziZXZad9bO95FDFbm4dgG169S2IrRwaDQSmRTmRKWgG4hISZH4f9f4rtx4zo148TJnzxy1IJecHV2Amr9Ugari1srQwqHRBBKPGHLIOUkJCEGJQzhL3fDguQ9ix86rR19lZ+lOdY5cTEsOU6woUWllaOHQaAKxIzJKZRZpJWISnMwULoFUTyo3Jt6IDx9zCueooYysw+pEVWk/iarp2orQwqHRnIr0dchaJ7LMgSz1GAe/v+D32Cw2XjvyGrsKd4khSjVCZApRhZfsiLyOVmZ1aOFoYRiGEfLHWY8NVR5BVgiTjs848TzVncqvEn+FFy+P73lclUSQ1ocVYW1UmY/gzeFrFrTi3LbWh2EYPPHEE3zxxRchub7VamXmzJn06dMnJNdvUcShoiIWVL6GXEqyDGa0ncHLx19mYeFC/mD5A53opGZNy+iMdJIWm9dsJdMRtHC0MNatW8c777wTkmtbrVamTp0akmu3OKSvoxBVxS3Az0E0dGvTjWsLrmXJ0SX8ec+f+VPHP4lz5epxPsSwx4cQnrbUrj/bgtFDFY3mdMSgFrmWFeHkolQuwAL3d7wfgJxDORRUFajFuuWaNaWohcVlndNWgBYOjeZ02BBWglxeQk5+kwWdSqFvTV9Gxo2kzFfGs8XPqhIKFmova1mBf+Gn1oAWDo3mh4hHLYfgNP8vi/jYgUS4v4uwOp7d/ywVFRXCwnCgFtqWVeAqEUOfVoAWDo3mhzDFwT/rFfxr7chFrS6Nu5R+Uf045jnGon2L1Op7ct5KBUJMXAhfRxUtHu0c1TQIi8WC2+0mIiLixw9uIE6n88cPag7EIRawjkL0mOOoyvc+sJRb+G3b3zKhbAJ/LPgjN19wM1aLVRU0jkAt7mVBiUgLRguHpkFERUXx6quvUllZ+eMHN5DExBZSnNOJsDr2opaxkJPbSoFouDruau4/fD87ynfw/on3GR09WhwjSwpKP0kZIswrl/1soWjh0DQIm81Gr169gn5dwzBITU2lX79+Qbtmt27dTr+mypkSj8gAlbU6ZJlAsyKcAwfTO07nd9//jqd3P83o7qPFMWXm8Q7UfJYqRF5HfHCb2JRo4dCEFYvFwuzZs5k5c2bQrmm1WnG5gjwWcCCiJIWI4UcVKszqAWrglshbmG2dzYclH7Ldup00R5oKv0prowaRH1JCi04I08KhCTtOp7P5+zssiGLUshCxHTFz1obI2SiDhMQEJneYzPMHnueZ/c/wQrcXai/UJRcar0Bsl5ZIC6QFj7I0mibGifJNmOUE/TkaZi3aO5LvAGDR4UWcLDqp/CCBs2RlbsfxJm19UNHCodE0FFkqUCZ3ORG+Ch/CeqiGXkYvhscMp8xXxsslL9deQ9iGGN6cDDi3hc6a1cKh0TQGWYtULg0ajRATORyxw/+e978APHfoOQwMNfFNzp4NzP
MIXpCqSdHCodE0BiuiEpgTMQTxmdtkrQ43ZLfNpqOrIzvKd/DJkU9U/oYsCuQyH15EhbAWOH9FC4dG01iiUKvWRyMsB5lV6gG7x87NiTcD8MKRF9T6K1UIP0clYpjiQAhJC7Q6tHBoNI3FhRieeFCVwuIRguICDLil4y1YsfLPgn9y1HdULeKVYJ5ThZpBW9G0zQ8GWjg0mjMhHpVKLssEyuGIAzrFdGJM2zF4DA+v7H9FzW2RkRg7QmiqUUWCWhBBFY7Vq1djsVjqPOLj42sdV1hYyC233ELbtm2Jiori8ssvZ+vWrcFsikYTWpwoa8OFsD6KUP6KGri5rRiu/O3w34STtAoxpDmBEBgvQnDklPsWREgSwObPn8+AAQPUi9jVyxiGwdixY8nPz+eZZ54hISGBefPmMWzYMLZs2UJycnIomqTRBBcLIgN0FyKVXCZ0yUSvahgdM5pzHOfwTeU3rK1cy+D4weIYWQC5BFUlrAQRsWkhhEQ4evbsyaBBg+rdt3z5ctasWcPKlSsZNmwYAJmZmaSmpvLEE08wf/78UDSp1dCtWzcuvvjikFzbarXWsQ41P0A0yjlajrBAZNlAAxzRDiZ3mMzjex/n74f+zuCowWqIYkGIRpR5vgUhKi0kl7vJm7l8+XLOPfdcv2gAxMXFMXbsWJYtW6aF40d45JFH8Pl8Ibu+w9FCc6DDgQ3R8WVlMLlIdUAq+Y3tb+TxvY/zxpE3+EunvxBFlFpT1ooQHitqGBPflG/gzAmJcEycOJHjx48THx/PyJEjeeyxx+jcuTMA27dvJz09vc45aWlpvPLKK5SWlhIdHR2KZrV4LBZL85/TcQp79+5lyZIlIRO7888/n1/+8pdYLGGaLRaPmL8i116pRE25N6CHtQeDogexrnQdbxe/zaQOk8R5VQgLRS6bkIiYQBfflI0/c4IqHHFxcdx7771kZWURGxvL5s2befTRR8nMzGTz5s20a9eOgoICUlJS6pzbpk0bQDhO6xOOnJwccnJyADh27Fgwmx10DMMIqVUAYlgRts7SCHbt2sWMGTPwekMTNhg9ejTjx48P372IQMyWlUspyDVYvAgLwgWTO05m3Y51vHzkZSEcHoSlYYZuqUJYLHL5yBZg9AVVOPr27Uvfvn39/8/KymLo0KEMHDiQ+fPnM2fOnDO+9rRp05g2bRoA/fv3/8ltDSUlJSX85je/4dChQyG5fps2bZg/fz5t27YNyfU1jcCKcGrKOSgWhBDIWh02uK7ddfxm52/4uPBjDhYf5Nzoc4VgyOrpsah5LKWIXI9mTsh9HP369aNbt25s2LABgISEBAoL61ZsLSgo8O9v6Xg8Hj799FO+//77kFy/Q4cOVFW1gsKVrYVIhEjI0oInqBVhSfAmMCZ+DG8Xvs3iE4u5N/JeIS5e1JwXp3mdFlIdrMmaJ03JtLQ0tm/fXmd/Xl4enTt31v4NTcvDhbAa5CpvDsR8FnO5SCz4fRuLji9SyWJy3osF4evwIqyQFrCEQsgtjtzcXHbs2MHVV18NQHZ2Nn//+9/55JNPyMrKAqC4uJh3332XCRMmhLo5rZJNmzaxc+fOoF0vKiqKK664ArfbHbRrtmrkNHtZcyMwscvMGB3TbgxxO+PYUrqFbwq/oUdUD+HnkOfLUG4ZLaKYcVCFY+LEif76kfHx8WzevJl58+bRsWNH7rzzTkAIR2ZmJpMmTeLJJ5/0J4AZhsHvfve7YDbnrOHll18Oahg7JSWF9evXa+FoDG7E0ERmlEoRcQAecOFifMJ4/n7s7yw+upj/l/r/1MS4GtQSDDZEdKWZD1eC2rT09HSWL1/OTTfdxMiRI3n66acZP348X3zxhd+RZ7Vaee+997jiiiu4/fbbGTduHDabjVWrVtGpU6dgNkdzhugV688AB6KzyyQuJ8LJKSe+eeGGpBsAWFywGMNh+IcxRJjnHUctM1nd1G+gcQTV4pgxYwYzZsz40ePatGnDSy+9xEsvvRTMl9
dowkssovNXIoYoUajV3BwwrN0w2n3bjm8rvuXL4i/pE9lHiEcEatlIH6puRzM2+JqxMaTRtDCcqFKBkaglH62ABezYGZ84HoClB5cKX4gH4RiVk9zkvJcihIg0U7RwaDTBwo6wOioRjk8nKkQLUAXXxF8DwBtFb4jhiqzTUY6wTk6iKqc344i7Fg6NJpjEIwSjAmFNRKKGIjYY2n4oSY4kvqv4ji/LvxSJYD6EPyTCPMdcp0ULh0ZztuBC+CZkQeNqRPSkBnCC3WZnXNI4AN4++Lbf/1FrWr4c5sgK6s0QLRwaTTCxIIYp5YjkLikKICyJShgXJYTjn4X/FPtl+cBCsR8fqhhyM00G08Kh0QQbmQ1agxCNeESo1iwdOLzdcGJtsWwr38a3nm+VD0QW+DmJqkPaTAsZa+HQaIKNdIpaUSUCZYjWCU6XkyvbXgnAssPLxD4rKnvUjhCZSoTV0QzTarRwaDTBxoqwHDyoma6y7oa54ttVsVcBsPz4cpW/UYKwNkyBwYrwczTD4YoWDo0mFLRBWBoysSsRVYPDC6PiRuGwOFhTuobjluPC1+FAWBd2hIDI7NFmmEWqhUOjCQUuhNUhywpaEaJQKfbFxsYyLGEYPnz868S/VBg2BjXvBYRoNMMK6Fo4NJpQIOuRyrqiVdTKIsWAsQljAfjXwX/5Cxz7s0irzfO8NMvFqVtITWVNS8Vut5OQkEBNTU1Irt+s67fEA0dQAiAnwdUAZTDGNYbpTOeD0g+ojqnGaTXryVYghi1liB5q+kWIaOo3cHq0cGhCykUXXcTatWtDNuM2Kiqq+dZejURYHXK5xwjUAtVRkBqXSq89vcgry+Ozos8YnjC8drZpwAQ5KtHCoTl7iIyM5IILLgh3M8KDHeGvMBBDF7limwu/k2B0m9HkleXx/sH3Gc5wIQ4+hGj4UMOcEoQF00w0Uvs4NJpQIgsZx6MK/ci8jmL4uePnAKwoWaGSxHzUriZmQ63X0kzQwqHRhJIYVBlBAyEk0mnqhsGdBhNli2JbxTb2e/arHulAiE1lwLnNKCyrhSNE2O12HA5HSB6Ba/FqmjmyerkDFZotwV8lzGVzMTx+OAAfHvpQ1OEoN88zUPVIm9nC1PobGAJiY2P5xz/+QWVlaCYaOJ1OvaZKS8GCWmA6ATiGEARZ0bwaRrpG8i7v8p+T/+GmDjeJoYlcpAmgnfm3GFWbNMxo4QgBDoeDgQMHhrsZmuZCHHAUf8VzolHzUOwwosMIOAgflnyIz+XDarEKcZEr25eiZtHKqfdhRg9VNJpQ40aVBJR1NyrU9q6xXTnPdR4nPCfYcnyLEIpSVClCG8JyqaTZ+Dm0xdEKuPjii5kyZUrQrpeYmKiXRggmdoTFYENYG0dQKegesHgsXBZ1GS9VvcTHxz+mX3I/IRhV5sMGJCGEowjhcA0zWjhaARMmTNCLWTVnLIjOXohavEnOfC0X2y5LuIyXCl7i46qPuS/mPnVuFaKXliDEpBLlNA0jeqii0TQF0YjhShmi08uZs/HicVmHywD4tOhTqsuqhaAUI3wiEajapDILNcxo4dBomgKH+TfK/CtXtgfwQPuq9vRy96LCV8H64+uVpeFB1eSIQPhGypuw3adBC4dG0xTYUfNP5KpvToQoFAEeGBYzDIBVNauEJSLzP2TGaClqfdkwo4VDo2kKLAixcKDCqwZCTNoCCWKlN4DVhauFOJQhLBMHYqhTY15HTsEPI1o4NJqmQs5ujUatSi8FpBKG2oYC8Hnp51RXVquIihchID6E5dIMyglq4dBomgoHqpSgCxFdqQYKgDJIsifR092TCl8FuUauGK7IFeFkxXRZ2Liinus3IVo4NJqmQi4RaeZv+BO7YhEiEQVZ8VkAfFrwqQjBliB8IA7U8pIGYXeQBlU4Lr30UiwWS72PUaNGAZCfn3/aY4qKioLZHI2m+SGjKi5UZEVOta+AIe4hAKwpW6PCttJBKteVlTkgYVzlLagJYM899x
zFxcW1tn3++efcc889ZGdn19o+Y8aMOttiYppBSpxGE0oi8S8HSQRqZXoP4IDB0YMBWFOyBh8+rJFW0UvlGi1yQWrp+wjTmCGowtGrV6862xYsWIDT6eT666+vtb1Lly4MGjQomC+v0TR/XAiroxghHrKkYBTgg872ziQ7ktnv2U9eWR7pvnRxXmAtD1lFrBqVH9LEhFSvysvLWbp0KWPHjqVNmzahfCmNpmUgq4BJ0ZCT2QCqwFJjYXCMsDo+r/xczYSVw5kTqFXtS5uw3acQUuF4++23KSkpYfLkyXX2zZgxA7vdTlxcHNnZ2WzdujWUTdFomgeyPodceCkGYUEU4a9NeknUJQCsLVyrrAqnebys1eEjrMIR0klur7zyCu3atePnP/+5f5vL5eLWW29lxIgRJCUl8c033/Doo49yySWXsH79enr27BnKJmk04ScGMUPW9Gv4hcFclCnTkQnA2oq1arX7UtQC1nb8zlR/TdImxmKEqG79wYMH6dSpE7/5zW/405/+9IPH7tu3j7S0NLKzs1m0aFG9x+Tk5JCTkwPAsWPH2LNnT9DbrNE0CVXA1wihKEANVUoBC1Rbq4nLjaPSqOREvxO0sbRRWaZyeYX25t8LCemyCf379yc3N7fO9pANVRYtWoTP56t3mHIqnTp14mc/+xkbNmw47THTpk0jNzeX3NxckpKSgtlUjaZpsZsPC8LZaVBrqOL0Ounn7gfA+sPrhWURuGSCCzG8MRAiFAZCJhwvv/wyGRkZZGRkNPicZruwjkYTTGyI4Uo1ovMbiCGIE38q+qBIEXFcZ1knoiluhHAUU3ueSu3shyYjJMKRm5tLXl5eg6wNgL179/LZZ5/pOp2as4coVCjVi7AkPOb2eBgYJ/rChuINYggjV6+PR82QleUEw5AIFhLn6CuvvILdbmfixIl19t177734fD4yMzNJSkpix44dzJs3D6vVyoMPPhiK5mg0zQ+5wpsV4aMoQPkvPDDQEMKxvnw9RrWBxW0RvVUOUeLNY8sJSyJY0IXD4/GwePFiRo0aRbt27ersT0tL4/nnn2fhwoWUlpaSmJjI8OHDeeihh+jevXuwm6PRNE9c5kPWFY3Hv7obFkiJSqGtrS3HvcfZY99Dii1FWBg+hKUiw7Ty/CZOBAu6cDgcDo4dO3ba/VOmTAlqYV2NpkViQwiHrCNajhCDaMAHlioL/d39eb/sfXJP5pISnSIsCyk4cnhSbZ4f3bTN17NjNZpwYEUkglUihi0gLIpihD/DA/0j+gOQW56rIjCRCLGQQxYfYgZtE6OFQ6MJF9JB6kGtnyKLE1vgIttFAGyq2iSEoxohLBWIqEwFYXOQauHQaMKFC2E1OBAWRDWiR5rVvfrFi1yOTVWbMLyGqvoVgRjayLyOYsQwpgnRwqHRhAvpr6hBDEFkYpe5rmynsk4kWhM54T3B/tL9qnpYBco5Wm2e38SJYFo4NJpwYUOlkVchBCAWIQ5WsERa6OPqA8BmY7NaT7Y64BwQQhKa9c1PixYOjSZcWBGOUTnZTRb1kXigr6MvAFvKtwhxqEQIS415nkwea+KZslo4NJpwEoPohXK9lQiEJWGGZzMQUza+rPxSHC8rf8nV3DwoP0cTLpmghUOjCSduhAVhRww5POY2M8rSJ6IPAF/WfKmKHMtsUTm1HkRItgkdpFo4NJpw4kZZEQ5UXkYV4IHu3u44cLDLu4vSklKxXa50Lyudy/Vka+pcPWRo4dBowonM3ZBFiOWaK+ZwxOFx0NMqiltt820TglGNEBdpgXgRQ5UmdJBq4dBowokNlQ0qK5nLrFDz0dvSG4BtFdugEPGQztEyhMVh0KQZpCEtHajRaBqAXGvFgyrwU4oQkWpIr0kHF2wt3SoERq7sVoFYj7YMITqFQKemabIWDo0m3EQhhhsWhAhUIKyIUvE3zZ4GLshz5alQrFyMuhIxs9aGWl+2CcYRWjg0mnATiRIEAyEeRxG90w1ptjQAtlu2qy
rpsmBxFXAcISJyCOMk5Ggfh0YTbpyonliAsBziEVmkTjjPdh4RRgSHLIcoiihS0Rcb0Abx/+MIsWkiB6kWDo0m3NhR81TKUWnoAA6wOWx0t4giV9/4vlGhW1mHNM58fpAmyyDVwqHRhBsbtcsCRiCGI7KqlwE9ESHZr+1fq+n4NQjBsCKsExDi0QRoH4dG0xyIRYRTo1E1Oqz4/RXdPd2hGnYYO4Rg2FErwjlQhYEONU1ztXBoNM0BuZZsG2pnkdaI/3f3iaHKTstOIRCB81usiJ6cgFhbtglWd9PCodE0B6IQww65PqwPVZjYDd1qusFJ2OnbqeakuFDzXGzUXm5BC4dGcxYQiZgpK1el9yF6p0/8v6ulKwC7fLvwRfiw2qziOBmRkWvQynksIUYLh0bTHIhC+DlsqJCFB78PI9YSy28jf0sHawdqjBqcFqdaM1ZaHDbzOjoBTKM5S4gE2iF8FDKl3GXuMwsRP9n2SX91ML9V4UJZHBYgkSbp1Tocq9E0B2xAd9RsWfmwI3qpfMi6o7Iauoy8OMxjO5v7Q4y2ODSa5kJHRGREhlt9Ac8lloC/0sqwI8TDhYjKNAFaODSa5kIkcAGwy/y/FyUgoNaaxfxrC3hYgFSabClILRwaTXPifMTEtUPUzuUAIRzSxyHDrXIafjLCYmkitHBoNM0JG9ATIRpHURXQfSjhkMMTmSiWjKjD0YQeSy0cGk1zww50A85BWB4nUMIBKoqSCLRH5H80gUP01CZqNJrmhhUxtT4OMVW+FDFkkdZGNGqx6jDQIONm//79TJ8+nczMTCIjI7FYLOTn59c5rrKykvvuu48OHToQERFBZmYmn376aZ3jfD4f8+bNIyUlBbfbTUZGBm+99dZPfjMaTavDggi7JgEdEFZIW8IqGtBA4fjuu+944403SEhIYMiQIac97uabb2bBggU8/PDDvPfee3To0IGRI0eyZcuWWsf94Q9/YPbs2dxxxx2sWLGCQYMGcc011/Dvf//7J70ZjUbTRBgNwOv1+p8vWLDAAIzdu3fXOmbLli0GYLz00kv+bR6Px+jWrZsxduxY/7YjR44YTqfTmDVrVq3zhw8fbvTu3bshzTEuuuiiBh2n0Wh+Gqfraw2yOKzWHz9s+fLlOBwOrrvuOv82u93O9ddfzwcffEBVlVgh94MPPqC6uppJkybVOn/SpEls3bqV3bt3N1z1NBpNWAhaAGf79u2kpqYSGRlZa3taWhrV1dV89913/uNcLhddu3atcxxAXl5esJqk0WhCRNCEo6CggISEhDrb27Rp498v/8bHx2OxWH7wOI1G03xpMeHYnJwccnJyADh27FiYW6PRnN0EzeJISEigsLCwznZpQUiLIiEhgaKiIgzD+MHjTmXatGnk5uaSm5tLUlJSsJqt0WjOgKAJR1paGrt376a8vLzW9ry8PJxOp9+nkZaWRlVVFbt27apzHECvXr2C1SSNRhMigiYcY8eOxePxsHTpUv+2mpoalixZwogRI3C5RFWSUaNG4XA4ePXVV2udv2jRItLT00lNTQ1WkzQaTYhosI/jzTffBGDjxo0ArFixgqSkJJKSksjKyqJv375cd9113HXXXXg8HlJTU3n++efZvXt3LZFo164d99xzD/PmzSMmJoZ+/fqxZMkSVq5cyfLly4P89jQaTUhoaCIIYopNnUdWVpb/mPLycuPuu+822rdvb7hcLmPgwIHGqlWr6lyrpqbGeOSRR4zOnTsbTqfT6N27t7F06dKfnJSi0WiCy+n6msUwjCaoiRxc+vfvT25ubribodG0ek7X13TNUY1G02i0cGg0mkbTYhLAfgyPx8P+/fuprKwMd1M0QcbtdpOcnIzD0UQFNTU/SqsRjv379xMTE0NKSkqddHZNy8UwDE6cOMH+/ft1qL4Z0WqGKpWVlSQmJmrRaGVYLBYSExO1JdnMaDXCAWjRaKXoz7X50aqE42xj4cKFHDx4sNH7fogXXniBV155pdHnFR
UV8dxzzzX6PE3LpNX4OE7lnKfO4UjZkaBdr31Uew7/9nDQrvdT8Xq9LFy4kPT0dM4999w6+39on9frxWaz1dkOcNttt51Re6Rw3H777Q0+p6amBru91X4FWzWt1uIIpmg09HqLFi1i4MCB9OnTh1tvvRWv18uGDRu48MILqayspKysjLS0NLZt28bq1asZOnQoY8aMoXv37tx22234fGLJrv/85z9kZmbSr18/rrnmGkpLSwFISUnh/vvvp1+/fixevJjc3FwmTpxInz59qKio8LfjzTffrLMv8NylS5eyYMECBgwYQEZGBr/85S/9kxNnz57NU089BcCuXbsYNWoUF110EUOGDOGbb74R9+LIEcaNG0dGRgYZGRmsXbuWBx54gF27dtGnTx/uu+8+DMPgvvvuIz09nd69e7NkyRIAVq9ezZAhQ8jOzqZXr17MmjWLp59+2t/2Bx98kL/85S8//QPThBQt90Hi66+/ZsmSJaxZswaHw8Htt9/Oq6++yq9//Wuys7OZOXMmFRUVTJo0ifT0dFavXs369evJy8vjvPPOY9SoUfzzn//k0ksvZc6cOXz00UdERUXx+OOP86c//YlZs2YBkJiYyKZNmwB48cUXeeqpp+jfv3+ttlx99dU8++yzdfYFnnvixAmmTp0KwMyZM/nb3/7G9OnTa11n2rRpvPDCC1xwwQV88cUX3H777axcuZI777yTrKws3n77bbxeL6WlpTz22GNs27bNX5j6rbfeYsuWLXz55ZccP36cAQMGMHToUAA2bdrEtm3bSE1NJT8/n/Hjx3PXXXfh8/l4/fXXWb9+ffA/IE1Q0cIRJD7++GM2btzIgAEDAKioqKBdu3YAzJo1iwEDBuB2u5k/f77/nIEDB9KlSxcAbrjhBj777DPcbjd5eXkMHjwYgOrqajIzM/3nBNZ0bSyB527bto2ZM2dSVFREaWkpI0eOrHVsaWkpa9eu5ZprrvFvk3VjV65c6feD2Gw24uLi6tRi+eyzz7jhhhuw2Wy0b9+erKwsNmzYQGxsLAMHDvSHVlNSUkhMTGTz5s0cOXKEvn37kpiYeMbvUdM0aOEIEoZhMHnyZObNm1dn34kTJygtLcXj8VBZWUlUVBRQN1pgsVgwDIMrrriCxYsX1/s68twzIfDcG2+8kXfeeYeMjAwWLlzI6tWrax3r8/mIj4+vs7RFMDj1Pdxyyy0sXLiQw4cPM2XKlKC/nib4tFofR1Nz2WWX8eabb3L06FFAVDTbs2cPALfeeiuPPPIIEydO5P777/efs379enbv3o3P52PJkiX87Gc/Y9CgQaxZs8Zf3LmsrIydO3fW+5oxMTGUlJQ0eh9ASUkJHTp0wOPx1KmNAhAbG0tqaqq/vophGHz55Zf+9/r8888DwtF68uTJOq83ZMgQlixZgtfr5dixY3z66acMHDiw3raMGzeO999/nw0bNtSxfDTNEy0cQaJXr17MmTOHESNGcOGFF3LFFVdw6NAhXnnlFRwOBxMmTOCBBx5gw4YNrFy5EoABAwZwxx130LNnT1JTUxk3bhxJSUksXLiQG264gQsvvJDMzEy/U/JUbrzxRm677bY6ztEf2wfwyCOPcPHFFzN48GB69OhRa5+0hF599VX+9re/kZGRQVpaGsuWLQPgL3/5C6tWraJ3795cdNFF5OXlkZiYyODBg0lPT+e+++5j3LhxXHjhhWRkZDB8+HCeeOIJzjnnnHrfh9PpZNiwYVx77bWnjfZomhetZlr9119/Tc+ePf3/b+7h2NWrV/PUU0/x3nvvBe2awWD69On069ePm266qcle0+fz+aM9F1xwQb3HnPr5apqG002rb7U+juaUc9FS+MMf/sAXX3zB7Nmzm+w18/LyuPLKKxk3btxpRUPT/Gi1FoemdaE/3/CgC/loNJqgoYVDo9E0Gi0cGo2m0Wjh0Gg0jUYLR5hZvXo1V155JQDLly/nsccea/I2zJs3j65du9K9e3c++OCDHzz2zj
vvJDo6uolapmmutNpwLOcAwZwg2x5oYITXMAwMw8BqbZwuZ2dnk52d3fi2/QTy8vJ4/fXX2b59OwcPHuTyyy9n586d9SZi5ebm1rs+sObso/VaHMGdVf+j18vPz6d79+78+te/Jj09nX379vE///M/9O/fn7S0NB566CH/se+//z49evSgX79+/POf//RvX7hwIXfccQcgMj/l6nmA/1f+0KFDDB06lD59+pCens5///vfn/S2li1bxvXXX4/L5SI1NZWuXbvWOzvV6/Vy33338cQTT/yk19O0DlqvxREGvv32W15++WUGDRoEwNy5c2nTpg1er5fLLruMr776im7dujF16lRWrlxJ165dGz3b9bXXXmPkyJE8+OCDeL3eOot8A9x9992sWrWqzvbrr7+eBx54oNa2AwcO+NsLkJyczIEDB+qc++yzz5KdnU2HDh0a1V5N60QLRxA577zzanXCN954g5ycHGpqajh06BB5eXn4fD5SU1P9WZKTJk0iJyenwa8xYMAApkyZgsfj4Re/+AV9+vSpc8yf//znn/xeAjl48CBLly6tM4NWc/bSeocqYSBwuvju3bt56qmn+Pjjj/nqq68YM2ZMoyp12+12f0Uwn89HdXU1AEOHDuXTTz+lY8eO3HjjjfXWB7377rvp06dPnUd9jteOHTuyb98+///3799Px44dax2zefNmvvvuO7p27UpKSgrl5eV07dq1we9F0/rQFkeIKC4uJioqiri4OI4cOcKKFSu49NJL6dGjB/n5+ezatYvzzz//tHU3UlJS2LhxI9deey3Lly/H4/EAsGfPHpKTk5k6dSpVVVVs2rSJX//617XObYzFkZ2dzYQJE7jnnns4ePAg3377bZ3p72PGjOHwYeUZjo6O9k/715ydaOEIERkZGfTt25cePXrQqVMnf0Uvt9tNTk4OY8aMITIykiFDhtRbN2Pq1KlcddVVZGRkMGrUKL81s3r1ap588kkcDgfR0dFnVJE8kLS0NK699lp69eqF3W7nr3/9qz+iMnr0aF588cV6Cx5rzm5a7yS3MIZjNcFHT3ILDz9pktv+/fuZPn06mZmZREZGYrFYyM/Pr3VMbm4u06ZNo0ePHkRGRtK5c2cmTpzI7t2761xPLtN46uOdd945ozdXL4cBI4gPLRoajZ8GDVW+++473njjDX+Z/P/85z91jpFJRHfeeSdpaWkcOHCARx55hP79+7NlyxY6depU6/iRI0fWqfvQvXv3M38nGo2myWiQcAwdOpQjR4Td/+KLL9YrHPfffz9JSUm1tg0ePJjU1FQWLFjAww8/XGtf27Zta4UuNRpNy6FBQ5WGpE6fKhog8hqSkpLqTSgKBS3QXaNpAPpzbX6ENI/j66+/5ujRo/U6td59910iIyNxuVwMGjToJ/s33G43J06c0F+yVoZhGJw4cQK32x3upmgCCFk4tqamhttuu42kpCRuvvnmWvvGjh3LgAEDSE1N5ciRIzz77LOMGzeOf/zjH0yaNOmMXi85OZn9+/dz7NixYDRf04xwu90kJyeHuxmaAEImHHfccQdr167lX//6FwkJCbX2PfPMM7X+P27cOAYNGsSMGTNOKxw5OTn+1Oz6xMHhcPhXB9NoNKElJEOVBx54gJycHF566SVGjBjxo8fbbDauueYa9u/fz6FDh+o9Ztq0aeTm5pKbm1uvP0Wj0TQdQbc45s6dy+OPP84zzzzDr371q0aff+qyiBqNpvkRVItj/vz5zJw5k7lz5/rrSjSEmpoalixZQufOnU+72pdGo2k+NNjikEVlNm7cCMCKFStISkoiKSmJrKwsXn/9de666y5GjRrF8OHDWbdunf/c2NhYevXqBcDixYtZtmwZo0ePplOnThw5coS//vWvbNq06bQTvk4lPz/fH+rVBI9jx47pexoCWvJ9PTVD3I/RQDhNMnZWVpZhGIYxefLkHz3GMAzj888/N4YNG2a0a9fOsNvtRlxcnHHZZZcZ77//fk
ObYhiGYVx00UWNOl7z4+h7Ghpa431tkZPc4PSTbzRnjr6noaE13lddyEej0TSaFisc06ZNC3cTWh36noaG1nhfW+xQRaPRhI8Wa3FoNJrw0WKEY9++fVx99dXExcURGxvL+PHj2bt3b7ib1WJYvXp1vcWT4uPjax1XWFjILbfcQtu2bYmKiuLyyy9n69at4Wl0M6MhBa0AKisrue++++jQoQMRERFkZmby6aef1jnO5/Mxb948UlJScLvdZGRk8NZbbzXBOwkC4Q3qNIyysjKja9euRlpamvH2228b77zzjpGenm506dLFKC0tDXfzWgSrVq0yAGP+/PnG559/7n9s2LDBf4zP5zMGDx5sdOzY0XjttdeMFStWGEOHDjUSExONffv2hbH1zYNVq1YZ7dq1M37+858bI0aMMABj9+7ddY6bMGGCERcXZ+Tk5BgfffSRMW7cOMPtdhubN2+uddzvf/97w+l0Gk8++aSxcuVKY9q0aYbFYjH+9a9/Nc0b+gm0COF4+umnDavVanz77bf+bd9//71hs9mMP/7xj2FsWctBCseHH3542mPeeecdAzBWrlzp31ZUVGQkJCQY06dPb4pmNmu8Xq//+YIFC+oVji1bthiA8dJLL/m3eTweo1u3bsbYsWP9244cOWI4nU5j1qxZtc4fPny40bt379C8gSDSIoYqy5cvZ9CgQbXW8khNTWXw4MEsW7YsjC1rXSxfvpxzzz2XYcOG+bfFxcUxduxYfZ9pWEGr5cuX43A4aq3QZ7fbuf766/nggw+oqqoC4IMPPqC6urrObPBJkyaxdevWemv1NidahHBs376d9PT0OtvT0tLIy8sLQ4taLhMnTsRms5GYmMiECRNq+Yl+6D7v3buX0tLSpmxqi2T79u2kpqYSGRlZa3taWhrV1dX+9Wi2b9+Oy+Wqs7BVWloaQLP/XreIdVUKCgrq1PQAaNOmjV49vYHExcVx7733kpWVRWxsLJs3b+bRRx8lMzOTzZs3065dOwoKCkhJSalzbps2bQDhOJWLX2vq54e+q3K//BsfH19nNvipxzVXWoRwaH46ffv2pW/fvv7/Z2VlMXToUAYOHMj8+fOZM2dOGFunaWm0iKFKQkJCvZbF6dRd0zD69etHt27d2LBhA/DD91nu1/wwP3YPpUWRkJBAUVFRnRq5px7XXGkRwpGWlsb27dvrbM/Ly/NP19ecOdJc/qH73LlzZz1MaQBpaWns3r2b8vLyWtvz8vJwOp1+n0ZaWhpVVVXs2rWrznFAs/9etwjhyM7OZt26dXz//ff+bfn5+axZs4bs7Owwtqxlk5uby44dO/yLTGdnZ3PgwAE++eQT/zHFxcW8++67+j43kLFjx+LxeFi6dKl/myxUNWLECFwuFwCjRo3C4XDw6quv1jp/0aJFpKenN//6ueGOBzeE0tJS4/zzzzfS09ONd955x1i2bJlx4YUXGqmpqUZJSUm4m9cimDBhgvHggw8ab731lvHxxx8bTz31lJGYmGh06tTJOHbsmGEYIk8hMzPTSE5ONhYvXmy8//77RlZWlpGQkGDs3bs3zO+gebB06VJj6dKlxm233WYAxnPPPWcsXbrUWL16tf+Y6667zoiPjzcWLFhgfPTRR8Yvf/lLw+VyGRs3bqx1rfvvv99wuVzGH//4R2PVqlXGbbfdZlgsFuPdd99t6rfVaFqEcBiGYezZs8cYP368ERMTY0RHRxtXXXVVvVl7mvp59NFHjd69exuxsbGG3W43kpOTjalTpxoHDx6sddyJEyeMm266yUhISDAiIiKM4cOHG1u2bAlTq5sfNKBYVXl5uXH33Xcb7du3N1wulzFw4EBj1apVda5VU1NjPPLII0bnzp0Np9Np9O7d21i6dGnTvZmfgJ4dq9FoGk2L8HFoNJrmhRYOjUbTaLRwaDSaRqOFQ6PRNBotHBqNptFo4dBoNI1GC4dGo2k0Wjg0Gk2j0cKh0Wgazf8HgVTukgD+V5wAAAAASU
VORK5CYII=\n", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "sdf = th.eb.SignedDistanceField2D(\n", + " th.Variable(batch[\"sdf_origin\"]),\n", + " th.Variable(batch[\"cell_size\"]),\n", + " th.Variable(batch[\"sdf_data\"]),\n", + ")\n", + "figs = theg.generate_trajectory_figs(\n", + " batch[\"map_tensor\"], \n", + " sdf, \n", + " [batch[\"expert_trajectory\"]], \n", + " robot_radius=0.4, \n", + " labels=[\"expert trajectory\"], \n", + " fig_idx_robot=0,\n", + " figsize=(4, 4)\n", + ")\n", + "figs[0].show()\n", + "figs[1].show()" + ] + }, + { + "cell_type": "markdown", + "id": "9f2ccac8", + "metadata": {}, + "source": [ + "The following are some constants that we will use throughout the example" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "id": "9a28ed86", + "metadata": {}, + "outputs": [], + "source": [ + "trajectory_len = batch[\"expert_trajectory\"].shape[2]\n", + "num_time_steps = trajectory_len - 1\n", + "map_size = batch[\"map_tensor\"].shape[1]\n", + "safety_distance = 0.4\n", + "robot_radius = 0.4\n", + "total_time = 10.0\n", + "dt_val = total_time / num_time_steps\n", + "Qc_inv = [[1.0, 0.0], [0.0, 1.0]]\n", + "collision_w = 20.0\n", + "boundary_w = 100.0" + ] + }, + { + "cell_type": "markdown", + "id": "2ec25c27", + "metadata": {}, + "source": [ + "## 2. Modeling the problem" + ] + }, + { + "cell_type": "markdown", + "id": "782b00ce", + "metadata": {}, + "source": [ + "### 2.1. Defining Variable objects" + ] + }, + { + "cell_type": "markdown", + "id": "1245306c", + "metadata": {}, + "source": [ + "Our goal in this example will be to use `Theseus` to produce plans for the maps loaded above. As mentioned in the introduction, we need a 2D pose and a 2D velocity for each point along the trajectory to be optimized. For this, we will create a set of `th.Point2` variables with individual names, and store them in two lists so that they can be later passed to the appropriate cost functions." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 6, + "id": "a3fa4e42", + "metadata": {}, + "outputs": [], + "source": [ + "# Create optimization variables\n", + "poses = []\n", + "velocities = []\n", + "for i in range(trajectory_len):\n", + " poses.append(th.Point2(name=f\"pose_{i}\", dtype=torch.double))\n", + " velocities.append(th.Point2(name=f\"vel_{i}\", dtype=torch.double))" + ] + }, + { + "cell_type": "markdown", + "id": "47d3d711", + "metadata": {}, + "source": [ + "In addition to the optimization variables, we will also need a set of *auxiliary* variables to wrap map-dependent quantities involved in cost function computation, but that are constant throughout the optimization. This includes start/goal target values, as well as parameters for collision and dynamics cost functions." + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "id": "8c050c4a", + "metadata": {}, + "outputs": [], + "source": [ + "# Targets for pose boundary cost functions\n", + "start_point = th.Point2(name=\"start\")\n", + "goal_point = th.Point2(name=\"goal\")\n", + "\n", + "# For collision avoidance cost function\n", + "sdf_origin = th.Point2(name=\"sdf_origin\")\n", + "cell_size = th.Variable(torch.empty(1, 1), name=\"cell_size\")\n", + "sdf_data = th.Variable(torch.empty(1, map_size, map_size), name=\"sdf_data\")\n", + "cost_eps = th.Variable(torch.tensor(robot_radius + safety_distance).view(1, 1), name=\"cost_eps\")\n", + "\n", + "# For GP dynamics cost function\n", + "dt = th.Variable(torch.tensor(dt_val).view(1, 1), name=\"dt\")" + ] + }, + { + "cell_type": "markdown", + "id": "10043837", + "metadata": {}, + "source": [ + "### 2.2. Cost weights" + ] + }, + { + "cell_type": "markdown", + "id": "8c88f7b4", + "metadata": {}, + "source": [ + "Next we will create a series of cost weights to use for each of the cost functions involved in our optimization problem." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 8, + "id": "09ce904c", + "metadata": {}, + "outputs": [], + "source": [ + "# Cost weight to use for all GP-dynamics cost functions\n", + "gp_cost_weight = th.eb.GPCostWeight(torch.tensor(Qc_inv), dt)\n", + "\n", + "# Cost weight to use for all collision-avoidance cost functions\n", + "collision_cost_weight = th.ScaleCostWeight(th.Variable(torch.tensor(collision_w)))\n", + "\n", + "# For all hard-constraints (end points pos/vel) we use a single scalar weight\n", + "# with high value\n", + "boundary_cost_weight = th.ScaleCostWeight(boundary_w)" + ] + }, + { + "cell_type": "markdown", + "id": "a199f5ec", + "metadata": {}, + "source": [ + "### 2.3. Cost functions" + ] + }, + { + "cell_type": "markdown", + "id": "8a906c03", + "metadata": {}, + "source": [ + "In this section, we will now create a `Theseus` objective and add the GPMP2 cost functions for motion planning. First, we create the objective:" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "id": "0f954fdd", + "metadata": {}, + "outputs": [], + "source": [ + "objective = th.Objective(dtype=torch.double)" + ] + }, + { + "cell_type": "markdown", + "id": "a6026c1d", + "metadata": {}, + "source": [ + "#### Boundary cost functions" + ] + }, + { + "cell_type": "markdown", + "id": "a48a2c17", + "metadata": {}, + "source": [ + "Here we create cost functions for the boundary conditions, assign names to them, and add them to the `Objective`. For boundaries, we need four cost functions, and for each we use a cost function of type `th.VariableDifference`. This cost function type takes as input an optimization variable, a cost weight, a target auxiliary variable, and a name. Its error function is the local difference between the optimization variable and the target.\n", + "\n", + "For example, consider the first `VariableDifference` added below (with name `pose_0`). 
This cost function will tell the optimizer to try to bring the value of the variable at `poses[0]` close to that of auxiliary variable `start_point` (which is also a named variable, as explained in Section 2.1). On the other hand, for velocity constraints (for `vel_0`), we don't need to pass a *named* auxiliary variable for the target, since we know that we will want it to be a `torch.zeros(1, 2)`, no matter what the map data is (the robot start with zero velocity).\n", + "\n", + "Finally, all of these cost functions share the same boundary_cost_weight, which as you may recall, is a `ScaleCostWeight(100.0)`." + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "id": "d084676b", + "metadata": {}, + "outputs": [], + "source": [ + "# Fixed starting position\n", + "objective.add(\n", + " th.eb.VariableDifference(poses[0], boundary_cost_weight, start_point, name=\"pose_0\")\n", + ")\n", + "# Fixed initial velocity\n", + "objective.add(\n", + " th.eb.VariableDifference(\n", + " velocities[0],\n", + " boundary_cost_weight,\n", + " th.Point2(data=torch.zeros(1, 2)),\n", + " name=\"vel_0\",\n", + " )\n", + ")\n", + "objective.add(\n", + " th.eb.VariableDifference(\n", + " poses[-1], boundary_cost_weight, goal_point, name=\"pose_N\"\n", + " )\n", + ")\n", + "objective.add(\n", + " th.eb.VariableDifference(\n", + " velocities[-1],\n", + " boundary_cost_weight,\n", + " th.Point2(data=torch.zeros(1, 2)),\n", + " name=\"vel_N\",\n", + " )\n", + ")" + ] + }, + { + "cell_type": "markdown", + "id": "b2eae83c", + "metadata": {}, + "source": [ + "#### Collision cost functions" + ] + }, + { + "cell_type": "markdown", + "id": "f8fee718", + "metadata": {}, + "source": [ + "For collision avoidance, we use a `th.eb.Collision2D` cost function type, which receives the following inputs:\n", + "\n", + "* A single `th.Point2` optimization variable.\n", + "* Auxiliary variables:\n", + " * Three representing signed distance field data (sdf_origin, sdf_data, cell_size).\n", + " * The 
distance within which collision cost is incurred (cost_eps).\n", + "* A cost weight.\n", + " \n", + "Since we need one such cost function for each internal point in the trajectory, we create the cost functions in a loop and pass the corresponding pose variable defined above. " + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "id": "8feaabaa", + "metadata": {}, + "outputs": [], + "source": [ + "for i in range(1, trajectory_len - 1):\n", + " objective.add(\n", + " th.eb.Collision2D(\n", + " poses[i],\n", + " collision_cost_weight,\n", + " sdf_origin,\n", + " sdf_data,\n", + " cell_size,\n", + " cost_eps,\n", + " name=f\"collision_{i}\",\n", + " )\n", + " )" + ] + }, + { + "cell_type": "markdown", + "id": "bb8e094d", + "metadata": {}, + "source": [ + "#### GP-dynamics cost functions" + ] + }, + { + "cell_type": "markdown", + "id": "fd619cbd", + "metadata": {}, + "source": [ + "For ensuring smooth trajectories, we use a `th.eb.GPMotionModel` cost function, which receives the following inputs:\n", + " \n", + "* Four `th.Point2` optimization variables: pose at time t-1, velocity at time t-1, pose at time t, velocity at time t.\n", + "* One auxiliary variable describing the time differential between consecutive time steps.\n", + "* A cost weight (typically of type `th.eb.GPCostWeight`).\n", + "\n", + "We need one such cost function for each pair of consecutive states (pose and velocity), so we add these in a loop and pass the appropriate optimization variables from the lists created above." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 12, + "id": "a91eea8c", + "metadata": {}, + "outputs": [], + "source": [ + "for i in range(1, trajectory_len):\n", + " objective.add(\n", + " (\n", + " th.eb.GPMotionModel(\n", + " poses[i - 1],\n", + " velocities[i - 1],\n", + " poses[i],\n", + " velocities[i],\n", + " dt,\n", + " gp_cost_weight,\n", + " name=f\"gp_{i}\",\n", + " )\n", + " )\n", + " )" + ] + }, + { + "cell_type": "markdown", + "id": "e989600c", + "metadata": {}, + "source": [ + "## Creating the TheseusLayer for motion planning" + ] + }, + { + "cell_type": "markdown", + "id": "f1c34e05", + "metadata": {}, + "source": [ + "For this example, we will use Levenberg-Marquardt as the non-linear optimizer, coupled with a dense linear solver based on Cholesky decomposition." + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "id": "a57a1525", + "metadata": {}, + "outputs": [], + "source": [ + "optimizer = th.LevenbergMarquardt(\n", + " objective,\n", + " th.CholeskyDenseSolver,\n", + " max_iterations=50,\n", + " step_size=1.0,\n", + ")\n", + "motion_planner = th.TheseusLayer(optimizer)\n", + "motion_planner.to(device=device, dtype=torch.double)" + ] + }, + { + "cell_type": "markdown", + "id": "3d6270f5", + "metadata": {}, + "source": [ + "## 3. Running the optimizer" + ] + }, + { + "cell_type": "markdown", + "id": "0a3a300b", + "metadata": {}, + "source": [ + "Finally, we are ready to generate some optimal plans. We first initialize all auxiliary variables whose values are map dependent (e.g., start and goal positions, or SDF data). We also provide some sensible initial values for the optimization variables; in this example, we will initialize the optimizaton variables to be on a straight line from start to goal. 
The following helper function will be useful for this:" + ] + }, + { + "cell_type": "code", + "execution_count": 14, + "id": "b5d7e8a7", + "metadata": {}, + "outputs": [], + "source": [ + "def get_straight_line_inputs(start, goal):\n", + " # Returns a dictionary with pose and velocity variable names associated to a \n", + " # straight line trajectory between start and goal\n", + " start_goal_dist = goal - start\n", + " avg_vel = start_goal_dist / total_time\n", + " unit_trajectory_len = start_goal_dist / (trajectory_len - 1)\n", + " input_dict = {}\n", + " for i in range(trajectory_len):\n", + " input_dict[f\"pose_{i}\"] = start + unit_trajectory_len * i\n", + " if i == 0 or i == trajectory_len - 1:\n", + " input_dict[f\"vel_{i}\"] = torch.zeros_like(avg_vel)\n", + " else:\n", + " input_dict[f\"vel_{i}\"] = avg_vel\n", + " return input_dict" + ] + }, + { + "cell_type": "markdown", + "id": "a92ba8eb", + "metadata": {}, + "source": [ + "Now, let's pass the motion planning data to our `TheseusLayer` and start create some trajectories; note that we can solve for both trajectories simultaneously by taking advantage of Theseus' batch support. For initializing variables, we create a dictionary mapping strings to `torch.Tensor`, where the keys are `th.Variable` names, and the values are the tensors that should be used for their initial values. " + ] + }, + { + "cell_type": "code", + "execution_count": 15, + "id": "3e7e3efe", + "metadata": { + "scrolled": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Nonlinear optimizer. Iteration: 0. Error: 3905.6714736579306\n", + "Nonlinear optimizer. Iteration: 1. Error: 2282.2513944732996\n", + "Nonlinear optimizer. Iteration: 2. Error: 136.22542880566306\n", + "Nonlinear optimizer. Iteration: 3. Error: 50.78312661081272\n", + "Nonlinear optimizer. Iteration: 4. Error: 4.300118887811325\n", + "Nonlinear optimizer. Iteration: 5. Error: 14.845572440720407\n", + "Nonlinear optimizer. 
Iteration: 6. Error: 2.369851877850203\n", + "Nonlinear optimizer. Iteration: 7. Error: 2.1764857434372713\n", + "Nonlinear optimizer. Iteration: 8. Error: 163.4663704087572\n", + "Nonlinear optimizer. Iteration: 9. Error: 2.213071348805167\n", + "Nonlinear optimizer. Iteration: 10. Error: 181.50298247224939\n", + "Nonlinear optimizer. Iteration: 11. Error: 2.2187748890964727\n", + "Nonlinear optimizer. Iteration: 12. Error: 169.89215894321373\n", + "Nonlinear optimizer. Iteration: 13. Error: 3.8054432326819017\n", + "Nonlinear optimizer. Iteration: 14. Error: 132.10105333886705\n", + "Nonlinear optimizer. Iteration: 15. Error: 1.8300133777058225\n", + "Nonlinear optimizer. Iteration: 16. Error: 137.4093184144317\n", + "Nonlinear optimizer. Iteration: 17. Error: 2.3630850988841323\n", + "Nonlinear optimizer. Iteration: 18. Error: 127.73261860664243\n", + "Nonlinear optimizer. Iteration: 19. Error: 1.6962584333306858\n", + "Nonlinear optimizer. Iteration: 20. Error: 133.13349342241605\n", + "Nonlinear optimizer. Iteration: 21. Error: 2.377931216962695\n", + "Nonlinear optimizer. Iteration: 22. Error: 100.65554701995455\n", + "Nonlinear optimizer. Iteration: 23. Error: 1.548420954814084\n", + "Nonlinear optimizer. Iteration: 24. Error: 79.11945618447689\n", + "Nonlinear optimizer. Iteration: 25. Error: 1.5620953068548142\n", + "Nonlinear optimizer. Iteration: 26. Error: 60.76672698084362\n", + "Nonlinear optimizer. Iteration: 27. Error: 1.4306538819861685\n", + "Nonlinear optimizer. Iteration: 28. Error: 1.3989819422748666\n", + "Nonlinear optimizer. Iteration: 29. Error: 1.1621472075446573\n", + "Nonlinear optimizer. Iteration: 30. Error: 1.136888025102291\n", + "Nonlinear optimizer. Iteration: 31. Error: 1.10851435524545\n", + "Nonlinear optimizer. Iteration: 32. Error: 1.0931716928138897\n", + "Nonlinear optimizer. Iteration: 33. Error: 1.080293575031519\n", + "Nonlinear optimizer. Iteration: 34. Error: 1.0712316967724433\n", + "Nonlinear optimizer. Iteration: 35. 
Error: 1.0644195614703333\n", + "Nonlinear optimizer. Iteration: 36. Error: 1.0603135136138322\n", + "Nonlinear optimizer. Iteration: 37. Error: 1.0569988398213175\n", + "Nonlinear optimizer. Iteration: 38. Error: 1.0549630384559179\n", + "Nonlinear optimizer. Iteration: 39. Error: 1.0530582611647814\n", + "Nonlinear optimizer. Iteration: 40. Error: 1.0515998330960084\n", + "Nonlinear optimizer. Iteration: 41. Error: 1.0504246485285222\n", + "Nonlinear optimizer. Iteration: 42. Error: 1.0497652871900054\n", + "Nonlinear optimizer. Iteration: 43. Error: 1.049020035137605\n", + "Nonlinear optimizer. Iteration: 44. Error: 1.0485510939176668\n", + "Nonlinear optimizer. Iteration: 45. Error: 1.0480953887943318\n", + "Nonlinear optimizer. Iteration: 46. Error: 1.0477923907778108\n", + "Nonlinear optimizer. Iteration: 47. Error: 1.0475756086931343\n", + "Nonlinear optimizer. Iteration: 48. Error: 1.0474201617339554\n", + "Nonlinear optimizer. Iteration: 49. Error: 1.0473066061954062\n", + "Nonlinear optimizer. Iteration: 50. Error: 1.0472228497233842\n" + ] + } + ], + "source": [ + "start = batch[\"expert_trajectory\"][:, :2, 0].to(device)\n", + "goal = batch[\"expert_trajectory\"][:, :2, -1].to(device)\n", + "planner_inputs = {\n", + " \"sdf_origin\": batch[\"sdf_origin\"].to(device),\n", + " \"start\": start.to(device),\n", + " \"goal\": goal.to(device),\n", + " \"cell_size\": batch[\"cell_size\"].to(device),\n", + " \"sdf_data\": batch[\"sdf_data\"].to(device),\n", + "}\n", + "planner_inputs.update(get_straight_line_inputs(start, goal)) \n", + "with torch.no_grad(): \n", + " final_values, info = motion_planner.forward(\n", + " planner_inputs,\n", + " track_best_solution=True,\n", + " verbose=True,\n", + " damping=0.1, # keyword arguments for optimizer.optimize()\n", + " )" + ] + }, + { + "cell_type": "markdown", + "id": "121301ea", + "metadata": {}, + "source": [ + "## 4. 
Results" + ] + }, + { + "cell_type": "markdown", + "id": "14063cc0", + "metadata": {}, + "source": [ + "After the optimization is completed, we can query the optimization variables to obtain the final trajectory and visualize the result. The following function creates a trajectory tensor from the output dictionary of `TheseusLayer`." + ] + }, + { + "cell_type": "code", + "execution_count": 16, + "id": "79323fe1", + "metadata": {}, + "outputs": [], + "source": [ + "def get_trajectory(values_dict):\n", + " trajectory = torch.empty(values_dict[f\"pose_0\"].shape[0], 4, trajectory_len, device=device)\n", + " for i in range(trajectory_len):\n", + " trajectory[:, :2, i] = values_dict[f\"pose_{i}\"]\n", + " trajectory[:, 2:, i] = values_dict[f\"vel_{i}\"]\n", + " return trajectory" + ] + }, + { + "cell_type": "markdown", + "id": "98e36cc9", + "metadata": {}, + "source": [ + "Let's now plot the final trajectories" + ] + }, + { + "cell_type": "code", + "execution_count": 17, + "id": "5578fd99", + "metadata": {}, + "outputs": [ + { + "data": { + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAZsAAAGQCAYAAAB4X807AAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8vihELAAAACXBIWXMAAAsTAAALEwEAmpwYAAB09UlEQVR4nO2dd5hU5dn/PzM7MzvbWHbpHRRBXBDkxYL+FLEgGsFYsGLUqGgUjahYYtfwooKxxWjQgDGxIBoFjYIKYhJ9kzerQX3BglQBlbawdfr5/fGce58zLFV22HZ/rmuunTltnqPs+e7dfY7jOCiKoihKBvE39AIURVGU5o+KjaIoipJxVGwURVGUjKNioyiKomQcFRtFURQl46jYKIqiKBmnwcTm22+/5ayzzqKwsJBWrVpxxhlnsHr16oZajqIoipJBfA1RZ1NdXc3AgQPJzs7m17/+NT6fj9tvv53q6mo+++wz8vLy9vWSFEVRlAwSaIgvffrpp1m+fDlfffUVvXv3BuDggw/mgAMO4Pe//z3XX399QyxLURRFyRANYtkcf/zxRCIRPvzww7Ttw4YNA+CDDz7Y10tSFEVRMkiDWDaLFy/mtNNOq7O9pKSEWbNm7fL8tm3b0rNnzwysTFEURfmxrFy5ko0bN253X4OIzebNmykqKqqzvbi4mLKysl2e37NnT0pLSzOxNEVRFOVHMmTIkB3uaxCx+TFMmzaNadOmAbBhw4YGXo2iKIqyJzRI6nNRUdF2LZgdWTwA48aNo7S0lNLSUtq1a5fpJSqKoij1SIOITUlJCYsXL66zfcmSJRx00EENsCJFURQlkzSIG2306NHceOONLF++nP322w8wgaUPP/yQ+++/vyGWpCjKjyAej7NmzRoikUhDL0XZh4TDYbp27UowGNztcxok9bmqqoqBAweSk5NTW9R5xx13UFFRwWeffUZ+fv5Ozx8yZIgmCChKI2DFihUUFBTQpk0bfD5fQy9H2Qc4jsOmTZuoqKigV69eaft29mxuEDdaXl4eCxYsoE+fPlx44YVccMEF9OrViwULFuxSaBRFaTxEIhEVmhaGz+ejTZs2e2zNNlg2Wvfu3Xn11Vcb6usVRaknVGhaHj/m/7l2fVYURVEyTpOps1EUpfHTcWpHfqj6od6u1yGvA9/f+H29Xe/YY49l6tSpOy0+fP311+nTp09tZuydd97JMcccwwknnLBX371o0SLWrVvHKaecskfnrVu3jmuvvZZXXnllj7/z2WefZcSIEXTu3HmPz61v1LJRFKXeqE+hycT1dofXX3+dJUuW1H6+995791powIjNW2+9td19iURih+d17tz5RwkNGLFZt27dHp2TTCZ/1HftChUbRVGaLFVVVfzkJz9h4MCB9O/fn5kzZwIwf/58DjnkEAYMGMDPf/5zotFonXO9yUivvPIKF198MR999BFz5sxh4sSJDBo0iGXLlnHxxRfXPux3dN2ePXty1113MXjwYAYMGMCXX36Z9l2xWIw777yTmTNnMmjQIGbOnMndd9/NhRdeyFFHHcWFF17IypUrOfrooxk8eDCDBw/mo48+AkxZSP/+/QEjBBMnTuTQQw/l4IMP5ve//33tdzzwwAMMGDCAgQMHcsstt/DKK69QWlrKBRdcwKBBg6ipqdnp+m+++WYGDx7M/fffz+DBg2uvu3Tp0rTPPxYVG0VRmixz586lc+fOfPrpp/zf//0fI0eOJBKJcPHFFzNz5kw+//xzEokETz755G5d78gjj2T06NFMmTKFRYsWsf/++9fu29V127ZtyyeffMIvfvELpk6dmnbdUCjEvffeyznnnMOiRYs455xzAFPI/t577/Hiiy/Svn173n33XT755BNmzpzJtddeW2d9f/jDHygsLOTf//43//73v3n66adZsWIFb7/9NrNnz+Zf//oXn376KTfddBNnnXUWQ4YM4fnnn2fRokX4fL6drr9NmzZ88skn3HbbbRQWFrJo0SIAZsy
YwSWXXLLb/092hIqNoihNlgEDBvDuu+9y88038/e//53CwkK++uorevXqRZ8+fQC46KKL+Nvf/rbX37Wr655xxhkA/Nd//RcrV67crWuOHj2anJwcwBTIXn755QwYMIAxY8akufKEd955h+eee45BgwZx+OGHs2nTJpYuXcp7773HJZdcQm5uLmCaGu/p+kUAAS677DJmzJhBMplk5syZnH/++bt1PztDEwQURWmy9OnTh08++YS33nqL22+/neOPP36740u2hzd9tz46IGRnZwOQlZW10xiMF+9U4ocffpgOHTrw6aefkkqlCIfDdY53HIfHH3+ck046KW37vHnz9mLldddy5plncs8993DcccfxX//1X7Rp02avr6+WjaIoTZZ169aRm5vL2LFjmThxIp988gl9+/Zl5cqVfPPNNwD86U9/qh3M6KVDhw588cUXpFIpXnvttdrtBQUFVFRU1Dl+d6+7I3Z0XWHr1q106tQJv9/Pn/70p+0G6k866SSefPJJ4vE4AF9//TVVVVWceOKJzJgxg+rqasA0Nd72O/dk/eFwmJNOOolf/OIX9eJCAxUbRVHqkQ55Hfbp9T7//HMOO+wwBg0axD333MPtt99OOBxmxowZjBkzhgEDBuD3+7nyyivrnHv//fdz6qmncuSRR9KpU6fa7eeeey5TpkzhkEMOYdmyZbXbd/e6O2L48OEsWbKkNkFgW6666ir++Mc/MnDgQL788ss0S0OssMsuu4yDDjqIwYMH079/f6644goSiQQjR45k9OjRDBkyhEGDBtXGjC6++GKuvPJKBg0ahOM4e7T+Cy64AL/fz4gRI3b7HndGg/RG21u0N5qiNA6++OIL+vXr19DLaNZ8/PHHXH/99XzwwQf79HunTp3K1q1bue+++7a7f3v/73f2bNaYjaIoSiOltLSU888/f593wz/99NNZtmwZCxYsqLdrqtgoiqI0UoYMGcLXX3+9z7/XG8OqLzRmoyiKomQcFRtFURQl46jYKIqiKBlHxUZRFEXJOCo2iqLUHx0BXz2+OmZmmQsXLuTUU08FYM6cOfs82wtg8uTJ9O7dm759++6yA8C1117b5KcYazaaoij1R31PBNiD6zmOg+M4+P179jf06NGjGT169B4ubO9YsmQJL730EosXL2bdunWccMIJfP3112RlZdU5trS0lLKysn26vkyglo2iKE2WlStX0rdvX372s5/Rv39/vv32W37xi18wZMgQSkpKuOuuu2qPnTt3LgceeCCDBw/mL3/5S+32Z599lvHjxwOkjRMAO4bgu+++45hjjmHQoEH079+fv//973u17tmzZ3PuueeSnZ1Nr1696N27N//7v/9b5zgZKfDggw/u1fc1BtSyURSlSbN06VL++Mc/csQRRwAwadIkiouLSSaTHH/88Xz22Wf06dOHyy+/nAULFtC7d++0Dse7wwsvvMBJJ53EbbfdRjKZrO1B5mXChAm8//77dbafe+653HLLLWnb1q5dW7tegK5du7J27do65/72t79l9OjRae10mioqNoqiNGl69OiR9uB++eWXmTZtGolEgu+++44lS5aQSqXo1asXBxxwAABjx45l2rRpu/0dhx56KD//+c+Jx+P89Kc/ZdCgQXWOefjhh/f6XrysW7eOWbNmsXDhwnq9bkOhbjRFUZo03oaVK1asYOrUqcyfP5/PPvuMn/zkJ3s0PiAQCJBKpQBIpVLEYjEAjjnmGP72t7/RpUsXLr74Yp577rk6506YMIFBgwbVeW0v+aBLly58++23tZ/XrFlDly5d0o75z3/+wzfffEPv3r3p2bMn1dXV9O7de7fvpbGhlo2iKM2G8vJy8vLyKCws5IcffuDtt9/m2GOP5cADD2TlypUsW7aM/fffnxdffHG75/fs2ZOPP/6Ys88+mzlz5tS28l+1ahVdu3bl8ssvJxqN8sknn/Czn/0s7dw9sWxGjx7N+eefz/XXX8+6detYunQphx12WNo
xP/nJT/j+++9rP+fn59eOB2iKqNgoilJ/dKB+M9L2cGLBwIEDOeSQQzjwwAPp1q0bRx11FGDGA0ybNo2f/OQn5ObmcvTRR293tszll1/OaaedxsCBAxk5cmSt1bRw4UKmTJlCMBgkPz9/u5bNnlBSUsLZZ5/NQQcdRCAQ4IknnqjNRDvllFN45pln6Ny58159R2NDRwwoivKj0REDLZc9HTGgMRtFURQl46jYKIqiKBlHxUZRlL2iCXrilb3kx/w/V7FRFOVHEw6H2bRpkwpOC8JxHDZt2kQ4HN6j8zQbTVGUH03Xrl1Zs2YNGzZsaOilKPuQcDhM165d9+gcFRtFUX40wWCQXr16NfQylCaAutEURVGUjKNioyiKomQcFRtFURQl46jYKIqiKBlHxUZRFEXJOCo2iqIoSsZRsVEURVEyjoqNoiiKknFUbBRFUZSMo2KjKIqiZBwVG0VRFCXjqNgoiqIoGadexeaVV17hzDPPpEePHuTk5NC3b19uvfXWOrO+y8rKuOyyy2jbti15eXmccMIJfP755/W5FEVRFKURUa9iM3XqVLKysvjv//5v5s6dyy9+8QuefPJJTjzxRFKpFGBmIYwaNYq5c+fy+OOP8+qrrxKPxxk+fDhr1qypz+UoiqIojYR6HTHwxhtv0K5du9rPw4YNo7i4mIsuuoiFCxdy3HHHMWfOHD788EMWLFjA8OHDARg6dCi9evXiwQcf5LHHHqvPJSmKoiiNgHq1bLxCIxx66KEArF27FoA5c+bQuXPnWqEBKCwsZNSoUcyePbs+l6MoiqI0EjKeIPDBBx8A0K9fPwAWL15M//796xxXUlLC6tWrqayszPSSFEVRlH1MRsVm7dq13HnnnZxwwgkMGTIEgM2bN1NUVFTn2OLiYsAkDyiKoijNi4yNha6srOS0004jEAgwY8aMvb7etGnTmDZtGoDOO1cURWliZMSyqampYdSoUSxfvpx58+bRtWvX2n1FRUXbtV42b95cu397jBs3jtLSUkpLS7cbG1IURVEaL/UuNvF4nLPOOovS0lLeeustBgwYkLa/pKSExYsX1zlvyZIldO/enfz8/PpekqIoitLA1KvYpFIpLrjgAhYsWMDrr7/OEUccUeeY0aNHs3bt2trEAYDy8nLeeOMNRo8eXZ/LURRFURoJ9Rqzufrqq5k1axa33XYbeXl5/POf/6zd17VrV7p27cro0aMZOnQoY8eOZcqUKRQVFTF58mQcx+Gmm26qz+UoiqIojQSf4zhOfV2sZ8+erFq1arv77rrrLu6++27AxGduvPFGXn/9dSKRCEOHDuU3v/kNAwcO3K3vGTJkCKWlpfW1bEVpVDiOg+M4pFIp6vHXs97w+XxkZWXh8/kaeilKI2Nnz+Z6FZt9hYqN0pxJJpMsXryY//znP0QikYZeTh06duzI4YcfTseOHRt6KUojY2fP5oylPiuK8uNIJBL861//4rHHHqvN0mxMHHbYYXTu3FnFRtkjVGwUpRFSVVXF999/z8aNGxt6KXXYuHEj8Xi8oZehNDF0no2iKIqScVRsFEVRlIyjYqMoiqJkHBUbRVEUJeOo2CiKoigZR8VGURRFyTgqNoqiKErGUbFRFEVRMo6KjaIoipJxVGwURVGUjKPtapQWiXRWbox9aJPJJKlUqqGXoSj1ioqN0iKprKxk0aJFrFy5stEJTiKR4OOPPyYajTb0UhSl3lCxUVokZWVlvPrqq8yZM6dRWhHl5eVUVVU19DIUpd5QsVFaJPF4nA0bNjRKy0ZRmiOaIKAoiqJkHBUbRVEUJeOo2CiKoigZR8VGURRFyTgqNoqiKErGUbFRFEVRMo6KjaIoipJxVGwURVGUjKNFnc2csrIyVq5cSWVlZUMvpQ7Z2dl0796dDh064PP5Gno5iqJkEBWbZs7SpUt56qmn+PLLLxt6KXVo3749l156KaeccgpZWVkNvRxFUTKIik0zZ+vWrXz22Wd8/PHHDb2UOnTp0oV
TTz1V28UoSgtAYzaKoihKxlGxURRFUTKOio2iKIqScVRsFEVRlIyjYqMoiqJkHBUbRVEUJeNo6rOiKEp9Ewe808YDQAsvJVOxURRF2VtSwFZgM7AFiACOuz0LCAF5QCFQDOQ0yCobFBUbRVGUH0sS+AFYC2zCCEyW+9MBfJhgRQQjRuswQlMEdAMK9v2SGwoVG0VRlB9DFfAFRkDAiEoKqHE/+7BiIy/Z9h1GnHoAnYDgPlt1g6FioyiKsqd8D3wKVGAsmSRGaFIYMfFaNQ5WbLKABFZcvsa43g4Ewvtu+Q2Bio2iNDJ8Ph/t27enc+fOBION70/efv36kZ+f39DLaDjWAP/ECEvC3SZCs702f1kY4Qm4x4s4Jd33G4HFQAnNWnBUbBSlkREIBDjqqKO44IILKCwsbOjl1KF169Z07969oZfRMGwE/geIYYQliREQH3XFRlxmsi2OzUrLco8PuPs3A18CA2i2WWsqNorSyPD5fHTr1o2jjz6adu3aNfRyFCEG/AMT6A+4n8GKjLjO5Kcg1YwiIgn3/NQ2536HSRzokbE7aFBUbBRFUXaHT4HVGFdXBOsK257QiDUjohNyt4nQgK3DkWODwHJManQzzFJTsVEURdkVW4F/YayUGEY0RGQgXXQgPSFAznGwT9wEVoBCnu/xYwStJEP30YBouxpFUZRdsQRYj4m7RDBiEfe8RGy8cZokEAWq3eP9nm1gRSvmbo+7739wv6OZkVGxGTlyJD6fj9tvvz1te1lZGZdddhlt27YlLy+PE044gc8//zyTS1EURflxxIG/Y9xnCdKFJoYRBvlZ43lFsRlnSazo4O5PudcQwZHrVmISBpoZGRObF198kU8//bTOdsdxGDVqFHPnzuXxxx/n1VdfJR6PM3z4cNasWZOp5SiKovw4VmCC9w5GFGJYkZC4izcTTeprcI+rcc/xu+dEsK41r+BIrU4Sk/XWzMiI2JSVlTFhwgR+85vf1Nk3Z84cPvzwQ/70pz9x3nnnMXLkSObMmUMqleLBBx/MxHIURVF+PP+LFRYRhRjGcpHXtpZODbY/mqRFR7BFnzXuzyjW5eYVsDK2X7PThMmI2Nx8883079+f8847r86+OXPm0LlzZ4YPH167rbCwkFGjRjF79uxMLEdRFOXHUY6p8g9gxECExdspQKwRsXLcxIB3s97lweSD/FDzg9nvw1pGPoz4yDZxz4mg1bjXbEbUezbaP/7xD5577rntutAAFi9eTP/+/etsLykp4bnnnqOysrJlVycritJ4+ALTkkbcXT7SxQbPT28KNPBQ7kPMC8wj25fNL+O/NCKSjRWRELbQU2puvO4274iCZkC9WjaxWIwrrriCG2+8kb59+273mM2bN1NUVFRne3FxMWBccNtj2rRpDBkyhCFDhrBhw4b6W7SiKMr2iGLEBmzwXgo5xTUmHQREQNyU5+VZy5mXNY9sJ5ux0bG2ZY1YM+I2E8tIBMx7zWZGvYrNgw8+SE1NDbfddlt9XhaAcePGUVpaSmlpqVZVK4qSeb7FdGaW5pmSIBDButOksFNSot14zVP+pwA4O3k2bZJtjFtMxg+I4IjYiBUj6dNiQTWzKsh6u53Vq1czadIknnnmGaLRKNFotHZfNBply5YtFBQUUFRUtF3rZfNmk+u3PatHURRln5LE1NbIQ1+sGm8djbi5ZJtrvVQ71TwTegaA8dXjbZuaCMZ1FnDfZ2NES3qtxrFZark0uyrIehOb5cuXE4lEGDt2bJ19U6dOZerUqfznP/+hpKSEd955p84xS5YsoXv37hqvUfYJ2dnZdO/enYMPPhjHaVxpP6FQiC5duhAINLM/bZsSP2CKOKUTgFgg4i7z7eA84M/5f6bMX8ZhicM4LHaY2ZiNEZ24e5DEZbxCE8IIWBToUG930miot3/NgwYN4v3336+zffjw4YwdO5ZLL72U3r17M3r0aGbMmMEHH3zAsGHDACgvL+eNN97g/PPPr6/lKMpOKS4
u5pxzzuHoo49udGLj9/vp0aMHubm5Db2UlkkSk4Hm7QrgYEQgm7opyWLZYOoIH8l5BIBrq6611olYMNLAMwdb8Cmi443dqNjsmNatW3Psscdud1+PHj1q940ePZqhQ4cyduxYpkyZQlFREZMnT8ZxHG666ab6Wo6i7JTc3FwGDhzIwIEDG3opO8Tn28mfz0rm2Iwp4hSRCQB52Kr+7f1t4rrR5uXO44vgF3ROdmZM1RizLxvrHsMcRwwjMJISLUWh1RihUbHZe/x+P2+++SY33ngjV111FZFIhKFDh/L+++/TrVu3fb0cpQWjD3OlDg7wDbbYUjLEJJssyfZHOLuFmQ/lPATANVXXEMoK2WJOrxsty3NdmeIphZ3lwAkZurcGJuNisz0XRXFxMdOnT2f69OmZ/npFUZTdZytm5LM0zUxhYilhjOurDBO8hzqJAYuyF/Feznvkp/K5csuV5hoyLC3mXke6BmRj3WciahXuteuWITYLNALZAvD7/fj9jS+1xe/3q3WhNB4czDyZKup2CHAwrjTpAJBNevcA4MFi027rsqrLaO1rnS5WfozI5HjO8V4/jhGys7Fi1sxQsWnmdOrUiZNPPpkDDzywoZdSh+LiYvbff38VHKVxUA6sxTwVoxjLI8v9LJX/Oe6+Gneb23RzRWAFL+e/TMAJMGHTBNuMU7LL5FixcLzzbBIYq6YLcMi+udWGQMWmmbP//vtzxRVXEIvFdn3wPiYrK4vWrVs3SqtLaWGIVSMTOLfXIUBcZnkYd1sFtbGYKW2mkPQl+VnFz+ge726Hokl8Rqwhbw81yXCrBloDRwKtMn6nDYaKTTMnJyeHnJychl6GojRuKkiP1cjfZgFsXEWslSBGRKqAGvgu+zumtzLx55s332wD/pJxFsCKltTXBDBzaxyM0PQABmX2FhsaFRtFUVo2DmZmjbSgka4B3tEB3pdYLW4CwG8Kf0PUH+WMLWdwUOVB5tyAu1+6Asj7EMaSycK45NpjLKWDMKLTjFGxURSlZVOJ6RgANq5Sg7VAxO2Vhc0sA0jCxvBGnmzzJAC3lt1qBCrinhdyj6vwXCcG5GPcZQXuqzVQkplba0yo2CiK0nJJYWI10lQzihGcILZLM9jMMamxca2Xh0MPU+Wv4uSakxkSHALtsBYQ2MLNMCbLzIcRmBz3czbQHWiTyZtsHKjYKIrScinHWDXS0l9SlKXxphR3ivtMxCgFm4KbeDzncQDuiN1hCz8lvVnqaES4RGzyMVZPgfvzIJpd083toWKjKErLJAmsxIiCjGoWF5hkowXc7bnY/mautfNw1sNU+CoYkRjB0MBQc4xcK4ytoZG6mSysNZPjXrsbJm7TAlCxaeZUVVWxYcMGIpHIrg/OED6fj8LCQtq0aUMwuL1eH4rSAGwGNmBjKRKTSWHiOGCEBqyVEgPCsCm5iUf9jwJwV/QumzQQwoiYH5vqLO/9GJEJmmsQBvpiRxA0c1RsmjnffPMNzz//PMuXL2+wNfj9fo4//njGjBlTO5FVURqUJGY4mrjHwCYFRLHpy5K2LPvd/mZTs6ZS6avkJOckjsw6Mn3UM9g052yMeInYhLHp1O1pMVYNqNg0e9avX8/7779PaWlpg60hEAhQVFTEqFGjGmwNipLGBswUTrDTN7Owrf6ljiYHKzKuZfMDP/C4z8Rq7nXuNQIi15H2NAGstSOiJSIjmWoH0KKewC3oVlsujWVeS2NZh9LCiQOrMNaHuMekPU2lu18SA6StjKQ758LkmslUOVWMckZxWOow21kghLWUpNGmY86p/SmutnY0yzECO0PFRlGUlsV6TLsZaclXhSm0FGtExCELWzfjus++jX7Lk35TV3Nf4D5zvrSyycaKjLefmnR+FqsmgLFqWlj4UsVGUZSWQwRj1YibDEwsJRfbvyyKsT68DTTdjs33VN9DzIlxjnMOA7MG2phMjPR+aGDca3FsdpvMtGnnvloYKjaKorQc1mFiMDLELIkRoBR
GfAJYKyeB7e4chq8iXzHDmUEWWdwXvM+6zKRfmtTqBLH7crExmqB77P60OKsGVGwURWkpVGAKOKXZZgSbiSbpyClMsaWIjN9974dfJX9FihTjssZxQNYB1oKRPrcy9yYb2zsti/TU5/ZA20zeZOOlBdStKorS4klhUp0lBhPHiIsPmwAgFokUZUpyQCv4p++f/CX5F3LI4a6cu8x+sU4qsMWc0jlg21RnieH0pEVaNaCWjaIoLYFyYCM2YC+utHKM9SGdmFOY5AHPCGcn4HBT8iYArg9cT+eazuYa0g1ALJpc972kTntTn4OYhpst1KoBFRtFUZo7CcwIARGCaoyFE8DWyIjrLBebheYHfDC7ejZ/T/ydtr62TGw10YhHxHMNH9YiysFaSxKjkfk3LTRWI6jYKIrSvFmPEZgYtn1MGNuipgqbKRZ33wNEIR6Mc3PVzQDcFbyLwnihbTsjbrZsjAAlsSOfE9jkAD8m+6yFN89QsVEUpfkSBdZgYyp+jPB4s88SGOGRZIAIRkxy4Knyp/g69TUH+A/gipwrbHNOsM07wbrgfJhhaDKATWpsetJieqDtCBUbRVGaJw6wGtsNoBLb80wSANxpm1S6PwswYlQOZYEy7o7cDcCD4QcJErQTN/MxgpR0r5eHTQ4Q15zU3HSi2U/h3B1UbBSlEZNMJtmyZQvl5eWkUqldn5AhQqEQRUVF5OXl4fP5dn1CY8CbFCDZZgGM5ZLACEEE6wqTGIzr/rpvy31sdjZzbOBYTis4zVwziU0skPk0Uo8jn8XFJhlo3dC8X1RsFKVRU1NTw7x583jnnXeIx+O7PiFDdO7cmTFjxnDooYc22Br2iCTGfRbBPuhjGAsm4W6XkQHSaDPlvg/DV/6veDz+OD58PBR6CF+lz1gzMhog7p4v8Rqpr4m63yWpzx2AwgzfaxNBxUZRGjHRaJRFixYxa9Ysampqdn1Chujbty9HHHFE0xGbH7D9zyIY15gf25pGUqALMHUy3lk2W+GG2A0kSHBZ3mUMbjXYiEsltm4GbLwmgNmf674k4ywX6ILtwdbCUbFRlEaO4zikUqkG7ZrtOE7T6dpdgxEb6d7sHc3sdgOo7fScjYm3VGHEJgfmOnP5a9VfKaCAX4d/bYUoG5utJl0I/BiLR9xyuNd2MLGa/IzfbZNBxUZRlOaDg+l/JgF/sWzKSXebBTBiI21pXBdYLBHjl+W/BODO/DvpkOhghCjk+Y44Ji7jd68h3s0sz3EFQGfUqvGgYStFUZoPZZjBaGLFyHhnSUHOw2agFWCzy2LmnEeqH+HrxNf0DfTl2sJrjWUiiQVh0mtqgtguAgXu94gV1Rlbr6MAKjaKojQXYpikAAn0SzaYtPePed5LxX8+tdbHWtZyX5WZUfNYwWOE4iHrHpM06bB7jrczgHR4Druf8zEuNCUNdaMpitL0cTBxmips239v002fu0+SBGow4iPiUQM3bLyBSqeSn4Z/yojsEbb6P4kRmwpswWYc4zILYzsKyPjn7uiTdTvofxJFUZo+FZhYjR8jKpIpFsS6vPKwCQM+jIvNrb+Z78xnZnQmOeTwSP4j1r0WxrjKcrDpzlLM6U00kNHQrWnxbWl2hLrRFEVp2iQwQiPFlT5sgaVkhqWwM2ak5Uwr8zMaj3L1hqsBuL3odnrk9bC1M5XY4WpiLcXczzLRMw+brdYDfaruALVsFEVp2mwAtrjvvR0CyrE1MBXYgH0E23gzH6asn8JXya/om9WXG0I32HRmaT8j3QDAdnSWdjeSbRbENNvUVOcdomKjKErTpQJYi+1vJtaN9CWTZpjSJFPiOVuAXFjmW8avK34NwFNFT5GdyjYiVYARkJDnvBz3OknPz6DnuG5oqvNOULFRFKVpkgC+w7izpCGmWBySqiwxmhxMFwHJIAuAU+Hwi62/IOpEubDgQo4tOtb2UavEuOLEJebtQCDfI5aSH+iKpjrvAhUbRVGaJhsxFopMxJQMM+kGkMKIRja
2VibmHhuA57Oe5934uxT7ipkanmonbOZgizNz3J8J970ImMR0gpjYT5uM3mmzQMVGUZSmRwWmpiYLIyDV7kvcZ5KGHMbGcMBYLjWwKbCJCRsnAPBQm4doH2hveql5B57JsDWxhryjo8NYl1w3WvQEzt1FxUZRlKaFZJ/FsTUwMnkzgLFApF1N2D0GbI+zGrj+u+vZmNzI8JzhXNT6Ils7Iw06JfhfhQn6iwDlYsQG93N7jGWj7BIVG0VRmg4Oxn22FTuGuQYjDpKWLDNmxLrJwYiGO6lzLnN5LvocYcL8vuD3+FI+I0JBz3dIVpl0c5a+apIcINlsndCkgN1ExUZRlKZDJcZ9FsC4tSIYl1oQY2nInJosrHXjx4hSFCqo4Ir1VwBwT5t7OCB0gBEU71w6mW8j7rgq7CROmcKZwNTU5KDsJio2iqI0DeKYNGdxd0WxwfwARmTiWKHAPUbiL8Ctm25ldWI1g7MHc33R9cYqEfdbCFtbI6MEQu7nfPc7fe6rGGib0bttdmitq6IojR/pfVaBeeiLG0tSnmPYppkyLTNEWruaDxIf8ETlEwQIML1oOoFEwHYWiHvOyXV/yrhoKQxNYIWtM1bQlN0iI5bNW2+9xf33388nn3yC3++nT58+PPjggxx33HEAlJWVMXHiRF5//XVqamoYOnQoDz/8MAMGDMjEclo0xcXFHH744RQVFTXYGrKysujXrx/Z2VqIoPxIyoDvMeJRjXGRVWCD+VJDI24vGYzm9kGrqqni5z/8HDAtaQaGB1pLRro2exttyjXD2PY3KffVGTtSQNlt6l1sfv/73zN+/HjGjx/PHXfcQSqVYtGiRVRXVwNm4t+oUaNYuXIljz/+OEVFRUyePJnhw4ezaNEiunbtWt9LatEccMABXH311bX//RsCn89H27ZtadVK03aUH0ENRmgSmJhMBOs+k8B+HDssrRVGjKQeJgC3VtzK8sRyDg4dzK1tbjXbJfgvnQG87jTHcy3XBVc7PqAjmhTwI6hXsVm5ciXXXXcdU6ZM4brrrqvdftJJJ9W+nzNnDh9++CELFixg+PDhAAwdOpRevXrx4IMP8thjj9Xnklo8rVq14qCDDmroZSjKjyOF6RJQhREbmUsjYiCuM6mnkeLNHPt5QeUCHt/yOAECPNvhWUIJt2JTBqx5G2mmsF2hZQqnuO2SGKvGO7VT2W3qNWYzffp0/H4/V1555Q6PmTNnDp07d64VGoDCwkJGjRrF7Nmz63M5iqI0ddZjUp2DGCGowvQuK8eIyVaM+6saIxw12NTkMJRHy7lk4yUA3Fl4J4fkHmKsE/HoRt1riqjESJ+Bk4NNDmiHGSGg/CjqVWz+8Y9/cOCBB/LSSy+x//77EwgE6N27N0888UTtMYsXL6Z///51zi0pKWH16tVUVlbW2acoSgukHNPRWeIpNe77LGwgXywZV1yowYpPAK6ruI7VidUMCQ3hlna3WGtFRjkXYS2jXIyw5GLcZ7nuOhz3fRc0pWovqFc32rp161i3bh0TJ07kv//7v9l///2ZNWsW48ePJ5FI8Mtf/pLNmzfTs2fPOucWF5uJQ2VlZeTn1+3TPW3aNKZNmwbAhg0b6nPZSgvBcRzi8TiJRGLXB2cQn89HMBgkKysLn0+d/9sljqmnkVYzkjEmKc7iRvO6z0LUTt0kCa+tf40ZW2YQ9oX5Y+c/EkwEzXk12MFnfuxTUD5L7Ea+NwV0QBtt7iX1KjapVIqKigqeffZZzjjjDACOO+44Vq5cyeTJk7n22mt/9LXHjRvHuHHjABgyZEi9rFdpWUQiEf75z3/y8ccfN6jgFBcXM2zYMPr06dNga2jUpDBC41onta6tSuwAM8k2k7HNPmxwH/i+8nvGbTLPiweKH+Cg/INsS5oo6V2dZUia1NA4mDiOdAsoQmtq6oF6FZs2bdqwdOlSTjzxxLTtI0aMYO7cuXz33XcUFRVRVlZW59zNmzcDNGiKrtK8iUQivP/++zz99NN
EIpEGW8cBBxxAx44dVWy2h4OJ05RjHvQV2EJNsG4z6XcmfzNsxcRXssEJOly65VI2pjZyYvhExuePN9aMt37GHQdda63EsG65BLajQA6m0aa6z/aaehWbkpIS/vnPf+5wv9/vp6SkhHfeeafOviVLltC9e/ftutAUpT5IpVLU1NSwZcuWBhWb8vJyYrHYrg9siVRgijdlFLNkhrkNNMH9nI2dVSMtaSqBGPy25re8VfkWRVlFzOg8A3/Kb9Og5fEibrkc7KhosN0DJEu/A7ZFjbJX1Kten3766QDMmzcvbfvcuXPp2rUrHTt2ZPTo0axdu5YPPvigdn95eTlvvPEGo0ePrs/lKIrSlIgB32KtFj+2uFIyxsTSCWBnykBtvObz+OdM3DARgGc6P0OX/C4mEaDAvW61e768arC90aTuRgSpEHWf1SP1atmccsopDB8+nCuuuIKNGzey3377MWvWLN555x1mzJgBwOjRoxk6dChjx45lypQptUWdjuNw00031edyFEVpKiQxfc/E4NyC7bQcx1owktos50QxApEFNYEaztt8HlGiXJZ3GWcEzjDHSOFnK/daee75Kay1Ix2iJfEgG+iOus/qkXoVG5/Px+uvv86tt97KXXfdRVlZGQceeCDPP/88559/PmBcaW+++SY33ngjV111FZFIhKFDh/L+++/TrVu3+lyOoihNAYnTbMUOQJOssyxs4aYUXoLNIqvGCFMuXL/pehZHF9Mn1IdHOj9ihKgcIzJZ2ASApOc6Uk8j1o3EdDT7rN6p93Y1rVq14oknnkirrdmW4uJipk+fzvTp0+v76xVFaWpsxVg1Uj8j0zAlrCWDzGRks2Sl+TBWSgBmbZjFU5ufIuQLMbPHTPJy8mzdjfQ4g7qZZ9LAM4ztg9YKdZ9lAB0xoChKw1GFERpJYd6KEZUI1qWVwrrVvCOb3TTnFb4VXL7lcgAeKnyIQcFBZoc00JREAK+IbW98AO52zT7LCPqfVFGUhiGOGe8cozaTrNa6kSyxbTsFOBhhcOtuYskY56w6h62prZzW6jSuLr7aCJaIFVjxkkLOELVTO2vn1/jc7dr7LGOoZaMoyr5HEgK2YB76XjeXFGmKWGyvU0AAKIeJqyby75p/0yPYg+ldp+ML+GyzTm/dbtw9X2p0Ilhhy3ePL8IMRVMygoqNoij7FhmEthUjHhXYQkoZ8SxNN8WikVYyKfenH17lVR6rfIwgQV7u8jLFAVcppDZHija91ou331lrbOpzPsaq0e5BGUPFRlGUfYeDGYS2ERO4B5sRJpM1ZS6NDyM4MolTrJ4wLI0t5edrzDC0qe2ncljiMCMc0sJG2tyIFSSfg57ridsuB+iKPg0zjP7nVRRl31GF6XvmDcjLpEywVo1YJpLyLIkCSajaUsUZ68+gPFXOWYVncU2Ha2zgX4aigbFmajBi4mCHr/nda0ryQDE6eXMfoGKjKI0Yn89HIBAgOzubVCq16xMyRCgUIisra9cH7owaTIcA6QAgBZtV2E7LUqgp2WO5WHeaH5yww7jvx/F/0f+jb7CvidP4fLZw04/NLkt6rivile/5njjGldZ+725L2T1UbBSlEZOdnc3QoUNJJBLE4/Fdn5AhOnTowH777ffjLxAHVmOD847705uCLBZMlfsS11qA2i4Cv930W16oeIE8fx5/6fAXCmIF1gKS4xKku+PkvdTtxLB1NV2wTT6VjKJioyiNmJycHI477jiOPPJIHMfZ9QkZIisri7y8vF0fuD0k86wC2xZGamXkQS8xmizsTJogdoJmCj6o+oAJ6yYA8Ieuf+CgVgfZa0jmWQqbdYb7fdKEM+F+b7a7vxPaZHMfomKjKI0Yv99Pfn5+0+2GnsLU0mzBPOSr3G1RTIKAZI5JtphknonIuMH9b6u/ZcyaMSRJMrHtRM5pfY65ftD9ngL3uilqh6fVxmayMJ0GpFNADNOORqeZ7FNUbBRFyQwpzFjncoy14XPfi7tMBMHBdgk
Qt5oIkx9qnBrOWHsGG1IbODH3RCYXT7ZxnyxsXEZ6mYn7LYxt2iluNAeTENABTXPex6jYKIpS/zgY8ViDbUWTg42viCB43WfiChO3VxCcmMNl311GaU0pPYM9ebHbi2T5suyQM2lpEyG9M4CkPUuYK4lJNghj3Gf65Nvn6H9yZbdxHIfy8nLKyspIJpO7PiFDZGVlUVRURKtWrUwmktL42IJtrilNsWSGjKcws3Z8gA8jRhKrcVOYH9j0AC9sfYF8fz5zes6hTXYbIxwOdkaNpDR7YzUxd3+Y9OLODtjCTmWfomKj7DaJRIKPPvqI2bNnU1FR0WDraNWqFT/96U85/vjjCQT0n3CjowITp5HMs6j7ysIOLBP3WSXp45olmywAb9S8wa82/AofPv7c5c8MyBpgzhHXmTTU9HYYECGRQWgyzyaFsWhaZ/TOlZ2gv6nKbpNKpfjqq6947bXX2LBhQ4Oto3379hx00EEMHz68wdag7IBqTC1NDBNzkbhJCpuKLIkBYAsvvXGXGvi06lPOW30eDg6T2k/itFan2WFqDkZYakh3nUmSgcSAJMkgDyM87dA4TQOiYqPsMalUqkHTcBvyu5WdIEWbEWzjTLfqvzYZYNs+8znUWjLSD+07/3ecuupUqlJVXND6Am7tcKs5Jol1nYmFJG43SG+26XePjbqfO6P1NA2Mio2iKHtPFJMMEMdYHxFsU804xpoREZCiTnF95VA7obOKKkYtG8WaxBqOyj2KP7T/Az589jregs0Q6WOdExiBCbjvE+4xXdF6mkaAio2iKHtHDNMdoBIbj5H2MZJ9JokAIjLl7s8wtfGaZCzJBesu4OPIx+wX3I/XerxGdm62uaYYszVYd5zEbryzaaSIU+prOmHcaEqDo2KjKMqPJ4axaMoxcRGJvXiba0qcRNKUpfkmGGGIgZPtcN3a65hdPpuirCLe7PUm7WhnBERcZzGMBeR1nSWw4iIxG0mxboWpqdE4TaNAxUZRlB9HHJPeLJZHJXasc8r9HMLGaSR7TObKVGJEIQkPr3mY3276LSFfiNe7vE6/UD9bPxPwvMR1luu5Zo57XRnxXIMRms7UjREpDYaKjaIoe04CO2lTYiniuhJBEItDkgQq3eMkXTkM1MDM6pncsP4GAP7Y448c0/YY23IGcwzSrUc6AXjrdyQWJNZPAabBpj7dGhX6v0NRlD0jgXGdbcIIhgiIuMnAxlTAipCIgQTus+G98ve4cPWFADzQ8QHODZ9rzsl3j4th4zQSq5HJm1IUKsPXCjCWUmc0IaARomKjKMruk8CkN1djHvAxjOtMijQrMA96sTykbQykpznH4JPyTzh91enEiXNd0XVMbDPR1uHkYxMM5PxsbBq1fA5iREZErgM6CK2Roh5NRVF2D7FoKjAFm9XYGTEST/EOQIu6x0axLWpygDh8k/qGk1efTGWqkvOKz+Oh/R4yKc7uOAGinu+VZIAQJlaT514nTHornA6YhAClUaKWjaIou0aSATZhLAoRDtz3YlkEsBX+YARKEgHctjRrIms4YekJrE+u54S8E3i2w7P4s/zGmoljhMbt+FzbN026AogFFce66UKYcQFt0cyzRoyKjaIoOyeOsWhkTEAS4zoTYZG2NNLyP4oVm1xsSnIVbPBt4MTlJ7IqvorDw4fzWo/XCBEycZk8bI80uZbMuhFrSdrTyLEpjCB1QjsENHJUbBRF2TEiNOUYl1kU83CXOTFidYgQiQtMBqFJmnMVbM3aysnLTubLyJf0z+nPW33fIj+Wby0VKfwE21NNCjLj2BTnOHaUdD6mQ4CcpzRaNGajKMr2iWI6A2zC/lmag7U6pNdZNrZjgLjYtmBiOq7lURms5JTlp9R2B3in5zsUh4tNMF8y2SqwXQDEgvKKmmShxdzvCGNSnDXzrEmglo2iKHWJYHudSUflSqzrTKwXsWC8MZQ80tKca8I1jF41mo9qPqJroCvv7fcenehkLZkwdjKnd0yS9Fn
ztruRRIQsoCPaiqYJoWKjKEo6NRihkdYwKWzsRIh6Pku9i7jNPK6zKFHO+OoM3q96n47Bjiw4YAG9nF5GsKoxlo0kGOS47/OwgiaxmTimK4CITztMUoDSZFA3mqIolipMHY00yvRhp11mYYsrJSFAAvZJoAwrQEGIBqOMWT2GuVVzaZvVlvl95nNAqwNs8WXC/T7poSZdAMA21xQhSniu3RFNcW6CqGWjKIrNKluLTV2W6n1pmikuNT/myRHBdnjOw2Souf3QYtkxzv72bN6ofIPirGLe7fkuBzkH2fTlPIxgVZNuMUljTbGWJJNNXG0FQBv0z+QmiIqN0mIIBoMccMABHH/88USj0V2fkCG6detGhw4dGuz76yBNNNdgHvQSm5G4i+O+ZKKm1L8ksVlgIiA1EIvFOHvl2cwpn0NRVhHz95/PoKxBdky0NNHMwghILraHWhxrSYUwrjNxzeVgrBoVmiaJio3SYsjNzeWUU05hyJAhpFKpXZ+QIcLhMF26dGmw76/DJuB7bFdmaRMjrWXkKSGuNLAD0pLYWEsQotEo56w6h9mVZlTA/H7zGZQ/yBxXjbGe5PpgRwPIIDUHW2cj/4uimILNLmgtTRNGxUZpMQQCAbp27UrXrl0beimNgxRGaDa7n8U1JiLiuD8l8C/WjqQ552NSnJNALkT8Ec5ceyZvVb5lXGfd3+WQrEPMtbPc4yVzTZDOANnuZxn5LCnQIYzFpF2cmzz6v09RWiJJYCOwHvNgl4SAmPsSl1YVRnzEqpFsMDBPD3d+THVFNT9d91PerXiXtoG2vNfzPQYGB9qqf+kGEHavKxlnKff6YAVNxjzLd2rRZrNAxUZRWhoJYB0me8zBzqCRzgBizYi4SHxGJmX6sA//bKiIVDB69WgWVi+kfaA98w+aT//c/ub4KoyQtcbGXgS5hg8jMJJ1FsR2fu6KtXqUJo2KjaK0JKShZhQjNAGMIMgY5wTpVfsSR5GZNA7GdZYPhKEsWcbJ357Mv6r/RadAJ+Z3n0+/7H7mfBGwSuwgNREbETSw7jpJe05gEgO0O0CzQsVGUVoKNcB3GEtDAvYiBjKbRtKd3eFmtZMxPZaMZJWtj61nxKoRfFr9KT1CPZjffT77+/Y3bjGp7JcssgDpFlINNqssha3hkWSDztiu0kqzQMVGUVoCFRjXWSXmIV6DfbCL1SJuLpmCKd0DYqR3BwjD6urVjPh2BF/FvqJPuA/v9XuPbqFuRmiq3OPF/SUZZ7nYTDSZrClWVD52rEBn7BhopdmgYqMozRkHU2z5g/vZ2+cshK3al0r9LM9xMiEz6l6jwOz/MvYlJ64+kTXxNRycfTDv7PcOHbLduiERsPg210xhCzq9a5M2NzUYMeqCCk0zRcVGUZorKUwSwHfu+wqsdSLpzWCEZ9uuzdJ5GYzIbAXKodRXysnfnMzGxEaOyjuKN7q8QVGiyNbkgLVocrCdmqUzgDd9Oul+Ryv3+1RomjUqNorSHElgRGaL+17iL97Kf4nDyICzFEZ4xLUlwXm3O8CC9Qs4bd1pVKYqObn1ybxywCvkkmtETDLO/J5zHM81ZKRzEDubJowRPRGagnq8f6XRUa+NHz788ENGjBhB+/btKSgoYPDgwUyfPj3tmEgkwsSJE+nUqRM5OTkMHTqUv/3tb/W5DEVp2cQwrWekZb8fkwwQwXZr9jZQkGp9SYEOYERHhAB4ueJlRq4dSWWqknNbncvrvV8nNyvXnFPonhPFZrWJu07YtrGmtL8JoULTQqg3sfnss8844YQTiMfjPP300/zlL3/h0EMP5dJLL+XJJ5+sPe7SSy/l6aef5t577+XNN9+kU6dOnHTSSSxatKi+lqIoLZdqTGrzJszDvBwjGtLXrAYjQhXuS+bSOJ5rBDAP/wpzvce/e5xzl55L3Ilzbdtreb7984QiIXu8jGnOxlgsksocwU7glKaacWyLm1ygOyo0LYR6c6O99NJLJJNJ3njjDfLzjeP1xBN
P5LPPPuO5557jF7/4BZ9++ikvvPAC06dP55JLLgFg2LBhlJSUcOeddzJnzpz6Wo6itDy2YHqcVWMe5jHsKAAZPia1MpIRVuOeK8WckiCQDU6Ow21rb2Py5skATO42mZs73Ywv7jNCJDEeSB9wFiJ9Hg3uWrxNNUFjNC2MerNsYrEYwWCQnJz05PjCwsLapodz5swhGAxyzjnn1O4PBAKce+65zJs3r0E78SpKkyWJsWTWYWMlMYxVE6V2YmadYk0f6Z0DpP4GiKViXPTdRUzePJksspjRfQa3dLkFn99nBEYGmcXsObUWjXyH1M6IKEktTwDTGUCFpkVRb2Jz8cUXA3Dttdeybt06tmzZwtNPP838+fOZMGECAIsXL6ZXr17k5uamnVtSUkIsFuObb76pr+UoSssgjulv9i3mQb7R/RnFxmeqsK6zKmxLGK8w5GDEYStsjW7llC9P4U+b/kSeP483ur3BxeGL00c2S9GnjAcQN5kIm/clmW9+jMusGyo0LZB6c6P179+fhQsXcvrpp/O73/0OMPNDnnrqKc4991wANm/eTFFR3VmuxcXFtft3xLRp05g2bRoAGzZsqK9lK0rTJYKJz0QwgpGNEZggtsWM/Ib7MO41SYGW+hrv/nxYs2ENP1nyEz6LfkaHYAf+2vev/Ffuf5mEgQpMMoDPc46DERxxm8mIgiS2Y3MCI2Y+jOss/W9NpYVQb2KzdOlSzjzzTEpKSnjqqafIyclh9uzZXHnllYTDYS644IK9uv64ceMYN24cAEOGDKmPJStK08Q7VRNs1+QajKBIRpl3PID4MKRbQBI7ktn1fP+n5j+cuuZU1sXX0TfUl7f7vU2vnF5mZwHpPc4C2IwzcZHJ7JtsjKUj2yXTrQvagqYFU29i86tf/YpgMMibb75JMGgS+I8//ng2bdrEL3/5S8477zyKiopYtWpVnXPFohELR1GUHeBg5s9swHZglhTiBHYeTARjicSwcRkvMsp5i/n4ZuRNzl16LlWpKo7JP4bX2r5GseP5ffT2OMvCiI60sgm7+2UGTtzzvSlM/U1ntKlmC6fexObzzz9n4MCBtUIjHHbYYbzwwgusX7+ekpISXnvtNaqrq9PiNkuWLCEUCtG7d+/6Wo6SIbKyssjOziYUCu364AwRCoXIymqBIxulUFNqYPIw1kk26dlgYnlINwAZiObzHIP7swB+u+a3/HLjL0mRYmzbsTyz3zNkJ7JN14AAtm2NNOVsjRUS6QqQ8hwTxrrQghiLpuH+uSiNhHoTm44dO7Jo0SJisVjag+hf//oX4XCY4uJiRo0axV133cWsWbO46KKLAEgkEsycOZMRI0aQna2DKxozWVlZHHzwwVx88cVUVlY22DoKCgro378/fn8LGkYfwaY1y8CxBFYQAljLRnRY0py9qcgRanucJZwE1313HU9sfAKAuzvdzZ3d78Tn85lzCrGFmeI2w/Md0uJG4jGS3ixxoyA6+Eyppd7EZvz48YwZM4ZRo0Zx1VVXkZOTw5w5c3jxxReZMGECoVCIQw45hHPOOYfrrruOeDxOr169ePLJJ1mxYgXPP/98fS1FyRBZWVkceuih9O/fvzadvSHw+/3k5ua2HOtmC8ZtVo4RAfnp87ziGCGKu/ukviaCTXMOYwRhK2zN28rZy8/mna3vEPKF+EOnPzA2f2x6TzQRqRxsQaZkmHnHA8gcnKT70wcUA+3RhlhKLT7HcZxdH7Z7vP322zzwwAMsXryYSCTC/vvvz7hx47jiiitqHww1NTXcdtttvPDCC2zZsoWBAwfywAMPcOyxx+729wwZMoTS0tL6WraiNE5SGJEpwz7gHUwsRtxYMs4ZrOBIppnEUbyxEgeWrV/GqLWj+CL2Be0C7Xit72sclX+UsZIkPVkEJ4Z1h6Uw8aFsz3c4GLdaCjsyoAhohwpNC2Rnz+Z6FZt9hYqN0uyJYYRmM+bBvgXzwJfWMzIewDuXJomJ54j4yOcQtVlg7299n7O
+PovNyc0clH0Qb/Z7k15hN+MshRErcctJ1lrQnk8ck7osCQnSYFMGsbUBOpA+/llpMezs2dyCnN6K0kSoAFZjZtBIfEYEQOppZCBZBGPd1JDe3wxsxlkVEIUnv3+SEV+OYHNyM6e0OoWPOn9EL18ve7zMsMnBiomMBpBri5tNuhRIGnYI6IgKjbJDVGwUpbGQwrSdWYMNwgcxD3X5TZWOyj53n2SLxTFWDO57IQDx3DhXLbuKq1ZeRcJJcFOnm5jTbw6FrQvNOd7OACJiYUywP9/9DklpFgFKYkSvADv0rBgVGmWHqFdVURoDcUyrmfUYMdmKzSorx2aDRbB1M9J/zMEKgmSctTLHr4+vZ8yKMfyt4m+EfCGe6fkMF3a40Fw3GysiKazrLOZ+rwiaxHFknZKlJjGhzmjnZmWXqNgoSkNThWmiKa4wecgHsX3MpAWNDDgTi0bqa8TFFqZ2jPMngU/46dKf8m3sWzoHO/OXzn/h8JzD62acpTznJbGFofKdQtI9Voo2WwOd0PYzym6hYqMoDUUKY8H8gA3wBzHBdsk+82aYS5dmMCIjbWq8Pc4ACuCFVS9w6fpLiTgRhuYP5dU+r9Ip2MlktklSgbSxkZHOEqeR68rsGW+jzdbuOYWY+IyWxim7iYqNojQEcUy22Xr3s0y5DGISBHzYws0YJnYihZRimYQwYiBta8IQT8WZuHoij/7wKACXFl3KEwc8QbbfVYVWpNfKiKsu5X6WzgMSq4lhLJdsbKJCDsZ1pk8PZQ/Qfy6Ksi+R7K11GHHxdmuW7sl+bPqyiE4lRhi2tSQk42wr/JD4gXNWncMHFR8Q9AV5rOtjXBG8Al/KZxMMpEHntoWacfdakmkmbjYp2pSU6FaYYs0WUk+r1B8qNoqyr0hi3GbfYcREBES6NYfdz17XmbctjNTBiEjJb28Q/sW/OOuLs1iTXEOnYCde6fMKRxYcaY6X0QAiOCIkIm4yrdPbSFO6AfixiQSdqE08UJQ9RcVGUfYFUYzLbBM2a0we5pJKHMF2CJDW/V4LQupgEpgMtdbg+Bye/OFJrlt1HXEnzpE5R/JK31foFO5kzsnFiItkj0nGmRRkijtO6mskCUAyzsTCkhHOmtqs/EhUbBQlkzgYYZACTbEUtmJb80uzTGlHI1ZMBTbFWc4FIwpRqN5azRUbr+DPm/4MwLUdrmVKqymEEtu0WA54ru/NOJMkgW3dZhKrkaFnHTGuOkXZC1RsFCVTJDDtZn7AWBNlmAd+FUZssrHNMqWORjLERJQkQcDBPvB98HXoa8766iw+j31Orj+XZ/Z7hvPanmfEYgt2aiZYS6YAWwAawxwrqczZnu357vd1wPQ40/EASj2gYqMomaAaMxLAW6Gfhc0gkxqZBLa+JhsrQAnSJ2Judj+HYdamWVy6/FIqkhX0CfbhL73/QklhifkOP0aU/BjhkPPjpNfw+DHxF2k708pdoxSKFmDERhMBlHpCQ32KUp+kMBbMamymlzTQTHleYGMo2aS7ziRAL7gV/LGKGNctu46zl55NRbKCs4vP5t99/k1JvCQ9qUBiMRJjiXvWAnV7qIkLrQY7HqAjKjRKvaKWjaLUF1FM7Yy4zSQJIIYREWk3s22dS4D0KZtRjKutFbW/oSudlZy77lz+VfMvgr4gD/V4iPEdxptBZzIaWpIBvDGePGwWmwiPFI9u6zYrxGSc5aGJAEq9o2KjKHuLg4nBfI95sAcx1kUUE2B3sOnLcnwV9qG/7W+hxGq2Aq1h9tbZXLzsYrYkt9At0I2Xu7/MEe2OSE9hFhFxPNcTwZH4jxSGRjGC4l1jFmaqpnYEUDKEutEUZW9IYCyZb7HtZyKYGEul+14aXQoSN/F2AIiRTi7EiHH9suv56dc/ZUtyC6OKRrFowCKO8B+RfrwkFuRiMtykRiZBusvM5/kp1pAPIzbdUaFRMopaNoryYxDr5AeMwASwxZHeKZbiSpNEAUkMEKTuJYpxtblxluXR5Zy
79lz+Xf1vAgR4oMcDTOg4wbjN/O71vJ0GZE3Z7nfUuNdOYVOdk9hxBTIfpz2m35nGZ5QMo2KjKHtKElOcucn9LLGXKmymVwobqBdRkPiNJAMIPoxbazMQgJerX+by5ZdTniynR7AHL7Z/kaHthlrLJIixYmQcgGScSTKCJB7kYmMzWRghkzRnB9PfTAs1lX2Eio2i7AnV2E4AQeysmUqM2MiDvxrzsJfW/2B/2+IYSwbSamGqc6uZsGIC08qnAXB60en8Yb8/UFRTZK7f2rMOETAZDZDACpy41YQUtpGmrKMTdtSzouwDVGwUZXfwWjMxrEjIrBnvDBoplKzAdkmW7gBynCQVFJljPqv6jPO+OY8lNUsI+UL8pttvuKrTVcZtFsCOfpauA2LJSF2OzKWR2p2Y+71SECrv2wFtSHe/Kco+QMVGUXZFBFug6cNYLSGsmICxLsSa8MZEpHbG8WyD2n5kzlaH30V/xw3f3kDUiXJg+EBebP8ig1oNSg/oS/cAqZXJom4tTtA9zsHOnhG3nYOpnWmDpgUpDYKKjaLsiCSmQHMjxkUWwrrNZJqlJAFIHYu4zmRMs3RtjlDbPFMe9htDG7l09aXMqZoDwGXtL+ORHo+Ql8qzvdPkN1QsowL3eyPYGTgiPNJWRroPiHstF5MIoKOblQZExUZRtodYM5KWLG4qsGMApCATrOuqCpsw4G2emY1xg1UC+fBu+bv8bNnP+D7+PYX+QqZ1m8bZnc6218/Hus0k6SDuXiuEddnlkF5j452omcB0A2iHpjUrDY6KjaJ4kUaW32MEx4cRiCDm4e/tZSZWhRROihWT9BxbgK1ryYPo1ii3rb+NhzY+BMD/K/h//Lnzn+mR7GFHMIMdLxDCiJi3K7McI61uJN250F2XvNpghEbTmpVGgIqNogg1GJGpwTzg8zCuL2kDE8E82OW9tOuXDDARgiyMxbEJY+m47V8WxxdzwdoL+DTyKVlkcXfXu7m1y61kkWW+R1rUgG39L0IiCQfeAlEZES1xHLGuCoG2pGevKUoDo2KjKF5rJoWxZMCIThU2dhJ334N1ocnsF2mw6R3nXGCu5fgcflvxW25adRMRJ8J+wf14vtPzHNHB03LGPbY2oO/tcSaFn36M8IEd0+y4xxRjhK8dJj4j61SURoKKjdKyqcGMaa7APLS9w8ykJYwE5Cvd9zJ0DGwqsyQT+LAP+hB853zHJcsvYV7VPAAubXcpD3d5mILKAmMZiXj43fPcwWi1IrJth2aJ34iYySgBB2PJ6FgApZGiYqO0TJLY5plga1YkvVncU/Jw97aGkVYxSc+2LIy7rMr9HIZZm2dx5Yor2ZzcTJusNjzd62lOb3u62S+xoBDpv4UiIgn3WiJ43lhMDNt+JoJxvbV3f2o3AKWRomKjtDyqMF0ANpJeLxPHuNMkhbgSY2l4J1V6kwBEmCTTKxuIwZbqLVyz+hr+vNmMax6ZP5I/tPsDnXM62+tkYywXaZYZJD2FWVrSyCC0uOc4MFaMWDPqNlOaACo2SsshgQnab8G4qiSwL1lfCeyDXoi4rxDpLi2xZCrcz9nmWvOj87lk9SV8m/iWXH8uD/V4iCvaXYGvxmdcdtJNAOx4AT822cBrmUjcRopDZUSArKMQdZspTQYVG6X5Ix2av8NkfYEt0pS5MhGs60z6mXldZ1JD4/Nsk3kwNVDtVHPzdzfz2x9+C8Bh4cP4U/c/0ae4jzlWZs54h5yJKy4X27tMpmqCzTyLYS2gKqzISFq1ojQBVGyU5k0M4zIrxzzMc7APfGmYmYOdZilV+eIak35mYvmUYywMqYMJwv9U/g8XLb+IpbGlBH1B7up0Fze3upkAAVs7I2Oat53SKcIiIpbjrk06FEjHAInHFGLaznhde4rSBFCxUZonDkYYvsMIjoiIjFwOY2Mk3iQAEZka91ip4IfamIwMHYsEIty14S6mfj+VFCkGhAfw3AHPMShvkPmuraRnnEmqcp57HRnLLJ0
GJHVaEhS87f992G4A2ttMaYKo2CjNjwjGminDpjNLtpk0zIxjRKea9PoZ6WcmUzS3YqwLsWRcK6V0UykX/XARSyJL8OPnlja3cHfbu8nOdtUqiLFGKth+xlmu+13SU03azIhFE3OPSWCsmXZobzOlSaNiozQfti3OjGIe+lKkKZMqvdaDN51ZXGYScPfEZADIhmgoyn2r7+P+TfeTJEnfcF+e3e9Zjsg6whwbJb2zc777Pua5ruP5me0eI+1vWrk/C7Gut/boSAClyaNiozQPvOnMQWzAP4URC+mCXI6xYsLYh37WNvtbUceSoQZKI6VcvPJiFtcsxoeP64uv59fdfk1OTo69ljTPFGERKwasGCWw3QGk/b8kJoh148O0nClGkwCUZoGKjdK0SQAbMCOVpTGmbM/GBujloS4/K7E9xoKenyJUuOeHIVIZ4d5N9/LgxgdJkuSA8AFM7zCd/5f//6wLTkQlhU1EkEr/JHbImYxrFhdaNjZpoADbcqYdWjujNCtUbJSmifQwk3TmLIx1I92ZJRlA0obzsYF1sVgkiA+1dTK1YuQKzj+j/+TnK3/OF9EvjDXT7nru63kfueRa11sSmy6dgx0vIK3/vUgSgtTPFGKz0/wYa6YNWjujNDtUbJSmRxQTl6nAuq2qsUWSMgYZzAPfj632j2MtGbE0pKAyFxNnqYZqfzV3rLqDhzc/jINDn1AfpnedzlHho2yNTD5GrGqwwXtxj+VjuwzIGiRZIYkVmJj7knEAEuNRlGaGio3SdEhiEgDWYK0GSWmuwDbErME8tL3TMmW6ZjVWjLyWjOzLhYXVC7n8u8v5JvYNfvzc1Okm7mp/FzmxHBvIl5hOIcaiimOtG0k0EJdZHjYmFMRaNmLVdMCIjf42Ks0Y/eetNH68HQCqsW1lKrE1KxJv8R7vTXmWLLMwtjJfKvdd99vWmq3ctO4mpm2aBkD/cH9mdJzBkDZDbAZbDXauDZ5rSrdmsWJEcOQYETxpsimWVxe0gabSIlCxURo3MeAHTKYZmId9AGPJxDAPftnmjcmIxSJjnaWvmPQZg7TBZm9sfYMr117JusQ6gr4gt7e/nVuKbiGUFbJiFna/Yyvp7jjc60qqdAI730ayy2LuubKvrfvSTgBKC0HFRmmcJDEZZhsxgiEB/QDmIS9tZsAG6mWui2SdSUdmmUUj3QCkFiYJ31d8z7XfX8usLbMAODzncP7Q9Q+UFJdYKyqFzWoLYadpSvabuM7ATunM8aw37F5Hstakr5l2AlBaELv1z33NmjVcc801DB06lNzcXHw+HytXrqxzXCQSYeLEiXTq1ImcnByGDh3K3/72tzrHpVIpJk+eTM+ePQmHwwwcOJBXX311r29GaSZUAsuAle77ave1GWONyE9xh0nsRTozV2IHkIkYBDCi4wbmnZTD9K3T6fd1P2ZtmUWeP4+HOzzMh70/pCRQYt1sUvsio5fBDjqThppxzzEiSt7amZjnnO6YWI0KjdLC2K1/8t988w0vv/wyRUVFHH300Ts87tJLL+Xpp5/m3nvv5c0336RTp06cdNJJLFq0KO24O+64g7vvvpvx48fz9ttvc8QRRzBmzBjeeuutvboZpYkTBVZjhGYLRkBiWItABEUyvqqwDTbFsvBmmFVj/4V7BOfr5Ncct/w4Ll1zKVtSWxiZN5L/K/k/rmtzHVmOm3MshZe5GCslgnW/CUGMG64A46YTl5g36y3knt8V6IbWzigtFp/jONsOnq1DKpXC7ze/tc888wyXX345K1asoGfPnrXHfPrppwwaNIjp06dzySWXAJBIJCgpKaFv377MmTMHgPXr19OtWzduueUW7rnnntrzjz/+eDZs2MBnn322y0UPGTKE0tLSPbpRpREjWWbfYtv9+7BpwQGMtSJxkyjmwS/1LJsxD/Vcd1u5e6wIhuvmivliPLj+QX694ddEnShts9rySOdHOD/vfHw5PnOOzKfxY9OZJQNNrpnAiEwYK0AS+A+
4+2qoLQqlI9blpyjNmJ09m3fLshGh2Rlz5swhGAxyzjnn1G4LBAKce+65zJs3j2g0CsC8efOIxWKMHTs27fyxY8fy+eefs2LFit1ZktIcEJfXMmAFdmYL2EB+mPSU4kqMmGzbml9qaWKeYyX1OQkfVnzI4KWDuWP9HUSdKBcXXcyX+33JBa0vwBf02QadEksRQRP8GBHJxyYfxLHdBuS9D5uG3R7jNlOhUZT68xwvXryYXr16kZubm7a9pKSEWCzGN998U3tcdnY2vXv3rnMcwJIlS+prSUpjJoJxmS3BPJwlBrPJ3VeGyfqSbDKwAXqZQ1OFjdmI6MjD342xlFHGuO/G8f9W/z8WRxfTO9Sb93q9x4yOM2jTqo2NxUitjRRryjgBrwDKGsJAESb2kocZzRxyt+Vgamb2RzsBKIqHestG27x5M0VFRXW2FxcX1+6Xn61bt8bn8+30OKWZIr3MNmDEItvzU9q+SOxDrBvpFNAKIyjyAJeWNeJOk9TmGDgJhxcrX2TCdxNYn1hPkCA3t7mZX3X8FTlZOUbYZLaMTOv0tp3xNuuswQb8vQ02ZRaOxJBCmMaZxajIKMo2NJnU52nTpjFtmim227BhQwOvRtljRDS+xVgl3hqUasy/xCps2rKIh1gzUc9xkgYtD3pxX+WYn1+nvuaqdVcxv2Y+AEfnHc3vO/yefrn90vuQxbGusSr3OpKA4O08IBltkkYt45xlFICDqZlpj7rMFGUH1JsbraioiLKysjrbxVIRy6WoqIgtW7awbV7Ctsdty7hx4ygtLaW0tJR27drV17KVTCPV/N9gYjNbsQIjQiEZZJIeDMZi8cZgJL1ZzpV4ijS2jEAkFuHu7+9mwDcDmF8znzb+NkzvMp2F+y2kX14/c47Uv0iBpYhKHnb0c5z0eI20t5FOAR0wyQiF7po6YbLNVGgUZYfUm9iUlJSwYsUKqqur07YvWbKEUChUG6MpKSkhGo2ybNmyOscBHHTQQfW1JKWhiWL6mC3BNqyUehfZLy6ycmxDTK9FUeO+5OEvwlSFjbdkwTvJdxjwzQDuWX8PMSfGJUWX8GXPL7kk7xL8+NOtlTxsVplc14ct2JT6GbFeRHwk/VmSAQqBHpgGmuo2U5SdUm9iM2rUKOLxOLNmzardlkgkmDlzJiNGjKgdlzty5EiCwSDPP/982vl//vOf6d+/P7169aqvJSkNRRLTYuZLjNhIBb/3Je1cpKeZDyM2UpAJNugvA9BEXKQqvxLWxtZy9qqzOWndSXyT+IaSUAkf7PcB07tNp212WyMC3iC/xFlakd7xGc93BjCutSJsAkABNvgfAg5ArRlF2QN2O2bzyiuvAPDxxx8D8Pbbb9OuXTvatWvHsGHDOOSQQzjnnHO47rrriMfj9OrViyeffJIVK1akCUv79u25/vrrmTx5MgUFBQwePJiZM2eyYMGC2locpYmSwlgo32FqX6T6PoRxh+VirYOI571YEt6CTImfhDzbKzEP/SyIh+I8uv5R7v72bqqcKnJ9udzd7m6uC11HMM9tWiadniXWAumNOcU1JuuR9YoIyvF52NY04kbT4kxF2SN2q6gTqJM9JgwbNoyFCxcCUFNTw2233cYLL7zAli1bGDhwIA888ADHHnts2jnJZJLJkyfz9NNP8/3339O3b1/uvPNOzjrrrN1atBZ1NkKqManMW7FxFulplsBOpZQizSDWFSYdklPYbspRd3+h5xoVQAg+SH3A1euuZnF0MQCnF5zOI10eoXuou/keqE0WqBULsOnVMmpZEgRE4GScc7XnHBGYENrTTFF2wc6ezbstNo0JFZtGRARjyWzAur/iWLdYNlZ0HEz9jFgqUtcimWHlmId5yN1X4Z7ndgZYt2UdEzdO5IXqFwDYP7Q/j7d6nJMLT7bikMSIlmS1JdzzxR1XjU1pTmDETPqXiZDI8DMZJdAKIzTaoVlRdsrOns1NJvVZaWRIAeYqzMNZOilXYx7KUrsSw7i/5IE
vVk3SPSaE7RIQwMRmwLrPHIhXx3ms5jHuXn83lalKwr4wt7S9hZs73Ew4FTbWlFglYnXkkN61GWz9jLSWEStImnbKSAIRmnxMplk+Om9GUfYSFRtlz5DW/2swloi4o6QxpvQzi2KD51IXE3VfUj8jVo/EbbzJAH4gBPPL5nPN5mv4Iv4FAKPzRvNIq0fo1aqXtYpySW/YKV0FCrCuMzlWeqXJ/gLPWuVaAYzF0x79DVGUekJ/lZTdw8E0y1yLsWhEZPyYB302dgKliE3SPU+q66W3WZV73LbjmqXdTABWV6zmhq038Eq5SUzpHejNo10e5ZRWpxj3mrcAU9xb8n2yXqnBkYQAdyJnbTpzLuk90KKYeE5HrFtOUZR6QcVG2TniWlqDmZYpAlODdZf5MFaOuM6knkXEQzoDZGOLN6VFDVgB8EFNqoYp5VO4v+x+apwacn253F54O9e3vt6kz/swbq0yjLhJ2rSDsVLk+6S1jHctPmoncxLHxGLi2Amc3TBpzlozoyj1joqNsmNqsCITxQbyxdUlLWTyMA/1ENadJq4sedDHsQkBkubsiZU4AYfXtr7GDWU3sDKxEoCzc89matepdKObOTaGbcSZj627kbk3ks6Me1x8m+8F69ILk954U9OZFSWjqNgodYliMszWYiyFGOYhLQF9mdUiKctiTUjmmGSByfAyEaIkNpFAEgCi8H+V/8cvt/6SBTULABgQHMDj7R5nWPYwc67UwkgnAXG/eSdwbtuZWVKfIxgxFLebjB6QpITOGItIEwAUJaOo2CiWGLARk2FWiY23SCqx9BOLYQsh5cEvs2Mq3WtJL7MYRiSkW7PUsQRhk7OJu7bexZMVT5IiRZG/iPta38cVba4gEA/Y7wPb00yC+RLoD7vXrsR2chYRTLrHZmOD/3Fsd+Z22OJNRVEyioqNYjPMpFFmFtaakcC9TL+UWhqxEqRxZRwb9K92ryk1M1UYwXEf/PHqOE9tfIq7yu+iLFWGHz9X51/NPe3voY2vjR2aJmnQYs3IqCQpBBWhkDiOpCynsGnXYomJ26wQk86sCQCKsk9RsWnJpDAZZksxYiMxGak/8TaglDYvcp70MJOAu7dyPxsjTiIwudS62eYm5nLD1htYEjeNV4/POZ6Hix9mgG+AbRMjacw5mMwzCfZLDCiMTWkWa0YyzmQss8RopGtBPqavmc6aUZQGQcWmJZLCWDDLMMH/FFYwZAqmFGjmes6pwohMK8wDuxJbkS8P9WqMSEhKs1uF/4XzBTd8fwNvx94GYP+s/Xmo1UOMbjPatEKSAL93cmYImwggiQW47+U7vHNwYtgOAjGs660jxmUmYqkoyj5HxaalsQVYiQn+S4FlDDsWWSruJZ1Ygv+Stiz1KiHsULEaz3s8n4GN0Y3cU34PT5Y/SZIkrXytuKPNHVzT6hqyt2RbFxkY0fJmkImYZGMTFKBux2gw7jGxtGRujWSZaQcARWlwVGxaCuXAckwqs6Qcg3mAe9vMiOtLugFIR2bv/JdsrCiIa0sEy01zjhHjt1W/5b6q+9jibMGPn3E547gv/z7at2pvA/be6n0RCdzt3piMWCVSwClTPqWrtKRmS+1NZ9RlpiiNCBWb5k4Fxl22mvTZLfJwlhYy4qoS0YhjG1TWYK2DGmwWmNTZFGAe8BWmXuYv8b9w89abWZY0A/JODJ3IQ20eYkD2ALMeicmEPN8lo5ZFWGSfJCp4OwLI9+e5ayjAdjEoxLjN1GWmKI0KFZvmSgVmFPMKrPtJssdEWCLYgLy4qWLYIkxpPwN2REAO5gEvoiCtY3Lgf33/y40bbuTvyb8D0C+rH1Pyp3BK7in48Jnr5mKzw7wdlqtI76cWwM6/CXjWk8KODYi414lgKv87k54coChKo0HFprlRCXwNfIWtefGKjFTQS+YZ2NYzUk9Tg51oWYNt+SJpz95+aEFYGV3JrVW38lLkJQDa+tpyb+G9XB66nEAwYIP+SWwKsrTvF4slz72
mrDPP813SoUC6EUjjThkf0NZ9qctMURotKjbNha0YgfkMY20IIjTiEgNb1S9Fm15rRirsxYoQS8b7HiAbyiJlTIpP4vGax4kRI5tsJuRM4JbALRTmF5pryGhnES2wEzhFXMC6xnKxWXESW5I1Si+zHPe89qjLTFGaCCo2TZ0NwP8Cn2KaU/o8L8keEytGxCKMtXKkANIbz5G4jAxBC2GsJLfNTKQiwhPJJ5gUmUSZUwbA2OBYJrWeRPfs7rYGR1rU5GHb1xRgBU3ccJJ9Jp0Bgu5xMtIZ7ETNEOmFmZplpihNAhWbpsoa4G/Ax9hUZG97Gak5kW7IMufF69ISl5QkC0iwXgLuYMUoAKloiheTL3Jb4jZWxVcBcFzWcUxpNYXBicFWrKT1v7dmJg8bd5HjwFgyBVhrJoJtNZMgvTlmECMymmWmKE0OFZumRhnwd4y7LAIUYYLj8iCXqv8AtnllObYzgDd1WI6TB7u4uaRpZSWQDU7c4V3e5ebozSyKLgKgv68/D+Y8yMjgSHwBny2ulImZIgbSPFN6peVg+5iJ4HmtGYnDSGsZERyZM6OjmRWlSaJi01RIYtrK/B1TmJmHdYtJY0ywlo0UZUoTSukMUO7ZL8SxDTUl5dmtVymtLuUW/y3MT80HoCtduTf3Xn6W9TOyElk24UBcZdKPLIUtsJTstyC2+l9qbLKw1owIoIhTAtNipgu2k4GiKE0SFZumQAL4BFiEfYhL639voF9ERB7W8hNs2/1KTB80STGWh7q4vADisJSl3M7tvMzLkIJCCvlV4Fdc41xDjj/HCoq46qS9jMR6xLIJYdvGSHNMsJZXgeeeZM5MAhuXKURTmRWlGaBi09hJYIL/SzBWSoT0B7xXJKQoUlrAOKQjwX5ppClpx5LWnAXr/Ou4l3t5hmdIkiTbyeZaruWW3FsoptgWWrppz7XuMxlEJrNipFGmfEcQk03m7V0m9+dNRsgCumMsGv3XqSjNBv11buysxNTNSHt/eVh7hWR7lsz2ENeZZHWVU2t5bE5u5kH/gzwWfIwaXw1+x8/PfT/n7uDddKvuZr8vGzt2Wa4lc2K8s23y3WNl3IDEYvwYa0WGoiWxFlEHdGKmojRTVGwaM1sxFo20cvG6zLa1aHYlMknSm2W6fdAqqyp5tPWjTMmfwlb/VgDOSJ3Br2t+Tb/cfra+pgbbJy3Hsx4wQihjmKV7s1g9rTCi4021FmGKu/taY+Iy+Xv0X0dRlCaEik1jJYkRGnEzeeMq8pDfNtC/Ld54TcKzLQURf4SnujzF5OLJrA+uB+CE2AlMSkzisKzDbBGo9DDzuZ/FJSZFoFJP48dOzfT2X8vGCpYMOQNr/XTFjixQFKXZomLTWNkKbMLGYCB95DHbvBe8n7fjWosH4szInsF9be5jTWANAEdUH8GkmkkclzrOdmMW0RDBCWCLQ6VYVOI2UWwLHJnWKdlp4iqTsdC4+zphOgCoyChKi0DFpjHiAKuoKzKQ7j7zio33p4iLZJplQSIrwfO5z3Nv4b0sDywH4ODowdy34T5GrR+Fr8hnq/tFHMQqkbqbELYQNO75LJluMtFT1inrkN5m2RiR0RYzitLiULFpjMQwlo24vyQhQARHREZeYEVGsr/cGE2SJDPDM7m3zb18FfgKgL7xvtyz8R7GVI/Bj9+6zKRljMSEJLMMbHxFYjFi0XiLQfMxAiQNNsX9FsRMyuyCdacpitKiULFpjESwDS8l/uF9QEvBpvz0zntxuwIkE0lmBWdxb8G9fBH4AoD9Evtx59Y7uaD6AgLxgHWxyfAx+S6xaOJYCyfk2ebDutq87jEROelploXpcNANk4GmIqMoLRYVm8ZIJVZIvMjD2usmk1iJa4kkI0lmZc/ivvB9LAksAaBHogd3VN3Bz2p+RjAStK36JbNN2st43WNigYiFI9skMcDbKSDbc0zYvW4roAdGbDQuoygtHhWbxohYNV43mSCuMrDpwwFIxBO8FHiJSXmT+DL
rSwC6J7tzW+Q2Lq66mFAiZEUmhhUHb8NOEQ2Ju4SwdTPSpFOC/2LxSHZawN0XxohMW+xYZ0VRWjwqNo0RERMRGz/pBZmSDp1lj/vB/wM/9/+cuC9Oz1RPbo3fysXVFxNKhdKtGK9wiOCIC83B1szIMSHPy9vfTGJIUruTg5mU2QUN/iuKUgcVm8aIuNCkgaY3GUDiMmKhuG6tLsEu3Jm4ky6+LoyNjCXoBM2+iHtNERURFGkRIzEbsXCk0j+XdPeazKCRpAFvbU1nTFxGm2UqirIDVGwaI1Krsm1nABEMsBlkEkPJgtsTt9v2L9XYAWXSXVneu/NpyHKvIYkAudgGndJxIIxNEJBMM8kw6wz0wnYWUBRF2QEqNo2RPKxFIdaMtP8Ha4mI0Mhn6UUmFkeEdMHxu+/j2PiLtJIRC0p6nW0rMt74TQeMyGhHZkVRdhMVm8aIN9VYmlt6a18kRVnERralsCIjVomITJj0gWUBz2cZVJaHraWRmFDQ82oP7IcJ/muGmaIoe4CKTWMkhJlMuQ6bECCjAMC2ganBiouMH8CzTdrHRDFCFML2WJPYjYwXCGMtn7B7nmSnich0QkVGUZQfhYpNY8SHebCvp651IwWT0gBT3GgiGBGMkORgg/5i1UhBZrbnWtKKJh9r3UhyQBHQFxOb0XHMiqLsBSo2jZUCzMN/E7YfWQIjFDXY/3MyQE1cZhKDkaLLBDbrLEz6eAJp8tkO0+Zf3GiFQD9MhpmmMSuKUg+o2DRWsoDeQIX7WcY+ey0aQdr/h7FjAMSaEdcYpLvPHPfarTAWjIhMifu9OsBMUZR6RMWmMZOPyfr6CutGg/Q2Nn6s0ESxQhInvZjTm+7sYKwjH8aiaQ8cjLFmtFZGUZQMoGLT2OmMEZFV2C7KYCdiSjdmcZuJW01cZPISkUlheq+VAz2BI4FDMG47RVGUDKFi09jxY0TBB6zADi6T+htpximjn6UQVKr+ZZsIjSQdjASOwWS9KYqiZJjdKslbs2YN11xzDUOHDiU3Nxefz8fKlSvTjiktLWXcuHEceOCB5Obm0r17dy644AJWrFhR53qpVIrJkyfTs2dPwuEwAwcO5NVXX62XG2qWZGHcaQOxI5Slh5l0Yw5jxzLLe6mZkULOCkxM5gbgp6jQKIqyz9gtsfnmm294+eWXKSoq4uijj97uMS+99BKLFy/m2muv5e233+b+++/nk08+YciQIXz77bdpx95xxx3cfffdjB8/nrfffpsjjjiCMWPG8NZbb+39HTVXfJhiykEY4ZHOyzLITMTHW5gpsZsU0Af4OXAxxjWnKIqyD/E5jrPt1JQ6pFIp/H6jS8888wyXX345K1asoGfPnrXHbNiwgXbt2qWdt2rVKnr16sXtt9/OvffeC8D69evp1q0bt9xyC/fcc0/tsccffzwbNmzgs88+2+WihwwZQmlp6W7dYLMlBmzEuMW2YuplpMZGikBbYYSlEybjTFEUJYPs7Nm8WzEbEZqdsa3QAPTo0YN27dqxdu3a2m3z5s0jFosxduzYtGPHjh3Lz3/+c1asWEGvXr12Z1ktmxBGSDpjYjLSeka6RUsvM0VRlEZARtsofvHFF6xfv55+/frVblu8eDHZ2dn07t077diSkhIAlixZksklNU+kD1oeJl06FxUaRVEaFRkTm0QiwZVXXkm7du249NJLa7dv3ryZ1q1b4/Ol96QvLi6u3a8oiqI0LzL29+/48eP56KOP+Otf/0pR0d4HDKZNm8a0adMAEx9SFEVRmg4ZsWxuueUWpk2bxvTp0xkxYkTavqKiIrZs2cK2eQli0YiFsy3jxo2jtLSU0tLS7caHFEVRlMZLvYvNpEmTeOCBB3jssce48MIL6+wvKSkhGo2ybNmytO0SqznooIPqe0mKoihKA1OvYvPYY49x++23M2nSJMaPH7/dY0aOHEkwGOT5559P2/7nP/+Z/v37ayaaoihKM2S3YzavvPIKAB9//DEAb7/9Nu3ataN
du3YMGzaMl156ieuuu46RI0dy3HHH8c9//rP23FatWtVaLO3bt+f6669n8uTJFBQUMHjwYGbOnMmCBQuYM2dOfd6boiiK0kjYbbEZM2ZM2uerrroKgGHDhrFw4ULmzp2L4zjMnTuXuXPnph0rxwiTJk0iPz+fRx99lO+//56+ffvy8ssvc+qpp+7FrSiKoiiNld3qINDY0A4CiqIojY+dPZszWtSpKIqiKKBioyiKouwDVGwURVGUjKNioyiKomQcFRtFURQl46jYKIqiKBlHxUZRFEXJOCo2iqIoSsZRsVEURVEyjoqNoiiKknFUbBRFUZSMo2KjKIqiZBwVG0VRFCXjqNgoiqIoGadJjhho27YteXl5tGvXrqGXkjE2bNjQrO8Pmv896v01bfT+9pyVK1eycePG7e5rkmIDzX+mTXO/P2j+96j317TR+6tf1I2mKIqiZBwVG0VRFCXjNFmxGTduXEMvIaM09/uD5n+Pen9NG72/+qXJxmwURVGUpkOTtWwURVGUpkOTEptvv/2Ws846i8LCQlq1asUZZ5zB6tWrG3pZe8wrr7zCmWeeSY8ePcjJyaFv377ceuutVFRUpB1XVlbGZZddVpvqfcIJJ/D555830Kr3jpEjR+Lz+bj99tvTtjf1e3zrrbc45phjyM/Pp1WrVgwZMoQFCxbU7m/K9/fhhx8yYsQI2rdvT0FBAYMHD2b69Olpx0QiESZOnEinTp3Iyclh6NCh/O1vf2ugFe+YNWvWcM011zB06FByc3Px+XysXLmyznG7ez+pVIrJkyfTs2dPwuEwAwcO5NVXX90Hd7J9duf+SktLGTduHAceeCC5ubl0796dCy64gBUrVtS5Xkbuz2kiVFVVOb1793ZKSkqc1157zXn99ded/v37O/vtt59TWVnZ0MvbIw4//HBnzJgxzp///Gdn4cKFzsMPP+wUFhY6hx9+uJNMJh3HcZxUKuUcddRRTpcuXZwXXnjBefvtt51jjjnGadOmjfPtt9828B3sGS+88ILTsWNHB3Buu+222u1N/R6feuopJxAIONddd53zzjvvOHPnznXuv/9+54033nAcp2nf36effuqEw2Hn2GOPdV5//XXnnXfeccaNG+cAzu9+97va484//3ynsLDQmTZtmvPee+85p59+uhMOh53//Oc/Dbf47fD+++877du3d04++WRnxIgRDuCsWLGiznG7ez+/+tWvnFAo5EyZMsVZsGCBM27cOMfn8zl//etf980NbcPu3N8NN9zgHHnkkc4TTzzhLFy40Hn++eedAw880CkuLnZWr16ddmwm7q/JiM0jjzzi+P1+Z+nSpbXbli9f7mRlZTkPPfRQA65sz1m/fn2dbX/84x8dwJk/f77jOI7z+uuvO4CzYMGC2mO2bNniFBUVOddcc80+W+vesnnzZqdDhw7OCy+8UEdsmvI9rlixwgmHw87DDz+8w2Oa8v3deuutTjAYdCoqKtK2H3HEEc4RRxzhOI7jLFq0yAGc6dOn1+6Px+NOnz59nFGjRu3T9e4K+SPOcRzn6aef3u7DeHfv54cffnBCoZBz5513pp1/3HHHOQMGDMjMDeyC3bm/7T13Vq5c6fh8PueOO+6o3Zap+2sybrQ5c+ZwxBFH0Lt379ptvXr14qijjmL27NkNuLI9Z3tVu4ceeigAa9euBcz9du7cmeHDh9ceU1hYyKhRo5rU/d58883079+f8847r86+pnyP06dPx+/3c+WVV+7wmKZ8f7FYjGAwSE5OTtr2wsJCUqkUYO4vGAxyzjnn1O4PBAKce+65zJs3j2g0uk/XvDP8/l0/6nb3fubNm0csFmPs2LFp548dO5bPP/98u26pTLM797e9506PHj1o165d7XMHMnd/TUZsFi9eTP/+/etsLykpYcmSJQ2wovrlgw8+AKBfv37Azu939erVVFZW7tP1/Rj+8Y9/8Nxzz/HEE09sd39Tvsd//OMfHHjggbz00kvsv//+BAIBevf
unXavTfn+Lr74YgCuvfZa1q1bx5YtW3j66aeZP38+EyZMAMz99erVi9zc3LRzS0pKiMVifPPNN/t62XvF7t7P4sWLyc7OTvvDV44DmtTz6IsvvmD9+vW1zx3I3P01GbHZvHkzRUVFdbYXFxdTVlbWACuqP9auXcudd97JCSecwJAhQ4Cd3y/Q6O85FotxxRVXcOONN9K3b9/tHtOU73HdunUsXbqUiRMncsstt/DOO+9w4oknMn78eB599FGgad9f//79WbhwIbNnz6ZLly4UFRVx9dVX89RTT3HuuecCu76/zZs379M17y27ez+bN2+mdevW+Hy+nR7X2EkkElx55ZW0a9eOSy+9tHZ7pu4v8OOXqtQHlZWVnHbaaQQCAWbMmNHQy6k3HnzwQWpqarjtttsaeikZIZVKUVFRwbPPPssZZ5wBwHHHHcfKlSuZPHky1157bQOvcO9YunQpZ555JiUlJTz11FPk5OQwe/ZsrrzySsLhMBdccEFDL1HZS8aPH89HH33EX//61+2KbH3TZMSmqKhou38J7uivkaZATU0No0aNYvny5XzwwQd07dq1dt/O7lf2N1ZWr17NpEmTeOaZZ4hGo2m++2g0ypYtWygoKGjS99imTRuWLl3KiSeemLZ9xIgRzJ07l++++65J39+vfvUrgsEgb775JsFgEIDjjz+eTZs28ctf/pLzzjuPoqIiVq1aVedcuT/5S7ipsLv3U1RUxJYtW3AcJ+2v/6Z037fccgvTpk3jj3/8IyNGjEjbl6n7azJutJKSEhYvXlxn+5IlSzjooIMaYEV7Rzwe56yzzqK0tJS33nqLAQMGpO3f2f12796d/Pz8fbXUPWb58uVEIhHGjh1LUVFR7Qtg6tSpFBUV8fnnnzfpexT/9Y7w+/1N+v4+//xzBg4cWCs0wmGHHcamTZtYv349JSUlrFixgurq6rRjlixZQigUquPzb+zs7v2UlJQQjUZZtmxZneOARv88mjRpEg888ACPPfYYF154YZ39Gbu/H53Hto95+OGHnaysLGfZsmW121asWOEEAgFn6tSpDbiyPSeZTDpjxoxxwuGw89577233mNdee80BnIULF9Zu27p1q1NcXOyMHz9+Xy31R1FWVua8//77dV6AM3bsWOf99993KioqmvQ9vvnmmw7gzJo1K237iBEjnK5duzqO07T/Hw4bNszp1auXE41G07afd955TjgcdqLRqPPJJ584gPPss8/W7o/H486BBx7onHrqqft6ybvNjlKDd/d+fvjhBycYDDp333132vnHH3+8079//4yufXfY0f05juM8+uijDuBMmjRph+dn6v6ajNhUVlY6+++/v9O/f3/n9ddfd2bPnu0cfPDBTq9everUAjR2rrzyytqak//5n/9Je0mxXzKZdIYOHep07drVefHFF525c+c6w4YNc4qKiuoUYDUV2KbOpinfYyqVcoYPH+4UFxc7Tz75pDNv3jznsssucwBnxowZjuM07fubNWuWAzgjRoxwXn/9dWfevHnO1Vdf7QDOhAkTao8755xznNatWztPP/2089577zlnnnmmk52d7Xz88ccNuPrtM2vWLGfWrFm1v3+/+93vnFmzZqX9MbC793PzzTc72dnZzkMPPeS8//77zpVXXun4fL7agt6GYFf39+KLLzo+n88ZOXJknefO4sWL066ViftrMmLjOI6zatUq54wzznAKCgqc/Px857TTTtuuejd2evTo4QDbfd111121x23atMm55JJLnKKiIicnJ8c57rjjnEWLFjXcwveSbcXGcZr2PW7dutW56qqrnPbt2zvBYNAZMGCA8/zzz6cd05Tv76233nKGDRvmtG3b1snPz3cGDhzoPPHEE04ikag9prq62pkwYYLToUMHJzs72znssMOc999/v+EWvRN29Ds3bNiw2mN2934SiYRz3333Od27d3dCoZAzYMCAOlbuvmZX93fRRRft1n8Dx8nM/WnXZ0VRFCXjNJk
EAUVRFKXpomKjKIqiZBwVG0VRFCXjqNgoiqIoGUfFRlEURck4KjaKoihKxlGxURRFUTKOio2iKIqScVRsFEVRlIzz/wGESwCck6aTTAAAAABJRU5ErkJggg==\n", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAZsAAAGQCAYAAAB4X807AAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8vihELAAAACXBIWXMAAAsTAAALEwEAmpwYAAB82ElEQVR4nO2deZhcVZn/P7VXb+kl+0oCkQCdmBCaJToQCRBQSdhlCwMqBkTkJw64gbgNgwIzjjiIJk5QFBADCmELDCQsgooRAzFB2RKykYV0d3qt/f7+OPftc6q6EzqkK92dfj/PU09X3fXcW13ne9/lvCfgeZ6HoiiKohSRYG83QFEURdn/UbFRFEVRio6KjaIoilJ0VGwURVGUoqNioyiKohQdFRtFURSl6PSa2GzYsIGzzz6byspKBg0axJlnnsn69et7qzmKoihKEQn0xjibtrY2pk6dSiwW49///d8JBAJcf/31tLW18eqrr1JWVravm6QoiqIUkXBvnHThwoW8/fbb/POf/2TixIkAfPjDH+ZDH/oQP/vZz/jyl7/cG81SFEVRikSvWDYnnHACiUSCF154IW/5zJkzAXj22Wf3dZMURVGUItIrls3q1as57bTTOi2vra1l8eLF77v/kCFDGD9+fBFapiiKonxQ1q1bx3vvvdflul4Rm/r6eqqrqzstr6mpoaGh4X33Hz9+PCtWrChG0xRFUZQPSF1d3S7X9YrYfBAWLFjAggULANi+fXsvt0ZRFEXZE3ol9bm6urpLC2ZXFg/A/PnzWbFiBStWrGDo0KHFbqKiKIrSg/SK2NTW1rJ69epOy9esWcNhhx3WCy1SFEVRikmvuNHmzp3LNddcw9tvv82BBx4ImMDSCy+8wPe///3eaJKiKB+AdDrNxo0bSSQSvd0UZR8Sj8cZM2YMkUik2/v0Supza2srU6dOpaSkpGNQ5ze/+U2am5t59dVXKS8v3+3+dXV1miCgKH2AtWvXUlFRweDBgwkEAr3dHGUf4HkeO3bsoLm5mQkTJuSt213f3CtutLKyMpYtW8bBBx/MRRddxIUXXsiECRNYtmzZ+wqNoih9h0QioUIzwAgEAgwePHiPrdley0YbN24cDzzwQG+dXlGUHkKFZuDxQb5zrfqsKIqiFJ1+M85GUZS+z4hbR7C1dWuPHW942XC2XLOlx473sY99jFtvvXW3gw8ffPBBDj744I7M2BtuuIHjjjuOE088ca/OvXLlSjZv3swnPvGJPdpv8+bNXHXVVdx///17fM5f/OIXzJ49m1GjRu3xvj2NWjaKovQYPSk0xThed3jwwQdZs2ZNx+fvfve7ey00YMTmscce63JdJpPZ5X6jRo36QEIDRmw2b968R/tks9kPdK73Q8VGUZR+S2trK5/85CeZOnUqkydP5r777gPg6aef5vDDD2fKlCl85jOfIZlMdtrXTUa6//77ueSSS3jxxRdZsmQJ1157LdOmTeOtt97ikksu6ejsd3Xc8ePH861vfYvp06czZcoU/vGPf+SdK5VKccMNN3Dfffcxbdo07rvvPr797W9z0UUX8dGPfpSLLrqIdevWceyxxzJ9+nSmT5/Oiy++CJhhIZMnTwaMEFx77bUceeSRfPjDH+ZnP/tZxzl+8IMfMGXKFKZOncrXvvY17r//flasWMGFF17ItGnTaG9v3237v/rVrzJ9+nS+//3vM3369I7jvvHGG3mfPygqNoqi9FuWLl3KqFGjeOWVV/j73//OKaecQiKR4JJLLuG+++5j1apVZDIZ7rjjjm4d7yMf+Qhz587llltuYeXKlRx00EEd697vuEOGDOHll1/m85//PLfeemvecaPRKN/97nc599xzWblyJeeeey5gBrI/9dRT3HvvvQwbNoz/+7//4+WXX+a+++
7jqquu6tS+//3f/6WyspK//OUv/OUvf2HhwoWsXbuWxx9/nIceeog///nPvPLKK3zlK1/h7LPPpq6ujrvvvpuVK1cSCAR22/7Bgwfz8ssvc91111FZWcnKlSsBuPPOO/n0pz/d7e9kV6jYKIrSb5kyZQr/93//x1e/+lWef/55Kisr+ec//8mECRM4+OCDAbj44ot57rnn9vpc73fcM888E4AjjjiCdevWdeuYc+fOpaSkBDADZD/3uc8xZcoUzjnnnDxXnvDkk09y1113MW3aNI4++mh27NjBG2+8wVNPPcWnP/1pSktLAVPUeE/bLwIIcOmll3LnnXeSzWa57777uOCCC7p1PbtDEwQURem3HHzwwbz88ss89thjXH/99ZxwwgldTl/SFW76bk9UQIjFYgCEQqHdxmBc3FmJf/jDHzJ8+HBeeeUVcrkc8Xi80/ae5/HjH/+Yk08+OW/5E088sRct79yWs846i+985zvMmjWLI444gsGDB+/18dWyURSl37J582ZKS0uZN28e1157LS+//DKTJk1i3bp1vPnmmwD86le/6piY0WX48OG89tpr5HI5fv/733csr6iooLm5udP23T3urtjVcYWdO3cycuRIgsEgv/rVr7oM1J988snccccdpNNpAF5//XVaW1s56aSTuPPOO2lrawNMUePCc+5J++PxOCeffDKf//zne8SFBio2iqL0IMPLhu/T461atYqjjjqKadOm8Z3vfIfrr7+eeDzOnXfeyTnnnMOUKVMIBoNcfvnlnfb9/ve/z6mnnspHPvIRRo4c2bH8vPPO45ZbbuHwww/nrbfe6lje3ePuiuOPP541a9Z0JAgUcsUVV/DLX/6SqVOn8o9//CPP0hAr7NJLL+Wwww5j+vTpTJ48mcsuu4xMJsMpp5zC3LlzqaurY9q0aR0xo0suuYTLL7+cadOm4XneHrX/wgsvJBgMMnv27G5f4+7oldpoe4vWRlOUvsFrr73GoYce2tvN2K/561//ype//GWeffbZfXreW2+9lZ07d/K9732vy/Vdffe765s1ZqMoitJHWbFiBRdccME+r4Z/xhln8NZbb7Fs2bIeO6aKjaIoSh+lrq6O119/fZ+f141h9RQas1EURVGKjoqNoiiKUnRUbBRFUZSio2KjKIqiFB0VG0VReo4RQKAHXyOK08xnnnmGU089FYAlS5bs82wvgJtuuomJEycyadKk960AcNVVV/X7WYw1G01RlJ6jp2cE2IPjeZ6H53kEg3v2DD137lzmzp27hw3bO9asWcNvfvMbVq9ezebNmznxxBN5/fXXCYVCnbZdsWIFDQ0N+7R9xUAtG0VR+i3r1q1j0qRJ/Ou//iuTJ09mw4YNfP7zn6euro7a2lq+9a1vdWy7dOlSDjnkEKZPn87vfve7juW/+MUvuPLKKwHyphMAOw3Bu+++y3HHHce0adOYPHkyzz///F61+6GHHuK8884jFosxYcIEJk6cyEsvvdRpO5lS4Oabb96r8/UF1LJRFKVf88Ybb/DLX/6SY445BoAbb7yRmpoastksJ5xwAq+++ioHH3wwn/vc51i2bBkTJ07Mq3DcHe655x5OPvlkrrvuOrLZbEcNMperr76a5cuXd1p+3nnn8bWvfS1v2aZNmzraCzBmzBg2bdrUad//+Z//Ye7cuXnldPorKjaKovRrDjjggLyO+7e//S0LFiwgk8nw7rvvsmbNGnK5HBMmTOBDH/oQAPPmzWPBggXdPseRRx7JZz7zGdLpNKeffjrTpk3rtM0Pf/jDvb4Wl82bN7N48WKeeeaZHj1ub6FuNEVR+jVuwcq1a9dy66238vTTT/Pqq6/yyU9+co+mDwiHw+RyOQByuRypVAqA4447jueee47Ro0dzySWXcNddd3Xa9+qrr2batGmdXl0lH4wePZoNGzZ0fN64cSOjR4/O2+Zvf/sbb775JhMnTmT8+PG0tbUxceLEbl9LX0MtG0VR9huampooKyujsr
KSrVu38vjjj/Oxj32MQw45hHXr1vHWW29x0EEHce+993a5//jx4/nrX//Kpz71KZYsWdJRyv+dd95hzJgxfO5znyOZTPLyyy/zr//6r3n77ollM3fuXC644AK+/OUvs3nzZt544w2OOuqovG0++clPsmXLlo7P5eXlHdMD9EdUbBRF6TmG07MZaXs4Y8HUqVM5/PDDOeSQQxg7diwf/ehHATM9wIIFC/jkJz9JaWkpxx57bJdzy3zuc5/jtNNOY+rUqZxyyikdVtMzzzzDLbfcQiQSoby8vEvLZk+ora3lU5/6FIcddhjhcJjbb7+9IxPtE5/4BD//+c8ZNWrUXp2jr6FTDCiK8oHRKQYGLns6xYDGbBRFUZSio2KjKIqiFB0VG0VR9op+6IlX9pIP8p2r2CiK8oGJx+Ps2LFDBWcA4XkeO3bsIB6P79F+mo2mKMoHZsyYMWzcuJHt27f3dlOUfUg8HmfMmDF7tI+KjaIoH5hIJMKECRN6uxlKP0DdaIqiKErRUbFRFEVRio6KjaIoilJ0VGwURVGUoqNioyiKohQdFRtFURSl6KjYKIqiKEVHxUZRFEUpOio2iqIoStFRsVEURVGKjoqNoiiKUnRUbBRFUZSi06Nic//993PWWWdxwAEHUFJSwqRJk/j617/eaa7vhoYGLr30UoYMGUJZWRknnngiq1at6smmKIqiKH2IHhWbW2+9lVAoxH/8x3+wdOlSPv/5z3PHHXdw0kknkcvlADMXwpw5c1i6dCk//vGPeeCBB0in0xx//PFs3LixJ5ujKIqi9BF6dIqBhx9+mKFDh3Z8njlzJjU1NVx88cU888wzzJo1iyVLlvDCCy+wbNkyjj/+eABmzJjBhAkTuPnmm7ntttt6skmKoihKH6BHLRtXaIQjjzwSgE2bNgGwZMkSRo0a1SE0AJWVlcyZM4eHHnqoJ5ujKIqi9BGKniDw7LPPAnDooYcCsHr1aiZPntxpu9raWtavX09LS0uxm6QoiqLsY4oqNps2beKGG27gxBNPpK6uDoD6+nqqq6s7bVtTUwOY5IH9mhyQAhJA0n+v07crirKfU7RpoVtaWjjttNMIh8Pceeede328BQsWsGDBAoD+N995Ctjpv9qBjL88gJH7CFACVAHl6GTdiqLsdxSlW2tvb2fOnDm8/fbbPPvss4wZM6ZjXXV1dZfWS319fcf6rpg/fz7z588H6LCS+jwJYBNQjxGcEMayCTjbBDDfQjOwHSgFBgNDUNFRFGW/oce7s3Q6zdlnn82KFSv4v//7P6ZMmZK3vra2lieffLLTfmvWrGHcuHGUl5f3dJP2PTmMcLyNsWJEXJLki41YNln/bwBj+awH3gPGARXki5OiKEo/pEdjNrlcjgsvvJBly5bx4IMPcswxx3TaZu7cuWzatKkjcQCgqamJhx9+mLlz5/Zkc3qHDPBPYDVGXNIYC6cdY91k/FfWX5fyt5P1KYwgtQNvAFvRmI6iKP2eHrVsvvCFL7B48WKuu+46ysrK+NOf/tSxbsyYMYwZM4a5c+cyY8YM5s2bxy233EJ1dTU33XQTnufxla98pSebs+/JYERmK8YayfrLcv6rK8S6ifnbpjExnAjG7bbBP84o1MJRFKXf0qOWzeOPPw7AjTfeyIwZM/JeP//5z80Jg0EeeeQRTjrpJK644grOOOMMQqEQy5cvZ+zYsT3ZnH2Lh7Fo3sGIQ8J/iaWSwwiJvLJYofGANv8l2yWc7TZj3GqKoij9lIDnef3OSVNXV8eKFSt6uxn5bAb+jLEVxYoRywaMsBRaJjlnXczfN4uxaGL+uri/PAoc6n9WFEXpg+yub9Z8p56gHXgZIxSe89fDCIm40kTWAxhBkVcAY8mAFZM2TGZaAiM8OUziwIdQd5qiKP0OnWKgJ3gT2IEN+mcw7rN28sfVBLF3PI0RlGZ/G0mBbscITNBf7/nbev452ot+NYqiKD2Ois3ekgZWYSwUEQapCiBWSwaTcSZVAy
QdWtxjOaAVIyTiShMBSmATBzLAln1zWYqiKD2Jis3esg4TvPcwQpLFpi63YqwTNyNNhKcVaMG6z6L+vq1Yd1sCK2Bpf1m9815RFKWfoDGbvUXmfMtghCGFjclIppnEawLOOkEsoQg2KaANU74mgBUWV8Rke0VRlH6Cis3e0IJJdY5jBABsRQCpFgA2oF+YICBjacTVlsGITAQjKmX+sjA2DhTCiFFZka5JURSlCKgbbW/YCDT578Xd1YYRmgB2cGbYfzmDNf+t5d/4XdPvyLZkjYiEsUkBaWyyQADrnpNMN00SUBSln6Fi80HJAm9hrZYExpIRayWHTQqQDLOU2e/V9Kv8V/K/uLj9YloCLbZcDRiRkaQAsMkGHtb6kXWKoij9BHWjfVB2YoL1YMQhRL77zC22CdYqScHtqdsBuDhyMZWhSlvaphUztkYEpxzrRkujjwaKovRbVGw+KFuwyQBgXV6S0rwLmrwm7s7dDcAX+ILZL4aN3chgziBGcCLY0jZi0cRQFEXpV6jYfBDSmHlqpAJAE0Yg3OKbkj0mBM22d3t300orM4MzOTR6qNmmDZuN5iYAZDAp0WLVSPq0JgcoitLPULH5IDRiXF5ixcg4GEl/FgvHdXv5CQQLvYUAzA/MN8IhVQVcqyWIccfFsGnO4oaTWT0VRVH6ESo2e4qHKboJRhSimEGdZVhXWFcE4GXvZf6W/Rs11HBm9kxjwUgBTklvBpNKLVMNSCUCP97DENSNNkDxPK/j1ZsEAoGOl6J0FxWbPSWJqVEGdtxMHBtXybJLF9qi4CIA5gXmEQ/FbdWBDFZ0pDRNCCMuUaxV0wIcXKwLU/o6LS0trFy5knXr1vWq4AwbNozp06czbNiwXmuD0v9QsdlTmjBBfZkKoAwjAu9h3FsyvbNbJcCDZCbJPZF7IACfznzautmkFloCI1ohjACVkj+2JuWvH170K1T6KA0NDTzwwAMsWbKEXG5Xs/EVnyOPPJKhQ4eq2Ch7hIrNniAuNA8bsJeMMQnod1VGJgCP8AgNgQamelOZlp1mt5VxOSI4UqZGYjXSp+wAZu/i+EqP43ke2WyWdDrd626rSCRCOBwmnU6zffv2Xrdsxo4dSzKZ7LXzK/0TFZs9IYWZEkBK0kh5Galh1oAN+gt+AsBd4bsAuNi7OL9CdBabhSYWjEwRLVZPEzAIqC3q1SkOnuexatUqXnjhBVpaWnqtHfF4nBkzZjB9+vRea4Oi9AQqNntCPcbF5cZqWjGiEcDEV3Zi4y8AWXgv8B6PRR4j6AU5P3O+dbV15UKTUjUBbFp0EjgO41pT9gm5XI5XXnmF22+/nS1bem9eh+rqakKhEFOnTu21NihKT6BisyfIVAJS0VliN5KOHMLc0SbsGBlgcXgxmUCGUzKnMCI5wgqTjNMRd1zcP64kBjRjth2NmRJa2Wd4nkcymaSpqYnGxsZea0cwGCSRSPS6K09R9hYtgNJdEpjOX7LE2jCCEMdOghb2P5djRKjN/L0ndA8AF2YvtOnRSWzdM3f2Tqkc0OJvNxyYgHGjKYqi9FNUbLrLTuzAzRzGpRVxPrf7f7MY0fErCmxo3cAfQn8g7sU5LX2aOZZkq6WxbjmxlMQqKgdG+H8nkp/dpiiK0s9QN1p3qceWi5Fgfgs2W6wMO1VAEmPpxOC3ud8CcGriVCp2VtipBmQ8XJL8wZ1lQLX/eZD/WTNMFUXp56jYdAdxm4lFIkH7GLbCs8RoUpj4jV+k87eZ34IH50bPNaP/ZW4aSXkW27IMm9lW5qwfiZanURSl36Ni0x2asVWdWzB3LYQRlyb/vcxXAx2Tp70TeoeX0i9RSimfiH3CiJMM1szSYf10VB6QApsSCyoBxrDrEjiKoij9BBWb7tCIsUBaMIIg49masHPYSB0zGXuTgfvb7gfg1MCplEZKjWjIRGhgrKIw1kICqMBsF/PfVxXvshRFUfYVKjbvRxojMjLiP4exdGQsjEx2lqRDZEiYZb8L/Q
4ycHbkbGv1RP2XKzwydbRYO0F/2XC06KaiKPsFKjbvRzsmRhPCDOCU4D+YTLFWbAmZdoxQlMK7yXd5MfMiMWJ8PP5xc6dzGPECKyo5jBCV+e89bBr1kCJfm6Ioyj5Cxeb9aMQKjYz6j/nLJdW5GSs4CSADD2UfAmB2dDbl0XI7z43MvJnCuMmk0nPG/yxiUw5UFvviFEVR9g0qNrsjh3GhyZQBpZj6Z0nsqH9JWW7HClEGluxYAsDpnG4z1yTbzMOITcI/Zgm2MoFUg65Bvx1FUfYbtDvbHUk6LBVKMAkBMrCzFJuFFnXep6El0MLT6acJEODUylOtuEhatAz6FIER60bG34SBofvqIhVFUYqPis3uaMVYNTK6X1xfrdhxNVIsM0JH4P/JlidJkeIjkY8wLDzM7B/Hut1kKmj8Y5Zjx+bIQM7yfXKFiqIo+wQVm92xE2NptGHuVAm2HI2skzEwUjQzBI8mHwXg1NiptnqzpDbL7JzuIM527MDNICZWo/PWKIqyH6FisyvSGBGQeItMI9CIdYMlMG4wqfqcgFwqx2PJxwD4ZOyTtrxNKdaaEcFJ+/tKMU/8bQYX99IURVH2NSo2u0IqAnjY7DMZVyNTAIQxVo+IRgmsZCVbclsYHRzNlNAUKyztGFGRuIz8lZpqEgcqwbjqFEVR9iNUbHZFK9YqkakFRCRyGAGRqQVS/vYheLz1cQA+Uf4JAoMCduyMVBcIYcvSJPz9apx15dhxPIqiKPsJKjZd4WEEBowgiAXSjBEFmV2zMIMsBU82PwnAKcFT7NTOku4sc+E0Y6wcER3P3y6IqfisKIqyn6Hz2XSFpCnLOBgwrjIRiiAma8xNX45Bc1kzL2ZeJEiQWSWzjNXSiHGhZcgvbxMnvyZaBDutgKIoyn6GWjZdIbNoBjECIVaMPyFah0UjY2bazW7Lk8vJkGFGfAZV5VVmoaQ7i5hIiRqZibMS60KL+y9FUZT9DBWbrpBxNJLiLBaIX/eMVmwmGhiByMDTO54G4KTYSWZ/sRtlwKdYSmFMbEb2l3E3g+j30wm0tLSwYcMGmpqaerspnQiHw4waNYrhw4cTDKpRryj7EhWbQjyMmMhATin3L4M3cxghSWOLcPoxmWWJZQCcEDvBCo2IRylWsMqwc+OUYeM1VUW9sn3Chg0b+PnPf87KlSt7uymdqKio4Pzzz+f0008nFtNy2oqyL1GxKSSNLVMTxcZqUtiqzlFnXQLwYCtb+Xv675QESji6/Ghr2cTIzy6TuE8ZxoUmtdBK2C9caE1NTaxcuZJly5b1dlM6UVNTw0c/+lFyudz7b6woSo+ivoRCRFgk1TmHrfxc7f/N+NtG6Cg182zbswB8NPZRYl7MZqG1YWqqySDOEDY+E8GIlcRqVPoVRdlPUbEpROI1MpATjBUiUwSUY+6aX3STrFn+XOY5AD5W8TE7OZpYQSFs1egK/2+Tv6zMP181/T5eoyiKsiv0WboQmSjNnVkzjBGeNFZgJKMsaLZ/rtmIzXGR46zIuOKR9Y8dxxbm9BMLiGNTrBVFUfZDimrZnHLKKQQCAa6//vq85Q0NDVx66aUMGTKEsrIyTjzxRFatWlXMpnSPDNaNBkYEJFaTw1g9YISkFDPyvwzqc/WsSq8iFohxZPRIs12Lv68kFEgKdAZbhLMFm52m8WpFUfZjiiY29957L6+88kqn5Z7nMWfOHJYuXcqPf/xjHnjgAdLpNMcffzwbN24sVnO6hyQHgE1HbsKOsRmEFQ3ZJgYvBl8E4Kj4UcTDcTuWppmOMjaUYlxlEYwIZbGTpsWxadCKoij7IUURm4aGBq6++mr+67/+q9O6JUuW8MILL/CrX/2K888/n1NOOYUlS5aQy+W4+eabi9Gc7iNiU4YRiST5RTEj/ueAv84vXfNimxGbj5R/xIpSJaZ6cym2eoDgViYIYFKeNV6jKMp+TFHE5qtf/SqTJ0/m/PPP77RuyZIljBo1iuOPP75jWWVlJXPmzO
Ghhx4qRnO6j8RgJN1ZCm2GsYMypfRMBmO5NMIfm/4IwIzwDLPcc44pWW2N2MyzCuf4YvUoiqLsx/S42PzhD3/grrvu4vbbb+9y/erVq5k8eXKn5bW1taxfv56WlpaeblL38LCTpLVjrBMpuimFN1v97aIY66cKMhUZXkq9BMCMwAxTcaDR3zaFTXeu8P/KmJsUtnyNxmsURdnP6dFstFQqxWWXXcY111zDpEmTutymvr6e8ePHd1peU1MDGBdceXnnOZEXLFjAggULANi+fXvPNVrIYoRB6pSFMK6yJEYgKjGCk8RWDQjAmuwa2rw2JkQnMGzwMBvTcSdNk3hMGiNGVRiByfrH1QT0PkcgEKCsrIyRI0cSCPSej7O6upqKiopebYOi9AQ9KjY333wz7e3tXHfddT15WADmz5/P/PnzAairq+vx45PFuNHiWHdZ2F8uBTndwpv+tAAr2lcAcFTJUR3TQneISxbjaivBCopUH5DjlqPxmj5IMBjkiCOO4Mtf/jJtbW291o5YLMbUqVMJh3WUgtK/6bH/4PXr13PjjTfy85//nGQySTKZ7FiXTCZpbGykoqKC6upqGhoaOu1fX18PmCe5XqEdG19J+MtkTEwbNsNMqgf4tdH+0vAXAOoiddYqwt9WqqI0YzLRyrFZatXYgZ5KnyMQCDBp0iQ+9KEP9XZTCAaDatko/Z4eE5u3336bRCLBvHnzOq279dZbufXWW/nb3/5GbW0tTz75ZKdt1qxZw7hx47p0oe0T2jAWRxNGANqwVQM8oAHjPgtha5mVwIptxrKpi9TZAp4x7JQCImCSVIC/LIxNe1b6HIFAgEAg0KeqQ8diMcaNG8eHP/xhPM97/x2KxMSJEykt1ackZc/oMbGZNm0ay5cv77T8+OOPZ968eXz2s59l4sSJzJ07lzvvvJNnn32WmTNnAqZ448MPP8wFF1zQU83ZMzxMLMbDWhtpTKBfZuGsxlZ9jpjd0l6aV5OvAjC9ZroREYnXQL5LDYyQDcNWKKhEx9co3aampoZzzz2XY489tlfFpqamhgMOOKDXzq/0T3pMbKqqqvjYxz7W5boDDjigY93cuXOZMWMG8+bN45ZbbqG6upqbbroJz/P4yle+0lPN2TNy2HEzUhxTqghI+X8wGWWSNAD8M/NPUl6Kg6IHMSjkT7EpAiPxmqh/TJlsLYcRsAjWTaco3aC0tJSpU6cyderU3m6KuvWUPWafRx2DwSCPPPII11xzDVdccQWJRIIZM2awfPlyxo4du6+bY8hiBESsDAnqR/z3YCtAS/pyGl7ZaSokTI1ONeLU1e9Pqg+I16EZGO6fs5c8hkr/RTt5pb9SdLHpytyvqalh0aJFLFq0qNin7x4ypUASIwJyVyQ2U49xq8ly//f+StaIzYejH7ZWkVgyUu4G/7hB53hB/zwar1EUZYCgThwwbjPJOothBmwmMVZIG8YqkTpmYfuSeM3UiqlmmyBGuPwJ1QCbZCDWkpTCkQKciqIoAwDt7sBOHRDFJgnIWBqxPsLYeI4fd1mTXANAbbzWbOPGa5qwlk4cIzJps19HCRy9+4qiDBC0u/OwVZhLgR3Ykf3tGIHws8863GNZaE40syG9gWggyoTABLOdi1QZkBToANaiCWFL1SiKogwAVGxkoGYQO5+NCEEppqqA1ERzqjf/o/0fAEyKTiIcCFvLx81ewz9em7+uFGs5laCVAxRFGTDos3UOY5V4GEumCjvZWYL8igEhOtKYX8u+BsCh8UPN8oy/fzt2vhuJ17T7+5X7x4yixTcVRRlQqGUjUwJIqRkp+1+PFYUYxhJJ0OEuezPxJgAHxw7OzzzLYIpthrFTP1f4+yWwIiPVnxVFUQYAKjYyxiaKcXe1YQQjjrVQwLrVPCANb6aM2EwMTcyP1wT8fQP+PlFsXbU0xroJYuNAiqIoAwAVG6lZVoHJIGvHWDFRbCUBqY8m7rUcvJV6C4CDyg6y0zpLHTT32C1Y6yjqLytBy9
QoijKgULGRGEortg6ajJGRGTqbMdaIBP8DjmUTnGirRIvrzBUSSakuxc7UqZloiqIMMFRskpjOfyd2oGUTdpbOMDAYWxMtC800U5+tJx6IMzw23FozMr5GUptjGLdZBiM0EexMn4qiKAOIgf187WFjNmFnWRrjPotiXWNxzFTRJbAhswGAsZGxBDIBOyg0V7CPxGtCGHdaKVqmRlGUAcnAtmxkKoAMZoR/PUYMpLqzWCMSs/ErB2zwfLGJjTX7yUyegYJjS7wmjBkkmsSKkKIoygBiYIuNBP2DWIsmirVkghi3mBTSxKzb0GLEZlxwnI31SGmakN0ODyNYw7Dxn8FocoCiKAOOgS027kRn7Rjro8X/nMJOOxDDpjeHYJO3CYDRsdE2DiPjaGQKgjAmXhPyjy1jdTIM9LuuKMoAZGB3e64bTSo6RzH10cTdJa4xEZQ0bE1uBWB4YHj+GBuJ1XjY0jUBbMabpEfvx5GyvjzfSl9um6Ls7wxssRE3Whhj5aQx1kwJHbNxdiDiE4WtAV9syoebmI0M4HSFSebGiWC2CWHEza02sB9SU1PDzJkzGTJkSG83pRPl5eVMmjSJUEj9mIqyrxnYYiPTQZdi3GY7/PcyAVobdhpnSYXOwLbENgCGZ4bbTDYpc4O/vUzGJtM/p7BCsx/3dWPGjOEzn/kMbW1tvd2UToRCIaqrqwmHB/a/vaL0BgP7VyfFNSV247rNZIBnE7aKgG/ZbMsasRkaHWoLdoobTuqrhYFqjEhJ7Ee22Y+Jx+OMGTOmt5vRp8nlcmSzWXK53Ptv3AuEQiFCoZC6HZUeZT/v+t4HGfPSgLE4KjAJAlmMgAT8ZWATBtLQkG0AoIYaG/cJYMfaSCUBqTrQBIz012lNtAFPY2MjL774Im+++WZvN6UT4XCYadOmUVdXRzyuA8KUnmNgi40IRRY79sXDTuHsxlb8igBezmNnbicAVWVV1iUmyQHicmvGuOQCmCy3FNbqUQY0O3bs4IEHHuDRRx/F87z332EfUlJSwuc+9zkmT56sYqP0KAO765NAfgUmPiPusmr/swT4ZUBnDhKZBCkvRTQQJR6I2+QAV5jC/nGbgKEYK6cFM1dODyYHeJ5He3s7iUSiz3VaANFolJKSEo2RFJDNZmlubmb79u293ZROlJSU0Nra2if/n5T+zcDuBbL+qxxTG00y0cAKUDM2tgPszPpWTbDKFuAEO52AWDcxjMi4Y3U8ejQ5IJlM8vzzz/P888+TTBamz/U+hx12GB//+McZMWJEbzdFUZReZmCLTQ7T+bdhZ+GU7DOZKE3Gy6TMLq3ZVgDKgmU26C9TS0s9tQqMCw3/uM3AGEy1gR6846lUipdeeomf/exntLS0vP8O+5hPfvKTHHXUUSo2iqIMcLFJYyyZrdiU50aMNVJYndmfTC3hmzMlwRLrQpMpA8RqkTE24vIuwwiam+3WU5eQTpNIJEgkEu+/8T4mlUr12YwrRVH2Lfvx8MJuEMBaN/jvnYKbnbYNQyJoOvVYKGbERCZak2oB4kJLY5MESjBiU1isU1EUZYAwcC0bGc3fhnF77cS4ztxYi1Rp9jAusiwkWo3YxL24cYtJHCaMrRAts3G6A0NLULFRFGXAMnDFRpCKzVLjTGy9KNY6ydAhKKmgCd5Eg1E7q2cWKzwy74240FKYQpxV2ArRA5hsNsu7777Lli1byGazvdaOeDzO6NGjGTx4sA5eVJR9wMAVG9eF1oKxPFIYgQlgLBIPM0ZGaqhl6BibE/ACdlvJNpM06XZszEcqD0jl5wFOIpHgqaee4v7776e9vb3X2jFy5EguueQSZs2apWKjKPsAFZs4JimgHCMWO7Di48xhI6Vq3KkGGETXlopMK9AODMe60wb3/GX0N7LZLG+//TbPPvtsr2bQHXjggXz84x/vtfMrykBj4IqNh42vSMqzzEdT4b8PYwVHxuQknc8t2JI0MnWAlK0RS6cdK2qZIl+ToihKH2Vgi00IIy
qVwHsYN5q4uiKYGEs71l0GHW40L+BZ60hSnaWY5yDs4NA0xqoZ5R9PURRlADJwxQbyKzwXVueQzzJw049lRz0z+CaVS3UsMyuw2Whi/UjVAEl9jnZxHkVRlAHAwBUbEZg4poaZjJdpxbrLZC4aIQclvsnSTrsZQ+MW4HRJYARmmH+MVlRsFEUZsAzcQZ0iNlI0U6aElkw0N17j7FMaMnVo2nJtxm2Wxk6sJkIitdTKMaIj00L7xTwVRVEGGgPXsoH8AZhpbKZZDTZWI260hNm2JO1bNtl2W9bGPVYYIzJlzrJmYDQ2WUBRFGWAMXAtGzBusgQmoJ/FxlWCGBdZ3F/WQEc1gfJIOQDNuWabfQY2NhPDCIxUDshhhKcVI1yakaYoygBkYFs2blqzDM7M+suTGCEqwQiPn/pclakiQICduZ1kyRKKhcx+ErsRMkA9xkoqxYhNCSo2iqIMSAa2ZSMj/2W8TCXGEmnCCE0EO37GTxYIlYeoClXh4dEQbrAFON3Av+e/yjDi0o4dENp7FVoURVF6jYEtNlIXLYN1iUWcda6AyHiaVqgJ1gCwo2mHEaoWTFxmJ3YStjJsMc4mrGhl0Iw0RVEGHANbbMIYa6MUKyYpjIUjAf4kRkia/Pc5GBIZAsB7gfds9QCw2W1gEwwyWAtHstcURVEGGCo2kkEWw1glYWedO3lakI65bkaFRgGwKbHJxntKsSIV95c1YURHpheQKgOakaYoygBjYCcIyDw0OYwQlGMsmwg2QSCCLWHjx2LGloyFZtgQ2mDqqEmdNTdBQErXxDGWUQwjSAnyJ2xTFEUZAKjYBDFCk8JYJVFsnbTSgu39WmjjwuMAWN+83rrbRHAk9hPDiFfAP1YCI0ziWougKIoyYCiKG+2xxx7juOOOo7y8nEGDBlFXV8eyZcs61jc0NHDppZcyZMgQysrKOPHEE1m1alUxmrJ7JN1ZrBv8vzHsQE9JVU5j0pdbYGxgLAAbshuMaISdY0jRzhxmrI18LsdYOGGM+CiKogwgelxsfvazn3HaaadxxBFH8Pvf/57Fixdzzjnn0NbWBoDnecyZM4elS5fy4x//mAceeIB0Os3xxx/Pxo0be7o5u0emB5AqAlLPLIqxWMoxYtSIieekgAAcGDsQgDeTb9rKAzJ4swpjEUl5mgazT4cYSTUCRVGUAUSPutHWrVvHl770JW655Ra+9KUvdSw/+eSTO94vWbKEF154gWXLlnH88ccDMGPGDCZMmMDNN9/Mbbfd1pNN2j3i7mrECEULxqUmiQEyQ2cZRpj82M6k4CQAXk+9TrY0SygU6izbfjIBg/z3OzHilUHFRlGUAUePWjaLFi0iGAxy+eWX73KbJUuWMGrUqA6hAaisrGTOnDk89NBDPdmc90csDjDWSRRjlcj00C0Yyybi/I1DRVkFYyJjSHpJ1jWvM+6xRv/V7O/XZrcnghGxNOaOS5KAoijKAKFHxeYPf/gDhxxyCL/5zW846KCDCIfDTJw4kdtvv71jm9WrVzN58uRO+9bW1rJ+/fp9P1VwBFtORlKY4xhLRIL+QhYjFE1wSPQQAF5LvGaOIfPZZDHWSwgjWq3+PiI67VjhURRFGSD0qNhs3ryZN954g2uvvZavfe1rPPnkk5x00klceeWV/OhHPwKgvr6e6urqTvvW1JhR+Q0NDV0ee8GCBdTV1VFXV8f27dt7rtHiMpNxNB5GJKowwpPBCEM7xmrxpyM4LH4YAK8mX7XbZLHjbeL+sbOY8TZg06NTaJKAoigDih6N2eRyOZqbm/nFL37BmWeeCcCsWbNYt24dN910E1ddddUHPvb8+fOZP38+AHV1dT3SXsBYGx4mdiOuM4/8LLMmrAvMj+McET0CgBXtK4yoSCUB1xKSuE0Ndnrooc77ip67DEVRlL5Mj1o2gwcPBuCkk07KWz579my2bt3Ku+++S3V1dZfWS319PUCXVk9RCWHEIoO1ZMTCkfTlICbQ77yOHHokAH9p/4
vZrh0jSo0YF1wzxoUWx6ZXi2stgBE1RVGUAUKPWja1tbX86U9/2uX6YDBIbW0tTz75ZKd1a9asYdy4cZSXl/dkk94fCfyLe6sMIzRtGAGB/AGY/oRok3KTqAhWsDG9kS2JLYyIjbATrUkqdBwTrxGBqfLPl3SWuyVxlAFBKBSisrKSYcOG9XZTOlFSUkJ5eTmBQOE854qyd/So2Jxxxhn87//+L0888QRnn312x/KlS5cyZswYRowYwdy5c7nzzjt59tlnmTlzJgBNTU08/PDDXHDBBT3ZnO4hlZ7LgG0YgZDqzK3Oexkn024+B4NB6krqWN66nD8l/8TpsdNt0L8EO8U02GmjExgxkknZkqjYDECGDBnCWWedxbRp03q7KZ0Ih8NMnTqVeDze201R9jN6VGw+8YlPcPzxx3PZZZfx3nvvceCBB7J48WKefPJJ7rzzTgDmzp3LjBkzmDdvHrfccgvV1dXcdNNNeJ7HV77ylZ5sTvcpxbjAYhhRcJMEpCpAK0YgoEN4jis5juWty1nWtIzTy083x3HjNp7zGuQva8YKTBMatxmAVFVVcdJJJ3HiiSf2dlO6JBgMmrFjitKD9KjYBAIBHnzwQb7+9a/zrW99i4aGBg455BDuvvvuDqslGAzyyCOPcM0113DFFVeQSCSYMWMGy5cvZ+zYsT3ZnO4Tx4hMHGOFNGBrnIWwhTMr/e39+Whm1cziO+99h2Xty8ydTPr7i9jIWBqZlC3on0MqPzf759Xf9YAiEAgQiWhxPGVg0eOFOAcNGsTtt9+eN7amkJqaGhYtWsSiRYt6+vQfDHeKAZkOIOe8WrFxHSEHx5QcQ0mghNWJ1Wxt38rw6HBjtUjlgbB/vBwmISCDyUwTAWvDuNbKin+JiqIovcnAns9GEIsjiLFOSjBurxxGgMTKETJAC0RTUY4tOxaAp9qfMttI8c44pjyNpE8H/OOK5RRxjq8oirKfo2IDtnJADJMAEMSmK0vqshTclHI0fqmbT5R/AoAHGx80QiJTC7jz5EjigBTolBlAM5i4jVSWVhRF2U9RsRHEspGYSgabCl2GDeTLgE/f0jm96nQAHm99nPZIe4fV0/FqwlgvWf+ziFmJf/w2jJtOURRlP0bFRohiM8kqMKLgFstMYas4y/QDZXBAxQEcUXIErblWnqp/ymwbwY7NCWHiNDJVdKt/rCas9fMeHUkHe0ogEOiTYyL6YpsURek9BvZMnS4hjIg0YuuagXWBJcgfEyPp0Qk4o+IM/tr+V+5vvZ85NXOs+0ymhXaz00r886QwghbBuOba6Twz6PsQDoepra3ljDPOIJHoe/MWHHHEEVRWVr7/hoqi7Peo2LiUYwQigbVwUsB2rIBImnIbHRlnn6r+FNdvu54HGh/gf4b+DxWxCiMqUp5GxtpkMWN3JEZThh3o+R4wlvxEhPchHo9zwgknMH36dHK5vjdnQVlZGUOGDOntZiiK0gdQsXGJYwL8GezgS0kekCQBCepn6Zjl80PBD3Fs6bE83/Y8ixOL+UzsM7bUjZDGiFkbVnii2NlCtwPD/fN3k2AwyODBgztq0imKovRVNGbjEsF09jLuRqYWCPkvSSIowcRhpDBnJXxm2GcAWLR9kbGCJG4jd7gK654LYqwaETUpZ7OtyNenKIrSS6jYFFKFSUuWVOiksy7tf45h56YBaIezY2dTHiznhfYXWJNeYwtyBjHuuDDWogErajIINIUZf1NoESmKouwHqButEBEZmda5AiMQKfLrmkFeYc7yWDnzqubx0/qf8sNtP2ThyIXmODINdBY75qbcPxbkZ62lgHeB8ey3jwGBQIDS0lKqq6uJRnuvCmllZWWvnl9RBhoqNoXINNFJjODIwM4gNv05jZ2GQGI3abi66mp+Vv8z7tp5F98b+T1GJEeYY0oFggDG7SZxmaS/rhXjShuCsW6qMG66/ZBoNMq//Mu/UFJSQirVe9OVVlVVUVtbqynairKPULEpJIApnNmKHe
UfxghNKXa2TnditIBZfvCggzltx2k8uPNB/mfb//Dvo/7drBOXWglWaDx/X/kspXIAtmAsqv2wVmM0GuXoo4/miCOO6NV2BAIBwmH991eUfYX+2rpCss/cUf5iwYB1h7lTfviWzjVV1/Dgzgf5SeNPuGbENVR5VWZdKR3Za2Sx42pk8Kic08NUGtgMjGOPUqH7A1LxWKseK8rAYj+NDOwlYYx1k8aIQDnWfSZC4cq041L7yKCPcFzZcTRkG7hlyy1GQEqxddVaMWIi00RnMRZN0lkfxAwu7Tx7tqIoSr9ExWZXSGxFXFtVGNHxMAF/ER+phZY06wKZAD8Y+gMAflj/Q95Nv2uESNKhA5gkgzKM2AT9zyUYARJ3WgLYhGanKYqyX6BisyuiGMEJYzp8v8ozYazwuDGcMv9zBRwz/BhOrzyddq+d7239nhGZEHZitXJsLEeSBNIYgQlhRSkFrMeWzFEURemnqNjsiiDGlSYToGX8VxorHuIGK8dYQTKIMwH/Mfg/CBJkQcMCXm171ewXxg7mbMG4ytowoiLTTrdjK0VLIsIm8ouCKoqi9DNUbHZHGXbE/yBsVpkMzJTMXfcuJs3yQ8sP5YrBV5Aly/yN88nGs0acmrEla0TQRKgi2CoDYWx8aAcmQ00FR1GUfoqKze4IYsa+eJiOXqYWkAGYrdiqADlskB8gDTcOvZFRkVH8OfFnfrblZ8ZNJq64LDZ1Gv9zBJvp5tGRUk0EIzZ7MRWBoihKb6Ji836UANVYF5q4wiRmIynNEmORZRUwqGoQPx73YwC+vvXrbPQ2mmO6Y27EemnCiFGr/z6FnXytzd9nK8bKUcFRFKWfoWLzfkgatJSeEatDLBHJJJMBmqXY2E0GzgifwZyKOTTlmvjX9f9KNp21AiVp0G3+MeJYd5rrWpPkgjSwFhUcRVH6HSo23aEC0/mXYIQigxEIQQppuuMUc0ArBCIBFoxcwLDQMJa3LecHzT+wAzdz2OyzuHMsMN9M1j+XxGoC/rbvYqYkUMFRFKWfoGLTHYLAMKwbTSwdSRhox1o8MsmaM3XAiPgIfnnQLwG4YdMN/LH+j3ZMTRbjlpNU6HZ/WRNGkFzrR6pCt2FiONvQpAFFUfoFKjbdpQwTuxFRiWJEZxB2+ucgdtxNyFlfCqdUn8K/Dfk3smQ5d/O5bE1vNWJUjh3IuRMjJCH/mFIyJ+q/xL2W88+xAZMWLTN/Koqi9FFUbLpLEFOJuQybMSbLwSYGlGKsFKkI4Mx58x9D/oMZpTPYkN7AGe+cQaIsYda1YJIDIL8Ap9RjkxhRBuOuy2AFrQkjOu68O4qiKH0MFZs9IY4RHJmGIOW/slhRkcGfIWe/tNkuGojy+9G/Z2xkLH9s/yPz356P1+wHXiTF2Z1iRQRmJzYrTV5SR60ZqMdUGmjt4etVFEXpIVRs9pQa7BTRFRgLRiwdmckzhxWdFEYoMNsOrxnOkkOWUBos5Vf1v+I/6v/DrJPK0AFszCaJzXqTSgYR5+VaUc3AO5ixOBrHURSlj6Fis6eEgeHYDLQYNjYjAz6lCoCkL5dj6qlFgQBMi0/j7lF3EyDA9duu5yf1PzHbgBGYJmw8x3XFSTxIxK0NOxZHRO5d/6X11BRF6UOo2HwQyjHZaTKpGpg7KdMGlGIER4L5bkq0XzX69EGn89MRPwXgC+9+gbveu8tkmknRT6kKLfuIuDVjXWhp//iSQr0TKzjrMCKk6dGKovQBVGw+CAFMZppUEZBSNRK4F0sk67/cu5ygQ1Dmj57PLeNuAeDTb3+aB3Y8YEXLjd1IBYEkNlMthi19A1bogv66FGYCti1otpqiKL2Ois0HRdxpGWyWWggbuxFxcOM5Kf/vIDqyzq4ZdQ3fHPFNcuQ4d9O5/Hrnr+24mxzGTdaOTYEWIXNrqkm5myZMskCr/zeJEZv12JptiqIovYBOC703lAFDMW6rHMbSkUy0HMYikVH/QWzgviDj7DuV3yGXznHjjhu5aN
NFNEWauGLoFdZVNgj7WCDVojMYKymJTbOOYSdnCzjvJaZT4bc31sP3QVEU5X1Qy2ZvqcZUhgYbQ5GJ1mQWTncGzlDB/q0Q8AL8+7h/5+axNwPwhXVf4KZ3bsLLerZemiDjbFqxQibTIEgmm8Rpmsm3sHZiMtYa0Yw1RVH2KSo2e0sQGIzJSBNhSWDnvQk428pnca0lMMLgl7+5dvS1/HTCTwkQ4BvbvsHlWy8nHUvbY6QxbjURMfn2gs7x2zGiImNwZLCnJAs0Y2I5a7H12RRFUYqMik1PEAFGYDruSox7rQRzdyVO40+q1pE5JlWe3awz4LKay7hvxH3EA3EWNC7glNdPoSHTYKyZRmxV6UJEXKR+m2vxSOVo/HOmMMLzDmbagkThwRRFUXoWFZueogQYgy3GGceOv5G/8irFCEGafLdaDmiCcwafw7OHPcvwyHCWNS3jmL8fw+s7Xrc12IQM5hts8d9L7bRCa0ometuKESRJJJA06bXARqylpSiK0sOo2PQkZRgLJ46d+tlNhS4UAVkn1aMTdFQmOKriKF6a/BIfLv0wrydep+6dOhanFucfQ6wlmaYgh035kArS4lILYNOmy7DFREWckpistfdQS0dRlB5HxaYnCWAsl+EY0ZDstHTBX7egpiQViLXjWC7jYuP4w8Q/cE7ZOTTnmvnUW5/iqnVXkcqljJCksNWmwRbnzGAsmBR2zE4YWy064GzTjhGYRv/9ekxhzy2opaMoSo+hYtPTBDDjbioxnbsM/qzAVhWI+e9LsWNuRAxct1oKKtoruG/Sfdw2/jYigQg/3vJjjl19LG81vmX2FUtHRCGNEZHCWI1sk8BOZdDsL5P6bnH/vSQirMdM0taOio6iKHuFik0xCGDcaYP8zymMdVGKFRqZn8atNuC+T/mfyyEQDfDFEV/k+cOeZ1x0HC+1vsTUDVNZ8N4CPM9XAakSkKDrTLUMdoCoVBkAI0biypP6as3+tinMfDkbMDGdVjR7TVGUD4SKTbEIYOqnDcN0/tKRJ8mP1eD/FZdaGvOtyKRqzrDboyuO5m8T/8anBn2K1lwrl629jDn/nMOW5Bbroos6x5aBnQlsBQEZYCrb5PxztmKsmBZ/u3ZsUdEERng2YKsRaKFPRVH2ABWbYhLCuNSGYERA5sIBG7uRmE0QY/lUYNOhs+QX40xDTaSG+ybdxz0T76EqVMWjjY8y+dXJ3LXjLryolz+vTgwjGmLtiBUjyPTVLf5yGb8jgiTWkp8lB9iBoesx01KLiCqKouwGFZtiE8JYN9WYzj2GEZPBmGkHqrDp0NLJg61EIJlqElMJmdf5Q87n71P/zuxBs9mR3cHFmy7mpH+exBvtb9hzi2tMyuO4YpPAiIxkpcmMoO5rO0ZckpgYj0ypIAVGmzDVpbdi068VRVG6oEfF5oUXXmD27NkMGzaMiooKpk+fzqJFi/K2SSQSXHvttYwcOZKSkhJmzJjBc88915PN6HuIS20IplNvxXTWYK2CQreaVBkQC6WC/DRqYHRoNEsPWsovxv2CweHBPN30NFNencK/r/93koGkEZTCKabBWjtu1WhBkgPanH0iGKFLYq0fEZcMttinDBJtd65PURSFHhSbV199lRNPPJF0Os3ChQv53e9+x5FHHslnP/tZ7rjjjo7tPvvZz7Jw4UK++93v8sgjjzBy5EhOPvlkVq5c2VNN6ZsEMNbMKIxwSJC+HWu5ZMgvcyPZanFsmRv5xvyYSSAe4OKhF/OPqf/g4iEXk/SSfHPzN6ldVcuDTQ/iiYJ5GNGQ4p1u7EZIYufEkUy2bMH6en+bBNDgH0Oy2YIYt9wGjOg0YmNUiqIMaAJeRzrT3vGNb3yDW2+9lfr6esrLyzuWz5gxA4A//vGPvPLKK0ybNo1Fixbx6U9/GoBMJkNtbS2TJk1iyZIl3TpXXV0dK1as6Ilm73s8jGWzCdOht/h/xS0lgyzFwnArRDdhpx6QFOpm8jLKltUv48qNV/Ja+2
sAzBo0i/8e999MKZtitpfzSfXoUv/YCf/4NVhrJ4MRujDG0klixw/ht7HMb2MKW4FaBLMUawlVY4uKqvNWUfZLdtc399jPPpVKEYlEKCkpyVteWVlJLmd8RUuWLCESiXDuued2rA+Hw5x33nk88cQTJJNJ9nv8dGbG+Z+lGnQEKwKynTuGRuqrgeng49hsMie2M2vYLF457BVuG3cb1aFqljUtY9rfpzH/nfls3LnRCpPrVpMJ3WLk/0cEsHPl5JxzCiF/3U6MiCUwolSCEZo0Vjy3+C+xeFKoxaMoA4geE5tLLrkEgKuuuorNmzfT2NjIwoULefrpp7n66qsBWL16NRMmTKC0tDRv39raWlKpFG+++WZPNafvUwKMxcRxZNK1ENatlsMW8cxirIMK7ABQ6JxE4M9rE/EifHHUF3nz8Df54tAvEiDAwi0LmfjqRP5t47/xXvo9s38EOy+OWFCFbrU2f7sw+VMYZDEWmghR1D+/xHGkXe9i59N5DyNM2zH12DZjhCeBZrQpyn5Oj4nN5MmTeeaZZ3jooYcYPXo01dXVfOELX+CnP/0p5513HgD19fVUV1d32rempqZj/a5YsGABdXV11NXVsX379p5qdu8SwUxmNhLTmQ/Gpj5XYKoQyF9xSblIEkEWYy3FyOu0a4I13HbAbaw+ejWfqv4USS/Jf737X0z42wS+vfXbNKYb7XgasIICVmjc2UGFFPki4xLAuPakSkEO644rx4pqHCNWWzCC9A4mlbodO722oij7DT0mNm+88QZnnXUWtbW1PPzwwzz11FNcfvnlXH755dx99917ffz58+ezYsUKVqxYwdChQ3ugxX2EEEZkRmA6eEktjmBEx33id+ubFWaquXEWmRXUF6FJsUncN+E+Xp7yMh+v/DgtuRa+s/E7HPDHA/jG+m+wPbfdVo0GOy9OV9lqKWzcRyaFSxfs14iN1ciUBmIRSaXpRmzxURnn04RJLtiGsYKkvpsKj6L0e3pMbL7xjW8QiUR45JFHOPXUUznhhBO47bbb+NSnPsX/+3//j1wuR3V1NQ0NDZ32FYtGLJwBRwAz3mYcJvsshLEKwLrS0thxM2AsnUpsppogWW05bNabn912eNnhPHbQYzw35TlOqD6BpmwTN229iQP+dgBXb7iaTblN5hht2NiO+x+SwYiJW2ZHjp/GTtDmxp6EndgsNnHfhbEWW6tzjncxrrYtwNsYq2cHdppsFR9F6Xf0mNisWrWKqVOnEonkz+x11FFHsWPHDrZt20ZtbS1r166lra0tb5s1a9YQjUaZOHFiTzWnfxLHVIweia0mMBhrvVT7f8ux9c/cjldKz8j2bl00sUAicOzQY3lqylP8cdIfObXqVNpz7fz3tv9mwh8ncNHfL+Ll5pc711bLYUWo8L9GMudcS0gssqy/307sVAhh8isotGNTqiWBQoqSDvKPsd1/bcJOheDGtxRF6dP0mNiMGDGClStXkkql8pb/+c9/Jh6PU1NTw5w5c0in0yxevLhjfSaT4b777mP27NnEYrHCww48wpj041Hku7aC2AoCnrNM6qpJ4U6ZJbTQ2pHss1K77JjyY3h40sP87dC/cc7Qc8h6WX697dcc8Y8jOH7N8SxpXUJOenIpSxPu4titWJeatEvcfG5NNlcYgxiBacZOsSDuuIC/fT3GpSbVC+qdbRoxCQbb/W3q/e3EqlMUpU9R6JH/wFx55ZWcc845zJkzhyuuuIKSkhKWLFnCvffey9VXX000GuXwww/n3HPP5Utf+hLpdJoJEyZwxx13sHbt2h6J6+w3yNP9eOyMmpIVFsS6smTbKLb2mVudAKzrrVCEHGtn2pBp/Hbkb1nbspYfr/sxP9/xc55pfoZn/vEMB647kMtGXsanyz/N0PjQzlNSS2ynMGVaBDCIzbYTayWLsYSSmGw8V6Ta/WuRCgZpjEUnwppw3svkcG4tubB/72Ra7BidBVJRlH1Ojw3qBHj88cf5wQ9+wOrVq0
kkEhx00EHMnz+fyy67jFDIpDy1t7dz3XXXcc8999DY2MjUqVP5wQ9+wMc+9rFun6dfD+rcUzxMx7sdO6iyEeuyaseO08F/LwIgtcoyGKFxccfOyIRtfvXnplwT/9vwv9y29TbWJdYBEA1EOaf6HC4fezkfrfkogUDAZqWVkz9dtbjOSrGZbpL0IKVu5NrKnLZLwU8ZsCrFQEVAMqZ9JDDxKjGEU1jXokwq59eQ6yhmWoatPycvRVF6lN31zT0qNvuKASU2QgYTJG/0P7diOk/poMVNlcRWHpB5c5rIL8LpYWIeMqGbIB01wCDIkmXpjqXc8c4dPLbzsY7SN4eVHcYlIy7hooqLGBEYYS0JQdxZg5xlMiZIrC4RQxmfI/PopDBC4qZUSwFQqc2Gfz0V/nupXi0iIpaVCEzW39cd0Brwz1uCtX66ikcpitJtVGz2J9owgiNuKKlHJlZKyF/nVgNoxVoIMoYljY3fCE3+8kHkp0E3w7rUOhY0LuB/t/4v29LbAAgR4uOVH+eSMZcwZ+gcosGo2b7BP4YrGDKXT9g5tvznlWFnDQ1gLRHZphFr3Yhgpp1tZDxPACNUct0JrMDIINkwJvNPBEmqabvjgWRGVXFbFk7NoChKl6jY7G9IzGMHdtqCndgBmFLHTDp1GcsirrdS8muqgenUG/x1rrWTwbq2SiHtpXlsx2PcufFOHm14lIzvq6sOV3P2sLO5oPoCjg0eS6gmlD8ltbj3qpxlMu4mjBECsc6yGNHwsNWlg/4y9x5IGrU73sdvZ8eEcElsirhcj0yFLfXcslgrSGJLYiVl/c9iAcn5xE2nKEoHKjb7K2lMAkEjtvONYot6imUhwXo3HVoSCSSukcN2sq4rSYSqpmB5M2xr38bd7Xdz55Y7WdW6qmPVqMgozh1+LuePOJ+6ijoT32nAdNiukImFBflWUBojEBKPijrLpAxOq79+cEG7ZApusWLEbTYIGwNq9Letws7jk8LGmKTGW8Dfz41/SSKGK1rihpN712NpN4rSv1Cx2d+RqQHEDRbHpguLa22nv23I2acVm2ZcinVHubODtvqf3QQDD2NVldnlf2/5O/e+ey/3bLmHdel1HZuOi43jzCFnclb8LD4y+iMEw44ySNq0TIEtiIuvFStCaX87qUAtsSU3wcDDjueReI7sK1afuNbEWnGTI0QQRXQkZVzmEhJLKeqfVwa0es7xJQVdkhzEQhJrSCxJdcsp+yEqNgMBiYm8h+kUY1ihCGE6QQmiS5WBNuyTu3TEEleRGIa8d5/WM5hOvYb8TjMNXqPHn8N/5t7t97J422LeTb3bsXpEdASnDz2d04eczseqP0YsFTOdvCsMYDtsN2DvxmgkAaJw7FAbVkxcl5skH0gsBuz4pAr/fG5KtVRxwL8fknEncbA0Nj09Tf4spoPIjwdFsYNtxRKSqbdFgMQNKJW+1T2n9FNUbAYSEudoxHSgzVgxEUvGrw7d8dnt0Jv9z0FM5y5pzK71kPDfd5VO7QT3c16OPzf9mQc2PMADOx9gXXJdx6ZloTJml8/m1CGn8okRn2BEbIQ9jgwSdcf4SlVrKRwqwpDDdPDu/D+uyw2//a0Yt5mbNSc120JYC0omgSvHxr4CdE5QaPXXV2CnURDXXQwrYFm/3RXONmnyKzNIiZ8SrNUl46cku87NlpNrV+tI6WOo2AxEcpgOsRVjhcQxnVQD+Z2jdNDy9L0Tm3osT+hSeFOKe0qn73bcYj2U0TlmsR28Go+VbSv53fbf8ch7j7CyZWXeJkdWHMnJg0/m5OqTOTpwNJFBkc6uNYmVFMZ3BmGFTiwPcblJooBklZU5+4pgDKHz1ApiZcg1ithF/f1cd5kcU9LQM/49lH1l3E8YO3OpTIAnApZw2lniHE9ER74LqSohk9q5lpFrCWoWndILqNgMZKQS83ZMpyfVCKTDltk/JcYg2VlujTOpWSbWThBjOckxxCXljoERkv7LHXMDbGjewKNbHuXhto
d5uuFpkjk7cd6g0CBOqDmB2TWzOan6JA4sOZBAOmA76ALXXUe7XWGQqgJNzrVIlpu0q91/X+5s42ELilY555EkArlGV0gk5tSCTeeWrDiwSQkBbDUEaac71bdU6w5g40SuCA3CVgWX77LQQpKxVdJeiSdJYoNrHYkYBZzPcq046xSlm6jYKLYSQTP5yQOev0w6cUl1lg5L6ptJUFw6n53YOIPEXZLYOI+cU2IVhWXvnKf41mwrzzY8yxP1T/DEe0/wz8Q/8zYdGxvLrEGzmFU1i+OHHs/Y+Fh7fMkSc0vSSIKBFPoMO8sryR8IK+NspH0ya6mM2XFTw+uxCQTufZUBo26HncIWEZXYmYhlKbbcjqSkVxe0X8ZKRf31kqwg025LirY8AMSxlbfFCivFZuNJnEnGHMm04G5KeAabDCJZetImsaBEmILOe5z37v2iYJli8Qpe8h1JmSawySXyANEP7qOKjWKRRIIG7FgbicvEsOIj5WVEkKRmmdCGLYtT7h9HrAUZL5Ohc1YZ2GKZrpsKOjK+3uEdntz5JE/WP8nyxuXsSO/Iu4SDSg7iuKrjOHbQsfxL+b8wMT6RQMz5JUpGXoLOIjQIO0GbJD9IsD+LHa+UId/ikfiNVKJ27+cOrNXhLpfzu1MyiIsvgc2Sc8cHidtSLB4ZpCpWzk7/XCJ4sly+L3fMkZxbZkKV1HZx3UksK4itJiH/C1IaSdynck1B57PnnENcrJJMUoJ1F0rFBmmfa5GJQHvO30LhChS8l3MXVnso7IzfL65VuN4rWOaKMlgRKBQKuR8ZZ1na+SsuarFIxdoXN3Yj5iEhgb3n8rcE8zAxCBiGKdArDxF9EBUbpTPyQ9iJHc8iGVVSlUBcL5JSLB2A/Ggkm8vtSCW4L+ViwnTtcusq7iM/TIlFYJIMVjWuYlnDMpY3L+fZxmdpyjblXcqI6Aj+pfJf+Jeqf2FG5QymxaYRjUbzq2bL8cuda5P2iHtNOnmJ9ZRixUCsv8Lkgxbs2J5y51wZjCVUQ34MS9yakhbttk0EUjpgsTQkWUEGsUo6thuTEtehiFAOc98zmLFIYnFJFmKAfDERN6iInLyXenYi0FKpQpJHmrEJDxLrk1iY1L6LYcd6yf+YWzNPzinLRBhdq0keCsCKr7yCzjY58oVKhE5e8n2LuMk9ke/djY3J52TB9pLNWfh7kHNIWr7nrJPrSvr3S4YrSIaoJJGIhSy/AfkNyv9KCeZ/6kA6Z3H2AVRslN0jM2buxCYDBMjvlCWzTWI70qm4rpQcJvVagubSyRa63KRTluQE2VeeKt0ftrTPn4I642VY2bKS5xuf5/mG5/lD0x/Ynt6edzmxQIzpFdM5puIYjqk6hqMHHc24+DgCmYDpKF0rzU2BdgfCumnVUkpHqleLxZPCZuoVDjptwrq93Kw9uY81XVxjE/kxHbAdlWsFyr2K+ed3rRmJVUmb5ViDsJmKbuq2uEtb/XUyj5J03k1+G6rJj1O5rlURJrGwYgXb4tw/EVG5T+5feWWdZW6HLpmD7nYyLsztxdwpJtw4mCRqiMBKmFBcyFLTT65JHj7E1Sjbu+d3i762O20Ui1NES4YVyHew3fkOwtj5qQLO+6jzuXDQcBRTFX4UfSpVXsVG6R7yA9yJ/eHJU3gM+3Tr/vgktiA//DSdqyp3NQjTzXJzO/x28geWyo9VrAfBf2r0sh6vZ17n+Z3P8+LOF/nTzj/xWttrnS5tSGQIdeV1HDnoSOoq6qirrGNUbJR9+oxhLQe5F0F/uSu6YvFEMB2xWAHiogtiLUWJtUhSgrjp5LgiQtIBpTBJCW7n0Y4RgsHku07ku3DTsSV+JFaCHEe+E/mO3Hteju0kRTRjmO9Yau5JvEmSFiQRwrVcpOqCm2UnGZFyX93704K1hiLY8UwetqK3/H/JuWWZCKnU3xNLSly6YP9/xTKThArIFziw/5vSyYvIVGIfAsQ1KJaJmyko/9+l2NR29/9WYoBS8WMnNr
NR/hcKXZJi/QXJFx8RGhGiYcBB9Jkq5io2yp4hLrZ2zA9JpgSQjldcXfLkWzga37UQwD7NV9M5y006uDLsD1fiDdIhVjrbCq7byV2ehcZMIy81vsQf2/7In3b+ib80/6VT3AdgeHQ408qmcXjF4Uwrnca0QdOYWDaRUCCU745xkwxEbIOYTq4wAy7stzXs7CsWj9t5F2bGtWE7FBEhSc4A6zaTeyf33U0s8DCxuDD5cSVJbpBEAkFcaiUF91CewKWWnn9fOywqsaTk/0SuRyzitLMerOhKB+1bqXlZjLK/3Etptyvy0oHLdiK40n65LxKDkpTyndj6eCJqMvA3iR03JgOFRYjEapX/s2bnvrY590uutclvi7jZ5P9DHqxasG5mcZ3JvXVjcFKpXMQF7Pcmrk8ZzxXAWMkH0ycEZ3d9s1ZxUjojT2wSh6jGutii2Ke7cswPRtxrblqwuGo8Z53rHpLPSUzHEHX2lSdNOYfrb3f/SvDbJQtV5VXMDsxm9vDZAHiexzst77CifQUrGlewonUFK5pXsDW1lSdST/BEwxMdu5cGS5lcPpnJJZOZUjmFybHJTBk0heHh4fnnER99IRIYdtdJ3MAdIySWAeTHrlLYTqgdK3hiKeFcc5D8uJSMD3JdkrIcbOaeew0SlHaR77emYHkj1vUG1pXURL7lFcSIm/x/yDKpx1fl7J+is+Um2XgipJ6znVgzEgNyEyMS2IC93It2TOctU4hLpp6HcWVFMf+fOYxQSwKFZFbKd5TAulRzGOF2Eykk2aMem2gRwJZPSmESSeR8knQjmX9t2MB/m398+R+T8VliWcnvQb77RmAjxq3WRxMHQMVGeT/EHRPHdAhSU60d21HJoEqxRiLYH5L8mGQ/6ajkKTVMvhXkPjG7T9xhZx9JtS7MRhKrocCHHQgEGB8fz/ia8ZxdfTZEjACtS6xjZeNKViZXmr+tK1mfXM9LTS/xUtNLsNUeY2hkKIeWHcqh8UM5tOpQDg0fyqEVhzImNMYUGhWSnc/faYyOIE/RrjC4qbAiTmIByD5Sn006VXGRiWhJyrc8qUumoQT4RaSkA5UCrtJ+iTtIMB/n2EnyJ7aT8kDSqYO1CNxt5bgB57hpbAp+O6aDlWXSmYuIiMvPPV8Q+8AhQX25DnG1SQcewAhBFbajdschyblkHJpYzmlMHLIa+3/p3qcY1moUa0TcZOJua8Y+LEjcz/3+pH6hfB9ZZ3nCP3ahFRjA/p/FMVOjV9L5AaEPoWKjdB8RB3EVtGOeBuVJz43vSGchP4goNkgqnUQF+fPYgP0xFbp8xOKRTkD+c91MJcnSKny6k6dfp7MPBAJMiE9gwpAJnFF2RofLoz5dz99b/s6qplWsSq7i701/Z1XrKrant7O9cTvP8RxssccpD5VzcOnBHBw/mIMHHczB4YM5uOJgDi49mMpwpd1Q3E2FJLpYniNfnOTpXmIy7r2Szs/NhnJjXCJ+bqZUHOvOkfsmHZlbmkhESDIPxWKVzlLGD4Wc44q7S6yPdmxhWBEC+b+QNsh1yP+HuOOCWHehiF4EYyHEybewJPMP8lOOZUyTuFtFFEWAJaNOXHtZ/1iSUCH3NUl+/Koe07nLcWW8FNjCt3IPWv1zVJHvlhXBdlPgRYjdZIZ2rNCKxeNaO/LdbSDf8uxjqNgoe464wEQwJL4jAd0ENmNNnhjF7eWRP37F7Yik4xyCdaUJbiZYoQiJ60qeEF3ErVLmfBYydKpIUBOp4biq4ziu4riOMUJe2GNDcgOvtb7Ga82v8VrqNV5reo3X2l/jvfR7vNz8Mi83v2zcMg6DI4M5KH4QB5UdxEGRgzio/CAOjB/IhJIJjAqOIuSF8mMVgohN4XLJfir81cqTsCvAafKtIPceyhO8LJfO3s0Ik3vlWi/4+0rA2z0G2PEjrjUq7XBL54h7STpc2VYsBFdMd5JfYkiuyR3TJO0TwRN3mlhUsp/8j0nMRs4tFg9YkRerXR5uJEYjri
wRMEm+kNiaPPiIpSVJM/LAIL8dtyK7fNdh571YhuItSDvr49iHCUlbFwuvCfP76YOo2Ch7hxvfyWF+tJJUIALjZgWJOy1EvgCJGyWO/bGKy8cVDHeAJP7nVuy8MoIblJYCl4XuKrD+ehcZjCqXGAgwLj6OcbFxnFx1sjmebwm9l3qPN9rf4PXm13k98zqvN73O64nXeaPtDXakd7AjvYOXml/qdNvCgTDj4uMYHx3P+NLxjI+PZ2x8LOPC4xibG8tYbyzxSEEwRWIIhZabuNgKlxfGleS+inUpiAhJ8N/dXjpW18oC+x12XBD5RV8FEQd3DJKIyGBnmXSahdclLkVBHmRERCUl2i1ZJB22JAi4aeNiLYkVJBZnyvkrFolrmct6aWcc8z8u8RmxQETQ5H5QcEwROff/W9rhJkNIXEdS8OXa5B7Jg5KIjwj6NjrPPdVHULFReg7Jpopi00YltiPjOCQTR9KeI8BQ7LQI8iMpJT+rSzordzynuJYidB5VLecodLfIE7mMgZEAvIiP5+zvdnLQSYQAhkSHMCQ8hBmDZpj2+0kUnufxbupd3mp+i7dyb/HWzrd4O/U2b7W/xbrEOramtvJ2+9u83f62nWuogKGRoYyNj2V0bDSjw6MZXTqa0aHRjCodxejYaEbGRlLj1ZjxQ10hT9SFFFpNbgaZ2yOIq6swgUA6eNeqEWEqLEsk97qwXXKcsLPMfcoH68YTIZSO3nXtiQgIMphUrlsEK+J8lr+uMLluTvl/SGBLDsn/jWtxiyjIvZNju9UUpP6exBzFnesmjIjYi3WTLlgu1o20Q9oq4iX3UlzbEvvpY6jYKMXBtXgqsU/UkhVUTn7l5Aj5gz/xl8kIelkmP25Jt5UkBBl5754/QL5bRo4psSVXhMSCEkGRAOyuRMh1x0mMSY6FsYZGRUYxavAojo0dayw+JxOtLdXG+sB61jWsY11mHeva17EhuYEN7RtYn1rPpuQmEydKbzcuul0QCUQYER1hXjHzd3h0OMOCwxgWHsaw0mEMiw5jaHQoNeEawrlwZ/eXXEOhMImAlNGZdMFnGb1fOJBCOntXANzvT3og9+nfTZIQi1k6UzmGdLauRRDCZnW57XLFTtyQIhAy7kgGpbr/Q65lJQIk1otYJRIbEktdBqy6yRCuGMp+Ik5R59hiQXrOMUVoce6HGxOT/1G5RhFgFRtlwCLJBWVYf7aMeWjG/OBENCQJoQo74l06FAlMR7FjTySrR/zlsn01+R2atEMGIxaKkDxJy1OwK0JpZx+383DHkhR2tlmMUBZ2wDkojZVySPQQDuGQzqVz4pBty7LV28qGxAY2JTexuX0zm9Kb2JTYxKb0JjYnN7MltYXGTKMRqeQG67bZDdWhaoZEhhiLLDqEweHB1ERqqMnWUF1STU28hppwDVWRKqqCVVSlq6jKVRENOerUlTDlCv7Ke/lO3LR16eDFFSTbyffkOftB5yQGsHEPl67EUb4Tz/nsWhmCDDaVfeVcOfLPKwkPIlYS43ETMCSDUCwhsQwl+UWs9UIRlrI90k75P/Ow4iyC5m7vZi0GML+ZKvocKjbKvkeEJ46twixWj2QwSVxBLBlxu8UwJTpkECT+31JMZlwUO6pdRuzLj1CeqKvI/zFLm2RwXXdFKFewj2uVyTnFz18Yx5AsKQqW+316KBBiVHSUrXIgnaQ7wBRoT7SzNbOVd9PvsiWzhS3JLWxLb2Nb+za2pbexPbudbSnzvj5dT0O2gYZsA28k3mBPKAmWUBmupDJcSQUVDAoNYlBsEIPCg6gIVVBBBeXZcsqbyimPl1MeKqeMMkpTpZQGS837SCmlXinxdJySYAmxXIwAgc4xs0JrRDpUiUtA/vfp7ud24O54I1dw5LNYIUIK6651XWdyXrddbkePc2z5ruS9vz5DhvZAO21eG2200UorrYFWWrOttAZbaaGF5kwzzV4zTdkmmtqaaAw28pPYTyjJlliBpOB80ha5fjcpoo+hYq
P0PkGsO0x+0CI+SUzA083QkvEKO7GdPBjhkkGk+NvIQDwZcR3HjjR3S+IEMSJUKAAS+6kiv2OKYeNMrjvO7ZTEEnJH08tTqoibi4z5KLSQZJR8F51ySayE8RGTZNBxH6STc0fgA9lclsbmRt7jPd7z3mN7ejv16XrqM/XUN9dTHzRitCO9g8ZMIzvTO2lMN9KYa6Q91057qp0tKSfvuxuW1O4IECAe9IUnECMWjBEjRixs/kYDUaLBqPkbiBIhQjgYJhKMECZMOBMmFA4RCoUIESLkhQilQoRaQgQCAYJekEA6QJAggUb/yxO3Vha8gIeHZ/56HrlEzrzIkU1myQayZBozZLNZMsEMmVyGdCpNpj1DOpsm7aVJ5VKkvTTJQJJkNknSS5JM+X9DSdpHtpMIJMgECrMfyC88CtZViP17Y/RGSijpbEm6wpkrWF74f9JHULFR+hZuWnU51n0iyQbN2AF9g7AWkHTsbi0ueToVoSl1ziEpsmATDGQAnht0BWsJFWZwZTCuOpeofwwpK4LTBulMyp1jSCcilsuuiiq6cQVBBE2C5UFnuWT+5TU5xODSwQz2BjMpOsmuyGEEtbzgGv1R+16VR1uujcZMI82ZZpoam2gKNtEUamJnZict2RZaki20JFpoCbWYp/RsM22ZNlpTrbTl/Cf5XCvt2Xbas+0kvAQpL2VELNee185efTJvef9NOrnvdoWTRh70gpRSSolXQkmghDLKKPPKKA+WUxYoo9wrpzxQTkW4ggqvgspgJdXRaioCFbs+NnTOTnQfvvoYKjZK30YGDUpxx6HYTlc6WSlWCTbDTQbvSUn2ZvLHJogFEie/8xcRkh+tDO5rIt89AyZ11/WzyzGgc4A2jikrIunbgsQuPGzJeEmPlRhOhHzrCOe9xKBSBevc4qiuCEmKsIskWhQKqp9IEAgEKAuVURYqs+m2peRnnkmBzGryLUZJgR+EjTe0mG2ywSyJeIJEMkGyJUkynCSRSpAsSZJIJkhn0qQDaVLZFGnSpDIpY2GQIZ1Lk0lkyIQzZINZsl6WbDZLNpElG88aayXnkUvlyHk5m0rsWzZe1iMQDBAgQCAQIOAFCCVCBEuDhAIhggnzNxwOE8oa6ylChHBbmHBZmEgmQjQbJZKLEAlEiIVixFIxoqko8WCceC5OrD1GfEeckkgJkVjE3hM3LiQFN+V/Ur43eUBy68QVioog91vceoXZg30EFRul/xHC/GDLsMFlycJJYC2iNmzCgRv7EeGqxCYgyHgf+eHLNvKjllHnrghBZxHyMCLkJjWAdbEVdgQyTUA5+VaNVNmWxAmwVp+bul34JCsZTeKqkwC2O66osJ4c/rHaCpbt6gm5K+vL7fAEiY0UDujEXEMoFjIiFi2zmYshzL2V71LSkiV5JOQcV6xauc9ZTNxO6qyJxSZC7mFjfzJYU86Rw7hlxVJtxSaFuPXucn775CHHtZ7lwUBEtdBS9eh879yEgIjzvvABRixf94HDvb/y2R0m0MdQsVH6N9IBS8JBBbYzFjeb1M4Sq0gGKgYwItKEHTRX6u/XSn7GknTY0hm6ItRKvgjJoEXpNMQdN4T8OmSCm3wgRPx2VZE/oFIGE8ogWbAj8UV0y53lIizyFC2ds3SGu3LfFcaW3MyoKJ2FqFBUcNrkXluoi/fusSXJwk07l2O5FoCkb7uhENkv4OxTmFLsnqdQCNyU5hD50zWIaEv2XID89jous47/SWmPG8Av7HEL71dX7yUbTzLO3O/NvRZZVzjWqY+gYqPsX8gPPIT5cYo7S+IlUn8qge2Ia7DpzbJ/OXYmxSjmyVrqU7lWi9vZuCJUhhGLAKazLcMKj1hSklk2lM6xghz5lpUgnX9lwbq4fz7XTSfut6yzDeSXBpIYkitY4o6RjLl28l1jUryzkDj5T+UiwIVCJvEudzvp0N24GliBEOET60YqQ7hiI25RVxwLO23J6IsVrBNrVtrfVUcubkg3nV7+1yQzzhUreZ
iRatduJQZXnOUagwXv3TRpOZefGt9xre61BTHfpYqNovQirgC5ddJkzE8amwEnMZsqfzu3KGIIOzZILCEZoFpotciTsggN2MoIUnurFJuwILWwZNzEIGyRSHewn8SKXEQsCjuaONY9FHC2lTEpblFTqQEmlo5bSsjNfpLOTqwjsRLLyZ8FVa69MMNPOllXLKUqgGtJigCJu1DO5Q6wlcQJEYRS8itvF7qeotjCliIqci9FGNxt3fZKyrq4UyW2JpUIpI0iiuIClTFl8j8IVrjkPrnuNxno6Qqg+zDglq2RbeU7HE7nB5Q+goqNMnARqyNC/tTNYuW4IpTCju2pwFgW0sG47hWpGVaGEYs0+ZOsuU+mUi4HbIHHdn8/KRBaSkdAnZz/WTLepLOXp+bh5NfzkmssHMAqgtFUcN3SEaexllMAKwRgrS13mbjuZGCkiE/Gv1etzjJx9QScZdIm6byFEmwFZrACmMSObZISQeK6kiKVbgIFzrlC5Hf6MeyUACIUklhRjnWlimjJNmKVyvcvri2xrHDWxzHfl3wPrrUGnQe0ishLe9P+9Yor1E0CcF2OZfTZeA2o2ChKZ6QzLYytFFpCKaxFI0/VFdhJvcQNInEWcbWJxdOMzWbLYuNN0gbInxisymlTmb+/BLgrsG4WtySLDJzdSX6QO4hxHxaOpHez9QSxeKRDlnaJOJU6y0uwiQ1iNbmVvUWYcs4yaVcV1sUIVlxdpJK4WAVihSWxbkw3CcJ1BUrhzkry64+Jiw7sbLTyACKWpVg3Ym1ItQGxEMXCkYQPsWqcqhB5ZWfEkpH3Iiri+hSLLuesd7+XoLOt3IMD6NM9eh9umqL0MXZlCUF+OXkRowQ2JiJPyWK9hLAds3Sq0jm1YQs4BrETd0n2kzzJSodVSn7KtVhhldgn4DKsi0gSGQIY60YEL40tPOm6tKSDdkfkg3Ufua47t7ONO8tElGX8kaQhy32TuFETNgAvgiiWi1gFhSIp2WSutSJ1zkTMUthK0WItigVThbWGXPGRyhQ7nfa1+H/FshILTwYmiytUhFbWDfWPI6It90DS1OW+StZkFOtSLXX+5pxzi+AEgDHkTwXeB1GxUZSeQH5JXVlD2YKXCJKk5EqatiQJSKBX3GRupeBmrBUlKb4tWJeeh+10xL3mxhLEspCOuQKTLiz7SceXc44pCQ5J7ORq0tkPw4iYtM8tSlooTIXun1LMLJjlWOGQig9iBbqiLJ1xCVYkwXb+bsxMziNtKMGMc4o5n5uwdfiCWGuokvzyOLJPGGtZVWOzHKXtcn9lLI18x3J8z79esV7d++FWiXAzBuNY95lbHUMs3gDGfTqSPo+KjaIUE7ejL0Q6dTfoLy954heLQNwtQ/x9ZaIy6YBTWLeZPPm2YTPiSvx9XXES910V1ropfMKW+JObGCBZb262nMRlJLEhix3jIq4nGdcENtg9hPzBp5LEIU/uYAeiSpxLkhEase43EVqxaCr940q8TGJaYkkGnGOBjfXIObJYF5qkrIvVImIkghXHut8kNR5sDEcsnBKMKDT62w7CCpQrjiXOMhH7YMFfiaeNAcax68oTfQgVG0XpLdyANXQea1NYMcAVIckGyxRsJ9aHiMZgf7lsK6IhAiY16cR9J7GlCuzgRRES6ehG+ucRy0vESVx7EqyXmIdUaZDjNvrtr8SKWit2rJMIkFhMnr8tWPeeuN7ACrGbTi6uMGm/nCuEseQiGHeciIqcQ1xuYMVIREOuVyytNmzGosSExMUpLjsZsxX2vwuJp7X5r4xzbdJmOZ/cbxFgeWiRJJWJWOu2H6Bioyh9lcIMMul8ukLiKe7ARcmqk0GuaWe5lMaR+I5YYIOd5VKDTjpa6Qzlbyt2llQZzNqK6bwl3iEZeQlsXEXSsF13osRypDOV88r8NNLxy/FlCgqpCtCCtRykUoTEN6qwlkkAI2Yi1uLOAxtsdwP0If+cKWzHvtNfPgKbqShiKqIu7rwE+YkEEs
+RFPtGbFagJCHIvq5wS6WCUowlM5Y+W5ZmV6jYKMr+QOGIfqEwhiSIOLmIC08Eya1ALJ8l0y6MdellsGIigiWZe9LBu64f110ngzQlyUFEDqxQuNaaZNf5RUI7LDVX1MCObRKhFatDrElZLmNnRPxEZMqxrkpJY0/621T755DxVzKWSBIMJCnBrfwtlo7cP3F9VvjX24Ktoi1xKYnNSU3AUf4939V32sdRsVGUgUhX4rQnfn+JxbiDPcG6+LwutvMK1nvOPmLhxLAWi2SrScVusTjEbSYWkohG4XQPhQNfJa7jZtWJm0xSjCUtW2aTleNI3MmlcPySe0x3vAwF790xO7K9O+bGFa0Ynasd9FNUbBRF2XPcDtKlJwLVrsUlHbe8ArtY1tW+u1vmHrurv7varvDzrt4rnVCxURSlb9FVZ9/Vsr5CYWxN6ZL9wDhTFEVR+joqNoqiKErR6ZbYbNy4kS9+8YvMmDGD0tJSAoEA69at67RdIpHg2muvZeTIkZSUlDBjxgyee+65Ttvlcjluuukmxo8fTzweZ+rUqTzwwAN7fTGKoihK36RbYvPmm2/y29/+lurqao499thdbvfZz36WhQsX8t3vfpdHHnmEkSNHcvLJJ7Ny5cq87b75zW/y7W9/myuvvJLHH3+cY445hnPOOYfHHntsry5GURRF6aN43SCbzXa8X7hwoQd4a9euzdtm5cqVHuAtWrSoY1k6nfYOPvhgb86cOR3Ltm7d6kWjUe+GG27I23/WrFnelClTutMc74gjjujWdoqiKMq+Y3d9c7csm2Dw/TdbsmQJkUiEc889t2NZOBzmvPPO44knniCZTALwxBNPkEqlmDdvXt7+8+bNY9WqVaxdu7b7SqkoiqL0C3osQWD16tVMmDCB0tL8CShqa2tJpVK8+eabHdvFYjEmTpzYaTuANWvW9FSTFEVRlD5Cj4lNfX091dXVnZbX1NR0rJe/VVVVBAKB3W6nKIqi7D/0m0GdCxYsYMGCBQBs3769l1ujKIqi7Ak9ZtlUV1fT0NDQablYKmK5VFdX09jYiOd5u92ukPnz57NixQpWrFjB0KFDe6rZiqIoyj6gx8SmtraWtWvX0tbWlrd8zZo1RKPRjhhNbW0tyWSSt956q9N2AIcddlhPNUlRFEXpI/SY2MyZM4d0Os3ixYs7lmUyGe677z5mz55NLGZKpp5yyilEIhHuvvvuvP1//etfM3nyZCZMmNBTTVIURVH6CN2O2dx///0A/PWvfwXg8ccfZ+jQoQwdOpSZM2dy+OGHc+655/KlL32JdDrNhAkTuOOOO1i7dm2esAwbNowvf/nL3HTTTVRUVDB9+nTuu+8+li1bxpIlS3r48hRFUZQ+QXcH65Bf1LvjNXPmzI5t2travKuvvtobPny4F4vFvKOOOspbvnx5p2NlMhnve9/7njdu3DgvGo16U6ZM8RYvXtwjA4cURVGU3mF3fXPA87xdzfbQZ6mrq2PFihW93QxFURTFYXd9s1Z9VhRFUYqOio2iKIpSdFRsFEVRlKKjYqMoiqIUHRUbRVEUpeio2CiKoihFR8VGURRFKToqNoqiKErRUbFRFEVRio6KjaIoilJ0VGwURVGUoqNioyiKohQdFRtFURSl6KjYKIqiKEVHxUZRFEUpOio2iqIoStFRsVEURVGKjoqNoiiKUnRUbBRFUZSio2KjKIqiFB0VG0VRFKXoqNgoiqIoRUfFRlEURSk6KjaKoihK0VGxURRFUYqOio2iKIpSdFRsFEVRlKKjYqMoiqIUHRUbRVEUpeio2CiKoihFR8VGURRFKToqNoqiKErRUbFRFEVRio6KjaIoilJ0VGwURVGUoqNioyiKohQdFRtFURSl6KjYKIqiKEVHxUZRFEUpOio2iqIoStFRsVEURVGKjoqNoiiKUnRUbBRFUZSi0y2x2bhxI1/84heZMWMGpaWlBAIB1q1bl7fNihUrmD9/PocccgilpaWMGzeOCy+8kLVr13
Y6Xi6X46abbmL8+PHE43GmTp3KAw880CMXpCiKovQ9uiU2b775Jr/97W+prq7m2GOP7XKb3/zmN6xevZqrrrqKxx9/nO9///u8/PLL1NXVsWHDhrxtv/nNb/Ltb3+bK6+8kscff5xjjjmGc845h8cee2zvr0hRFEXpe3jdIJvNdrxfuHChB3hr167N22bbtm2d9lu3bp0XCAS8b37zmx3Ltm7d6kWjUe+GG27I23bWrFnelClTutMc74gjjujWdoqiKMq+Y3d9c7csm2Dw/TcbOnRop2UHHHAAQ4cOZdOmTR3LnnjiCVKpFPPmzcvbdt68eaxatapLt5uiKIrSvylqgsBrr73Gtm3bOPTQQzuWrV69mlgsxsSJE/O2ra2tBWDNmjXFbJKiKIrSCxRNbDKZDJdffjlDhw7ls5/9bMfy+vp6qqqqCAQCedvX1NR0rFcURVH2L8LFOvCVV17Jiy++yKOPPkp1dfVeH2/BggUsWLAAgO3bt+/18RRFUZR9R1Esm6997WssWLCARYsWMXv27Lx11dXVNDY24nle3nKxaMTCKWT+/PmsWLGCFStWdBkfUhRFUfouPS42N954Iz/4wQ+47bbbuOiiizqtr62tJZlM8tZbb+Utl1jNYYcd1tNNUhRFUXqZHhWb2267jeuvv54bb7yRK6+8ssttTjnlFCKRCHfffXfe8l//+tdMnjyZCRMm9GSTFEVRlD5At2M2999/PwB//etfAXj88ccZOnQoQ4cOZebMmfzmN7/hS1/6EqeccgqzZs3iT3/6U8e+gwYN6rBYhg0bxpe//GVuuukmKioqmD59Ovfddx/Lli1jyZIlPXltiqIoSh+h22Jzzjnn5H2+4oorAJg5cybPPPMMS5cuxfM8li5dytKlS/O2lW2EG2+8kfLycn70ox+xZcsWJk2axG9/+1tOPfXUvbgURVEUpa8S8Aoj9f2Auro6VqxY0dvNUBRFURx21zdr1WdFURSl6KjYKIqiKEVHxUZRFEUpOio2iqIoStFRsVEURVGKjoqNoiiKUnRUbBRFUZSio2KjKIqiFB0VG0VRFKXoqNgoiqIoRUfFRlEURSk6KjaKoihK0VGxURRFUYqOio2iKIpSdPrlFANDhgyhrKyMoUOH9nZTisb27dv36+uD/f8a9fr6N3p9e866det47733ulzXL8UG9v85bfb364P9/xr1+vo3en09i7rRFEVRlKKjYqMoiqIUnX4rNvPnz+/tJhSV/f36YP+/Rr2+/o1eX8/Sb2M2iqIoSv+h31o2iqIoSv+hX4nNhg0bOPvss6msrGTQoEGceeaZrF+/vrebtcfcf//9nHXWWRxwwAGUlJQwadIkvv71r9Pc3Jy3XUNDA5deemlHqveJJ57IqlWreqnVe8cpp5xCIBDg+uuvz1ve36/xscce47jjjqO8vJxBgwZRV1fHsmXLOtb35+t74YUXmD17NsOGDaOiooLp06ezaNGivG0SiQTXXnstI0eOpKSkhBkzZvDcc8/1Uot3zcaNG/niF7/IjBkzKC0tJRAIsG7duk7bdfd6crkcN910E+PHjycejzN16lQeeOCBfXAlXdOd61uxYgXz58/nkEMOobS0lHHjxnHhhReydu3aTscryvV5/YTW1lZv4sSJXm1trff73//ee/DBB73Jkyd7Bx54oNfS0tLbzdsjjj76aO+cc87xfv3rX3vPPPOM98Mf/tCrrKz0jj76aC+bzXqe53m5XM776Ec/6o0ePdq75557vMcff9w77rjjvMGDB3sbNmzo5SvYM+655x5vxIgRHuBdd911Hcv7+zX+9Kc/9cLhsPelL33Je/LJJ72lS5d63//+972HH37Y87z+fX2vvPKKF4/HvY997GPegw8+6D355JPe/PnzPcD7yU9+0rHdBRdc4FVWVnoLFizwnnrqKe+MM87w4vG497e//a33Gt8Fy5cv94YNG+Z9/OMf92bPnu0B3tq1aztt193r+cY3vuFFo1Hvll
tu8ZYtW+bNnz/fCwQC3qOPPrpvLqiA7lzfv/3bv3kf+chHvNtvv9175plnvLvvvts75JBDvJqaGm/9+vV52xbj+vqN2Pz3f/+3FwwGvTfeeKNj2dtvv+2FQiHvP//zP3uxZXvOtm3bOi375S9/6QHe008/7Xme5z344IMe4C1btqxjm8bGRq+6utr74he/uM/aurfU19d7w4cP9+65555OYtOfr3Ht2rVePB73fvjDH+5ym/58fV//+te9SCTiNTc35y0/5phjvGOOOcbzPM9buXKlB3iLFi3qWJ9Op72DDz7YmzNnzj5t7/shD3Ge53kLFy7ssjPu7vVs3brVi0aj3g033JC3/6xZs7wpU6YU5wLeh+5cX1f9zrp167xAIOB985vf7FhWrOvrN260JUuWcMwxxzBx4sSOZRMmTOCjH/0oDz30UC+2bM/patTukUceCcCmTZsAc72jRo3i+OOP79imsrKSOXPm9Kvr/epXv8rkyZM5//zzO63rz9e4aNEigsEgl19++S636c/Xl0qliEQilJSU5C2vrKwkl8sB5voikQjnnntux/pwOMx5553HE088QTKZ3Kdt3h3B4Pt3dd29nieeeIJUKsW8efPy9p83bx6rVq3q0i1VbLpzfV31OwcccABDhw7t6HegeNfXb8Rm9erVTJ48udPy2tpa1qxZ0wst6lmeffZZAA499FBg99e7fv16Wlpa9mn7Pgh/+MMfuOuuu7j99tu7XN+fr/EPf/gDhxxyCL/5zW846KCDCIfDTJw4Me9a+/P1XXLJJQBcddVVbN68mcbGRhYuXMjTTz/N1VdfDZjrmzBhAqWlpXn71tbWkkqlePPNN/d1s/eK7l7P6tWricVieQ++sh3Qr/qj1157jW3btnX0O1C86+s3YlNfX091dXWn5TU1NTQ0NPRCi3qOTZs2ccMNN3DiiSdSV1cH7P56gT5/zalUissuu4xrrrmGSZMmdblNf77GzZs388Ybb3Dttdfyta99jSeffJKTTjqJK6+8kh/96EdA/76+yZMn88wzz/DQQw8xevRoqqur+cIXvsBPf/pTzjvvPOD9r6++vn6ftnlv6e711NfXU1VVRSAQ2O12fZ1MJsPll1/O0KFD+exnP9uxvFjXF/7gTVV6gpaWFk477TTC4TB33nlnbzenx7j55ptpb2/nuuuu6+2mFIVcLkdzczO/+MUvOPPMMwGYNWsW69at46abbuKqq67q5RbuHW+88QZnnXUWtbW1/PSnP6WkpISHHnqIyy+/nHg8zoUXXtjbTVT2kiuvvJIXX3yRRx99tEuR7Wn6jdhUV1d3+SS4q6eR/kB7eztz5szh7bff5tlnn2XMmDEd63Z3vbK+r7J+/XpuvPFGfv7zn5NMJvN898lkksbGRioqKvr1NQ4ePJg33niDk046KW/57NmzWbp0Ke+++26/vr5vfOMbRCIRHnnkESKRCAAnnHACO3bs4P/9v//H+eefT3V1Ne+8806nfeX65Em4v9Dd66murqaxsRHP8/Ke/vvTdX/ta19jwYIF/PKXv2T27Nl564p1ff3GjVZbW8vq1as7LV+zZg2HHXZYL7Ro70in05x99tmsWLGCxx57jClTpuSt3931jhs3jvLy8n3V1D3m7bffJpFIMG/ePKqrqzteALfeeivV1dWsWrWqX1+j+K93RTAY7NfXt2rVKqZOndohNMJRRx3Fjh072LZtG7W1taxdu5a2tra8bdasWUM0Gu3k8+/rdPd6amtrSSaTvPXWW522A/p8f3TjjTfygx/8gNtuu42LLrqo0/qiXd8HzmPbx/zwhz/0QqGQ99Zbb3UsW7t2rRcOh71bb721F1u252SzWe+cc87x4vG499RTT3W5ze9//3sP8J555pmOZTt37vRqamq8K6+8cl819QPR0NDgLV++vNML8ObNm+ctX77ca25u7tfX+Mgjj3iAt3jx4rzls2fP9saMGeN5Xv/+DmfOnOlNmDDBSyaTecvPP/
98Lx6Pe8lk0nv55Zc9wPvFL37RsT6dTnuHHHKId+qpp+7rJnebXaUGd/d6tm7d6kUiEe/b3/523v4nnHCCN3ny5KK2vTvs6vo8z/N+9KMfeYB344037nL/Yl1fvxGblpYW76CDDvImT57sPfjgg95DDz3kffjDH/YmTJjQaSxAX+fyyy/vGHPyxz/+Me8lg/2y2aw3Y8YMb8yYMd69997rLV261Js5c6ZXXV3daQBWf4GCcTb9+RpzuZx3/PHHezU1Nd4dd9zhPfHEE96ll17qAd6dd97peV7/vr7Fixd7gDd79mzvwQcf9J544gnvC1/4ggd4V199dcd25557rldVVeUtXLjQe+qpp7yzzjrLi8Vi3l//+tdebH3XLF682Fu8eHHH7+8nP/mJt3jx4ryHge5ez1e/+lUvFot5//mf/+ktX77cu/zyy71AINAxoLc3eL/ru/fee71AIOCdcsopnfqd1atX5x2rGNfXb8TG8zzvnXfe8c4880yvoqLCKy8v90477bQu1buvc8ABB3hAl69vfetbHdvt2LHD+/SnP+1VV1d7JSUl3qxZs7yVK1f2XsP3kkKx8bz+fY07d+70rrjiCm/YsGFeJBLxpkyZ4t1999152/Tn63vssce8mTNnekOGDPHKy8u9qVOnerfffruXyWQ6tmlra/Ouvvpqb/jw4V4sFvOOOuoob/ny5b3X6N2wq9/czJkzO7bp7vVkMhnve9/7njdu3DgvGo16U6ZM6WTl7mve7/ouvvjibt0DzyvO9WnVZ0VRFKXo9JsEAUVRFKX/omKjKIqiFB0VG0VRFKXoqNgoiqIoRUfFRlEURSk6KjaKoihK0VGxURRFUYqOio2iKIpSdFRsFEVRlKLz/wHkUVgLnbfD7QAAAABJRU5ErkJggg==\n", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "trajectory = get_trajectory(info.best_solution).cpu()\n", + "\n", + "sdf = th.eb.SignedDistanceField2D(\n", + " th.Variable(batch[\"sdf_origin\"]),\n", + " th.Variable(batch[\"cell_size\"]),\n", + " th.Variable(batch[\"sdf_data\"]),\n", + ")\n", + "figs = theg.generate_trajectory_figs(\n", + " batch[\"map_tensor\"], \n", + " sdf, \n", + " [trajectory], \n", + " robot_radius=robot_radius, \n", + " labels=[\"solution trajectory\"], \n", + " fig_idx_robot=0,\n", + " figsize=(6, 6)\n", + ")\n", + "figs[0].show()\n", + "figs[1].show()" + ] + } + ], + "metadata": { + "interpreter": { + "hash": "3095d307436ac388e461a5585c0eeaa747818d9658111384e6a455f40a311fed" + }, + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.7.8" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/tutorials/05_differentiable_motion_planning.ipynb b/tutorials/05_differentiable_motion_planning.ipynb new file mode 100644 index 000000000..9e83f6088 --- /dev/null +++ b/tutorials/05_differentiable_motion_planning.ipynb @@ -0,0 +1,529 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "09f617b4", + "metadata": {}, + "source": [ + "# Motion Planning Part 2: differentiable motion planning" + ] + }, + { + "cell_type": "markdown", + "id": "136ca5ef", + "metadata": {}, + "source": [ + "In this tutorial, we will build on the [first part](https://github.com/facebookresearch/theseus/blob/main/tutorials/04_motion_planning.ipynb) of the motion planning tutorial to illustrate how we can differentiate through a motion planner implemented using `Theseus`. 
In particular, we will show how to set up an imitation learning loop in `torch` to produce values to initialize the `TheseusLayer` so that it converges to a high quality trajectory faster. If you haven't already, we encourage you to review part 1 of the motion planning tutorial before proceeding with this one." + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "id": "175d8501", + "metadata": {}, + "outputs": [], + "source": [ + "import random \n", + "\n", + "import matplotlib as mpl\n", + "import matplotlib.pyplot as plt\n", + "import numpy as np\n", + "import torch\n", + "import torch.nn.functional as F\n", + "import torch.utils.data\n", + "from IPython.display import clear_output\n", + "\n", + "import theseus as th\n", + "import theseus.utils.examples as theg\n", + "\n", + "%load_ext autoreload\n", + "%autoreload 2\n", + "\n", + "torch.set_default_dtype(torch.double)\n", + "\n", + "device = \"cuda:0\" if torch.cuda.is_available else \"cpu\"\n", + "torch.random.manual_seed(1)\n", + "random.seed(1)\n", + "np.random.seed(1)\n", + "\n", + "mpl.rcParams[\"figure.facecolor\"] = \"white\"\n", + "mpl.rcParams[\"font.size\"] = 16" + ] + }, + { + "cell_type": "markdown", + "id": "78d6afce", + "metadata": {}, + "source": [ + "## 1. Initial setup" + ] + }, + { + "cell_type": "markdown", + "id": "1cdf433c", + "metadata": {}, + "source": [ + "As in part 1 of the motion planning tutorial, the first step is to load a few planning problems from the dataset, and set up some constant quantities to use throughout the experiment. During this example, we will use a batch of 2 problems obtained from the loader." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 2, + "id": "e095d14a", + "metadata": {}, + "outputs": [], + "source": [ + "dataset_dir = \"data/motion_planning_2d\"\n", + "num_prob = 2\n", + "dataset = theg.TrajectoryDataset(True, num_prob, dataset_dir, \"tarpit\")\n", + "data_loader = torch.utils.data.DataLoader(dataset, num_prob, shuffle=False)\n", + "\n", + "batch = next(iter(data_loader))\n", + "map_size = batch[\"map_tensor\"].shape[1]\n", + "trajectory_len = batch[\"expert_trajectory\"].shape[2]\n", + "num_time_steps = trajectory_len - 1\n", + "map_size = batch[\"map_tensor\"].shape[1]\n", + "safety_distance = 0.4\n", + "robot_radius = 0.4\n", + "total_time = 10.0\n", + "dt_val = total_time / num_time_steps\n", + "Qc_inv = [[1.0, 0.0], [0.0, 1.0]]\n", + "collision_w = 5.0\n", + "boundary_w = 100.0" + ] + }, + { + "cell_type": "markdown", + "id": "6ac4fdf5", + "metadata": {}, + "source": [ + "Next we create the motion planner. Class `theg.MotionPlanner` stores a `TheseusLayer` constructed by following the steps described in part 1, and also provides some useful utility functions to retrieve trajectories from the current variables of the optimizer. " + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "id": "0abfeed2", + "metadata": {}, + "outputs": [], + "source": [ + "planner = theg.MotionPlanner(\n", + " map_size=map_size,\n", + " epsilon_dist=safety_distance + robot_radius,\n", + " total_time=total_time,\n", + " collision_weight=collision_w,\n", + " Qc_inv=Qc_inv,\n", + " num_time_steps=num_time_steps,\n", + " optim_method=\"levenberg_marquardt\",\n", + " max_optim_iters=2,\n", + " step_size=0.3,\n", + " device=device,\n", + ")" + ] + }, + { + "cell_type": "markdown", + "id": "dce43156", + "metadata": {}, + "source": [ + "Since we are working with a single batch of data, we can initialize the input dictionary for the motion planner with some tensors that will be throughout this example. 
As a reminder, the input dictionary associates `th.Variable` names in the `TheseusLayer` with tensor values for each of them." + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "id": "8e00fe63", + "metadata": {}, + "outputs": [], + "source": [ + "start = batch[\"expert_trajectory\"][:, :2, 0]\n", + "goal = batch[\"expert_trajectory\"][:, :2, -1]\n", + "planner_inputs = {\n", + " \"sdf_origin\": batch[\"sdf_origin\"].to(device),\n", + " \"start\": start.to(device),\n", + " \"goal\": goal.to(device),\n", + " \"cell_size\": batch[\"cell_size\"].to(device),\n", + " \"sdf_data\": batch[\"sdf_data\"].to(device),\n", + "}" + ] + }, + { + "cell_type": "markdown", + "id": "1b062daf", + "metadata": {}, + "source": [ + "## 2. Imitation learning loop" + ] + }, + { + "cell_type": "markdown", + "id": "9116c2ef", + "metadata": {}, + "source": [ + "### Overview" + ] + }, + { + "cell_type": "markdown", + "id": "5cbc8c56", + "metadata": {}, + "source": [ + "We consider the following imitation learning pipeline in this example (see Section 2.2):\n", + "\n", + "* For some number of epochs:\n", + " 1. Generate initial variable values (i.e., trajectories) from `nn.Module` that uses map info as input.\n", + " 2. Plan trajectories using `TheseusLayer`, initializing optimization variables with the result of step 1.\n", + " 3. Compute a loss that encourages the output of step 2 to be a high quality trajectory that is close to an expert.\n", + " 4. Use backpropagation to update the parameters of the module in step 1.\n", + " " + ] + }, + { + "cell_type": "markdown", + "id": "eaade964", + "metadata": {}, + "source": [ + "### 2.1. A basic initial trajectory model" + ] + }, + { + "cell_type": "markdown", + "id": "82abf5d0", + "metadata": {}, + "source": [ + "The following cell creates a basic model for generating initial trajectories. This model takes as input a one hot representation of the map's ID and generates a trajectory between the map's start and goal positions. 
The output is a dictionary with keys mapping to variable names, and values mapping to initial values (tensors) for each of them that represent the resulting trajectory.\n", + "\n", + "
\n", + "Note: We don't include the model code to keep the focus on the learning part, but the interested reader can find it here. This model takes advantage of the probabilistic interpretation of GPMP2 as a way to easily generate diverse smooth trajectories between the start and goal positions. The high level idea is as follows. The trajectory is generated by first producing a parabola whose focus is the middle point between start and goal, and whose distance between focus and vertex is the output of an MLP; this parabola makes it easier for the model to learn wide curves. Then, the model can add higher order curvatures to the trajectory by constructing a \"sampled\" trajectory around the parabola, using the probabilistic formulation of the motion planning problem. The sample in this case is the output of another MLP, which is used as a seed for \"sampling\" a trajectory around the parabola. The model then returns this sample as the initial trajectory to use.
" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "id": "e50e4e83", + "metadata": {}, + "outputs": [], + "source": [ + "init_trajectory_model = theg.InitialTrajectoryModel(planner)\n", + "init_trajectory_model.to(device)\n", + "model_optimizer = torch.optim.Adam(init_trajectory_model.parameters(), lr=0.03) " + ] + }, + { + "cell_type": "markdown", + "id": "dfd6c7b1", + "metadata": {}, + "source": [ + "### 2.2. Learning loop" + ] + }, + { + "cell_type": "markdown", + "id": "a2f6ec46", + "metadata": {}, + "source": [ + "With the model in place, we can now put all of this together to differentiate through the motion planner, and find good initial trajectories for optimization on two maps. This loop essentially follows steps 1-4 from the overview subsection." + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "id": "a2b2819f", + "metadata": { + "scrolled": true + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "------------------------------------\n", + " Epoch 99\n", + "------------------------------------\n", + "Imitation loss : 0.125\n", + "Error loss : 0.632\n", + "Total loss : 0.126\n", + "------------------------------------\n", + "------------------------------------\n" + ] + } + ], + "source": [ + "initial_trajectory_dicts = []\n", + "best_loss = float(\"inf\")\n", + "losses = []\n", + "num_epochs = 100\n", + "best_epoch = None\n", + "\n", + "# For speed considerations, we set the max number of optimizer iterations to a low number (2).\n", + "# This will also encourage the initial trajectory model to produce trajectories of higher quality.\n", + "planner.layer.optimizer.set_params(max_iterations=2)\n", + "for epoch in range(num_epochs):\n", + " clear_output(wait=True)\n", + " model_optimizer.zero_grad()\n", + " \n", + " # Step 1: Generate an initial trajectory by querying the model on the set of maps.\n", + " initial_traj_dict = init_trajectory_model.forward(batch)\n", + " # This updates the motion 
planner's input dictionary with the trajectories produced above.\n", + " planner_inputs.update(initial_traj_dict)\n", + "\n", + " # Step 2: Optimize to improve on the initial trajectories produced by the model.\n", + " planner.layer.forward(\n", + " planner_inputs,\n", + " verbose=False,\n", + " damping=0.1,\n", + " ) \n", + "\n", + " initial_trajectory_dicts.append(\n", + " dict([(k, v.detach().clone()) for k, v in initial_traj_dict.items()]))\n", + "\n", + " # Step 3: Compute a loss evaluating the quality of the trajectories.\n", + " # The loss consists of two terms. The first one encourages the final trajectory to \n", + " # match an expert trajectory available for this map (imitation_loss).\n", + " # The second term uses the trajectory planner's total squared error, which also\n", + " # encourages the trajectory to be smooth and avoid obstacles. We scale this term\n", + " # by a small factor, since otherwise it would completely dominate over the\n", + " # imitation loss.\n", + " error_loss = planner.objective.error_squared_norm().mean() / planner.objective.dim()\n", + "\n", + " solution_trajectory = planner.get_trajectory()\n", + " imitation_loss = F.mse_loss(\n", + " batch[\"expert_trajectory\"].to(device), solution_trajectory)\n", + " loss = imitation_loss + 0.001 * error_loss\n", + " \n", + " # Step 4: Do backpropagation through the TheseusLayer and update the model parameters.\n", + " loss.backward()\n", + " model_optimizer.step()\n", + " \n", + " if loss.item() < best_loss:\n", + " best_loss = loss.item()\n", + " best_epoch = epoch\n", + " losses.append(loss.item())\n", + " print(\"------------------------------------\")\n", + " print(f\" Epoch {epoch}\")\n", + " print(\"------------------------------------\")\n", + " print(f\"{'Imitation loss':20s}: {imitation_loss.item():.3f}\")\n", + " print(f\"{'Error loss':20s}: {error_loss.item():.3f}\")\n", + " print(f\"{'Total loss':20s}: {loss.item():.3f}\")\n", + " 
print(\"------------------------------------\")\n", + " print(\"------------------------------------\")" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "id": "61a63881", + "metadata": {}, + "outputs": [ + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAt8AAAGBCAYAAAC+Zt3eAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8vihELAAAACXBIWXMAAAsTAAALEwEAmpwYAABW20lEQVR4nO3dd3hUZdoG8PtMySSTOpPeE0gIkEAoARIQQhDBBiiCoIDIumJBV3HFlRURcVF3xfa5ioIKFjouRaRKlU6A0EIL6QmkTXqfmfP9ETIaE0LqTDJz/64rF+TUZzheeOflOe8riKIogoiIiIiI2p3E1AUQEREREVkKhm8iIiIiIiNh+CYiIiIiMhKGbyIiIiIiI2H4JiIiIiIyEoZvIiIiIiIjMWr43rBhAx555BH4+/vDxsYGISEhmDt3LoqLi+94riAIDX7FxcW1f+FERERERG1AMOY835GRkfDz88O4cePg4+ODM2fOYMGCBejevTuOHDkCieT2PwsIgoAnn3wSzzzzTJ3tvXv3hlKpbO/SiYiIiIhaTWbMm/38889wdXU1fB8dHQ21Wo3p06dj//79GDFiRKPne3t7IzIysr3LJCIiIiJqF0ZtO/lj8K41YMAAAEBGRoYxSyEiIiIiMjqjjnw35MCBAwCAHj163PHYJUuW4IMPPoBUKkVkZCTefvttDB06tMn3cnFxQUBAQEtLJSIiIiK6o+TkZOTm5ja4z6g933+WkZGBvn37Ijw8HLt372702GnTpuHBBx+El5cXUlJS8MEHHyA+Ph67d+/G8OHDm3S/iIgIxMbGtkHlREREREQNayxzmix8l5SUYPjw4cjMzMSJEyfg4+PTrPOLi4sRFhYGX19fHDp06LbHLV26FEuXLgUA5OTkICUlpVV1ExERERE1prHwbZJ5vsvLyzFmzBgkJiZi586dzQ7eAGBvb48HHngAJ0+ebPS4mTNnIjY2FrGxsQ32nBMRERERGYvRe76rq6sxYcIExMbGYvfu3ejVq1erricIQhtVRkRERETUvowavvV6PaZMmYK9e/di69atrZo2sKioCFu3bsXAgQPbsEIiIiIiovZj1PA9a9YsrF+/Hm+88QZsbW1x7Ngxwz4fHx/4+PggJSUFXbt2xfz58zF//nwAwOLFi3HlyhXExMQYXrhcvHgxbt68iZUrVxrzIxARERERtZhRw/f27dsBAIsWLcKiRYvq7HvrrbewYMECiKIInU4HvV5v2BcSEoKNGzdi48aNKCwshIODA4YMGYJvvvmGI99ERERE1GmYdKpBY+NUg0RERETU3jrcbCdERERERJaI4ZuIiIiIyEgYvomIiIiIjIThm4iIiIjISBi+TaRSq0NybqmpyyAiIiIiI2L4NpFVx1Nx76cHUVGtM3UpRERERGQkDN8mkpxbiopqPTSlVaYuhYiIiIiMhOHbRLKLKwEA+WUM30RERESWguHbRLKKKgAA+aXVJq6EiIiIiIyF4dtEOPJNREREZHkYvk1AFEWGbyIiIiILxPBtAoXl1ajS6gGw7YSIiIjIkjB8m0DtqDfAkW8iIiIiS8LwbQLZRQzfRERERJaI4dsEamc6cbSRI7+MbSdEREREloLh2wRq205CPOyRz0V2iIiIiCwGw7cJZBVVwE4hg7eTDdtOiIiIiCwIw7cJ5BRXws1BAZXSCgVsOyEiIiKyGAzfJpBdXAE3ewVUSjlKKrWGaQeJiIiIyLwxfJtAVlEl3B2s4WRrBQAoYO
sJERERkUVg+DaymtUta0a+1cqa8M0ZT4iIiIgsA8O3kRVXalFRrYebvTVUSjkAQMMZT4iIiIgsAsO3kWXfmuPbzUEBJyXbToiIiIgsCcO3kdWubulmbw31rZ5vDcM3ERERkUVg+Day2gV2aka+a9pOON0gERERkWVg+Day2qXl3R2sYS2XQmkl5SqXRERERBaC4dvIsosrobSSwk4hAwColFZsOyEiIiKyEAzfRpZdXAk3e4XheyelnG0nRERERBaC4dvIsooq4OZgbfhebWvFqQaJiIiILATDt5Hl1Bv5tuJUg0REREQWguHbyLKKKuBm/4eRb6WcK1wSERERWQiGbyMqqdSirEoHd4e6I9+F5dXQ6vQmrIyIiIiIjIHh24j+uLplrdol5gvLOfpNREREZO4Yvo0o6w+rW9ZS3VrlMp9930RERERmj+HbiLKLaxfY+ePId2345sg3ERERkblj+DainFtLy7va151qEABXuSQiIiKyAAzfRpRVVAGFTAIHa5lhm9Otnm+2nRARERGZP4ZvI8ouroS7gzUEQTBsY9sJERERkeVg+Dai7KK6C+wAgNJKCiuZhG0nRERERBaA4duIsoor4P6HpeUBQBAEqJRytp0QERERWQCGbyPKKaqE659GvoGa1hO2nRARERGZP4ZvIymr0qK4UltngZ1aKqUV206IiIiILADDt5Fk31pgx93eut4+lS3bToiIiIgsAcO3kWTfmuP7tiPfbDshIiIiMnsM30aSVVSzuqVbQyPfSisUlFVBrxeNXRYRERERGRHDt5HUjny7NzTybWsFvQgUV2iNXRYRERERGRHDt5FkF1fASiaBo4283j7VrVUuNez7JiIiIjJrDN9Gkl1UCVc7RZ3VLWv9vsolwzcRERGROWP4NpLs4ooGW06AmrYTAJxukIiIiMjMMXwbSc3S8vVftgR+bzvhjCdERERE5o3h20iyiioanGYQ+H3ku4BtJ0RERERmzajhe8OGDXjkkUfg7+8PGxsbhISEYO7cuSguLr7juRUVFZgzZw48PT1hY2ODqKgoHDx40AhVt15FtQ5FFVq4OzQ88m2vkEEmEaBh2wkRERGRWTNq+F68eDGkUineffdd7NixA8899xyWLFmCe+65B3q9vtFzn3rqKSxbtgwLFy7E1q1b4enpidGjRyMuLs44xbdCzq1pBl3tGx75FgQBTko5206IiIiIzJzMmDf7+eef4erqavg+OjoaarUa06dPx/79+zFixIgGzzt79ixWrVqFb7/9FjNmzDCcGxoaivnz52PLli1Gqb+lfl9gp+HwDdxa5ZIj30RERERmzagj338M3rUGDBgAAMjIyLjteVu2bIFcLsekSZMM22QyGSZPnoydO3eisrKy7YttQ78vsNNw2wlQu8Q8wzcRERGROTP5C5cHDhwAAPTo0eO2x1y8eBGBgYFQKpV1toeGhqKqqgoJCQntWmNrZTdl5NtWjgK2nRARERGZNZOG74yMDMyfPx8jR45ERETEbY/TaDRQqVT1tqvVasP+jiyruBJyqWBYTKchKqVVh13hUqcXkVlQbuoyiIiIiDo9k4XvkpISjBs3DjKZDMuXL2+3+yxduhQRERGIiIhATk5Ou92nMbWrW0ok9Ve3rOWktEJBWRVEUTRiZU2z4VQaYhbvR2E5R+aJiIiIWsMk4bu8vBxjxoxBYmIidu7cCR8fn0aPV6lUyM/Pr7e9dsS7dgS8ITNnzkRsbCxiY2Mb7Dk3huziCrg20u8NAGpbOap1IkoqtUaqquku3ShGpVaPG4Uc/SYiIiJqDaOH7+rqakyYMAGxsbHYtm0bevXqdcdzQkNDkZSUhLKysjrb4+PjYWVlhaCgoPYqt01kF1XCvZF+b6Bm5BtAh+z7Ts+v+XOvnTKRiIiIiFrGqOFbr9djypQp2Lt3LzZt2oTIyMgmnTdmzBhUV1dj/fr1hm1arRZr167FqFGjoFA0HmxNLbv49qtb1lLfCt8dccaTVE
1N+M4uYvgmIiIiag2jzvM9a9YsrF+/Hm+88QZsbW1x7Ngxwz4fHx/4+PggJSUFXbt2xfz58zF//nwAQN++fTFp0iS8/PLLqK6uRmBgIJYsWYKkpCSsXLnSmB+h2Sq1OuSXVcPNvvG2E5WtHAA63CqXoigawndOCcM3ERERUWsYdeR7+/btAIBFixYhKiqqztfXX38NoCbs6XS6eiteLl++HDNmzMC8efPwwAMPIC0tDTt27EC/fv2M+RGaLccwx3fnbDvJKalERXXNs+DINxEREVHrGHXkOzk5+Y7HBAQENDjjh42NDT766CN89NFH7VBZ+6ldYOdOI9+1bScdbeQ7TfN7nz1HvomIiIhax+SL7Ji72gV2XO/wwqWDjRyCABR0sJ7v2pYTZ1srw2chIiIiopZh+G5nTVlaHgCkEgFONnLkd7C2k9S8mukF+/o5cbYTIiIiolZi+G5n2UWVkEoEONvefnXLWh1xlctUTRk8HKzho1IyfBMRERG1EsN3O8sqqoCLnVWjq1vWclLKO1zbSZqmDH5qJVztFSiu1KK8SmfqkoiIiIg6LYbvdvb0sC74cGKfJh2rtrWCprSDtZ1oyuCrVsLtVs86R7+JiIiIWo7hu511c7fHXcEuTTrWSWnVoUa+K6p1uFlUYRj5BmoWDCIiIiKilmH47kDUtlYdaoXL9Pyaly39nG0MUyVy5JuIiIio5Ri+OxAnpRwV1foO01ddO8d33ZFvhm8iIiKilmL47kBUtxba6Sij37VzfPuqlVDbWkEqETjyTURERNQKDN8dSEcM39ZyCVztFIbpEtnzTURERNRyDN8diEopBwDkd5AZT2qnGRSEmmkS3RwUHPkmIiIiagWG7w5EbdvxRr791ErD92721uz5JiIiImoFhu8OxKkDtZ2Iooi0W3N813K148g3ERERUWswfHcgTh2o7URTWoXSKl3dkW8HBXJLKqHTiyasjIiIiKjzYvjuQORSCeytZR1i5Dv1D9MM1nK1V0Av1gRzIiIiImo+hu8ORqXsGAvtNBS+3bjKJREREVGrMHx3MCpbK+SXmb7tpHaBHR9V3ZFvgKtcEhEREbUUw3cHo1LKkd8B2jpSNWVwtVfAxkpq2Fa7xDxnPCEiIiJqGYbvDqYjtZ38seUE4Mg3ERERUWsxfHcwKqUVCjpE20l5vfBtLZfC3lrG8E1ERETUQjJTF0B1qZRylFRqUaXVw0omQVmVFkcS8rD3SjbiUgvw70d6o5ePY7vWUKXVI7OwvM4c37Vc7RV84ZKIiIiohRi+OxjVrVUul+y/jtgUDY4nalCl08P2Vu/1+zsuYeVfI9u1hoyCcogi6o18AzUznnDkm4iIiKhlGL47mNq+6o9/vYourrZ4IsofMd3dMCBAjR+OpeCdrfE4lpiHyC7O7VZDQ9MM1nKzt8bZ9IJ2uzcRERGROWP47mBiQtzw+eP9EObtAH9n2zr7pgzyw1cHruOj3VexdmYkBEFo0T02nclAaZUWUwb5N7i/sfDtypFvIiIiohbjC5cdjJVMggd6e9YL3kDNC4+zYoJwIkmDo9fzWnyPj3Zfxb+2XkJRRcMvdqZrymAlkxgW1fkjN3sFyqp0KKnUtvj+RERERJaK4buTmTTAFx4O1vj416sQRbHZ52cUlCNVU4byah1+PpvZ4DGpmjL4qmwgkdQfWed0g0REREQtx/DdyVjLpZg1Iggnk/NxKCG32ecfuzVirlLKse5kWoPHNDTHdy3DQjtFnPGEiIiIqLkYvjuhRyN84O1kg492N3/0+1hiHpyUcsyKCcLZ9EJculFUZ78oikjNu334Nox8l3Dkm4iIiKi5GL47IYVMihdGBOFMagH2X81p1rlHE/MwKFCNR/r5wEoqwdo/jX4XllejuFLb4BzfAAx94NlFDN9EREREzcXw3UlN6O8DH5UNPmnG6Heapgzp+eWI7OIMla0VRoW6Y1NcBiqqdYZjGpvpBACclHLIpQJHvomIiIhagOG7k5
JLJfjbiGCcTS/E3svZTTrneJIGAAxzhE8a4IuCsmrsis8yHGMI384Nh29BEOBqp+DINxEREVELMHx3Yg/384a/s7LJvd9Hr+dBpZQjxN0eADCkqwu8nWzqvHhZG759VQ2HbwBwdbDmyDcRERFRCzB8d2JyqQQvjgjGxcwi7P7D6PXtHEvMw6BAZ8MUghKJgEcjfHEoIRdpt0J3mqYMLnZWsFXcfv2lmpFvznZCRERE1FwM353cQ3284O1kg++OJjd6XJqmDBkF5Yjsoq6zfUKEDwQBWB9bM/qdqim77cuWtdwcFMjlyDcRERFRszF8d3IyqQSTB/jicEIeUvJKb3vc0cSa+b2jurrU2e7tZIOhwa5YfyodOr3Y6BzftVztFMgrrYJWp2/9ByAiIiKyIAzfZmBihC8kArDmNovmADUtJ2pbKwS72dXbN3mAL24UVmDf5WxkFlTcMXy7OSggikBeaVWrayciIiKyJAzfZsDD0RojurtjfWw6qhsYjRZFEccTNRgUqG5wyfiRPdyhtrXCJ3uuQqcXG33ZEqgZ+QY41zcRERFRczF8m4nHBvoit6QSey7Vf/EyTVOOjIJyRHV1bvBcK5kED/f1xoWMmtUu79zzfWuJ+WK+dElERETUHAzfZiK6mys8Ha2x6kT91pNjt/q9a+f3bsikAb6G399uju9ahiXmiznyTURERNQcDN9mQiaV4NEIX/x2LccwbWCtY4l5cL5Nv3etbu726OvnBLlUgMetke3bcbGzAgBkM3wTERERNQvDtxl59Nbo9brY30e/RVHE0cQ8RHZxhiDU7/f+o7fGhGLB2FBIG+gL/yOFTAonpZwj30RERETNxPBtRrydbDC8myvWxaYZpgFM1ZThRmFFvfm9G9LH1wlTBvk36V5u9gr2fBMRERE1E8O3mZk80A9ZRZXYdyUHQNP6vVvCzd6aI99EREREzcTwbWZGdHeDm70Ca06kAgCOXs+Di50Vghrp924JV3sFe76JiIiImonh28zIb714ue9KNjILynEsUYNBTej3bi43ewVyiishimKbXpeIiIjInDF8m6FJA3yhF4EPdl7BzaIKRLVxywlQM/JdqdWjqELb5tcmIiIiMlcM32bIV63E0GAXbDyTAaDt+70BzvVNRERE1BIM32bq8YF+AGpCcldX2za/fm345ownRERERE0nM3UB1D5G9nSHu4MCQ4Jc2rzfG6iZ7QTgyDcRERFRczB8mym5VIKfX7wLSqv2ecRsOyEiIiJqPoZvM1Y7Ot0eHKxlUMgkDN9EREREzWD0nu/09HS8+OKLiIqKglKphCAISE5ObtK5AQEBEASh3temTZvatWaqTxAEzvVNRERE1ExGH/lOSEjAunXr0L9/fwwdOhS7du1q1vmjR4/GggUL6mwLCQlpwwqpqWrn+iYiIiKipjF6+B42bBiysrIAAF9//XWzw7eLiwsiIyPbozRqJjd7ayTmlpi6DCIiIqJOw+htJxIJZzc0F2w7ISIiImqeTpeEf/75ZyiVSigUCkRGRrLf24Tc7BUoKKtGpVZn6lKIiIiIOoVOFb7HjBmDzz77DDt37sTKlSthbW2Nhx9+GD/++ONtz1m6dCkiIiIQERGBnJwcI1Zr/mqnG8wtqTJxJURERESdQ6eaavCzzz6r8/3DDz+MyMhIzJ07F1OnTm3wnJkzZ2LmzJkAgIiIiHav0ZK4O9RMZXijoBzeTjYmroaIiIio4+tUI99/JpVKMXHiRKSnp+PGjRumLsfihHo5AADOpBaYthAiIiKiTqJTh+8/ao8l1Klxbg7WCHBW4kSyxtSlEBEREXUKnTp8a7VarF27Fn5+fvDw8DB1ORZpYKAaJ5M10OtFU5dCRERE1OGZpOd7w4YNAIBTp04BALZv3w5XV1e4uroiOjq6pjCZDNOnT8c333wDAFi9ejU2b96M+++/H76+vsjKysLnn3+O06dPY/Xq1ab4GARgYKAz1sWm41p2CUI87E1dDhEREV
GHZpLwPXHixDrfP//88wCA6Oho7N+/HwCg0+mg0/0+hV1gYCCys7MxZ84caDQa2NraIiIiAjt27MDo0aONVjvVNShQDQA4kaxh+CYiIiK6A5OEb1G8c4vCn4+JjIzE3r1726skaiEflQ08HKxxIkmDaZH+pi6HiIiIqEPr1D3fZHqCIGBgoBonkvKa9EMVERERkSVj+KZWGxioRlZRJVI1ZaYuhYiIiKhDY/imVhtY2/edxCkHiYiIiBrD8E2tFuRqB5VSzvBNREREdAcM39RqEomAAQFqLrZDREREdAcM39QmBgaqkZJXhqyiClOXQkRERNRhMXxTm2DfNxEREdGdMXxTm+jp6QBbKynDNxEREVEjGL6pTcikEvTzVzF8ExERETWC4ZvazKBANa5kFaOgrMrUpRARERF1SAzf1GYGBjoDAE4m55u4EiIiIqKOieGb2kxvH0dYSSU4kZRn6lKIiIiIOqQ2Cd95eQxbBFjLpejj64QTHPkmIiIialCzwveyZcvwwQcfGL4/f/48fHx84ObmhoiICNy8ebPNC6TOZWCgGhcyClFaqTV1KUREREQdTrPC92effQYbGxvD96+88gqcnJzwySefoLCwEPPnz2/zAqlzGRiohk4v4nQqR7+JiIiI/kzWnINTUlLQvXt3AEBhYSEOHDiATZs24f7774ezszPmzp3bLkVS59HPXwWJAJxM0mBosKupyyEiIiLqUJo18q3X6yGR1Jxy6NAhCIKA4cOHAwB8fX2RnZ3d5gVS52KnkCHM2xHHOd83ERERUT3NCt/BwcH45ZdfAABr1qzB4MGDoVQqAQCZmZlQq9VtXyF1OgMD1DiTVoBKrc7UpRARERF1KM1qO3n11Vcxbdo0fPfdd8jPz8f69esN+/bt24fevXu3eYHU+QwIVOPrQ0nYcykb9/fyvOPxcWkFeH/7JaRpyuFgI4e9tQwO1nI42NT8encPN7awEBERkVloVvh+/PHH4efnh+PHj2PAgAEYNmyYYZ+7uzvGjh3b5gVS5xPV1RneTjZ4fuVpjOzhjjmjQxDiYV/vuOyiCvxn5xVsOJUOFzsFhga7oLhCi+KKamQUlOPSjWrkl1VhzclUHHwtBm721ib4NERERERtRxBFUTR1EcYSERGB2NhYU5dhEUortVh+OAlfHUhESZUWD/fxxux7usFXrUSlVoflh5Px2Z5rqNLp8Ze7AvFCTBDsreX1rpOUW4qRHx3Ak4MD8OaDPU3wSYiIiIiap7HM2ayR7yNHjkCj0eDBBx8EULO4zgsvvIALFy5g9OjR+Pe//w2pVNr6iqnTs1XI8MKIYEyN9MeSA9ex4nAyfj6XiYf7euNEkgbJeWW4u7sb5j3YE4Eutre9TqCLLR7q440fj6XgmeguHP0mIiKiTq1ZL1y+/vrrOHXqlOH7OXPmYNu2bejWrRuWLFmCd999t80LpM7NSWmFuff1wMHXYvBohC/+dzoDEomAFTMG4JsnBzQavGu9OCIIWr2IL/cnGqFiIiIiovbTrPB96dIlREREAACqq6uxYcMGfPzxx/jpp5+waNEirFq1ql2KpM7P3cEaix7uhbi3RmHXy8MwPMStyecGuNji4b7eWHk8BdlFFe1YJREREVH7alb4LikpgYODAwDgxIkTKC0tNbSg9OvXD6mpqW1fIZkVO4UMMmmz/rMD8Pvo95ID19uhKiIiIiLjaFYK8vb2xtmzZwEA27dvR1hYGNzcakYw8/PzDXN+E7U1f2dbjO/rjZXHU5HF0W8iIiLqpJoVvh977DH885//xIQJE/DRRx9h6tSphn2nT59GcHBwmxdIVOvFEcHQ60Us2c/RbyIiIuqcmhW+FyxYgH/84x+orKzE66+/jtmzZxv2nT17FhMnTmzzAolq+Tkr8Ug/H6w6kYqbhRz9JiIios6H83xTp5KmKUPM4v2YGumPBWNDTV0OERERUT1tNs93rQsXLuDAgQPQaDRQq9UYPnw4QkMZhKj9+aqVmN
C/ZvT72eiu8HDkvN9ERETUeTQrfGu1Wjz55JNYvXo1/jhgLggCHn/8caxYsYKL7FC7mxUThA2n0rFkfwLeHhdm6nKIiIiImqxZPd9vv/021q1bh4ULFyIpKQnl5eVISkrCwoULsXbtWixcuLC96iQy8FUrMTHCF6tOpOLyzSJTl0NERETUZM3q+Q4MDMSMGTMwf/78evsWLlyI5cuXIykpqU0LbEvs+TYfeSWVGPXxQXg52WDj84NbNHc4ERERUXtoLHM2K7FkZmZi8ODBDe4bPHgwMjMzm18dUQs42ynwr4fCcD6jEF8d5LLzRERE1Dk0K3x7eXnh8OHDDe47cuQIvLy82qQooqa4r5cnHujtiU9+vYorN4tNXQ4RERHRHTUrfE+ZMgWLFi3CO++8g8TEREPP93vvvYdFixZh2rRp7VUnUYMWjg2FvbUcczachVanN3U5RERERI1qVs+3VqvFE088gTVr1kAQBMN2URQNs53IZC2avdAo2PNtnn45dwOzVp3Ga/eG4PnhQaYuh4iIiCxcm83zLZPJsGrVKrzxxhs4ePCgYZ7vYcOG4caNG+jXrx/OnTvXJkUTNdUDvT3xy3kPfLL7Gu7p4Y5gd3tTl0RERETUoBYNU4eGhtZbVOfy5cu4ePFimxRF1FwLx4Xh6PUDeHXDOfz0bBRnPyEiIqIOiQmFzIKLnQILx4XhbFoBvj7Ucae7JCIiIsvG8E1m48Henrg31AMf7b6K06n5pi6HiIiIqB6GbzIbgiDg3fG94Olojae/i0VKXqmpSyIiIiKq444934mJTVvA5ObNm60uhqi11LZWWP7kAIxfcgQzlp/E/54fDCellanLIiIiIgLQhPAdFBRUZ1rB2xFFsUnHEbW3Lq52WPZEBKYsO46Z35/C908NhLVcauqyiIiIiO4cvpcvX26MOoja1IAANT58NBwvrj6DORvO4dNJfSCR8IdDIiIiMq07hu/p06cbow6iNjcm3Avp+eX4947L8FXZ4LV7u5u6JCIiIrJwHXc5SqI28Gx0F6RqyvDF/uvwVSvx2EA/U5dEREREFozhm8yaIAh4Z1woMgvKMW/TBQS62CKyi7OpyyIiIiILxakGyezJpBJ8PqUf/NRKzF4bh8KyalOXRERERBaK4Zssgp1Chk8m9UFOcSXmbjwHURRNXRIRERFZIIZvshjhvk54ZVQ3bDt/E+tj001dDhEREVkgo4fv9PR0vPjii4iKioJSqYQgCEhOTm7SuXq9Hu+99x4CAgJgbW2N8PBw/PTTT+1bMJmVZ4Z1RVQXZyz4+SISc0pMXQ4RERFZGKOH74SEBKxbtw4qlQpDhw5t1rlvvvkmFixYgBdeeAHbt29HZGQkJk6ciG3btrVTtWRupBIBH00Kh1wqwUtr4lCl1Zu6JCIiIrIgRg/fw4YNQ1ZWFrZt24aJEyc2+bzs7GwsXrwYr7/+Ol599VXExMTgq6++QkxMDF5//fV2rJjMjaejDf79SC+czyjER7uvmrocIiIisiBGD98SSctuuXPnTlRVVWHq1Kl1tk+dOhXnz59HUlJSW5RHFuLeME9MHuCLrw5ex5GEXFOXQ0RERBai07xwefHiRSgUCgQFBdXZHhoaCgCIj483RVnUic0f0xOBzraYvS4OmtIqU5dDREREFqDThG+NRgMnJycIglBnu1qtNuxvyNKlSxEREYGIiAjk5OS0e53UeSitZPi/x/oiv7Qajyw5goTsYlOXRERERGau04Tvlpo5cyZiY2MRGxsLV1dXU5dDHUyYtyNWPj0IxRXVePjzI9h3OdvUJREREZEZ6zThW6VSoaCgoN7iKLUj3rUj4ETNNSBAjc0v3AU/ZyX+8t1JLNl/nYvwEBERUbvoNOE7NDQUlZWVuH79ep3ttb3ePXv2NEVZZCa8nWyw4dnBuL+XJ/694zJeXhuHimpdu9+XIZ+IiMiydJrwfe+990Iul2PlypV1tv/4448ICwtDYGCgiSojc2FjJcV/H+
uLOaNDsOVsJh796iji0gqg07d9QK6o1mHBlovotWAXPvn1qlGCPhEREZmezBQ33bBhAwDg1KlTAIDt27fD1dUVrq6uiI6OrilMJsP06dPxzTffAADc3Nzwyiuv4L333oO9vT369euHtWvXYu/evdiyZYspPgaZIUEQMCsmCCHu9nh5bRwe+vww7K1lGBTojKiuzhjc1Rkh7vaQSIQ7X+w2zqcX4uW1Z3A9pxR9/Zzwya/XsPFMBhaMDUVMiFsbfhoiIiLqaEwSvv+8uM7zzz8PAIiOjsb+/fsBADqdDjpd3dHARYsWwc7ODp9++ilu3ryJkJAQrFu3Dg8++KBR6ibLMbKnOw7MGY5DCbk4ej0PRxPz8OulLACASinHyB7umBblj94+Tk2+planx5L91/HpnmtwsVPgh6cGYmiwKw4n5OLNzRcwY/lJjA51x/wxofB2smmnT0ZERESmJIgW1HQaERGB2NhYU5dBnVRGQTmOXs/DkYRc7Lh4E2VVOvTxdcITUf54oLcnFDLpbc9Nyi3FK+vicCa1AGPDvfDOuDA4KuWG/VVaPb4+lIjP9iQAAF4YEYTHB/pBZWvV7p+LiIiI2lZjmZPhm6gFiiqq8dOpdPxwNAWJuaVwtrXCpAG+uCvIBZqyKuQWVyKnpBK5xVXIKanE0et5sJJJ8K+HwjAm3Ou2103PL8PCn+OxKz4LEgHo76/C3T3ccXd3NwS52dWb516vF5FVXIH0/HIEutjCxU7R3h+diIiI7oDh+xaGb2prer2Iw9dz8f3RFOy5lIU/vpsplQhwtrWCq70C3dzt8Y97u8PD0bpJ1z2XXoBf47Pw66VsxN8oAgD4qZUYHuIKUQRSNWVI05QhPb8cVTo9AMBaLsFjA/3wzLCuTb4PERERtT2G71sYvqk9ZRSUIymnFC72VnC1U0CltGrVi5m1bhSWY8+lbOy9nI3DCbmwlkvhp1bCV20DX7USfmolPByssePCTWw8kwGJIGBChA+ei+4KX7WyDT4ZERERNQfD9y0M39TZ6fVio4E+TVOGLw9cx/rYdOhFEQ/19caMIQHo6elQr2WFWq68Socqrb5O3z4REVGtxjKnSWY7IaKWudNIuq9aiUUP98ILI4Kw9GAiVh1PxYZT6XB3UCC6myuiu7nhrmAXONowNLbGi6vP4EpWEXbPjoa1/PYv2hIREf0ZwzeRGfJ0tMFbY0LxQkwQ9lzOxoErOdhx4SbWxaZDKhHQ19cJ0d1ccVewC3r7OEHaBu0xluJ6Tolh2skfjqbg6WFdTFwRERF1JgzfRGbM2U6BRyN88WiEL7Q6PeLSCnDgag72X8nBh7uv4sPdV+FgLcPgri64K9gFQ4Nd4O9sa+qyO7TvjiTDSipBLx9H/HdfAh4d4Mt/SSAioiZj+CayEDKpBBEBakQEqPH3USHIK6nE4et5OHQtB4eu1cxdDgBdXW0x974eGNnT3cQVdzyF5dXYcCodY8K98NRdgXjgs9+wZP91vH5fd1OXRkREnQTDN5GFcrZTYGy4F8aGe0EURSTlluJQQs20iX/9PhYje7jhrTGhnDHlD9bHpqGsSlfzEquXAx7q443lh5MwfbA/PB25KikREd2ZxNQFEJHpCYKALq52eCIqANv+NhSv39cdR67nYeRHB/DZnmuo1OpMXaLJ6fQivjuajAEBKoR5OwIAXrmnG0QR+GT3NRNXR0REnQXDNxHVYSWT4Nnorvj1lWjc3cMNH+6+itEfH8SBqzmmLs2k9lzKQpqmHDOGBBq2+aqVmBblj/Wn0nAtq9iE1RERUWfB8E1EDfJyssEXU/rj+78MhCAImP7tCUxYcgR7L2fBgpYHMFh+OBneTjYY9ade+FkxQbC1kuHfO66YqDIiIupMGL6JqFHDurlix8tD8fbYUNworMBfVsTivk9/w6YzGdDeWtre3F26UYSjiXmYFuUPmbTuX5tqWys8O7wrfr2UhZPJGhNVSEREnQXDNxHdkUImxfTBAdg/Zz
g+ejQcOr2Il9fGYfji/fjhaDLyS6tMXWK7WnE4GdZyCSYP8G1w/1+GBMLNXoH3tl2yyH8VICKipmP4JqImk0slGN/PBztfHoZlT0TA1V6BNzdfRL9/7ca4/x7Ch7uu4GSyxqxGxDWlVdgUl4Hx/XzgpLRq8BgbKylm39MNp1MLsCs+y8gVEhFRZ8KpBomo2SQSAff0dMfIHm44l16I/VdycPBaDj7fl4DP9ibAXiFDVFdnqG2tUKXVo0qnr/OrtVwKJ6UcKqUVVEo5VLZWUCmtYKuQQSoIkEoEyKQCJIIAmUSAl5MNXO0VJvmsq0+kolKrx4zBAY0eN7G/D77+LRH/2XEZI3u4c9VQIiJqEMM3EbWYIAgI93VCuK8TXhoZjMKyahy+nouDV3Nw5Hoeyqt1sJJKoJBJYFX7JZWgqKIaV24WI7+sCmVVd57G0FouwZdT+2N4iJsRPtXvqnV6/HA0BUODXRDsbt/osTKpBC+N7Ia/rT6D/VeycXcPLlJERET1MXwTUZtxVMpxfy9P3N/Ls8nnVGp1KCirhqa0CmVVWuj0gFavh/7Wr1qdiI92X8Vfv4vFR5P6YGy4Vzt+grq2X7iJm0UVeHd8WJOOvy/MAx4O1lh+OJnhm4iIGsTwTUQmpZBJ4e4ghbuD9W2PGdhFjb+uiMVLa86gqLwaUyP9270uURTx9W+JCHBWYni3po24y6USTIvyxwc7r+BqVjG63WG0nIiILA9fuCSiDs/BWo7v/jIQMSFumLfpAj7fl9Dus4psOZuJc+mFmBUTBEkz+rcfG+gHhUyCFUeS2684IiLqtBi+iahTsLGS4qtp/TGujxc+2HkFi35pv2n9yqt0+Pf2ywjzdsAj/Xyada7a1goP9fHG/06no6DMvKdgJCKi5mP4JqJOQy6V4ONH+2B6lD++PpSEv687i+ziija/z9e/JSKzsALzHujZrFHvWjPuCkBFtR5rTqa1eW1ERNS5MXwTUacikQhYMDYUL90djP+dycCQ9/fipTVncDo1v8GRcFEUkZBdjC/2J+CJb0/gyPXcRq+fVVSBJQeuY3SoOyK7OLeoxu4eDojq4ozvjySb1ZznRETUenzhkog6HUEQMPuebhjXxwvfH03BhlPp2ByXid4+jpgeFYD7e3niYmYhdsdnYXd8FhJzSwEA9goZnv4uFquejkS4r1OD11688wqqdXrMva9Hq2p8ckgAnvnhFHbHZ+G+Zsz+QkRE5k0QLWgt5IiICMTGxpq6DCJqYyWVWmw8nY7vjqYgIbsEEgHQi4BMIiCqqzNG9XTHyJ7ukAgCJnx5BCUVWqx/NgpBbnVnI7mQUYgx/z2Ev94ViDce6NmqmnR6EcMX74Ongw3WPRvVqmsREVHn0ljm5Mg3EXV6dgoZpkUFYGqkPw4n5GHv5Wz08XPC8BBXOFjL6xz7w18GYcKXRzHtmxPY8NxgeDvZAKhpT/nXL/FQKa3wwojgVtcklQiYHhWAf/1yCRcyChHm7djqaxIRUefHnm8iMhuCIOCuYBfMH9MTY8O96gVvAAhwscX3fxmIkkotpn1zHHkllQCAXfFZOJaoweyRwXC0qX9eS0yM8IXSSmqR0w6Koogxnx3Cv7bGm7oUIqIOheGbiCxOTy8HfPvkAGQWlOPJ5SehKa3Ce9suIdjNDo8N9Guz+zjayPFIPx9sictE7q2QbynOphfifEYhVp9IRUml1tTlEBF1GAzfRGSRBgSosWRKf1y6UYRRHx9Ecl4Z3nigB2TStv1r8ckhAajS6bHqeGqbXrej2xyXAUEASqt02ByXYepyiIg6DIZvIrJYMd3dsHhiOHJLKjGsmyuGhzRtGfnm6Opqh+hurvjhWArKq3Rtfv2OSKvT4+ezNzCqpzt6eDpg9QnL+sGDiKgxDN9EZNEe6uuNzbOG4LPH+rbbPZ4f3hW5JZX425oz0OnNf4Kpo4l5yC2pxEN9vPH4QF9cyCjCufQCU5dFRNQhMHwTkcUL93Vqs5csGz
KoizPeHhuK3fFZeGvLhQYXAzInm+MyYa+QIaa7G8b19YaNXGpxbTdERLfD8E1EZARPRAXg2eiu+PFYKr7Yf93U5TRbeZUOCdnFd/zBoaJahx0XbuLeMA9Yy6VwsJZjTLgntpzNRHFFtZGqJSLquDjPNxGRkbw2OgQ3C8vxwc4r8HS0xvh+PiapQxRFbDiVjl3xWXCxU8DL0RoejtbwcrKBp6M17KxluJZVgouZhbiYWYSLmUVIzCmBXgTeGReKaVEBt7323svZKKnUYlwfb8O2xwf5Y11szSqkUyP9jfAJiYg6LoZvIiIjkUgE/GdCOHJKKvHahnNwtVdgaLCrUWtIzCnBPzeex7FEDbydbFBRrUNeadVtj/d0tEZPTwfcH+aBQwm5WLzrKh7s7QWVrVWDx286kwFXewWiujobtoX7OKKnpwNWHU/FlEF+EAShzT8XEVFnwfBNRGREVjIJlkztj0e/PIrnfjyNtc9EItSraatfVmp1OJtWiKLyakglAiQSAVJBgEQCyCQSeDhYw1dt02C4rdLq8dWB6/hsXwIUMgneG98LkyJ8IZEIqKjWIauoApkFFbhRWI7C8moEudmhp6cDnO0Uhms80NsL9316EB/tvop3Hgqrd4/Csmrsv5KDqZH+kEp+r0EQBDw2yA9vbrqAc+mFCPd1av4fHBGRmWD4JiIyMgdrOVbMGIjxXxzGk8tPYkJ/H3R1tUOQmx26utrC/tbKnDq9iIuZhTickIcj13NxMlmDimp9o9d2sVOgv78T+vur0M9PhTBvR1zMLMTc/53H1awSPNDbE2892BNuDtaGc6zlUvg728Lf2bbRa4d42GNqpD9+PJaCxwf5oYenQ539Oy7eQJVOj3F9vOqd+1AfL7z7yyWsOp7K8E1EFo3hm4jIBDwcrbHiLwPxyro4fP1bIqp1v7/I6O6ggJ9aiatZJSgsr3lJsZu7HSYP8MPgrs7wcLSGTi9CL4rQ6WtCuk4vIjmvFKdT8nEqNR87L2YBAKykElTr9fB0sMY30yNwdw/3VtX9yj3dsOVsJhb+HI9VTw+qM8q+6UwmAl1s0dun/ki+vbUcY8O9sOVsJuY92MPwAwYRkaVh+CYiMpFu7vbY+uJQVOv0SNWU4Xp2CRJySpCQXYLUvDKM6umOIUEuGNzVuc5I9e3cFexieKExt6SyJoin5MPGSoqnh3aBraL1f+U7Ka3w93u64c3NF7Hjwk3c18sTAHCzsALHkvLwtxHBt+3pfnyQH9bGpmFTXCam8cVLIrJQDN9ERCYml0rQ1dUOXV3tMKqNrulip8CoUA+MCvVooyv+7rGBflh5PBX/+uUSYrq7wVouxdZzmRBFNNhyUqu3jyNCvWpevJzKFy+JyEJxnm8iImoWmVSCt8aEIqOgHEsPJgIANsVloLePI7q42t32PEEQ8NhAP1y6UYSz6YUoq9IiNlmDbw8l4ZW1cbjnowN4+IvDrZ4PvLC8Gs/+cApf7E+AppGZXO4kq6gCs1aexrRvjiMpt7RVNRER1eLINxERNVtUV2fc38sDX+xPQD8/FS5kFGHeAz3ueN64Pl54d9slPPHNcZRUaqG/1eruaq9AT08HHErIxey1Z7F0Wn9IJC0bGf9491XsuHgTOy7exKe/XsO4Pl6YPjigybPKiKKI9bHpeOeXeFRp9bCSSnDfpwfx6qgQzBgSWGcmFyKi5mL4JiKiFpl7Xw/suZSNZ36IhSAAY8Nv33JSy95ajldHheBwQi5CvR3Ry9sRvX0c4X6rp3354SS8/XM8PtlzDa/c063ZNV26UYTvjybjiSh/TI30x3dHkvG/0xlYF5uOAQEqPDk4EHf3qGmVaUiapgz/3Hgev13LxcAANd5/pBeUVjK8sfE8/vXLJfxy/gY+mNAbQW72za6NiAgABPFOawWbkYiICMTGxpq6DCIis/HRriv4v70JGBLkjJV/jWz19URRxJwN57DhVDq+nNof94Y1vWddFEVM+uoYEnJKsPfv0X
BS1iwEVFhWjfWn0vDd0WSkacohlQjo5m6PcB9H9PJxRLiPE4Ld7bD2ZBre334ZAoDX7+uOKYP8DaPvoihic1wmFvx8EWVVOrw8Mhgzh3aBTMruTSKqr7HMyfBNREQtVl6lw8wfYvGXIYGI6e7WJtesqNZh0tJjSMgqxsZZQ9DNvWmjzJvjMvDSmji8P74XJg/0q7dfpxdxKCEXJ5M0OJdRiHPpBSgoq+kvlwiAXgSGBrvgvfG94KNSNniPnOJKzN98Adsv3ERvH0d8OrkvAl0anx+diCwPw/ctDN9ERJ3DzcIKjPnvISitpNgy6y44KhufF7ykUosRi/fD09EaG58f0qR+cVEUkaYpx7mMAlzIKEIPT3uMDfdq0iwsv5y7gTc2nUeVVo+3x4ZiQn8fzt5CRAaNZU7+exkREXU4Ho7W+HJqP2QWlOOF1aeh0zc+TvTZnmvILq7E2+PCmvyipiAI8HNW4sHeXnj9vu4Y18e7yQH6gd6e2P7SUPTydsScDefwtzVxKGrlLC1EZBkYvomIqEPq76/GO+PC8Nu1XLy77RL0twngCdkl+OZQEiZF+KKPEZeu93S0waqnI/HqqG7Ydv4G7v/0N5xOzTfa/Ymoc2L4JiKiDmvyQD9Mi/THN4eSMPLjA1gXm4Yqrd6wXxRFLNhyEUorKV67N8To9UklAl4YEYx1z0QBACZ+eRSf7blWp0Yioj9i+CYiog7t7bGh+O/jfWEtk+K1Decw/IN9+PZQEsqqtNh58SYOJeTi76NC4GynMFmN/f1V2PbSUNzfyxMf7r6Kez89iH1Xsk1WDxF1XHzhkoiIOgVRFHHgag6+2HcdJ5I1UCnlkEoEuNgpsPXFuzrEtH+iKGLflWy8s/USknJLERPiinkP9kTXRlb+JCLz06FeuExLS8OECRPg6OgIBwcHjB8/HqmpqU06VxCEBr/i4uLat2giIjI5QRAwPMQN656NwoZno9DXT4Wici0WjgvrEMEbqKlxRHd37Hx5GOY90AOxyfkY/fFBvLM1HoXlfCGTiIw88l1WVobw8HAoFAr861//giAImDdvHsrKynDu3DnY2jY+V6ogCHjyySfxzDPP1Nneu3dvKJUNz8n6Rxz5JiIyL9U6PeQdJHg3JLekEh/uuoI1J9OgVlrhnYfCcH8vT1OXRUTtrLHMadTl5ZctW4bExERcuXIFQUFBAGqCc3BwML766iu88sord7yGt7c3IiNbv4oaERF1fh05eAOAi50C743vjSmD/PHGxvN4fuVpTOjvgwVjQ2GnMOr/gomogzDq31pbtmxBZGSkIXgDQGBgIIYMGYLNmzcbsxQiIiKjCfN2xIbnBuPFEUH43+l03P/pbziVojF1WURkAkYN3xcvXkRYWFi97aGhoYiPj2/SNZYsWQKFQgGlUokRI0bgt99+a+syiYiI2pxcKsHfR4Vg3TNR0IsiJn55FB/tvopqHaclJLIkRg3fGo0GKpWq3na1Wo38/DsvTDB16lR88cUX+PXXX7F06VLk5eVhxIgR2L9//23PWbp0KSIiIhAREYGcnJzWlE9ERNRqEQFqbH9pKB7q643/23MNE788is1xGUjNK4MFTUBGZLGM+sKllZUVXnnlFbz//vt1ts+bNw/vv/8+tFpts65XXFyMsLAw+Pr64tChQ3c8ni9cEhFRR7L1XCbmb74ITWkVgJoe8b5+Tujnp0I/Pyf091d1mJlciKjpOswLlyqVqsER7tuNiN+Jvb09HnjgAXzzzTdtUR4REZFRPdjbC/eGeuBKVjFOpxbgTEo+TqfmY3d8FgAg3McRS6b2h5eTjYkrJaK2YtTwHRoaiosXL9bbHh8fj549e7b4uoIgtKYsIiIik5FJJQj1ckSolyOmRfoDAPJKKrHncjYW/hyPBz87hM8e64shQS4mrpSI2oJR/y1r7NixOHbsGBITEw3bkpOTcfjwYYwdO7bZ1ysqKsLWrVsxcODAtiyTiIjIpJztFHg0whebXxgCZ1
srTPvmOJbsv86ecCIzYNSe79LSUoSHh8PGxsawyM6bb76J4uJinDt3DnZ2NcvvpqSkoGvXrpg/fz7mz58PAFi8eDGuXLmCmJgYeHl5ISUlxbBtz549GDp06B3vz55vIiLqbEortXjtp3P45dwNjA51x+KJ4bC3lrfJdY9ez0M3d3v4qm3u+K/IWp0e13NKkZxXivT8cqTnlyFNU/NrZkE5eno54LnhQRgW7MJ/kSaL12F6vm1tbbF3717Mnj0b06ZNgyiKuPvuu/HJJ58YgjcAiKIInU4Hvf736ZdCQkKwceNGbNy4EYWFhXBwcMCQIUPwzTffcOSbiIjMlq1Chv8+1hd9fZ3w3vbLGPffw/hgYjj6+Tm1OOSeTSvAS2vOIDmvDADg7qDAgAA1BgaqMSBAjW7u9sgsKMfZ9AKcTSvA2bRCnM8oRHm1znANpZUUviolfNU26Oevwt5L2Zj+7QmEejng+eFBuDfMA1IJQzjRnxl15NvUOPJNRESd2fHEPMxadQa5JZXwd1biwd6eeLC3F7p72DcpiOv0Ir48cB0f774KN3sF3nywJ3JLq3AiSYOTSRrcLKoAAMilAqp1NfHASiZBqJcDwn2cEO7riK6udvBVKeGklNe5Z5VWj01nMvDlgetIzC1FFxdbPBvdFQ/19YaVjDO2kGVpLHMyfBMREXUiheXV2HHhBraeu4HDCbnQi0CQmx0e7O2Je3q6I8TdvsHpCTMLyjF7bRyOJ2nwQG9PvPtQLzgqf29fEUUR6fnlOJGkwaUbRfB3sUUfHyeEeNg3Kzzr9CJ2XLiJL/Yn4GJmESL8VVj1dCQDOFkUhu9bGL6JiMic5JZUYvuFm9h6NhMnkjUQRcBaLkGYlyN6+Tgi3McJvX0ccflmMeb+7zyqdXq8PTYUE/r7tHtftiiKWB+bjtd+Ooe/3hWIeQ+2fFYzos6mw/R8ExERUdtxsVNgWqQ/pkX6I6uoAkev5+FsegHOpRdi9YlULD+cbDg23McRn0zui0AXW6PUJggCHh3giwuZhfj6UBIGBqoxKtTDKPcm6sgYvomIiMyAu4M1HurrjYf6egOomZ3kalYJzqUXQKsXMWmAL+QmWC3zjQd64ExqAV5dfxa/eDrAV600eg1EHQkbsIiIiMyQTCpBTy8HTB7oh6mR/iYJ3gCgkEnx+eP9IAJ4YdVpVGn1dzyHyJwxfBMREVG78nNW4oMJ4TibXoh3t10ydTlEJsXwTURERO3u3jAPzBgSgBVHkrH9/A1Tl0NkMgzfREREZBRz7+uBcF8nvLbhHFLySk1dDpFJMHwTERGRUVjJJPjvY30hCMBfVpzE+fRCU5dEZHQM30RERGQ0vmolvpzWH0UVWoz7/BDe23YJ5VW6O59IZCYYvomIiMioBnd1wa+zo/FohC++OpiIez89iCMJuaYui8goGL6JiIjI6ByVcrz/SG+senoQAODxr4/jHxvOobCs2sSVEbUvhm8iIiIymcFdXbDz5WF4JroLNpxOR8yH+/HRrivIKqowdWlE7YLhm4iIiEzKWi7F3Pt6YPOsIejr64TP9iVgyPt78dKaMziTmt/gOWVVWpxNK8DOizfZM06dCpeXJyIiog4hzNsR3zw5AMm5pfj+aArWx6Zhc1wmwn2dML6vN3JLKnHlZjGuZBUjVVMGUaw5r6+fE76dPgAqWyvTfgCiJhBEsfY/XfMXERGB2NhYU5dBRERETVBSqcX/TqdjxZFkJOaUQiIAgS62CPGwR4i7A0I87FBcocUbmy7AT63E938ZCC8nG1OXTdRo5uTINxEREXVIdgoZnogKwNRB/kjLL4O7gzWs5dJ6x/molJj5fSweWXIEPzw1EEFu9iaolqhp2PNNREREHZpEIsDf2bbB4A0AUV2dseaZSFTrREz48ihO36ZPnKgjYPgmIiKiTi/UyxE/PRcFRxs5piw7jn1Xsk1dElGD2HZCREREZsHf2RYbnh2M6d+ewNPfxeLuHm4IcLFFoLMtAl
xs0cXFFq72CgiCYOpSyYIxfBMREZHZcLVXYO0zkXj753icSc3H3svZqNb9PreErZUUw7u7YfbIbghyszNhpWSpGL6JiIjIrNhby7F4YjgAQKcXkVlQjqTcUiTnleJqVjE2ns7A9vM3MKG/D14a2Q3enCGFjIjhm4iIiMyWVCLAV62Er1qJYXAFALw8shu+2HcdPx5LwaYzmZgS6YdZMUFwsVOYuFqyBHzhkoiIiCyKi50C88f0xL45w/FwX298dyQZw/6zDx/tuoLiimpTl0dmjuGbiIiILJK3kw3+PaE3dr8SjZgQN/zf3gQM/2A/VhxOQpVWb+ryyEwxfBMREZFF6+pqh8+n9MPmWUPQzd0eC36Ox8iPDmDL2Uzo9RazEDgZCcM3EREREYBwXyesenoQVswYAKWVFH9bfQbjPj+M/VeyGcKpzfCFSyIiIqJbBEHA8BA3DA12xea4DHy46yqeXH4SvmobTIrwxYT+vvBwtDZ1mdSJCaIoWsyPchEREYiNjTV1GURERNRJVGp12HHhJtaeTMOR63mQCMDwEDdMGuCLEd3dIJf+3kQgiiJ0t0bIZVI2F1iyxjInR76JiIiIbkMhk2JcH2+M6+ONlLxSrItNw/rYdOy9nA2FTAKpRIBWXxO6DcFbImBUqDumRQYgsouaK2pSHQzfRERERE3g72yLOaO7Y/bIbjhwNQeHE2pGwqVSATKJAKlEAplEgKa0ChvPZGDb+Zvo5m6HaZH+eLifD+wUjF3EthMiIiKiNldRrcOWs5n4/mgyLmQUwU4hw/h+3gj3cYKnozU8bn0prRjIzRHbToiIiIiMyFouxaMRvpjY3wdn0grww9EUrDmRhu+PptQ5ztFGDg+HmiBeG8prfrWBh4M1bBVSAMCfh0pVtlYcSe+k+NSIiIiI2okgCOjnp0I/PxXeG98LNwsrcKOwAjeLymt+LaxAZkEFsooqcDGzCLkllU26rkwiYGCgGnf3cMfIHm7wd7Zt509CbYXhm4iIiMgIrOVSBLjYIsDl9kG5SqtHdnGFIaSXV+sAALWvbAqCAFEUcT2nFHsuZeGdrfF4Z2s8gtzscHcPN0QHuyLU2xGONnIjfKI7q9TqcC69EFZSCRRyCRQyKaxkEihkEtgpZLCWS01dotGx55uIiIiok0rNK8Ovl7Kw53IWjidqoL0144qPygY9PR0Q6uWInl4O6O5hD09Ha6NOgSiKIqZ8fRxHruc1uF8qERDm7YhBgWoMDFBjQIAajsqO8UNDazWWORm+iYiIiMxAUUU1TqXk49KNIsRnFiH+RhGScksN/eJSiQAPB2t4OVnDy8kG3k428HKy+UOvuQ1USnmbTY249VwmXlh1Bi+OCEJfPydUVutRqdWjUqtDlVaPG4UViE3OR1xaAap0eggCEOJujyFBLnhxRBCclFZtUocp8IVLIiIiIjPnYC1HTIgbYkLcDNvKqrS4fLMYV28WI6OgvOYrvxynU/Pxy7kbhpHyWlYySU0Yd7BGmLcjBgSoMSBABWc7RbNqKavSYtEvl9DT0wEvj+wGqeT2gb6iWoezaQU4kaTBiWQNvj+ajB0XbuL/HuuL/v6q5v0hdAIc+SYiIiKyQDq9aOgvv1lYgZtFv/eaZxSU40JGISq1egBAF1dbQ2vIqFB32Fs33h7y4a4r+GxvAtY/G4UBAepm1XU2rQAvrD6NGwUVmDM6BE8P7QJJI+G9I+LINxERERHVIZUI8HS0gaejTYP7K7U6XMgoxImkfJxM1uCX8zew5mQaeh5ywOqZkbd9qTM1rwxfHUzEQ328mh28ASDc1wlbXxyK1386h/e2X8bxJA0+nBgOlW3dNpSUvFLsv5KDwwm5KK7QNnit4SGueCa6a7NraE8M30RERERUj0ImRX9/Nfr7q/EcukKvF7ErPgsvrj6NGctP4IenBsG2gbnG3/klHjKJgNfv69HiezvayPHFlH744VgK/rX1Eu7/v9/w4aPh0OpE7LuSjQNXcpCYWw
oA8HdWwt3eusHr6DtgfwfDNxERERHdkUQi4N4wD/zf5L6Yteo0Zv4Qi2+mD6gzXeCBqznYHZ+F1+4NgYdjw4G4qQRBwBNRAejrq8ILq0/j8WXHAQAKmQRRXZ3xRJQ/hoe4NTp1Y0fEnm8iIiIiapafTqXj7+vPYmQPNyyZ2h9yqQRVWj3u/fQg9HoRO2cPg0LWdnN4F1VU46dT6QhwsUVUF+cOPz84e76JiIiIqM080t8HZVVavLn5Iv6+7iw+ntQH3x1JRmJOKb59MqJNgzdQM5PLjCGBbXpNU2H4JiIiIqJmmxYVgJJKHf694zIEAdhzKRsxIa4Y0d3d1KV1aAzfRERERNQizw3vitJKLf67LwFyqYD5Y0JNXVKHx/BNRERERC3291HdYG8tg8rWCoGd7OVHU2D4JiIiIqIWEwShw82l3ZFJTF0AEREREZGlYPgmIiIiIjISo4fvtLQ0TJgwAY6OjnBwcMD48eORmprapHMrKiowZ84ceHp6wsbGBlFRUTh48GA7V0xERERE1DaMGr7LysowYsQIXL58Gd999x1++OEHXLt2DTExMSgtLb3j+U899RSWLVuGhQsXYuvWrfD09MTo0aMRFxfX/sUTEREREbWSUV+4XLZsGRITE3HlyhUEBQUBAHr37o3g4GB89dVXeOWVV2577tmzZ7Fq1Sp8++23mDFjBgAgOjoaoaGhmD9/PrZs2WKUz0BERERE1FJGHfnesmULIiMjDcEbAAIDAzFkyBBs3rz5jufK5XJMmjTJsE0mk2Hy5MnYuXMnKisr261uIiIiIqK2YNTwffHiRYSFhdXbHhoaivj4+DueGxgYCKVSWe/cqqoqJCQktGmtRERERERtzajhW6PRQKVS1duuVquRn5/f4nNr9xMRERERdWRmv8jO0qVLsXTpUgBATk6OiashIiIiIktm1JFvlUrV4Aj37Ua1m3ou8PsI+J/NnDkTsbGxiI2NhaurawuqJiIiIiJqG0YN36Ghobh48WK97fHx8ejZs+cdz01KSkJZWVm9c62srOq8xElERERE1BEZNXyPHTsWx44dQ2JiomFbcnIyDh8+jLFjxzZ67pgxY1BdXY3169cbtmm1WqxduxajRo2CQqFot7qJiIiIiNqCUcP3008/jYCAAIwbNw6bN2/Gli1bMG7cOPj6+uKZZ54xHJeSkgKZTIaFCxcatvXt2xeTJk3Cyy+/jK+//hp79uzB5MmTkZSUhLffftuYH4OIiIiIqEWM+sKlra0t9u7di9mzZ2PatGkQRRF33303PvnkE9jZ2RmOE0UROp0Oer2+zvnLly/HG2+8gXnz5qGgoADh4eHYsWMH+vXr16T7JycnIyIiok0/U1Pk5OSw39xC8FlbDj5ry8FnbTn4rC1Hez/r5OTk2+4TRFEU2+3OBACIiIhAbGysqcsgI+Czthx81paDz9py8FlbDlM+a6O2nRARERERWTKGbyIiIiIiI2H4NoKZM2eaugQyEj5ry8FnbTn4rC0Hn7XlMOWzZs83EREREZGRcOSbiIiIiMhIGL7bSVpaGiZMmABHR0c4ODhg/PjxSE1NNXVZ1AobNmzAI488An9/f9jY2CAkJARz585FcXFxnePy8/Px17/+FS4uLrC1tcXIkSNx/vx5E1VNbeXee++FIAiYN29ene183uZh27ZtGDZsGOzs7ODg4ICIiAjs3bvXsJ/P2TwcPnwYo0aNgpubG+zt7dGvXz98++23dY6pqKjAnDlz4OnpCRsbG0RFReHgwYMmqpiaIj09HS+++CKioqKgVCohCEKDU/019dnq9Xq89957CAgIgLW1NcLDw/HTTz+1Wb0M3+2grKwMI0aMwOXLl/Hdd9/hhx9+wLVr1xATE4PS0lJTl0cttHjxYkilUrz77rvYsWMHnnvuOSxZsgT33HOPYU56URQxZswY7NixA5999hl++uknVFdXIyYmBunp6Sb+BNRSq1evxtmzZ+tt5/M2D1999RXGjRuH/v
37Y+PGjVi/fj0mTpyIsrIyAHzO5uLcuXMYOXIkqqursWzZMvzvf//DgAED8NRTT2HJkiWG45566iksW7YMCxcuxNatW+Hp6YnRo0cjLi7OdMVToxISErBu3TqoVCoMHTr0tsc19dm++eabWLBgAV544QVs374dkZGRmDhxIrZt29Y2BYvU5j755BNRIpGI165dM2xLTEwUpVKp+OGHH5qwMmqN7Ozsetu+++47EYC4Z88eURRFcdOmTSIAce/evYZjCgoKRJVKJb744otGq5XajkajEd3d3cVVq1aJAMQ33njDsI/Pu/NLSkoSra2txY8//vi2x/A5m4e5c+eKcrlcLC4urrM9MjJSjIyMFEVRFOPi4kQA4rfffmvYX11dLXbr1k0cM2aMUeulptPpdIbfL1u2TAQgJiUl1Tmmqc82KytLtLKyEufPn1/n/BEjRoi9evVqk3o58t0OtmzZgsjISAQFBRm2BQYGYsiQIdi8ebMJK6PWaGglrAEDBgAAMjIyANQ8ey8vL8TExBiOcXR0xJgxY/jsO6l//OMfCAsLw2OPPVZvH5935/ftt99CIpHg2Wefve0xfM7moaqqCnK5HDY2NnW2Ozo6Gv71csuWLZDL5Zg0aZJhv0wmw+TJk7Fz505UVlYatWZqGonkznG2qc92586dqKqqwtSpU+ucP3XqVJw/fx5JSUmtr7fVV6B6Ll68iLCwsHrbQ0NDER8fb4KKqL0cOHAAANCjRw8AjT/71NRUlJSUGLU+ap1Dhw7h+++/x+eff97gfj7vzu/QoUPo3r071qxZg65du0ImkyEoKKjOM+dzNg9PPvkkAOBvf/sbMjMzUVBQgGXLlmHPnj2YPXs2gJpnHRgYCKVSWefc0NBQVFVVISEhwdhlUxtp6rO9ePEiFApFnQHU2uMAtEmOY/huBxqNBiqVqt52tVqN/Px8E1RE7SEjIwPz58/HyJEjERERAaDxZw+Az78TqaqqwjPPPINXX30VISEhDR7D5935ZWZm4tq1a5gzZw5ef/117Nq1C/fccw9eeOEFfPrppwD4nM1FWFgY9u/fj82bN8Pb2xsqlQqzZs3Cl19+icmTJwO487PWaDRGrZnaTlOfrUajgZOTEwRBaPS41pC1+gpEFqikpATjxo2DTCbD8uXLTV0OtYP//Oc/KC8vxxtvvGHqUqgd6fV6FBcXY8WKFRg/fjwAYMSIEUhOTsZ7772Hv/3tbyaukNrKtWvX8MgjjyA0NBRffvklbGxssHnzZjz77LOwtrbGlClTTF0iWQiG73agUqkaHAm53U9d1LmUl5djzJgxSExMxIEDB+Dj42PY19izr91PHV9qaioWLVqEr7/+GpWVlXX6PCsrK1FQUAB7e3s+bzPg7OyMa9eu4Z577qmzfdSoUdixYwdu3LjB52wm/vnPf0Iul2Pr1q2Qy+UAgLvvvht5eXl46aWX8Nhjj0GlUiElJaXeubXPunb0kzqfpj5blUqFgoICiKJYZ/S7Lf8bYNtJOwgNDcXFixfrbY+Pj0fPnj1NUBG1lerqakyYMAGxsbHYtm0bevXqVWd/Y8/ez88PdnZ2xiqVWiExMREVFRWYOnUqVCqV4QuomXJSpVLh/PnzfN5moLaP83YkEgmfs5k4f/48wsPDDcG71sCBA5GXl4fs7GyEhoYiKSnJMM1krfj4eFhZWdXrA6bOo6nPNjQ0FJWVlbh+/Xq94wC0SY5j+G4HY8eOxbFjx5CYmGjYlpycjMOHD2Ps2LEmrIxaQ6/XY8qUKdi7dy82bdqEyMjIeseMHTsWGRkZhhcxAaCoqAg///wzn30n0qdPH+zbt6/eF1Dzxvu+ffsQFBTE520GHn74YQA1Mxz80Y4dO+Dj4wMPDw8+ZzPh4eGBuLg4VFVV1dl+/PhxWFtbQ61WY8yYMaiursb69esN+7VaLdauXYtRo0ZBoVAYu2xqI019tvfeey/kcjlWrlxZ5/wff/wRYWFhCAwMbH0xbTJhIdVRUl
Iidu3aVQwLCxM3bdokbt68Wezdu7cYGBhYb35R6jyeffZZwzzPR48erfOVlpYmimLNXKNRUVGij4+PuHr1anHHjh1idHS0qFKpxNTUVBN/Amot/Gmebz7vzk+v14sxMTGiWq0WlyxZIu7cuVP861//KgIQly9fLooin7O5WL9+vQhAHDVqlLhp0yZx586d4qxZs0QA4uzZsw3HTZo0SXRychKXLVsm/vrrr+IjjzwiKhQK8dSpUyasnu5k/fr14vr16w3/r/7iiy/E9evXi/v37zcc09Rn+49//ENUKBTihx9+KO7bt0989tlnRUEQxJ9//rlNamX4bicpKSni+PHjRXt7e9HOzk4cN25cvQnfqXPx9/cXATT49dZbbxmOy8vLE2fMmCGqVCrRxsZGHDFihBgXF2e6wqnN/Dl8iyKftzkoLCwUn3/+edHNzU2Uy+Vir169xJUrV9Y5hs/ZPGzbtk2Mjo4WXVxcRDs7OzE8PFz8/PPPRa1WazimrKxMnD17tuju7i4qFApx4MCB4r59+0xXNDXJ7f7/HB0dbTimqc9Wq9WK77zzjujn5ydaWVmJvXr1EtevX99mtQq3CiYiIiIionbGnm8iIiIiIiNh+CYiIiIiMhKGbyIiIiIiI2H4JiIiIiIyEoZvIiIiIiIjYfgmIiIiIjIShm8iIjOwYsUKCILQ4JeTk5PJ6nryySfh4+NjsvsTEXU0MlMXQEREbWf9+vX1wq5Mxr/qiYg6Cv6NTERkRvr06YOgoCBTl0FERLfBthMiIgtR25py8OBBPPTQQ7Czs4OzszNmzZqF8vLyOsfeuHEDTzzxBFxcXKBQKNC7d2/8+OOP9a6ZlJSEadOmwcPDAwqFAl26dMFLL71U77gzZ85g6NChUCqVCA4Oxpdfftlun5OIqCPjyDcRkRnR6XTQarV1tkkkEkgkv4+1TJ06FY8++iief/55nDhxAgsXLkRpaSlWrFgBACgtLUV0dDTy8/Px7rvvwtfXFz/++COmTZuGsrIyzJw5E0BN8B44cCCUSiUWLlyI4OBgpKamYteuXXXuX1RUhMcffxwvv/wy5s+fj+XLl+O5555DSEgIYmJi2vcPhIiog2H4JiIyI927d6+37YEHHsDWrVsN399///1YvHgxAGDUqFEQBAHz58/HP//5T3Tr1g3Lly/HtWvXsG/fPgwfPhwAcN999yErKwvz5s3DU089BalUirfeegvl5eU4e/YsvLy8DNefPn16nfsXFxfjiy++MATtYcOGYefOnVi9ejXDNxFZHLadEBGZkY0bN+LkyZN1vj755JM6xzz66KN1vp88eTL0ej1OnDgBADh48CC8vb0NwbvW1KlTkZOTg/j4eADArl278OCDD9YJ3g1RKpV1QrZCoUC3bt2Qmprawk9JRNR5ceSbiMiMhIWF3fGFS3d39wa/z8jIAABoNBp4enrWO8/Dw8OwHwDy8vKaNI2gSqWqt02hUKCiouKO5xIRmRuOfBMRWZisrKwGv/f29gYAqNVq3Lx5s955tdvUajUAwMXFxRDYiYioaRi+iYgszLp16+p8v2bNGkgkEgwaNAgAEB0djfT0dBw+fLjOcatWrYKbmxt69uwJoKZffOvWrbhx44ZxCiciMgNsOyEiMiNxcXHIzc2ttz0iIsLw+23btmHOnDkYNWoUTpw4gbfffhtPPPEEgoODAdSsSvnpp59i/PjxWLRoEXx8fLBy5Urs3r0bX331FaRSKQDg7bffxrZt2zB48GD885//RFBQEDIyMrBjx44GpyUkIiKGbyIiszJx4sQGt+fk5Bh+/+OPP+LDDz/EkiVLYGVlhaefftow+wkA2Nra4sCBA3jttdfw+uuvo7i4GCEhIfjhhx8wdepUw3EBAQE4duwY5s2bh7lz56KkpATe3t4YN25c+31AIqJOThBFUTR1EURE1P5WrFiBGTNm4Nq1a1wFk4jIRNjzTURERERkJAzfRERERERGwrYTIiIiIiIj4cg3ER
EREZGRMHwTERERERkJwzcRERERkZEwfBMRERERGQnDNxERERGRkTB8ExEREREZyf8D1XF5BtzdPiQAAAAASUVORK5CYII=", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "plt.figure(figsize=(12, 6))\n", + "plt.plot(losses)\n", + "plt.xlabel(\"Epoch\")\n", + "plt.ylabel(\"Loss\")\n", + "plt.show()" + ] + }, + { + "cell_type": "markdown", + "id": "9a431548", + "metadata": {}, + "source": [ + "## 3. Results" + ] + }, + { + "cell_type": "markdown", + "id": "70de73a0", + "metadata": {}, + "source": [ + "Let's now visualize the trajectories produced using the learned initializations, running the optimizer for a few more iterations. The following functions will be useful to plot the trajectories from the variable value dictionaries." + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "id": "6c5dbbb8", + "metadata": {}, + "outputs": [], + "source": [ + "def get_trajectory(values_dict):\n", + " trajectory = torch.empty(values_dict[f\"pose_0\"].shape[0], 4, trajectory_len, device=device)\n", + " for i in range(trajectory_len):\n", + " trajectory[:, :2, i] = values_dict[f\"pose_{i}\"]\n", + " trajectory[:, 2:, i] = values_dict[f\"vel_{i}\"]\n", + " return trajectory\n", + "\n", + "def plot_trajectories(initial_traj_dict, solution_traj_dict, include_expert=False):\n", + " initial_traj = get_trajectory(initial_traj_dict).cpu()\n", + " sol_traj = get_trajectory(solution_traj_dict).detach().clone().cpu()\n", + "\n", + " sdf = th.eb.SignedDistanceField2D(\n", + " th.Variable(batch[\"sdf_origin\"]),\n", + " th.Variable(batch[\"cell_size\"]),\n", + " th.Variable(batch[\"sdf_data\"]),\n", + " )\n", + " trajectories = [initial_traj, sol_traj]\n", + " if include_expert:\n", + " trajectories.append(batch[\"expert_trajectory\"])\n", + " figs = theg.generate_trajectory_figs(\n", + " batch[\"map_tensor\"], \n", + " sdf, \n", + " trajectories,\n", + " robot_radius=0.4, \n", + " labels=[\"initial trajectory\", \"solution trajectory\", \"expert\"], \n", + " fig_idx_robot=1,\n", + " figsize=(6, 6)\n", + " )\n", + " for fig in figs:\n", + " fig.show()" + ] + }, + { 
+ "cell_type": "markdown", + "id": "e21dad05", + "metadata": {}, + "source": [ + "### 3.1. Trajectories initialized from straight lines" + ] + }, + { + "cell_type": "markdown", + "id": "54a90684", + "metadata": {}, + "source": [ + "As reference, below we show the quality of trajectories obtained after 10 optimizer iterations when initialized from a straight line. As the plots show, the trajectories produced from a straight line are of bad quality; more than 10 iterations are neeed to produce good quality trajectories (in part 1, we used 50). " + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "id": "ae5482d5", + "metadata": {}, + "outputs": [ + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAZsAAAGQCAYAAAB4X807AAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8vihELAAAACXBIWXMAAAsTAAALEwEAmpwYAACIV0lEQVR4nO2dd3hUZdqH75nMTCa9QOhVEZAAiYgIoiKCyKpg79gVUFlXXV172d3Pta+rrquCgg0bugKyAi4CFqyoCIKutEiVloTUmUxmzvfHe545MwQVJEMSeG6uuTI5bd4T4PzydJdlWRaKoiiKkkDcDb0ARVEUZd9HxUZRFEVJOCo2iqIoSsJRsVEURVESjoqNoiiKknBUbBRFUZSE02Bis3btWs444wyysrLIzMzktNNOY82aNQ21HEVRFCWBuBqizqaqqoqCggKSk5P5v//7P1wuF7fffjtVVVUsXryYtLS0vb0kRVEUJYF4GuJDJ0yYwKpVq/jf//5Hly5dAOjduzcHHXQQTz/9NNdff31DLEtRFEVJEA1i2QwZMoRAIMCCBQvitg8aNAiA999/f28vSVEURUkgDWLZLF26lJNPPrnO9vz8fKZMmfKr5zdv3pxOnTolYGWKoijKb6WoqIitW7fudF+DiE1xcTE5OTl1tufm5lJSUvKr53fq1ImFCxcmYmmKoijKb6Rv374/u69BxOa3MH78eMaPHw/Ali1bGng1iqIoyu7QIKnPOTk5O7Vgfs7iARg9ejQLFy5k4cKF5OXlJXqJiqIoSj3SIGKTn5/P0qVL62xftmwZPXr0aIAVKYqiKImkQdxoI0eO5IYbbmDVqlUccMABgAksLViwgPvuu68hlqQoym8gFAqxbt06AoFAQy9F2Yv4/X7atWuH1+vd5XMaJPW5srKSgoICUlJSokWdd9xxB+Xl5SxevJj09PRfPL9v376aIKAojYDVq1eTkZFBs2bNcLlcDb0cZS9gWRbbtm2jvLyczp07x+37pWdzg7jR0tLSmDt3Ll27duWCCy7g/PPPp3PnzsydO/dXhUZRlMZDIBBQodnPcLlcNGvWbLet2QbLRuvQoQNvvvlmQ328oij1hArN/sdv+TvXrs+KoihKwmkydTaKojR+Wj3Uik2Vm+rtei3TWvLTDT/94jFHHHEEH3/88S8ec/nll3P99dfTo0cP/va3v3Hrrbfu1vnp6elUVFTEbSstLeXll1/mqquu+pW7qMsJJ5zAyy+/THZ29m6dN3/+fHw+H0ccccRuf2ZDo5aNoij1Rn0Kza5e79eEAuCZZ56JllX87
W9/2+3zd0ZpaSn/+te/drqvtrb2F8995513dltowIjN7q7319ayt1CxURSlSSNJRfPnz+eYY47hjDPOoHv37px//vlIsu0xxxzDwoULufnmm6murqawsJDzzz8/7vyKigqGDBlCnz596NWrF9OmTfvFz7355ptZuXIlhYWF3HjjjcyfP5+jjjqKkSNHRoXtlFNO4dBDDyU/Pz/aAQVMyy3pIfbSSy/Rr18/CgsLGTNmDOFwGIBZs2bRp08fCgoKGDJkCEVFRTz11FM88sgjFBYW8uGHH1JUVMSxxx5L7969GTJkSHQm2MUXX8zYsWM5/PDD+dOf/sRBBx0U7bwSiUTo0qXLXu/Eom40RVH2Gb7++muWLl1KmzZtGDhwIAsWLODII4+M7r/vvvv45z//yaJFi+qc6/f7eeutt8jMzGTr1q3079+fkSNH/mww/L777uPbb7+NXmv+/Pl89dVXfPvtt9GU4IkTJ5Kbm0t1dTWHHXYYp59+Os2aNYte47vvvuO1115jwYIFeL1errrqKiZPnszvfvc7rrjiCj744AM6d+5McXExubm5jB07lvT0dG644QYARowYwUUXXcRFF13ExIkTueaaa5g6dSoA69at4+OPPyYpKYmsrCwmT57Mtddey5w5cygoKNjrnVjUslEUZZ+hX79+tGvXDrfbTWFhIUVFRbt8rmVZ3HrrrfTu3ZuhQ4eyfv16Nm3aPbdgv3794mpPHnvsMQoKCujfvz9r165l+fLlcce/9957fPnllxx22GEUFhby3nvvsWrVKj799FOOPvro6LVyc3N3+nmffPIJ5513HgAXXHABH330UXTfmWeeSVJSEgCXXnopL7zwAmAE8JJLLtmt+6oP1LJRFGWfITk5Ofo+KSlpt+IVkydPZsuWLXz55Zd4vV46deq027UksVOG58+fz5w5c/jkk09ITU3lmGOOqXM9y7K46KKLuPfee+O2v/3227v1ub+2lvbt29OyZUvmzp3L559/zuTJk/f4+ruLWjaKouxXeL1eQqFQne3bt2+nRYsWeL1e5s2bx48//viL18nIyKC8vPxn92/fvp2cnBxSU1P5/vvv+fTTT+scM2TIEN544w02b94MmGbEP/74I/379+eDDz5g9erV0e07+8wjjjiCV199FTBiedRRR/3sei6//HJGjRoVZ/HsTVRsFEWpN1qmtWzU1wPTQb53797RBAHh/PPPZ+HChfTq1YsXXniB7t27/+J1mjVrxsCBA+nZsyc33nhjnf3Dhw+ntraWgw8+mJtvvpn+/fvH7Xe5XPTo0YP/+7//Y9iwYfTu3ZvjjjuOjRs3kpeXx/jx4znttNMoKCjg7LPPBkyM5q233oomCDz++ONMmjSJ3r178+KLL/Loo4/+7HpHjhxJRUVFg7jQoIF6o+0p2htNURoH3333HQcffHBDL6NJEQ6HadGiBT/99NNuNbLcUxYuXMh1113Hhx9+WC/X29nf/S89mzVmoyiKshfJz8/n8ssv36tCc9999/Hkk082SKxGULFRFEXZi3z//fd7/TNvvvlmbr755r3+ubFozEZRFEVJOCo2iqIoSsJRsVEURVESjoqNoiiKknBUbBRFqTdatQKXq/5erVrV7/qkIecvMXXqVJYtWxb9/s4772TOnDl7/NmLFi3inXfe2e3zNmzYwBlnnPGbPvO5555jw4YNv+nc+kbFRlGUemM3W4nt9evtCjuKzV/+8heGDh26x9f9JbH5pbY6bdq04Y033vhNn/lbxEa6Ttc3KjaKojRZKisrOfHEEykoKKBnz5689tprgGlwecghh9CrVy8uvfRSgsFgnXNltADAG2+8wcUXX8zHH3/M9OnTufHGGyksLGTlypVcfPHF0Yf9z123U6dO3HXXXdHxBDumN9fU1HDnnXfy2muvUVhYyGuvvcbdd9/NBRdcwMCBA7ngggsoKiriqKOOok+fPvTp0yc6t6aoqIiePXsCRghuvPFGDjvsMHr37s3TT
z8d/Yz777+fXr16UVBQwM0338wbb7zBwoULOf/88yksLKS6uvoX13/TTTfRp08f7rvvPvr06RO97vLly+O+/62o2CiK0mSZNWsWbdq04ZtvvuHbb79l+PDhBAIBLr74Yl577TWWLFlCbW0tTz755C5d74gjjmDkyJE8+OCDLFq0iAMPPDC679eu27x5c7766iuuvPJKHnroobjr+nw+/vKXv3D22WezaNGiaPuZZcuWMWfOHF555RVatGjBf//7X7766itee+01rrnmmjrre/bZZ8nKyuKLL77giy++YMKECaxevZqZM2cybdo0PvvsM7755hv+9Kc/ccYZZ9C3b18mT57MokWLcLlcv7j+Zs2a8dVXX3HbbbeRlZUVHZ0wadKkemlxo2KjKEqTpVevXvz3v//lpptu4sMPPyQrK4v//e9/dO7cma5duwJw0UUX8cEHH+zxZ/3adU877TQADj300F0ebTBy5EhSUlIACIVCXHHFFfTq1YszzzwzzpUnvPvuu7zwwgsUFhZy+OGHs23bNpYvX86cOXO45JJLSE1NBXY+kuDX1i8CCKZp56RJkwiHw7z22mvRMQZ7gnYQUBSlydK1a1e++uor3nnnHW6//XaGDBnCySefvEvnxg5F291RAjtDxhvszmiD2DEAjzzyCC1btuSbb74hEong9/vrHG9ZFo8//jjHH3983PbZs2fvwcrrruX000/nz3/+M8ceeyyHHnpo3MC334paNoqiNFk2bNhAamoqo0aN4sYbb+Srr76iW7duFBUVsWLFCgBefPFFBg0aVOfcli1b8t133xGJRHjrrbei239udMCuXvfn2JWRBK1bt8btdvPiiy/uNFB//PHH8+STT0ZHJPzwww9UVlZy3HHHMWnSJKqqqoCdjyTYnfX7/X6OP/54rrzyynrrEq1ioyhKvdGynicC/Nr1lixZQr9+/SgsLOTPf/4zt99+O36/n0mTJnHmmWfSq1cv3G43Y8eOrXPufffdx0knncQRRxxB69ato9vPOeccHnzwQQ455BBWrlwZ3b6r1/05Bg8ezLJly6IJAjty1VVX8fzzz1NQUMD3338fZ2mIFXb55ZfTo0cP+vTpQ8+ePRkzZgy1tbUMHz6ckSNH0rdvXwoLC6Mxo4svvpixY8dSWFiIZVm7tf7zzz8ft9vNsGHDdvkefwkdMaAoym9GRwwkni+//JLrr7+e999/f69+7kMPPcT27dv561//utP9OmJAURRlH2HhwoWcd9553HfffXv1c0899VRWrlzJ3Llz6+2aKjaKoiiNlL59+/LDDz/s9c+NjWHVFxqzURRFURKOio2iKIqScFRsFEVRlISjYqMoiqIkHBUbRVHqj1aAqx5f9TxiQJg/fz4nnXQSANOnT9/r2V4A9957L126dKFbt26/2gHgmmuuiWsc2hTRbDRFUeqP+h4JsBvXsywLy7Jwu3fvd+iRI0cycuTI3VzYnrFs2TJeffVVli5dyoYNGxg6dCg//PADSUlJdY5duHAhJSUle3V9iUAtG0VRmixFRUV069aNCy+8kJ49e7J27VquvPJK+vbtS35+PnfddVf02FmzZtG9e3f69OnDv//97+j25557jnHjxgHEjRMAZwzBxo0bOfrooyksLKRnz558+OGHe7TuadOmcc4555CcnEznzp3p0qULn3/+eZ3jZKTAAw88sEef1xhQy0ZRlCbN8uXLef755+nfvz8A99xzD7m5uYTDYYYMGcLixYvp2rUrV1xxBXPnzqVLly5xHY53hZdffpnjjz+e2267jXA4HO1BFst1113HvHnz6mw/55xzuPnmm+O2rV+/PrpegHbt2rF+/fo65/7zn/9k5MiRce10mioqNoqiNGk6duwY9+B+/fXXGT9+PLW1tWzcuJFly5YRiUTo3LkzBx10EACjRo1i/Pjxu/wZhx12GJdeeimhUIhTTjmFwsLCOsc88sgje3wvsWzYsIEpU6Ywf/78er1uQ6FuNEVRm
jSxDStXr17NQw89xHvvvcfixYs58cQTd2t8gMfjIRKJABCJRKipqQHg6KOP5oMPPqBt27ZcfPHFvPDCC3XOve666ygsLKzz2lnyQdu2bVm7dm30+3Xr1tG2bdu4Y77++mtWrFhBly5d6NSpE1VVVXTp0mWX76WxoZaNoij7DGVlZaSlpZGVlcWmTZuYOXMmxxxzDN27d6eoqIiVK1dy4IEH8sorr+z0/E6dOvHll19y1llnMX369Ggr/x9//JF27dpxxRVXEAwG+eqrr7jwwgvjzt0dy2bkyJGcd955XH/99WzYsIHly5fTr1+/uGNOPPFEfvrpp+j36enp0fEATREVG0VR6o+W1G9G2m6OLCgoKOCQQw6he/futG/fnoEDBwJmPMD48eM58cQTSU1N5aijjtrpbJkrrriCk08+mYKCAoYPHx61mubPn8+DDz6I1+slPT19p5bN7pCfn89ZZ51Fjx498Hg8PPHEE9FMtBNOOIFnnnmGNm3a7NFnNDZ0xICiKL8ZHTGw/7K7IwY0ZqMoiqIkHBUbRVEUJeGo2CiKskc0QU+8sof8lr/zehWbN954g9NPP52OHTuSkpJCt27duOWWW+oE4kpKSrj88stp3rw5aWlpDB06lCVLltTnUhRF2Qv4/X62bdumgrMfYVkW27Ztw+/379Z59ZqN9tBDD9GhQwf+9re/0a5dO77++mvuvvtu5s2bx8cff4zb7cayLEaMGEFRURGPP/44OTk53HvvvQwePJhFixbRrl27+lySoigJpF27dqxbt44tW7Y09FKUvYjf79/tZ3W9is3bb79NXl5e9PtBgwaRm5vLRRddxPz58zn22GOZPn06CxYsYO7cuQwePBiAAQMG0LlzZx544AEee+yx+lySoigJxOv10rlz54ZehtIEqFc3WqzQCIcddhhAtO/P9OnTadOmTVRoALKyshgxYgTTpk2rz+UoiqIojYSEJwi8//77ANF87KVLl9KzZ886x+Xn57NmzRoqKioSvSRFURRlL5NQsVm/fj133nknQ4cOpW/fvgAUFxeTk5NT59jc3FyAfWJuwy8SAWqBkP2qBTS2qijKPk7C2tVUVFRw8skn4/F4mDRp0h5fb/z48dEurU0uGFkLVNmvIEZkwAiPC/O34AfS7Ffd+UmKoihNmoSITXV1NSNGjGDVqlW8//77cVkLOTk5O7VeiouLo/t3xujRoxk9ejRA1Epq9ISAUqCCeAsmAoTt9xZGgCqBbYAXyAFy0c51iqLsM9S7Gy0UCnHGGWewcOFC3nnnHXr16hW3Pz8/n6VLl9Y5b9myZXTo0KHJz9kGjICUA2sxYiMusxr7FSH+J2/hiE8A2AD8DyhDXWyKouwT1KvYRCIRzj//fObOncvUqVPjBhoJI0eOZP369dHEATBtwd9+++29Pgc8IUSALcBP9nsRGDfGZVaDsXTKMIJUab+qMW42EaMg8ANGeCJ79Q4URVHqnXp11Fx99dVMmTKF2267jbS0ND799NPovnbt2tGuXTtGjhzJgAEDGDVqFA8++GC0qNOyLP70pz/V53L2PiI05RiLJID5CYftbbWYeEwSRngg3p0m2ySmA/CjfV57tLmQoihNlnodMdCpUyd+/PHHne676667uPvuuwETn7nhhhuYOnUqgUCAAQMG8Pe//52CgoJd+pxGOWLAwsRciu3vazBCI0kBXoxYRHBcarU7XEPEJsk+XrbVAAdgBEdRFKWR8kvPZp1nU19UAesxolODEQyxcLwYCyZov8CxcHaGJBBYGMFyYYSpD5CdkNUriqLsMTrPJtGEMe6zHYXGhRGaoP19CPDZryScmpsARqwkfhMrSGIJhYFviHexKYqiNBE0ubY+kAB/GCMQFRihceME/cVCkcwzOV5cZ7GyL1YQOC41F7AVKAIOStidKIqiJAQVmz3FwklRlpqZCE68JoQTf6klXpR8u3D9CMbycQGpwLdAW/u9oihKE0HFZk8JYSwbsVICGBERi
0YEJWC/dhQZC8edVhuzzYXTXUCy10IYd9wPQGGC7kdRFCUBqNjsKUGc9OUgxh0WwghLMkY4qu19Phy3mcRigjiFm+6Y/VbMMbJP4j2rgU5osoCiKE0GTRDYUwIYUbAwwpCEsWo8MftrMMIjQiIJA2LpeO1XEuZvRF6x+1w4whYAVuKInKIoSiNHxWZPkRiNpCrX2t8n4Vg44jaLAJVgVcHx/57GuJmLHQEi5jo1GGuo2j4/ZF9bXHA1mJqerYm9NUVRlPpC3Wh7SiTm5cYIhKQsV+EkB9hCgwWPLVnEu6+dzLueara1v4Nn8m8mLZTmWCqumK+STi2I1RPGZKZlY6wmRVGURoxaNvWBWDTgtKSR1GXpGlBpf++B3x/amz6HL4PaFF6dfDSFyw/h88DnjsvMg5MY4MHZ7sURMelCsA5t1qkoSqNHxWZPkd5n4Dz0xRqJjduIaw1w42bWxT3ISquFVcex4uu+HLH+CP6y7S/UWjE9bCRJQF6SpSaJBtUYV1p5om5OURSlflCx2VNScWbV1GJ+ojU4wiBTOcWdZgFVkJcCD59n1Cj1v08Rrk7nruK7OGrtUawoX2EsoQriv5YD2+3riOgEMaMMNFlAUZRGjIrNnpKMsWDE+hCBkb5n1THvbaEhDHjhkqNhYFeoKs/k5E+/pZ27HZ8GP6VwUyETKidgJVmOG02+umKuW2V/XwFsTvidKoqi/GZUbPaUZMxI5xDO3Jkw5icrmWnyU5ZOz7aV43bDkxdDkhumf9CO56uWcV7KeVRalYzePppTtp3C5nCMioi1lGJ/rhcjODLaoDpxt6k0PJZlEYlECIfDDfqKRCI0wf69SgOj2Wh7ihvIwwxLE1GRdjXBmG1SH+ONOTcMvZrB9UPgwf/CjS9n8Pntkzkp5SSuLLmS6YHpfLrpU57NfJaTfCeZ6/nta4mgiVVVBWzEFHvqrxD7JBUVFSxatIiioqIGfdi3aNGCPn360KJFiwZbg9L0ULGpD3IxlkZwh+0xVky0gDN2aFql2XbXKfDal/DVGvjXPPj94HM5MvdILtp+EfNq5zGidARjfGN42P0waf40xx0nYwq8GBHaikmFzk3crSoNR0lJCW+++SbTp08nEmm48a2HHXYYeXl5KjbKbqFiUx94MRbF1xgXF8QH7MMY4Ykt7hSxcENaMjx+Hpz8T7jtLTi9G7TPas+cjDk8EnyEW6tu5emap5mbNJeXXC/Rz9cvPjVa0qr9mNhNOrvW5FP5WSzLIhwOEwqFGtxl5PV68Xg8hEIhtmzZ0uCWTfv27QkGd/zNSlF+GRWb+qINpmfZVoz4xI55riHetRVbg2Mzshec3BOmfQvXToPXLzMp0n9M/SPHuY5jVPUoloSXcMRPR3BH7R3c1vE2PJbHsZyky7QbE79pQ3x3AmW3sCyLJUuWsGDBAioqKhpsHX6/nwEDBtCnT58GW4Oi1AcqNvVFEqYT82yMFRM7AiA2O23HVGjZVg2PnQFzfoApX8PMpfC7fKAGert683mLz7mt4jb+XvF37t56NzMrZ/Jiqxc5yGMPt5GkARcmfpQBZCbqZvd9IpEI33zzDU888QQ//fRTg60jJyeHpKSkXR6ZriiNFRWb+iQTM7r5I4xlI90DJGEAnNiNIIPUkqBDc/jLifDHt+Cq1+DbP0KanX3mD/l5uNnDnJh6IhcVX8Rn1Z9RWFTIP6x/cHnG5bi8LvM5ze3P3WzOixM1ZZexLItgMEhZWRmlpaUNtg63200gEGhwV56i7Cmat1TfdMJM0pTZM7XEJwXEpkJHcOpw7G3XHAOFbaGoGP4yG2MhJQNZ5jrH5hzL4q6LOTfzXKqsKkYXjeaUtaewmc1OV4EqjKuuGG1loyhKo0DFpr5JAnpiYiaZON2bazGxm9g4Smzsxu5A4AnC0yeBywUPfwSLt2EC/hGMleKDHCuHlw94mZcPfJmspCyml06n12e9mFEywwiT2772T2jtjaIojQIVm0SQCRyIiZu0s
L9KqnMNRgiqcLoJBHBGFSRBv3y4ehCEIzD6ZQhLjCfZPr8GCMG5nnNZ0m4Jg9MHszm0mRHLRjDm6zFUhiqdepyf0FY2iqI0OCo2iaI1ptgzFeMCawnk2K8UjAWUYr+yMPUxHoxIpMM950HbHPhsJTz5gX3NckxrGmnE6Yb2zdozJ38OD7d7GJ/Lx/j14yn8pJDP1nxmrKUAZvaNoihKA6Jikyh8wAEYayTF/j4FY/VkA2mY4stM+70b4y7LMu8zffDPi8ylbnkN1m3ACEwOTmq13S/N7XJzfcvr+eLQL+iV1osVgRUM/Hwgd397N7WBWiM26k5TFKUBUbFJJGLR+O1XJs6gsx1n1Ej7mYh9TBqcchic2gcqAnD1K2Cl48R3IhjxcmGSEVzQO7U3nxd+zh/b/pGIFeHPRX9m4EcDWb55OWxC3WmKojQYKjaJxIVJFBDLxoXTWiYDY9GkYv4WsuxtUv1fBVTD4xdAZgpM/wre/BQjMiGMIFVjRg5sxyQgVIC/ws9DrR/ivd7v0T65PZ+XfE7hh4WMXzweq1hT0xRFaRhUbBKNH+iAERrbYsGKeUm3aJmHIynTESAD2naA+y80lxo3AUq22scGY87LsD/Hh3GtpcHgZoNZXLCYc1ueS1W4ijFfj2HkWyPZvEVnESiKsvdRsdkb5ADNMIIgsRaxZDwYaybVfonLLYvo/JrRx8KR3WDTdrjhNYyoiPvNTocGnGmgdsPP7ORsXj7sZV4ufJksTxYzNs6g58SevP3d23vnvhVFUWxUbPYGSZjYjbjQkjFWTC0mjiKzaZJwOgyEMVZOCNwWTLgcfB6YOA/mLLb3eTAC5cIITdB+H8C42Oz5Oee2Ppclg5YwOG8wWwJbGPn6SEa/PZqKmobr+aUoyv6Fis3eIhUTv5HWNVk4ri8w4lGFESAw4iOWDtD9ALjzLPP+in9BRS0mFiRut2L7vRSPylC1CmA7tA+1Z87AOTyU/xA+t48JX03gkKcP4bN1nyXwphVFUQwqNnsLF8aVlo6J29RgXGo++/tMjLDk4sRgxEqx+5v96UQo7ARFW+C2VzAiU42TwZaBM0Lajt1E4zg+cAfc/LHlH/liyBf0yu7FiuIVDJw4kLvn301tRFROURSl/lGx2Zt4MMWeMtrZj5NdFsCxaiS1ucL+arvbvBkw8TozRvrx/8CCFZi/QRdGWGJ7sHmIT5OOSbXundabz/t/zh8L7BTp9//MwIkDWb5teaJ/Aoqi7Keo2Oxt0jGdBaSuJhMn3dmDIw7V9v40e38GkASHtIY/nQyWBZf+E6qD9n6p0anBtMWpBUqBMvtVgdMuJ8nMSXmoy0O8d8Z7tM9sz+frP6fw6ULGfzleOwwrilLvqNjsbVwYsUnHWBoyWE2sE3Gz+TAC48MkDIil44K7zoQe7eCHjXDHvzECVY1xq5XhJBv47GunYIQsHHOcPSl0sG8wiy9fzHm9zqMqVMWYGWMY+epINlVs2gs/DEVR9hdUbBoCL9AKIzDp9tcgzggCieeIW0yExt6f7IdJ14PbDX9/Cz7+EuOKszBClUL8GAMZqubGCJB8vx2oguyKbCafMpmXT3uZrOQsZvwwg15P9uLt/2mKtKIo9YOKTUORiclICxNv0bgx1ocUfIZw+pql2+elQr/ujjvt4iehqhojIv6Yz6ixv0qmW2zXaXAGum0EyuDcXuey5MolDO40mC1VWxj5qqZIK4pSP6jYNBRuTO1NOkYAwhirI9Xelo3TpNNrv5eWNxGgEu46HfLbw/KNcMsbGIsG+3plGMtF6nXCOPN0xJUmbW5klHQ1tM9qz5wL5/DwsIfxJTkp0p+u+zShPw5FUfZtVGwaEh9GcHwYi6QGk5UmLjQXjhiIS83CWCkh8HvhhWvAkwSPvQ1zF2JEphojLpJO7cHpLODHSY+WmTcV9ueuN5/ndrm5fsD1LLxiIb1amBTpIyceqSnSiqL8Zjy/foiSULIxAiHP8CDG3SUxmEqMSIRi9
geIZrD16QV3nAt3vQSX/AMWPwxZfoyVk4xDECMyQZw+bEk4NTluoMQ+rw3ggl4te/HFFV9w+9zbefiTh/nz+39m5oqZvHTqSxzU7KCd3k5FRQVr166lrKxsj3809Y3H46FNmza0bNkSt1t/z1KUvYmKTUPjxiQLFGEsFh8mluPBCEMt0aJOIvYrtqYmArecCG9/DAtXwTXPwvPXU7eGJ4QRqDDOWAK7nU00tuPFdCLwYwpQgWRPMg8Oe5ATu57IhW9dGE2R/vuwvzP60NG4XLFzrmHt2rU888wzLFq0qJ5+QPVHRkYG5557LqeccgrJycm/foKiKPWGik1jwI9xp63HEQGxNqQYE4yV4yE+S60SvG54cRz0uRle+ABOHAhn9cMREQsn5iPfgxMDwv4ssXrW22tKc5Z4TKdjWHzlYq5+52peXvIyY/8zlhnLZ/DMiGdomd4yelxZWRmLFi1i7ty5e/xjqW9yc3MZOHAgkUikoZeiKPsd6ktoLGRhHu5pmAd9LU5WmhR6hjCuL3BcbHbQv/uB8PAVZteYx2DdTzgi5cMRGqm1kXRqGTUdMNchjBGdIntbDNn+bCafZlKks/3Z0RTp6f+bXl8/BUVR9lFUbBoLbkwrGzDWSzZOcD+C06RT0qHFCsnEFH8mw9jhcFIfKK2EC5+AcNA+P9k+vhynoFMKSQXpWlCGEZxyYC1OLCmGc3udy+Kxizm287FsqdrCya+erCnSiqL8Iio2jQk/pruADEeTZpp2bU00HTp2Do4984ZacFXCs1dDiyyY9y3c97a9vwJn/IDLvoa46cCxlmS/GyNopcCP7HScdPus9vz3gv/WSZH+tvTbevtxKIqy75BQsRk+fDgul4vbb789bntJSQmXX345zZs3Jy0tjaFDh7JkyZJELqXpkAW0wMlAk24CYaJD0aKJArFdAuz2My3S4YWrzeY7X4WPvsbJOpPBbUk4lk4JTgac9E+TbtK1wGZgHTsVnJ2lSI/5bAyrOq7SX2MURYkjYY+EV155hW+++abOdsuyGDFiBLNmzeLxxx/nzTffJBQKMXjwYNatW5eo5TQd3BjrxoPJCPNhBEdm1YiwSEdnqZOR1OgUOH4g3HQKRCJw7mOwrdI+T9rhSIwGnLk5sbU3koQgbrv1GAvnZ+LqkiJ9w4AbiFgRijoWwaWYcQmKoigkSGxKSkq47rrr+Pvf/15n3/Tp01mwYAEvvvgi5557LsOHD2f69OlEIhEeeOCBRCyn6ZGMsW5COKnQaThTOaXQU7LKkjAuNplnE4C/ngn9u8K6bXDRoxCpso+1cDLcpG9aLUbA7F5p0S7RFThCtw6TNPAzNZ2SIv3Pw/5JciAZ2gFjgUP3/MehKErTJyFic9NNN9GzZ0/OPffcOvumT59OmzZtGDx4cHRbVlYWI0aMYNq0aYlYTtMkC8jBsWjsAWj4MaKSgTNawI8Td7FrZ7wp8Mp1kJMG//kK7pH4jSS7SxubIEZgpP5GpoeKICXhpFCvBVbg9FbbCYc2O5R+X/aDxfZ1RgDnEpdGrSjK/ke9i81HH33ECy+8wBNPPLHT/UuXLqVnz551tufn57NmzRoqKjSjCXDcadmYh75koUVwLJod06HDGPeYHe/plAsvXwMuF9z1Bsz62j4uhLFe3Bjx8ca8l3EFsWnRYuHUYJp2fmuf/zNjb7xhL/wbmGJfqxtwlf1VUZT9knot6qypqWHMmDHccMMNdOu28ydLcXExnTp1qrM9N9c4+EtKSkhPT6+zf/z48YwfPx6ALVu21N+iGzM+oDmmSWYW5uEurW3EnWbFfJXYjMywicDwAvjz2SZZ4Lx/wBd/hQOlw7R0iJZ+azKATVKik3GSEqTVjQfjbluKcZW1wqnn2ZGlGGvoFOAAjIXzJTAbx1pqpLhcLtLS0mjdunWdLgl7k5ycHDIyMhp0DYpSH9Sr2DzwwANUV1dz22231edlARg9ejSjR48GoG/fvvV+/UZLJkZEgpgHf2zac
gXxw9WS7Zcbp3jTC7edAZ+vgBkL4aT74ZN7IFuERo6rwQhabNGohbGUpLhURA37uNWYbLYO9rp2ZieXAS8ChwNDMTGcTsBbmDhQI8XtdnPooYdy/fXXU1VV9esnJIjk5GQKCgrweLTZh9K0qbd/wWvWrOGee+7hmWeeIRgMEgw6jv1gMEhpaSkZGRnk5ORQUlJS5/zi4mLA/CanxODCJAvUANtw6mAkniLCU0187UyAaOGm2wWTR8PAjfDtejjjUZh5O3ilkFOKQ0VogjjZbfILtXyefLbsqwS+x1hgrdi5a80CPgVWAafZx10KfGC/GmH3GJfLRbdu3TjooJ03HN2buN1utWyUJk+9ic2qVasIBAKMGjWqzr6HHnqIhx56iK+//pr8/HzefffdOscsW7aMDh067NSFtt/jwQiOTPOUsFYE529QujhjHyNJBXacJ9MPM26Hw2+B9xbDlU/ChAttLZGx0WLJVOFktsWKh1hBksEms3i8wBagBHzrfPhDfly4sHZUns3ABOBY4AjgGOAgTHxn2x78fBKAy+XC5XI1qu7QycnJdOjQgd69e2NZPxMw2wt06dKF1NTUBvt8pWlSb2JTWFjIvHnz6mwfPHgwo0aN4rLLLqNLly6MHDmSSZMm8f777zNo0CDANG98++23Oe+88+prOfseqZi6m80Yd5eL+CC9jBAI42SXSezGbuzZMR2m3QzH3AHPzoOcZHhgFLh8xPdMk2w0cPqyleMUlsq/Gh9GdFJwulNXQl5VHu1oRwklVFIZLzph4L/AcuBUoC0wBngXWFhPP6t9lNzcXM4++2yOOuqoBhWb3NxcOnbs2GCfrzRN6k1ssrOzOeaYY3a6r2PHjtF9I0eOZMCAAYwaNYoHH3yQnJwc7r33XizL4k9/+lN9LWffpBlGELZggvtZOK1lwsSnNSfjJBGEiYrH4V3gjavhlMfhoVmQmQ13nI4RKPHUpOA05aywz/XbnxXbU01qcGT2ThA8AQ/eiJdsskkjjRpqKKGEcsqpjS3SKQKeBE4AegMnAV2BaRjXnFKH1NRUCgoKKCgoaOilqFtP2W32uo/A7XYzY8YMjjvuOK666ipOPfVUkpKSmDdvHu3bt9/by2lauDDxjmyMJSNta8TakD5psYIQxPlbtoAKOLEnTP49uN0mS+1vr9kGklg1HuItpBScpIOkmH3SuSCCsbwsiPgiJLmSCBLEjZsIEZrRjDa0oRnNSI6d6BYgPkW6K5oi/SuIe6+hX4qyuyQ8xWVn5n5ubi4TJ05k4sSJif74fQ8vZvZNECMMpfZ2sWxif9yxdTgiCi4gB846GipDcNm/4LY3YEsZPDwK3Mn2tSrta4mVQ8znBGI+z0NcRwNXxEVSJIlccgkRIoMMyigjjTT8+MkmmyBBtrOdaqqNtdOEU6QVRdk1Gk/0U9l10jAJAxFMID8NZxSBJBFIdwBJIkjFuML8RMXjkmPglTHgTYJ/vAsXPgPBGuJHEIghIsWeUmsj9TciRLYIJW1Pwmt5SSaZWmoJEcKLlwoqSCKJkJ3m1pzm5JJLDjnG2pEU6VkY8ToU0+6mXf3+6BRFaRhUbJoquZgYTg3mwZ+KM34g036l2d9LtlmIeFs2AGf3hf/cCGl+mPwBHHUb/LgVp25HGn5W2F8lFgRO9lutvd+uw6l11xIhggcPtdTiw4fH/uA88sgggxRSaEUrssmmnf0nw8rA+6kXxmMKWXMxKdKD0X+pitLE0f/CTRWpv8nDCEMtRnjEIpFRBLFjCIjZHrRfGXBcP/jgr9AxD75YBX3uhJlLcaZ7Sqsa6W9mxXyGjCdwA8ngspVIxMaNGx8+mtGMFFIA8OIlSJBqqvHhowpTNNmMZmSTTe7mXFImpMAC+zMGAZdhxFVRlCaJik1TxoNJGEjBJA1Iu5nYLLEwzqhn2e7CCIkMXwP6HAhf/QV+1wuKK+GEB2H0U1BahhGT2Gaf4poL4hSTyj6bMGFSSSWDDFy4cOOOc6slk
USECC5cZJCBDx/JJNOGNmSSSatwKzr/tzOZz2fiKnU5KdL7UfMIRdmXULFp6iRjHsRS2Z+DEZE0nMwyr/1epn6KhRIrELWQmwQzboR7zgOvByb8F3pcC1MXgRUT64laTIGY68diQQoppJJKiBBhwlHBMbstfPjw4yeNNJrRDD9+aqmlkkr8+AnYf3KLcmnxVAuSFyebezgJOA/tIq0oTQwVm32BNIzgyAROER5ptunHGRkQG9CXeEwIIxyp4E6HW8+ARQ/DgK6wsRRO/ScM/Qt8uZL4IlJJg5Y4kL0vOZyMFy811ETdahZWVGxcuGhOc5JJJkSIaqrx2opl2X/SScePn2SSaRloSad/d6LZlGa4q93QFVxXuUjrlkYKKSTtaFYpitLoULHZV8gA2mMsHQ9OzzMpuIxNX5aU6Fr7lYxxxcWUwPRoDx/eDI9fCDnpMHcJ9L0Rzvs7rNiEU88DjmUTAavWwm254ws4bdy4SSWVNNIIEyZiB5Ji4zsePKSSSnOak0Za1Npx4yZjaQbNnmxG8qpkrDSLynMrSRqZRAdfBzrRiRa0IJNMUkjBhw+3/ceF1oUoSkOjYrOv4MLEbVpghCMV404T6yOME7ORTDPJYJPamtiYTBCSPDDuFFj5L7jxFEj2wisLoNsf4PzHYclq4pt/BsFluYi4nc6asa1qUknFg4cau3jGHfPPT6yZDDKwsKiiKmrtANRQQyaZ5JXl0enFTuTNysNV66KiTwVrxq6hol0FWWTRjGa0pCXtaEdHOtKSluSSSzOakRPOwVvpNZlzVRhrLjaWJQIcjnk1wiahitIU0b7l+xIuTPdlgDUY8cjAiEEapr+ZD6ffWYxFEnWp1drvUzEPY4xl88D5cPUw+PMb8OL78PKH5jXycPjTWXBEd3BVg+Wx7KW4opYLgB8/ESLRzDMLK2pxuHCRSioWFuWUR89x4yaJpOj+JJLw4GG7tZ3MTzNJXpXMltO2EGwVZOulW7E+tGjxfgtqI7XRup5MMkkmmUoqybAySK1MNT8bsQClLY/L/nnJaAVi9svLi2MBeohPA1cU5RdRy2Zfw4VJEZY+iRLIj32Ixj4gRWBqMA9dP8Yiij3G9oh17AwTx8LKJ+D3w8Hvg+mfwZF/hMOugRc+hhr7RDfuaAGnBw9+/NHvnaWaP+mkx4mPszSLNNKi1o7EgMTiydicQY8JPWi1oBW4YNugbay6bBWRZhHa0jbOSkommdRIKu6w2xFbsWwkW287RpCleLUY2ITpSF0CbAB+iHl9j5nJI0PrFEX5WdSy2RdxYwoiI5imnVJ7I8Ijv8EHMJaOpEHH/qYuD08ZIZBKdCR1h+bw2KVw24Xw+HR4+h2TPHDRE5Cb3pWuqceTzCYi/Bi1SkKE4iwdMGIitTdhwnFuMzD1OH78lODMPwoTJhmTgODDRygcovV/W5O2PI0fT/mRYNsgq8esJvRuiKyFWdRQExW7DDJIqk1yui1U2V9jG4/K1FKxZMCISa39M3DZ+8EIcwVGhDIxVmU2+r9KUXaCWjb7Km5M/KYVTs+0FhhRycJYP+n2SxptitBYGJGRB6w8ZMUiCANp0DIX/u9iWDMBnh0LvTtDcYWHTzcP432eZyn/RzkDATcWVh2x8eDBh48w4TrL92Ja3uy4L0Ik6hoTSylMmOyibA5+6mByF+cS8UVYc9Ia1py3huT0ZDLIIEyYKncVnpDHCHAQJ0tvK8aqceMUsFbZ20SQXPb3MjVVum9Ls9JtmE7WK+3jGm4CgKI0SlRs9nVygc44c22k6DO2EzSYh2kApxA0BfPbug9HhCS5QBIP7PNTPHDpCbDoXzDp6tV0y/wGFxYlDOJbHuQDJrKcMwnGtABw4yaZ5GiyQCyStSZus1j3mqQ67yhcySSTGcik3b/b0WFKB5Kqk9jedTvLr1zO1u5bTeq15TZutKB9TyGMZeLHCKu06QFHcII4A+Vk5IKFE7MJ4gyqC2BcbysxcaG6CXmKst+iYrOv4
8IkCXTCiEc2RkCk75mkRidjHrqx/dREiCSF2m3vS9phu89sc7ngsAOqOKnjixzCSDryFH5+IkBrVnM5XzGV73mIYgbhthMGYpHMtRRSovtiYzkePNEGn7G1NV680dRpFy6yl2Zz8JMHk70qm1BaiKJzilg7ci1ejxfLZZl7LbNf4lILYAQmgLFaJGGiFCdpQqyezfa5lfbxm+3jpAFqCBPrWYkRI0VR1Lu835CCSRrYiHlgWhhXWhXOxM1ynH8R0kNN2tuk2Nur7a/iTku3t9luNrflpiapBh8ldOQVujOdTRSyhuMo4UhKGUgpA1lNKc2YRR7/IZUVJNkJBR480YJQswxXVHAk9uPCFRWbWCtI2uCECZNVlkX6i+lsPHwja4auoaRPCR93/pgBkQFm/RUY9yAY0fDYP5dSjOBKvzcPxuJJw5kblITTK05iOyGcFPIqjEVZhhGcAzBCrij7MSo2+xNeTMv+TEwWlbh5pL+ZJA2I0SAp0zJOoBZHgNw47jQpGg1Bal4qBYcUkJmdSXIomaRIEtXeaiz3e1TWfMLSTQV8s7Evm6ta8hPn8BPn0Dx1M/l539Kz1dc0TykmKZIUrdVxR9xUe6tJiiThC/uIuCO4LGPpBLwBfLW+6PHuiJtQUgiX5cIb8RJ2m3hP8cJi/p3/bzblbOIe7iG8OszdLe/GW+t14jGxfeTKMOIgLjI3zsweGdsgjUyTcWJZtfbPNgUjWhkYAf8RIzg7tvVRlP0Il9WQw8x/I3379mXhQh1Yv0cEMFaOxCW8ODEJyc6SLCwRoqC9P4P4WE4Z0aSBAAG2b99OVaAKd4kbK8XC8sf8E6sFKwRL1vmY8lEG73yaRkmF4xLL7xDkpCMq+d3hlXRoXgsWWGkWrgqXM5ba9r5Zfgt3hRvLZ1/frpdxhV1YXuczXWUugp4g/6j+B08XP42FxWFph/FS25fo6u9qBKPCPrgCR1zBcZ9V4cwNkrobsezECqzBWEtS6Joec2x7jNCr41rZh/mlZ7OKzf5MBJM5tQnzkPXhBMLFNVRhv3djftuvJL7HWsjetkO7m+j2LOKz3GoxD3O7c3SoFuZ+Aa/Mg7c+hLJK5xJd28HvBsDvjoRBXcEvzTelH5tYGrHFqTLfJ3Zbqb32VHh/0/tcuPxC1tSsIcWVwt/b/J0xOWNwhV1OPVI5xkKBeLeh3/4Z2SngBO379uGMzfbbPwdJpmhlH5cKHAjkQCQSIRwOE4k0zuKcpKQkkpKSdPyzstuo2Ci/TAgTlyjGPKgr7a/yPrbmpIr4Uc1i4dTgWEBiDYWIFyaJcwSI761WA2RAoBJmfQSvzodZn8P2iuinkJIMgw+BoX3h8K5wSG9IiRBvhUTs9abFXDtor1kak5bDdms744rG8dK2lwA4Ie0Enm3xLK0irZzWNeIeFFGrxUmXjk2ciI1niTutuf1zqcERqGRzjxwMxduL+fjjj1mxYsVO/zoaEo/HQ2FhIX379sXv10CTsnv80rNZYzaKeRC3wmSqbSI64jnatkbqRkQgQpjf/L0x3wdwkgbkt/1y4uI50QevJBQQs90Dfg+cchSccgzUhuDTpTBzEcz8EL5eDu98al4AniQoOBAO72Feh3aDLi0h2SLeVSWWlAhBBLJ8WbzY/kVG+EcwZuMY3ql8h14/9uKZnGc42XOy0x8tzT4/EHN+rLiJsMbGYkTcZPyCrEFaAhXDttJtvPnmm/znP/+hsf2ul5KSwhVXXEHPnj1VbJR6RcWmCWNZFtXV1QQCgfp7aMU27yzHiZNILEcytCS2AeY3eAmqp2DEpBp8tT5S3Cl43B4ntRriLRFXzPYYr5IHOLIfHDkQ7hllRh3M/gI+WgyfLYOlRfDlD+b1r6nmHLcbOrWEbh2cV+dMaN0cWuVAc1+MDoXhrOyzOCJyBBdvv5j3qt7jlG2ncFnyZfzD+w/SvenmPkSsRFSlj5q40kI44xbcMS+x7MoxWX/2CAd+gnBSmPLycrZs2bK7f
zsJJyUlhcrKykYngkrTR8WmCRMMBvnwww/58MMPCQbrt6DDZblIqkkirToNb9iLFbbwRrxEkiK4wi6Sa5JNtpcLwp4wRDDFljEdnw9seyADCwbSvHPz+N/+JXXYhbGQRHykS4HEZGLOad0MLv4dXDwUSIHyalj4NXz2A3z2HSxZBas3wir7NfOzuvfkSYKW2dAqF7KTITMVMj3tODj1v7j4gnmRGTzr2840398Y57+Ew3MOIi8L8tKgZRokSyZa7OjtEEaURHglwy8dY8UFcOJhdv85l19jIcr+h4pNE6ampobPP/+cp59+moqKil8/4TeSHE4m3Uon3UrHi5eAK4DXMn3Lgm4jcklWEulWOrXU4sGDCxf9j+5Pfn4+zb3N6140jJkqGuvyEosBzMN6R+SXbS9kuGFwIQzu5+wLlsHKzfDDT/C/Nea1ZgNsLDaWUUk5rN9mXvG4gH72y4Sv7t7hCI8bCltD/wPgwv5wmDQ6lRHZsYWuNTiJFWLxVGKsuwC4Qio2yv6Hik0TJxQKEQgECAQCCfuMAAG2sx0vXtJJJ4ccggSppZbUcCpBgkSIECBgRgCwnRpqqKCCiDcSnz4txY+SgBCLB/NQziVehCTdWZpmSubXDp2pk7OgRwr0OMjeFsG491xAOgQq4KctsKkUyrZBWQWUldtfK6AqCGWBWj6s/oolwVVQnUtqVUfSqw5gS7mXheth4Xr454dw/qHw1NmQHrsWsXjEKpNYlrgmq829u8pc2jtN2e9QsVF2mRAhSiihlFLSSItW7nvwECBAKaVkkhkd7QyYehdJA07CPHi9xCcdgDOoTGpZYvHidDrw2duknQwxX8VVJYhbzo6j+H3QqQV0aoWxPKRoU1rUWEDIA+F+vB+p5sLwhaxhDRYpPBx5jIIfL+M/q1w88QlM/tJ83MRTiHerSQabrFsy5MQlGAZ3tRu3pQU3yv6F/otXdhsLiwoq2MIWfuInSinFhy/aTiaFFPz4SQon4XK7TGDcj6m5kVTpFJy+bNKRIBunIn9HIjjdpyHeKpLu1B7irR15L58pfdBEFMQakfY90ponAIOSBrE4bTGjkkZRTTXXu6/g4U4nceNJP/H5NZDsgUlfwhc/2dcXl5nMxqmwr+uzP7eKqEXmqnHhiejvecr+hf6LV34zFhZB+08xxaSTjh8/1VTjx0/AEyCcEYY8jCUTgbAVZvOWzWzZsAXXdhdWkkXEHzEWkAtcQRfuGrcz8TPsimZ+WRusODFxVbtM/CMJIqkRcIG70v79ybZ83NVu48qzrQx3lRs8kFyTTE52DrnJubgiLscVJpaRF7Jqs3gx/CIjGMFYxvKO6x16VffimRbPcOlhJ/PkJzBrBRx2sCwIIzgyDbUCY6n57e32/CArbJEU3tGHqCj7Nio2Sr0gI50rqIhaOW7LjavS5XSGLoNAKMAn73/CzP/OpCpQhc/yUZvk9OJ3WS7SatOiiQZhd5iAO4A34o3rdwbgiXhIDaVS4asg4jKZBZ6wh+RIMrXuWjwRDzWuGvMZbtP6JqU2hYgrQuvM1gweMpicA3Nw1bqcwWkBjNUlLrYkOMt1FgNrB3Kx62LmWHM4JXAKx7b/F3xyJQtW4VgtYj25Yt6L6MhgurARUBUbZX9DxUapV2Ktnc2pm6ltUWse3vYcnPBPYVZuWcm8L+dRWVVJGml48UYHoUkXZz9+trM9uj2JJDLIiBs1LceHCEVjRC5c0bhRLbVUUUUGGUSIYGHhw0c66WxqvonCwwuNxbFjg1Ep4Iwpymxb05bZvtk8zuPcxE3MzX0SuJL/ba8CUo1Y1WDuNVZc/DhdCezrRiIRvBHtyqnsX2jMRkkcbtNEk5aYFi5+IB2SapNIjaRGRUYExocPC4tiiimhJG7eTZhwNNvNi5daaimjjEoqo8IDRuzChHHhotpubFZNdXTkdK1dCBN0B82/fh/GipGYjcRdpH9bgGj7HrfHzR88f2Bh0kLyM7MBKCqt5Y7QHUYEY
+cElRNfBOojmijgxh03j0dR9gdUbJTEIw/bHCAPynLK2OraSogQNdSwmc2UUEIllVRQQYgQFVTgtv+YS7iIECGJJAIEqKQSC4saaqJzcMBYPJVUUkVV9ONrqY0e48FDMcUkuZIIe8NOCnZsRwQRnGqc/mYxTUZ7Rnryhf+/4IpATSb/F7iXgYGB/M/zPyemlIzTskcam8akSSfVqtgo+xcqNsrexQ2RpAjlSeVsYhPFFEfdXSFCUQvHjZsAgWhmmxs3FVSwnvWECUdFCIzlImOmAwSosP94Y1oQBAniwxcVoggRM7VT+rjJsDQRBGkW6sHp9BzCxGfKIYVkspLNGtoGe/CF9QWHlB/Ck2VPYgUtI1ByPcw5pJprWJ74RAdF2R9QsVEaDLFMSu0/W9jCRjZSTTVhwlRRxTrWUUIJAQKECBEhQjnlJNl/xFopp5xqqqMxnVpqqaQSHz48eLCw2MSm6GdXu6tNrUsSphuzzPOR0QGSHi1utmr7q8yw8UGGXfMzs/YjLvBcQDXVXFVzFSdVn8RP1T85XaJjB9P5zDUsl1Z1KvsXKjZKoyBChFpqqaaaLWxhE5sopzwaqwkSJJVU0kiLutJctnmw3f5TQglu3FGLRsRM6oLE/ebFi8tymZ5ubkzHa8lAkw7PYtXs2A8thWjiQLLtCfN7Mnkh+QVe97xODjm8E3mHXqW9mFo11Uk+kA7SSeByuUiKqBtN2b9QsVEaJREiVFPNdrZTTDFb2cpa1rKRjVRQQRllbGFLXEGpuN4iREghhRpq2GL/EREKEyZIEDduwslhIzTJmIJSMO6u2NEAsZ0KpFWO3YYm2T6uxrZ6zvSfyZLsJQxNGspWtnJq1alctvUyysPlzuiFGrCSnA4LirK/oGKjNAnChAkRoooqiimmlFLKKKOUUjaxia1sJUiQAAGKKWYzm6PdDJJJjvZv8+Chmmo2J20m5AuZBAAvxt2Vab/KcYo7JQXag3GllROd9eOx//eEasz3BKBtRVtmZ8zmH+n/IJlkJgYnUrixkE8qPolaS5bbitYFKcr+gtbZKE2SiP1HUpmrqaaMMly4olls8l7iO7LNh482rjamlU4mxlKRnm2lOO40SQ4Q15oXx7oBolOTZRqp3SnAHXLzh6Q/MLTFUEaVjGJRaBFHrj+SW0O3cucBd5rxDZa60ZT9C7VslH0KqbORtGqJ04gVVEIJm9jEeu96KltWQkegE9Aa874tUAD0xNQGgRGdFIyoxL5ie6/FZp+lm+Pzw/l82vJTbsq5CQuL/9v8fxzx7RGsDq2OqyFSlP0BFRtl/0RayiRhxCQd879BXGkeTE+3ThgByrW3+4ifXQNY0tXaS3zH52xIdidzX/Z9zO8yn46+jiysWMiI9SNY0npJgm9QURoXKjaKIsLjxtTC2B1oot2qpaO0hZOp5oOIbdkkSePNAE5SgQunG3UKHM3RfHPIN1zQ/AICVoAPu3wI57HzIXGKsg+iYqMo0gvNjyM0bhxXWRXRAWzkYOpyUp2hou4UjNWTjbF8anHGFcj1syCrOosXCl7gsZaPkRxKhq7AlUD3hN+hojQ4KjaKAk4vNGkrk4pjqWRjhCZ2Ho4LwrbauMGpp0nGCE+6/X0VzuA0e3rnCZkncNY3Z8FKTP3NOcBInMFwirIPomKjKGAEREZWyyTRFJxiTHCGpNljqiN2zMYd64aLYNKmxYWWhRGcGoxFFDTHpQfT4SVgpn1sH2As0D6xt6koDUVCxOadd97h6KOPJj09nczMTPr27cvcuXOj+0tKSrj88stp3rw5aWlpDB06lCVLNGCqNBBSX5mMKb4M4YxFkEyzWpwaG7sTQFRsJGGgxn5Jp2fsY6XrQDlGcGpN1hwW8BnwNLARk4RwCTAY/TVQ2eeo93/STz/9NCeffDKHHnoob731FlOmTOHMM8+kqsp04bUsixEjRjBr1iwef/xx3nzzTUKhEIMHD2bdunX1vRxF+XUkoC9fw0QD+5TbL+lan
Y1xkfnBsgXFLUkEWfYrDSNU5TixILF67M+I6422BXgG+Mj+/EHAZUCzxNyuojQE9VrUWVRUxLXXXsuDDz7ItddeG91+/PHHR99Pnz6dBQsWMHfuXAYPHgzAgAED6Ny5Mw888ACPPfZYfS5JUX4dEYEajLWRjtNFIAVj8ezYI80NETtm47IwAmWPr47Oycmwt5ViRCgTKDNdn11R08cmDMwBlgOnYtKtxwLvAl/U/y0ryt6mXi2biRMn4na7GTt27M8eM336dNq0aRMVGoCsrCxGjBjBtGnT6nM5irLriJDIS4QjCSM2afb2aqDMfolxIgPWAjguNPk1TjpHi3suGVwR188Xdf4IPAl8g4kPnQicj6ZIK02eehWbjz76iO7du/Pqq69y4IEH4vF46NKlC0888UT0mKVLl9KzZ8865+bn57NmzRoqKirqc0mK8uuIOFgYYdmOcYtl4rjDKuz9aRh3Wg647FiNlYFT9Clp0+UYiyYJIxRJmEQBP1i1O7FsYgkCbwGvY8TtIOAqNEVaadLUq9hs2LCB5cuXc+ONN3LzzTfz7rvvctxxxzFu3DgeffRRAIqLi8nJyalzbm5uLmCSB3bG+PHj6du3L3379mXLli31uWxlf0fiNG5MQF8SBYQknO7PMvPGFVNGE8GxXGQUdBrGdRbGCJXMwakAl88VN/ztZ1kG/AuTIp2KpkgrTZp6jdlEIhHKy8t57rnnOO200wA49thjKSoq4t577+Waa675zdcePXo0o0ePBqBv3771sl5FicODcY/ZosA2nA4CFkaIaoimMksSWljSpD04HaRjBUpSqm1Rs6zdGC9QjkmR7gcch0mR7oSxfNb+xvtUlAagXi2bZs1M+sxxxx0Xt33YsGFs2rSJjRs3kpOTs1Prpbi4GGCnVo+iJBTJRAtjREGsk0wckbFn2BDGiEkWJCeb04OpOG40P8b1VgmU2O+z7euXARngCrlwRX7BjbYjP5cifSyaIq00GerVssnPz+fTTz/92f1ut5v8/HzefffdOvuWLVtGhw4dSE/XSKiyl5GYjbSq2YARHWm6WYozciCm27Pf7gwQlEacYrBE7PP89vfVGFFIMe+TvEmkZ6TTIq8FvxS62RnWNIvKfpVUHVIFR4Onu4fMOZl4Suvnv3JKSgrp6em4XLu5MEX5FepVbE499VSeffZZZs+ezRlnnBHdPmvWLNq1a0erVq0YOXIkkyZN4v3332fQoEEAlJWV8fbbb3PeeefV53IUZdeQmI0XE5xPt7/WYkTGjxGKEMaNFjL7ku3ncaAEI0gu+xoiVB57WyBmWxCysrMYftxw2g1q95stkxWhFbxU+RLFLYopP6+cU1JP4cjkI/dYJDweDwUFBfj9/l8/WFF2g3oVmxNOOIHBgwczZswYtm7dygEHHMCUKVN49913mTRpEgAjR45kwIABjBo1igcffJCcnBzuvfdeLMviT3/6U30uR1F2jSQc91gpTprxFpyGnNLtWY5PgvRU822FB5Oh5sLpDB3ExH08mESBZPvaOZDtyubI/kcy8OCBe+QGuz1wO9f/93pe+vYlplRNoaxVGeNPHE+r9Fa//aIYD0RSkg53U+qXehUbl8vF1KlTueWWW7jrrrsoKSmhe/fuTJ48OWq1uN1uZsyYwQ033MBVV11FIBBgwIABzJs3j/bttTGU0gCIG03a1ATsVw5OQkDAPlYsFiDbFqXSSuKz18Sd5scIlYyY9gC14PK48CZ5zb49eKbn+fJ48fQXGdl9JGNmjGH2qtn0eaYPE0ZM4NSDT/3tF1aUBFDv4cXMzEyeeOIJNm3aRE1NDYsXL67jHsvNzWXixIkUFxdTVVXFe++9R0FBQX0vRVF2DXF/BTBWTQ3x3QJcGMtEUqODQDVk2zGbkhJM1liZ/bWauBTpuDEFkmIdgPoa1nlm/pksuXIJxx1wHNuqt3Ha66dx6bRLKQ+W188HKEo9oLksiiLWhRcjFMn2qwIjCj6MxZOKIxi1k
JNmTisO2PsycNrS+DFiU2Zfx3a5EcBxtUk36XqgbWZbZo2axaPDH8Xv8TNp0SQKnipgwZoF9fchirIHqNgoitTDpODMn/HiZJhZOO40F1Fhad3cnL5hO04yQSXGuqnECJeMnK6xt0kSQi3xrrf6uA2Xm2sOv4YvR39JYatCVpeu5ujnjub2ubcTCofq98MUZTdRsVGU2DqbFIwwVGNEIh0jIuKRkuOqob1trazdZu/z4PRVk/HQln0tsWhsqygqTgmgR14PPrv8M24aeBOWZXHPh/cw4NkBfL/1+8R8oKLsAio2TRyXy9UoayIa45p+liScQL5k/IohUGPvy8a40yowvdMC0N6uP15XjBEUsWgk3iPjpmNn3IjoBO1jE4Qvycd9Q+9j/sXz6ZjVkS83fkmfp/vwxOdP7F4HA0WpJ+o1G03Zu3g8HvLz8zn11FMJBBL0a/IecOihh5KVldXQy/h1XBjBkYw0Nyb+Ukp8r7RajBilm/cd88zpK7eC5QFXrGUjMRlxy4nFJKOjLYxoJZijOx7NN2O/4ZpZ1/DCNy8wbuY4/rP8Pzw78llaZ7RO/AIUxUbFpgnj9/sZMmQIffr0IRKpp9SmeiQtLY3mzZs39DJ2DR9O8D+C+Z/hsd8HMEIhFpCdkdY8DVpmwqYy+LEEOknjzdj5NjI4LYSxZDIwFlAapp1NLQn/X5jlz+L5U55nRNcRjJkxhpkrZtLryV6aIq3sVVRsmjBut5tmzZpFe9Ipe0AKRiTEOqnCCJAPKCa+R1qtc1zP1kZslqyGTj1xREYspbD9qsFYNJJoIL8blGF6ne0FzuhxBke0P4JLpl3Cuyvf5bTXT+OSwkt4dPijZCRn7J1FKPstGrNRFIjrykwqJqYS22omg/gOAjXAdujVwmxatA6ngaf0TxMLR4o+Q8BmTNxnM7AeMz5gL9Imow0zz5/JY8Mfi6ZIFz5dyMdrP967C1H2O1RsFAWMFeLDGQmQRbR4E4/9Xoan+YjGdY442Jz+/iqMy0y6RgcxQiVD1yCurxrbgXXAG5hx0D9i2uOUYiwf6VpQg5MmHSG+4edvxO1y8/vDfx9NkV5VsoqjJh2lKdJKQlE3mrJXcblcpKamkpOTg8/XcFPAsrKy4j9frJE0YJP91WN/lYd/Co6IeIEwDGpnTv+4CIKVTnPOqEhI5+hwzHlBjFj5MWL2PkZscjFiJc08xRXntt/LtVwxx3lw5udI+xuZpfMrSIr0XfPu4v4F93PPh/cwa8UsXjrtJbo317GgSv2iYqPsVXw+H0ceeSQpKSnU1NT8+gkJIjs7m/z8fCdF24N5gAdwijutmG2pGGGoso+tMdtbZEF+K1j6E3y8HAYfgLFeRDCkU7TU3YARnthebDJGWsRNri/WlrS4ie2lVhvzfRhneqfLfp+MM/jNh1OoKi14bHxJPu4dei8nHHQCF7x1QTRF+sHjHuSqw65qWinsSqNGxUbZq/h8Pg4//HAOPfTQBl2Hy+XC44n55y+dAbbhCIzUyMiDPoSJ3YRwOjrXwoiuRmxeWwqDD8QIhlgy2MeLcEnCgFyz1rkO1fZXuw1OtOtAmr0WuU4SJv06ZJ8j1xFRKY9ZnyQ9iBWUbN+nH6dRqBuO6ngUi69czDUzr+H5b55n3MxxzFg+g4kjJ2qKtFIvuKwmWOHVt29fFi5c2NDLUPY1tgM/YERCBqJtxhEasU6kbU0EqIYlm6H3vZCbAhvvBJ8UbbpwBELqbAIY8ajFyVoTqyQV01fNh1OPI8diHyvus3Ic60UsGykeTccRMHHbuXCsqAiOlePF6euWYr5/4/s3GDNjDMXVxTRLacb4EeM57eDT9vSnq+wH/NKzWRMEFEVIwamvkeLONMyDWqr9pQOAZKolQa9W0LMlFFfDlK8xYgSOuywVxwqRFGvJcpPkA6ESkw4tqdGlxNf5VAJbcayZGvv8LTiCIv3ZAphaHonriBswSLQLAlX29dYCy4EVcEb2GSwZtYRhnYexr
Xobp79+unaRVvYYFRtFETyYh7UE2SUTzYdjLYjYVOBYOxVw7RHmEvfMhYgPJ2gvFkZVzLXlvTT4lA4GksUmYwnKcKwVqfeRZLFa+5gt9v40e73Sh63SvmaKfZ0SnBY5bowABmKOL8HJnvsJ2pS0YWbfmTx22GP4k7SLtLLnqNgoiuDG9ECrJX6cQBCnDifD3ifBfNsVdcEA6JAN322Fl5bixHrCOOLiI2oNyYho3DixlSBOAkA5RliSMFaOfC/CIxaQHyctO4iJOXljjqnGSeH22sdvxxGfCpwsPBG2sNnvrnbz+1a/58sjv+SQ7EOiXaRvm30bNaGGS+5QmiYqNooSSxZGTCTgLi4wySqTrgCS3WXHY3zJ8Oeh5hLXTrfHDkhQX+pvJCstBeehLhllsRaNfJZsBydGI/ukZicJp89abJYbGCGR88QFJ1NIY8Wnwt5XhWM9+Z3r98jpwaf9P+WWrrdgWRZ/+/RvDHhyAN9/970zKE5RfgUVG0WJJRkndbgWJ/NLkgRCmIe1WC1gBCIIF/WF33WFkmo4702olkwwCfgHcNxrMqRNijSlHxv2cdKBQFx28nlJGAGJTaOO4GSfeXBiM7FNQd0YQaq21+DCEb9Ke580DA3gtNjZaD7Ph4+/dfwb7x/1Ph1TO/JVyVcc8sYhPDH7Cawiy7jzqtnjglNl30XFRlFicQN2N+doEScYwZAJnGJNyDiBoHm53PDMmdAqHd7/EU57BaqlMaeMKhARk3oY7GuIVSOzcMSCkp5sInYiWlK4GbA/Xywa1w7bwJnPI5+Ffb6Ms5YmpCJMlZgsPJnvI+vxwFHuo1jcfzEXtbmIQCTAuG/GccLsE9i4ciMUYV6lOFl4imKjYqMoO5KDU7kvNTPi5nJjHs5pmAdxJsbVlma+tsmB90abjtCzfoC+T8Oi9TgdA6QdjrTGEetDhEfSnMWSqY05TgauyXuxclw4iQiSUi0Peg/xoxJiBUtcdW77VRlzPVfM9hAmFmTHjTIjmTx36HNM6TOFXG8us7bMotfcXry18i1jYa0CVmB6v1WgbjYFULFRlLp4ALvBJhYmKSCJ6GiBqItLqvHFvYV536MVzLsMujWDZVug73i4YCos2YQjEjK+wL3D+ZEd3ov4xE75lKSBCE6BqIiCzM8RkZI1x5ZvV+LEZSTOH8G40lz2+eDUBUnGmsR47ILVM3LOYEmvJQzLGca20DZO+/I0Lv38Uso95eYzyzAp1UUYsapBrZ39GBUbRdkZeRiLRcRACh+lC0AQJ4YTxHmo2w/Unu3hq2vgajsl+qXF0PsJ6P4vuGEOTFkCP2yHsNTseIl3p7livhfETSZrkrRpsRxiuxOAY8F4Yo6xC1GjveBEwKQNj4iBuNkCOI1HYwVsq/n8NlltmNVjFo/3eBy/28+kdZMomFXAR5s+MgIjWXNFGGtnDcZCUmtnv0M7CCjKz/EjsAHnt3uxJOQ39ArMQ1kC+BIvsZt0Sp3O6g3w0Afw8rdQusNA1SQXtMqAtlnQLAWyUyArBTLSIcNlvmYmQ2Y2ZEYgNweaZ0OzNEj3gSuCEUIvxjKRFGtx1cV2D/DgJBek2wuQ9GuxliQ5ospefzPiBY2Y97EWXxi+C37H+d+dz9eVX+PGzc1dbuauA+7Cl+VzPksy8VIwjUfTcCwppcnzS89mFRtF+TlqgG9xCiG34lg1HhyXlYiNBNfFjSXNNe1mmSE3fLwS3l0K32yDbzbCuj0YDZ3ihXY50DYX2jeDAzPhoI5wUBvolmdEKip8koEm65P+a2UYgfHjWFLikpORChIPKsOkhksBqj/m5xAEcqDGVcPdK+/mvrX3YWHRJ6MPk7tOpnted8cyysRJOpBZQdLxWvt+NmlUbBTlt7IV4/4Rt5mFcS9FMA9uedjK/yJ5mEu8JRXHdQVO8N4O1geD8FMQ1pdASS2UlkKpBeVBqKiG8looC0BZDWwvh+IgbK0wr+pfGT3TqTn07gC928Jh3eHwjmaMd
bSIM4xJHsjGybCTa4p1JK7ECnvt2cRnzYFjFYmFFYYPN33IBasv4Mfgj6S4U3iw24NcdeBVuGpd5t6TcdK/sT87237JZypNDhUbRfmtRIDVOBM2wYmDbMc8FKtxCj6rMA9QCycOEraPlXb/Ik4iSjJqQCyPLJyHvqRIi5ssm2hcpqwC1pfCugCs2QIr1sHyUli+Cf63AYIiBjF0ag79u8DRveGYLtA9B1zZOIJRgXGxiXWTgpPBFsFYIW6cBIDmOG426SxdZX4eZellXLPqGp7/6XkAhucNZ2K3ibSubW0+Q1xw4tKLFazmOFaV0mRQsVGUPaEG+A6nAl+mb4pbKDboHmvllOEMO5MHNxjRENeVHJ+GeYBH7OMiMeeJy6saZ8SBdHnG3iYtaXLMptrt8EMJLN4IX6+Cz4tg4Sqo2CFm1CITBveC4wtheD60TsGIHfbnxHaPlhELYtVZ9rGSnSdp2bGuu2R4c9ObjP5+NMXhYpp5mjG+63hOa3WauS/5OcrIBOnzVoMRnmz7MzSu0yRQsVGUPaUc+B7zQE/CCEnI3i5JAl6cLszyXjoIVBE/1lnqW2L7o7lwCj6lu0AQ87CVan9pnyMD3qTDQRXmQZ1hX19cVBJf8UE4CMvWwcdrYP4ymP8t/LRDzKigA/yuL5x8GPTrAG5ZiwhfOkZoxJUmbrmg/bNoRnwmndesZUP1Bi7dcCmzS2cDcHHLi3m016NkWpnmetLSJ4wzOqEGp4g2GyOkkkWnNEpUbBSlPigGVmIejtIRQDLSqnCEQlxpMhBNZuDIrJnYTDZp/S9xFEk2SMPpMJ0Sc21wBGy7fT1fzPXE7SWtckTYpPWNCFIIrEr4XzG89wPM/BLmfQtVMf01W2XByL5w6gAY0hu84vqT4H4IIzZhHEsn2z45NlnCbtdjuS2eKHmCG1fcSCASoFNyJ17s/iJH5h5prhErXBK3qbbvUXrVZdsv/y78fSl7HRUbRakvtmESBuQBG1voKa40aWMDRgRi59ZInEKGqLkxVpIEzSXOI64oqcOROFA15qEMJrifa78XC0nEQAQm1i0l7jCxioIY4bFdZYGt8OFPMONLmPoZrNnq3HZuOpx6GJx9BAwuBI90PogVyDCOC04sneb29+Ju9MF3xd9xftH5fF1tp0h3vJm72t2FL9Xn1AaJ+Mq5mTgC78EIUB5Oqx2lUaBioyj1SQnGwpHW/tL5WKwb+Y1cOidL7ENm4EisQkRKXG7gdIe2MA/UgH3NTJyi0cyY60pFv8RW5Brg9FATN1usuFTiWBPgDIezH/BWABatgKnfwhufwLK1zu3nZcK5A+GCY+HQA8El9yfn2zN+cMdcH5w+bD6oidRw97q7uW+jnSKd0oeXDniJg5sdHG81ue17ts+jGmP1iAjnYiydFJRGgIqNotQ3VZgstW04ghM7cE16j0mGmRsjAmLdSNGnzMaR2E9qzPEpOC42P46YSIcBSUjw2cdKtlwGTlpzDk5MSVrvQLw1Jd/7cXq0ifvP3r90Fbz2Mbz2KfywwfkxdG8DFxwDFx0NbVvhuPRkjbHFo7GWDkAIPqr5iAv+dwFFNUX4XX4ePOhBrm57tUmRlvhMEMf9J50MpFZHrL40TIshrdVpUFRsFCURhDAdBn7CcVPF1t+I8EjMRlxpVThpvZKx5seJ0YhFJG1ypCWOxEfKiWadRetk3DiWgwxUq8FJGJB07dhx0rEJBLGzdeQ+RNAkY8wFVhJ8tQpenAuvfAyb7QQDtwuGHwqXHQsnFYDPG7MWKXBNIt7SsccilCWV8Yc1f+C5Lc8BcHzO8UzsOJE2rjbxXQqk24CM7JZxCHIfEUyCQg4a02kgVGwUJVFIsH8tpgBUss0khlKFk50lcRoJnktcJXZomohNDU4atNS5+HBaxbhjzpXgv6RDSxdoiZNEcARI3G0iblIHJA9rsRykqNODI0wSM7H31wbg3cXw3DyY+jmE7LXlZcKlg2HMSdC5HUZoJLkg1V7/j
tlrwJub32T0mtEU1xaTm5TL+I7jOb3j6c7PT34GMjwuHcflJqIqP7MMjBWlls5eRcVGURJNBGOlbMBkiUm7f0lvlmPEPSW/rSfhWEXp9rEBnIB4FU4hZwUmAG/Zn5WBeYCXYB6wqRhLx4XjZtpuv/fZ58TGacrtrxk4SQtS8yM94CB+KFvsw13SvUOwNQgvvQ/Pvgvf2vEdlwuOPwSuHAIn9oGkNBzBFItuB0tnAxu4dO2lzN5up0i3uphHuzxKZijT+bmIYKbgpIhLpl/Q/gwRz+b2zy8ZZS+gYqMoe4sIRiC2YERAAvGyT0Y9WzHbpDVLFU4Ps2qczgGVOA0yZcyzD6f3mqRPg2OBiMUTwXGFiWUj9SylmIe3WCvbMUIjVpDU8WTgjBsApyeaTAettY+pBisMn/wPnnoPXl8AQbsTQqc8GDcSLj0ecqTjgB/H3SWWTi5YLosnNj3BjT/eSMAK0MnXiRc7v8iR6Uc66dHi2pO1BnDEV6wesYhSMILTDCcmpSQEFRtFaQgkLboE85u8ZKNJoF9ESERBEgggflKnvI91I0l/Nck4i3WlBezPycJplZOCeTBLooDMwIkVwNjWMTX2miXBoAYnjTsVJ/Ub+9rSTkfWlwFbS+C5WfDkf2HVJnNoajJccBRcewJ0PwgnxVu6UcdYOsu2LWPUulHRFOmbWt3E3Qfdjc/jM/dYiSOWch87diUQ159Yk80wwqNtcBKCio2iNDTyAAxhHoCVMe8l20r6pMWOaI4dkCazZzwx34u7SOISsenQUlwqfc7EpSc9ziIY68aDERCxXGI7HYjlUY3TDkfiS2JlSbNRKeyUDgB2vVG4FmYug8dmwH+/dn4kJ/aD60+FwQfaoxJ2tHTKoCanhj9v+DP3bbiPCBH6pPXhpU4vcbD/YHOc9FiTc1KJjumOSygQEZWU8uY4HayVekPFRlEaM9ZOXmLVgDMUTYRHOlBLnCUc872ImghSTcxXSUWuxHGpiZCJRVKKESI/zohoETNpJYP9uVU44iNxGCm4dNnfg5PSHYbv/gePzoTnP4CAHRMq7AQ3joCzhoDHz05jOh+VfcQFyy+gKGRSpB/o+ADjcsfhSneZ48TSEVcaMfcrPxNxB8rPJhmTLi3Ftcoeo2KjKPsqseOhxW1XidM01MIRHAn6xw5Xc+MkFYgVIDN4quyXPMDLY46RzxbxCWHcaOJ2i+AkIIj1UU00425LGJ6aCf+c7qRPd2oJfzwNLj0KUkUMpFizBspKyvhD+R94butzAAzLHMakgyaZFGnp2BA7jK0GJ5tPMgEl/VzqjVwYt1oLjCBq5toeoWKjKPsbYh2Jm64MIxxSswOOCHlxsrgkcUAyuoI4SQtlOM1AJbtNzhEBEjdYLU4CggTly2POt3vHBUrgpQ/hwRlOsWjzDBPTGXcqZGXjWDpuIAv+Xfxvrlh1hZMi3Wk8p+ed7szfkaLaSozASEubWEtHikSlaapk8Gm69B6hYqMoivntX7LKtuNYP+JKk07RsZle1TgpzuBkkYEjPtKJoALzYJciUnFdYW8rJd7NFiAqfOE0mPop3P8KfLHSbMtKg3Ej4NrjobmMu7brdDZUbOCSlZfwbvW7AFycdzGPtn2UzJRMp15J0rhl7RLTkXiWzM6RVG5xEzbHJBLoWIPd5peezeqpVJT9BTdGKLKBjkA3oLv9Ph2nY3Ns1wNphZOGk8UlfdnE1ZUWc47UF4n1IkhXgjQcq0FiUamQ5IHT+8Nn98Cc282Mne2VcM+r0HEs3PASbKpwzmsTbsOsDrN4vOPj+F1+ntvyHAVLC/ho00eOtRZbDFqNsawqcERFsvfE1SbZa9swve+KcTIGlT2mXsVmwYIFDBs2jBYtWpCRkUGfPn2YOHFi3DGBQIAbb7yR1q1bk5KSwoABA/jggw/qcxmKouwKErhvAXQFugAHYLK0MnBiLRJYF1ebTNRsjvPbvzzUwUkwEMTFlo7zxBHrR6wLgFqjD0MKYO4Ds
ODvcMIhUBWEh2dA5yvghvGwyXa3uXwuxrUex1e9v6JPSh+KQkUcvepobll3CzWRmvisOUkMSIp5iZVXjRGWrfa2Env7amAVxoJrcv6fxke9ic3ixYsZOnQooVCICRMm8O9//5vDDjuMyy67jCeffDJ63GWXXcaECRP4y1/+wowZM2jdujXHH388ixYtqq+lKIqyu0izz2YYwekCtMdJD87CWANZGJGJHQInLXma2ddw48RIRKDEdSaEMCKzo/VjEQ3UH9EV/nMzLLwXRvaD6iA8/G/ofDXc8CJssQtZD/YczCcdP+HWZrfiwsV9G+6j/7f9WVayzGkLJLU2EftzKom3dEREpbu0pH2XYVoR/Uj8FFZlt6m3mM2tt97KQw89RHFxMenp6dHtAwYMAOCTTz7hm2++obCwkIkTJ3LJJZcAUFtbS35+Pt26dWP69Om79Fkas1GUvYTEecowcZ7tOK12wCkUlYf5duJHWrtwMsokyC+914I4YhPBWBexCQXVGAvDjtV8tQL+8jxMs//rp/nhDyfDDcMgx06RXsACLlhxAauDq02KdLsHGNdmHC6Xy7GwRCSlu4B8nlhaYZzWOFLLJN0cWmLGGmhR6E7ZKzGbmpoavF4vKSnxgyWysrKIRIx9PX36dLxeL2effXZ0v8fj4ZxzzmH27NkEg0EURWlESJynBXAg0MN+L92YpYi0CqdaX1xvuThFk1KgKrGRFBxLJzYhIbadTKyguaBPJ5h6I3x5L5zYFyoD8LfXoPPv4a9ToCICAzMGsqjbIi7OupiAFeCatdfwu+9/x4bABmemj6xR2t6Iq60cI3DSjUCmpLpwBHY9Jp6zDY3n7Cb1JjYXX3wxANdccw0bNmygtLSUCRMm8N5773HdddcBsHTpUjp37kxqamrcufn5+dTU1LBixYr6Wo6iKPWNG/Obf1uM8ByIcav5MMIimVzpOPEbic1EMJaCpCbLA1yakMZaGOBYVLEdAuw2PX26woy/wscPw9BesL0K7nwdDhgNj/4bkqsymdR+Em+0e4NmnmbM3j6bXkt68WbJm/FpzdLcVNruSN2RxHQqMAJVDWzGiFIFRpR+xMR05FzlV6k3senZsyfz589n2rRptG3blpycHK6++mqeeuopzjnnHACKi4vJycmpc25ubm50/88xfvx4+vbtS9++fdmyZUt9LVtRlN+CFyM0ne1XDs7gNOlV5sKITwpGaCQTTawJsSBiXWfgWEDSc03YwdIZ0AX+ezvMuxP6d4Mt2+Ha8dD1WnjuPTgl93SW9FrC8PThFIeLOeOnM7j4x4spqy1zhEMy0pJiPkPSvStwOi/I/lScDg0lmASCtTgdspWfpd7EZvny5Zx++unk5+fz9ttvM2fOHMaOHcvYsWOZPHnyHl9/9OjRLFy4kIULF5KXl1cPK1YUZY+JtXa6YpIL0nG6M8skzSqcep4gRkhycQpG5SUPbZlVI0jh6I690NxwTC/4+BGYdgf0bA9rtsIlT8Ih18PXC1vzn7bv8M82/8Tv8vP81ucpWFzAh1s+dBIHwOm0IM1SZdCcJDvICIkKTFyqFKfYdDOwHONaE4tOqUO9ic2tt96K1+tlxowZnHTSSQwZMoTHHnuMs846iz/84Q9EIhFycnIoKSmpc65YNGLhKIrSBPFiBKQzJpOtBc5ETRldkIdjyUjWV2wHAp99bGyDTGlgKvU8EN+oNBVcbhhZCIsehOevgg55sKQITvwrDPk/F/22XM3X3b+mT2ofimqKGLRmELdsuYUaV40zFkLm9YibL7Y+R1r3YN9P7GRTSShYZb/EYlLiqDexWbJkCQUFBXi98WW3/fr1Y9u2bWzevJn8/HxWr15NVVVV3DHLli3D5/PRpUuX+lqOoigNhYwKaAcchDO8TAo+JV06tmt1GkaopBhTJpGG7GNiLRpwBEqEye4Nl+SBCwfD/56Bhy6CnDSYvxT63QZ/fbI7r6R8wq15dor0tvvov6w/3xV/Fy8e2Ousw
BnhIN2zZV8Vxo0mX2swwlQC/IBJJBABVYB6FJtWrVqxaNEiamrinZefffYZfr+f3NxcRowYQSgUYsqUKdH9tbW1vPbaawwbNozkZB2npyj7DFK70w5Tt9MRZ6Jopr2vJY7FIrUuYsnEdigQdxbs3NKJ7ZadCn4v/PEEWPUU/GkkJHvh5fnQ6zofof/ew4y8j+js68zXVV/TZ2UfHi99HMuynMF1ldTtRCA95mQgngymk+F0MsjNAtZhXGsyKlypP7EZN24cq1evZsSIEUybNo13332XcePG8corr3DllVfi8/k45JBDOPvss7n22mt55plneO+99zjnnHNYvXo1f/7zn+trKYqiNDZ8mGSBA3AERkYkeHAe4JIRtmPqtBRkSjwlg7quNltooinZQHYq3D8K/vc0nH8k1NTCg1PhwpsHcPWqpVyUcZlJkd5wDcPXDmdDxYb4gWtSFxRr5UjNjXSTlmw1SZ0ux2lougqTKi2TVPdj6rUR58yZM7n//vtZunQpgUCAAw88kNGjRzNmzBiSksy/jOrqam677TZefvllSktLKSgo4P777+eYY47Z5c/Rok5FaeLIA3wb5uEuLieZ+CkTPKWI1MI8sCVFOoQzLls6AvhxikRFvKQZpx1XWfgjXD8RPvzeHHZwOzj9rAU8mXMy28LbTBfpduM5Pet0p8uBuPKkU7SsJYix0HbcJ409ZYyDjGJohYlj7cMFodr1WVGUxonMvSnGWAIS9JcO1FWYh7kfp5O0x/4qA9Y8OK4qsXakk4FktZU7+6xaeOsLuPFFZ1z1kIIAwePG8VH6swBclH0Rj+U8RmZyppOWLfNxAjiWVpa9ZlmnTEWVnm/S+Vqaf6YAHXCy9fYxtOuzoiiNE3tGDR1wkglS7W0pmOw1CdzHzs4JYx7WqdSdtCkxnXScmh8hBC4/nHYYLHsMHrwQMlPgvW/8fPrIBAZ/9DXJgZY8X/o8BWsL+DD0oTkvghEZcfuJtSVxHBmp7cPpKA3xI8Cr7GO/x7jX9rPaHBUbRVEaHsk464xJKBA3lF1LE63UFytHOlOLS0omkUo/ttjx0NKRWiwOm2Qv3HAyLH8IrhgC4YiLeXMKSfvXOjp882eKan5k0MpB3LLxFmrKa+ITBmQqqoiPJAnIpNRSjHUmdUXyVWbpbAQWYxII9pO2Nyo2iqI0HmRiZqylk2G/0jBJBlLR78OJ90g2mlg6O3aYjp0wWktcanWLXBh/JXz5IBx1MBSXeVjz1p20fm4NrO3PfVvuo/+G/nzn+s5cT1x4IjJSjFpOvOUTO+KgDCM+Mq6gCiNKyzCp0vHVIPskKjaKojQ+pJlnJ6CN/V5iNuBYDWDcbVk4UzmlG4EE60XAfDHbJMtM6mfccMgB8P6d8PIV0DYXNq5ph/Xsx6RPm8LXJevos6oPj29+HKvCMudIrKjSXo9kqYVxRhjI58eO3xZ3XA1GZH4CFgEb2KetHBUbRVEaLxLT6YRph5OOE8/JxqRHu3FSpmX6p9TrSF1P7NA2K2Zf7BPQAlcVnDsEvn8cbjkVfB6o+PoMfP8sIvDp5Vyz/jqGbxnOhsgGZ5qp1NvIaO3Yxp4y00fGcAdx+q3J5FFp+LkM+DZm+z6Gio2iKI0fadgZW6cjhZchzANcUpCTcRp/igiJyMT2XtvRpVZDtI1Oegr87WxY8mcY3htqAqkw83GSJnzFuz+U0WtDL97Y8oa5llg41TjpzuC4ysI4saUgTj1OpX1MAGf8wQbgU0xH6X2sA4GKjaIoTYcknGminTDJAjk4fdcycFKMRWDACEkYp0+bpEhLppoMdIvtPh2Erq3hnTvgrRugYzMI/9Qbnv2E4rce4MzVY7loy0WU1ZQ51orU5QRxLB4ZZVBFvLBJQWgtTkJBOUaEFgOfY1LC9xFUbBRFaXp4cToSNMPJWEvCcWPZ/dKiY6lFiAR56Fs4vddiW+LUAGngSoJTCmDZ/XDrSeBNsuDry+Cf/+OF+V56/1jIh5EPn
c4F5ThuNbFY5ElbjeN6s+y1iujIaAOxiNYDH2Dca/vAXEkVG0VRmi4+jFutq/1VkgVS7O9TMUKSjDNjB5yhbX4coZHaHRncJvN0bNFKTYZ7zoQl97gYmg9UN4O3n+HHZ1/k6MXjuGXzLdRU1zjD12LHKoi1IwJXFfNVREYKW9cBmzCJAxuAj4F3MenSTRgVG0VRmjaxDT87YiwYERNpLZOEU+fitY+XZqCCFXNOMo4VFI756oNuLeHdW+CVq6BlhgVrB8LTX3LfG9kctvEYloWXmc8SYRNLRkYZhO21iNtsO0ZoNuPEccL2/oC9/wfgdeBDmux0UBUbRVH2DVyYeEwnnMy1NJzMNSkEzcHpIu3BKQaNtXRiu0yLRSI91pLA5YJzesP3f3Vx9XHgIgkW3MTiR1+m8JNbeazqMSKRSHz/tgBOMoNkqG3HiIsIYCpG5Lz2Vz/O+IU04CvgDWBFvf7k9goqNoqi7FvIPJ0DgNaYh7bUwVThWDASr7EHsEVrcaRGJzamI12mZR5PjTkvuzn88wL49E8uerULw/ZOhF6eyh+ebcHg1eeyIbzB6V4Qts+rwcy9kf5vfuo255QMtghOHzYvxjXoxbjWPqdJZayp2CiKsm/iwWSodcUUhvowgpKMsRRSMVaOFINKHzURB6nF8ePEdGILRVOIdg/o1xm++lMSD50Kyb5aWHoOHzz6FAd9+ABTgm840zwlPiOuup11gJansrTFkZiPfC+tfYqAL2gyyQMqNoqi7Nt4MWJzAMaVJg/sCE5SQADHkhF3XAaOpQNOoaiIhFzDtnI8Xvjj8fD9nR6O7R6AQA5Vb/+Ds55szqlr/kRZRVn8TByL+GLTn3sax/blj+0w4MeMaFhMk7BwVGwURdn3kQ7RHTGWThpO0WcORkCy7FcqTp0OxLvB0ojPbJOOBclEe7Z1yoQ5l/l5+TyL9LQq+PEYpv7rL3T8/GnmeT+Kt2Zi631ihSeW2HXIvdTaX32Y5IKVOzmvkaFioyjK/oO0vzkAY+3IbBwZyOYl3soRKygFZzYOOO42SZH24MR/guDywrl9XRTdkMophdsh7Kd03o0c+0I6l2x8nJpwjdPHLXZtsHMrR4pBJTVbjrMTFtgIbNmjn0zCUbFRFGX/w4uZmtkdUxwqHaHlazomvpOBEScZjCZWTg1GYFKIt3Sk67PdDLRZMrx1QhbvnFxLVnYJbCrkuWevpP38yXwZWWbWIhZJ7Jjr2CezCJKFk6QgRGKOX0+jnpGjYqMoyv5LMiZVuhNGYCIYV5nEZuThLVaObM/ACI0g+9yYWIrEg+zx1b/r6mH9mBzOOnwDWG42f3wJhz3p4drVU4iEIz9v5bDDdqgrOmLxVGNSqRspKjaKouzfyAiCrpiYTjLOxE2xXmIHtkmqsrjNQjiD2cSqAWecgF0zk+aD14a2Yc5F1WTnrcfa1pVHnz2dzjOnsiJstweQolNJv5YWOrHdD4jZL4jgbCJ+MmkjQsVGURQFzIO9JdAFkzINxsqxMIIj8ZsA0dhMNAVamntK0oDU5+w4XycCQw5M46er23La0d+DK8Kaz06j2yPV/HX5fKeehpjPi10fxLvV5L2kZUvBaCNExUZRFCWWVMx46oNwMtYkBVpiOjKsLTaeIzU4MqogNpYjbjV7hk6yB94c2p1ZY0pIb7maSMkB3Pn00fSYPpufQmXOWrzO22iqtSQpuHbyVXqwNUJUbBRFUXbEhUmJPggzojoJx00mHZ1juwzE1uBIfU5sBwKIH9Zmd30+/sA8tl7bid8N/gJcEb775Hja37uVf32/yHyWJCWIey62o0Cs1SNrlk7TjRAVG0VRlJ/Dh0mRzsd0HZAYjnz1YkQoG2cgG8R3IZCWNPK0lcC+3YEg2evinWGH8eY16/C3WEnttgO4+oleDPj3fMprapw4jhBr4eyYJu2m0WakqdgoiqL8GmmYWM6BOIkDPsyDP2YMQbTvWgRniFtsv7UQjlDJ09ee7nnaQZ3Yc
nMHjhj0AQCfzjuGl5Z/7zQMFYGR82K/SuPQHRMHGhE768yjKIqi7EgSpjYnC1iLGQkARmwkWUAKRL0Yd5a4viR248exUqyY4+1xBukuLwvOOJqnD13CtB+2cWXXY5webNKrTawZsXCsmGvaXQwaIyo2iqIou0MyxsLJBVZhalvE8qjFaWEjlo5ks0m2moiMZLLJU1isHj+M6dGLMQdhhEjGIHhwOh5Idlwy8SnS0uCzEaJioyiKsru4MGKTjrFyfrK3u3E6SNsdoeMaf0oNTGyGm4iPzLCRrDIPjpjIGAT5bG/MOZI0IFlxclwjQ8VGURTlt+LD9FlrhhlothUnK0zGQ1fYXyXOI5llEsMBx722Y1wHnKFuMmogjfgUaHGhieUjSQqNDBUbRVGUPcGFyUbrjWmIuZJ4t1YNzkwccbXJeTKVM7YWRzoYgOMqk0ahyTitamRyaErMtVNRsVEURdmn8WHa3WQB/8O41lLsVzVOlwEvjssLnOp/T8wxYYx4+InvSJ2ME7MRYZJzXZimojv2UmskqNgoiqLUJ9nAoZhJmt8BlRixENcaxGeoiWtNrBNxo6XixG98GOEJ4VhJ4LjOwBSTNtJ4DajYKIqi1D8enB5rXwPfY0QnF6ceJraXmoiPiEzsGGrpXCBC443ZLjU4Hkxft0aa9gxa1KkoipI4soCBwJEYoaigbvGlD6eZp7jFpP4m0z5GYjQiJiI0khyQZ+9vxKhloyiKkki8QE9MQehsoJx4C0eQ2IskAcT2W5PWNLHZbNKjLQ8Tq2nkqNgoiqLsDVoAZwKfYOI5MidHmnru2OPMF7PNFXOsJ+Zrnn3dJuCjUrFRFEXZW/iBwRixWYZpaSMZZ2LpxPY5k1odEZgknHhNG0xRaSPNPtsRFRtFUZS9TSegNbAOWI8ZyAZOVpo014xNkU7CWEO59iu2E3QTQMVGURSlIZAea52AMqAUE88JEd+FwI8RmUzikwSaGCo2iqIoDUkSZlBbDsZFJinPEJ8i3cRRsVEURWksSN3MPkgTyGFQFEVRmjoqNoqiKErCUbFRFEVREs4uic26dev4/e9/z4ABA0hNTcXlclFUVFTnuEAgwI033kjr1q1JSUlhwIABfPDBB3WOi0Qi3HvvvXTq1Am/309BQQFvvvnmHt+MoiiK0jjZJbFZsWIFr7/+Ojk5ORx11FE/e9xll13GhAkT+Mtf/sKMGTNo3bo1xx9/PIsWLYo77o477uDuu+9m3LhxzJw5k/79+3PmmWfyzjvv7NHNKIqiKI0UaxcIh8PR9xMmTLAAa/Xq1XHHLFq0yAKsiRMnRreFQiGra9eu1ogRI6LbNm3aZPl8PuvOO++MO//YY4+1evXqtSvLsQ499NBdOk5RFEXZe/zSs3mXLBu3+9cPmz59Ol6vl7PPPju6zePxcM455zB79myCwSAAs2fPpqamhlGjRsWdP2rUKJYsWcLq1at3XSkVRVGUJkG9JQgsXbqUzp07k5oaP70nPz+fmpoaVqxYET0uOTmZLl261DkOYNmyZfW1JEVRFKWRUG9iU1xcTE5OTp3tubm50f3yNTs7G5fL9YvHKYqiKPsOTaZWdfz48YwfPx6ALVu2NPBqFEVRlN2h3iybnJwcSkpK6mwXS0Usl5ycHEpLS7Es6xeP25HRo0ezcOFCFi5cSF5eXn0tW1EURdkL1JvY5Ofns3r1aqqqquK2L1u2DJ/PF43R5OfnEwwGWblyZZ3jAHr06FFfS1IURVEaCfUmNiNGjCAUCjFlypTottraWl577TWGDRtGcnIyAMOHD8fr9TJ58uS481966SV69uxJ586d62tJiqIoSiNhl2M2b7zxBgBffvklADNnziQvL4+8vDwGDRrEIYccwtlnn821115LKBSic+fOPPnkk6xevTpOWFq0aMH111/PvffeS0ZGBn369OG1115j7ty5TJ8+vZ5vT1EURWkU7GqxDmbCQp3XoEGDosdUVVVZ1113ndWyZUsrOTnZ6
tevnzVv3rw616qtrbX++te/Wh06dLB8Pp/Vq1cva8qUKfVSOKQoiqI0DL/0bHZZ1g6R+iZA3759WbhwYUMvQ1EURYnhl57N2vVZURRFSTgqNoqiKErCUbFRFEVREo6KjaIoipJwVGwURVGUhKNioyiKoiQcFRtFURQl4ajYKIqiKAlHxUZRFEVJOCo2iqIoSsJRsVEURVESjoqNoiiKknBUbBRFUZSEo2KjKIqiJBwVG0VRFCXhqNgoiqIoCUfFRlEURUk4KjaKoihKwlGxURRFURKOio2iKIqScFRsFEVRlISjYqMoiqIkHBUbRVEUJeGo2CiKoigJR8VGURRFSTgqNoqiKErCUbFRFEVREo6KjaIoipJwVGwURVGUhKNioyiKoiQcFRtFURQl4ajYKIqiKAlHxUZRFEVJOCo2iqIoSsJRsVEURVESjoqNoiiKknBUbBRFUZSEo2KjKIqiJBwVG0VRFCXhqNgoiqIoCUfFRlEURUk4KjaKoihKwlGxURRFURLOLonNunXr+P3vf8+AAQNITU3F5XJRVFQUd8zChQsZPXo03bt3JzU1lQ4dOnD++eezevXqOteLRCLce++9dOrUCb/fT0FBAW+++Wa93JCiKIrS+NglsVmxYgWvv/46OTk5HHXUUTs95tVXX2Xp0qVcc801zJw5k/vuu4+vvvqKvn37snbt2rhj77jjDu6++27GjRvHzJkz6d+/P2eeeSbvvPPOnt+RoiiK0viwdoFwOBx9P2HCBAuwVq9eHXfM5s2b65xXVFRkuVwu64477ohu27Rpk+Xz+aw777wz7thjjz3W6tWr164sxzr00EN36ThFURRl7/FLz+Zdsmzc7l8/LC8vr862jh07kpeXx/r166PbZs+eTU1NDaNGjYo7dtSoUSxZsmSnbjdFURSlaZPQBIHvvvuOzZs3c/DBB0e3LV26lOTkZLp06RJ3bH5+PgDLli1L5JIURVGUBiBhYlNbW8vYsWPJy8vjsssui24vLi4mOzsbl8sVd3xubm50v6IoirJv4UnUhceNG8fHH3/Mf/7zH3Jycvb4euPHj2f8+PEAbNmyZY+vpyiKouw9EmLZ3HzzzYwfP56JEycybNiwuH05OTmUlpZiWVbcdrFoxMLZkdGjR7Nw4UIWLly40/iQoiiK0nipd7G55557uP/++3nssce44IIL6uzPz88nGAyycuXKuO0Sq+nRo0d9L0lRFEVpYOpVbB577DFuv/127rnnHsaNG7fTY4YPH47X62Xy5Mlx21966SV69uxJ586d63NJiqIoSiNgl2M2b7zxBgBffvklADNnziQvL4+8vDwGDRrEq6++yrXXXsvw4cM59thj+fTTT6PnZmZmRi2WFi1acP3113PvvfeSkZFBnz59eO2115g7dy7Tp0+vz3tTFEVRGgm7LDZnnnlm3PdXXXUVAIMGDWL+/PnMmjULy7KYNWsWs2bNijtWjhHuuece0tPTefTRR/npp5/o1q0br7/+OieddNIe3IqiKIrSWHFZO0bqmwB9+/Zl4cKFDb0MRVEUJYZfejZr12dFURQl4ajYKIqiKAlHxUZRFEVJOCo2iqIoSsJRsVEURVESjoqNoiiKknBUbBRFUZSEo2KjKIqiJBwVG0VRFCXhqNgoiqIoCUfFRlEURUk4KjaKoihKwlGxURRFURKOio2iKIqScJrkiIHmzZuTlpZGXl5eQy8lYWzZsmWfvj/Y9+9R769po/e3+xQVFbF169ad7muSYgP7/kybff3+YN+/R72/po3eX/2ibjRFURQl4ajYKIqiKAmnyYrN6NGjG3oJCWVfvz/Y9+9R769po/dXvzTZmI2iKIrSdGiylo2iKIrSdGhSYrN27VrOOOMMsrKyyMzM5LTTTmPNmjUNvazd5o033uD000+nY8eOpKSk0K1bN2655RbKy8vjjispKeHyyy+PpnoPHTqUJUuWNNCq94zhw4fjcrm4/fbb47Y39Xt85513OProo0lPT
yczM5O+ffsyd+7c6P6mfH8LFixg2LBhtGjRgoyMDPr06cPEiRPjjgkEAtx44420bt2alJQUBgwYwAcffNBAK/551q1bx+9//3sGDBhAamoqLpeLoqKiOsft6v1EIhHuvfdeOnXqhN/vp6CggDfffHMv3MnO2ZX7W7hwIaNHj6Z79+6kpqbSoUMHzj//fFavXl3negm5P6uJUFlZaXXp0sXKz8+33nrrLWvq1KlWz549rQMOOMCqqKho6OXtFocffrh15plnWi+99JI1f/5865FHHrGysrKsww8/3AqHw5ZlWVYkErEGDhxotW3b1nr55ZetmTNnWkcffbTVrFkza+3atQ18B7vHyy+/bLVq1coCrNtuuy26vanf41NPPWV5PB7r2muvtd59911r1qxZ1n333We9/fbblmU17fv75ptvLL/fbx1zzDHW1KlTrXfffdcaPXq0BVj/+te/osedd955VlZWljV+/Hhrzpw51qmnnmr5/X7r66+/brjF74R58+ZZLVq0sH73u99Zw4YNswBr9erVdY7b1fu59dZbLZ/PZz344IPW3LlzrdGjR1sul8v6z3/+s3duaAd25f7++Mc/WkcccYT1xBNPWPPnz7cmT55sde/e3crNzbXWrFkTd2wi7q/JiM0//vEPy+12W8uXL49uW7VqlZWUlGQ9/PDDDbiy3Wfz5s11tj3//PMWYL333nuWZVnW1KlTLcCaO3du9JjS0lIrJyfH+v3vf7/X1rqnFBcXWy1btrRefvnlOmLTlO9x9erVlt/vtx555JGfPaYp398tt9xieb1eq7y8PG57//79rf79+1uWZVmLFi2yAGvixInR/aFQyOratas1YsSIvbreX0N+ibMsy5owYcJOH8a7ej+bNm2yfD6fdeedd8adf+yxx1q9evVKzA38Crtyfzt77hQVFVkul8u64447otsSdX9Nxo02ffp0+vfvT5cuXaLbOnfuzMCBA5k2bVoDrmz32VnV7mGHHQbA+vXrAXO/bdq0YfDgwdFjsrKyGDFiRJO635tuuomePXty7rnn1tnXlO9x4sSJuN1uxo4d+7PHNOX7q6mpwev1kpKSErc9KyuLSCQCmPvzer2cffbZ0f0ej4dzzjmH2bNnEwwG9+qafwm3+9cfdbt6P7Nnz6ampoZRo0bFnT9q1CiWLFmyU7dUotmV+9vZc6djx47k5eVFnzuQuPtrMmKzdOlSevbsWWd7fn4+y5Yta4AV1S/vv/8+AAcffDDwy/e7Zs0aKioq9ur6fgsfffQRL7zwAk888cRO9zfle/zoo4/o3r07r776KgceeCAej4cuXbrE3WtTvr+LL74YgGuuuYYNGzZQWlrKhAkTeO+997juuusAc3+dO3cmNTU17tz8/HxqampYsWLF3l72HrGr97N06VKSk5PjfvGV44Am9Tz67rvv2Lx5c/S5A4m7vyYjNsXFxeTk5NTZnpubS0lJSQOsqP5Yv349d955J0OHDqVv377AL98v0OjvuaamhjFjxnDDDTfQrVu3nR7TlO9xw4YNLF++nBtvvJGbb76Zd999l+OOO45x48bx6KOPAk37/nr27Mn8+fOZNm0abdu2JScnh6uvvpqnnnqKc845B/j1+ysuLt6ra95TdvV+iouLyc7OxuVy/eJxjZ3a2lrGjh1LXl4el112WXR7ou7P89uXqtQHFRUVnHzyyXg8HiZNmtTQy6k3HnjgAaqrq7ntttsaeikJIRKJUF5eznPPPcdpp50GwLHHHktRURH33nsv11xzTQOvcM9Yvnw5p59+Ovn5+Tz11FOkpKQwbdo0xo4di9/v5/zzz2/oJSp7yLhx4/j444/5z3/+s1ORrW+ajNjk5OTs9DfBn/ttpClQXV3NiBEjWLVqFe+//z7t2rWL7vul+5X9jZU1a9Zwzz338MwzzxAMBuN898FgkNLSUjIyMpr0PTZr1ozly5dz3HHHxW0fNmwYs2bNYuPGjU36/m699Va8Xi8zZszA6
/UCMGTIELZt28Yf/vAHzj33XHJycvjxxx/rnCv3J78JNxV29X5ycnIoLS3Fsqy43/6b0n3ffPPNjB8/nueff55hw4bF7UvU/TUZN1p+fj5Lly6ts33ZsmX06NGjAVa0Z4RCIc444wwWLlzIO++8Q69eveL2/9L9dujQgfT09L211N1m1apVBAIBRo0aRU5OTvQF8NBDD5GTk8OSJUua9D2K//rncLvdTfr+lixZQkFBQVRohH79+rFt2zY2b95Mfn4+q1evpqqqKu6YZcuW4fP56vj8Gzu7ej/5+fkEg0FWrlxZ5zig0T+P7rnnHu6//34ee+wxLrjggjr7E3Z/vzmPbS/zyCOPWElJSdbKlSuj21avXm15PB7roYceasCV7T7hcNg688wzLb/fb82ZM2enx7z11lsWYM2fPz+6bfv27VZubq41bty4vbXU30RJSYk1b968Oi/AGjVqlDVv3jyrvLy8Sd/jjBkzLMCaMmVK3PZhw4ZZ7dq1syyraf8dDho0yOrcubMVDAbjtp977rmW3++3gsGg9dVXX1mA9dxzz0X3h0Ihq3v37tZJJ520t5e8y/xcavCu3s+mTZssr9dr3X333XHnDxkyxOrZs2dC174r/Nz9WZZlPfrooxZg3XPPPT97fqLur8mITUVFhXXggQdaPXv2tKZOnWpNmzbN6t27t9W5c+c6tQCNnbFjx0ZrTj755JO4lxT7hcNha8CAAVa7du2sV155xZo1a5Y1aNAgKycnp04BVlOBHepsmvI9RiIRa/DgwVZubq715JNPWrNnz7Yuv/xyC7AmTZpkWVbTvr8pU6ZYgDVs2DBr6tSp1uzZs62rr77aAqzrrrsuetzZZ59tZWdnWxMmTLDmzJljnX766VZycrL15ZdfNuDqd86UKVOsKVOmRP///etf/7KmTJkS98vArt7PTTfdZCUnJ1sPP/ywNW/ePGvs2LGWy+WKFvQ2BL92f6+88orlcrms4cOH13nuLF26NO5aibi/JiM2lmVZP/74o3XaaadZGRkZVnp6unXyySfvVL0bOx07drSAnb7uuuuu6HHbtm2zLrnkEisnJ8dKSUmxjj32WGvRokUNt/A9ZEexsaymfY/bt2+3rrrqKqtFixaW1+u1evXqZU2ePDnumKZ8f++88441aNAgq3nz5lZ6erpVUFBgPfHEE1ZtbW30mKqqKuu6666zWrZsaSUnJ1v9+vWz5s2b13CL/gV+7v/coEGDosfs6v3U1tZaf/3rX60OHTpYPp/P6tWrVx0rd2/za/d30UUX7dLPwLISc3/a9VlRFEVJOE0mQUBRFEVpuqjYKIqiKAlHxUZRFEVJOCo2iqIoSsJRsVEURVESjoqNoiiKknBUbBRFUZSEo2KjKIqiJBwVG0VRFCXh/D+pI9FtL0h0pwAAAABJRU5ErkJggg==", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAZsAAAGQCAYAAAB4X807AAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8vihELAAAACXBIWXMAAAsTAAALEwEAmpwYAACB4klEQVR4nO2dd3hUZdqH75lk0gsJvRNBQUMTUQEXkSJFBRVEQECxISIWXFHWrrsuWHZtawM/UBQEQQVE2iJgwbagCIKFFnongfRkZs73x3ueORODFMmQ9tzXNVcmp75H4fx4usuyLAtFURRFCSHu0l6AoiiKUvFRsVEURVFCjoqNoiiKEnJUbBRFUZSQo2KjKIqihBwVG0VRFCXklJrYbN++nWuuuYbExEQSEhLo27cv27ZtK63lKIqiKCHEVRp1Njk5ObRq1YrIyEj+8Y9/4HK5ePjhh8nJyWHNmjXExsae7iUpiqIoISS8NG46ceJENm/ezK+//kqTJk0AaNmyJWeeeSZvvPEG9957b2ksS1EURQkRpWLZdO3alby8PFasWFFke6dOnQD47LPPTveSFEVRlBBSKpbNunXruPLKK4ttT01NZebMmcc9v1q1ajRq1CgEK1MURVH+LGlpaRw4cOCo+0pFbA4dOkRSUlKx7cnJyaSnpx/3/EaNGrFy5cpQLE1RFEX5k7Rt2/YP95WK2PwZJkyYwIQJEwDYv39/Ka9GURRFORlKJfU5KSnpqBbMH1k8AMOHD2flypWsXLmS6tWrh3qJiqIoSglSKmKTmprKunXrim1fv34955xzTimsSFEURQklpeJG69OnD/fddx+bN2/mjDPOAExgacWKFYwfP740lqQoyp+gsLCQHTt2kJeXV9pLUU4jUVFR1KtXD4/Hc8LnlErqc3Z2Nq1atSI6OjpQ1PnII4+QmZnJmjVriIuLO+b5bdu21QQBRSkDbNmyhfj4eKpWrYrL5Srt5SinAcuyOHjwIJmZmaSkpBTZd6x3c6m40WJjY1m6dClnnXUWQ4cOZfDgwaSkpLB06dLjCo2iKGWHvLw8FZpKhsvlomrVqidtzZZaNlqDBg344IMPSuv2iqKUECo0lY8/8/9cuz4riqIoIafc1NkoilL2qfVcLfZm7y2x69WMrcme+/Yc85gOHTrw1VdfHfOYW265hXvvvZdzzjmHf/7znzz44IMndX5cXBxZWVlFtmVkZDBt2jRGjhx5nKcozmWXXca0adOoUqXKSZ23fPlyIiIi6NChw0nfs7RRy0ZRlBKjJIXmRK93PKEAePPNNwNlFf/85z9P+vyjkZGRwauvvnrUfV6v95jnzp8//6SFBozYnOx6j7eW04WKjaIo5RpJKlq+fDmXXHIJ11xzDc2aNWPw4MFIsu0ll1zCypUrGTt2LLm5ubRu3ZrBgwcXOT8rK4uuXbvSpk0bWrRowZw5c45537Fjx7Jp0yZat27NmDFjWL58OR07dqRPnz4BYbvqqqs477zzSE1NDXRAAdNyS3qIvfvuu1xwwQW0bt2a2267DZ/PB8DChQtp06YNrVq1omvXrqSlpfH666/z/PPP07p1a7744gvS0tLo0qULLVu2pGvXroGZYMOGDWPEiBFceOGF3H///Zx55pmBzit+v58mTZqc9k4s6kZTFKXC8MMPP7Bu3Trq1KnDRRddxIoVK/jLX/4S2D9+/Hj+85//sHr16mLnRkVF8dFHH5GQkMCBAwdo164dffr0+cNg+Pjx4/npp58C11q+fDnff/89P/30UyAleNKkSSQnJ5Obm8v5559Pv379qFq1auAaP//8MzNmzGDFihV4PB5GjhzJ1KlT6dWrF7feeiuff/45KSkpHDp0iOTkZEaMGEFcXBz33XcfAL179+aGG27ghhtuYNKkSdx111
3Mnj0bgB07dvDVV18RFhZGYmIiU6dO5Z577mHJkiW0atXqtHdiUctGUZQKwwUXXEC9evVwu920bt2atLS0Ez7XsiwefPBBWrZsSbdu3di5cyd7956cW/CCCy4oUnvy0ksv0apVK9q1a8f27dvZsGFDkeM//fRTVq1axfnnn0/r1q359NNP2bx5M9988w0XX3xx4FrJyclHvd/XX3/NddddB8DQoUP58ssvA/v69+9PWFgYADfddBNTpkwBjADeeOONJ/VcJYFaNoqiVBgiIyMD38PCwk4qXjF16lT279/PqlWr8Hg8NGrU6KRrSYKnDC9fvpwlS5bw9ddfExMTwyWXXFLsepZlccMNNzBu3Lgi2z/++OOTuu/x1lK/fn1q1qzJ0qVL+e6775g6deopX/9kUctGUZRKhcfjobCwsNj2w4cPU6NGDTweD8uWLWPr1q3HvE58fDyZmZl/uP/w4cMkJSURExPDL7/8wjfffFPsmK5duzJr1iz27dsHmGbEW7dupV27dnz++eds2bIlsP1o9+zQoQPTp08HjFh27NjxD9dzyy23MGTIkCIWz+lExUZRlBKjZmzNMn09MB3kW7ZsGUgQEAYPHszKlStp0aIFU6ZMoVmzZse8TtWqVbnoooto3rw5Y8aMKba/Z8+eeL1ezj77bMaOHUu7du2K7He5XJxzzjn84x//oHv37rRs2ZJLL72U3bt3U716dSZMmEDfvn1p1aoVAwYMAEyM5qOPPgokCLz88stMnjyZli1b8s477/Diiy/+4Xr79OlDVlZWqbjQoJR6o50q2htNUcoGP//8M2effXZpL6Nc4fP5qFGjBnv27DmpRpanysqVKxk9ejRffPFFiVzvaP/vj/Vu1piNoijKaSQ1NZVbbrnltArN+PHjee2110olViOo2CiKopxGfvnll9N+z7FjxzJ27NjTft9gNGajKIqihBwVG0VRFCXkqNgoiqIoIUfFRlEURQk5KjaKopQYtWqBy1Vyn1q1SnZ90pDzWMyePZv169cHfn/00UdZsmTJKd979erVzJ8//6TP27VrF9dcc82fuudbb73Frl27/tS5JY2KjaIoJcZJthI77dc7EX4vNk8++STdunU75eseS2yO1VanTp06zJo160/d88+IjXSdLmlUbBRFKbdkZ2dz+eWX06pVK5o3b86MGTMA0+Dy3HPPpUWLFtx0003k5+cXO1dGCwDMmjWLYcOG8dVXXzF37lzGjBlD69at2bRpE8OGDQu87P/ouo0aNeKxxx4LjCf4fXpzQUEBjz76KDNmzKB169bMmDGDxx9/nKFDh3LRRRcxdOhQ0tLS6NixI23atKFNmzaBuTVpaWk0b94cMEIwZswYzj//fFq2bMkbb7wRuMfTTz9NixYtaNWqFWPHjmXWrFmsXLmSwYMH07p1a3Jzc4+5/gceeIA2bdowfvx42rRpE7juhg0bivz+Z1GxURSl3LJw4ULq1KnDjz/+yE8//UTPnj3Jy8tj2LBhzJgxg7Vr1+L1ennttddO6HodOnSgT58+PPvss6xevZrGjRsH9h3vutWqVeP777/n9ttv57nnnity3YiICJ588kkGDBjA6tWrA+1n1q9fz5IlS3jvvfeoUaMG//3vf/n++++ZMWMGd911V7H1/d///R+JiYn873//43//+x8TJ05ky5YtLFiwgDlz5vDtt9/y448/cv/993PNNdfQtm1bpk6dyurVq3G5XMdcf9WqVfn+++956KGHSExMDIxOmDx5com0uFGxURSl3NKiRQv++9//8sADD/DFF1+QmJjIr7/+SkpKCmeddRYAN9xwA59//vkp3+t41+3bty8A55133gmPNujTpw/R0dEAFBYWcuutt9KiRQv69+9fxJUnLF68mClTptC6dWsuvPBCDh48yIYNG1iyZAk33ngjMTExwNFHEhxv/SKAYJp2Tp48GZ/Px4wZMwJjDE4F7SCgKEq55ayzzuL7779n/vz5PPzww3Tt2p
Urr7zyhM4NHop2sqMEjoaMNziZ0QbBYwCef/55atasyY8//ojf7ycqKqrY8ZZl8fLLL9OjR48i2xctWnQKKy++ln79+vHEE0/QpUsXzjvvvCID3/4satkoilJu2bVrFzExMQwZMoQxY8bw/fff07RpU9LS0ti4cSMA77zzDp06dSp2bs2aNfn555/x+/189NFHge1/NDrgRK/7R5zISILatWvjdrt55513jhqo79GjB6+99lpgRMJvv/1GdnY2l156KZMnTyYnJwc4+kiCk1l/VFQUPXr04Pbbby+xLtEqNoqilBg1S3giwPGut3btWi644AJat27NE088wcMPP0xUVBSTJ0+mf//+tGjRArfbzYgRI4qdO378eK644go6dOhA7dq1A9sHDhzIs88+y7nnnsumTZsC20/0un9E586dWb9+fSBB4PeMHDmSt99+m1atWvHLL78UsTTECrvllls455xzaNOmDc2bN+e2227D6/XSs2dP+vTpQ9u2bWndunUgZjRs2DBGjBhB69atsSzrpNY/ePBg3G433bt3P+FnPBY6YkBRlD+NjhgIPatWreLee+/ls88+O633fe655zh8+DB///vfj7pfRwwoiqJUEFauXMl1113H+PHjT+t9r776ajZt2sTSpUtL7JoqNoqiKGWUtm3b8ttvv532+wbHsEoKjdkoiqIoIUfFRlEURQk5KjaKoihKyFGxURRFUUKOio2iKCVHLcBVgp8SHjEgLF++nCuuuAKAuXPnnvZsL4Bx48bRpEkTmjZtetwOAHfddVeRxqHlEc1GUxSl5CjpkQAncT3LsrAsC7f75P4N3adPH/r06XOSCzs11q9fz/Tp01m3bh27du2iW7du/Pbbb4SFhRU7duXKlaSnp5/W9YUCtWwURSm3pKWl0bRpU66//nqaN2/O9u3buf3222nbti2pqak89thjgWMXLlxIs2bNaNOmDR9++GFg+1tvvcWoUaMAiowTAGcMwe7du7n44otp3bo1zZs354svvjildc+ZM4eBAwcSGRlJSkoKTZo04bvvvit2nIwUeOaZZ07pfmUBtWwURSnXbNiwgbfffpt27doB8NRTT5GcnIzP56Nr166sWbOGs846i1tvvZWlS5fSpEmTIh2OT4Rp06bRo0cPHnroIXw+X6AHWTCjR49m2bJlxbYPHDiQsWPHFtm2c+fOwHoB6tWrx86dO4ud+5///Ic+ffoUaadTXlGxURSlXNOwYcMiL+7333+fCRMm4PV62b17N+vXr8fv95OSksKZZ54JwJAhQ5gwYcIJ3+P888/npptuorCwkKuuuorWrVsXO+b5558/5WcJZteuXcycOZPly5eX6HVLC3WjKYpSrgluWLllyxaee+45Pv30U9asWcPll19+UuMDwsPD8fv9APj9fgoKCgC4+OKL+fzzz6lbty7Dhg1jypQpxc4dPXo0rVu3LvY5WvJB3bp12b59e+D3HTt2ULdu3SLH/PDDD2zcuJEmTZrQqFEjcnJyaNKkyQk/S1lDLRtFUSoMR44cITY2lsTERPbu3cuCBQu45JJLaNasGWlpaWzatInGjRvz3nvvHfX8Ro0asWrVKq699lrmzp0baOW/detW6tWrx6233kp+fj7ff/89119/fZFzT8ay6dOnD9dddx333nsvu3btYsOGDVxwwQVFjrn88svZs2dP4Pe4uLjAeIDyiIqNoiglR01KNiPtJEcWtGrVinPPPZdmzZpRv359LrroIsCMB5gwYQKXX345MTExdOzY8aizZW699VauvPJKWrVqRc+ePQNW0/Lly3n22WfxeDzExcUd1bI5GVJTU7n22ms555xzCA8P55VXXglkol122WW8+eab1KlT55TuUdbQEQOKovxpdMRA5eVkRwxozEZRFEUJOSo2iqIoSshRsVEU5ZQoh5545RT5M//PS1RsZs2aRb9+/WjYsCHR0dE0bdqUv/3tb8UCcenp6dxyyy1Uq1aN2NhYunXrxt
q1a0tyKYqinAaioqI4ePCgCk4lwrIsDh48SFRU1EmdV6LZaM899xwNGjTgn//8J/Xq1eOHH37g8ccfZ9myZXz11Ve43W4sy6J3796kpaXx8ssvk5SUxLhx4+jcuTOrV6+mXr16JbkkRVFCSL169dixYwf79+8v7aUop5GoqKiTfleXqNh8/PHHVK9ePfB7p06dSE5O5oYbbmD58uV06dKFuXPnsmLFCpYuXUrnzp0BaN++PSkpKTzzzDO89NJLJbkkRVFCiMfjISUlpbSXoZQDStSNFiw0wvnnnw8Q6Pszd+5c6tSpExAagMTERHr37s2cOXNKcjmKoihKGSHkCQKfffYZQCAfe926dTRv3rzYcampqWzbto2srKxQL0lRFEU5zYRUbHbu3Mmjjz5Kt27daNu2LQCHDh0iKSmp2LHJyckAFWJug6IoilKUkLWrycrK4sorryQ8PJzJkyef8vUmTJgQ6NKqwUhFUZTyRUgsm9zcXHr37s3mzZtZtGhRkayFpKSko1ovhw4dCuw/GsOHD2flypWsXLnyqLEhRVEUpexS4mJTWFjINddcw8qVK5k/fz4tWrQosj81NZV169YVO2/9+vU0aNCg3M/ZVhRFUYpTomLj9/sZPHgwS5cuZfbs2UUGGgl9+vRh586dgcQBMG3BP/7449M+B1xRFEU5PZRozOaOO+5g5syZPPTQQ8TGxvLNN98E9tWrV4969erRp08f2rdvz5AhQ3j22WcDRZ2WZXH//feX5HIURVGUMkKJjhho1KgRW7duPeq+xx57jMcffxww8Zn77ruP2bNnk5eXR/v27fn3v/9Nq1atTug+OmJAqchYloVlWfj9/jLZBsblchEWFobL5SrtpShljGO9m3WejaKUMXw+H+vWreOHH344qZHGp4tatWpx4YUXUqtWrdJeilLGONa7WSd1KkoZw+v18u233/LSSy8FsjTLEhdccAF16tRRsVFOChUbRSmDZGdns2fPHg4cOFDaSynGgQMHKCwsLO1lKOUMnWejKIqihBwVG0VRFCXkqNgoiqIoIUfFRlEURQk5KjaKoihKyFGxURRFUUKOio2iKIoSclRsFEVRlJCjYqMoiqKEHBUbRVEUJeRouxqlUiKdlctiH1qfz4ff7y/tZShKiaJio1RKsrKyWL16NWlpaWVOcLxeL6tWrSI/P7+0l6IoJYaKjVIpSU9P54MPPmDu3Lll0oo4cuQI2dnZpb0MRSkxVGyUSklhYSH79+8vk5aNolRENEFAURRFCTkqNoqiKErIUbFRFEVRQo6KjaIoihJyVGwURVGUkKNioyiKooQcFRtFURQl5KjYKIqiKCFHizorOOnp6aSlpZGVlVXaSylGZGQkDRo0oGbNmrhcrtJejqIoIUTFpoKzYcMGXn/9dX755ZfSXkoxatSowc0338xll11GWFhYaS9HUZQQomJTwTl8+DBr1qxh1apVpb2UYtStW5crrrhC28UoSiVAYzaKoihKyFGxURRFUUKOio2iKIoSclRsFEVRlJCjYqMoiqKEHBUbRVEUJeRo6rOiKEpJ4wOCp42HUen/aa9ioyiKcqpYQBaQaX8Kgva5gQggCogB4u3fKxkqNoqiKH8WH3AI2IcRGem65A766caITybGwvEAiUAyEH06F1u6qNgoiqL8GXKAjRixcWHepsHNMFz2J1h4hP3AEaA6kIQRoQqOio2iKMrJshdYD3hxRMSHIzYu+7v8jMBYNIX2OR4gD9iFcb/VtbdVYFRsFKWM4XK5qFGjBnXq1MHjKXtvoLPPPpu4uLjSXkbpsQ1YjXl7huEkA/hxxMZt7wu3P16M0LgwcZt8nKSBTPuaDajQgqNioyhljPDwcC666CIGDx5MYmJiaS+nGFWqVKFBgwalvYzSYTvwDRCJERlf0D6Jz4j7zI+xXiyMsETZ+7Ps88GITiSQDezECE4FzVpTsVGUMobL5aJ+/fp07NiR6tWrl/ZyFCED+ALz1vRhrJVga0
aQJIEwjJCE28dl29+jMQkDhThWTiQmhpMOVA3hM5QiFVRDFUVRSpACYAmQixEXL45VE5wIIG6zMByBycSJ0/jt34Uce1u+fb29GGuoAqKWjaIoyrHwA6uAHUBNHKEJTgAg6LvEayQpwMIIiBfjSvNgXGmx9rFi2Vj290NAndA/1ulGLRtFUZRjsRX4EePyKsAIjQiK/PRg/unusbd5MYKSjRErsWqy7PMjMFYNOO44SSA4bH+vYIRUbHr27InL5eLhhx8usj09PZ1bbrmFatWqERsbS7du3Vi7dm0ol6IoinLypGOsmhyMYPgwVooIiXwy7WPyMRZKOEZQpLNAPo4wZWMEJtw+JwzjnvPb1y+wj6lghExs3nvvPX788cdi2y3Lonfv3ixcuJCXX36ZDz74gMLCQjp37syOHTtCtRxFUZSTIw9j0WTguLi8GOtDLJngj4UjFLkY4RCrJw8jLC57W459vFxXstd8GKtGrJ4KREjEJj09ndGjR/Pvf/+72L65c+eyYsUK3nnnHQYNGkTPnj2ZO3cufr+fZ555JhTLURRFOTl8wK+YSn8/RiBcGCEQ4cjBicWIa02Exxe0H4yV48cRHLe9LxwjUC6KuuiyKZ7lVs4Jidg88MADNG/enEGDBhXbN3fuXOrUqUPnzp0D2xITE+nduzdz5swJxXIURVFOHAtTZLnD/i6NNcVaCcZnb8/BuMsKCdTVfLp3O83/bxnZGYVGaESE8sx+vDhCI+dJ4oG44yoQJS42X375JVOmTOGVV1456v5169bRvHnzYttTU1PZtm0bWVlZJb0kRVGUEycd2IIRAxEGibcEE5wcIIkBeUA2LD+wlh4veln3VWcun7PCSXH24NTYhON0EijAcat57eur2PwxBQUF3Hbbbdx33300bdr0qMccOnSIpKSkYtuTk5MB44I7GhMmTKBt27a0bduW/fv3l9yiFUVRhGyM+0xe/lIDI26z3KDv2RhxEWvEDXjg04LP6fbGIXyHUoiv8xvv9Wljri01OuE47rVga0a+H61QtAJQomLzzDPPkJuby0MPPVSSlwVg+PDhrFy5kpUrV2pVtaIoJY8X2Iyp5BeBkayx4D5n8nFT1I1WAB/lfUSPD9bg29KJqLgMfritIbWjEpwizwL7PAtj3cj4AUFEpwIOWyuxos5t27bx1FNP8eabb5Kfn09+fn5gX35+PhkZGcTHx5OUlHRU6+XQoUMAR7V6FEVRQoqFidHswynWlKQACd6HU7RbgBwD4IcJORMY8b9vsL6dhDvMy+Jb42mcGGasmCgcd1l40PdYjGBF4nSQluJPaXtTQSgxsdm8eTN5eXkMGTKk2L7nnnuO5557jh9++IHU1FQWL15c7Jj169fToEGDyt1NVjltREZG0qBBA1q2bIlllS2fRUREBHXr1iU8XBt8nDYOYcQmuCAzH2ckgATy5QOBIk7LZfEP7z94dOscmPclAK/1C6NjY/vAQvsTgSNc0fY2cZnJR7pHJ4T0aUuFEvvT3Lp1a5YtW1Zse+fOnRkyZAg333wzTZo0oU+fPkyePJnPPvuMTp06AXDkyBE+/vhjrrvuupJajqIck+TkZAYMGEDHjh3LnNi43W4aNmxITExMaS+lcpCDcZ9JfUshThNNC2NlyBhneWPagXx/oZ+7XHfxStYMmLEKfFHcdiEMP9fluOAkEUAsmkKcrs/+oOtJ3CccqID/5i4xsalSpQqXXHLJUfc1bNgwsK9Pnz60b9+eIUOG8Oyzz5KUlMS4ceOwLIv777+/pJajKMckJiaGVq1a0apVq9Jeyh/iclUwP0pZxIcRGmmOGYFjiXhxrBsPJjYT5ErLd+dzg/8GZnhn4Zr5X6wjDWjfEF68GiczLRon/pKPMwY62GUmXaFFiGpiBK6CcdrtdLfbzbx587jvvvsYOXIkeXl5tG/fnmXLllG/fv3TvRylEqMv80qOhZkhcwQjJtKTLIeiA9EkUC8dBCIgMzyTvq6+LLGWEL
H03xSkdaZmLMwcCJHBb1URGBESERUfRcdIy08/UC8kT1vqhFxsjuaiSE5OZtKkSUyaNCnUt1cURTk6hzBjmb04vcnEsgnDcX1JN2e7keZ+/34u81/GSvdKEtffyOEVowlzw/sDoa7MqonAsVyC4zUyakCKQ4PFJgdoBpS9eXklgkYgKwFutxu3u+zlUbrdbrUulNIhD9MlQFKR5a+HFHKK20ssEjcQA2kFaXQP684G9wbqHehC+uw3AXimB1ycYl9Dmm4GZ51J0acIjyQGuHEmeiYCTUL4zKWMik0Fp3bt2vTq1YtmzZqV9lKKkZycTOPGjVVwlNOLFzM2QOpbsjAve2kRI0F7mcZpF2CuzV9LD08PdrOblvntKJixkB0Fbq5pBqPPC7q+WEXBIiZuM2lLI403xdqxgPOpkLEaQcWmgtO4cWNuu+02CgoKjn/waSYsLIwqVaqUSatLqaBYwB5MS5pCjPvMrvwvEsTPw8yvyQXi4MsjX9Lb25sMMrjY34k6Hy9m+n4PTavCpD7gktEAv3efReIUbgZnsomoifvsUqCC16qr2FRwoqOjiY6OPv6BilIZOIIRGxGGMJzRAGDERmIqEUA4zM2Zy4DcAeSRx9VhV9Nt1Qzu+MlDjAc+6APx0l1AYj3Bvwe7z36fDJCFEaUuwJmhfOiygYqNoiiVgzwgzf7uxrzsMzGurBycehgZGZAOk7yTGH5kOD58DI8czvADr3LRAtM24PXLIbUuztgBF066s3QY8AV9l95n2fY5UUBHoEVoH7usoGKjKErFx4/JPMvFiEk2zhhnmSnjwbSPyQcr2uKZI88wNmssAA97HuavEU9y/hQX+V64tR0MvQDHVSYNO/Nx2s9I/7Mo+6cPIzqJQBWMNdOKCtcD7Y9QsVEUpeJz0P4UYITAQ3HLw46r+PP83HfgPp7Peh4XLl5KfIk7wkcxYCJs3A+t6sKLV9nXicAIWIz9EbGRN6sPIzaRGJHxYCyfasC59u+VBBUbRVEqNlmYNGdJRc7BxG7C7H0eArU2hWGF3LT/Jt7NeRcPHqZUncLAsIG8tgJmroG4SHh/KERXwZk7IyFRaWuTjxGXcPsYaT0Ta98zHmhj/16JULFRFKXiUohxn/kwIpCFIzoRGGvEtj6yC7Ppt68fi3IWEeuK5aP4j7g05lJWb4XRM8zlJgyFs2rZ14nBCIbEbCwCSQW4cZptxmHcbNH2vlSgEja3V7Gp4GRnZ7N//37y8vKOf3CIcLlcJCYmUrVqVTyeSuQ3UEoXCzMyQOpoInBiNFLaFQFkw0HrIJfvvZxvc7+lWlg15teez/ne88nKgAFvQL4Xhv8FBp2HM23Tj9PjTEYCRNvbJW4TjpP+7AEaA3VOw7OXQVRsKjgbN25k6tSpbN68udTW4Ha76dq1K/379w9MZFWUkHMEY9XI6GYZdObCJAjYFfzbMrfRY28Pfin8hYbhDVlcczFnRZ8FFtzxFvy2F1rUhRcGY8RJxg2IdSTXlkJO+V0sG0mxrg40otIkBPweFZsKzr59+1i2bBkrV64stTWEh4eTlJRE7969S20NSiUjD9NkUwoqD+PEUMR9lgvrfevpsacHO7w7aO5pzsKaC6lLXciHKd/BlG8gJgJm3AzRUvkfb1+zACNacTjtacBpsCmutEiMu+0sKlVCwO9RsakElJV5LWVlHUoFx49xn4kYiAUSjtP3zANfH/yaK3ZcwSHfIf4S/RfmVptLUmQSeGFDGoycbC738g1wdmP7XLGIIu1PIU4KtXSIttOnA90EIjFpzpUsIeD3qNgoilKxyAD22t+jcHqcSSylEOZnzeeabdeQa+XSO743MxrMIDo3GvxQ4IJBb0N2Pgw8D268CGegmXSIDi7UDO4OLRlqkhbtwowMqIQJAb+nknoPFUWpkORi4jTirsrEJAhIkoAF7+x9hz6b+5Br5XJjzI18mPwh0d5oI0wWPDQdVm2BRtXh9WHgyqFo/Uy0ff14jABF2duCf5
eMtNpAXZyEhEqMio2iKBUDH6bvmbSEkVY0YmV44F/p/+L6vdfjw8cD1R/g/xr+H+FWuBGiHFjyMzy3AMLc8N4dkJiMcX+5KNrjLBcnI03uLcPWIjExoTigAeo/stH/DIqiVAzS7Y+kOYMRBMvEC8emjeWZPc8A8O8a/2Z0rdHmGLvL84FcuP51s+nxvtCuEU5MJjg5wI8zB0dSncXNJvU2kcAZOAWfioqNoigVgGyM+8yNM/AsHzgCXreXW9Nu5a30twgnnLfqvsXg6MFGGOxAv2XBrS/D7gz4y1nwt14E3Grk2/eQZp1SOxM8o0biQuJiq4vGaX6Hio2iKOUbcZ9JkWUGRnBckOPKYcCWAczLmEeMK4YPGn5Az7ie5pwjBHqlTfoCZn8PCdHw7nAIk55nsRhrxk4sCNTSSAdnCyNKYTjilATUOl0PX35QsVEUpXxzEGN1ZGLcZ3bFfnphOr039GZF9gqSw5OZf8Z8Lgy70JmgmQhkwcatcPckc6lXh0PDOgTcb+Rg3GNi0RRi4jEy7lmy3YLrahrY+5UiqNgoilJ+ycEUb4JxX9nD0HYe3kmPn3uwLncd9cLrsfjMxZwdc7axVnwEBpx5o2HIBDvN+UK47kKcLs0FmOOzcGIvXvueUfb3Aoz1I2nOjdA4zR+gYqMoSvlE3GcWRmjSzfdfjvxCj597sK1wG2dHnc2iWouoT30jEj6ckc8eGD8Xvt0M9ZLh1aHgyrevJ5X/IjpRGMtFss/icFrSSDFnbYy1pBwVFRtFUconBzGWRz4Bi+W7gu+4bP1lHPQepF1cO+Y1nUfVwqrGCpFKfzdwBFbtgifeN5d66y5Iqm5fS7oEROP0QPPidAAIbsLpt6+ZiInTaD3NH6JioyhlGJ/PR0ZGBkeOHMHv9x//hBARERFBUlISsbGxuFxl4I2ai+kSkI954UfB4j2L6ftzX7J92fRK6MXMJjOJ9cSat9wRnPTlCMiNgaFvgNcHd/WAri3sfR5zLfJwugFg3yfM/kiqcwROckB99G16HPQ/j6KUYXJzc1m0aBGLFy+msLCw1NZRp04d+vfvz/nnn19qawjgB3bjDCnLh/e2vMcN62+g0CpkaPJQ/q/O/+HJ9Dgdn8OCjnfBI+/Dz7uhaW0Y1xsTl4kz+wJdAtwYdxsYayYGIzBiJUnspz4apzkBVGwUpQyTn5/P6tWrmTlzJrm5uaW2jqZNm9KuXbuyITaSfeYCjsBLG17i7q13A3BvzXt5ttGzuHE7dTBS2e8DDsOX2+HfH4PbDVPugZgaONM7ZcCZG8dNJhZNQdD2CJw0Z52acUKo2ChKGceyLPx+f6l2zbYsq2x07c4H9pufVrbFI1sf4amtTwHwzBnPMCZpjBM3icHEX2SejAeyM2DYf0wR59+uhgvOtI9NwHGViaD4cCZyuuxPrL0djDDVRZt+nSAqNoqilA/8mOyzHPB6vdy+8Xbe3P4mYYTxZpM3GVZ9mKm1EfeWGyM0hQQac46dCZv2Q4t68Gh3HJcYOC6yGPtjYayZOBzLBvv6ERj3WSWeT3OyqNgoilI+OAxkQG5YLtf97zpm75tNlDuK9xu8T++E3kZUwjDWjAcjFlLp74fPNsJ/FkB4GLx9N0TaRZ2B+IzddYA8nOab4LjTvDjxn6oYa0g5YVRsFEUp+xQA+yGjIIMrv76Sz9M/p0p4Fea1nMdFERc5ForEaHw4ggPkZMDNL5vvD/aDcxvb1w2e3unH6egscRpwEgVkfxxQE01zPklUbBRFKdtYwH7Ynb6bnl/0ZE32GupE1mFR6iKaJzY3YiHxFhfGUsmkSKrzQ/Ng0z7jPnvoiqBrSyqzhVNHI263aByXnB8nNVrHBvwp9D+Zoihlm2zYsGkD3b/tTlpOGk1jm7Ko+SIaFjR06mdycLoyyxjofMz459/gxU/MjJrJoyDComjigPQ8C7aO7AadeHEKPCOAGjjp0MpJoWKjKErZxQur1q2i14pe7C/Yz/mx5zO/xX
yqeaoZQZAAfRjGmnFTZJBZfjrcbGefjbkazjsLJ/AfnHnmx8RvxLqRjLMYTOEm9r7qoX3cioyKjaIoZZZP13zKVUuuIsubRfeq3fmg2QfERcQ5EzjFzSUt/m1rRiyWp2bBzzvhrNrwWH/7orZrLTB/RjLP8nHm0RTiuNGkB1o9tJvzKaAZ4oqilElm/TiLyz65jCxvFgNrDeTjdh8T54kzVom0qcm2vwe7wOxGC2u3wbg55vubN0NUPk5fMxnzLJM1pXOAWDSF9seNEbVaOBaO8qdQy0ZRlDLHa/97jTvm34GFxV2N7+L5+s/jznQ7TTc9GIGQ+EuwxeEHXy7c+qrpfXZ7D+jYBiMeUpcqnX/ycQL/wVls0ZisMy9meJp2CThl1LJRFKXMYFkWjy9/nJHzR2Jh8VTTp3jhjBdw57md4WQe+xOOEQIJ8ov7zA2vL4RvN0DdZBg/1L64dHx2Y2pk4u2PGxOPSQj6KVZOBKZLgLrPThm1bBRFKRP4/D7uXHAnr618DbfLzRut3uCW5FvMC1+EQiZienEC/NGYTLQI8/uOQ/C3WeaaLw+CBHugWsBVJqnNHpx5NeCMEsi3f4/AuM8iQ/rYlQYVG0VRSp18bz5DPxrKzPUziQyLZHqr6VxV/yoTU5GBZ7k4rq4snOmYwY0yPWbEc2YuXHUBXN3J3l6IU4Rp2efH29u89vXtjtDE2cfHYjoFKCWCio2iKKXKkfwjXDX9KpalLSMhIoG57efSKbGTEYHDOF2YCzECA0YwxLqRrs6FMO8b+PAbiIuCl28JOteHUx/jx1gvPpx5NDIArQAn2aAOGmgoQVRsFEUpNfZm7aXX1F78sOcHasXVYmGXhbRytzKWBzhDysJwgvpgXGdH7O+2ZZPjg1Hvmk1P9oN6YpWI+y0Xx5qRhADJMCvESXMOx8RpNPusRFGxURSlVNicvpnu73RnU/ommiQ3YVHfRZyx7wxjdYhFkoNT3S8pz8F1NRKrccHfZ8HWA9CqEdx5KUawInGabIq7TBpvFlJ0Ro2kTscCVU7Df4BKhoqNoiinnR/3/EjPqT3Zk7WHNrXbMH/AfGrur+lYFtkYAcjEaUHjsbeDY+XYlf+/HITn5oDLBa/fBuEJQcd4cOpnsnDiPJI0EBv0HaA2mn0WAkIiNvPnz2f8+PF8//33uN1uzjrrLJ555hm6dOkCQHp6OmPGjGH27Nnk5ubSvn17nn/+eVq0aBGK5VRqkpOTufDCC0lKSiq1NYSFhXH22WcTGalpPQp8lvYZfab34Uj+EbqkdOGjAR+RkJXguM6O4FTzx2OsD7E6pNJf2tR4wMqDO980NTW3doV2TZ19gd5mcThxHqmjkSmcIm5RGKHREc8hocTF5o033mDUqFGMGjWKRx55BL/fz+rVq8nJyQFMHn3v3r1JS0vj5ZdfJikpiXHjxtG5c2dWr15NvXr1SnpJlZozzzyTO+64I/DfvzRwuVxUq1aNhAQdAFLZ+ejnjxj0wSDyfflcc841vHv1u0RakbAPJ2ssCiME4Li7BEldljY1wKzvYcl6SI6Df16Bic1EUTTVOTjduQAnHiMFonH2pyo6OiBElKjYpKWlcc899/Dss89yzz33BLb36NEj8H3u3LmsWLGCpUuX0rlzZwDat29PSkoKzzzzDC+99FJJLqnSk5CQwDnnnFPay1AUJq6ayIhPRuC3/IxsO5KXer1EmCsMdmOsGgtHDLJwAvsFODNmwAiRnaqcbcG9k83mcUOgWn17Xx7O282PsZbicOIzeRjBkamcPsyMGg0shIwSTeybNGkSbrebESNG/OExc+fOpU6dOgGhAUhMTKR3797MmTOnJJejKEoZwLIs/vH5Pxg+bzh+y8/jnR7nP5f9hzB3mHnpH8Bp638Yp6hSMtFEJKRLQCQmzpIL46bBjoNwXmO4uStOkWc4Rkxi7fOj7G0xGNecWDIu+7417N+VkFGiYvPll1/SrFkzpk+fTuPGjQkPD6dJkya88s
orgWPWrVtH8+bNi52bmprKtm3byMrKKrZPUZTyid/yc9eCu3hk2SO4cPHa5a/x2CWP4XK5AkPR8GIsF+lLFoERDIm3uIN+F8Jhcx48t9j8+vLAoJi+uM9ycdKco+1rSexHmnJ6cEYHqPsspJSo0bhr1y527drFmDFj+Oc//0njxo2ZOXMmo0aNwuv1cvfdd3Po0CEaNWpU7NzkZNPpLj09nbi44v/EmDBhAhMmTABg//79JblspZJgWRaFhYV4vd7jHxxCXC4XHo+HsLAw89KtoBT4Crhh9g1M/2k6EWERvHv1u/RP7e8ckAOkY0RBugFk2D8LMa4vSQSQ4L4kDoTBX9+G/EIYejG0P9s+N4ZA2xrCMJZTTNA183H6o0ncpjpOsagSMkpUbPx+P5mZmbz11lv07dsXgC5dupCWlsa4ceO46667/vS1hw8fzvDhwwFo27ZtiaxXqVzk5eXxzTffsGrVqlIVnOTkZDp16sRZZ51VamsINVkFWfSd0Zf/bv4v8RHxzB44my4pXZwD/JhYjdS35OLEWiIxIhMTtF8snFwgAz7dCLO/g9goGH+9fWwkjgtOugpIV+cIHLdcDM4o6ESg9BI1KxUlKjZVq1Zlw4YNXHrppUW2d+/enYULF7J7926SkpJIT08vdu6hQ4cASjVFV6nY5OXlsWzZMiZOnEheXl6prePMM8+kVq1aFVZs9mfv5/Jpl/O/Xf+jRmwNFgxeQJvabYoelIWpobHsny6MpSEzZFw4XQKCm2jGgS8P7p1qLvNgb6hTxb6mFHr6cOI8IioJ9vbgSZ4xaE3NaaRExSY1NZVvvvnmD/e73W5SU1NZvHhxsX3r16+nQYMGR3WhKUpJ4Pf7yc3NJSMjo1TF5siRIxQUFBz/wHLI1oytdH+3O78d/I2UKiksHrqYJslNih4kVk2u/XsUxqKRlOYjFM0ky8TpYRYGk76ENTugYTUY3Rkn00y6Dlj29WJxGnTKLJtCjAUUhZlRI/3SlJBTogkCV199NQCLFi0qsn3hwoXUq1ePWrVq0adPH3bu3Mlnn30W2H/kyBE+/vhj+vTpU5LLURTlNLJ271o6TOrAbwd/o2XNlqy4aUVxoQETpzmCEzvJs3/PwYiDTNGUrgEejOBkQuYBeHiauczT10N0dXv/YYx4FWIEKhdTqBnc6kYGook7rXqJ/ydQjkGJWjaXXXYZnTt35rbbbuPAgQOcccYZzJw5k8WLFzN5skmG79OnD+3bt2fIkCE8++yzgaJOy7K4//77S3I5iqKcJlZsW8EV711BRl4GFze8mLkD55IYlVj8QC+wFyMmIgZhODNrwFgbeTjTOO2EAHLhmY9h32Fo1wSubWdvj8VYKl6cXmgS8JdWNCIwMga6NpoUcJopUbFxuVzMnj2bv/3tbzz22GOkp6fTrFkzpk6dynXXXQcYV9q8efO47777GDlyJHl5ebRv355ly5ZRv379klyOoiingXm/zePamdeS683lqmZX8V6/94gK/4OWyRmYeE1wLQ047jIZ9SypyZJZFgE7M+BfttPkXwPBlYMRGekKDUa84uxtwUPRpMVNNCYh4Cg6qISWEq+XTUhI4JVXXilSW/N7kpOTmTRpEpMmTSrp2yuKchp5a/Vb3DL3FnyWj1vb3Mprl79mijWPRiGwC6fZpg8jPvJd+pZJskAWTsKAGx6ZDrkFcM2F0OEsnEadYtnIGAIRKTCuueAiTxemgFPn1Jx29D+5oignjWVZPLPiGW6ccyM+y8fDHR/mjSve+GOhAdMpIBcnA60Qx70lggBOt4AEAjGcn3bA259BeBiMG4yxXuIxQpOHM4pApnCKcEVgrBhxs1W1z1FOO9oJSFGUk8Jv+RmzeAz//ubfuHDxYs8XufPCO499UgEmViNNMSURQIL3UrSZgxEJGSsQA+TDg++D34KRXaBJPE6zzShMnMaLk+5cELQ9HydpIBpj1VTcOtoyjYqNoignTKGvkJs/vpl317yLx+1hyt
VTGNh84LFPkrY0YtW4MdljMtJZxj1H4EzUlLiNC77YBB+vNqOeH+mN06zTwgiIy/5IurMHp1OAtMAJB6phBEgpFVRsFEU5IfL8eVw14yoWbFxArCeWDwd8SPfG3Y9/Yj4mVhOGSXGW4WXB4579OLEaEY4IsCwY+565zF97QY0aGKGRY7w4tTK5OC44L6aOJgpHlGRMtFIqqNgoinJcfBE+nkx7kg25G6gaXZUFgxdwft3zT+zkvRj3mFgg0nZGijoLMXEWSQ6QgWYFMG8tfLURqsXDX7vj1MuI1SIZZ5I6HY6J5Ui3ANlfBye1WikVVGwURTk2CbCt2zYKcgtomNiQRUMW0bRa0+OfB0ZEdmNExI+prZHOyxLEl8p/f9BPH/i88OB0c5mHroT4BHtfDsaakbiOFydFWtxrkpEWBVRBU53LACo2ilKGcblchIeHExkZid/vP/4JJYxV1aJwUCEFCQU0iGrAlzd+Sb3EE5yma+GkOstETOkMYGGER9xnwSMB7LY0730LP+2CBskw4i8U7ZsmgiOJBtLAUxIOfj8UTfNuSx0VG0Upw0RGRtK+fXu8Xi+FhYXHP6EE2R22m7lxcyl0F9LQ1ZB3urxD3YS6J36BI5hxzz6cfmXZOAKB/bvH3i8TNSOgIBwe/cAc8ng/iJIRBFKTU4hJn5Z4Tb79PRYntVmGommqc5lAxUZRyjDR0dF06dKFDh06YFnWabvvp1s/5aZFN5HnzaNHox78X6//o1pitRO/gBdIw7zwpVdZ0IRNwnAGmon4SB+0LHhzMWzZD83qwNCOOA01LRyrpdC+RgROOrXMsIm2P2rVlBlUbBSlDON2u4mLizut3dDfXfMuNy64Ea/fy7DWw5jYeyLh7pN8VewFDuG4vrz2x4eJ4wS3qcmiSNuZ7Ej4u90Y/h+9IFw0NrgFTTRGmOystUDSgYiMFHOqVVNmUM1XFCXA818/z9CPhuL1e7m/w/1M6jPp5IUmF2PV+DDuLYnFgLFuYnCaZUphZjZGdHLhxQWw5zC0TYG+HTDuMunoLPGafIx4FeJktYnASFynxkk/vhJC1LJRFAXLsvjbp3/j6RVPA/Dv7v9mdPvRJ38hH/AbRdvHSNBeugNI4WYk5g0USaCr88FD8PQ8c6mnB4JLLB6ZQ+O2r+HDiJakQSfY98izr1UTnVVTxlCxUU4Yy7I4cuQI6enp+Hy+UltHWFgYSUlJJCQk4HJp75FTxev3ctvHtzFptbFiJl85mSEth/y5i23HZKDJSGY/Tmqy9EIDJxtN0p7tcc3//BCO5EH3s6FLCk46tHQNkA4BhUHb8+2PD2PZeNBZNWUQFRvlhPF6vXz11VfMmTOHzMzMUltHQkICV111FV27diU8XP8Inwq5hbkM/GAgc3+dS4wnhln9Z9HrzF5/7mIHMVZN8FTM4LRnsW6iMG+eWJxRA3mw6SC8vBRcLhh/jX18Ac7AM/s4ou3vkv7sw7F6/Bih0bY0ZQ79m6qcMH6/n19//ZWPPvqI/fv3l9o6atSowTnnnEPnzp1LbQ0VgfTcdHq/15sV21eQHJ3MJ9d9Qrt67f7cxQ4BP+DMoZEmm1JbIxZMJo4lIpM67f5lD3wIhT4YdhGc2xTHOgrO+JZOBNKGRmI+YfZ1ojA90JQyh4qNctL4/f7Tmob7e0rz3hWFnUd20nNqT37a9xP1EuqxaMgizql+zp+7WDqwGuMW8+IISS5OkoBkjUUGfZfaGhd8tg4++B5iIuAfV9jHRGEslwL7vEicNjSxFBU1CyNsddAJnGUUFRtFqWT8euBXur/bnW2Ht3F2tbNZNGQR9RP/xJRcPyY+sw6nG4AMQQvH6YUWjbFIfEHHBLWcKciH2+22NA90h7pVcDLPonBSnKU9jYwpECGLt4+JQtvSlGFUbBSlErFy10p6Te3FgZwDXFj3Qj657hOqxvyJdsh5wCZgM0ZAZFQAQb8XYMRA2tP4MFZHOEZsDgPx8O9P4ec9cGYNuP8KnL
YzOfZ50jVA4j1e+/qSLIB9XG202WYZRsVGUSoJ/930X66ecTXZhdn0atKLmf1nEhtxklWPXkzB5kaMWEjTS/m4MO4zmU8jlf1SvBmNEQy7k8CvG+HJj82lXx0MUZZ9jRgct5nMrPHa50nn6HD7+lGY1Oekk/0vopxOVGwUpRIw/afpXP/R9RT6CxnScgiT+kzCE3YSZoAXkwSQhhnvXIDjxpLYCzhzasTKycUIRwTG3ZVJYAZNoQeGvA+5hXD9BdCtKUZEpFZGREriMeCkQkvcR6Zz1kLfZmUc/d+jKBWcl799mbsX3o2Fxb3t7uXZ7s/idp1g85ACIAPYhtN+xhu0X0YH+DACIJM4/RiLw48RmGiMpRKHcYdlwxP/hZVboWFVeKk/jqUiLW1i7Ovl2PeSHmsJGPGRtOdkNFZTDlCxUZQKimVZPLLsEZ764ikAxncdz/0X3X/8QlhxjR0EdmLEwofTvVksGnn5i8BE4AiRxFuigq4nUzVjYc538NQCU1Mz5WZITLCPi8MZCR1sJUXiWDex9vXlenXQxlvlABUbRamAeP1eRn4ykonfTyTMFcbE3hO58dwbj32SjAHYBezHsSiCM7+Cp2uKGHhxRMeP0+pfMtCi7J+YbesyYMg08+u4y+DiM3CGoEm3AUlflmJNidHk2uvw2PuqYARKKfOo2ChKBSPPm8egDwYx+5fZRIVH8f4179O7ae8/PsGHsV72YFxluThTL0VoCnAKM2VYmbSI8eDEWDLsa0qXAEkYiDLnbD0EvV6GrHwYdAHc3wunTkZa2Mi1xGXnt+8n7WnicIau1cFp8qmUaVRsFKUCcTjvMFdOv5LPtn5GlagqzBs0j4saXHT0gwsxw8pkHIAMJROBkAC9/JRuyzJt04Vj8RRiRCAWIwySkZaPecvkw8486PIqbM+ADinw5g3gkjRnic9IwoHgCtonzTa99v3rY0RMKReo2ChKBWF35m56Te3Fj3t/pE58HRYNWUTzGs2LH+jFVP3vwinGlA8YsfBigvQSq/FgXvwRGNEREXJh3Fm59naZLePCCJDHXGvjQej1Bmw+COc1gPnDIEasGXBSmOVcGY4mXQfEyrJwhrAln+J/MOW0omKjKBWAjYc20uPdHmxO38xZVc9i8ZDFNKzSsOhBhRiR2YERFOmcnP27n3k41kQkxjUmRZXSFFOsILFoonFqbcIxYuU21/hqO/R5Aw5mw7n1YdG9kCjFmeJCk/EDwe1nJIYk2W3SgNPCWDVawFmuULFRlHLO97u/p9fUXuzL3kfbOm2Zf918qscG9dj3Ympj9mOEw4VxV7kw1otYJlE4o5eP2MdEYywYD+aFn43ThkbOy8Np+y+Witt8fWU53DcX8r3QqxnMuB7ipTATirrPgtcrTTw9ONM2fZg3Vh00KaAcomKjKOWYpVuWctX0q8gsyKTbGd348NoPiY+MNzt9GNHYjhEJyQgLTmGWF3sWToxFLAg3zhA0CdqHY8RF2sWIQLnsffb8mv0FcOs7MOcnc6mRl8CL10J4jn2+jADwBn2PCPqImIilE44RnkTMBE5NCih3qNgoSjll1vpZDP5wMAW+AgakDmDK1VOICItwLJOdGLeZuLRycNxlkkYsacmSaiz1M36c8QB59jlitXjs63twXGYF5lr+Apj8P7h/LhzKMe6yif2h/0U42WzSkkYKQaV3mrjgJPsNnJiNTORsgHZ1Lqeo2ChKOeT1la8z8pORWFjcecGdvNDzBdMVIAfYjXGbyZTMMIxYyFTLCByrJtfeJ/3HRBCO4HRcDsMZGQCOeyuPIr3KvvwF7v8Yvt5iDuvWDCZeD41i7Xsk4HQJiCGQDl2kvY2sQVxn8TjuufpB25Vyh4qNopQjLMviyc+e5PHPHgfgH53/wYMdH8TldRmB2YHjGhMLQQLwYTitY/w4LfulbsZnfxcxkRoX6RDgsa+TgBEIu8Dyh33w6GyYt96ssWY8vHAlDGgDLukMkGevRSr9g8c9y8
eFERNx2xXa53gwHZ111HO5RsVGqTR4PB7OPPNMunbtSn5+/vFPCBH169enZs2aJ32ez+/jrgV38erKV3G73Lx++evc2uZW4yoTl1kYRgRcGGsi2HqQhpZ+TMfmcHu7CEwuTn2NC6eqX6yhIIGxIuDLNPjnElj4i1lfXCT89RK49wpIEHedxFvCKdrrTOpvJKMNjPiIwMTZH4nT1EbjNOUcFRul0hATE8Nll11G27Zt8fv9xz8hRERFRVG3bt2TOiffm8+Qj4Ywa/0sIsMiea/fe1ydcrXpwrwXx1IQsZCgvcRbJLYiIiJWze8TAOQawfGZbIw4RILXAx98C//+Er5LM4fERMBtF8PYLlBDREniMN6ga/ntfdE4WW4F9ncZJSDJAdJ9IB5IQd9UFQD9X6hUGsLDw6lXrx716tUr7aWcFEfyj3D1jKtZumUpCZEJzBkwh0sSLzETMrNwgv4unHoVGTQmw8fEchEBkEJNsVaC61yCM87CgGjIOAhvroGXl8K2Q2ZdybFwZ0e4swNUrWmvIxNHQMSiEQETURO3nAicFG1KnzURqzigEZoQUEFQsVGUMszerL1cNu0yvt/9PTVja7Kw/0Jae1vDbzjuJ0kltq2PQFqztI0BxwUlL/QcHPeZvAUkE00EJwc2H4EXlsCkLyHbzhA7qwaM/gtc3xliIjEuOUlhjsGZ0indAIKbaUo9jghecBGnNPaMsj9n4KRhK+UeFRtFKaNsSd9C93e7s/HQRhonNWbRFYtofLCxyRQTF5kUZobbP8WaESEpwOlrJi42+Yi4gNP/zBas77bDs/Pgw7Xgt91gXZvA6J7Qqzm4M3EEJBYnBiMWi7S1kQC/tLoRqyUWx60WbAGJODXGCJdSYVCxUZQyyI97fqTn1J7sydrDuTXPZUHHBdTcU9NxbeXhuLukGaUUVcqsF+lrJvGPTHufxEskIG8fY4XDkg0wbgks22Au6QmD6y+E0ZdCy3gc91gMTuKAiIn0QxPxkDRliQ2F40z4FOsn2IXmxvQ7a4Q22KyAqNgoShnj862f0/u93hzJP0Lnep2Z3Wo2CfsSzN/W4D5mksrswRGWYCHx4dTQROJMzpRr2OnPVh4sWAePL4D/pZk1JESbqv87z4c6NXA6BUg3Z8lukzoeaZ4pKc7i4ou0P2LNSHaauMukvscNVEOLNiswKjaKUoaY/ctsBs4aSL4vn34N+/FuyrtEZdv/zM+2D5Kq++A6Fanil2JNcU9JYoBkq/kIuNAsFyzeaGpkJLOsRjyMvhhu7wSJyRjRkGQDifdIN+dwnNRqyXSTin8RRikklaQAF0ZgJMYTPG2zlr02pUKiYqMoZYQ3v3+T2+bdht/yc1vKbbxS/xXCCDNiIS9tSUV249TASBW/VPuLdRGL0/4luJNyOPxvJzwwC5ZtNPeuEQcPdIMRl0KMdByQuhx7Hg2ROCJW6FyLWJxsM6/9uyQFxFI0bhMsUmH2z0aYWhqto6nQqNgoSiljWRbjvhzHQ0sfAuCxMx7jsXqP4SpwOdX3IiDBWWUue1vwvhjMiz44IQACArFtDzywAKb/z2yuEg1/uwzuuBhivRQt8szHsWjCg+4hlksWTjqztJ+RWTcyGkAsGrFmJOjvwnQEqIfTiFOp0KjYKEop4rf83LvoXl789kVcuPjPmf9hZO2Rzks8F6eVv4xNlsp8Ccp77H1Z9kWDs8JsayPHB08vgmcWQl4hRHng7i7wwEWQVAunUWewwEjAX9xjYfY9pNVNnL22bBzBiMQUYuZjhMWLMxQt3l5nHEZkknDa1ygVnhL9X71ixQq6d+9OjRo1iI+Pp02bNkyaNKnIMXl5eYwZM4batWsTHR1N+/bt+fzzz0tyGYpSLijwFTDkgyG8+O2LRLgimNFsBiOTRxoLIgvzEj+CIzDBfcuCuwNIHEZcZsH1Mh6YtxZSH4MnPzZCM6A1/PIkjO8PSVVwZsmIoEhwX1KYpb+a1NHI/BkpyJRx0AU4XQHkze
LBSRAIAxoCZwNVUaGpZJSYZbNmzRq6detGu3btmDhxIjExMcyaNYubb76Z/Px8br/9dgBuvvlmPvnkE5599lnOOOMMXnnlFXr06MHXX39N69atS2o5ilKmycrPot97/Vi8dTFxYXHMPns2XZO6OkIh6crS5l+C/+AMFpMMsWycDC4RHB/s3AejZsLsH8yulvXgP9dBx3o4LWQkwC+WFDhjBeQe0j1aij3jccYViFCJoMTZxwY3+wzHuMxq29fT2EylpMTEZvr06fh8Pj7++GPi4szko0svvZQ1a9YwZcoUbr/9dn788UemTZvGpEmTuPHGGwHo1KkTqampPProo8ydO7eklqMoZZYD6Qe4fNrlfHfgO6p7qrPgnAWcF32eSWN247SgEYtCalskxVncZRKEl+JNn/ndcsOb38B9H8KRPNMg8++XwaiOEJ6Ak/4c3CQzDKeiX6yXQpw6Ghn7LEkAHozoSK1MYdA6JMssGmPB1MKIkIpMpabExKagoACPx0N0dNH+EomJiaSnpwMwd+5cPB4PAwYMcBYQHs7AgQMZP348+fn5REZqtFCpoPhh65at9Jjdg1+zfqVRZCMWn7OYM60zne7HkTgxEqnoz8EITALOiGYp2rSvK9u27YKb3odPfza7ejeHV6+HelUwYiYCI21tZLBalH1/yUKTdYjoxNhrirfv58OJ58iaxLqKxBRn1rHPU5FRKEGv6bBhwwC466672LVrFxkZGUycOJFPP/2U0aNHA7Bu3TpSUlKIiSnahyI1NZWCggI2btxYUstRlLKDHbz/6buf6PB+B37N+pWWsS35qvlXnBl1plP/Ii6xLJwuAPIi99jbpcZG6mzs+hrLgsmroPl4IzTV4uC9W2COCI0IisyokXoXidH47f0xzjUDnQJig44TAaqCEZUqQeuLwsRkWgJNcFKyFYUStGyaN2/O8uXLufrqq3n11VcBMz/k9ddfZ+DAgQAcOnSIpKSkYucmJycH9v8REyZMYMKECQDs37+/pJatKKGlENgJK35ZwRWrriDDm0HH+I7MbTiXKv4qcBCnWFKyyGTmTI59jVicjDOpb7EIVPLv2wW3zoa5P5rDr2oObwyDGlUw4iQZZhIHkrHP0nEg2MIJjsmAU18jYiJxGelrZmFqZOpgssvUMaH8ASUmNhs2bKBfv36kpqby+uuvEx0dzZw5cxgxYgRRUVEMHjz4lK4/fPhwhg8fDkDbtm1LYsmKEjr8wCFgB3yy9RP6r+lPrj+Xq5KuYlrTaUTnRzvt/SWlWV724haLwslKC8cpsiwk0Bvtk3Vw02TYlwmJ0fDydTCkRZBBIdlkYiVJ6nHQjJrAvcTqEeFLsM+VRAQ5H/ucaph4TCJaRKEclxL7I/Lggw/i8XiYN28eHo8HgK5du3Lw4EHuvvtuBg0aRFJSElu3bi12rlg0YuEoSrkmB9gGHIS3d7zNzb/cjA8ft1S7hdfqvEZ4ZrgjGDJXRrLOCjECI7GQ4IaZIgIeyM6Ev74Fb3xlbtm5Cbw1BBrUwwiGVPRLxX4ERlyksDIGR4TEqpE0ZgunUaZkqEmsJgpHZBJQN5lywpSY2Kxdu5ZWrVoFhEa44IILmDZtGvv27SM1NZWPPvqInJycInGb9evXExERQZMmTUpqOUqICAsLIzIykoiI0uuWGBERQVhYGWyiVQjssT8F8OzmZ7l/8/0APFT/If6e/HdckS6nmaX0HZN+Y1E44iBdncFJOS4A8uGbnTD0/2DjPogIh39eDaO7gDuHojU3v6+ZiccZbiY1NNI8U9KYxV0nBZiRGJGJBGpi0peleaainAQlJja1atVi9erVFBQUFHkRffvtt0RFRZGcnEzv3r157LHHmDlzJjfccAMAXq+XGTNm0L17d81EK+OEhYXRsmVLhg0bRlZW1vFPCBHx8fE0b94ct7uMVAX6McWX24Ac8Bf4uX/j/fxr+78AeKHOC9xd9W5jsUh7mRic+Iz0IvPZ2+Xfa+LWslOQ88
PgiQ/h6WVmxkyLujB1ELRoinn5iysMHMGRxIIC+9rROG4xubakO0fhxFwkvhOJsWJqoTUyyilRYmIzatQo+vfvT+/evRk5ciTR0dHMnTuX9957j9GjRxMREcG5557LgAEDuOeeeygsLCQlJYXXXnuNLVu2MHXq1JJaihIiwsLCOP/882nevDl+v//4J4QIt9tNTExM2bBu8oAdmEB/IRTmFXLLhluYsm8KHpeHtxu8zaDqg5x4i1gbkmIsg86kr5jEZ4LrVvJh1S64cTKs3QkuF4zpAX+/CiILcWIvIhDSM03EKx4nVVnSmINHDkhWm6RbS+1NHYwlo0PMlBKgxMTmmmuuYf78+Tz99NPccsst5OXl0bhxY1555RVuu+22wHGTJ0/moYce4uGHHyYjI4NWrVqxcOFC2rRpU1JLUUKEy+UiJiamWOp6pcSLEZjtBNKWs61srl1/LfPT5xPrjuXDxh/Sne5OMF5cW2Jx5OMkBgTHZ8Tq8ECeC578BJ5ZCj4/NKkBbw+ADk1x4jwiJJJEIFaTdIsGZ9SytL2R4kvJapN1uYEamLkyMvxMUUoAl2VZ1vEPK1u0bduWlStXlvYylMqIZI5tBzLs33PgkPcQV/x0BV9nf03VsKrMbzyfCxIucGa6ZOPESqS63sK438CZWCmCkQufbYHbpsOve4w1c08X+HtfiBVLRERLml1K92dpcJlv30fa2Yg4xQedA05bnGSMyMSjfcuUP8Wx3s2asKgoJ0oBsA/YjREEOxi/o2AHPdb1YH3OehpENGBRnUU0czeDw84xuHEq7CXcJXUrLoww2CJyMB/GzIDJX5vDmtWCSddB+4ZB1wCn6aV8l5TmgqDrSksbqYmRTs7BLrN4TBdm6f6sKCFAxUZRjoeFEY6d9k8puHTDL+m/0H1Dd7YXbic1MpVFZy+ibn5dJ94idTTgpDPH2NvByTLzgD8fJn8FD8yFg1km0+zhHnD/FRApsZ4CiraFCU4AkAB/DEbkJP1Z2uD4Ma4xyS5zY+Iy9XGsHEUJESo2inIs8jGWzH6c6ns7vvFt9rdc9utlHPIdokN0Bz6u+zHJecmOlSIpyzKXxouxPtw4QXk7x2H1drhjKny1yfzepRm8dh2cVdVeh5wj7jaZvBkRdE2pxZE1JmLcaIU4bjsRqnjMhMxkNMNMOS2o2CjK0bAwMZkdmNoUidXYacoLMxfSb3M/cvw5XFHlCmbUmkGMJ8ZJJw6u+hd3lkzclPTiMMg4Ao/Og1eWm3TmWgnw774wsB24JCsteEqmCyMuMonThSNCHnu7HC8joiXrLMI+ti7Gmim9UimlEqJioyi/JxdTmLkP88KW9jF2SvDU9KkM2zoMr+XlhvgbmFhrIp58jxObkdRhqakJD9pu18FYHnjnS7j/Q9h7BMLcZnLmE30gUeIr0vI/2DoSN1rwvBgZ/yyB/tign1IsCo41oxMylVJAxUZRBD+QjnGbZdq/H8a8mNMBF7yw7wVGbzddzMdUG8PTNZ/G5XaZuEkW5sUebM3kYqycWPv3fFizFe6YAV9uMLf9SxN4ZRC0rE3R7LJ8AoPQAh2aRXBEhGS7WC/S28yNMwsnDJPO3Ai1ZpRSQ8VGUcC8xHdjrBk3RYsbw8HKtXhw14OM3z0egOdqPMdfE/7qCIukFcvUS7Fkogm4vI5Y8PgseGmZqZmpEQ/P9oWhfwGXFFWKkPgx7rJc+yOZa9H2R/qqSZxGLBVJAJABaFHAGZjYTBmogVUqLyo2SuXGj4nN7MRpgHkY82LOML973V5uS7uNSYcmEUYYk+pO4vrk650+YzE445il51kmJkDvMZMzZ34Ho2fBrgxwu2DUJaZmpko0zshlmTmTgyMUMnxMEgLEopEOAJKlJtMxI3Cy3pKAFIw4KUopo2KjVF7ygF2YTDMZiyyxDztWk+vKZeCvA5l7ZC7Rrmhm1ZvFZXGXOdM0Cyma3izWh532vCkT7pgEi9aZ3RecAa8NgjYNcVKQwckwi7
GvIUImHaFjcYaaBaczS8qzdA7A/r0+WjejlClUbJTKh2SabcfERcCxZg4TyCJLL0inz9Y+fJn5JUnuJOY1mkcHOjgxGCmqlPb90h7GDQVh8Mxc+MdCyPdCUgyMvxpuuRjcUtUPRbsJSMW/CJYMVRPryYVp6y8iJwkAMsxM0p0bYpIBFKUMoWKjVC6kbmYfTuW+jDqGQLbYruxd9NjUg58KfqJueF0WNVhEalyqY4HkUHSYmKQ258AX2+G2SfDzLrPr+ovg2QFQIzgVOgbjthMLRUYoF+BYI5E4Kc0ibB77PiJEkTgNN2thijQ1NqOUQVRslMqB9CHbQaD6nyzMi1l+95mfvxX+RvdN3dlasJVmEc1YVHsRDQobFG09E9SRWVKb0y24/y14024zc2ZNeP1a6NISIwb59j1lMmbwfBnpXSbV/yJE4HQFkNYyYumINZOMaTej1oxShlGxUSo+0tNsF8Z6kGFhhTitZ+xA+8qMlfTa1YsDvgNcEHUBn5zxCdWo5qQ2B49cst1ZVibM/BnumgJ7D4MnDB7sDWP7QJS0mJFWMjJFMxKndY24xbxB103AGWwmtTRi1UiMJhpToKmxGaUcoGKjVFyk6l8yzcIxwiJdmCUZwO6evCRzCVfvvJosfxY9Ynowq9os4txxTqaYFGraFhBhsCMXRk6Ej38yt/zLWTDhWji7IY4L7DDOwDSJz0h6tCQaROIMUJPriwUlXZojnfsGOjRrpplSTlCxUSomMm9mB+blLYkA0kZGXGh2AH5G/gyGpg2l0CrkuirXMbn2ZCIyI8zxwWOQ7aJLfw5M+Bbufw8y8yAhCp4ZBLd2AbcX4x4T6yUepymmZJfFUHQstLjWEnDGM4PTnVm6CsRhXGbJqDWjlCtUbJSKRy5Oh2ZwgvCZOLEOmQXjgpf3vMzd++7GwuKeKvfwr/r/wo3baTMjVpAtChsPwy2vw2d2B4Arz4NXroG6VXHa+UfZ64i17xWFk7UmSQLRFG2uKfU00h5HAv+WfUw9oCaOEClKOULFRqk4SIHmdpzgvT3cjHBMgoBkhHnBcls8uvNR/rHvHwCMrzGe+8Pvx5XrMuIgx9rJAD4vvLQEHnofcgtMB4D/3ADXtAOXjCGQQL/U2gRX94tVk40jKtjHiiCBERjpSOACqmJiM5KxpijlEBUbpWKQj2meKQWaUr+SidO0Ul72eeDL8TFy+0gmZEzAjZuJ9SZyU9JN5ngfznRL26X16wG48XX4erO53eCL4MV+UFUEQNKZpRuANM+MM/cL9DmTJpriRvPb2yU5QGIzXoxLrSHqMlMqBCo2SvkmeEyzxEVycTLMgptj+gAP5LnyGLxvMB9mfkiUK4oZSTPoE9fH6XEm3ZbzweeDFxbDwzMhrxBqJ8IbN0HvtjhWk6Quh+MkE4iQgFOQGTxzRroUxOFkpYXhWC8pmOaZwdlvilKOUbFRyi9e4AAmCUBSmj0YsYnGqYvxYyyLHDhccJgrt17JZ9mfkehO5OOUj+no7WjOlUC9HcjfsBuGTYCvNprb3dARnu9vugEUacApWWrhONMwJQmhMOiYOHtNEvDPxempJrGkJLRmRqmQqNgo5ZM8TBLAEQIxmECMRDLPgi0ID+xx7aHX5l6szltN7bDaLGq0iBaxLcxLP8iV5bfglUXwwHQTm6mdCBNvgsvb4gxH8+IkEIThiIdYNB6MuIhLzBt0j0h7zfFB50ais2aUCo2KjVK+kED8VgLNLonAWBHipsqnaNPKGNiUv4nuad3ZXLiZMyPOZHHyYhr5Gpn9ERhXnAe27oIbJ8Gyn83thlwELw2EJOk/JkF7ezZNwD0nCQVizcg8meCEASnSjMfJkHNhijJroy4zpUKjYqOUH7yYJICDOFle0p/MwnFnidvKLp78IeMHem7ryT7fPs6LPo/5KfOpUVjD6XEWDZYL3l4Od71j6maqx8Mbw+Dqdva9JOnAwim4lOB/8LyZKHu7F2fmjFgz0klA3HXxmASAOD
TLTKnwqNgo5YNcTBLAEZx2M2LNSPaYDC6TYLwblucup8+OPmT6M+kW2Y0Pa31IvDveaRHjgf17Yfg7MHuludVV58EbN0CNRByh8WFiMbk4VlMsTlt/F07rf+xzpOWM9DKTbLRwTFymKvo3UKk06B91pWxjYUYy78S4raRtjPyUfmGR9u9eArGTD7M+ZNCOQRRYBQxIHMDb1d4msjDSWDMx5hqf/AA3v2l6miVEw8tDYGgncAXPi5GgvoURGBknIB0CZN6MD6drc3BPM4nbRANVMEKjbWaUSoaKjVJ2EbfZIYxAgLFsxKUl7qvgtvz2y/+Nw28wcudI/PgZlTSKF+u9iNvnNoIAZKfDfe/D6/81v3dqBm+PgIbVcIL/MjNGAv+yPQqnp5nEh6RORsY2B3dwlnPqY8RGRwAolRAVG6VskoNJaZaWM/LSFmvGrpnBj7Ei7HRiy23x931/57EDjwHw96S/81DsQ7jyXYFGlqu2wuBX4dfdEBEOT/WD0VdAmIxVBic+4ydgBZGPEQqpp4ki0CstMM45zD5O5sxEYdxlddAEAKVSo2KjlC1kiuYunJf3EZyXvQw7k1iKWBhe8Pl83L3tbl45/Apu3LxW9zWGJww3lkkB+Pzw3EJ4eAZ4fXBOHZh2N7Sqi5NwUIixTPJxujCH4bjppOeZDyfNWeIwMRhLSObMxGGsmQQ0AUCp9KjYKGUHL2buzE6cMckiABL/kOwu6XcWZY7J9+Zz/Z7ref/w+0QSybTq0+hbpW/AvbbjMFz/Giz7xfx+Zzd4ehBEyzUkuUDcYdJ1QBpkBo8DkELRGJyRBT6ctjjROP3M9G+YogD6V0EpK+ThuM1EWCQmIskA8rKXzC87Cy3Tm0nfnX1ZkrOEBHcCc+rO4RLXJYFEgI/WwC1vwKFsqJEAb42CXmdjhEF6k0kHZrFmwoPuFYm5li/ovhK3icKZUePGpDM3sH+qNaMoAVRslNLFwjS/3IoRljzMS1rGJ2fZx+URqIkJNLn0wz72cdn2y1iVt4qaYTVZmLKQ1tGtIRNyC+HeV+H1ZeYSvVrC5FugZrJ9nWwc60XmyUg3AmlHIwPM4ux1RONkv0k6tIhONUxxpiYAKEoxVGyU0sMP7AV241grwS/zMJyXvVTqZxMo5tySu4UeO3qwoWADjT2NWVR1EY1djQH4aR8MfBHW7TRJAM8Mhbs6gUvccpEYocizrylFmVIQKj3WvDj1M1JT48EZZgbGipHYjKIoR0XFRikdCjBusyP2d7FwpOWLZJ5JurG40GyLY+2RtfTY0YPdvt20jmrNggYLqFVQCysfJnwK97wLeQXQtDZMHwGtm9rXEWtGqvqDxzT7KTqRM7jvWR5GkHw4Vo5aM4pywqjYKKefTEw3gByM6yy4zYxkmIGTERbcR8wHX+R/Qe/tvTnsP8wlkZcwu+FsEiMTyciE4W/CzP+Z02/qbPqaxUp2mRRfyggCicmIC01iNmLNeO3jwZmFI8IXnGmmKMpxUbFRTh8Wpq/ZLpyXfzbm5Z1L0VkyYThuLanI98KcrDkM2DOAfCufvvF9mVplKlF5UXy3BQa+AFv2QXyU6Ws2qBvGWsnBccNJirKLosWg4MRmxI0maxRrRpIFkjB1MzqeWVFOGBUb5fTgxcRm9mNe4n6cqZg+nFRmC+NGk5d+0Gjm/zv8fwzfPRw/fm5Lvo1X6ryCOyuMFxbD/TOg0AfnnQHTb4MmNTECJbUvmfY9pOZFKvz9ONaT9FkTK0pa//vsfVGYTLMEdAyAopwkKjZK6MkDtuGkNUvr/Tycin1xU1kYiybP3m6B5bd4+sjT/G3P3wB4JOERnqjyBBnZLm78D8yxG2je1R2euRkipauAxGGkKWY2xa2ZYGGRpACpo5F06Gj7/HpB61UU5aRQsVFCh4VJAJCRzQUYq0WmWh7BiYFIPU1Y0E8X+HP9/HX3X3kh6wVcuPhP7f8wMnIk322Ea1+FrQegSixMugmuPs++pwhIJk5bf7
FMonGKQqVDtG05BYQllqKjChpgepqpNaMofxoVGyU0+DEuMxnZXIDTRVkq7cWakemaElOxzy9wFXDj/huZljUNDx7erfMu/atey0tz4b53jdvs/MYw46+QkmBfuwBjfURgrJE8ivYzc2PcZ9KCxo0RQnDSm8WqScAIjfY0U5RTRsVGKXkKMS1n0nGKJqVzcibGwgCnmaaIgQTjCyCrMItrDl7DoqxFxLnj+KjaR1zg68a1z8Csb83pd10Kzw6AiEScdjI5GHEIdstJZ2ixVsRCiQi6dzxOfY8L4zKrhqY0K0oJoWKjlCw5GLeZBOQl4ywcIyRS0+LDGaEsrV/seM0BzwEu33o53xV+R/Ww6sxvNJ+I7W1p+yJs2Avx0TBpJFzTAqeHWgzOjBjpQhAe9FMESKwUaVMj54q7LQYzPVMy1hRFKRFUbJSSIwPjNhPXlVgb0jlZgv8y/MyDY43YacbbXNvosaUHvxT+QqOwRiyuvZivvjmTERNNkWbL+jDrTjizsX09mdAZHJcJnkcTnCTgxulMEInjZhO3XnU0pVlRQoSKjXLq+IEDGNdZDualno5TJCnJAV7Myz0W50+ePb4ZC9YdWkePQz3Y6d1Ji8gWzK6ykPHv1GHiZ+bQGy+BV66zOzUXYtxgcZhEg1yKWiMyxEzqeKRIU6ZpyqgAceOlAOKOUxSlxDmhv1o7duzgzjvvpH379sTExOByuUhLSyt2XF5eHmPGjKF27dpER0fTvn17Pv/882LH+f1+xo0bR6NGjYiKiqJVq1Z88MEHp/wwSingxbjN0uzfg91XwVX3ErSXAk1xodmTM78u/JqOBzqy07uTjlEdeTf+S6591ghNpAfevBUm3QXR8fZ98nCaZcbZ1w6eMSOtZyIwIpSAySirgonPJGCsm6rAWZhCTRUaRQkZJ/TXa+PGjbz//vskJSXRsWPHPzzu5ptvZuLEiTz55JPMmzeP2rVr06NHD1avXl3kuEceeYTHH3+cUaNGsWDBAtq1a0f//v2ZP3/+KT2McprJAzZjZtD4MG60LPtnJqauJgsn00zcasHB+yPwyaFP6Lq5K+n+dK6MvpK/7v0vnccmsGozpFSHrx+Gmy/C6WcWixGVfHsdct14nC7M4kLzYoQn194u9Ts+TLuZFJxYj6IoIcNlWZZ1vIP8fj9ut9GlN998k1tvvZUtW7bQqFGjwDE//vgjrVu3ZtKkSdx4440AeL1eUlNTadq0KXPnzgVg37591K9fn7Fjx/LEE08Ezu/atSv79+9nzZo1x11027ZtWbly5Uk9qFLCZGKsmTzMSz/C3haBeclLcoB0VRZrRzo7A1gwZecUbjp0Ez583Jh4Mw2/fIMnPgzDsuDyc+GdeyApDCeTTHqVeYO2SXJBPEZ4CnDayxRirBjZJv3O6qEzZxSlhDnWu/mELBsRmmMxd+5cPB4PAwYMCGwLDw9n4MCBLFq0iPx888/QRYsWUVBQwJAhQ4qcP2TIENauXcuWLVtOZElKaWFh4jObMILix2mYKR+7jxkWTssZSTkuINCm5rkDz3HDoRvw4WO05wn2T5nI4x+YXOMn+8HcOyFJsswigq4LTrBfXGgiPuJGw94mdTwS56kKNEZHNSvKaabEEgTWrVtHSkoKMTExRbanpqZSUFDAxo0bSU1NZd26dURGRtKkSZNixwGsX7+elJSUklqWUpJ4gT04jTTDKJrWDOZFn4eTFODBefm7gVjwH/bzwKEHeO7wcwDcn/cOH742hI17ISkWpt0NPc+yz5HUZPljJbUwck/JQJNiTUmtDrfvK5ZQBFALk3GmsRlFOe2UmNgcOnSIpKSkYtuTk5MD++VnlSpVcLlcxzxOKWPI/JkDFB0D4MW8vEVQZP6LWBNenFHL0VDoLuTWw7fydtbbhBPO7buW8Z+3/0JOPrRuCB/eCSkpOM058yma1ox93XycuIwnaLsfp8+Z/OmOQccBKEopU25SnydMmMCECRMA2L9/fymvppKRjR
nbfJiis17AcW0V2N/FmpFMsXCMdZENOd4crj1wLZ9kfUK0P57Lv1nNy4vPAGDoX+D1oRAThVP7EodxuUlas0zZjMVYN7lB95Lu0YWYWIx0DkhEG2gqShmgxMQmKSmJrVu3FtsulopYLklJSWRkZGBZVhHr5vfH/Z7hw4czfPhwwAShlNNEBrAF8yL34sROpLWMZJZJ8aZkfMmYAHv/Idchrth9BV/nf01SfhOazF7JrJ8TCQ+Dfw+CUVeCSzLMpKuA7XYLCImIihRphuMUZMbZ34MHrdUGaqBuM0UpA5TYX8PU1FS2bNlCTk5Oke3r168nIiIiEKNJTU0lPz+fTZs2FTsO4JxzzimpJSmngh/YixGaHIwVkY2xNDIxYnLE/oibzML8iZI5MNnm3B0FO+i4tyNf539Nzf3diZmwjv/9nEiNBPh0LNzZDVyS1iwdACStWbLYJK1ZXHPiQpMiTRnAJm68M4CaqNAoShmhxP4q9u7dm8LCQmbOnBnY5vV6mTFjBt27dycy0jSl6tmzJx6Ph6lTpxY5/91336V58+aaHFAW8GLiM9swQhNmb5MeZhFBH5n9IinOBTht/sPgl/xf6LCpA+sL1lP353s5PHEBOw9G0DYFVo6Hi8/EyVKT8+JwMsvEWvJjhEZcY7GYAs1YHCGKwWSbnWkfo9lmilJmOGE32qxZswBYtWoVAAsWLKB69epUr16dTp06ce655zJgwADuueceCgsLSUlJ4bXXXmPLli1FhKVGjRrce++9jBs3jvj4eNq0acOMGTNYunRpoBZHKUUKMPGZ/ZiXdRZGUDJxrAoJ2gsywjkPp74mEr7N/5bL917OQW86dT6bws7PhgJwQ0cTn4mKt68ps2XETReBEZ4YnOLN4M4A/qBjC+17+3A6NZebSKSiVB5OqKgTKJY9JnTq1Inly5cDkJuby0MPPcS0adPIyMigVatWPP3001xyySVFzvH5fIwbN46JEyeyZ88emjZtyqOPPso111xzQovWos4QkY3pCJCJ0xVZ0ouzMCJSaH8X4cnBiEIERqjsIWiLshfRb18/snPCqT77v+z/9XzC3PCvwXBXF3BJ/YsUX0q6dCKOJRVrf8+3v4vISJFmor2+GIzQaO2MopQqx3o3n7DYlCVUbEoYC5NptglHMGTKpgT+xZqR+TQRvztGUpNzYGruVIbtHYZ3f2MS3l/Gkf21qRoH798BXVri9EdzYdxm0vZfamgkJpOAI3CSYSZFomCEqApGaIqWdymKUgoc692sDofKjoVJBNiGsSwkvVjiMcGTNCWGko1T1S81NS6z/YUjLzA6fTT81ouIDz/gSF40LevD7HsgpRZO25gYjFWUY1/DZd9PstAk2C/WjGTDSdwoCkjGjATQP8WKUubRv6aVGR+mG8BWzAveh3nJi0srG2d0slgVEpsR68Ouc7FcFg8deYhxh8bBV/fhWvIMBZaLfufD27dCrFgs4haTTsxZGAGSe/w+rdmFE/yXOI0bqItJBtBsM0UpF6jYVFYKgY2Yjs3BMRnJCAOnM0ABTnpzBM6fGpfZ5y30ctuB25iUPhXXx1Ow1gzFAp7oBw9fBW4RCbFOpFbGjREUcc9l2+vCPk46NkuhZ6R9fEOcNjSKopQLVGwqI7kYoTmCkwQgPc6kc7Mbp+WMtH8B4/aSKF8k5IbnMnDfQObu+w7XjM+xdlxATCS8cyP0PR+ni4C4zyTBIDijTQTFhdMpwItJACjACIsX4zarhyOGiqKUG1RsKhsZGKHJxInPiCBIPzFJCsjFERzJ8pIYzRHI8GfQ50AfvticjWv6KqwjdWhYDebcC63q4sRbxEUmlpEMQBNLB4o27BSXXXBacx1Mkab+iVWUcon+1a0sWMBB4DecmpYcnJkvBZgXew7GVSWV+BKkL8C89OOAcNgdtZseu3uwdk1TXLOnYBVG85cm8MEIqFHbvqfMrpFq/3ycuA84Q9BkoFqwMEn2WQTGmklG05oVpRyjYlMZ8O
N0BCjEWCySuuzBSWWWIWT5OFaJuNGigXQgCzZ4NnDpnu5s/fR6WP4EFnBTJ3htGETk48RXgoelhdn3lZgMOCnTdhEobpzuARZGjBrY2xRFKdeo2FR0vJiJmtsxIuKyf0rQX36KdSEjliMwgpOJ02U5FlYdWUXPHVdz4KNnYd0A3C6L5/q6uOdKcIXhZLVJ5pm4yez2NUTZ+/NwLKxw+3dx0UVgMs20W7OiVBhUbCoyeZj4zC6cF790BhDEspC043z7uOCuAAVm26d8Sp/tI8h5bxbsuoD4KIvpd7i47AyM1WK72LAo6j4rwIiNhVO748G4ygpxxjVL4L82ZtCZuNsURSn3qNhUVLKBX4BDONlmwcWS4k6Tgk0RiCj7WBEdu4J/Zt5Mrlv/LN5pyyCzHo2qWsy700VqE5wOBMHuM+lt5sZJRJAPQdvEChIhaggkofEZRalgqNhURDKAtZhgv7SFkQaZUk/jxxkJEIVxl0n6MziJAT54NedV7vjff+HDZVAYy0VNLD4a7qK6pDJLhlmwcMh2yXCLwlg4Mn9GRjuLxROH1s8oSgVGxaYiYQF7gJ8xwmFhxEbSh104bWEkEUAabUrml0y7LAQrxuKxI4/z90W5sOQDwM31F1pMGOYiMhqn3YwIilgzkukmcZ9gK0WyzkTwfJj4TAO0fkZRKjAqNhUFPyYJ4FeMwMjwseCkAAsnI0xmxEhcRFr4R5uPL93H7UfuZuKH58IPNwPwzz4w9gqXGXQmA82y7PsFjxyQ/mbROAkCMlxN3GUS+K+J9jdTlEqA/hWvCHgxiQC/4aQtS2Bf3GUFQdvdOCMCsjAvfrtjM37Ii8rj2qwRfPzODZDWmQiPl2k3htOvFU6WWR7G9SW1MuJCk3odd9BPaXETFvTTwrjNqqP9zRSlEqBiU97JB37CjG8O7s4scRrpbybbpL2/vOAlvoLZdoQjdP/1Dr5952E42JTk+AIWjYqgbWP7OBEp6XEmwhH7u+1Su+PHGRMgrrYooBE6f0ZRKhEqNuWZHOBbYCeOVWHZ28NxMs/EjebC6dYcixGcbIw7zAV78vfQ8Ze/sXHa85BbjbPq5rJkeDT1JagvSQPxmIQC6TYg1pOkNcfa95PU5gSMAHkw1lCKfZ6iKJUGFZvyyiFgBSZOI0Ijlox0CJACSankh6KjBCSwnwmbozbT/pv/sG/26+CL5JKzc5h7QwzxVe1jszBCIY0744OuL8Wh4jorxOmFJvcOx0kEkKaeiqJUGlRsyiPbgcXAfpx6GKnCF7EpxMkUi8SxPkSEIJD6/IN/NR3nLSZ7+b8BuLlDDq9fFUO4uL9i7GvLaAARD3CslWj7etLUM8q+r4hdHUyxphZqKkqlRMWmPOEH1gD/xVgS8kIXFxkUHXRmz5shDydDLQbH+oiCxXzGFR/sonDN/eDyM/7KAh7oFGPu5bfPk1HMkkEm26TdDDhp1TKbRupn3DiJABqfUZRKi4pNecELfAmsxAhGPE46M0E/g1/okg0WjjMgzUegcHLK4Y8Z9mECVtogwjx5vN8/jL6tohzrw4OxknJxeppJV4AYjJDJrBppgSP389jnpGDm0iiKUqlRsSkPFGDiMz9jLARp+2Id5VixMMSqEMS15QMOwzjXdB6c1QoOnE1M3GE+uyGetnXdRsAkXdnCBPeD06lluzTujMZYMTH2MeI2i8cITUxJ/UdQFKU8o2JT1vECPwBbgSo4GWe/J9iiEVeaBOxlBHMEWPkWtx+ZzBsfXQ7ZNalZfS/fDapBg5oux2LJxelpFomTYSZJAOI+k24DMhRNOjtXx7jOtGOzoig2KjZlnTT7I3Us4GR5BRP2u+8iDB4CiQP+MD+9d/0f8+cOBm8MZzfczte965MYbR8j1on0J5OBZ3Lt4MB/DI5LLgzHZVcHqI8mAiiKUgQVm7LMEWAD5gWf/wfHuIJ+ivtM4ihS+xIG+b582q+dzg+LbwbcdGm+hYUdUvCIS0xmzEThWCR2/U2gWB
SctjYy+0Yy08IxbrMaaEcARVGKoWJTVvFh2s8Ez54JbmopP4MtCKnul+JKWwgOezJp+ekitn17AwA3ddzMm63PwCXTOL0YC0Wy18QiEleaxGX8ODGYaBxRigEaY0Y3K4qiHAUVm7JKJmZUgLR4EYJTnOV3sWhEYHC+byvYT+u5a0j/+RpwF/D3y3bx8DlnOP3SZK5NFM48G+nGXBB0ffmT4g9akx8TRzoTYwUpiqL8ASo2ZREL2I2T9eUP2v57ywaMsBTYP4MSAn7wbaPDtP3k7eiKO/IIb/fLZEjNRs51InFSoqUAVEQr2J3msb+H4UzwBJMI0ISiHZ8VRVGOgopNWaQQZ8hZXtD23wtNOE78pMDeFgnkwPxDv9DnHQ++g+fhSdjNfwd56FSjrhObkfMlfdmi6NROqZeR4L9YN2Jl1cXEaLT1jKIoJ4CKTVmkgKIFm8HxGSmk9OG0qPHgDCULhzf3/sDwKXWxsmsQW3Mj/xtYg7OjE5yEASnIlLk0IiiSaSbpzjJ3JriexoURGc04UxTlJFCxKYvk4Li63By9FY1kgUm8JtJse+ynb3hyanMojKPGGT+ydlBTakRHOfGZ39fPeDGCVYBJEojASRCQFjVivYRjEgHqoBlniqKcFCo2ZZFg68X1uw84IgNGDPLMz5tWfcHkD9qDFc6Zrb/ixysvJNoV5jTgDMdp3CmiI9uzceIyrqB90kwzDmgKVAvlgyuKUlFRsSmLSIpzsFUT7DoT8ci3f/pg0Y4tTP6gA1hhXHTx53x+RUfcLldxi0ayzYKbaIq7TuI0YvXIn45E4Gy0x5miKH8aFZuySPBY5WALJhunOWYEzqjnSOhRM4WBV32O17KY2bGTESIZLyAWTSFOfEYmbEqasy/oPtIhwIOxZFrgdBVQFEX5E6jYlEWCZ8NIw8vgYL5koUXjCFAMvHfxxY5QyfhnnP2BAk4vDsGWk6Q7ixjVBc7BiI+iKMopoGJTFhFrREYxQ2D+DLn2/mwcgZCMNGmiKSOYgy0acNrKiEUjH599nRgcwTkDE6PR1GZFUUoAFZuySAROK38RAxEf+SlD08TqEMGJxREYidFE4qQ6S4GoIFllEqeJAppjss4040xRlBJCxaYsEo6ZI7OPolljORgxyMKp4pckARkHLdMyJT06uH2NnCMjoi2MkOXb94sD2gAN0KmaiqKUKPpv17KIC6hK0cJKCyeTTOIvEsyXwH88TuEn9vGxOG1pfDjWTXC3Zw+mx1kHzBwaFRpFUUoYFZuySgzG2gi2bCRZQPbbHQOIw2k3E23/HoEz4ExazUQFfSQulIWxZLphEgIURVFCgLrRyioyiCwHp3VNcIZaPkZU8nBGBBQGHRuGMzbAR9FmnsJ+oBFwOZAUyodRFKWyo5ZNWSYKqIVj0Ui1fxjGgpFEgWgcCyYWZ9aMuM7AsZAkIy3bvnY/VGgURQk5atmUdapiLJbdOP+33BgLRhpqWhiBwT7WhyMuMjYgeFRBJkZoemBcdYqiKCFGxaas4wJq2t/3BG2XkczSKNNn/y4doCUJQJCpnPnAWcCF6BwaRVFOGyfkRtuxYwd33nkn7du3JyYmBpfLRVpaWpFjVq5cyfDhw2nWrBkxMTE0aNCAwYMHs2XLlmLX8/v9jBs3jkaNGhEVFUWrVq344IMPSuSBKiRujCXSGBOnkW7MERjBkAJOmbQp2yURwIPjYusAdESFRlGU08oJic3GjRt5//33SUpKomPHjkc9Zvr06axbt4677rqLBQsWMH78eL7//nvatm3L9u3bixz7yCOP8PjjjzNq1CgWLFhAu3bt6N+/P/Pnzz/1J6qouDAur8aYrLFoTOxGYjkiMpFB36W/WgymkWYnTGcAjdQpinKacVmWZR3vIL/fj9tt3lBvvvkmt956K1u2bKFRo0aBY/bv30/16tWLnLd161ZSUlJ4+OGHefLJJwHYt28f9evXZ+zYsTzxxBOBY7t27cr+/ftZs2bNcRfdtm1bVq
5ceUIPWGGRaZ6HMRlrUsApWWvhGJGpYn/UklEUJcQc6918QjEbEZpj8XuhAWjYsCHVq1dn586dgW2LFi2ioKCAIUOGFDl2yJAh3HTTTWzZsoWUlJQTWVblxoPJIkvCaWkjcRrJOlMLRlGUMkJIX0c///wz+/bt4+yzzw5sW7duHZGRkTRp0qTIsampqQCsX78+lEuqmEjW2e9daIqiKGWEkL2SvF4vI0aMoHr16tx8882B7YcOHaJKlSq4XEV7oiQnJwf2K4qiKBWLkKU+jxo1iq+++opPPvmEpKRTrxqcMGECEyZMAEx8SFEURSk/hMSyGTt2LBMmTGDSpEl07969yL6kpCQyMjL4fV6CWDRi4fye4cOHs3LlSlauXHnU+JCiKIpSdilxsXnqqad4+umneemllxg6dGix/ampqeTn57Np06Yi2yVWc84555T0khRFUZRSpkTF5qWXXuLhhx/mqaeeYtSoUUc9pmfPnng8HqZOnVpk+7vvvkvz5s01E01RFKUCcsIxm1mzZgGwatUqABYsWED16tWpXr06nTp1Yvr06dxzzz307NmTLl268M033wTOTUhICFgsNWrU4N5772XcuHHEx8fTpk0bZsyYwdKlS5k7d25JPpuiKIpSRjhhsenfv3+R30eOHAlAp06dWL58OQsXLsSyLBYuXMjChQuLHCvHCE899RRxcXG8+OKL7Nmzh6ZNm/L+++9zxRVXnMKjKIqiKGWVE+ogUNbQDgKKoihlj2O9m7X0T1EURQk5KjaKoihKyFGxURRFUUKOio2iKIoSclRsFEVRlJCjYqMoiqKEHBUbRVEUJeSo2CiKoighR8VGURRFCTkqNoqiKErIUbFRFEVRQo6KjaIoihJyVGwURVGUkKNioyiKooSccjlioFq1asTGxlK9evXSXkrI2L9/f4V+Pqj4z6jPV77R5zt50tLSOHDgwFH3lUuxgYo/06aiPx9U/GfU5yvf6POVLOpGUxRFUUKOio2iKIoScsqt2AwfPry0lxBSKvrzQcV/Rn2+8o0+X8lSbmM2iqIoSvmh3Fo2iqIoSvmhXInN9u3bueaaa0hMTCQhIYG+ffuybdu20l7WSTNr1iz69etHw4YNiY6OpmnTpvztb38jMzOzyHHp6enccsstgVTvbt26sXbt2lJa9anRs2dPXC4XDz/8cJHt5f0Z58+fz8UXX0xcXBwJCQm0bduWpUuXBvaX5+dbsWIF3bt3p0aNGsTHx9OmTRsmTZpU5Ji8vDzGjBlD7dq1iY6Opn379nz++eeltOI/ZseOHdx55520b9+emJgYXC4XaWlpxY470efx+/2MGzeORo0aERUVRatWrfjggw9Ow5McnRN5vpUrVzJ8+HCaNWtGTEwMDRo0YPDgwWzZsqXY9ULyfFY5ITs722rSpImVmppqffTRR9bs2bOt5s2bW2eccYaVlZVV2ss7KS688EKrf//+1rvvvmstX77cev75563ExETrwgsvtHw+n2VZluX3+62LLrrIqlu3rjVt2jRrwYIF1sUXX2xVrVrV2r59eyk/wckxbdo0q1atWhZgPfTQQ4Ht5f0ZX3/9dSs8PNy65557rMWLF1sLFy60xo8fb3388ceWZZXv5/vxxx+tqKgo65JLLrFmz55tLV682Bo+fLgFWK+++mrguOuuu85KTEy0JkyYYC1ZssS6+uqrraioKOuHH34ovcUfhWXLllk1atSwevXqZXXv3t0CrC1bthQ77kSf58EHH7QiIiKsZ5991lq6dKk1fPhwy+VyWZ988snpeaDfcSLP99e//tXq0KGD9corr1jLly+3pk6dajVr1sxKTk62tm3bVuTYUDxfuRGbF154wXK73daGDRsC2zZv3myFhYVZ//rXv0pxZSfPvn37im17++23LcD69NNPLcuyrNmzZ1uAtXTp0sAxGRkZVlJSknXnnXeetrWeKocOHbJq1qxpTZs2rZjYlOdn3LJlixUVFWU9//zzf3hMeX6+v/
3tb5bH47EyMzOLbG/Xrp3Vrl07y7Isa/Xq1RZgTZo0KbC/sLDQOuuss6zevXuf1vUeD/lHnGVZ1sSJE4/6Mj7R59m7d68VERFhPfroo0XO79Kli9WiRYvQPMBxOJHnO9p7Jy0tzXK5XNYjjzwS2Baq5ys3brS5c+fSrl07mjRpEtiWkpLCRRddxJw5c0pxZSfP0ap2zz//fAB27twJmOetU6cOnTt3DhyTmJhI7969y9XzPvDAAzRv3pxBgwYV21een3HSpEm43W5GjBjxh8eU5+crKCjA4/EQHR1dZHtiYiJ+vx8wz+fxeBgwYEBgf3h4OAMHDmTRokXk5+ef1jUfC7f7+K+6E32eRYsWUVBQwJAhQ4qcP2TIENauXXtUt1SoOZHnO9p7p2HDhlSvXj3w3oHQPV+5EZt169bRvHnzYttTU1NZv359KayoZPnss88AOPvss4FjP++2bdvIyso6rev7M3z55ZdMmTKFV1555aj7y/MzfvnllzRr1ozp06fTuHFjwsPDadKkSZFnLc/PN2zYMADuuusudu3aRUZGBhMnTuTTTz9l9OjRgHm+lJQUYmJiipybmppKQUEBGzduPN3LPiVO9HnWrVtHZGRkkX/4ynFAuXof/fzzz+zbty/w3oHQPV+5EZtDhw6RlJRUbHtycjLp6emlsKKSY+fOnTz66KN069aNtm3bAsd+XqDMP3NBQQG33XYb9913H02bNj3qMeX5GXft2sWGDRsYM2YMY8eOZfHixVx66aWMGjWKF198ESjfz9e8eXOWL1/OnDlzqFu3LklJSdxxxx28/vrrDBw4EDj+8x06dOi0rvlUOdHnOXToEFWqVMHlch3zuLKO1+tlxIgRVK9enZtvvjmwPVTPF/7nl6qUBFlZWVx55ZWEh4czefLk0l5OifHMM8+Qm5vLQw89VNpLCQl+v5/MzEzeeust+vbtC0CXLl1IS0tj3Lhx3HXXXaW8wlNjw4YN9OvXj9TUVF5//XWio6OZM2cOI0aMICoqisGDB5f2EpVTZNSoUXz11Vd88sknRxXZkqbciE1SUtJR/yX4R/8aKQ/k5ubSu3dvNm/ezGeffUa9evUC+471vLK/rLJt2zaeeuop3nzzTfLz84v47vPz88nIyCA+Pr5cP2PVqlXZsGEDl156aZHt3bt3Z+HChezevbtcP9+DDz6Ix+Nh3rx5eDweALp27crBgwe5++67GTRoEElJSWzdurXYufJ88i/h8sKJPk9SUhIZGRlYllXkX//l6bnHjh3LhAkTePvtt+nevXuRfaF6vnLjRktNTWXdunXFtq9fv55zzjmnFFZ0ahQWFnLNNdewcuVK5s+fT4sWLYrsP9bzNmjQgLi4uNO11JNm8+bN5OXlMWTIEJKSkgIfgOeee46kpCTWrl1brp9R/Nd/hNvtLtfPt3btWlq1ahUQGuGCCy7g4MGD7Nu3j9TUVLZs2UJOTk6RY9avX09EREQxn39Z50SfJzU1lfz8fDZt2lTsOKDMv4+eeuopnn76aV566SWGDh1abH/Inu9P57GdZp5//nkrLCzM2rRpU2Dbli1brPDwcOu5554rxZWdPD6fz+rfv78VFRVlLVmy5KjHfPTRRxZgLV++PLDt8OHDVnJysjVq1KjTtdQ/RXp6urVs2bJiH8AaMmSItWzZMiszM7NcP+O8efMswJo5c2aR7d27d7fq1atnWVb5/n/YqVMnKyUlxcrPzy+yfdCgQVZUVJSVn59vff/99xZgvfXWW4H9hYWFVrNmzawrrrjidC/5hPmj1OATfZ69e/daHo/Hevzxx4uc37VrV6t58+YhXfuJ8EfPZ1mW9eKLL1qA9dRTT/3h+aF6vnIjNllZWVbjxo2t5s2bW7Nnz7bmzJljtWzZ0kpJSSlWC1DWGTFiRKDm5Ouvvy7ykWI/n89ntW/f3qpXr5713nvvWQsXLrQ6depkJSUlFSvAKi/wuzqb8vyMfr/f6ty5s5WcnGy99tpr1qJFi6
xbbrnFAqzJkydbllW+n2/mzJkWYHXv3t2aPXu2tWjRIuuOO+6wAGv06NGB4wYMGGBVqVLFmjhxorVkyRKrX79+VmRkpLVq1apSXP3RmTlzpjVz5szA379XX33VmjlzZpF/DJzo8zzwwANWZGSk9a9//ctatmyZNWLECMvlcgUKekuD4z3fe++9Z7lcLqtnz57F3jvr1q0rcq1QPF+5ERvLsqytW7daffv2teLj4624uDjryiuvPKp6l3UaNmxoAUf9PPbYY4HjDh48aN14441WUlKSFR0dbXXp0sVavXp16S38FPm92FhW+X7Gw4cPWyNHjrRq1KhheTweq0WLFtbUqVOLHFOen2/+/PlWp06drGrVqllxcXFWq1atrFdeecXyer2BY3JycqzRo0dbNWvWtCIjI60LLrjAWrZsWekt+hj80d+5Tp06BY450efxer3W3//+d6tBgwZWRESE1aJFi2JW7unmeM93ww03nNB/A8sKzfNp12dFURQl5JSbBAFFURSl/KJioyiKooQcFRtFURQl5KjYKIqiKCFHxUZRFEUJOSo2iqIoSshRsVEURVFCjoqNoiiKEnJUbBRFUZSQ8/8mu8wRqmj8uwAAAABJRU5ErkJggg==", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "straight_traj_dict = planner.get_variable_values_from_straight_line(\n", + " planner_inputs[\"start\"], planner_inputs[\"goal\"])\n", + "\n", + "planner_inputs.update(straight_traj_dict)\n", + "planner.layer.optimizer.set_params(max_iterations=10)\n", + "solution_dict, info = planner.layer.forward(\n", + " planner_inputs,\n", + " verbose=False,\n", + " damping=0.1,\n", + ")\n", + "plot_trajectories(straight_traj_dict, solution_dict)" + ] + }, + { + "cell_type": "markdown", + "id": "aa70c1f4", + "metadata": {}, + "source": [ + "### 3.2 Learned initial trajectories" + ] + }, + { + "cell_type": "markdown", + "id": "8c0b57b0", + "metadata": {}, + "source": [ + "On the other hand, with learned initial trajectories the plots below show 10 iterations is enough to produce smooth trajectories that avoid all obstacles, illustrating the potential of differentiating through the trajectories planner.trj" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "id": "08714f3c", + "metadata": {}, + "outputs": [ + { + "data": { + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAZsAAAGQCAYAAAB4X807AAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8vihELAAAACXBIWXMAAAsTAAALEwEAmpwYAACU8klEQVR4nOydd5hU5fXHP9O2d1h6laXIUlaKgl2aWMAeCxhJVDTGGDUSifozphhrYjQxKhqwoSIYBRsgXbHginRF2tL7FraXmfv7471n7x0WBGQ75/M888zure+d3bnfe8p7jseyLAtFURRFqUG8dT0ARVEUpfGjYqMoiqLUOCo2iqIoSo2jYqMoiqLUOCo2iqIoSo2jYqMoiqLUOHUmNlu3buXKK68kMTGRhIQELr/8crZs2VJXw1EURVFqEE9dzLMpKiqid+/eREZG8te//hWPx8MDDzxAUVERK1asIDY2traHpCiKotQg/ro46YsvvsjGjRtZu3YtaWlpAPTq1YvOnTvzwgsvcPfdd9fFsBRFUZQaok4sm8GDB1NSUsLixYvDlp9zzjkALFy4sLaHpCiKotQgdWLZrF69mksuuaTK8vT0dKZOnXrE/Zs2bUqHDh1qYGSKoijKTyUrK4t9+/Ydcl2diE12djbJyclVlqekpJCTk3PE/Tt06EBmZmZNDE1RFEX5ifTr1++w6+pEbH4KEyZMYMKECQDs3bu3jkejKIqiHAt1kvqcnJx8SAvmcBYPwNixY8nMzCQzM5PU1NSaHqKiKIpSjdSJ2KSnp7N69eoqy9esWUP37t3rYESKoihKTVInYjNy5Ei+/PJLNm7cWLksKyuLxYsXM3LkyLoYkqIoilKD1InY3HzzzXTo0IFLLrmE6dOnM2PGDC655BLatm3LLbfcUhdDUhRFUWqQOhGb2NhY5s2bR5cuXbj++usZNWoUHTt2ZN68ecTFxdXFkBRFUZQapM6y0dq1a8c777xTV6dXFEVRahGt+qwoiqLUOCo2iqIoSo2jYqMoiqLUOCo2iqIoSo2jYqMoiqLUOCo2iqIoSo2jYqMoiqLUOCo2iqIoSo2jYqMoiqLUOCo2iqIoSo2jYqMoiqLUOCo2iqIoSo2jYqMoiqLUOCo2iqIoSo2jYqMoiqLUOCo2iqIoSo2jYqMoiqLUOCo2iqIoSo2jYqMoiqLUOCo2iqIoSo2jYqMoiqLUOCo2iqIoSo2jYqMoiqLUOCo2iqIoSo2jYqMoiqLUOCo2iqIoSo2jYqMoiqLUOP66HoCiKDWEZb8AQvbPIddLfve4trMwj6A++2eP/bPX9bOndoavNC5UbBSlISOCErRfZfarwv693H5Z9jIRmgjXshL79wh7newfiREXEaZI+5x+IGBvH+X63W9vryiHQMVGUeo7IiYhjHBU2K9ioNT1ewXmZh+wfy619/djrBGxYgAO2MeMBhKAIiDbPleCfYwijPBE278X2sf12q+Q/S7CFYERpFggBkes1BJSULFRlPqFW1RKccSkzF6G/V6C+fZGufbx2uvyMDf5eHv7Ant5gHBLRQQl314eaS/PsfeLsl8l9jZivVRgBCSAEbxCjCAF7ePl4bji4oBEe33E8X88SsNFxUZR6gK3tVKCEZViHNdXEHPDDtrrPYRbJ/H2dmKNRGBu7mU47q599jaJGEujHNhrb5vgOncFjhAV44iJCFqJa8x++xzF9viS7G1l/H57zB77nMXAfhzhamovV3fbCYeKjaLUBCIm4MRTQjg35RLX7+A89RfiuKmKMN/QWHtdPo6rzB1X8drH2W9vG8CJ1Xjs/aLs88bYx8rBicNE29vuxIhYpD22PHvbRPscxa5tYuxr2u8au7jl5HhF9nlL7euNxohjApCC465TTghUbBTlaHBbIhKUDxEenBcrRKwSCcBLgN4tEBGYm3AQIx459nZJmG9lAUYogva6oL1flGuZiJIfc2MX6yXXHluk6xi7MTd3ibWU2cslKUCsGLFORPSwt7X
sbUTIvPbvJfa6ONd+RTjuuiL7mmT/WIz4FdnjS0VF5wRBxUY5sQm5fhbBEKtAbsqlODEKt2tL3EWS4eXHCcYfsPf1YQSizN43FiMk2fbyGNfxY3Bu1tE4MZRiHNeauKbE7VVorxNhCWAsEsk4E3eYxFd8ONZUpL2/395eEgv242SbgRGvgD0m7OOJ1RKD41aTJAULYw1F2OsKccS1ACNM8jmX2MdphbGYNJmg0aJio5wYWK6XWBgiKPKEXoH5RkjMo8J+99v7FWJuln7MjTEaJ94ilk4RTgwl3l63F8cdVmKfP8bebi/mBhuHI1xBzM1bxh3E3JBDGCHw4dz4sY8pWWhiRYmVIUInrrF8wtOYK+wxJ7rOV4wjNCJyIgIitBaOSy5kX0eS6zwiMGX29nE4gmgRLsBFwHqgCdDcXqc0OlRslMaJhWOhSCaXxEo8mBuhl3Drw4u5+ebZywM48RIRiVicm3YpzlwTCZIX4gTzRaCicWInxThB9SBOKnEh5mZcjCMEhfYxJavMg2M9uBMCJC7ic7377GuWLDJwbuzurDCZjyOxIDm/CFgRjsVT5louGXMyt0esI+xlefZnFWW/i7sQe0wilnJ9HnufIqAlRvzUymlUqNgoDR+xBmSCojsALzdWEQ93OrDEJfw4glCAk7Jr2b+7XUbgiFSZfQ6ZSyKWhMceQxFOnEQsKhGdOBwXlg/HfSX749pHLBO3pSVjkrhLwB4rOJaBXE+y/bskLIj1I+6/gOvzEauo3B6fXK/PHre4Cr0YcRB3m1hEEguSY4noittRxlGBky59wB6zuNg2Ai2AZugdqhGhf0qlYXFwyrC8JJgt8Q+5AR7AmSEv7iuJw8S6jnkAc7NLJPxbUeY6LjhxFcm8kn3ldxEKsRiK7fOIqy0C58lfxgmOZeTFuVHLhEoJ2otVIfNpRBhlf8s+hoiNpEmX4wiajM/tHhMrqJzwlORCjFjKZ1SEySIDx50WfdD2EmuSc4g1JNUL4l3rfDip1CJikgix096vtet4SoNGxUap34jV4p7kCOZGJKm9kr0lgXNxkRVjbpLxrmPtsY8hN01wbuyRmBuqCIdMqBRBkHNKRhk47jCJUYgQiFBICnA5zrfNhxEodxBeJl1KLEniN3ITLiTcUgtgMtLk2uT6xGIRl6AE5sUykgw2ET+fa1+xSNy/C+Kek2OL8IlgFbnWyf4yb0gqC8jxxMWZjyOyZThCnIDzN92MERx5MFAaLCo2Sv1BbpQiLOKSKcJ5Khc3lASoI3HcWXLjDQK77GWJzuErLaJonLknYjmI+0jiHvLk707J9WNcXXGuZVIWRiyeRJzANzhlXiRTDByLQ4TFnU4scRW3xSBuO7fwSbKDJDXI9cm2Ua7fQziWlrj8RHjcLju3uMjnbbn2l2PJ5+iO3cjEUHHNyd8s2vVZyN9Strdw5uWIkLotpjz79y1AW8I/d6XBoWKj1B1y45IAvgTY5cYr9b8kUO/FEQl5qt5r7+8WFYltRLq2j8KJkUg9LxG0CsIzoAKY1GSZDyOIdSKJARKgd9f/KiBcFCDcJeauAiCusQCOBSAcKIe9G2HH97BtLeTsg/wi8BRDaREEKyDoA68Ponzg80O5F3wewIsV4aW0yENBGRQUQ1EZFJdAUTkUl0F5BQSDFhUhqLBFxoeF12vh84SI8FpE+kJERECkD6ICHmIiPETHRRCd1JSEFql4E1IhuSW06QPx8eGxMymHI0kDZfZnI5lwkuHntoTESpK/v6RuF2MEpx0qOA0YFRuldpAnZBGWEszNX2IiIjruGe0VGJGxMKJxwF7nP+iY0a5jyUz5AE5MpBTzRO0l/D9eXFFSY0xw3/TEGpAgt9wkZTa+xBvkeHIeN25LptJVVga5u2HPLijcCZs3wt4NsHMD7NgAuzYaQfmJeAivEFOThDw+ClucQnTPs/F3PQd6DYWYaEd0JMblnocE4dmCMsFVHjLExSdxomJgK2rhNGCqVWymTZvGm2+
+SWZmJnv27KFdu3Zcfvnl3HfffcTHO87lnJwcxo0bx3vvvUdxcTEDBw7kqaeeomfPntU5HKWucLvDxM1UiOOukvRdd6FHmfFuYWp6FeNYKx4cIQnhiI7EEURUgva+UjBSCODESESEwHEbyXhkH4mZyKTJeFyuIwvKSyE3D4IHILcAyguguACK8iE7H0ryIZhvfi/Mh5wcKMyBwlzznrcf8rN//DP0eKBZB2jXjfIWXdlltWBbdixbi2PYnB/Dtmwf+3KDeKwgPoL4qcCDhZcQHix8BIn0Q0ocJMdBYjTER1nEx3qIjzLGUIQPAlEeIuzimqFyqIj0Egx6KC3xUlTmoTDkoajQ4kCxRW6hRVFRKZ78fUSV7qUZe+hAFqdY3xK/MxN2ZsLsf1AaSCDU/0qiz7oeOp0NkV7HVYjrf0Pci+5KCCJGkjwg/zvi0twKdCDczag0CDyWZVlH3uzoGDBgAO3ateOSSy6hTZs2fPvttzz00EN069aNzz//HK/Xi2VZnHXWWWRlZfHEE0+QnJzMI488wurVq1m2bBlt2rQ54nn69etHZmZmdQ1bOR4Otljc81ncM/BFWEpx/P1BjGsliLmhi2BIiu3B2VLueIWUv08iPO6Qh1MmX4Lf7jResZwASizYmwf5u2HfbijdAzl7YNceyNsHBdmQux8Ks+FANuTnQdEBqJC75nHg9UFyc0huAYktoHkHQq06sSe6E6vKOvHVnk4s3RnNii2wYScc6lvq8UCHVEhrCZ3awEkpcFIz6NgU2reDlEjwiIXgFlCpKhDCSeeW9gGJOAkPYCzLQqpMGD1QAt9vhO92wfKsAgpWfUHbvYu4kI/oy9LKMRbFtyP64t/gGXQzBBKdGJvEnOSBotw+l1SVlvlL7uWSRBAPtEez1OohP3Zvrlax2bt3L6mpqWHLXn31VW644Qbmzp3LoEGDmD59Opdeeinz5s3jvPPOAyAvL4+OHTsyevRonnnmmSOeR8WmDhBRcZcZkbRWuQlJqq8Xc6MQd4jU35KAbwzODHKJWYjHSGIX7qDyPox4uJ9m81zbxuK4vQqx03xDENwLe7bCli2wfSvs3wp7tkPuDti7HfbugNJijplABMQmQlQ8xNqv6Djz8sdBdAIk2suj4sGfBEnJEJUMqcmUB1LIym/Cdzu9fL8NvtsE322D1VtNfOVg/D7o2gZ6toYe7aBbmvk9LdFYKPgIr04gyRNS6FPmzcjES3chTxEbKYmTQLjYxGEeCERsxAKVv0EMlX+//Tvhk82wYP53tP/+da7jddqzBYCKiHh8Z92M54I7oXlb5wFBzi8PJJK+LRNtJTNQ5j0l29eZjBEcrR5dr/ixe3O1utEOFhqA/v37A7B9+3YAZsyYQatWrSqFBiAxMZERI0Ywffr0oxIbpYaQTCL3rHKpUiwuMcm0kqwn981L6nvJhD/JtHJnM0kKrBSelNnjUrsrH6cUDISn72IvlzkbZbmwfoMJpO/dCFuzYPsm2J0FO7OMu+tIRMdCSktIbgZJzSG1GcQ2g5SmkNAEElIgJsW8JyRBXAJE2HfbXMLmlZSWwd5tsDcH9pbCvkLYmw07tsKW/bBlD2zZBzv2Qyh06OG0SIbeJ0Gv9tCrJfRKN8ISGWF/NkGMNYfrbyM3XHE7CRJrkkC8e66OfObudGV5lyQGCdCD0zba5/o95Pq9DJrEwTVnwDWnn8yeHQ/zyhd/4dtZM7mp+EkGlc2Huf/AWvgsnhG/hwvGQ3yM8z8lIutO4BBL1z5+5f+eTGDdhampppUGGgQ1niCwcOFCAE4++WQAVq9eTY8ePapsl56ezquvvkpBQQFxcRoBrFZCrneJmUjRScnIKne9wHnCFDeM3IBktrnMK4nFeUqWbDEpfSL1v9xlWiA8lVdmjYOTQiznjQH25cL2H2D7Otj8A+xaB9vXw9YNR457xCdDi3bQpK15b9YWmrWBpFbQrhVEtYYk10QVKWhZaq4zNx+274Pde2H3dth9AHb
nwu4c2J9nQjE5RZBTCNkHoPgotA2M+6ttUzi5HXRrCye3hm6toXsHaNbE3kgmfrrnA0XgVAkAJ1lBcP+d5IFA5gFFUPXbLtcq2wpS8UCOJ1ZjJOEN3CJc20pMzZ430ywOxl3spXDEhfxn5oX89f2l3FLyGFdXvA3v/gVr4ct4rnkS+l9lMuhkzpGM33fQeNzZa2JJZ2P+/5IO8SEr9Y4aFZvt27fz4IMPMmTIEPr16wdAdnY2HTp0qLJtSor5VuXk5DRusXHf7KWch3yRLNc2B//ufneX/RAhEZeVWBEVrp/FxSUuMAifWS77SSVhSTmWOIvMmZAnY/H1SzC/AvOFd89slxuRLIu0l8u8CynGWAHkBiEnC9Z+b1J9t66FLEn53XP4zzIqBlp1Mq8WHc2rVQdo1RFatIfIeOf87rRiV9bT/jxYuRF+2Aobt8PGnbBxh3nPyT/8qQ9FwA9NE8wrNQlSU6BpIrSIg/bNoF0qtG8JrVpBxMHFLkXEBXmCd9cUO7jFslQTkGUenM/Ze9Dv0YTP2JcSN+4HCvf55CXnEcGSMR5qQqhUbZD4UITRgnGDYUy/Ptw2bQrPfH0Hz3AHfbOXwn+uhrM+hF9MMNai24qS44j17D6PFPusALYRHodT6i01JjYFBQVccskl+P1+Jk2adNzHmzBhAhMmTABMbKhBUY55St2NeRrLtn8vwCm34sfcwBMwN2HJmnJ/6d0+d3AEQlJrJfYhP7tFRMRNsn3E+nBPDJRxBnB6o+zAfJnjcSwRGZOU2/fj1LdylzDJd43Vi0nl/WEDrF8NO9bAljWQtQa2rIUyaQd5EJEx0LYztOkMbbtAq86QkgZpadC0uTET5DOWSYQHf/aWcVut3QLfroMVG2DFelix0VguhyMmCto2g+bJ0DwemjeF5k2guS0iKVGQHIDkVpAcD7HR4JG4USRV4yHyd5EguVtI3AF4ebqPPmgbEQ935pzceAVxQ8lx3EU4ZftijDXpzviKI7wSQBzhM/7luOIyDbjWy3jkQcUddys2x0tNgal3wtuLz2DoxCVcWfJfnvbcRfSnr8LuTTDufxDZNNylJg8KXtd5wLHGmtpj2YmZg6MTOeo1NfLnKS4uZsSIEWzcuJGFCxeGZZglJyeTk5NTZZ/s7OzK9Ydi7NixjB07FqDSSqr3lGFu1tswAiOFCt1l4CNxZpnvxKR2RmCaSollIW4Et/BI0F1KkYjbS9JHJaNL0n4l4Cp9R6TcezHOTHH3E2UB4enIOTgT8uSmVILT4teLiWF4AL9lAvPrV8LqlbBzFWxeBVu+h7LD+JqatIZ2J0PHbtC2K7TvBqldIbY1NPGGP9Fn49wcBblmW2x27IMl38FXq8z71z+YOZEHExMFPU+Cbu2gU2uTzXVSS+jU3lgnHnchSfk8BbkxuieUyix7t1tKLAiPa7lMMnVbD2I5yAOEpHeL4IgIiDUiDyOSbCH/E0k4c5YkM08eLnyu5eBUeHbPf7Fw+u4cXEVBKizE2eOXhxUReUkikOC+iKwdq/tZD+g5zsdFz4/ljL39+MgzghY/fAr3D4A/LILEVuZY7szDSNfnJunrxZj/a/lf3IdpT6Dxm3pLtYtNeXk5V155JZmZmXzyySdV5s6kp6cze/bsKvutWbOGdu3aNQ4XmoW58W7AfAkkuCk+Z3lyDRLuGhBLoQDIwtwkEjA3dAmMiziJm8uLIyYROEUPpQRItv1zAKcXSjxO//p4nCZaxZgsHx/mxi2pxeJ6kb4nYnWVAeWFsHMlbFgB36+ATStg8woozDv0Z9OsLbTtAe3SoUt36Ngd2nSDYKK5FnniBqdyskyGFCTmY9+QysqNxfLFEvhiI3yxBrYewvvWthn07Qq9O0GvDtArDU5qAd6DqwTI3+hgl9XBiHhYrm3lpn2wEMpTv9tKFWGSa5KYiHtZDOZvITdbt8Ugri+xgOTJPwbn7wPmb1zi+l0yuuT/LxknEcNdrVmSEhJxYmwiJD7XOSS
LDHtc4ip1u1ylDE8ITu4IX/4JLn2qD33XLWGWbwQ99nwLk34Jd30MEfaHJ+Nwf5byOfgJrwKxF+e7otRLqlVsQqEQo0aNYt68eXzwwQcMGDCgyjYjR45k0qRJLFy4kHPOOQeAAwcO8P7773PddddV53DqhiDGktmCM3dBXjIjXX6XOIrMRxH3lnzp92OEpynmSx+LU3dKgqkSj3BPlpSn30KMWAQxBSjFCjmAU5dKnpzLccq8SyxF/P0yU754JyxfBtuXwdplsHEZ7Fh36EkgiU2hY09o29O8d+gB3bpDIMHpKSMpuyKwUiDSfcNwu21sduTBFyvhyy3wxWr4Zi2UuOMdmESnU0+GU7vBaZ3g1B7QsoVrA/ncDmVoiSUiNzaLcFek++YXQ1UxlMmnHLQsx3VMydzLw7EK3OIiVojHXi9/F1kudcaSXMfKxSmJ45706nOtl4cSsUSl34w8bEiKsVTSlmzBSHv8CTj/M5KxFsARswicB50S+/hisdnWlhWwyPdsZOxN3/LQf05n6OYPWePpQfKKWbDgObjkNmcMUpVB/kclViQJJGJVlwHbgZPQdOh6SrWKza9//WumTp3K/fffT2xsLF9++WXlujZt2tCmTRtGjhzJwIEDGT16dNikTsuy+P3vf1+dw6l9gpgqtdtwKgaLyBwsMO4aUvI07XaPlGJuJPn28VrgPNFKsDcXx+KRYGoSjshJGRdxy8jNROqQiUsiB6cOmAfYZ0FBFmR9A+u/hR+WwqZvTXmVg/H5oe3JcFJvaN8L0npDu57QpIU5mHtSoaSySlKEiC+Eb+d6oi2tgG/XwpdZxmL5YvWhrZZu7WBgNxjYBQb2g5Pbg09uOiJuVcaO40J0r5ebpyBzPfyEd7EERwgEESbJAnPHrERY5Vt3cPmWg5fJcskac49VHjBCrmXxhDeEi8exSsFpSCafcSLOQ0rA7FvmKWNd2TrSQmlERkSa68u190/CmTflrqwATvsGQVy60WafwqJCJpdNZmrhVDJLM8kN2Qe9JhFemc/Nu15gGldR8cbd+HsNhaadzTFF7Dw4sUx3BQIRPZlntBfzXVHqHdU6qbNDhw5s3rz5kOv++Mc/8tBDDwEmPnPPPffw3nvvUVJSwsCBA/nHP/5B7969j+o89XZS53ZM4yf3ZEcJBMu7uEnkybkIJ0grN2JxkcjTnTyhxtmvWJygrLjSynCeSsW9kYCTepxE+M1JMuCiLNiwETZlwrZvYP03sG4pFOVWvb7YRCMq7U6B9hmQnmGEJhBprCWZ6Q2OkEiMR84tM8TFrSRPxvnm56074MtN8MVa+GIVLP0ByioIIyEWTkuDgT1hQC84rTukSFwiH6dZmOAOMrurCsTipHu7MwLjcQRbRDEOx2Up1xi0P2OJxUmQXx4GDuBMgCyzzycxMvn7iiDJDd9983dPxJTgvPt/IwHHvWnhZAQW2suScSyDUtffpxAnJmSZ9YWeQl7a9xJPbnySbaXbiPHFcHbi2QyJG8LQJkPpGdUTj9/jtAWQz/KA/bMc7wBh4rneWs9/9v2HiUUTybMc12pzT3P6R/WndaA1S/Zv4tv//IfXcv7IaCaz/eRetB63DKI94b1xZPySECPfA8nckyreHdHW0nVErVUQqC3qpdjkA6tweq6I26AC5ylZZtcXYb6oslzmu7hTi2W9xHlyMDe8Jjg3O3EjyA0rBsdqinUtL7F/j7Bg0xbYmglbvoZ1mbDxm0MLS2IzSOsLaX2g9SnQ/hTo1BH8HnMdEhiXm/d+nJIz4LROTiC88KIEuv2mCvHStfDFJvhyBXz5w6Gzw7q3gwEnG2EZmA4ntwVfPuHdM8Ep+hhFuM1ejtMCwJ1uLFNsDhBeeVliICIsQRyrQCZSyucvFobcdEUEJGYh4iDxKMnok3iNxKgKcSyOOBwXnwThk3GKVObilOaXMcnTvwi6ZAaW4KQGl9nbituuDPLL8/nn9n/y9Man2V++H4CmEU3ZVxb+h2gb2Zb
rm17PDa1uoEtEF6cskVsEbRdvYWwh7+W8x6t5rzK7xInPDowayK2xtzIoOIjWia3xSGymFL7ZXMbIv+1mdXlPkshj5i2/ZviwfzsPKkH77yWeAJlMKxa8xCujMW7nNlQtiKrUOCo2NU0FsBYTcC/FCdKL4Ij/GpwvqLi+PK5lsp3c4OTnEI7LIBXHpSCunVIgEULBEPs9+9lZupP90fuJPlBIysYNJO74gbiN3xGzeRWeA4dIG09qAZ36Qoe+0KkfnNQHmraCkMecQyoZi6DJjVGy5cCJB7jmSJJNpSCFQrBuCyz5Ab78Hr5aB8s3OuXtK4cSBwO6OsJyaluzrNINJcjnkeBaJlaBuHpkWcgel2TYibUiAiKiAOZGL/GXEnufWMJTqvPtz7wJjpBW4CRcuLcttF9NCU/hzbXPI5lsIpT237LS0pKYkjsZQQLw+faxJMnD7WqTb3WE6ziSuWY/wIS8IYbOG8q83fMAOK3Jafyh+x8Y0WIEe0r2MHfXXObsmsPsPbPZUbqj8pIGxA/g+mbX0zmqM7GhWOIq4oj1xbKudB2Tsyfz7oF3KbRMvZtITyTXxV7Hr5N+TV9/X2dOkVhxYvEHYOpS+OyZp3maO1mfEEXCcz/QLLKt+f9zW8jyoCDfmQTXZy6TjE9CJ3vWAbVWruaEJR9z8wi5XuL6EdeYfPndbi6fazvxpcsN6eD5BWIR5FE5435zaDNzDsxhjmcOX2//jBbbd9J3e5DTtsGp2yGtaoY5uTF+drRtRUWnDFK6Dqd1ixF42rVx0mHzcG7C4v6D8HiK3LAl7dr9VB0yb5t2wdIVkLkZvt4I36yDAwelHnu9JivstHQ73tIJunQAr4iIPLUXER6Ah/AqwoIErCE8hVhiIDEYC0TcUu6YiKSku78RMifl4Cdk6WXjTgzwHvS5uVPUI3BccnJeERSxaEVEmuC45URIJNhf6LoWSQSRuVUlOC5JL86DSAAnc8vrOr8Xnst8jnm755Eak8qUK6Zwbsdz8dhxthaeFowKjWKUNYpQeYhPt3zKKyteYeq6qXyZ/yVf5jvx2EMxMHogoxNHc3Xc1TQpbuIkN0hMSzLb5DPyw1X9YcnQ21j9yQTSD6xh3uTbGHTZ++Fjl+w2ca+VEv4wJi7Mvfbno3e4eoNaNseLBfyASXEWt4VYJe74jDxdSsxErBb3DbgYx5KRny3Cbng7infwepPXmFP8X1K2rWPANhiwDU7ZBZEHWQklAS/ft4xmWdtIvm7j4eMW+9mURJhPvaO/I5fGXMqlTS/ljOgz8OX6zBOhiFsZTtBa3FHytB2EfA+s2QGr18LyTfDtVvN+sLAAtGoC/dNgQBcY0AP6dYE4cfnIPJkInIKQImB59nq31STJFuKvB8evL6nbkkoe69qmFCM4CYRXDZaEjoiDjidzV0RYRPhj7XO441AyvgIc60JcfRLQFkFLxJm0eADHVSfuPDmf/A3AqdoAjnUgbr9IHHedexLoYdiQvYFez/eiqLyId372DpeffPnhN3ZRWFbI/777H+//8D7ZRdkUFhdSUFxAYWkh8d54rky+kuvirqOTv1N4eRlx60mM8uCHAtvKL/XDjXdM5/X9l5LvjSLuqQ142rRyHthEZEW4xGqVdfIZJWKsyWZHdVlKNaFutJqkBFiB4z4TkZHUVPHHS+BWArbgCIncrEoP8XMIKChhVc4rfLX3KVJ2rGXANmjpzoACLI8Hq1k3vO0HQtqp0OE0aNkDIvyVwdS8nDy+8X7DkuASlhQv4bPSz9gbdNxqTX1NGRE5ghua3sDZiWfj8XigECp8sCUX1u+AdXtg3Xb4YYepUrzlMDPwWyRDRkfo3wH69TTC0qoJ4SVyxDqIxOmqCY5rRG6WxTiVfgURPnfAXdwrctx8nHiHe8KiWAhu10wQcxOTNFuxZsRNV+T6+4lQWBhhKbTPIeIVdF2LO2lC5lodZAlWWihud55bPCJxMrFEDGXdTyBkhTjvlfNYtHkR1/a
4ljeueOOnHchNMSY5ZhvO31geruTBSlKZpepBAc7nLw8cxbA0q4ItD17OpbzP6vRLSP/Le+YcIlpihUsGnDwQyJwjydiMBNLQVgS1iLrRahKZke9+inRbGOIqsA5a7y4tI8vk5nFgD2xbDDsWU7RtDoHdK+gRsujhOkRZdDz+NmfibXs6dDgNT+dT8fgSHb+8fLklPmRBoi+RQYFBDIoeBOUQ9Ab5qvgr3i14j3ey57Fph8WknANMyv2AlAO7aZE/kJJ9rdmy31sltiJE+E0xyfS20KsVZKRBRndokYJjlSQd9PlI6uzB8yGiCZ/AKMg1iStK8ONYGBA+Q9+PuRFJ+RU5VwVO1pZkcYHjcom3lxfguMXA/L0kdiTzbcRtk4STGCIWSXOcJAG38Mh1HDxnRAL5iTguMHGDHTyx8Tj595J/s2jzIprHNudfF/yreg4aDXTHpB1/j7HWJF7kdf0sD1OS+BCD4xq0/x592vn587mDuWDBLNJXT6d41VdEdznNnEfS9mUyNDhJNQk4E2gl7X8fWhm6nqBic7wU2u/i1nHHZmQ5OLEb92S/IGZC5L71sO5T2PYZbP0MstdVbhJj77KqmYfilhmcnPBL4loOJaJjFzPT2oeTbSSB1BCUBiEvx1QuzsuHnBDsy4a9B2BvGezKga3ZPrZmn87W7NMpOKg0Wbb9EpolV3ByKz+dm0NaO+jSxGSJdWoJ/gSc7K0InEC+B8ct5P5PkziCfF5ugXAHugWZQ1KMI8oiLDLxMAenr43gxbhRZMKh21KQuT45hIuePAi0xsm4Ctp/iCR7mwKcGIlUfhARicJJOpCxN8WxUt0CZccqwuqO1TDrs9czfs54AF64+AWaxDSpvoN7MDGnU4DVmM9WYlju+UBi3chnJRaiuCijoN1lW3hqyXWML3qZPf+4k/ZPfg7xnnCBTiDcPSqxG3eGWi7ms3Ynlyh1gorN8SJPx/KU5fYti7h4Xb9bYBXu55P/vUXPwoU02fMpEcW7wg5Z7o3h2+QOzEyMY3Fie8oiL6dn8GJ8ZXEU74XiXVC8HAoroKgCCoPm58ISyC+D/FIoP4wlcjjioqBjc+jYBNqlVJDbdBlfR73F2piPIHkTewIltI3uS7f4a7m61dW0KW0TXoXYh3NDdSOz6S3XthKslyd+tyUjcQqxYtxZVT7MTcmdtizndk9gFKtR3HMRGNEowPHvYx+nKU4iQAGOtSRiFOMaUzFO3bAUwrO+5PpFNHyu5e7PqI7Tce+ZfQ/FFcWM6jmKS7pdUjMniQF6AGswlq2Iu7jAZI6ZuBHFIpTYmgdSAvE8fNV8xrzSnPa5X7J57hu0v2qUOb5YjyI0Mp9LMtzEUqzAZG/uwTRa01ToOkXF5niRCWbujDPhMKb7zn0HGLbm9srf95DKp5zFZ5zJp5zF8lBvKvYHzNwVmwXHOKyADxIiTf/5pGiTPtw0FppGQdOm0DwB2jaBNgmmZlhyPHgk+F/oh5R+QD8yD1zDf3L+w7T8aXxT/A3fFH/DuD3jODv2bIZFD6PcV06Bt4DCUCEFZQUUVBSQ58kjN5hLXkUeecE8CkOFZERncEHSBVyYfCGnWKfg9XnNzedQbrNkHPckhFsisTjldgQJzsvcFBExibeI+6Y1TsxGrBCZhyRP5VLlAJxSK24hcbu03G67BsCyXcuYvnY60f5o/j7s7zV7smigG0ZwJPFDrFJ5kHCLhdsqDUFieSIFHb/luS738qcfHiN66r1YF1+KJyLWKceDs33lfCt3CZ0gjru2CKdvklInqNgcLxIoPgaspA581urXLCrsydKIc9gf25Vor4dAIMi2wEIq/K8T6/MzkuG0IZVIL0T7Me+W+Tk60ZS0jwmY5pGxCRBrQXwCxEdBZBLmSV2Co5KlU4C5mYtrS9KH3TPsXSLZL7ofE5Mn8mzRs3xU/BFv5r7JB/kfsLBwIQsLFx71NX9R+AVfFH7Bg9sfpHlEcy5LvYw/tfsTzeKahdd3Eze
L3x6rzHx30xwn8Cw3F7lOP06bBskAk6oA7jiLFKuUJ25xaYll0gh9/H9Z9BcAbu13K83jmtf8CWOBzhjBkbiZDycd3IOTdAHOhOQiyLaduIU3R7FsfF8yKr5h1dOP0WPcn52/rXsumvuBLxEn/Vwme+7DKQiq1AkqNseL3CQhvHaVZERJ6qfX+X1HfCZbf3MGfwhdg6fCxF2sYosrfFex2/suza3mfFr0KZ09qeGZaSIWXsxTuMx0d1eFllRgeZqXMYnbQr5s8rME6+Vm7Y6ziCsoANH+aK6IuYIrUq4gz5/HeznvsSJnBbGBWGIjYonzxRFHHLHRsSRWJJIUSCLRk0hiYiIBX4BPsz/l410f81HBR2wt3crz25/n7d1v88+T/8no1NF4yjzO7Hm3CIgbTqwcSZUGJ+NJCj1KWnGCa73Es+RmJ1lKEkg+QW4+K3av4H/f/Y8ofxTjTh9XeydOwkyw3IjzuUvGXwXGghWxkf+7GFiRuwKA/k1OZvvIIWS8exadljxJ4fabiW3Z1mwv/ydiFcn/iZR4kgSECMz35gA60bMOUbE5XsQ0d7tY5EYuN/xy5/egJ8gtFbfwrfUtL3lfYpI1iXa04y3fW7zrfZckK4nZZbPpHOrs3Azdbhs55sHrwHEjSWqp3UMkLF4igWz3mN1B+hDhNafkFU1lED7Rn8gNqTc42VruCXfx9r6SLGALwyWpl3BJk0uwLIsVhSu4Z9M9zMmZw89X/pzJqZN5vtvzdAh1CHOl4MMIRxBzs5B6b5KM4cNkP4mVI3XixE0TjxNzccdRTkDEqhnbZywt41vW7slTMX8/CU1KcoDEbiTjThJDArCi2IhNL38vul55Mp/MvoqhhVNZ9vc/kPH0605cVCoJgPO/606NFss2FWPdSGq8UuucIM91NYjUJ3Pf/N2ByoNu5h48/Drwa1JIYV5oHndyJ0VWEfd67wXgSetJenl7hVtLEB6Ilpu7+wYqIuT+4kmmk9vFEH3QsXw4AXA5jjt1OsK1baRrO4l5SA026WsC4QVCg67tY8GT4KF3Ym9md57NpB6TSA4kM2vvLLot7sa9++4l15vrzFlyZyvFAW1x0vPk+gtxRKgZ0BITDG6PucHE4sRdTlBW7VnFtDXTiPBF8Psz6qCyug8TLxMrQyoqiBtUCtTak2HzSvLIKs8i0hNJ55jOeH3Q5JbHKCWCjK2T2fPVV05Cgfv/30N4bT5JDglhHn5KcGrYKbWOis3xIm4aecIXF5oEtN1BZB94Q15ujLmRjyM+BmA96/ln6J9sZSsZoQzGhMY4YmK5jiMZXeKHlmOK7xvXOrcF5HXt73HtKwJUgVM4UrJ8ZF6LHBOcGe5iqUnZEQnay/HBST1NprKqcKULxU499jTzMKbtGNacvoZrm19LaaiUx394nE5zOvHY9sfIT853KgHIuSSduAlm7kRrjIumE8bCkRpZjdRetyyLUChEMBg8ptdfFhqr5qZTbqJFbItj3v/gVygU4pjngkdjWjeD+R9IwIiPFB21qKyGsMRaAkCPqB74g34IQJ8zO/Jxq7sAOPDcXRBnOa7lIE7BU6lHKDXYwKlYUIZJhRbLXqlVGunXshbxYQLWuTiWghSDDOD0P5HYgl0xoKXHuDK2spWnrKcAeIIn8AV94eVhJBNKJgKKa0riLTLnQMRJSqOImMgckEKceIj81UXA5Akwh/DCkIk4T56yXSQmnVVEVWiOM7HRg1MBQGbQy/XIE6ctUC38LXjj9De4K+cuxq0cx8L9Cxm/ejyPr3uch3o+xG0Zt+EL+JzrFjFshAH8I1FQUMCyZcvIyso66pv9jrIdTN0+FR8+0nanMXny5OMeR7NmzejTpw/Nmh1jLZhkzP/Ofpz+TmK9llL5/zlr1ywABscMDmtZ0fO397H73kmkHfiCrGnT6HDxVc73ItJ1TEmp9uGIkFRpKMRYNyk/8eKVn4yKTXXQhHAftLimygi3HV3urCYRTaAUcu3OVKdyKoN
9gx2rQQTG7TIqxolDuOefuCeSuht7SXkPsQp8OHMSRDSiXMsTcOpNSQZXCCOkkskj80xEQMGZPxHAuEJEcOWam9o/y2x+P04JHzvm1b95f+a3m8+sPbP4y7K/8Pmuz7njmzt4dcerPD7kcc5rft6hP/sTiJycHN555x1mzJhBKHTwhKZDs++sfVhpFtHfR/P0pKerZRz9+/cnNTX12MXGh3Fz5mL+V+T/uJiwoqqzcozYDI8Z7sQOo6FTrwTe7vknfrbyV0RMHg9DRoI/0vwPufvXSP8h9/woSYFOxGkhrXe/WkU/7uogAtOw6TucmfEypyASp89GkfMeExHeLP1e7sXj9TiT3sT0l5IzIZwUXREQSTwQH7jESORLLNWSg5gnOZk8l4QzV0GEJoT5IpbjtKAWcZFEALFaZAKll/Dq1SGM8MoTpKQty+zuWNf53MUjY8w1eQIehrcfzvmnns+MtTO4/ePbydyZyaDXBjE8bTgvXPwC7RLFF1OzWJZFMBikvLz82F1G1UwgEMDv91NeXs7evXuP3rJJxrgZQ1Aws4CC3IIj7XFUtG3bltLS0iNveChiMC7QnfbvkklmxyK3F29nVckqYr2xnJF6hpM8YIvGoF/fxPe/eoZuZd+x/uVnSfvN3eEPWAfP4RFrX+baSJUKtW5qHRWb6qIZpr7LDsIbcbmLawZc764nuc6ezlzqv9RJ3RV3gHxppJ9MMuFzFcR1IBlX4Myur8B8mYpwUogPLqEjLrM8nB4pch554pTCk5J27RbTKIy4SAFJSSAotI8nmWNuYRORkTIvhwjce/BwSbdLGHLSEP755T95bPFjzFw/k1NeOIXXLnuNCztfeKi/QLViWRYrV65k8eLFFBRUz036pxAVFcXAgQPp06fPse98JuYz/hantXN9oCnOxFx3ar8P5uyeA8B5CecREWGriPxPhaBpEz+fnPEE3T67mGZz/4J18xg8ESnOdiIugjzkSLxSHsLyMN+Tg1tXKDWGik114cEEqoswPmn5ZN2ZaeKisuMsNwVuYnbFbGYHZuP1e50vn1g3bldUNE6TLKlwK090UnYdnErIkhIqZV2kW2cAY7lI35MQRijlCx1l7yMBVpkXIXN7xJqRJASxnpLt84ilJeMTF9uhUrWPQGxELPeffT9j+45lzPQxfLTuIy564yLuPeNe/jror/i9NffvGwqFWL58Oc8++yy7du068g41RHJyMj6f76hbpleSCGRg/q6fVf+4jgspVJqP+b8Vy7ccFu9fDMC58ec68UKpCGE/aF38mwtZ9MVgzg7OZf1TfyPtgSed7LZoHCGTWKa7ZJI7O00mOCu1gopNdRIBpGPaQ+/FyQKTrBn52b7hvpjwIqGyEF6v13m68+DU95LMGpnAKZaB9HyRdGQvjstK3FISPxLhEMox4iKlPCTZQIpWSjmXaBxrSGbbS4n8aBwhkuQCedXAZMnU2FTev/Z9Hl/8OA/Me4DHFj/GZ1s+480r3qRtYtvqO5ELy7IoLS3lwIED5Obm1sg5jgav10tJScmxu/LOwPwdVhJW9qjeEI+50efiBPDL4fOizwE4PfF0p2q2u2yQB+LxsP3Sx+GdvrT74l+Evr8db4sO4e5dMN8BiaXKg5x8j6Jw5t3oXbBW0NTn6iYK6IWJ4UhwX7Kx3Ddj+90baf8JJCVZkgvEv9wM84VIwomHRGC+qBJXibTXuY5LE5zMtGjMlzkB50lSLJVY1zkl5pyCIzayXxOM5dbdfu8IdMD431NxSuOLxVTNeD1exp85nvk3zKd1fGsWb11MxgsZvL/2/eo/WUMnFhCv26d1OZAfwYf535YWA2WQW5rL6uLVRHgi6NO0j/O9cbchAAjB5T/rw7sR1xFBGVte+j/zP+ueEiD/9+6Ue2ngdgAnY1Tn3dQaKjY1QQSmCOGpOPEPCO9R4nW9y5dErI1CzBesKeYmn4LTCjcRJ0YTidNxUp4Oo3HK2Mj27koAkRjhirN/jsOIRQv
MRMjOGCHpjGk81QlHVJKoDOZXjr2WU5DPan8Wy25dxoWdLyS7OJuRb43krpl3UVrxEwPWjZHOmP+z9ZiKx/WVOJzU+Fj4KvsrAPrF9yMyFBleykYoA3IgsgJKrvgrpUTQ7rvJBNctc7aReKe7aZok2EjqvGTASRVxpcZRsalJmgIDgLMwN3IJUErtpgKMP3o/5qYgbq0WmNnyLTFCk4QRnSYY8Uq2j51i/9zMfrWw92mNEYqTMMJxMsYiOdl+dQG62i8RlzYY0UnAKfnhdknUI5rGNOX9a9/nyaFP4vf6+edX/6Trv7sy6dtJVIR0xh6d7Pcf6nQUR8aD+Z+z4yg/lJsB90zo6STJyDwa6S0kD2LJcOWojrwe82u8WOz6xx/MMSVD011TT8TE7S6TWoJ5ON1RlRpFvZU1jbi0mmD+uSUTphAjOpIiLW4rma8icQ+3+0D+Wu6qBAeXvD9B8Hq8/O7033FmuzO5ccaNrN67ml/O+CWPLX6MP5/3Z67sfiVeTz1UyppGElUANtTlQI4SyWjcD1kFWQB0jO9o/teTCa9KUYhjqQMBL8TccB8HnnuJ1lkzCX65AF/Pc4145bvOUYaxovJwUqnFyi/FxG7cpW+UGuEE/DbWIVKRuC3GzZYB9MV0NuyGcVe1xFgtyZgvVizmSc5dV8pdWNJdeuYE5LQ2p7H81uW8dtlrnJR8Emv3r+XqaVfTd0Jf3l79Njvzdx75II2Jlpj/lRzqZ2LAwXio7COUVZ4FQAd/h/COrOBMSj7ojnXVhU2ZGGeqWGc//weItJz4qJRuktI4Us5G2n5L0doy+3elRlHLRmnw+Lw+RvcazdXpVzPx24n8ZdFfWLZrGVdPuxqAFnEt6NOyD31b9qVPyz70bt4bv9dPSUUJpcFS815RWvlztD+a1nGtCVrH2O60PtCQrBrBjiPmlecBkBhrTwAT91cZzvw0KUVj10Tzl0HTa+9iz4v/otmuLwl+9T6+M0Y6+7tjNyJc8fbPEq9JwWTFxaHWTQ2iYqM0GgK+ALf0u4Wf9/45L3zzAtPXTmfpzqXsKtjFR+s+4qN1Hx3T8Tx48F7rNRZCrv3Ksd934ZTrqU+k2e/r63QUx04ytI1vC/tgc9Fmk5AilSmkriCEF6a1O3xefUkcD7/1fzyUfwf5z91H0oCLIORzMtkOrpwecB1HREeqdRzcqE+pNlRslEZHdCCaOwfcyZ0D7iRkhdiYs5GlO5eydOdSvtn5DWv2rsHr8RLpiyTKH0Wk3373RRLpj6SgrICsnCy2528nGB90Jsa6KQOWA19hfP71hdb2e1ZdDuInEAEnpZ4Em2B5znKT8CIToiWOKZThVKzwmFXtfj6WrGf/Tod9qwnNmYL3guucCdDuyhkHi4nUBpQCnVJJQ6l2VGyURo3X4yUtJY20lDR+lv6zo96vvLycCf+dwJ//+Wf2lO8xGYHyaoK5qfe3X+sxorOeY24RXu2IG6g+Wl1HYOjJQ3lwyYNM2DaBG9rewGn+04zV4RYaSbJxt9YARg+N5L5X/8iT+b+k6JU/EjfoZ1Dqd/4eEgPy4cR/JJ4j0xAKcLIxlWpHNVxRDoPf48ef7zctjZcC84D/AS8CzwKZGJ9/GjAK+DVmblXEoY9XKzTgRJEBHQZwd8bdBK0g1y2/ji3eLc50gXKMRSOlaKThmv2KiISO113PD3QmLns91ievOhOgpZZgEmaZ1CiswGStFeEUA82pxQs+wVCxUZSfwl7gA+AfwCeYtNqmwIXA3cD51G3drbq2sH4ifxv2NzKSM9hYuJGMTzJ4N+9dk5WZgDOROcb1Hkdl99ZfDvHzj5g/AVDy6p+grNRJMpBeToJUL5cJntLyoginSZ9SrajYKMrxUAwsBp4G3gY2Y+IAA4E7gMsJr0Ks/CiR0ZF8cvknXNzyYnLKc7j828v5xbJfsLFoo1PhXKqGS3Zaodk3Oh7aX3U1K+lBdO4W+Oi/ZpuDhUaKzIorTWoM5mPOYbenVqo
XFRtFqQ5CwBpgEvACsAzj6ukF3Iap2qAcFU1bNGXGOTP4Z59/EuGN4OWtL9NlQReu//561hStcTYsxwiN1AMEbrvYy+MRDwFQ+tYj4C91+uGI603aYwjSVVc66RairaNrABUbRaludgLvAf/GWDpxwHXACGo2ntOA4zVh+MHT1MNvO/yWlYNXckOHGwB4fe/rpH+VzohlI5i7ay5Wnm1+uOI3iXHQ5rLLWEFPInO3wXsTjcVywH7l2ec4gElhz8eZjyM9b0oIr0CgVAsqNopSU+QCLwOzME/KfYFfATXdbLQxuIDsWf9dIrvw8qkvs/7s9fyq/a+I9EbyQfYHDFkzhF7revFS3kuURJY41clj4TeXe3nY+0cAKib/DSpKncKxiTgtC+xq0wQx4iM10qIxiQJH13lbOUpUbBSlJrGAL4AJGIsnGfgFMJSam3jQGMoXBTDZY9FAqSlh859e/2HrkK38Je0vtAi0YFXxKm7ecDNdv+nKa3teIxgKQiG0ioa4Qca68edsg5kTnTk2B1eRlmrp0jY6HyfbrbC2LvbEQMVGUWqDPcBLwELMzewM4GZMpe7qQlp5g7lJN3RkDkwCRpjLIdWXygOtHmDz6Zt5rftr9IjtwZbSLfz8u5/TZ0kfZmbPxApY3H2llz9hrJvg249AdFlVcZeEAxFnP8bqOUBYK2qlelCxUZTaIgjMByZiSuA0xwjOWVTfN1GexhvDxESxbkoxacrxGEEtg4hQBKObjGZZv2W83OVl2gbasqJ4BResu4Dz15xPYuo2SvpdxirS8e3fCrNfc45rH6Ny/o0QwingWYgR7rKav8wTBRUbRalttgHPY6oO+IDBGNdadczLEbFpLOnWKRjRKcFpPJhEZaNAn8fHDfE38EPPH3ii3RMk+5P5JP8Ten/Xm1Mv/IK/cR8A1muPQEGFcZPtw8k4y8WkOhdjHgakbloERmjyUKoJLVejVCsFBQVs3bqVAwfqX79dv99Pq1ataN68OV5vHT9nlQMfA2uBSzFtJ24CXsfEdn4qRfZ7Y7BswNz8EzBiU2a/3IU0bVdYVCCKe5Lu4eftfs6Y78bw8f6PechzJk1bbGbdrjQ671wPc6fA2aOclh0ejJUTwohNFEaMoux1UTiTdbUa9HGjYqNUK1u3buWll15i2bJldT2UKsTHx3Pttddy6aWXEhkZeeQdaoONwH+AKzFlb8YAb2BSpn8KjcmNBk79smzMxMtcwoWmjDBXWLOIZnzQ6wOeyXqG32f9nn39/sojH/yBidyI9fbDeIZdC1He8IaD0po9DiM8kvacaK8/QN1Wg2gkqNgo1cqBAwdYtmwZ8+bNq+uhVCElJYUzzjiDUKieRX1LgDeBy4AewPXAVIzVc6w0NjcaGAsjBhO7ScRJhMjHsU5EPELgLfJyZ+KdnHPyOfyMMbw+50/8saQd7bd+B19Ph7MvCz9+OU6PG6kqEMQIW6R9rgTUujlONGajKPWBIPAO8DXmEfBqoPdPOE5jc6MJTTCfkcz+j8MIQQTOpM5SjNvLju+cEn8K3w5YTOszF/Ak9wBQ+OqDYLkmIpVjhMz92G33yalsLy3uO+W4ULFRlPqCBXyISY/2ApdBScYxVoVsbG40IRrH+ijHuNAiMNfpLtQZgxEme4JnXHQcM2+8jP8yhr00JfaHVaxd9IIRpDyc6s/FruNCeKJAKcaVphwXKjaKUt+YD8w0PxadXcSssllY1lGWBRBXTwPsaP2jeDHWTBTGCiki/O4lQmE3VHPTtXUUgwfG8Ax3ALDt5dv5as9XRkik3UAZRqhzcCyZkH2sSIzYaL2046JGxWb48OF4PB4eeOCBsOU5OTncdNNNNG3alNjYWIYMGcLKlStrciiK0rD4ElNfLQRzK+Zy15y7CFlHEWuKs98Lam5odYa0BIjGCEUIR2SKqFodAIzoFsBvhvt4ll9TQAyDNwS5//ML2O7dboRLXlL9Oco+3gGMVSMdQxvjZ1qL1JjYvPnmmyx
fvrzKcsuyGDFiBDNnzuRf//oX77zzDuXl5Zx33nls27atpoajKA2PZRD3URx+/Ez4dgJ3f3Y3wSOZLOI+a4ylVryYrLBijNgkYtxmMkdGXGIh+1WEsVRKYUgGNGmVwgRuAeDGRTlcsfIKSkN2S1MLp6KAD6eWmpSw8WM+08ZmMdYiNSI2OTk53HXXXfzjH/+osm7GjBksXryY1157jWuvvZbhw4czY8YMQqEQjz/+eE0MR1EaLBEbI/hl5C+Jj4jng80fMKftHKymP+JSa8yWDRjrJoCxaKSdcxRm8qcE9b04rrB4IBG8SfCry+Ap7qLC4+eqNbBzy1f85offOJ1AY6maKJBkv+fZ75oo8JOpEbG599576dGjB9dee22VdTNmzKBVq1acd955lcsSExMZMWIE06dPr4nhKEqDJs2XxkdXf0TzmObsj94Pt2Bqqx3q29uYLRtwJnlK4F5u/iI80RjLRCo8R1LZPuCG4bAn0JYp1s/wh+B3X/l4cceLTNg8wWkffbCnUtpIezBJBY31c60Fql1sPvvsM1599VWeffbZQ65fvXo1PXr0qLI8PT2dLVu2UFDQWB/JFOWn07dlXz6++GNOyjvJ3DyHAr8EUl0bRWCexKHxWjYeHAsmHuPmcru2JLNMBMJFk0S48hz4O78D4FdLAyQWw+2bbmdJ9hKn300exv3mdR1D+t0cQF1pP5FqndRZVlbGLbfcwj333EPXrl0PuU12djYdOnSosjwlJQUwLri4uLgq6ydMmMCECRMA2Lt3b/UNWlEOgcfjITY2lpYtW+Lx1F29/uTkZOLj4/F4PCREJDBg9wA2vr/RNGJrg7FyFmBusmfizIJvzDW9pB+NCE8Zxg0mkz1lYubBVMDNg+C8OX34zD+IM0vn8cr607m05+eMXT+Wb/p/g8/jC+/qmYfjmpPEgSKM0CnHRLWKzeOPP05xcTH3339/dR4WgLFjxzJ27FgA+vXrV+3HVxQ3Xq+Xvn37cvfdd1NUVHTkHWqIyMhIevfujd/vfFU9GzxY/7FgGKYh2xDXDtuAT3BaDTRGPJhEgV0YgYmnMuussuum2x0WorK22jnp0Lk1PLz9Hj5mHiMXZtGpTzuWFyznhe0vcFvr28yxJDZk2fsGcdx3eRhRb+g9g2qZahObLVu28PDDD/PSSy9RWlpKaWlp5brS0lJyc3OJj48nOTmZnJycKvtnZ2cD5klOUeoaj8dD165d6dy5c10PBa/XW9W6KgXeB9YAF2HEZQGwrpYHV1fEYKyaQowgeO33FJxyM9IErQATu0kAjwduHgm/f244W6O70Xbf97yx9y5OS3qKBzY+wM+Sf0bTxKZOGrW7dloBZrkPI2o12eK7EVJtYrNx40ZKSkoYPXp0lXVPPvkkTz75JN9++y3p6enMnj27yjZr1qyhXbt2h3ShKUpt4/F48Hg8dV8d2kVkZCTt2rWjV69e4ZM8F9jv0UCvmh9HWloaMTF1XHxNkgFiMTd+SXeWO5ofp9umBP9tbhgO903w8EjJb/kPv6L/7C8YcssQ5uTM4f7t9/NCsxfCz2VhXGjSzTPgOq5y1FSb2GRkZDB//vwqy8877zxGjx7NjTfeSFpaGiNHjmTSpEksXLiQc845BzDFG99//32uu+666hqOojQ6UlJSuPrqqznrrLOOvqJADY2jffv2dXb+SkTvEoC9ONaMFOcswCmu6aJZMlw0EF757Hr+Hnkf0Wu+ZELxm3TxLODFbS8ytsVY+ib3NftZGHGRO2UAY1UWYrLd6s+zSL2n2sQmKSmJc88995Dr2rdvX7lu5MiRDBw4kNGjR/PEE0+QnJzMI488gmVZ/P73v6+u4ShKoyMmJobevXvTu/dPqdBZvdRl0kQlUfYrhDMZU7z3pTidNw/BL4bB9M9ieSPiZm4sfZyOs2bw21G/5e/b/s7d6+5mYe+FzkTRJJw+OtjnKrKXRdXIlTVKal2XvV4vH3zwAUOHDuW2227jsssuw+fzMX/
+fNq2bVvbw1GUBoW49+r6VS+QRAHJRovBCEOkvUyKdrpfJUAOXNgdUhPhT/m/xvL6YNFU/hh/E0n+JBblLeKLwi/M/kmYR/ICTGJAOU62mxbnPCZqXGwsy+Kvf/1r2LKUlBQmTpxIdnY2RUVFzJ07t148rSmK0sCQuI2UqZHkgAScatBx9ksskzgINIPR58NW2vFti8sgWEH8x2/yq9a/AuCJrCecFgaSIABO6rMfk5ShxTmPGvU4KorScPHjNFeT6s2S/iyZZH7X8kgqRecXF5j3P+b82vww4wV+0+wWIjwRvLf/PX4I/hB+LgsjYGUYq8aD47ZTjoiKjaIoDRcPRmi8GCtGqkC78yeKcKo3uzyAPTtB7zT4oPgcDjRLh+zdtPz8C37e6udYWPx949+djSXxIICxcmT+TfFB51IOi4qNoigNmygcF5nbrSZxmlIOmygweiiAh7fibOvmvX/zuxamnM0rW19h997dJvNM5uq4y9dgL69nXcbrKyo2iqI0bNyJAtImIAkjPAU4VZ0PkSxw3Wlmouf4LddjxSTA6sV021PCJU0vodQq5T+7/uNUECjGuM/K7fNG2+c8xmaqJyoqNoqiNHxicCpBgzPXJg4jRPEYiycWx+3mgVbtYHBfyKmIY1XXG8y+05/nt21/C8CrO1/F8lsmViPuszyMaw6MhZOHutKOAhUbRVEaPlJRIAZjeQQxwiOJAu6GaNJszW4/cP0wc4jHC01jNT6ZzNkRp9A6ojVZpVl8GfzScZ/5XK8DOE3XNCvtiKjYKIrS8PFgYjd+jDVjF94MK5YZxHTdhLAYzmVnQ1QEvP5DOiXdzoTiAnyzpnBN82sAeGP7G1XPJ9aRVNdWV9oRUbFRFKVxEItTXiYaIygSx5GCnJIK7SI+Bi4+3fw8t9Wt5oePXuC61qb549s73qaivMKJ/UTiWDd+jAWlWWlHRMVGUZTGgXTnlIC+TOqMwFg0ZZjMMREf6VlTDteebQ7x16wrIKEJ/PAtp6wP0jW6K3vK9jBv6zzTUK0Ap2aanNNnL9emaj+Kio2iKI2HBMxdrRTjQvPjTPxMwVg/MrEzYP/shQtPg4RY+HJjFNlnjgHA8/EErmthigO/sf8Nc1xpa5CHETXLPkYFTtKAckhUbBRFaTyICy2AsWTAuLikzUAAIzzROC4wC6Li4bKzzOZvRN5kfpj3FtfFjwDg3b3vUhZdZo4jx8rHsaKk7YC60g6Lio2iKI0HP+bGL6nQhRhX2cF3uiIcl5gdw7nW7nj63LfdoNeZUFxI2udLSY9J50DwAJ/lf+bsL1ZTJE6BTnHLKYdExUZRlMaFVBCIw4iBBPbF6pDyNVJk02ZQH0hJgDVZsL3/jWbhx//lopYXAfDh7g+djUP2vlE4XTyhcbfjPk5UbBRFaVxEYURGBCYF4zaTzp2FVO3kZUHAA5efaX59uegqiImH1V9wVW53AD7c9aFT0LMCI2rgWDklOHEcpQoqNkq1Um96nRyC+jw2pZqR6swVGFdZFKaKgBdj8Rzc6yZoll11ntl98uJYrEEmOaDPgm9J8iextmgt6/eth1z72HJ8cFKqC9GstMNQbZ06FQVMr6JzzjmHpk2b1vVQqhAXF0fXrl3x+XxH3lhp2EgSgBTKlMoBEtNxY1E5V+a8ntAkEb7bAhvH/JJOH7yAd+4bXHjuUN7YP5UPcz7ktx1+a45dirFkJNtNUqyLMcKmhKFio1Qrbdq04Ze//CVFRfUvD9Tn85GcnIzfr//2jR67FA1xOPXSyqha/TmIieFUAAEIRJistJc+gFc39+dPHbpD1hpu2tKaN2IxYtP9t06vHOz9PTitowtxrCelEv3WKdVKVFQUbdq0qeth1GtCoRDBYJBQqH7Wpvf5fPh8vobtdvRiBCcfM/dmH5WCUikCFRjLx0OYCF11rhGbdxZ5+NPwG+D5ezl98To8wzwszFtIUbCIGH+M2VgqFsTY55LUazmXUomKjaLUMrm5uXz++ee
sX7++rodSBb/fT0ZGBv369SMqKqquh3N8xGBcaD6cuTFSw8yDU77mIK/quadAUhys3gTru4wmzfsHIpfM5uzh3VkYWs3SvKWc2cTOJKjAiIvPPlYBjjWlYhOGio2i1DL79+/nnXfe4cMPP8Sy6lfqUnR0NDfffDM9evRo+GITgckYsyduEo2JrZRhLB0J5Ieq7jZyILz6CUz9vhV/6H8+fPUxt6xKYGF3WLJrCWdGnel07oy0dxQLSWI5cTV6dQ0OFZsGjGVZFBcXU1JSUu9uWgARERFER0drjOQggsEg+fn57N27t66HUoXo6GgKCwvr5f/TMePB3PD3YywQ6bQpwhPACM3B3swgXH62EZv/LYQ/XD0GvvqYIZk7oDssKVjiZLHFYdxnUnHaax/3ACblWvN9K9G7QAOmtLSUTz/9lE8//ZTS0tIj71DLdO/enQsuuIAWLVrU9VCUExWZc+PFWDR+TLaYxG6kejMY8SgCLBh2KsRGQ+Za2NJlBO1iE0jduJlue+HrqK/N9sn28dytC1LsZTJxNLrmL7GhoGLTgCkrK2PJkiW88MILFBQUHHmHWuaiiy7i1FNPVbFR6o4IjKjEY8QkH2PJuHMfLIwASbM1L0QH4MIBMHU+/O/LaO4850r4aCJjVvoZn7qRfZH7aOq30/tFsIpxhCxg/6xiU4mKTQOnvLyckpISSkrqX/emsrKyeptxpZwgeDE3fElHLsSZ0Cl3PxGag9KiLzvLiM30xXDnz6+Hjyby81U+/nBuBV8f+JoLoi9wNg7izO0pso8tlQb0LguoR1FRlMaOlJWpwAhKIk5bgHyMOBxioucF/cHvg0XLYf9JZ0OzNrTMLuX0rbAke4mTYGBhrKUYnKQBqSSg7aIrUbFRFKVxE8C40Upw4jQxGBHy4YiRu3xNCJIS4bw+EArBB5974bxRAFy/AlblrjJJALn2K4BzN5UeN2IxKYAaeEotEwwG2blzJ7t27SIYrLsiUlFRUbRu3ZomTZo07MmLypGRCZ4SuA9gBKEI4/Y6+JHbwiQTlMClA+GTr+G9xXDDjaNgymNcuQYmFK512k77MKJSgXGlSTVoH0aIEmv6AhsGKjZKrVJSUsKcOXOYNm0axcV1V4+9ZcuWjBkzhkGDBqnYnAhEYyyRKJx5NxZVheag8jUjz4ZfPwOzvoaiB3sS2eFkmmR9R7uVawn1DeGN8DruM0k0KAda4tRK07gNoB+BUssEg0E2btzIwoUL6zSD7qSTTuKCCy448oZK40AmXsZixGAnVeM05ThVBexkgTbNoF83yPwe5n4DIwZfB//9Py5fUcY2ttEupp2T2SatBsRqkvppkuV2gqMxG0VRGj+SlSZzbeLt9zKcDpsF9rKDhGHkGeZ9xmfAOdcAcOn3sK5wRdVim9K0LQIjOEGMtaNJmSo2iqKcICRgbv4SZ4m3lwHkYCwSySCT93IYcarZ5IMvINQxjaz2TYkvg/J57zluM3tb/Dhze8SqsRMOTnRUbBRFOTFwZ6XJnc+HEYZkTCA/FuNyk5pnMdC7O7RtBruyIXM5bDp9AAAtPl1ghKsQE5vJtY/pTomOtdeX1+SFNQxUbBRFOTHw4QhJBU7WmbQD8NnvMjnTAxSBpxRGnG4O8f4SKDv3EgC6rdwE5YWO2y3JPlYezjwbyXw7UONXV+9RsVEU5cTAg1MwU9oAFFI1USCEid9IZ88IGGl3FJixGJq36ccXbSCqPASLPzQCE49TpkYqP+djhCYKE785wV1pKjaKopw4ROKUlonBibdIkWspqilWiZ0AcG6GKcy5YgP48jowtbtZbi2cauI+B2ebeTAutHycyaRlNXRNDQQVG0VRThz8OFlpIUyVZildI9aIbOciMgKG9TM/f/ZVIrN62s1qvvwQSgrDN7ZwrKgIjJUkc3BOYFRsFEU5sUjEERc/xsJJwkljtggvXWO/Lj7NrP7wKw++Fh35og14Sovhk49MckABxoopto/pxZl7U44RuEb
QJuinomKjKMqJhSQAuN1a8nOS/UrAxGHiMe6wAFxoi83cpdDG24VptiuNxf8zghLCWEZlGMERcRHBKeSELsypYqMoyomFH5N5FofjTpM+NILX3qYCY7GUQwu7mkBJGfg2D+Hdbva2n38AZSVO5edEjMDkY7LQQhhXnVg3JygqNoqinFhIirNM7CzCiMDBDdUkI02yzDxw8UCzet+qAWxKgW1tU6G4AL6aa+IzUidNin+WuV4B+5gnKCo2iqKceERjLI4InAmcYuWI0Mj8G5cIXWSLzboVncGCxb2amQVf/c8RGsHCiQkV4JTGOUFdaSo2iqKceMh8G+k3k4Bxq3kwwf4SjOVzUEC/Txo0S4b9+2Jhb3c+SLcrdi6aDkUV4aVpKjCiJiVsCu11J2g1ARUbRVFOPLwYF5rc+D0YQYjAWDmJ9jZSpNN+ee0OngCsu5DPkrKhTRfI2w9LPjOCkoeptSauMzl+wF52gjZUqxGx+eijjzj77LOJi4sjISGBfv36MW/evMr1OTk53HTTTTRt2pTY2FiGDBnCypUra2IoiqIoh0bmwZTjFOGUhmoRmCy0REx2WhyVls6FA+z9f7iIHeU7sc4caX7/croRFJmj48ckCEh/HD9GdPJq+LrqKdUuNi+88AKXXHIJffv25d1332Xq1KlcddVVFBUVAWBZFiNGjGDmzJn861//4p133qG8vJzzzjuPbdu2VfdwFEVRDk0k5uYfj5OuDFXjLiU4pWciYNhp4PMBW8+krCiavAHnmm0/mw4VlhGtBJw6bKUYS0dqs0lXzxOMam3pk5WVxZ133skTTzzBnXfeWbn8/PPPr/x5xowZLF68mHnz5nHeeecBMHDgQDp27Mjjjz/OM888U51DUhRFOTQBnEZpfmA74XdEaYIm7jBbhJLi4fR0+HSFHzYNZvMZLUlKSoWdmyBrNfTsEf4YH8DEakpw3GolGGvpBKJaLZuJEyfi9Xq59dZbD7vNjBkzaNWqVaXQACQmJjJixAimT59encNRFEU5PB5MppikPSdghKcM41qT1gDSn8bFBeJKW3cBe8uy4bSLze9fT696Vy3HuOyicKpBl3DCVROoVrH57LPP6NatG2+99RadOnXC7/eTlpbGs88+W7nN6tWr6dGjR5V909PT2bJlS522ClYU5QRDMtCkdE0cRnTctczKMQIRojJ1+YI+9rr1w9lXvB9ON20HmDfduMzycRqrgdOyQLqDVnDCVYGuVrHZsWMH69atY9y4cYwfP57Zs2czdOhQbr/9dp5++mkAsrOzSU5OrrJvSkoKYJIHDsWECRPo168f/fr1Y+/evdU5bEVRTlSkkkAxVVs8NyGsXA1eKjPUereHmIQ8yG/Dym0WDBwKEVGw9mso2G3ESmI10sXTwmk5cAKWrqlWsQmFQuTn5/PCCy9w8803M2jQIJ577jmGDx/OI488gmX9dLtx7NixZGZmkpmZSWpqajWOWlGUExYfxtqQys8Sp/HhlKwJYCwTabrmBU8MdOq1HoDMb1MhKgb6DDLH/PIj586aaB+7EJOZVobT5uCgYtGNnWoVmyZNmgAwdOjQsOXDhg1j9+7d7Ny5k+Tk5ENaL9nZ2QCHtHoURVFqhIMbqhVjLBKfaxsLIwxF9nZ2skDvjB0ArFnezmw38CLz/vkHzoTOSJwkBAunV44I0AkUt6nWbLT09HS+/PLLw673er2kp6cze/bsKuvWrFlDu3btiIs7wVI0lBMOn89HYmIizZo1q+uhVCE6Opq4uDg8noN9So2YKJximZE4sRa5O4rL66COnqedks/rniA71nekoAjiBl4ET/0avp4NEWWmCY4bCyNoRRjxiTr0cRsr1So2l112Gf/973+ZNWsWV155ZeXymTNn0qZNG1q0aMHIkSOZNGkSCxcu5JxzzgHgwIEDvP/++1x33XXVORxFqZc0bdqUK664goyMjLoeShX8fj+9e/cmKiqqrodSe4gbrRRjdaTgWDhifURU3a1
NUgy0XkJo20Dmfwkj+reHjj1h00r4ehEMGOK44kS7JVGgEKd9tIrNsXPhhRdy3nnnccstt7Bv3z5OOukkpk6dyuzZs5k0aRIAI0eOZODAgYwePZonnniC5OTkynjO73//++ocjqLUS5KSkhg6dChDhgyp66EcEq/Xi8/nO/KGjYlEYC/GwpGUaC8mRRmq1jMLQXRpNKTNhG0DmZkJI87CuNI2rYTFH0Jv++8rPW2ScAIXAfuYpZww822qVWw8Hg/vvfcef/jDH/jjH/9ITk4O3bp1Y/LkyZVWi9fr5YMPPuCee+7htttuo6SkhIEDBzJ//nzatm1bncNRlHqJx+MhEDhBHmcbCjIPJh9j5UjlgFiMQFg4FaFLzCs6YIvNgj8xcylGQAZcAG88Cl9/DIGnzLFFqMowSQKxOLXS8jCW1AngtaxWsQFISEjg2WefDZtbczApKSlMnDiRiRMnVvfpFUVRjh3pb+NuqGbhWCIe+1WMibNEQVR5FLTKxBeTy8YdSazfBmk9BkJsAmxZCzs2QWpHc5cVganApENHYURGOntG1t6l1hVa9VlRFMWPuRsGMMJQQLi1IS2fg1Rmo0V7o8EbIq7zFwDM/ArwB6CfnY37xcxwoXGfB0zcxssJUwVaxUZRFAWMKFTgWDg+nOoB+fY2Ll9QtDcagIjOCwGY/bW9/SkXmA2+/thYLGIlgRO/icW448o5YUrXqNgoiqKAcW35MDd/mXcTi7FAygiP2wAxvhjzw0lmKsf8pVC2F8gYbpYvnQvZpUaocu3jSBzIgxGiEozABWv20uoDKjYNHI/HUy/nRNTHMSnKjyICU4K5M8q/cCQmviJpyxVAGcSXxANQHLeO9PZQUAKf7wJOag0n9YSSIlj3hXG7RVCZWFAZ9wnhlK45Abp3VnuCgFJ7+P1+0tPTueyyyygpKTnyDrVM3759SUxMrOthKMrRITEbKV3jwUzAlLpoPte6QoiNigWgIFTAsNMsVm/2MOtrOLcP0H8YbFxpJniecq4T64nHiMwBzN03GadCQXRtXWjdoGLTgImKimLw4MH06dOHUKj+lZCNjY2ladOmdT0MRTl6ojHCEotTRNM9v7Uc4xbzgzfSS6wvlsJgIef0LeGpt6OZtQQeuQUjNlP+bsTml38zwiUVpqXuWhFmPk8MRmya0KhToFVsGjBer5cmTZpU1qRTFOU4kTk2kTh3x3KcSZgFOJYOEOeLozBYSK8euURFRPPtOtizH5qlnwWBSPhhKeTshTap4UELCyMyIYzQxOD0zmmkaMxGURRF8GJu/CUYKyMFIy4lmAmYYpXYxPnM9P9y8jm7p1k251OgKBq6nw2WBUvnhCcJlONYTBGu38tq+NrqGBUbRVEUNwk4AXsvxv0l9dNC9jpbIOK9JkmgIKeA822xmb0cU5pmoD3f5ts5RlSkRUEu4enOfkxMR5q1NVLUjabUKh6Ph5iYGJKTk4mIqDufQWJiYp2eX6nHRGJiNwU4lkcIE9yX9OcgUARxHmPZFEQVMOws4GWYvQwsP3j6DjbHWzrXWDgej9NuINZ1fD/GysnDxG0aqQmgYqPUKhEREZx55plER0dTVlZ3foOkpCTS09M1RVupisRkpHRNMU5/G4+9rhiwID7CWDb53nzOSoOWTWDnfli5EXp1zoCEFNi1GXZshJadjEgl4GS3yaTOZEwlgXIabekaFRulVomIiOC0006jb9++dToOj8eD36///sohkO6cUlomFyctWdKUK4AIiPfbbrRgAR4PDOsPr8yET76GXu29kDEIFk2Dz+fAyE6muvTBlksMxqoJYERMxUZRjh+peKxVj5V6jQiAD2OJlGGsjjLCGp7F+4zY5FXkQQiGnWLEZvYX8LsLgO6DjdgsnQuX3mLm13hxuoNi/2zh9NCRkjaNjEbqHVQURTkORAykq2YCRmCK7PVBwIKUQAoAOcU5cACGdjerF62G4lKgnx23WTUPfCFzLJljs891PrkTSwO3Roi
KjaIoysG42w2IlVGBSYWW7LQQpGDEJrs4GzyQmgR90qCkDD7dBHROg9TWkLcfNq02x/HiTBwFY+2UYKypIhptFWgVG0VRlIPx4rQHEPdZCCNCfowwRDqWTbY/21g/CTDsNHOI2ZmYDLRTzjMLvp1v3sVyicVJRjiAU9KmsEavrM5QsVEURTkUUgU6HpOm7L5blgJF0CTKVO/YXb67ssHasP5mk1lLMNZQT1tsMuc7kzrdPW6kJpvEg0owwtbIULFRFEU5FFK6xodTH00mdBYCAegc2xmAH4p+MOtDcHoaxEbBqk2wYzPQzRabFQuhNGTuugUYa0aOF2GfowgjOBU1f3m1jYqNoijKofBhLJBijOWRgBGFPCpn/3eO7owHD+uL11NWWAYHINKCc3uZ9Z+sBFp0hGbtoSAH9i13qgmAU8ZGWhr4MBZOI4zbqNgoiqIcDmkJEMQJ7MfZryBEh6LpENmBoBVkfd76yjvqsN7mfdY3mPjOKeeYBcsXOceWOmsJGNdZvmt5Xg1eUx2hYqMoinI4pJyMhRGcUox7LRIjEjHQM9oURfui/ItKd9j5Z5jdP1kGIR+QcQixKbePHWG/KjCCE4lx0zWyuI2KjaIoyuEI4CQJSAtndyp0IVzc7GIA3sl9x8RdIqFLR2jXHPblwdKVQNrZZp9li+CAZVxnQbNt5fGkVXQ+RmgaWRVoFRtFUZTDIckBHozweHAKcxYCfri02aV48TInew655blQAZ4iON92pc3+GmjVCVJawoF9kPWd45IrwLjMJLU6xj5vKUbcGhEqNoqiKD+GTO60MAUzIzCZZCHAC6kRqZybfC7lVjnPbXqu0jI5/1Sz+6xvAa8HetiutHWLnGQDqShwAKfFgBT9LMRpQ9AIULFRFEX5MWzXGBU4LQIC9jI7dfm+VvcB8MiOR9jj3QNeGNwLfF74fA0cOAD0Psscb81n4cf34EwgPYARmVicOTmNBBUbRVGUH8PtSqvAqcwcS6WFMjh6MBclXkR+KJ8HNz8IFZAUD6d1g4ogzF8H9DrTHG/FZ+Y4QZxmbLE4yQKFOJZUI0qBVrFRFEX5MWSGfwxGJEpxKjZLhlo8PN7tcfwePy/se4EPyz+EaDi/n9ls1mIgNR1iE2H3Zti01Vgx+6ks6lnpMhMrqpxG1b1TxUZRFOVIxGLEQAL4kiRQhBEeD3SP7c7DJz0MwJg1Y9i+b7vTKnoZEOWD7qebBT8sNpZSAk6qcx5O985onCSBRlIFWsVGURTlSEgKdAWQhJOiLJM9be5peQ/DEoaxr2If122+joyTK0iJhw07YcMO4GR7As6qz0ziQSROHMiHsWQkxVqaqTWS0jUqNoqiKEfCj5lrU2z/HIkRgwic+mbF4C308mqnV2kRaMGi/EU8tOP/GHKKOcSsJUA3W2zWfeFknQnSMtqPcbHJnJ5GUgVaxUZRFOVoiMYIRDnGxRXAWCcJGFEoAbzQ3Nuctzq+hRcvj+54lOadlwF2CnSX/uD1wfrlsLvAyT6Tpmxi4fhw5tkU0ChSoFVsFEVRjoYonLTnAsKTBEKYOTgJQDyc0+ocHm5v4jf/TboagHkroCwiFjplQCgIm752xKvYPk4BTjwoHqeZWiNwpanYKIqiHA0Bwl1o5RiBsCsJAJU9bSiDe1Pu5dqUaymK/wF/s7UUFMMX3wMn20kCaz43IuLDCFUUxoLJdx3Ti7FwGsF8GxUbRVGUo8GDE7eJtV8HJwmIdVICnggPE9Mmcmb8mVSc9CEA078ucjLSVn9uLKU41/5SCdqPUyPNh1MRugGjYqMoinK0uFsO+DHWSBTG8ijBCA0YEfFAlDeKd7u8S+vuqwF4/vMsclp0Ndus/QrKLbNPsX0MqVIQa7/nY6yofBp83EbFRlEU5WiRMjUyu18arCXgtASQqgDlQBk0LWvKwoF/xeMvpXh7d87adxehpFTI3w+7NzjFPQsw1aA9ONaSHxO3kZbRDRgVG0VRlKNFSsrEYeIqkr5
cghGiRCqTBCpjMH7o1KolZ/UwpsnqNa2Y39IuDfD9l+Yu7MOITCJGrA5grBlJSCihwVcTULFRFEU5WjyYKgJB17vM9A+4tivFaScdCfhgZP8oAJI2X82cVsbflrtqphGXCoyF5E5CKLGPI6VsCmjQqNgoiqIcC3ZLaHwYK8YtCBZOurK0D7AZZk/ujNw4ggMdTwYga8VUdu7fafaTemhB+xWNEZ88nAKdDbh0jYqNoijKseDHsWosjBUSb/9+ABNfcVs5IaAUejSBlsmwO8fLmNZzCXmg+84yLt96EYWewsrOn2EuNHGvlVIZA2qoqNgoiqIcC9JywI9xbUltM7FEIjHCI5M1c8x2nhAM62UOsWBLS0JtuhARguDGb7kx60ZzXDuLrbI4p5SqicC41Rpw6RoVG0VRlGPBgxGWCIxlE8KpAB2FsXricaybBEzxziQ4f4BZNHsZ+LuZ+TZn7Yxgyv4pfJH3hRGoaNfxJUutEXTvVLFRFEU5VqIxN30RBalf5sWJ25ThWDu2O2xob/B44NNVUNayPwCj95p5N3/M+qOT4lyOIypxOJM6i2iw1QSqVWwWL17MsGHDaNasGfHx8fTp04eJEyeGbVNSUsK4ceNo2bIl0dHRDBw4kEWLFlXnMBRFUWoWmdBZgREDsTwqcAQhwrW9XdamqQ/6dITScvgmYMSm5/YyErwJfFL8CZmhTGMlSX+bchyxKsIIUAkNkmoTmxUrVjBkyBDKy8t58cUX+d///kf//v258cYbee655yq3u/HGG3nxxRf585//zAcffEDLli05//zzWbZsWXUNRVEUpWbxYtxlUu1Z5thISwCZmBnCxG2k9EwAzs8wh/jf9l7gC+Df8QM/i70QgGXFy4ywBFznyceIWKR9LKkQ3cCoNrF56623CAaDvP/++1xyySUMHTqUF154gQEDBvDqq68CsHz5ct544w2eeuopbr75ZgYPHszbb79Nu3btePDBB6trKIqiKDVPDE7g3mMvCwFN7HVeTGZZkb3eriwwrLfZ9OM1kdChJ1gW/bYb02hn8U6nhUEMxnryYKwcr/2zCFcDo9rEpqysjEAgQHR0dNjyxMREQiHzycyYMYNAIMDVV19dud7v93PNNdcwa9YsSktLq2s4iqIoNYtkpEn8Rtxb0o9G2gSkYJIEEoBYGNgV4qJg9RYobNkPgO5bTJrZzuKdRlgqcNKdPRhRK7WPWUaDTIGuNrEZM2YMAHfccQc7duwgNzeXF198kblz53LXXXcBsHr1ajp27EhMTEzYvunp6ZSVlbF+/frqGo6iKErNInGbcoyolOBM7JQUZZknI9lqhRDhg/N6mNXL/X0BiNu8DoAkX5IRpTh7/0IgGyM2kfYxKmiQpWuqTWx69OjBggULmD59Oq1btyY5OZlf//rXPP/881xzzTUAZGdnk5ycXGXflJSUyvWHY8KECfTr149+/fqxd+/e6hq2oijKTyfefg9iys1EYQSiDOfuWorJVrNjNvhgWIZZ9WF2HwCSN28CYGDiQCfZQCykSPsYRfb+0saggaVAV5vYrFu3jiuuuIL09HTef/995syZw6233sqtt97K5MmTj/v4Y8eOJTMzk8zMTFJTU6thxIqiKMdJFMaNVoy5m0ptsxiMBSLVAKSpml2WZmgXs/srG3pg+fy021tEbCkM9A10qj8X4MRuInCsGmiQcRv/kTc5Ou677z4CgQAffPABgYBJpRg8eDD79+/nt7/9Lddeey3Jycls3ry5yr5i0YiFoyiK0iCQrp0RGMGpwNxV5c5agZMiHcS42iqgSwto2wS27o9iX4sOpO5az6XZHWga3dTsZ2GsI7GUIuxj+nBaG4gQNRCqzbJZuXIlvXv3rhQa4dRTT2X//v3s2bOH9PR0Nm3aRFFReO7emjVriIiIIC0trbqGoyiKUvP4MJaNuNEKMXdVCfDLpE+pNhAAEsATB0PsrLTPAi0B+HlOT3OcMpw4UDTOHJsSjOBIoc4GVrq
m2sSmRYsWLFu2jLKy8DSJr776iqioKFJSUhgxYgTl5eVMnTq1cn1FRQVTpkxh2LBhREZGVtdwFEVRaocEnPhJPEZo8nBiL6U4IiTP4kEY0t38uKDIZAucsSPOuM+K7eMV2y+pMB2FU0kgRIPr3lltbrTbb7+dq666ihEjRnDbbbcRHR3NjBkzePPNN7nrrruIiIjglFNO4eqrr+bOO++kvLycjh078txzz7Fp06ZqiesoiqLUOpEYC0QqNVv2u0zolGoCMtHTTmkebMdtluZeBjxH7Na1Tk0117wcCjHZaTLPpgjjPsvHaU/dAKi2YV555ZV89NFHPPbYY9x0002UlJTQqVMnnn32WW655ZbK7SZNmsT999/PAw88QG5uLr1792bmzJn06dOnuoaiKIpSe0hasghLOUYcpCVAwLW8hMrOnM0TIKb5RpbvPs0cZ/sqCJaDJ+AkFIQwQuauMJ2MIzhlNBix8ViW1YAMMUO/fv3IzMys62EoiqIYdgLb7J+zMeIj1ZrFGikjbN7NzrKdtPr3VPjqDvbFnESTok3w1xXQtqcRGXGfJdrH82LEKgnHrdYOaF5L13gU/Ni9Was+K4qiHC8y38aDsWoKMGIhQhOksj00ZUAxvJvzLnScC8Bqb4bZf/ty564ci4kHVeCkQ8cQXq3gAA0mbqNioyiKcrxI8zRpBx1pv/JxXF0iGnYl5/dK3oP2i/B4LBYU2alpPyw3yQUhe3vJt/LgpDtLiwGpJNBAWkWr2CiKohwvIjB+TEBfZv5L505pGSDWTimsLlkN0bl0a1nO0pAtNrtXGDdZlH3cMoxAFeC0GpDMtjJzHBpISUkVG0VRlOrAnTEm82BEgPyY2Es0UGEKF+8M7cSLl8Hd/azA7he9daXTRkAaqVn2sctwUp/BabLWQObbqNgoiqJUB3EYkRFrptz+vQinkZrtUtseuR0Li9a+1px7kpcsOlDkjYO8nXBgr5M2Le0LinGSBoKEd/MsqL1LPB5UbBRFUaoDidtIaZoEHJEow3F/eWBL/hYA2vnacUZnsPCy0rJLQf+w0ql9JvNzsI8lqdQlOKJzgAYRt1GxURRFqQ4iMLGWACaOYuFM+JSWAMXAAcgOmXqQyVYyLTzQqQkst3qabfasMvvI/JxijPUiFlIJznwdidtI0kA9RsVGURSlOpC0Z3AqCvgwQpCAUxkgElqGTD20ncGd4IUzOsAqbMtmxyojMIUYgZG6aG6hERedTBxtAP1tVGwURVGqC4nbeDCCI20BxPqIMOvbRLQBYJu1DUIwsJ1LbLavNJlrucB+nDk7EsORXjYlOEJzoFau7rhQsVEURakuxI1WjpP+LOnKUlwzBC3LW+LDx25rN6UFpQxMgtWkm2PsWQOFVnhSQB6wF9iHY9XIMSuAHOr95E4VG0VRlOpCMtGkUoCFsXKkjlkJcAB8hT5aWa0A2FS4ifQmUOhvxj6aQNkBCG4z823iceI+HozQ5AG7MG66cpxOoPU8SUDFRlEUpbrwEp4CXY6TnSZdPL1AIZwePB2AD6M+xB8D/Vp7HOsmZ7WThYZ9jAjMXJ0YKkWLHIyI5VLv4zYqNoqiKNVJgv0uqcsVOIH9IoxFEoArI64EYJpvGkTAae1crrS93znHsMvbEIUjQLEYa2cvxtIpJHzCZz1ExUZRFKU6icWIg9Q3kyy0fExFaA8QARd4LiCaaL7kS7ZZ2xjQFr7jZHOMPWucdOYYjCvNg3HLSVHPAEZwcjFWzq5aubqfjIqNoihKdSLusgBGGEpwRCKKyqKcscFYLuACAKYGp3JaC1iDad9p7VljhCQGp+OnhREgaRVdYa+PwYjNDntZPUXFRlEUpToJYKwb6V8TwsRWpKlaDJVusFH+UQD8nb+T1LSIAwndAAju/R58rvSyChxx8eN06BRhiwE2Y9x09RQVG0VRlOrEg9PfRlxh7jYDso0PLo28lD7ePmxnO//iX5zUoRV5JOAvyYaCvU6pGz9GqPwYCye
EcaWJS02SBqSBWz1ExUZRFKW6iceZ93IAIwhSRUBaRAfBi5fHIh4D4JGyR8joUMRaupr9dq510qYlOUDm1bh/99s/xwIba+HafiIqNoqiKNVNLEZgpDqztIOOxFgoUVS62IZYQxjqGUoeeWS2esYRmwNrTXzGbx9TEg5k4miFfTzJekvGuOuki2c9Q8VGURSlupE+NuJGk7k35RjRkVTmWLPu8YTHCRDgf6n/xwZfGgAF234w+1qEZ6ZJ4oEPp3WBB2cOTz0tXaNioyiKUt34MAIj1gcYYYnAcaOJm80DGYEMJsRNAF+Q1ammF3Ru1lqnNI20L/DZv1s47QykDYEPIzh5NX51Pwn/kTdRFEVRjpkmmFIyUr5GhEcERywdmzG+MayNXMvHHbbCLgjuW+OkSstkTrGMYu13sZLAqcVWT5upqWWjKIpSEyTb7yICkTj10sRSicOpBhANDyc9TOc+zQFoUbKFfxT83UkEkMZpcTh10qIxMRsRnGjqbY00FRtFUZSaIBZn9r8IQixODEbSoL0Y68UP3oCXKRmPs8vTnEjK+NeKF8kvzTf7x2DEBHvfaHtfL0bQAjiJCPUQFRtFUZSaIIApnCmVA0IYoRFrxu5tUxnDsV/eoIcfTn2YX/U7n+wmu9gdudvs4y6BIxlpQRwBEsGRpIF6hsZsFEVRagIP0B5YYf8chREHqSwgrjUpZeNKGDj79htpHxzC37wJJHmTnPk1XoxQeTHiFIPTflqqFUi30HqGio2iKEpN0Qb4DideI+VlyuyXuMAOcSdu72nvWDIyR0esFneNNHGlSSKBio2iKMoJRhKmcoCIhdQ4i3b9flBWWiWuWE5lLKYCY8nIfBt3NQHZNramLub4ULFRFEWpKSIxrrQNGFHwYkRCGp3JHBmL8LbOktYsBDFWjp/wJAFJOJAWBPGu9fUMFRtFUZSapD2m14wXIxoVOBUFpP0AOALjwQhLyHWMAE67AZlrI8eQeA1AKvU2G03FRlEUpSZJBFoD23HaApQQHl8J4iQJWDgVASQmIy60MoyFFEl43MaHEaOkWrien4iKjaIoSk3iAdIwZWQkRhOFEZVSHMsmQNXJKGIJyXopfSO9bSRW48EkI9TjO3o9HpqiKEojIQroDPyA0/xMEgXEbSZCJOIjhTYlW01ca+64jdzBm+P00KmnqNgoiqLUBqkYQcnCyRwDx6UmlsvBiQHY+wVwRMaDMyk0FWhBvY3VCCo2iqIotYEHaImxVLbiuMdiXNtU4CQGiKBIzEZiOe5U6CZAK5wEgXqMio2iKEpt4cG4vGKBLUA+4S6ywGH2cScLiJXTCpN8UM8tGkHFRlEUpbaJA7pgOmvuA4pwrBW3eEjCgFhBEUAzTEXpSBoUKjaKoih1gR8Tb2kCFLpeEsMBZ8JmNJVdPRvqXbuBDltRFKWR4MVkksVjxMU9oVPqnjUQV9mPoWKjKIpSX3DHZxoZ2s9GURRFqXFUbBRFUZQaR8VGURRFqXGOSmy2bdvGb37zGwYOHEhMTAwej4esrKwq25WUlDBu3DhatmxJdHQ0AwcOZNGiRVW2C4VCPPLII3To0IGoqCh69+7NO++8c9wXoyiKotRPjkps1q9fz9tvv01ycjJnnXXWYbe78cYbefHFF/nzn//MBx98QMuWLTn//PNZtmxZ2Hb/93//x0MPPcTtt9/Oxx9/zIABA7jqqqv46KOPjutiFEVRlHqKdRQEg8HKn1988UULsDZt2hS2zbJlyyzAmjhxYuWy8vJyq0uXLtaIESMql+3evduKiIiwHnzwwbD9Bw0aZPXs2fNohmP17dv3qLZTFEVRao8fuzcflWXj9R55sxkzZhAIBLj66qsrl/n9fq655hpmzZpFaWkpALNmzaKsrIzRo0eH7T969GhWrlzJpk2bjl4pFUVRlAZBtSUIrF69mo4dOxITExO2PD09nbKyMtavX1+5XWRkJGlpaVW2A1izZk11DUlRFEWpJ1Sb2GRnZ5OcnFx
leUpKSuV6eU9KSsLj8fzodoqiKErjocFUEJgwYQITJkwAYO/evXU8GkVRFOVYqDbLJjk5mZycnCrLxVIRyyU5OZnc3Fwsy/rR7Q5m7NixZGZmkpmZSWpqanUNW1EURakFqk1s0tPT2bRpE0VFRWHL16xZQ0RERGWMJj09ndLSUjZs2FBlO4Du3btX15AURVGUekK1ic2IESMoLy9n6tSplcsqKiqYMmUKw4YNIzLSNF8YPnw4gUCAyZMnh+3/+uuv06NHDzp27FhdQ1IURVHqCUcds5k2bRoA33zzDQAff/wxqamppKamcs4553DKKadw9dVXc+edd1JeXk7Hjh157rnn2LRpU5iwNGvWjLvvvptHHnmE+Ph4+vTpw5QpU5g3bx4zZsyo5stTFEVR6gVHO1kH02mhyuucc86p3KaoqMi66667rObNm1uRkZHWqaeeas2fP7/KsSoqKqy//OUvVrt27ayIiAirZ8+e1tSpU6tl4pCiKIpSN/zYvdljWQdF6hsA/fr1IzMzs66HoSiKorj4sXuzVn1WFEVRahwVG0VRFKXGaTCTOo9EeXk527Zto6SkpK6HotQiUVFRtGnThkAgUNdDURTlR2g0YrNt2zbi4+Pp0KFDlVI4SuPEsiz279/Ptm3bNGVeUeo5jcaNVlJSQpMmTVRoTiA8Hg9NmjRRa1ZRGgCNRmwAFZoTEP2bK0rDoFGJTV1z+umnH3Gbm266qbI0z9/+9rdj3j8uLq7KstzcXP7zn/8c5SjDufDCC8nNzT3m/RYsWMDnn3/+k86pKMqJR6OZZ/Pdd99x8sknV/7e4skW7C7cXW3nbB7bnF337Kq244ERjoKCguPeJysri4svvphVq1ZV2b6iogK/v/pDcw899BBxcXHcc889R71PTY3l4L+9oih1wwk5z6Y6heZojydWx4IFCzj33HO58sor6datG6NGjaqscn3uueeSmZnJ+PHjKS4uJiMjg1GjRoXtX1BQwODBg+nTpw89e/Zk+vTpP3re8ePHs2HDBjIyMhg3bhwLFizgrLPOYuTIkZWFTS+99FL69u1Lenp6ZasGgA4dOrBv3z7A1Kc79dRTycjI4JZbbiEYDAIwc+ZM+vTpQ+/evRk8eDBZWVk8//zzPPXUU2RkZPDpp5+SlZXFoEGD6NWrF4MHD2bLli0AjBkzhltvvZXTTjuN3//+93Tu3LmyRUQoFCItLU1bRijKCUCjyUarb3z77besXr2aVq1accYZZ7B48WLOPPPMyvWPPvoo//73v1m2bFmVfaOionj33XdJSEhg3759DBgwgJEjRx42PvHoo4+yatWqymMtWLCApUuXsmrVqsosrYkTJ5KSkkJxcTH9+/fniiuuoEmTJpXH+O6775gyZQqLFy8mEAhw2223MXnyZC644AJuvvlmFi1aRMeOHcnOziYlJYVbb701zLIZMWIEN9xwAzfccAMTJ07kjjvu4L333gNMpuDnn3+Oz+cjMTGRyZMnc+eddzJnzhx69+6tLSMU5QSg0Vo2dc2pp55KmzZt8Hq9ZGRkkJWVddT7WpbFfffdR69evRgyZAjbt29n9+5js9ROPfXUsHTgZ555ht69ezNgwAC2bt3KunXrwrafO3cu33zzDf379ycjI4O5c+eyceNGvvzyS84+++zKYx2u39AXX3zBddddB8D111/PZ599VrnuqquuwufzAfDLX/6SV199FTAC+Itf/OKYrktRlIaJWjY1hLRUAPD5fFRUVBz1vpMnT2bv3r188803BAIBOnTocMzpvbGxsZU/L1iwgDlz5vDFF18QExPDueeeW+V4lmVxww038Mgjj4Qtf//994/pvEcaS9u2bWnevDnz5s1jyZIlVVpNKIrSOFHLpg4JBAKUl5dXWZ6Xl0ezZs0IBALMnz+fzZs3/+hx4uPjyc/PP+z6vLw8kpOTiYmJ4fvvv+fLL7+sss3gwYOZNm0ae/bsAUzn1M2bNzNgwAAWLVrEpk2bKpcf6py
nn346b731FmDE8qyzzjrseG666SZGjx4dZvEoitK4UbGpQ8aOHUuvXr0qEwSEUaNGkZmZSc+ePXn11Vfp1q3bjx6nSZMmnHHGGfTo0YNx48ZVWT98+HAqKio4+eSTGT9+PAMGDAhb7/F46N69O3/9618ZNmwYvXr1YujQoezcuZPU1FQmTJjA5ZdfTu/evbn66qsBE6N59913KxME/vWvfzFp0iR69erFa6+9xtNPP33Y8Y4cOZKCggJ1oSnKCYSmPh8lNZH6XNcEg0GaNWvGrl27arW2WGZmJnfddReffvpptRxPU58VpX7wY6nPjTZm09iEoSZIT0/npptuqlWhefTRR3nuuec0VqMoJxiNVmyUI/P999/X+jnHjx/P+PHja/28iqLULRqzURRFUWocFRtFURSlxlGxURRFUWocFRtFURSlxlGxqSOkIOeP8d5771W2IwB48MEHmTNnznGfe9myZXz00UfHvN+OHTu48sorf9I5X375ZXbs2PGT9lUUpeHTaMWmRQvweKrv1aJF7V/DwWLz5z//mSFDhhz3cX9MbH6srE6rVq2YNm3aTzrnTxEbqTqtKErDp9GKzTHWrTzu4xUWFnLRRRfRu3dvevTowZQpUwBT4PKUU06hZ8+e/PKXv6S0tLTKvu6GaNOmTWPMmDF8/vnnzJgxg3HjxpGRkcGGDRsYM2ZM5c3+cMft0KEDf/zjHyvbExyc3lxWVsaDDz7IlClTyMjIYMqUKTz00ENcf/31nHHGGVx//fVkZWVx1lln0adPH/r06VPZJC0rK4sePXoARgjGjRtH//796dWrFy+88ELlOR577DF69uxJ7969GT9+PNOmTSMzM5NRo0aRkZFBcXHxj47/3nvvpU+fPjz66KP06dOn8rjr1q0L+11RlIZDoxWb2mbmzJm0atWK5cuXs2rVKoYPH05JSQljxoxhypQprFy5koqKCp577rmjOt7pp5/OyJEjeeKJJ1i2bBmdOnWqXHek4zZt2pSlS5fyq1/9iieffDLsuBEREfz5z3/m6quvZtmyZZXlZ9asWcOcOXN48803adasGZ988glLly5lypQp3HHHHVXG99///pfExES+/vprvv76a1588UU2bdrExx9/zPTp0/nqq69Yvnw5v//977nyyivp168fkydPZtmyZXg8nh8df5MmTVi6dCn3338/iYmJla0TJk2apCVuFKWBomJTTfTs2ZNPPvmEe++9l08//ZTExETWrl1Lx44d6dKlCwA33HADixYtOu5zHem4l19+OQB9+/Y96tYGI0eOJDo6GoDy8nJuvvlmevbsyVVXXRXmyhNmz57Nq6++SkZGBqeddhr79+9n3bp1zJkzh1/84hfExMQAh25JcKTxiwCCKdo5adIkgsEgU6ZMqWxjoChKw0IrCFQTXbp0YenSpXz00Uc88MADDB48mEsuueSo9nU3RTvWVgKHQtobHEtrA3cbgKeeeormzZuzfPlyQqEQUVFRVba3LIt//etfnH/++WHLZ82adRwjrzqWK664gj/96U8MGjSIvn37hjV8UxSl4aCWTTWxY8cOYmJiGD16NOPGjWPp0qV07dqVrKws1q9fD8Brr73GOeecU2Xf5s2b89133xEKhXj33Xcrlx+udcDRHvdwHE1LgpYtW+L1ennttdcOGag///zzee655ypbJPzwww8UFhYydOhQJk2aRFFREXDolgTHMv6oqCjOP/98fvWrX6kLTVEaMCo21cTKlSs59dRTycjI4E9/+hMPPPAAUVFRTJo0iauuuoqePXvi9Xq59dZbq+z76KOPcvHFF3P66afTsmXLyuXXXHMNTzzxBKeccgobNmyoXH60xz0c5513HmvWrKlMEDiY2267jVdeeYXevXvz/fffh1kaYoXddNNNdO/enT59+tCjRw9uueUWKioqGD58OCNHjqRfv35kZGRUxozGjBnDrbfeSkZGBpZlHdP4R40ahdfrZdiwYUd9jYqi1C8ab4u
BFtWbkda8Oew6wQtJf/PNN9x9990sXLiwVs/75JNPkpeXx1/+8pdDrtcWA4pSPzgxWwyc4MJQ3WRmZnLdddfx6KOP1up5L7vsMjZs2MC8efNq9byKolQvjVZslOqlX79+/PDDD7V+XncMS1GUhovGbBRFUZQaR8VGURRFqXFUbBRFUZQaR8VGURRFqXFUbBooWrJfUZSGROMVm8bQY+AwBINBFRtFURoUjVdsarvHAPD6669XVhG45ZZb+Oqrr+jVqxclJSUUFhaSnp7OqlWrWLBgAWeffTYXXXQRXbt25dZbbyUUCgGmwOXAgQPp06cPV111FQUFBUB46f0333yzSsl+RVGU+kzjFZta5rvvvmPKlCksXryYZcuW4fP5WLt2LSNHjuSBBx7g97//PaNHj67sB7NkyRL+9a9/sWbNGjZs2MD//vc/9u3bx1//+lfmzJnD0qVL6devH//4xz8qzyGl90ePHh1Wsl+qNSuKotRXdFJnNTF37ly++eYb+vfvD0BxcTHNmjXjwQcfpH///kRFRfHMM89Ubn/qqady0kknAXDttdfy2WefERUVxZo1azjjjDMA0+hs4MCBlfu4S+8riqI0JFRsqgnLsrjhhht45JFHwpbv3LmTgoICysvLKSkpqSxq6W4rIL9blsXQoUN58803D3kOd0FMRVGUhoS60aqJwYMHM23aNPbs2QOY0vqbN2/mlltu4S9/+QujRo3i3nvvrdx+yZIlbNq0iVAoxJQpUzjzzDMZMGAAixcvriy9X1hYeNgSMUdqE6AoilKfOCqx2bZtG7/5zW8YOHAgMTExeDyeKh0gMzMzGTt2LN26dSMmJoZ27doxatQoNm3aVOV4oVCIRx55hA4dOhAVFUXv3r155513quWC6oru3bvz17/+lWHDhtGrVy+GDh3KK6+8QiAQ4LrrrmP8+PF8/fXXlQUl+/fvz+23387JJ59Mx44dueyyy0hNTeXll1/m2muvpVevXgwcOJDvv//+kOdzl+zXBAFFUeo91lEwf/58q1mzZtYFF1xgDRs2zAKsTZs2hW3zu9/9zjr99NOtZ5991lqwYIE1efJkq1u3blZKSoq1ZcuWsG3vu+8+KyIiwnriiSesefPmWWPHjrU8Ho/14YcfHs1wrL59+1ZZtmbNmvAFzZtbFlTfq3nzoxrb0TB//nzroosuqrbjnehU+dsrilInHOreLBxVzObss89mt536+9JLLzF79uwq29x7772kpqaGLTvjjDPo2LEjL774In/+858B2LNnD08++STjx4/nnnvuAUwzr/Xr1zN+/HguvPDC49FOB+0xoCiKUm84Kjea13vkzQ4WGoD27duTmprK9u3bK5fNmjWLsrIyRo8eHbbt6NGjWbly5SHdbo2Nc889lw8++KCuh6EoilJr1GiCwHfffceePXvCuiiuXr2ayMhI0tLSwrZNT08HYM2aNTU5JEVRFKUOqDGxqaio4NZbbyU1NZUbb7yxcnl2djZJSUlVUn9TUlIq1yuKoiiNixqbZ3P77bfz+eef8+GHH5KcnHzcx5swYQITJkwAYO/evcd9PEVRFKX2qBHLZvz48UyYMIGJEycybNiwsHXJycnk5uZiWVbYcrFoxMI5mLFjx5KZmUlmZuYh40OKoihK/aXaxebhhx/mscce45lnnuH666+vsj49PZ3S0lI2bNgQtlxiNd27d6/uIdVrFixYwMUXXwzAjBkzePTRR2t9DI888ghpaWl07dqVWbNm/ei2d9xxB3FxcbU0MkVRGgvVKjbPPPMMDzzwAA8//DC33377IbcZPnw4gUCAyZMnhy1//fXX6dGjBx07dqyewbQAPNX4OoYOA5ZlVVZxPhZGjhzJ+PHjj3m/42HNmjW89dZbrF69mpkzZ3LbbbcRDAYPuW1mZiY5OTm1Oj5FURoHRy0206ZNY9q0aXzzzTcAfPzxx0ybNo2FCxcC8NZbb3HnnXcyfPhwBg0axJdffln5cmeYNWvWjLv
vvptHHnmEf/zjHyxYsIBf/epXzJs3r0pdseOimjsMHOl4WVlZdO3alZ///Of06NGDrVu38qtf/Yp+/fqRnp7OH//4x8ptZ86cSbdu3ejTpw//+9//Kpe//PLLlSI9ZswYpk2bVrlOrImdO3dy9tlnk5GRQY8ePfj000+P67KmT5/ONddcQ2RkJB07diQtLY0lS5ZU2S4YDDJu3Dgef/zx4zqfoignJkedIHDVVVeF/X7bbbcBcM4557BgwQJmzpyJZVnMnDmTmTNnhm0r2wgPP/wwcXFxPP300+zatYuuXbvy9ttvV7qTGirr1q3jlVdeYcCAAYC5zpSUFILBIIMHD2bFihV06dKFm2++mXnz5pGWlnbMlZzfeOMNzj//fO6//36CwSBFRUVVtrnrrruYP39+leXXXHNNFctp+/btleMFaNOmTdi8KOHf//43I0eOpGXLlsc0XkVRFDgGsTk4oH8wL7/8Mi+//PJRHcvn8/HAAw/wwAMPHO3pGwTt27cPu3G//fbbTJgwgYqKCnbu3MmaNWsIhUJ07NiRzp07A2Yyq2TZHQ39+/fnl7/8JeXl5Vx66aVkZGRU2eapp5467mtxs2PHDqZOnRr2wKAoinIsaNXnasTdAmDTpk08+eSTzJ07lxUrVnDRRRdRUlJy1Mfy+/2VcZ9QKERZWRlgSgctWrSI1q1bM2bMGF599dUq+951111kZGRUeR0q+aB169Zs3bq18vdt27bRunXrsG2+/fZb1q9fT1paGh06dKCoqKjKpFxFUZQfQ/vZ1BAHDhwgNjaWxMREdu/ezccff8y5555Lt27dyMrKYsOGDXTq1OmwvWs6dOjAN998w89+9jNmzJhBeXk5AJs3b6ZNmzbcfPPNlJaWsnTpUn7+85+H7Xssls3IkSO57rrruPvuu9mxYwfr1q3j1FNPDdvmoosuYper1lxcXFxlGwRFUZSjQcWmhujduzennHIK3bp1o23btpXdN6OiopgwYQIXXXQRMTExnHXWWYfsS3PzzTdzySWX0Lt3b4YPH15pNS1YsIAnnniCQCBAXFzcIS2bYyE9PZ2f/exndO/eHb/fz7PPPovP5wPgwgsv5KWXXqJVq1bHdQ5FURSPdaRgTD2kX79+ZGZmhi377rvvwmqw0YLqzUhrDmgh6XpJlb+9oih1wqHuzULjtWxUGBRFUeoNmiCgKIqi1DgqNoqiKEqN06jEpgGGn5TjRP/mitIwaDRiExUVxf79+/XmcwJhWRb79+8nKiqqroeiKMoRaDQJAm3atGHbtm3a6+YEIyoqijZt2tT1MBRFOQKNRmwCgUD1VYxWFEVRqpVG40ZTFEVR6i8qNoqiKEqNo2KjKIqi1DgNslxN06ZNiY2NJTU1ta6HUmPs3bu3UV8fNP5r1Otr2Oj1HTtZWVns27fvkOsapNjAj9fgaQw09uuDxn+Nen0NG72+6kXdaIqiKEqNo2KjKIqi1DgNVmzGjh1b10OoURr79UHjv0a9voaNXl/10mBjNoqiKErDocFaNoqiKErDoUGJzdatW7nyyitJTEwkISGByy+/nC1bttT1sI6ZadOmccUVV9C+fXuio6Pp2rUrf/jDH6q0h87JyeGmm26qTPUeMmQIK1eurKNRHx/Dhw/H4/HwwAMPhC1v6Nf40UcfcfbZZxMXF0dCQgL9+vVj3rx5lesb8vUtXryYYcOG0axZM+Lj4+nTpw8TJ04M26akpIRx48bRsmVLoqOjGThwIIsWLaqjER+ebdu28Zvf/IaBAwcSExODx+MhKyurynZHez2hUIhHHnmEDh06EBUVRe/evXnnnXdq4UoOzdFcX2ZmJmPHjqVbt27ExMTQrl07Ro0axaZNm6ocr0auz2ogFBYWWmlpaVZ6err17rvvWu+9957Vo0cP66STTrIKCgrqenjHxGmnnWZdddVV1uuvv24tWLDAeuqpp6zExETrtNNOs4LBoGVZlhUKhawzzjjDat26tfXGG29YH3/8sXX
22WdbTZo0sbZu3VrHV3BsvPHGG1aLFi0swLr//vsrlzf0a3z++ectv99v3Xnnndbs2bOtmTNnWo8++qj1/vvvW5bVsK9v+fLlVlRUlHXuueda7733njV79mxr7NixFmD95z//qdzuuuuusxITE60JEyZYc+bMsS677DIrKirK+vbbb+tu8Idg/vz5VrNmzawLLrjAGjZsmAVYmzZtqrLd0V7PfffdZ0VERFhPPPGENW/ePGvs2LGWx+OxPvzww9q5oIM4muv73e9+Z51++unWs88+ay1YsMCaPHmy1a1bNyslJcXasmVL2LY1cX0NRmz++c9/Wl6v11q3bl3lso0bN1o+n8/6+9//XocjO3b27NlTZdkrr7xiAdbcuXMty7Ks9957zwKsefPmVW6Tm5trJScnW7/5zW9qbazHS3Z2ttW8eXPrjTfeqCI2DfkaN23aZEVFRVlPPfXUYbdpyNf3hz/8wQoEAlZ+fn7Y8gEDBlgDBgywLMuyli1bZgHWxIkTK9eXl5dbXbp0sUaMGFGr4z0S8hBnWZb14osvHvJmfLTXs3v3bisiIsJ68MEHw/YfNGiQ1bNnz5q5gCNwNNd3qPtOVlaW5fF4rP/7v/+rXFZT19dg3GgzZsxgwIABpKWlVS7r2LEjZ5xxBtOnT6/DkR07h5q1279/fwC2b98OmOtt1aoV5513XuU2iYmJjBgxokFd77333kuPHj249tprq6xryNc4ceJEvF4vt95662G3acjXV1ZWRiAQIDo6Omx5YmIioVAIMNcXCAS4+uqrK9f7/X6uueYaZs2aRWlpaa2O+cfweo98qzva65k1axZlZWWMHj06bP/Ro0ezcuXKQ7qlapqjub5D3Xfat29Pampq5X0Hau76GozYrF69mh49elRZnp6ezpo1a+pgRNXLwoULATj55JOBH7/eLVu2UFBQUKvj+yl89tlnvPrqqzz77LOHXN+Qr/Gzzz6jW7duvPXWW3Tq1Am/309aWlrYtTbk6xszZgwAd9xxBzt27CA3N5cXX3yRuXPnctdddwHm+jp27EhMTEzYvunp6ZSVlbF+/fraHvZxcbTXs3r1aiIjI8MefGU7oEHdj7777jv27NlTed+Bmru+BiM22dnZJCcnV1mekpJCTk5OHYyo+ti+fTsPPvggQ4YMoV+/fsCPXy9Q76+5rKyMW265hXvuuYeuXbsecpuGfI07duxg3bp1jBs3jvHjxzN79myGDh3K7bffztNPPw007Ovr0aMHCxYsYPr06bRu3Zrk5GR+/etf8/zzz3PNNdcAR76+7OzsWh3z8XK015OdnU1SUhIej+dHt6vvVFRUcOutt5KamsqNN95Yubymrq/RNE9rqBQUFHDJJZfg9/uZNGlSXQ+n2nj88ccpLi7m/vvvr+uh1AihUIj8/HxefvllLr/8cgAGDRpEVlYWjzzyCHfccUcdj/D4WLduHVdccQXp6ek8//zzREdHM336dG699VaioqIYNWpUXQ9ROU5uv/12Pv/8cz788MNDimx102DEJjk5+ZBPgod7GmkIFBcXM2LECDZu3MjChQvD2hv/2PXK+vrKli1bePjhh3nppZcoLS0N892XlpaSm5tLfHx8g77GJk2asG7dOoYOHRq2fNiwYcycOZOdO3c26Ou77777CAQCfPDBBwQCAQAGDx7M/v37+e1vf8u1115LcnIymzdvrrKvXJ88CTcUjvZ6kpOTyc3NxbKssKf/hnTd48ePZ8KECbzyyisMGzYsbF1NXV+DcaOlp6ezevXqKsvXrFlD9+7d62BEx0d5eTlXXnklmZmZfPTRR/Ts2TNs/Y9db7t27YiLi6utoR4zGzdupKSkhNGjR5OcnFz5AnjyySdJTk5m5cqVDfoaxX99OLxeb4O+vpUrV9K7d+9KoRFOPfVU9u/fz549e0hPT2fTpk0UFRWFbbNmzRoiIiKq+PzrO0d7Penp6ZSWlrJhw4Yq2wH1/n708MMP89hjj/HMM89w/fX
XV1lfY9f3k/PYapmnnnrK8vl81oYNGyqXbdq0yfL7/daTTz5ZhyM7doLBoHXVVVdZUVFR1pw5cw65zbvvvmsB1oIFCyqX5eXlWSkpKdbtt99eW0P9SeTk5Fjz58+v8gKs0aNHW/Pnz7fy8/Mb9DV+8MEHFmBNnTo1bPmwYcOsNm3aWJbVsP+G55xzjtWxY0ertLQ0bPm1115rRUVFWaWlpdbSpUstwHr55Zcr15eXl1vdunWzLr744toe8lFzuNTgo72e3bt3W4FAwHrooYfC9h88eLDVo0ePGh370XC467Msy3r66actwHr44YcPu39NXV+DEZuC/2/n/l1Oi+M4gH8Nj5M6ypFYRLL6sSnTiUE2xWCUMojpTEaD2YjBYJLhu7CIUkaTsvgD/AEG9WTC+073lId7H3U713Pq/aqzfdL3rc55Uz4+PxGNRhGLxTCdTjGbzZBIJBCJRB52AX66er1u7pxsNpu76/ey3/V6RTqdRjAYxGQywWKxgK7r0DTtYQHLLsSXPRs7Z7zdbshkMvB6vRgMBlgul6jVahBCYDQaAbB3PiklhBDI5XKYTqdYLpdoNpsQQsAwDHOuXC7D4/FgOBxitVqhVCpBURRst9s3nv45KSWklOb91+/3IaW8+zDwap5WqwVFUdDtdrFer1Gv1+FwOMyF3nf4Lt9kMoHD4UA+n3947uz3+7vXsiKfbcoGAA6HA4rFItxuN1RVRaFQeNreP104HIYQ4unVbrfNuePxiGq1Ck3T4HK5kM1msdvt3nfwf/S1bAB7ZzydTmg0GvD7/fj4+EA8Hsd4PL6bsXO++XwOXdfh8/mgqiqSySR6vR4ul4s5cz6fYRgGAoEAFEVBKpXCer1+36H/4k/3nK7r5syreS6XCzqdDkKhEJxOJ+Lx+MO33P/tu3yVSuWl9wCwJh//9ZmIiCxnmx8IEBGRfbFsiIjIciwbIiKyHMuGiIgsx7IhIiLLsWyIiMhyLBsiIrIcy4aIiCzHsiEiIsv9ArLrihupUJH7AAAAAElFTkSuQmCC", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAZsAAAGQCAYAAAB4X807AAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8vihELAAAACXBIWXMAAAsTAAALEwEAmpwYAACDKUlEQVR4nO2deZyNdfvH32ebfTFj39eyjC0h6ilJ4amoxFOiXyqh0qInpZJW0V7aKUop0oJKtKBF24OkaLENsmQdZjtnznL//viea+4zjYTmmO16v17nNefc516+t+X+zLU7LMuyUBRFUZQo4iztBSiKoigVHxUbRVEUJeqo2CiKoihRR8VGURRFiToqNoqiKErUUbFRFEVRok6pic2WLVvo378/qamppKSk0K9fPzZv3lxay1EURVGiiKM06mzy8vJo164dsbGx3H///TgcDsaOHUteXh6rVq0iMTHxWC9JURRFiSLu0rjolClT2LBhA7/++ivNmjUDoG3bthx33HG88MIL3HTTTaWxLEVRFCVKlIpl06NHD7xeL0uXLi2yvVu3bgB89tlnx3pJiqIoShQpFctm9erVnHfeecW2Z2RkMHv27L89vlq1ajRq1CgKK1MURVGOlszMTHbv3n3Q70pFbPbu3UtaWlqx7enp6ezbt+9vj2/UqBHLli2LxtIURVGUo6Rjx45/+V2piM3RMHnyZCZPngzArl27Snk1iqIoypFQKqnPaWlpB7Vg/sriARg2bBjLli1j2bJlVK9ePdpLVBRFUUqQUhGbjIwMVq9eXWz7mjVraNWqVSmsSFEURYkmpeJG69u3LzfffDMbNmygSZMmgAksLV26lIkTJ5bGkhRFOQr8fj+///47Xq+3tJeiHEPi4uKoV68eHo/nsI8pldTn3Nxc2rVrR3x8fGFR55133kl2djarVq0iKSnpkMd37NhREwQUpQywceNGkpOTqVq1Kg6Ho7SXoxwDLMtiz549ZGdn07hx4yLfHerZXCputMTERBYtWsTxxx/PpZdeyqBBg2jcuDGLFi36W6FRFKXs4PV6VWgqGQ6Hg6pVqx6xNVtq2WgNGjTg7bffLq3LK4pSQqjQVD6O5u9cuz4riqIoUafc1NkoilL2qfVILf7I/aPEzlczsSY7bt5xyH1OPvlkvvrqq0PuM3ToUG666SZatWrFAw88wO23335ExyclJZGTk1NkW1ZWFq+//jrXXHPN39xFcc4++2xef/11qlSpckTHLVmyhJiYGE4++eQjvmZpo5aNoiglRkkKzeGe7++EAuDFF18sLKt44IEHjvj4g5GVlcWzzz570O8CgcAhj50/f/4RCw0YsTnS9f7dWo4VKjaKopRrJKloyZIlnH766fTv358WLVowaNAgJNn29NNPZ9myZYwZM4b8/Hzat2/PoEGDihyfk5NDjx496NChA23atGHu3LmHvO6YMWNYv3497du3Z/To0SxZsoRTTz2Vvn37Fgrb+eefz4knnkhGRkZhBxQwLbekh9hrr71G586dad++PcOHDycYDAKwYMECOnToQLt27ejRoweZmZk8//zzPP7447Rv354vvviCzMxMzjjjDNq2bUuPHj0KZ4INGTKEESNGcNJJJ3HLLbdw3HHHFXZeCYVCNGvW7Jh3YlE3mqIoFYbvv/+e1atXU6dOHU455RSWLl3Kv/71r8LvJ06cyNNPP83KlSuLHRsXF8e7775LSkoKu3fvpkuXLvTt2/cvg+ETJ07kp59+KjzXkiVLWLFiBT/99FNhSvDUqVNJT08nPz+fTp06ceGFF1K1atXCc/z888/MmjWLpUuX4vF4uOaaa5gxYwb//ve/ueqqq/j8889p3Lgxe/fuJT09nREjRpCUlMTNN98MQJ8+fbjsssu47LLLmDp1Ktdffz
1z5swB4Pfff+err77C5XKRmprKjBkzuPHGG/nkk09o167dMe/EopaNoigVhs6dO1OvXj2cTift27cnMzPzsI+1LIvbb7+dtm3bcuaZZ7J161b++OPI3IKdO3cuUnsyadIk2rVrR5cuXdiyZQtr164tsv+nn37K8uXL6dSpE+3bt+fTTz9lw4YNfPPNN5x22mmF50pPTz/o9b7++msuueQSAC699FK+/PLLwu8GDBiAy+UC4IorrmD69OmAEcDLL7/8iO6rJFDLRlGUCkNsbGzhe5fLdUTxihkzZrBr1y6WL1+Ox+OhUaNGR1xLEjlleMmSJXzyySd8/fXXJCQkcPrppxc7n2VZXHbZZUyYMKHI9vfee++Irvt3a6lfvz41a9Zk0aJFfPfdd8yYMeMfn/9IUctGUZRKhcfjwe/3F9u+f/9+atSogcfjYfHixWzatOmQ50lOTiY7O/svv9+/fz9paWkkJCTwyy+/8M033xTbp0ePHrz11lvs3LkTMM2IN23aRJcuXfj888/ZuHFj4faDXfPkk09m5syZgBHLU0899S/XM3ToUAYPHlzE4jmWqNgoilJi1EysWabPB6aDfNu2bQsTBIRBgwaxbNky2rRpw/Tp02nRosUhz1O1alVOOeUUWrduzejRo4t937t3bwKBAC1btmTMmDF06dKlyPcOh4NWrVpx//3307NnT9q2bctZZ53F9u3bqV69OpMnT6Zfv360a9eOiy66CDAxmnfffbcwQeCpp55i2rRptG3blldffZUnn3zyL9fbt29fcnJySsWFBqXUG+2for3RFKVs8PPPP9OyZcvSXka5IhgMUqNGDXbs2HFEjSz/KcuWLWPUqFF88cUXJXK+g/3dH+rZrDEbRVGUY0hGRgZDhw49pkIzceJEnnvuuVKJ1QgqNoqiKMeQX3755Zhfc8yYMYwZM+aYXzcSjdkoiqIoUUfFRlEURYk6KjaKoihK1FGxURRFUaKOio2iKCVGrVrgcJTcq1atkl2fNOQ8FHPmzGHNmjWFn8eNG8cnn3zyj6+9cuVK5s+ff8THbdu2jf79+x/VNV9++WW2bdt2VMeWNCo2iqKUGEfYSuyYn+9w+LPY3HvvvZx55pn/+LyHEptDtdWpU6cOb7311lFd82jERrpOlzQqNoqilFtyc3M555xzaNeuHa1bt2bWrFmAaXB5wgkn0KZNG6644gp8Pl+xY2W0AMBbb73FkCFD+Oqrr5g3bx6jR4+mffv2rF+/niFDhhQ+7P/qvI0aNeKuu+4qHE/w5/TmgoICxo0bx6xZs2jfvj2zZs3i7rvv5tJLL+WUU07h0ksvJTMzk1NPPZUOHTrQoUOHwrk1mZmZtG7dGjBCMHr0aDp16kTbtm154YUXCq/x4IMP0qZNG9q1a8eYMWN46623WLZsGYMGDaJ9+/bk5+cfcv233norHTp0YOLEiXTo0KHwvGvXri3y+WhRsVEUpdyyYMEC6tSpww8//MBPP/1E79698Xq9DBkyhFmzZvHjjz8SCAR47rnnDut8J598Mn379uXhhx9m5cqVNG3atPC7vztvtWrVWLFiBVdffTWPPPJIkfPGxMRw7733ctFFF7Fy5crC9jNr1qzhk08+4Y033qBGjRp8/PHHrFixglmzZnH99dcXW99LL71Eamoq//vf//jf//7HlClT2LhxIx9++CFz587l22+/5YcffuCWW26hf//+dOzYkRkzZrBy5UocDsch11+1alVWrFjBHXfcQWpqauHohGnTppVIixsVG0VRyi1t2rTh448/5tZbb+WLL74gNTWVX3/9lcaNG3P88ccDcNlll/H555//42v93Xn79esHwIknnnjYow369u1LfHw8AH6/n6uuuoo2bdowYMCAIq484aOPPmL69Om0b9+ek046iT179rB27Vo++eQTLr/8chISEoCDjyT4u/WLAIJp2jlt2jSCwSCzZs0qHGPwT9AOAoqilFuOP/54VqxYwfz58xk7diw9ev
TgvPPOO6xjI4eiHekogYMh4w2OZLRB5BiAxx9/nJo1a/LDDz8QCoWIi4srtr9lWTz11FP06tWryPaFCxf+g5UXX8uFF17IPffcwxlnnMGJJ55YZODb0aKWjaIo5ZZt27aRkJDA4MGDGT16NCtWrKB58+ZkZmaybt06AF599VW6detW7NiaNWvy888/EwqFePfddwu3/9XogMM9719xOCMJateujdPp5NVXXz1ooL5Xr14899xzhSMSfvvtN3JzcznrrLOYNm0aeXl5wMFHEhzJ+uPi4ujVqxdXX311iXWJVrFRFKXEqFnCEwH+7nw//vgjnTt3pn379txzzz2MHTuWuLg4pk2bxoABA2jTpg1Op5MRI0YUO3bixImce+65nHzyydSuXbtw+8UXX8zDDz/MCSecwPr16wu3H+55/4ru3buzZs2awgSBP3PNNdfwyiuv0K5dO3755ZciloZYYUOHDqVVq1Z06NCB1q1bM3z4cAKBAL1796Zv37507NiR9u3bF8aMhgwZwogRI2jfvj2WZR3R+gcNGoTT6aRnz56HfY+HQkcMKIpy1OiIgeizfPlybrrpJj777LNjet1HHnmE/fv3c9999x30ex0xoCiKUkFYtmwZl1xyCRMnTjym173gggtYv349ixYtKrFzqtgoiqKUUTp27Mhvv/12zK8bGcMqKTRmoyiKokQdFRtFURQl6qjYKIqiKFFHxUZRFEWJOio2iqKUHGV9xkAJUJba9pcnVGwURSk5KsKMgUMQDAZVbI4SFRtFUco9r732WmEngeHDh/Ptt9/Stm1bvF4vubm5ZGRk8NNPP7FkyRJOO+00zjnnHJo3b86IESMIhUKAaXLZtWtXOnTowIABA8jJyQGKtt9/4403irXtVw4PFRtFUco1P//8M7NmzWLp0qWsXLkSl8vFr7/+St++fRk7diy33HILgwcPLpwJ89133/HUU0+xZs0a1q9fzzvvvMPu3bu5//77+eSTT1ixYgUdO3bkscceK7yGtN8fPHhwkbb90rFZ+Xu0qFNRlHLNp59+yvLly+nUqRMA+fn51KhRg3HjxtGpUyfi4uKYNGlS4f6dO3emSZMmAAwcOJAvv/ySuLg41qxZwymnnAKYYWddu3YtPCay/b5ydKjYKIpSrrEsi8suu4wJEyYU2b59+3ZycnLw+/14vd7CxpaRowXks2VZnHXWWbzxxhsHvUZkU0zl6FA3mqIo5ZoePXrw1ltvsXPnTsC019+0aRPDhw/nvvvuY9CgQdx6662F+3/33Xds3LiRUCjErFmz+Ne//kWXLl1YunRpYfv93Nzcv2wT83ejApSDo5aNoiglR82aJZtBdhgzC1q1asX9999Pz549CYVCeDwezjvvPDweD5dccgnBYJCTTz6ZRYsW4XQ66dSpEyNHjmTdunV0796dCy64AKfTycsvv8zAgQPx+XwA3H///YVTLSORtv3x8fF8/fXXGrc5THTEgKIoR015GzGwZMkSHnnkEd5///3SXkq550hHDKgbTVEURYk66kZTFKXScPrpp3P66aeX9jIqJWrZKIqiKFGnRMXmrbfe4sILL6Rhw4bEx8fTvHlzbrvttmKZG/v27WPo0KFUq1aNxMREzjzzTH788ceSXIqiKIpShihRsXnkkUdwuVw88MADLFiwgKuvvprnnnuOs846q7AlhGVZ9OnThwULFvDUU0/x9ttv4/f76d69O7///ntJLkdRFEUpI5RozOa9996jevXqhZ+7detGeno6l112GUuWLOGMM85g3rx5LF26lEWLFtG9e3cAunbtSuPGjXnooYeKVPoqiqIoFYMStWwihUaQFhJbt24FYN68edSpU6dQaABSU1Pp06cPc+fOLcnlKIpyrKkFOErwFaUJA0uWLOHcc88FzDNp4sSJ0bnQIZgwYQLNmjWjefPmLFy48JD7Xn/99SQlJR2jlUWHqGejffbZZwCF+dirV68ubIgXSUZGBtOnTycnJ6fc/6EqSq
WlpCcCHMH5LMvCsiycziP7Hbpv37707dv3CBf2z1izZg0zZ85k9erVbNu2jTPPPJPffvsNl8tVbN9ly5axb9++Y7q+aBDVbLStW7cybtw4zjzzTDp27AiYVhJpaWnF9k1PTweoEH+oiqIcGzIzM2nevDn/93//R+vWrdmyZQtXX301HTt2JCMjg7vuuqtw3wULFtCiRQs6dOjAO++8U7j95ZdfZuTIkYDpDvDWW28Vfie/+G7fvp3TTjuN9u3b07p1a7744ot/tO65c+dy8cUXExsbS+PGjWnWrBnfffddsf2CwSCjR4/moYce+kfXKwtEzbLJycnhvPPOw+12M23atH98vsmTJzN58mQAdu3a9Y/PpyhKxWDt2rW88sordOnSBYDx48eTnp5OMBikR48erFq1iuOPP56rrrqKRYsW0axZsyPu4vz666/Tq1cv7rjjDoLBIHl5ecX2GTVqFIsXLy62/eKLL2bMmDFFtm3durVwvQD16tUrDDVE8vTTT9O3b19q1659ROsti0RFbPLz8+nTpw8bNmzgs88+o169eoXfpaWlHdR62bt3b+H3B2PYsGEMGzYMoNBKUhRFadiwYZEH95tvvsnkyZMJBAJs376dNWvWEAqFaNy4MccddxwAgwcPLvzl9XDo1KkTV1xxBX6/n/PPP5/27dsX2+fxxx//x/cSybZt25g9ezZLliwp0fOWFiXuRvP7/fTv359ly5Yxf/582rRpU+T7jIwMVq9eXey4NWvW0KBBA43XKIpyRES2/9+4cSOPPPIIn376KatWreKcc87B6/Ue9rncbndhmUYoFKKgoACA0047jc8//5y6desyZMgQpk+fXuzYUaNG0b59+2KvgyUf1K1bly1bthR+/v3336lbt26Rfb7//nvWrVtHs2bNaNSoEXl5eTRr1uyw76WsUaJiEwqFGDRoEIsWLWLOnDlFftsQ+vbty9atWwsTBwAOHDjAe++9d8yDdIqiVCwOHDhAYmIiqamp/PHHH3z44YcAtGjRgszMTNavXw/wl3NrGjVqxPLlywGTpeb3+wHYtGkTNWvW5KqrrmLo0KGsWLGi2LGPP/44K1euLPb6swsNzHNw5syZ+Hw+Nm7cyNq1a+ncuXORfc455xx27NhBZmYmmZmZJCQkFI5AKI+UqBvt2muvZfbs2dxxxx0kJibyzTffFH5Xr1496tWrR9++fenatSuDBw/m4YcfJi0tjQkTJmBZFrfccktJLkdRlGNNTUo2I+3vJwwUoV27dpxwwgm0aNGC+vXrF07ejIuLY/LkyZxzzjkkJCRw6qmnHnQmzVVXXcV5551Hu3bt6N27d6HVtGTJEh5++GE8Hg9JSUkHtWyOhIyMDP7zn//QqlUr3G43zzzzTGEm2tlnn82LL75InTp1/tE1yholOmKgUaNGbNq06aDf3XXXXdx9992Aic/cfPPNzJkzB6/XS9euXXnsscdo167dYV1HRwwoFRlJ4Q2FQpTFCSAOhwOXy4XD4Sh3IwaUkuNIRwyUqGWTmZl5WPulp6czdepUpk6dWpKXV5QKQSgUYvXq1Xz//fdHFG84VtSqVYuTTjqJWrWiVHGpVEh0xICilDECgQDffvstkyZNKszSLEt07tyZOnXqqNgoR4SKjaKUQXJzc9mxYwe7d+8u7aUUY/fu3YWBczBuP4fDUYorUo41R+Pe1Xk2iqIcNXFxcezZs6dMxpaU6GBZFnv27CEuLu6IjlPLRlGUo6ZevXr8/vvv2tWjkhEXF1ekWP9wULFRFOWo8Xg8NG7cuLSXoZQD1I2mKIqiRB0VG0VRFCXqqNgoiqIoUUfFRlEURYk6KjaKoihK1FGxURRFUaKOio2iKIoSdbTORqmUSGflslj5HgwGCwd4KUpFQcVGqZTk5OSwcuVKMjMzy5zgBAIBli9fjs/nK+2lKEqJoWKjVEr27dvH22+/zbx588qkFXHgwAFyc3NLexmKUmKo2CiVEr/fz65du8qkZaMoFR
FNEFAURVGijoqNoiiKEnVUbBRFUZSoo2KjKIqiRB0VG0VRFCXqqNgoiqIoUUfFRlEURYk6KjaKoihK1NGizgrOvn37yMzMJCcnp7SXUozY2FgaNGhAzZo1cTgcpb0cRVGiiIpNBWft2rU8//zz/PLLL6W9lGLUqFGDK6+8krPPPhuXy1Xay1EUJYqo2FRw9u/fz6pVq1i+fHlpL6UYdevW5dxzz9V2MYpSCdCYjaIoihJ1VGwURVGUqKNioyiKokQdFRtFURQl6qjYKIqiKFFHxUZRFEWJOpr6rCiKUpJYQCj8ElxU+l/tVWwURVH+KSEgD9gPZAMF4ReYp2wMEAckAynhz5WsaYaKjaIoytESAvYB24EDgD+8TawYB8bScWCsmx0Y0UkDagCJx3i9pYiKjaIoytHgBdYDu4EAEAy/wIhLCFtkLIwAucL75gK7gPpAdSrFk7gS3KKiKEoJsw/4BcjCjtEEI95HWjTycoV/usPvQ8AGjOutCca1VoFRsVGUMobD4aBGjRrUqVMHj8dT2sspRsuWLUlKSirtZZQee4CVmBhNEFtoQhiBkRcYcXFiC44bY9k4wz89wB/h98dToQVHxUZRyhhut5tTTjmFQYMGkZqaWtrLKUaVKlVo0KBBaS+jdMgGVoR/hjAxGgsjFpFWzV+50GS7GztjzcII2FqgRXi/CoiKjaKUMRwOB/Xr1+fUU0+levXqpb0cRQgC32NiLfJZXmLJBMI/xY0mIuPAjueI0Lgpagn9AaQDtaN5E6WHio2iKMrhsB7YiBGOALY1E8AWFygaq4GiMRzxiorbLdK6iQEyManRFTBLTcVGURTl7/Bi4jQiMME/vcAWGcGJLTRgxKQg/DkG2+0mxzkxWWrbgOOidielhoqNoijK37EJU0sj8RQRHXlFutLAdqGJG80J+MLfxWJiPRLD+TPbgYZUuGSBqDZQ6N27Nw6Hg7FjxxbZvm/fPoYOHUq1atVITEzkzDPP5Mcff4zmUhRFUY6eZRixCIR/SocAf3ibWCjyU56sVng/L0aQRHT8EcfKuYLhzzmY1OoKRtTE5o033uCHH34ott2yLPr06cOCBQt46qmnePvtt/H7/XTv3p3ff/89WstRFEU5OrIw9TBQVCT8GOHwYsTCF/EzP7w9H1tkguHPYgmJwIjIiPD4MYWiFYyoiM2+ffsYNWoUjz32WLHv5s2bx9KlS3n11VcZOHAgvXv3Zt68eYRCIR566KFoLEdRFOXo2YCxNEQgIi2aSOEQIQpGHOsIb8sP/3RE7Ev4vXQf8EW8P0BRt1wFICpic+utt9K6dWsGDhxY7Lt58+ZRp04dunfvXrgtNTWVPn36MHfu3GgsR1EU5ej5HtvyEDEQ0fFRtEUNGFeaD5749F6eePVi3slcYKc+e8P7yDlErMQVF8QIUQ5FRasCUOIJAl9++SXTp08/qAsNYPXq1bRu3brY9oyMDKZPn05OTk7lrk5WFKXskINJd3ZT1JqRVObI1GWwCzld0Hb5h5yR+w2zY+pA7d4m4O/CCI4E/yOFSjoLuDBCdLDkgXJMiVo2BQUFDB8+nJtvvpnmzZsfdJ+9e/eSlpZWbHt6ejpgXHAHY/LkyXTs2JGOHTuya9eug+6jKIpSomzGCE4IWwAkSUA6PIvASFuaIPi3Z3J67rf4iMHR+d+2gEjqs4/iHQgiXwVUOEpUbB566CHy8/O54447SvK0AAwbNoxly5axbNkyrapWFCX6BIGfMU9JicWI4IhASDKAJASEg/6/fzUZJxazY3vSpmoDs11SnX3hc4qgSKJApEsNKlxhSondzubNmxk/fjwvvvgiPp8Pn89X+J3P5yMrK4vk5GTS0tIOar3s3bsX4KBWj6IoyjFnP6bAMgZj3Ui8JtJlJsWYYLvVfF6qrnsRgOda1ORcZ007Ey0W27KJwYiYBztF2olp8FmDCjfZs8TEZsOGDXi9XgYPHlzsu0ceeYRHHnmE77//noyMDD766KNi+6
xZs4YGDRpovEY5JsTGxtKgQQPatm2LZZWttJ+YmBjq1q2L213BfrUtb/yGEQPp1CzZZJFtZoQIV1rBb2+SEtzF97Tnfyd/R6o31Z7M6cUMTwO767MkBkQmEVRA502J/Wtu3749ixcvLra9e/fuDB48mCuvvJJmzZrRt29fpk2bxmeffUa3bt0AOHDgAO+99x6XXHJJSS1HUQ5Jeno6F110EaeeemqZExun00nDhg1JSEgo7aVUXvKw+6CBsTLysZtqRlo0QgCwLHL+9yTpwFOpfahd7RUcWQ47KUASBOIxwiLD1JzYrjkvFbIZZ4mJTZUqVTj99NMP+l3Dhg0Lv+vbty9du3Zl8ODBPPzww6SlpTFhwgQsy+KWW24pqeUoyiFJSEigXbt2tGvXrrSX8pc4HJVsSH1ZYium1iVyDk0eRUcDCNKWxgH88SXp2SvYRTXe6PIHbYI17XMUYCwZF8aNJm1rPNiFn2BEqUZ0b680OOZeQafTyfvvv89ZZ53FNddcwwUXXIDL5WLx4sXUr1//WC9HqcQ4HI4y/VJKiQDGhSZuLTfG9SXFmQfrixauucn9nylkn+wYjrf9m9QM1TTHhLATDeS4yPNI0sFuoBUVri8aHIN8h4O5KNLT05k6dSpTp06N9uUVRVGOjJ2YjgGRYwNigGSMtRNP0SQBMNbKgXXEb5uLjxi+bNIb4sdTK6+WnfYcg90bLR4jNrHYVo0IWdso318poRHISoDT6cTpLHupLU6nU3+DV8oWQezEgEjXloVdS5OLPZdGBMkBgZWP4sbiNQZTvaNpLFyvoJ7dmFMEh/B7ccmJBXUAY9XUieYNlh4qNhWc2rVr8+9//5sWLVqU9lKKkZ6eTtOmTVVwlLJDFsaykVk0UvMSg3GlWeF9ghR1deXvhHUvAzA/5b8k1JsIQP1A/aJWTUH4POKek+SAvPA+ndCx0Er5pGnTpgwfPpyCgrJXkuxyuahSpUqZtLqUSkgIY9XIFE0wT8jIdGcwVk0edpDfCdbPk3BbXubSl7NbtuI11xYA6ofCcehIq0Zm2QQjPicA9YEmUbq3MoCKTQUnPj6e+Pj40l6GopR99mOsGgnYS3sZMAITj927TASnAAgeIPjL07iByZ5beKsJPODZDEBDX0Pb1ebHjtG4MO44B1Al/GoBpEb7JksPFRtFURQLM0rAiy02LswTUjoxR45wFivFA6x8HndwP59zKic2O4VYK8QWd9iy8dU3AuWJOI9kr8VgijeTMCLT6hjcZymiYqMoipIF7Ai/F1GJnDUDtvhENt/05xNY+yhu4GHH7bzYBLYGt+J3+KkRqEF8IN7uFCC1Nsnhz8nhVwImKaAC1tZEomKjKErlJgRswp41I0PM4iK+j8xhEVdaCPh5Cm7fTv5HR2o37UXN6rDEsx6ApgVNbYFyYbvhErHFJi78aoWd4VZBUbFRFKVykwX8EX4vWWYBTEzFixEVL0VHODuAkJfg9w/iAiZwBw91cIATfk78GYDjOd5YLVKX48S4zAj/jMd8nwbUjfI9lgFUbBRFqbxEWjXSENOLLSjx2L3MZNaM9DP7cSquvG2spB2eZufRrLY5x8qYlQC0c7QzVosLkxhgYT47sC2aWKApRnQqOCo2FZzc3Fx27dqF1+v9+52jhMPhIDU1lapVq+LxVHBfgVK+2IPJQJNBZuL28mPXxUhLGWm+GQMEvQSXPYALuJ+xjOvsKHyafhX7FQCdrE5GREIYUSF8fqnZicV83yzaN1k2ULGp4Kxbt44ZM2awYcOGUluD0+mkR48eDBgwoHAiq6KUOkFMZ2fpUeYGsrHbxojbzEHRiZohYNWLuHK38gNtCTTtR9s65rusmCx+cv1EjBVDx1BHYxFJI06xXuIwguMBGmC71io4KjYVnJ07d7J48WKWLVtWamtwu92kpaXRp0+fUluDohRjN6YHmkzPlGp+D3YdTQJGfJLC+7gBXz7BL41Vcw93Ma6b0+zvhi+tLwE4kROJiw
1nGMg4AbFuxKqJwxRxVpIGGio2lYCyMq+lrKxDUSjA1NVIsWVM+L0kAkin5gKKjgcIAsufw5W3nRWcQLDpBXSoTmHG2QJrAQBnOc4yQiUxHnfET7Fq6mKSAyoJKjaKolQ+/sC4zCQpoAAjMNJ8M/LJKLGcXCCQTfCLibiAcdzL+F4Oey6NCz52fAxAr2AvY7mIyMikzliM0MQAjamwfdAOhjalUhSlcpELbA6/F/eZGyMC0k4mNrw9HnvCZirwvydx5e/iK7oS3+oc2tWi0PW21r+W3/iNFFLoHNPZnF86Dch5JF5TA6gW7RstW6hloyhK5cHCTOGUJACxYvKxe5X5sd1pB7CtkgN7CH75MC7gTscDPHuaw+wTTgJ4yfcShOAC9wW4E9125wAZTyAvDyZWU4msGlDLRlGUykQ2dluaYPiz9CtLwIhKIkYIEjAWjvRLWzwBV8EBPuIsmnQ6nebiBvOC1+9lWmgaAFc5rjLnlxhNHOZJKy60quFXJUMtG0VRKgdBTFKAHxOjCWHPqPFiBEHSmz3YXZqdwB9bCH3zNE7gLtdEZp8ePj5cNjYtNI2d7KSdqx0nx5xsu88CmEw2ad7pxlg1lbDcTMVGUZTKwZ7wSwaXSTuaAuwMNAf2JM4Ahd0DrMV34gz6mMlFnHFqB+pVodBy8Qa93J97PwBj48bi8IRzmRMpWijqxlg01Y/FzZY9VGwURan4eDFtaVwYUfGHX3HYFk429rA0L8aqcQC//QDfTacAD4/Gj+eTHuFz+syxU5xT2MY22jvbc2HChcYSkkJRSQiQeE0ljNUIKjaKolRsLOB3bEtGUpB92LNr/BhrJh/btRaexBn69BacWDzLNVzRqympVcLnDUJOfg7j88cDMC5+HI6gw1g0AYrOvHFgss8qWQZaJCo2iqJUbA4Au8LvwwH9wmw06X2Wh532nBV+7wZWLcD560dkkcrsGmP5rHP4fOGg/4M8yB/WH3T2dOb8pPPtNjeREz3lZyMqrVUDKjaKolRkApiamgDGesmmaAqyBPkd4X0SMRaND7AC+OfcjAcYzx2M61cNdwqF/dF+cvzERN9EAB6LewxHrMNudSMpzyI2taiUGWiRqNgoShkmGAySlZXFgQMHCIVCpbaOmJgY0tLSSExMxOEoR828dgF7MULjoKiwyAA0L+ZJWICdogyw5CU8u1azkUZsaHkdvVqFzwMEggEuz72cAAGGJwznlNhT7HoaP2YwmszGiQXqUekLTVRsFKUMk5+fz8KFC/noo4/w+/2lto46deowYMAAOnXqVGprOGLyMFaNM/zyAjnYowMi93NhxCKbcPeA/fg/GIsHuN35EA/2izPiEZ5HM/HARJYFl9HA2YCHUh8qOhxNhE2KQWsBVaJ+t2UeFRtFKcP4fD5WrlzJ7Nmzyc/PL7V1NG/enC5dupQfsQkCW7DTlyUxIC78XSJGZCSI7wu/dwK5EJp7H5783XzBv2hyZn+aVQ8flwPLYpdxT949AExNmUpKfIp9jiBFh6XFY8YIlCNjMFqo2ChKGceyLEKhUKl2zbYsq3x17d6LqakRYzAOIy5gu80k48zNnzLSfsX68klCOJiY9ARvnusoHBOQU5DD4KzBBAhwQ8IN9HD1MOdIwgiVjBOQBIFa4W2Kio2iKBUMH8aqCWLEIzv8Xoo1xQKxwtuktiYsPr4Zo4i1AkxhKFcMOJFE6S4QCyO9I/k19Cut3K2YUHtC0fhPELtmx42xntSqKUTFRlGUikMIIzTSTFO2xWHPpJGMMynazMeO2Xz/PrG/fEgWqSxqMZ7XT8aIiA9eOfAKr+S9QjzxvJn4JvG+ePMElWmeSdgjBZyYeTWSbKBU9vwIRVEqFFkY95kPO0gPxvoAO3ZjYWeLxYW/t7zkvXkDAOOddzP+4ho4QoAbfnT/yNXZVwPwVPWnyEjLMMKVjBEvsV5kVEEKxoWmFKKWjaIoFQMfpiWN9D7bjxGXPMyv1cHwe6mvycYe0RwL/r
ceImH/Bn6kNennjqRJNcCCbF82/ff2J598hsQN4YrEK8z1CrAneUqXaHf4Wk3Qp+uf0D8ORVHKPyHMnJoCCnuWFRZVJmC70KQ9jYfCzDOCwN6NsHACAI+mP83kf7sL63BG5o3kt+BvtPa05pkqz+BwOexpmzIUTVKpnZjBaCnH4J7LGepGUxSl/LMX2I1taUgVvx/zlPNjRCYGW4hiMEIUsjjw8nV4Ql5mcAnDRnQjJmwJvZr7KtNzp5PgSODNOm+SkJhgjpVf073h88Viu+Qaok/Wg6B/JIqilG+kozOYJ1o+pn9Z5E9f+H0eJj6TQ6FIBNfMJWXtB2SRyk9dH+XkhkA8/Ob5jatzwnGa5Kdo6WhZmCxACCNU8eGf0gKnTvizUgx1oymKUn4JYjo6S/GmWDYOjBDIILRk7DRnN0Z0sgByyJ12PSnAIwnjuW1ALQiAL8fHRVkXkWvlcnHSxVxe5XI7JuPHHoTmC68jDpONpkkBf4laNoqilF/2APuws8/EfSbzZCLrYGLD72X0cyLse20cKXlb+B8d6Tp8BCnVgXgYnT2alQUraeJuwgvpL+DAYawhccdJxwFJd3ZiujpXwgmch4uKjaIo5ZNcTO8zMGJSgMkwy4l4+TEWTDZ2+nM4mB/auoKUb58kiJN5rSZzTisXOODNgjd5Kv8pPHiYWXMmKYkpxqqRmEwKxmqS2hwXZvqmJgUcEnWjKYpS/vBjss+ka7OMdAYjCJIY4InY38JuUeMNsPvxodQgxPMxNzFqxAngg1+9v3Ll3isBeKz6Y3Ryd7JbzxC+XggjXOKSi8F0CtBf3Q+J/vEoilK+sIA/MBaLFGnGYBdwSqaZWDJx4Z/O8PZE2PXu49TY+z2ZNKTuVfeQXhVyU3Lpf6A/OVYOFyVdxLUJ19q1Of6Icydg3GcOjNg0iLiG8peoZaMoSvkiC9iOXUzpw7jUwGScSdGmTN/0YNxe+YAbgn+sJ+XjcQC8efzz3HJCElbIYtiuYfzk/4nm7uZMqTsFR4zDiEsAu9GmCJgIWiqQHu0brhio2CiKUn7Ix7jPHBiBkbHO0v8sAbsBJ+F9xKXmBLwhfn/oKhpaXt6OGcRVN/WGEDy952lez36dREci71R/h2Rvst2gU9KdPdgzbULh89VDn6KHibrRFEUpHwSBbdgxGonJxGDHVGLC+4GxauKxLSALtsyZQsOdi9lJdapc9ThpyfCF4wtuyroJgKl1ptIqpZXdO81BYTsbkjEWjghN3fBn5bBQTVYUpXywC5Pq7MSuccnDnrApLWgiU5PjKHSFeXdvJu2D0QDMbfU0V3Wszqb8TfTb3o8AAW5KvYn/pP/HjsU4MSnSMjpAfnowIlP92Nx2RSEqYjN//nwmTpzIihUrcDqdHH/88Tz00EOcccYZAOzbt4/Ro0czZ84c8vPz6dq1K48//jht2rSJxnIqNenp6Zx00kmkpaWV2hpcLhctW7YkNlb7rStHSRZGbKTBZhATP5ECSxkhAEYscrBrbmIAy2LThKtobmWzMLYfl9wwgBxHDn139WV3aDc943ryYNKDxu0m/0x92OMCnNguOicmKUBrao6IEhebF154gZEjRzJy5EjuvPNOQqEQK1euJC/PjMmzLIs+ffqQmZnJU089RVpaGhMmTKB79+6sXLmSevXqlfSSKjXHHXcc1157beGff2ngcDioVq0aKSlaiKAcBT5MnCYfu0OAVPAHKBxsRgF22xiZUxMw23+eNYWWez5iD+lUG/kMcYkhBmz7P1b5VnFczHHMbDITt+U2+8dhF4gmYheJSrpzLdR9dhSUqNhkZmZy44038vDDD3PjjTcWbu/Vq1fh+3nz5rF06VIWLVpE9+7dAejatSuNGzfmoYceYtKkSSW5pEpPSkoKrVq1Ku1lKMrRIe1o8rAbXkprf7AbYYr1IaOdYzDuMy/s25pJ/QX/BWDxSc/S/4RajNo5infz3iXVkcq8uvNIc4ctf6nViZyD48AWtESgJj
p98ygo0QSBqVOn4nQ6GTFixF/uM2/ePOrUqVMoNACpqan06dOHuXPnluRyFEUpz1jATozbzINxjR3AnlEjtS+5GCtGZtTkYUQqFixPiK0TLyeJHBYl9ef8Yf9hUs4knjjwBB48vFvzXVr4WxjryU/RuhrpDpCI7Vqrjy1EyhFRomLz5Zdf0qJFC2bOnEnTpk1xu900a9aMZ555pnCf1atX07p162LHZmRksHnzZnJyckpySYqilFcOYMYGSPaZ1LbEYVfvS8dlqeRPCu+bbX5+/fQTtM5Zwk5q0PS/z/JewRxu3HEjAC/Vf4nuNbqbY8LFniRjMtiSMe1nZEyBG9PRWd1nR02JutG2bdvGtm3bGD16NA888ABNmzZl9uzZjBw5kkAgwA033MDevXtp1KhRsWPT001l1L59+0hKKv43OnnyZCZPngzArl27SnLZSiXBsiz8fj+BQODvd44iDocDj8eDy+XC4VB/zEHxAlsoGqMBu8GmxGokMy2IER0RHC+s+3Y1Hb65HYCf+76I1WA1A7cMxMLi3ir3cmnqpeacEpNJwK6rETddLHZH5xqo++wfUKJiEwqFyM7O5uWXX6Zfv34AnHHGGWRmZjJhwgSuv/76oz73sGHDGDZsGAAdO3YskfUqlQuv18s333zD8uXLS1Vw0tPT6datG8cff3ypraFME8AkBHgxT6hcjGsrsv9ZQfh7Z3ibDyMK4e7O+bk+/M8NJg4fn9e8kpR+9ei2uRs+fFyTdg1jk8ea84mI+cPnlwwzOZd8ro9mn/1DSlRsqlatytq1aznrrLOKbO/ZsycLFixg+/btpKWlsW/fvmLH7t27F6BUU3SVio3X62Xx4sVMmTIFr9dbaus47rjjqFWrlorNwQhh+p7lEG6YibFkYsPfFWBX9udht6YBM2ogHF/5/L476eVfyRZXY9L+ex1nbu5Jdiib/6T+h0n1JpmRAdKGRsYQyAhpGUkg3QJqh8+r/CNKVGwyMjL45ptv/vJ7p9NJRkYGH330UbHv1qxZQ4MGDQ7qQlOUkiAUCpGfn09WVlapis2BAwcoKCj4+x0rI/sxfc+k07L0PrOwCzbBFhzJREuk0ApaNHMRZ216hAAutg57iosLzmNnaCdnxp7J9JrTceGyB6wFMEImadRSuyMpz8mY4k11n/1jSjRB4IILLgBg4cKFRbYvWLCAevXqUatWLfr27cvWrVv57LPPCr8/cOAA7733Hn379i3J5SiKUp7IxcRpXNijAyQDbX/4e5lX48UO3guxsG77Hpq/9384sfi2/S0MafBfNgU2cVL8SbxT/x1iQ7HGYpHMs9yI9yFMcoAIlxvtfVaClOgf49lnn0337t0ZPnw4u3fvpkmTJsyePZuPPvqIadOmAdC3b1+6du3K4MGDefjhhwuLOi3L4pZbbinJ5SiKUl4owNTThDCxE1/4vRsjKmLJhGtnCptv+sOvBMgvsNjywJV0Zyu/JHbhugs+4Fffr7SNa8v8xvNJdiebfeOw4y8FGFeaK3weF7YbrS5GfJQSoUTFxuFwMGfOHG677Tbuuusu9u3bR4sWLZgxYwaXXHIJYFxp77//PjfffDPXXHMNXq+Xrl27snjxYurXr1+Sy1EUpTwQxLjOJE4jBZnSrdmNPeLZE94n0s2WC+yHdx58lkH5czngSGXMMBffW6s4znUcH9X6iHRHul03I9cAO/ssFHG9OKAKUDXqd16pKHEDMSUlhWeeeaZIbc2fSU9PZ+rUqUydOrWkL68oSnlCBqHtwQiAFzszLA87luLDWBkiBuJGcwJJMPf1VVz4m+kS8OA5nZmb+jH1PfX5pPEn1LRq2vEeERUPthUTMucozECLwSQFaE/8EkW9kYqilB57gL2YB34udgNNQcYDSEU/2Jli4e7MP/2aTcu3BxCHj3cbn8IDHT+muqs6Hzf+mAZxDeyBZ8nYNTpSHBoMvxcXnQMTp9GesSWOareiKKVDDsZ9JjUzEp+RV2TRpiSper
BFIQey9lqsv/dqjuc3fo1vwqCBS0lxpLCg1gKau5obERMLRgRKJm6Kq076nyViCje1X2xUULFRFOXY48MkBEgrGCd25b4P270lVkkMtissXLgZiodX75nKed4Z5DoSOO//fseKjeO9xu/RIbmDbbFISnN++HqS6ebAWDcp2OnT2mQzaqgbTVGUY4t0CCjACEwORghkIJofWyRkKFpM+CVV/g6YNn0lQ7eMBGDEmQmsrb2Xdxu8y2lJp9nzbqRoU4QrObwGqaXxhtcQj+kSIBM/lRJHxUZRlGNHCNPJWeplZDiZzKdJwE5BluSAyJiNBfjg0xVZnP7hhcTjZXLT+rx2yhamVJ9C34S+9jFi0YhLThAhk+vEYtKcNU4TVdSNpijKsWMPsC38XmI0YKwLialIlwAHxuIIUqTeZsNWC9/jl9GUDSxPqs31F2/h3pr3MrTaUCMYMdjpzFK0KbEZqd9JwFg2LkyKc5Xo3bJiUMtGUcowDocDt9tNbGwsoVDo7w+IEjExMbhc/9DHtBdj1YjVkY098CwXO/ifgxGNWOwuAeEpmXkB+GD8BK4LzmOfM4n+l2/n8pojGFtjbKHVQzImFiMFoDKSoCD8XQy2uy4djdMcI1RsFKUMExsbS9euXQkEAvj9/r8/IErUrFmTJk2aHP0J8jCZZxKjEYtG6l38EdssTGwmGP4ca95bAXjivoWMyRpLCAeD+wU5vn5PnqrxFI6Aw36aFWCPdhY3nRSASuJBHMZqqoPGaY4RKjaKUoaJj4/njDPO4OSTT8ayrL8/IEq4XC4SE4+y9bEXkxAgDTVFCMSJL1aFpDTL6GcRgfBxL8zYwIg1A3FiMa5TddafmM439WbhTnCbYwvC58jFHj0g7W/ACE0qdvZbLbQdzTFExUZRyjBOp5OkpKTy2w3djxGaHMzTJgc7cC/jniXt2Y8RGhdGBHIxLi8PfPRVDl3fu4B09jGvTk2eOdfHN43eo0pMFSMmyeFjxZUWPo4QRnRSsQeu+YGGaJzmGKNioyhKdAhikgG8GDea9DGTjsqxmIe/9CzLwa67kQmZPvj1D4v8x4fQjlX8Gp/OkEt3Mrv+xxznPM5cx4ERnETs/mlgi410DRD3WjomKUDjNMcUzUZTFKXksTDJALuwLQqplXFhi0Iw/J0He0CZZJEFYJ8XFtx+P+cF32a/M4G+l+/ljoYP06NaD5NRJmIinaKlM4AkAEhyQUz4/PGYvmcapznmqGWjHDaWZXHgwAH27dtHMBj8+wOihMvlIi0tjZSUFBwO/fW0zGFhRGY3doZZEHu0s1gyYNe6SDV/AYUB/YAPJo17l7tyxhHCwcALg7Q/7j/cVP0m23Um/c682E005bwJ2EkChNdSB62nKSVUbJTDJhAI8NVXXzF37lyys7NLbR0pKSmcf/759OjRA7db/wmXKSwgCyM0fuz2M/mYh7wj/NmNHbwX95aMZQ6nQT/+zPfcvGkwALedksrmDnX5ptZL5heMP7vOJF4TF16HZKBJooEDU7ip451LDf2fqhw2oVCIX3/9lXfffZddu3aV2jpq1KhBq1at6N69e6mtQfkLcjDTNkVIpN5FCjPBjsuAsUxyMZaJ1MTEw6sztzHwyz4kkscrTWvwzFk5LG/2NklxSXZWm7SuCbevKTJxM9J9JvNpqqBxmlJExUY5YkKhUKmm4ZbmtZVDkIfJPAtiLI087G7L0u9MiByGJrEbF+CFxT/k0eKN86jHVr6sWothA3fwQt1pNLeam32SsJtpWhHnA3u0cwymwWYexlqqhUaoSxkVG0VR/jn5mC7OQYwI5GM6BEh/M3GXgW2ZSLuY+PC+wG9/BMl9cBDdWcbmuGpccMUO+qVfzGVpl9nnTsAIh7SykdhMQcQ5pe4mBeM+0yddqaNaryjKP6MAIzQ5mNoZmUEj3Zrjw58lGy0BI0A5GJEJAYmwew8sHXMz5wbnsN+VQq/Ld5OU1ojnGz+PwwqbRTImAOyBZ5KRJp2eZa
qnNtgsU6jeK4py9AQwQnMAIyKSTSaNNaVdjNTTSHZYAkYUAHLA64BX73yKUXlPUICH8y8u4NeaDr6o+xqpcanmuCBFxYbwex92H7WEiOvVwB66ppQ6KjaKohwdQYzQSDpzNsaykaeKzKIR8Qlgu7xiKYzjWB547o53uOGPGwC4/dzGLDnuN26udjOnxJ1iBEw6CySGzyPTNP0YKybOPh9+zGyaqtG7deXIUTeaoihHThDTWDMLE4T3YQf8xV0m1oY0wfRixEjEKcGcZ8qTX3D1L5fgxOKtU3vxaMffaBnXkvuq3Wf2k0QDsF1nkSOeZbyz1OxUQzsElEFUbBRFOTJCwB+Yws3I4WciBILU0jjC72VKZgGFsZpZc9cw4PO+xOHjh3YX8Z8zFuLCxfRm04lLjTOWjBMjVJEdAQqw63jEUpLRAnVQn00ZRP9KFEU5fELADoyFIhbGXuxfWw9ghEe6Lou1AXaac1iAPp2/mZNf70UaWWxo1JdBA37ACsAtVW+hY2xH21JKCV9HCjJlHEESRngSsd1pdbG7CChlCrVsFEU5PGSk8y6MKywX230mrrN4irq2crEbbMrkzRB8v3IX9Z8/i/r8Tma1f/HG6DasDvzC8THHMy59nDlOrCQZFSBtbtzYXQEiBqtRF+OaU8okatkoivL3SL+zHRHb4rAzvwRpESOCIB0CpINAHKzfcQDnA7053vqNTYntyL/nYe7ZfRoAk5tOJi4+zhyTh22lSGGoWFNgj3eOw8RoxE2nlElUbBRFOTQhTK+zPeH3bmBfxPcHsBMDxAoRYjAiETKvHRvy2HVzX7qEVrAtpim1HpxPj9z/4Lf8DKs+jG6p3cxxbmzXmWSySQGo9ECLwR6IVg1NCCjjqBtNUZS/JoSJyezAuMxyMC40efi7sJtgFmC71/KwLZCw6+xAto/1t1xIF/9n7HLVJvXJj5kRv4CleUup6arJg9UftK8bWZcjs20kww3shp5VgZrok6wcoJaNoigHx8JYNFuxrYY4isZMwO5tFvk0OYCxaMIi4XUFWHnDQE7LX8BeRzWc4z7FWzuFW366BYDH6j1GlYIqdnNNERuZdyNFojHYlo0H0/NMZ9OUC1RsFEUpTqTrDMwDfx920D5iZHMx95UkCoR7mQXygnxz3RBOz36XLEcV8m/9mLoZLRmWOYw9gT10T+nOwFoD7Y7Nsdh1NT7s9Oo4TAaadCmoiz2VUynzqNgoilKUEEZkdmAEIBsjAAXY4uLCrnMBu7uyfB9urhmygnx74xWcvnsG2SSx5+YPadq5Pd8c+IYpWVPwODw82/hZM6PGHXENcZ1J1wCxknzhn3XD11DKDerpVBTFJoTJOtuKHegXl5XEaKSIE4qmPB/ACFN4DLQVG+K7/17FKX9MJ4dENg39kKbduxCMD3LtzmsBuDn1Zlo4WxQtBg396b0017TC69AhaOUStWwURTFIHY24y9yY5AAw1koexuJwc3DXWVz4HDmAJ8SyW0bQZes08ojn50s/oFPPf0E+vJTzEivyVlAvph531L/Djs8Ewi9v+DoSo4kJXzMWkxCQcpDrK2UeFRul0uDxeDjuuOPo0aMHPp/v7w+IEvXr16dmzZqldv2DEsQu2CzAFhZxa0lmWD5GHELYBZ0iPvEYoXGFWDX6KjptmEo+cawY9D7/6tsNQrDXu5fbN98OwKMNHyUxIdFcOwlbcGRAWgDbVWYBaWiKczlGxUapNCQkJHD22WfTsWNHQqHQ3x8QJeLi4qhbt26pXb8YQWAbdq8zEQ5XxAuM+PiwnxohTCPO+Ij9Y4KsuflK2m54hTzi+arf+5x56RmFqdF3bryTPcE9dE/uzoD0AXZdTjDi2uLc92O762oC1VHHfzlGxUapNLjdburVq0e9evVKeyllhwAmPpOL/aA/gN0ZID+8X2SHZUHa+gcIJxEE+HXM5bRa/xq5JLCozwf0uej0wnkzy/3LeS7rOVy4eKruUzgCDnsN0nVAanMC2K
65OMxsGk1xLteo2ChKZcWPsWj2hd97MS6zAHbtjMRgHNiFlE5sKyQBI1ShAjaMGkTzjW+RQyIf9f2QfoNONdfxQtAV5OqNV2NhMarWKDJSM8yx4o6Lw47TyJRNsWx0rHOFQI1SRamM+IDNGNeZxFwkdTnSfSYJARKktzDiJNlhbsDhZfOt/Wiy8S32k8L7535Ev2tONb3K3IAfpmybwv9y/0fdmLrcVf8ue9qmK3xuF0VrdvIxcRvt4lxh0N8XFKWykY+ZsClxEj/GdSZPAwn8S/C/ANuSkV5l0rLGymHrTefR4PdF7KYqH5z9EZdd2sE+dyLsdO3k9g0mKeCJek+QFEqyU5pd2L3P/OH3ceFr1Q6/VyoEKjaKUpnIBbZgxELiMdIORmIyPopOxvRjWz9OjAWUBxzYxx+3nk3dnd+wg5osOO8ThlzS2uznBRLBsixGbBrBvtA+elXpxYV1L7Snb8q13RgrRtxoDqAeOi6ggqFioyiVhf2YGI0XYzFIWrNYIZFZZ2LZSGv/AoxbTNxeO3ewe3RPau77kU004NMBH3PFlccbkcqjsMfZGwfe4N1975LsTGZyw8k4PGE/mVhJnvC5Jc06HmPRJEX1T0IpBVRsFKWiE8IWGiv8yiZcfEnRmhkZThY5kyYxYv8YYH8m+0adSbXs9fxMC5b2/5ihF4cz/GT8cz5sO7CNkRtHAvB4g8dp4G5gr8kKX1dGPYewRzqnROePQSldVGwUpSIjfc62Y/c5i8HONJMaFy/GKhFXmUy/hMKhZxSA9dsqcu7qTZp3O8s4kZ/+bwFDz6lmBCOc4owDgvFBhmwcwr7gPv6d9m+uqHGFuYYv4vzS9kYSE2qg3QEqMCo2ilJRCWJEJiv8PrIjgJ+ivc482J0CAhgrRtKQwx2XrZWf4x3fl+TAfhZzOttHzmXIOSlGQLzYkzUdMGbzGD7O/phq7mpMaTUFR6zDfCcZagUYYfGa/akGpEf/j0QpPUo09Xnp0qX07NmTGjVqkJycTIcOHZg6dWqRfbxeL6NHj6Z27drEx8fTtWtXPv/885JchqIofkyx5n5sN1UWxrLJxghDAbY1I2nNDozoiBDkmH2CS+cQuK8n8YH9vEV/9oz8kEt6ppj94zAi4jTnnbFjBo9sfwS3w81bx79F3diIbgnSMUCyzyTrrGo0/zCUskCJic2qVas488wz8fv9TJkyhXfeeYdOnTpx5ZVX8txzzxXud+WVVzJlyhTuvfde3n//fWrXrk2vXr1YuXJlSS1FUSo3ktq8EyMoYqWI64rw+/zwdwfC3xdEnEPGPAOBt5/Dcf+FeEI+XnBeTcItM+l/Wpw5Xrr+uIBk+Mz3GUM3DwVgUt1JdIvpZsdlpNtAEDPK2Y2po0lDXWeVgBJzo82cOZNgMMh7771HUpJJJTnrrLNYtWoV06dP5+qrr+aHH37g9ddfZ+rUqVx++eUAdOvWjYyMDMaNG8e8efNKajmKUjnJwVg0+RgXmB+7MBOKu80kQcAfPla+cwDxFgXP30HM+xMAeMBzN6c+MI5TMxzm/N7wK5yivCR7CeesOwdvyMuIuiMY0XSEEZbk8LX9GFeZN3yddLSxZiWixCybgoICPB4P8fFFJxqlpqYWNj2cN28eHo+Hiy66qPB7t9vNxRdfzMKFC0u1E6+ilGssjMtsC3atTB6m2j8XY73kYxdORv7Pd2IEKTa8Xy5QUIDvwf8j5v0JBHBxQ9xL9L7vLiM0UmuTHL6GDz7d9yln/3o2eaE8Lq95OU8f/zQOp8POcJPrSDFodUxCgApNpaHExGbIkCEAXH/99Wzbto2srCymTJnCp59+yqhRowBYvXo1jRs3JiGhaLVWRkYGBQUFrFu3rqSWoyiVhyBmhPMmjNWQhe3eCmFX/0sNjKQxQ9FBZQkYCygrC+/ofxP7xWvkkMjQ1Pe4dsIVdGgUPj/h88WAlWTxyO5H6PVrL/JD+VyRdgUvNngRV8BlC5v0XUsMn78+xqLRZl
mVihJzo7Vu3ZolS5ZwwQUX8OyzzwJmfsjzzz/PxRdfDMDevXtJS0srdmx6enrh93/F5MmTmTx5MgC7du0qqWUrSvkmgKmfycEIRwIm9hKDHfCXVxyFxZaFMRo/drGmEziQiXfsOcT9sYYd1OSG2u/z+CMdqZOCXawZa/bfF9jH0E1DeWffOwDc0uAWJtSbgNPjNC48aUeTgrG0XNiuMxWaSkeJic3atWu58MILycjI4Pnnnyc+Pp65c+cyYsQI4uLiGDRo0D86/7Bhwxg2bBgAHTt2LIklK0r5xgvswM4sy8e2XDwUHREgYkLE5wTsDLUEIHMZBbefS1zOH/xEBnc1+YAX72tImmSKpYT3zYW5vrmM2DyCHf4dpDhTeCXjFc6vfr7dMdoTPiYO2xqqhnGfqeusUlJiYnP77bfj8Xh4//338XjMv7QePXqwZ88ebrjhBgYOHEhaWhqbNm0qdqxYNGLhKIryN2RjhEYaaOZiLI4Q9sNeXFgh7JTmILYQODBtYXKBJe8QmDSYmEA+n3IG0058m9duqEK8zJgJu+K2x21n1IZRzMqaBcApiafwcr2XaZbSzOwX2VSzACM2Hszws6qo0FRiSkxsfvzxR9q1a1coNELnzp15/fXX2blzJxkZGbz77rvk5eUVidusWbOGmJgYmjVrVlLLUaKEy+UiNjaWmJjS6/seExODy1VJJ2lZwF5MWrOMTZaUZgdFh5tJH7PIdGcvRhASwvu7Laz3HsQx4zbcwFQuZ9W5z/PKiBhckh6dD4HEAM/seIY7f7+T7GA2Cc4EJjadyLW1r8VZ4DRCJ408kyjaGSAdIzTqOqvUlJjY1KpVi5UrV1JQUFDkQfTtt98SFxdHeno6ffr04a677mL27NlcdtllAAQCAWbNmkXPnj2JjY0tqeUoUcDlctG2bVuGDBlCTk7O3x8QJZKTk2ndujVOZyV7egUxrWd2YIRkP+Zhno/dy+zPfc4iuwVId+Ws8PncBYSeHoHz42kA3MpE0gfewuODHDjc4f3d8MWOLxi5biSrvKsA6JPahyebPknjKo3NeULY6dXS2sYXvnZVjOtMqfSUmNiMHDmSAQMG0KdPH6655hri4+OZN28eb7zxBqNGjSImJoYTTjiBiy66iBtvvBG/30/jxo157rnn2LhxIzNmzCippShRwuVy0alTJ1q3bl2Yzl4aOJ1OEhISKpd14wP+wGSdubAD/R7sSZcySTOyZkbGB8gflRtjcezcRfDxC3H9+gV5xDPE+RoXjOzHwK7hc7hhe8F2btl8C6/tfg2ARjGNmFR/En2S+9idAMSaCmEET9xosZimmlWi+GeilCtKTGz69+/P/PnzefDBBxk6dCher5emTZvyzDPPMHz48ML9pk2bxh133MHYsWPJysqiXbt2LFiwgA4dOpTUUpQo4XA4SEhIKJa6rkQRC7sjANjWSi52i5kQtovKE94eWayZiwnuy//2HT8SuLMv7j2ZbKUOg+Pnce99J3JqK7NvMC/Ic1nPcce2OzgQPECsI5Yxtcdw63G3Eu+ItzsBSPzFgREXsEWvFqZLgMZolDAOy7Ksv9+tbNGxY0eWLVtW2stQlOgihZpbMRZDLnY6cza2tSINMGW88gHsJADCx+VjUpxXzCM4cRAuXw7f0Ykbqs3h5bvr0DwcLl2Rs4Lh64ezLN/8/zq3yrk82ehJmjibGMFyYkTMwnwWAzcpvI4ETK+z1BL+s1DKBYd6NmvXZ0UpiwSx3WaSQSatZILY1guYoL+0+Jf/0ZHWTgJQYMHrD2DNuhMXFjO4hJeav8i8/8ZTPRlyfDmM2z6OJ3c8SYgQ9Tz1eKrxU5yXdB4Oh8Nu2umJOHcQ24WXjxGc2tjtaRQlAhUbRSlrFGCsGR/GgnBjN7KUWEkkMeHvIos1pSeZA/DlYj19OY6ls7FwcDsPsPn0Mcy/yUGcA+btmMfI9SPZEtiCEyejao3inpr3kJySbK4t4ibXFvFzYbLhpONzHXSUs/
KXqNgoSlnBwgT2t2O3k8nBxEPysGtXpP1LArbrTCr0pVhzH+G2NJmE7j0PZ+YqDpDMJbxOp/7n8trl8EdwByM3juTtvW8D0DG+Iy80eYEOyR2MFSOWjMSGIsc4y9hmid3Uw1g4ivIXqNgoSlkghHGZ7cYISTx2DMSJLSQySbMAe2iZpDwLUqz59ceEJl2MM3cvv3I8//HM5Y7rWjCgg8XL217mph03kRXMIsmZxPi647m2xrW43BE9zSxsAZHIrhSKSgFnNUzBplYtKH+Dio2ilDZ+THxmP0YoZEyzjHB2YbvJwC7g9EQcL21pYgHLgrmPYE0dg9MK8QFnc3PaDF69pwrVGmTSa90wPs75GIB/V/k3zzd+ngaeBrb1IllsAexkAH/4/BYmndmDEbSaFE1GUJS/QMVGUUqTHIzQ7MVu8S+1M37s1OYczP/WWOx5M5GTNePl2Bx49kr4/E0cwL3cyfuN7+bjO2Ge81luXXUrOaEc0l3pPFnzSQbVGoTDEx4FINeTRAQ3JuPMwghgFYy15cROBqhEpU7KP0PFRlFKgxCmG8AejLAkYB7qUqgpHZPlf6j0PZOhZWAH7sFYQHt/xXqoH46tazhAMv/HdJJOP59pgzdw6a4rWZK/BID+6f15utHT1KSmcZMVhK8tGWcSo3FGbHdhhMaF6QignZuVI0TFRlGONX7s+IwLUwfjwU4CkPhHZNNMV/izjA7wYkRKal+WvoP16BAc+dmsoSX9He9w+RXHE3PKJDr/fht5oTyqu6rzbJNn6V+1vzmntLiJpahwSVxGBM6FsWr8mIyzVFRolCNGxUZRjiXZmCaaWRjhkOJMsSLcGOtCJm36MdaHfA/hkc0YsdgXgHduh7cfxgG8yQBuin+Ju689wCu1uvHl5i8BGFh1IJOqTaJafDXbegEjJtLTTJpousPnltRpca/VRdvPKEeNio2iHAtCmHTkndjuMqnGj8GuYYkMtgfCP3OxEwciv/dthwcuhl8+J4CL0TzMR/Vv4P+Gv8L1wWvJz8mnlqcWzzd+nvPSzzMCIlZTAXbTzj8H+EPha0vNThomESAJRTlqVGwUJdr4MSIjbrMs7GJJyThzYlxjjvB3EniXlOcgxtKRaZw/LMGaeDGOfX+wjdpczEziupxA0rk9mRD4FIBLky/liaZPkB6Xbp/LCh8vnZnBTgCQ2TfSiiYm/LMWWkOj/GNUbBQlWljYbrMcilonUhwZQ/HKfxdF61tkXEACsC8E7zyINWMsDivEIroziBm0P3cdizvWwBfwUsdThxeavMC5CefaacuuiOuDLWqe8HmlTicFk4Tgxrjq6qJPCaVE0DCfokQDmT0jTTQt7IwusSRk6BnY3ZzFpZWDPfFSyNsNj58Dr92OwwoxntsZkPghVa6ayIKOp+HDy5BqQ/ip3U+cm3aunb4sg9KkficQXoMImYU9A0dGE1TFdAVQoVFKCP2npCgljRdjzezCPPCl9YwPe4yzAzuVOA47SC/ZXy7sws4qwC9LYcJFsHsre0jnUl5lVcMO5FzQlr1VfqOOpw6Ta07mnCrn2C44aZYpFow0zhQXXiB8bYkVpWBnnEmWm6KUECo2ilJSWJh4zE6MS0wsC7Fa5KEu1kssxmUVWVMTxH7IxwH5IXjtYaxZd+AIBVnKyVzMTHwdv2dXr0bg8XFF9St4tOGjVHFWMWIiiQWSYSaCI+sQS6cAk8YsrrUQUB/t2qxEBRUbRSkJImtnxJpxYyyTfEz8488dm2WSplgd+eHziOWTtRMe/j9YsRAH8BCjuct9D4FzbyDQfgr1XPWY0mgKvav3ts/pxrScEStGXHF/tlIk2yyAEZ1qQI3wOhUlCqjYKMo/RVrOZGGEIxsjFg7suIx0a87FHpscKQBSa+MNn2/dInhoEOzbwR6qcinTWVy1Ld7+XaH2D1xR/Qoeq/0Yqd7UonUzEouJxSQfSHfoyE4AofCravhnTbTHmRJ1VGwU5WgJYnqa7aForMWPnc
4sRZMiPlLfko3tWhMcQKwfXr4Ha94DOCyLzziNS3idP1ouJ9i3NTWT45jSdB590vqYY6Qpp1gvTuzRAJEZZ57wmmQsQV74mlJDo/EZJcqo2CjK0ZCHSQDYg/lftB/zQM/FdqE5sccEiOCALUoBinZr3pEJD14CP39NCCf3cyf3O24j0HMMdHmS/6T+h2erP0vVtKr2OiKz2GTkgHQikLoacd8VYOJAORhXWy1MzMaBokQdFRtFORJC2NZMZPW91MvIKACJw/gxD3eZagm2AEgtywFg1Zvw7DDI3c9WR10usWbwZWpTQv1PJ7XRLzzbeAaXVL3EWERejDjJbJsgRkQSsDsTxGKnTcs+qeH9XJiMszhUaJRjhoqNohwuBRSdO5OPHaOR/0k+itbORKYdS+2MI2L/YA68dD0smQbAHM7jSusl9jb/Es5rS7dqbZnefBUNYhuY/SNrZqQzs2STEXHuWOwaGynUFBGsixFHRTmGqNgoyt8hKc27MFaIzJ0RV5jEYbwYt5kD23Um6ceRrrMDGCtj/TLjNtu6Fq8jjpusR3nOORR6jsbd5TkeqDuem+JuwuWMGBojAf5k7IadXowVI7Eb6eLsxfQzyw2vo2r4pTNolFJAxUZRDoVM0czGfpBHDi2T9jLyP0lSjXOxA/ahiO/jgfwgvPYwvHknBAP85GjDRdYbrKkSDwNOoWWzXGY0+5YTEk8wQiZxHQn6i1UjloobIypi4Ui8Jh5blGqH32sigFJK6D89RTkYFsYC2YixaPIx7rNcTMwmO/xZREja/4vARDbXzIv4fucmeOAMeOM2CAZ4ghvoaH3HmtarYMQJXNvqJJa1XGaEhvB54sI//RHXkvNJMoA7vE8ixmoSF5oFNEBn0Ciljlo2ivJn/MAOjJD4MA/7PIylIAIijTIj585EpjtL3Uw8ZrRALvDtDHjmGsg7wE5nLf4vNI2FnlPh39dQvfMHTGvyOue4z7HjK5HBexGdAkzsRzo0S3cAsag82BlpVTBTNbV+RikDqNgoimBhHuQ7MA91aVopGWUiNlKUCXbqsQwck4JJecA7gNBeeORa+GYmAHM5j6GhKeyuuRX6d+ScZk15qekqasbUNGvIxW53I9aICJhkoSWGvxMxjA1/nxo+tiZGbDQ+o5QR1LBWFDCCsh3IxLjP/BixEXGRdjI5GItHOgGI9SExHHf4O4mzrPgYbmgL38wk35HAUKZwPu+yu8trxA3rznMZN/BevfeM0Mh5pPhSAv5Bimacge0682CEh/BxQUy3Zk0EUMoYatkolZtIa0ZiItIgU+IvEoyXTDCp/M+J2Ca4MIH4Pfnw4hj4YBIA/3N2YWDoVdYnJsP5Z3Ni+13MaPYNzT3NTaZbHEWtIQu7x1keds2ONNaMHHYWCL9qYNxmYnUpShlCLRul8lKAsWbWY4RjP0ZksjFxlrzwtn3YGWCRXQBisN1evojzbvgOxp4AH0wi4HBzB/fTNfQF64/7FcfV7bn95A58nfE1zeObm1/3kjGiIWIi1pRcJw7jEkvFWDzJGCGKxQiMB9OtuTYqNEqZRS0bpfIhmWZ/YNenxGLER2pUxEUl+0sqswiMIF0AsoGAH96+D2Y+AKEga10tuTj4KivcraDnDTT813xeqzebfzn/Vfwc4j7zYo8ZiBxu5oh4JWLHcCyMyGjbGaWMo2KjVC4KMKnMuzEP5/2Yh3wWxjqJx3ZVidhIZhnYRZSOiO9jgXU/wbOXwcYVhHDwBKO4Izgeb61foN+J/F+rTkxqtJJUV6oRulyMhRLZciay5Y0P25oSd1kBtsUTAtIxbjMdC6CUA1RslMpBCCMsO7Er6iWrzI1xn4lVk4d52PuxU5zBtj4C4XMlA+4AvPUIvHoXBArY6m7EJYGX+dxxKpzyIKlnPMHkek/xn7r/sQP2KRQfchbCtmAk7iMWjB9juch6LIz7rBqaBKCUG1RslIqPD5MAcADbQpA2//Lwjkxn9mC7uXKwXVbST0zqXd
b/Cs9fBr99C8CLzqsYFXiUnCq74YJudGvt4tXGy6jvrW8ETCZgijWTFL6upFjLXJpQxLVkdLNkxDkwac3qNlPKGZogoFRcghh32TqMRSMZZDLMTFr8H6BoOrMQWddyIHwMgBWEDx+FW9vDb9+yy12XXizgqtBkctrPxjXiRCaefC6ftvqU+gn17dqXyCQCsIecpWBcYRIP8kf8lGJRB8Zl1gCTLKBCo5Qz1LJRKh4WxlrYhj1qWRIAwA66SzqzxF9EjKQdjPzviAm/zwO2/ApPXw4/fw3ADNdlXBt4gv2JPujbh+Pb/saMWh/RMaVj0cy1JOyWM3L9SGIxmWZgLJkUjPBJP7OqGLFRt5lSTlGxUSoWfkzvsu3YNSgyd2Y/5mEto5el55ggnZl94e8lS80JOIPw3uMw807we9ntqcNl/snMD54DLd+Gc0cwrHE/Hmswk0R/orGSJB4EtqDFYXckiOypJpllkd2b/dgtZ1JK8M9IUUoBFRulYmBhXGE7sDO5pDgzAbsYU+IxUssi82cis8/EkvGGP+9YA49eDr99B8AM9xBG+h9jf5wDzh5EWvsFvFT7RS6od4E9TyYUPrcE/F3YGWeRfdNkmqf8T7Qwo5r9mEma1dHZM0qFQMVGKf94MTGZnZgHeU7ET2/4JYkB8vAXUZCstEhLprDmxQ9vPAxv3QOBAnZ76nKZ/wXmB86B4z6APldxRr2WTK+xirquuuY8IliSxebETl3+c4Q0JmJbKrYYWZgkgKoHOUZRyikqNkr5JYhxme3G7pQsFozER+Kxp2rKtEyxakQYpIBT2sLEA1u+h8evhMzvAZjuHsp1/kfIiXVCrytwnfAq99e/j9F1R+OyXCaBIA8jGmBbS9JyRoo1/9xyRgTQH35fHZPSLP3OFKWCoGKjlD+kon8bxnVmYR70MdhxGVd4m0zMlIQAEYHc8DYRI0kgyPXCa/fCnIcgFOSPmIYMLpjCJ4GzcDRbiNXnSppWi+P1ukvpnNrZrupPwY7DiJssMh4TE75GLMa95gqvrQCTBBDEuM3S0f+VSoVE/1kr5YsCjLtsH+ahHYsRDrEgJC7jx3aPyThm+dcuD3qJ7SSGj/nhS5h8FWz9BQsHL7iv5+aC8fjigF6XY7V/maE1h/J47cdJ8iXZ7f2lbiYOuyYmctCZZKW5w/tJdlpknEhbzigVHBUbpXwQxFgtW7FrVsQ9JplfUvgoMRmZNSPBeh9GWGSMsz987l0H4M3b4INnAdgc24KBvhf5KnAK7uYfEjhnKNXSCnixzhzOq3ueOcYdXo/UwQhSmyMjCcB2kcWH1yBdoi2My6wa2kBTqfCo2ChlGxkBsBPzcIeio5d92PUpka34JftLXGfSLDMH27UGsOI9eOFq2LuVoMPNw84x3O27AyuhAP49kEDrmZyTcg4v1nqRWu5atoXkwe4I4OfggfxE7C4BFnabGrGG6mBSmzUJQKkEHNY/899//53rrruOrl27kpCQgMPhIDMzs9h+Xq+X0aNHU7t2beLj4+natSuff/55sf1CoRATJkygUaNGxMXF0a5dO95+++1/fDNKBcMH/A6sxTTKzMFupJmLEZ9c7DYuUpEvbrQc7Kp9cV/JcLNd2+GJATChL+zdyurYzrS3VnBb8D5ovYCCa5qR1O59Xmr8Eu+1fI9acbVs95ggVoxYS+K6k1dk7Uwsdip1LGYkQDoqNEql4bD+qa9bt44333yTtLQ0Tj311L/c78orr2TKlCnce++9vP/++9SuXZtevXqxcuXKIvvdeeed3H333YwcOZIPP/yQLl26MGDAAObPn/+PbkapIAQwlsw6TBKAtJdJCL93Y6waaVKZg91ORmIkMjVT3GwSCwmFYPFkuLolfP0WPlci/3U+TlvfV2xIrQsD++DrfwHdarTixyY/ckXqFTicDnNtcYPJ1EwRODfF58yIG0/cevHYItMA2xpTlEqCw7KsPzfOKEYoFMLpNLr04osvctVVV7Fx40YaNW
pUuM8PP/xA+/btmTp1KpdffjkAgUCAjIwMmjdvzrx58wDYuXMn9evXZ8yYMdxzzz2Fx/fo0YNdu3axatWqv110x44dWbZs2RHdqFIOkHYx28I/xUIg/FncZpI9JinFnvD7fZiHehxGlA5gWzluYOsaeHY4/PIlAEtiz+Yy37NspiGJnaeTe8Z1xMUXMLH+RK5LvA6nw2nERNKZwYiXdAEQSyoBu5lnFewOzqnh/WV0QG3scc+KUgE51LP5sCwbEZpDMW/ePDweDxdddFHhNrfbzcUXX8zChQvx+Yw/Y+HChRQUFDB48OAixw8ePJgff/yRjRs3Hs6SlIqEpC5nYqyZA9i1MPGYh3c8tqgEsOMvYmW4Ir7Pxlg54sayvDDjThjVHn75kv0xNbmImXT3vU92rRi4/F/knn0ZJ6W1ZGXjldxQ+wacyU67/uXPzTnFipHkA5k/Iy4zb3j9UtOTDjTCWD0qNEolpcQSBFavXk3jxo1JSCjqH8jIyKCgoIB169aRkZHB6tWriY2NpVmzZsX2A1izZg2NGzcuqWUpZR0fsAeTZQbmQS2ZXi6MZZCHERv51yrpwjKOOYSxNqSeRo5zA99/Ci9dDdvXAjDdM4wbCiaS604lvdsk9na9BY/H4u5q47ml5i24g2674DMJW3Cg6P8WqZ1JwnbXJWInLkjftVqoNaMolKDY7N27l7S0tGLb09PTC7+Xn1WqVMHhcBxyP6WCI+3/d2Ie1C5sa0TqVkRUpOVLPkaM5AHvxs4+kywxEYq8nfDSTbB0BgCZsa0Y7HuBpf5/0aDZRvb3OoW91X+mdWxrXm30Ku2T2xvxCmFnsUnbGonB+P50LVkn2HEkb3j/VExKs/Y1UxSgHKU+T548mcmTJwOwa9euUl6NctTIxMytGItFHuIujPssBnssgAw2k6QAZ/i7HMwDXdxk4j7zAcEQLH4RXr0VcrPwO+O4l3E86PsvCUlOGva+m00Z9+BwOBhdbTT31biPWEesOX8ydj81ERgJ9EvcRYasFYSvGcIWwCAmvbkWtlWkKApQgmKTlpbGpk2bim0XS0Usl7S0NLKysrAsq4h18+f9/sywYcMYNmwYYIJQSjlD6mW2Y2IqXuyWMh7sQL+8j8NOGADbRSUWj1ToS4GkC9i40tTMrP8GgM9jezPE9wwbaULnrmv44dRebEr4nUaeRkxvNJ1TXafa3QVE1JIwgiZxmhC2aFjhdYlLT+p1JAEhGSM0YpEpilJIiWX5Z2RksHHjRvLy8opsX7NmDTExMYUxmoyMDHw+H+vXry+2H0CrVq1KaklKWUAGmW0GfsFYLxKX8WIH1aWR5f6I7VIX48C2OCR3UuIz+UD2AZg6CsacCOu/Ya+nNv9hFt1883HVrUf74TfyXa8MfAm/c3na5fzQ+AdOTT3VCJhkvIloScxFpmfKTBzpAkD4pwidzMWpg0lrVqFRlINSYmLTp08f/H4/s2fPLtwWCASYNWsWPXv2JDbW9OPo3bs3Ho+HGTNmFDn+tddeo3Xr1pocUJEoALYAqzHFmT7sLLJc7CaaIeyYhwMjKtnYac4iAJL1JRlobguWz4LrW8D7TxCy4BnXDTTy/8L7MQM4/4Lv2H5lLVbWfpIarhrMqTeHqfWmkuJOsWM7yeFrS6KB1OlIvCYFM18mCSNOqeHt0jCzHtAYk/KsKMpfcthutLfeeguA5cuXA/Dhhx9SvXp1qlevTrdu3TjhhBO46KKLuPHGG/H7/TRu3JjnnnuOjRs3FhGWGjVqcNNNNzFhwgSSk5Pp0KEDs2bNYtGiRYW1OEo5J4AJ/v+BERVJEZYAujTBlHiHWAoS55Dgvxe7El9iJgWEizh/hZdGwqpPAPgh5iSGFDzHyuAJ9DgxmwM9/sOcBPNv9j+p/+GZms9QLbGasYSk8zPYrWxisWfegN2VAOzkhYSI7R6gBkaMNDajKH/LYRV1AsWyx4Ru3bqxZMkSAPLz87njjjt4/fXXycrKol27djz44I
OcfvrpRY4JBoNMmDCBKVOmsGPHDpo3b864cePo37//YS1aizrLKFJYuQUjJgUY0cjGjm1IkWMQO/6ST9GpltLeBewMtNTw/rm58MZ4WPgIBP1ku9O5KTCRl7iSRjXhpH6v83atK/Bbfmq4ajCpziQuSrvIiFYituutAGOdOMLvpfOzZMR5MEIZhz3dU0YzS/PMcpNeoyjHhkM9mw9bbMoSKjZlDOnIvB3Tw0yq6aVBZQHmoZ2HeZCDESURGws7RlOAecinhD9L9prbgp/mwLQbYfdmAF52XsnNoYnkxlTl/LN+5utO/djErwBcVeUqJtaYSLoz3c4iS4i4XjbGopH1p2DEowBjYUkSQipG7GKw62a0OFNRDsqhns36u5ly9IQwwvA7ZmKmWCXy0I7FBP7BuL6ysR/4Yj0EsBMGxF0m6c1J4W27f4VXr4efPgJgtfsErgw8y7ehLvTouJ/8noOZGfM6ABmxGTxX7zlOjQ/38IvF7gYtSQAOjPilYM+8ke8lWUCGseWF11YdE7vR/zGKclTofx3lyJH2Mlsw1f8B7PYsUi9jYQRH5riAbblEuthEZHLD+0jlfQDYmwPz74f3HzMuM2cVxoTu5/nACJrWCXLm2Q+yqN7thAiR5krjvvT7GJ46HHe82678d2NEJbKFjXSAlvhQcngfGU0gA9BkGFot7LEAiqIcFSo2ypGRjXGX7cLusixDy+QBLfUoIiwSjLewuzVnh7dJzYrESwBcFnwxE2aNhn1bCeFgKkO5LfQA/oSqnPLvd/k641LWOvNx4+bqqldzT817qGpVtbsISEZZCLufmRSGijtP0pcje67FYScxiDUjrj9FUY4aFRvl75Gg+o7wS6r1I4sxfZgHeXZ4X3FLuSP2k1b/UrwJ5sEfF37vAdatgjeug5/NHKQVzk6MCD3Nckcn2nT9il/+dRFfJGzFgYPBqYO5J+4emqQ3sVvXgLFS8sLriMeO/USmV4vI5WFiMCHssczpGGtGW80oSomhYqMcGhGZ7dg9zHyYfzlZ4X3kwZ6EeWhLGnE2Rd1W0rAycnSzFH3m7IE547A+fR6HFWKPszq3hCYwLXQ59ZptwHPWSfxQ838AnJ94PvfWuZc28W3MsYV1N9ipyRLE/3PnZqmjEQtLstCkDU117Mw3RVFKDBUb5eB4MYH/ndhupwDG1SWtZmQkszSulGJMiXVIinMOdmNLcZdJexhnAD6dAm+Phdy9hHDxNNdzd+hurGp+XGcNYMvx74AD+ib35e4qd3OCdYJtDcm0zBB2+rTgxggg2CnXcm1p4JmMPaK5GtoBQFGihIqNUhSxZLZhREaC/9nYmWKRw8PysBtjSpv/IMZlJunMIkwiSnKe1Utg9g2w2QzM+4Qe3MCTrI1vSODUu7E6TwK3n34J/bg9/XZOTD/RnPNA+Hyx4bWFsGMy0sQT7O4DQYzbLBE7IcAf/pyAjgFQlGOAio1iEJGRbswSc5EWLhJQL8BOeZaCR3mgy3RN6SmWF/4uASMM+wjPqtkIb9wC35kK/0wacROPMsfRB6vzM9DtPtwJB7g45WJuq3obrUKtbJdbZLxFXGOuiJ9J4et5sQebSXeCyFEGHqAuxmWmCQCKEnVUbCo7Xkw8ZgtGQJzYLf/zMSKRg201SGNMiYWIxRKZZeYMHxcTcWwM4MiGNyZgLX4MR8BHLgk8wO08xk14W86HM1uRXn0vw5OHc22Va6nrqmtEwoddCyPXj8FOmZZmmjKmWep1pFN0AHMeKS5NwbSakS4FiqJEHRWbyooXY8VswVggEseQwkdp3i3t9t3YRY/Ss0x6mckYAInTeMOfpYYmPwRfTceafTuOrO04gNcYxBgmsrXu79DrTFo1y+L6lJu5NOlSEpzhaa9iVcVix1ik34VYWzIu2okRHonLBLGTDxLC91EFqI1xoymKckxRsalsSOB/C3atC9ipyxL8z8U8yOWhnoN5YCdjV9aLK036nlnYrf+94eM3fgmzboTNy3
EA39KZG3iSb9OrQo8bOftELzfG3c2ZVc/E4XTYbrjI9GmxVOKw40GRbW7EynFhrBYfdhGmuPpqojUzilKKqNhUFmSmzGaKjj+2sN1mvvC+UhTpx84uk2wyicO4Iz4Hsa0PiaNkbYAXb4UVJi7zO3W5lQd5I74HMd0f4drTgtxQcwLHxRxn1uPHiIZYLonYiQDSgUDiNSIi4j4LRhxfELFvEJPKXB07e01RlFJBxaaik48RmI2Yh7cTO1tL6mUip2Umhj9LEaRke0nMJDF8nBdj5UhsR3qdZWWR98FYYj5/AXcwQB7xPMQtPOy+mrTT3uPpcz/mspR7SMxLtP/1xWL3UwPbmpHsMUm5FsFwYk/yTMYe05z0p2NrYqc2K4pSqqjYVFTyMQKzDmOdiKUiw8rkJVlbDsxDPS/8SsDOKJOHvtTbiBtNXGUxsPfAH/z63X9pOf8tquQbE2k6l3K7826qnbyFD3rv5fSqQ815XdhdCMRVJ50GDmAPNhMXmbjVxPr6c52MCJFYWHUw8Rl1mSlKmUHFpqKRC6wH1mJa80tasgTbfdh1LvIzHjury4XdnkYsG7GApO0+QBzs8+1jrm8OmT8+zUXv/0DXvaaU/wv+xU08QnzHWnxyXi1a1Glix3Ek2SByWJm4vOIw1olYX3ItwmtNxi4klcSBSJGpisky08JMRSlzqNhUFA4AvwJrsOtZJC4jVoukAfuwW+nL5wLMA90XfiVhpzZLEWQB5CTk8F7ue7wReIMdmz5k4sIQQzaZFLZ1NOUWHiK/7dlMPTuONvWxxzknhtfixQiEtLHxYHeLBjsu48LOkpP+a1LjI2KUEF5/VUyWmcRzFEUpc6jYlHf2AN8BP2J6lUkNjEXR3/Al9iEjmqUjsiQDiBCI+0osnjyw8i0+d3/OywUvM9s7m/SsXMZ/4uTSn4zI7CWNexnHL02v4e7zYujSCtsqkSQEsIP+Ml4AjEUi15ceZtKXLB4jdBJLCmBPy/SEt9UKb9NeZopSplGxKa/8DnwGrMJ2iUnFvPyUAWZO7EFhCeHvZBSAFEpK+xcRIDdk52XzivMVns57ml/5ldR8GPuFk1HfuIkNBfARw9OM5KP6YxkzII0nmmJbI9J8UzLUJOFAanEk1RqM2IhgSHabuMa8FBVPiePURYeZKUo5Qv+rljd2A18CqzFikk5Rq0AC7zKXxY9xU8nESSg+78VHkfjJTnbypPdJnuZpDnCAmACM+y6N/y72k+LPAUK8wcXMqPkA1/RpzE0Z4JAuAeKGk1ToREyCgqRXR1b/F0SsW9xf0q9MXG+R7W9iMQKj7f8VpdyhYlNeCAK/AF9jxEMmS4ol8+cOAGKxWNitWiQJQAL/kgwQju/ssfYwkYk843iGfPJxhOCun1oyckEW1fK2A7CEbjyf9jD/6deJ91qDQxIQpL5G0pPlM9gZZyKG0iAzFTvLTZIXkjAiGYvt9gtiOjLXxgiRoijlDhWb8oAfWI6xZiSALmnM4nKSuIjMivFjHujSvFIaYgYwWWoOCrO68jx5PO5+nAfdD5LtMG0F7ljblWve30+d/WsAWE0rnk6ZyOnnncvrLRw4xToRi0MEQyr5JbAv8SGppcnFtsREmJLC9yQZaZJpFsC2ZJLRuIyilGNUbMo6fmAFpl5GMrLkYR2K2C+IXYApD3siPgvi6joAIUK8lvQat8fezlbnVgCu2XIy/53noMmupQBspQ5PJ95Lm56X8XQnN64YbNEQYYjBzmoT95ZYIGLNSNPLZAoTD3BhF4fmY2esSZp1E0y9jIqMopR7VGzKOhuATZi/KZlKKT3BxG3mjPgJttBE4ojYHgtLXUu5sdqNLItfBkDfP1pz/5y6tNm+EIAsUnkh/jbqnnkd952cgDsyVTrOnKMw4yyEHYeJbJYpnZe94W2SsBDEiFFc+KeMapY2NLUw6cxaL6MoFQYVm7LMHkycRgL5IjKR8ZBI6+avRAYKhSjTlcmtNW/lzaQ3AWi/pxaT5n
Sm65YPcPMTXmKZHnMdCWfcxk0nppvnvUzYdGDXukR2EhCrKoaiVo647zzYBZnx2AkEktIs82aqo8F/RamgqNiUVQIYoQHbKonsBGBFbD9UIWM4bpPvzOeh1IeYmDoRr9NLrZw4nn2nJ703fEw88wjiZKbnSkJd7mJIx/rEpFC0/kWERLLWZHxAYni/yE7LSdhtcaSLdAF2IWdyxLEujMjUjjheUZQKh4pNWWUfJpAPdnfjSCLThQXHn34CltNinmseN6beSKYrk0QvPD/nbAb+spQU5gEw33Uh2W3vo98pLYmRNGMRGEmhlnHPMpxMXGKSaCDJAsGI7anYLjNxv0m9jIhMTey5OIqiVFhUbMoiFqZTc+hP24N/2ufPgfNIsXHBGucaRlUZxUeej4gtgLvndOfan36imjUfgM+dZ7GrxXjObdGJ2OSI60k8RqwXyRSTrDIRHGnmKbUzIirSwdmPbR2JAHkwQf96aIaZolQiVGzKIvmYQkgXtkUjrjM4tEXjgN3Wbu5OuJvnY57HGQgxck5XbluVSZ3QYgCWO7uwreV4erY7g1gnRhgi62CkUabUz7ixBUdSlmWbZMXJd7HYMRrCnyVjLQUjMtpeRlEqHSo2ZRFpjAkHD/oLfwr++ywfk2InMd4znpxANpe+dxJ3rdxKo9DXAKxxtmVz8/vpfuK5nBgbPtiLnc0mXZ094Ze4ziI7LEssRn6KpeOJ+CzItjRM2/+qaNt/RamkqNiURbIpmgAQKTiyTboDhBMAPnV/ytXOq1lvbWDg+1256/utHBcWmXXOFmQ2v4fTTulPK4+zaPdkySiTVv8x2AIjLWhkeFlkUgAR1/djD09zYVszSUB9TPW//ktTlEqNPgLKItKkMrKHmbyEsPsr5AwxNDCU6dbLXPzev3hvZYjmoS8ByHQ2Y1PzcZzc/RKauV3GigEjIFL7IlMvJYXZhV3ZL2MH4jH/UiKnZsZH7BuDneLswIhRA7RWRlGUQlRsyiJidfw5NiNdlOU98KPrR6YFplF383G8tOJbYilgi7Mxm1vdyUk9L6VRyG2LloiFWB8y9dKJ/S9Bzi9pyJJy7QrvkxxxLrFk5HgRmWporYyiKEVQsSmLiKg4KSo6kjAg2xzQ2tma5x3P81GTj/i84aUkptblpLMvpX7QY8dhJMYiVoYIThx21hjY1opkmSVg9zQTq0UyyySuI3UzdTG1MpExG0VRlDAqNmURmUETaeGIVSKZXV7z0+VzMTx2OMPzh8NV2O4xFyarTc4nYwQiBUdiNpFzcKSFjIiLxI7EGoqNOCYZE5NRkVEU5W9QsSmLJGM3zJQHvfQlE0sF7Mac0jpGCjHzsetj8jGiIoITxM40k6aejvA1pRYmFHG8FHZKA07pHt0Ak8as7jJFUQ4DFZuySBx2wF1qXSTu4sEIiIiHBPcliwxskRHBkFb/Uqwp55JamrCVVDhqOS58nIeiIpMINMS4zOKjc+uKolRMVGzKIjKRcif2fBhxefkwIiDFlhJDKcCIRD72zJvI9xKbicwqixzXnIrdiVksH/mZAjTCWDI6vExRlKNAxaYs4sB0P96NPVFTrBz5G4t0mUnNjZ+ilkwCtsiIcPixkw9iw8cnhI+Tws248DVTgGYYl5mKjKIo/wAVm7JKFUwcZT+20MRgxCOAbblIo0wJ6ItrTUZGS3pypBUTHgNdWDNTA9uycWGKMZtjRCYpyvepKEqlQMWmrOLCWBWrsFOOpeZFZsGI4MgIZSf2vBixXGQsgVT/S5sZwvulYepi4jEC1xLjMpPeZoqiKCWAik1ZJgWTWpyJsVCkdY3EZ2QImbSLkaaasdiWCxS2tCmsn3Fi90Srgqn0b4mxZlKifVOKolRGVGzKOvUwwrAVe3wy2IWYkrosQ82kL5kfu1ZHhEa25WAsoqbASUBrVGQURYkqKjZlHXGnOYEt2JX/0l1ABETSmSMHn4klFIz4fld4/38D/8LEahRFUaLMYU0V+f3337nuuu
vo2rUrCQkJOBwOMjMzi+yzbNkyhg0bRosWLUhISKBBgwYMGjSIjRs3FjtfKBRiwoQJNGrUiLi4ONq1a8fbb79dIjdUIXFhrJAMTK2LpEFLwaW0j4n70zbZLlloeUA7YBRwDio0iqIcMw5LbNatW8ebb75JWloap5566kH3mTlzJqtXr+b666/nww8/ZOLEiaxYsYKOHTuyZcuWIvveeeed3H333YwcOZIPP/yQLl26MGDAAObPn//P76ii4sSMUD4BaIzJHJOgvwwyi8FOX45sQ+PAxGSuBC7BZJ8piqIcQxyWZR1qPBdgLBGn0+jSiy++yFVXXcXGjRtp1KhR4T67du2ievXqRY7btGkTjRs3ZuzYsdx7770A7Ny5k/r16zNmzBjuueeewn179OjBrl27WLVq1d8uumPHjixbtuywbrDC4gP2An9gx2CkHY10aU7B1OvURWMyiqJEnUM9mw8rZiNCcyj+LDQADRs2pHr16mzdurVw28KFCykoKGDw4MFF9h08eDBXXHEFGzdupHHjxoezrMpNLKYBZm3skczSrkZa0ehUTEVRyghRnQT/888/s3PnTlq2bFm4bfXq1cTGxtKsWbMi+2ZkZACwZs2aaC6pYiJJA4nhlxRnKoqilBGiJjaBQIARI0ZQvXp1rrzyysLte/fupUqVKjgcRSeDpaenF36vKIqiVCyilvo8cuRIvvrqKz744APS0tL+8fkmT57M5MmTARMfUhRFUcoPUbFsxowZw+TJk5k6dSo9e/Ys8l1aWhpZWVn8OS9BLBqxcP7MsGHDWLZsGcuWLTtofEhRFEUpu5S42IwfP54HH3yQSZMmcemllxb7PiMjA5/Px/r164tsl1hNq1atSnpJiqIoSilTomIzadIkxo4dy/jx4xk5cuRB9+nduzcej4cZM2YU2f7aa6/RunVrzURTFEWpgBx2zOatt94CYPny5QB8+OGHVK9enerVq9OtWzdmzpzJjTfeSO/evTnjjDP45ptvCo9NSUkptFhq1KjBTTfdxIQJE0hOTqZDhw7MmjWLRYsWMW/evJK8N0VRFKWMcNhiM2DAgCKfr7nmGgC6devGkiVLWLBgAZZlsWDBAhYsWFBkX9lHGD9+PElJSTz55JPs2LGD5s2b8+abb3Luuef+g1tRFEVRyiqH1UGgrKEdBBRFUcoeh3o2R7WoU1EURVFAxUZRFEU5BqjYKIqiKFFHxUZRFEWJOio2iqIoStRRsVEURVGijoqNoiiKEnVUbBRFUZSoo2KjKIqiRB0VG0VRFCXqqNgoiqIoUUfFRlEURYk6KjaKoihK1FGxURRFUaJOuRwxUK1aNRITE6levXppLyVq7Nq1q0LfH1T8e9T7K9/o/R05mZmZ7N69+6DflUuxgYo/06ai3x9U/HvU+yvf6P2VLOpGUxRFUaKOio2iKIoSdcqt2AwbNqy0lxBVKvr9QcW/R72/8o3eX8lSbmM2iqIoSvmh3Fo2iqIoSvmhXInNli1b6N+/P6mpqaSkpNCvXz82b95c2ss6Yt566y0uvPBCGjZsSHx8PM2bN+e2224jOzu7yH779u1j6NChhaneZ555Jj/++GMprfqf0bt3bxwOB2PHji2yvbzf4/z58znttNNISkoiJSWFjh07smjRosLvy/P9LV26lJ49e1KjRg2Sk5Pp0KEDU6dOLbKP1+tl9OjR1K5dm/j4eLp27crnn39eSiv+a37//Xeuu+46unbtSkJCAg6Hg8zMzGL7He79hEIhJkyYQKNGjYiLi6Ndu3a8/fbbx+BODs7h3N+yZcsYNmwYLVq0ICEhgQYNGjBo0CA2btxY7HxRuT+rnJCbm2s1a9bMysjIsN59911rzpw5VuvWra0mTZpYOTk5pb28I+Kkk06yBgwYYL322mvWkiVLrMcff9xKTU21TjrpJCsYDFqWZVmhUMg65ZRTrLp161qvv/669eGHH1qnnXaaVbVqVWvLli2lfAdHxuuvv27VqlXLAqw77r
ijcHt5v8fnn3/ecrvd1o033mh99NFH1oIFC6yJEyda7733nmVZ5fv+fvjhBysuLs46/fTTrTlz5lgfffSRNWzYMAuwnn322cL9LrnkEis1NdWaPHmy9cknn1gXXHCBFRcXZ33//felt/iDsHjxYqtGjRrWv//9b6tnz54WYG3cuLHYfod7P7fffrsVExNjPfzww9aiRYusYcOGWQ6Hw/rggw+OzQ39icO5v//+97/WySefbD3zzDPWkiVLrBkzZlgtWrSw0tPTrc2bNxfZNxr3V27E5oknnrCcTqe1du3awm0bNmywXC6X9eijj5biyo6cnTt3Ftv2yiuvWID16aefWpZlWXPmzLEAa9GiRYX7ZGVlWWlpadZ11113zNb6T9m7d69Vs2ZN6/XXXy8mNuX5Hjdu3GjFxcVZjz/++F/uU57v77bbbrM8Ho+VnZ1dZHuXLl2sLl26WJZlWStXrrQAa+rUqYXf+/1+6/jjj7f69OlzTNf7d8gvcZZlWVOmTDnow/hw7+ePP/6wYmJirHHjxhU5/owzzrDatGkTnRv4Gw7n/g723MnMzLQcDod15513Fm6L1v2VGzfavHnz6NKlC82aNSvc1rhxY0455RTmzp1biis7cg5WtdupUycAtm7dCpj7rVOnDt27dy/cJzU1lT59+pSr+7311ltp3bo1AwcOLPZdeb7HqVOn4nQ6GTFixF/uU57vr6CgAI/HQ3x8fJHtqamphEIhwNyfx+PhoosuKvze7XZz8cUXs3DhQnw+3zFd86FwOv/+UXe497Nw4UIKCgoYPHhwkeMHDx7Mjz/+eFC3VLQ5nPs72HOnYcOGVK9evfC5A9G7v3IjNqtXr6Z169bFtmdkZLBmzZpSWFHJ8tlnnwHQsmVL4ND3u3nzZnJyco7p+o6GL7/8kunTp/PMM88c9PvyfI9ffvklLVq0YObMmTRt2hS3202zZs2K3Gt5vr8hQ4YAcP3117Nt2zaysrKYMmUKn376KaNGjQLM/TVu3JiEhIQix2ZkZFBQUMC6deuO9bL/EYd7P6tXryY2NrbIL76yH1Cunkc///wzO3fuLHzuQPTur9yIzd69e0lLSyu2PT09nX379pXCikqOrVu3Mm7cOM4880w6duwIHPp+gTJ/zwUFBQwfPpybb76Z5s2bH3Sf8nyP27ZtY+3atYwePZoxY8bw0UcfcdZZZzFy5EiefPJJoHzfX+vWrVmyZAlz586lbt26pKWlce211/L8889z8cUXA39/f3v37j2ma/6nHO797N27lypVquBwOA65X1knEAgwYsQIqlevzpVXXlm4PVr35z76pSolQU5ODueddx5ut5tp06aV9nJKjIceeoj8/HzuuOOO0l5KVAiFQmRnZ/Pyyy/Tr18/AM444wwyMzOZMGEC119/fSmv8J+xdu1aLrzwQjIyMnj++eeJj49n7ty5jBgxgri4OAYNGlTaS1T+ISNHjuSrr77igw8+OKjIljTlRmzS0tIO+pvgX/02Uh7Iz8+nT58+bNiwgc8++4x69eoVfneo+5XvyyqbN29m/PjxvPjii/h8viK+e5/PR1ZWFsnJyeX6HqtWrcratWs566yzimzv2bMnCxYsYPv27eX6/m6//XY8Hg/vv/8+Ho8HgB49erBnzx5uuOEGBg4cSFpaGps2bSp2rNyf/CZcXjjc+0lLSyMrKwvLsor89l+e7nvMmDFMnjyZV155hZ49exb5Llr3V27caBkZGaxevbrY9jVr1tCqVatSWNE/w+/3079/f5YtW8b8+fNp06ZNke8Pdb8NGjQgKSnpWC31iNmwYQNer5fBgweTlpZW+AJ45JFHSEtL48cffyzX9yj+67/C6XSW6/v78ccfadeuXaHQCJ07d2bPnj3s3LmTjIwMNm7cSF5eXpF91qxZQ0xMTDGff1nncO8nIyMDn8/H+vXri+0HlPnn0fjx43nwwQeZNGkSl156abHvo3Z/R53Hdox5/PHHLZfLZa1fv75w28
aNGy2322098sgjpbiyIycYDFoDBgyw4uLirE8++eSg+7z77rsWYC1ZsqRw2/79+6309HRr5MiRx2qpR8W+ffusxYsXF3sB1uDBg63Fixdb2dnZ5foe33//fQuwZs+eXWR7z549rXr16lmWVb7/Drt162Y1btzY8vl8RbYPHDjQiouLs3w+n7VixQoLsF5++eXC7/1+v9WiRQvr3HPPPdZLPmz+KjX4cO/njz/+sDwej3X33XcXOb5Hjx5W69ato7r2w+Gv7s+yLOvJJ5+0AGv8+PF/eXy07q/ciE1OTo7VtGlTq3Xr1tacOXOsuXPnWm3btrUaN25crBagrDNixIjCmpOvv/66yEuK/YLBoNW1a1erXr161htvvGEtWLDA6tatm5WWllasAKu8wJ/qbMrzPYZCIat79+5Wenq69dxzz1kLFy60hg4dagHWtGnTLMsq3/c3e/ZsC7B69uxpzZkzx1q4cKF17bXXWoA1atSowv0uuugiq0qVKtaUKVOsTz75xLrwwgut2NhYa/ny5aW4+oMze/Zsa/bs2YX//5599llr9uzZRX4ZONz7ufXWW63Y2Fjr0UcftRYvXmyNGDHCcjgchQW9pcHf3d8bb7xhORwOq3fv3sWeO6tXry5yrmjcX7kRG8uyrE2bNln9+vWzkpOTraSkJOu88847qHqXdRo2bGgBB33dddddhfvt2bPHuvzyy620tDQrPj7eOuOMM6yVK1eW3sL/IX8WG8sq3/e4f/9+65prrrFq1KhheTweq02bNtaMGTOK7FOe72/+/PlWt27drGrVqllJSUlWu3btrGeeecYKBAKF++Tl5VmjRo2yatasacXGxlqdO3e2Fi9eXHqLPgR/9X+uW7duhfsc7v0EAgHrvvvusxo0aGDFxMRYbdq0KWblHmv+7v4uu+yyw/ozsKzo3J92fVYURVGiTrlJEFAURVHKLyo2iqIoStRRsVEURVGijoqNoiiKEnVUbBRFUZSoo2KjKIqiRB0VG0VRFCXqqNgoiqIoUUfFRlEURYk6/w8uxI58jQoIOgAAAABJRU5ErkJggg==", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "planner_inputs.update(initial_trajectory_dicts[best_epoch])\n", + "planner.layer.optimizer.set_params(max_iterations=10)\n", + "solution_dict, info = planner.layer.forward(\n", + " planner_inputs,\n", + " verbose=False,\n", + " damping=0.1,\n", + ")\n", + "plot_trajectories(\n", + " initial_trajectory_dicts[best_epoch], solution_dict, include_expert=True)" + ] + } + ], + "metadata": { + "interpreter": { + "hash": "79897f2dca37465f1a50ce007bdb1248c5125cbdf40b2afbe1ada0fadb4cca51" + }, + "kernelspec": { + "display_name": "Theseus", + "language": "python", + "name": "theseus" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.7.8" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/tutorials/README.md b/tutorials/README.md new file mode 100644 index 000000000..79c6bbbcd --- /dev/null +++ b/tutorials/README.md @@ -0,0 +1,7 @@ +Theseus includes a number of tutorials to help a user get started: +- [Tutorial 0](https://github.com/facebookresearch/theseus/blob/main/tutorials/00_introduction.ipynb) introduces Theseus and its fundamental concepts, and shows how to use its different basic building blocks. +- [Tutorial 1](https://github.com/facebookresearch/theseus/blob/main/tutorials/01_least_squares_optimization.ipynb) describes how to model and solve a simple least-squares optimization problem. +- [Tutorial 2](https://github.com/facebookresearch/theseus/blob/main/tutorials/02_differentiable_nlls.ipynb) describes how to model and solve a collection of least-squares optimization problems with shared parameters. 
+- [Tutorial 3](https://github.com/facebookresearch/theseus/blob/main/tutorials/03_custom_cost_functions.ipynb) describes how to write custom cost functions for use in Theseus optimization problems. +- [Tutorial 4](https://github.com/facebookresearch/theseus/blob/main/tutorials/04_motion_planning.ipynb) shows how to implement GPMP2 motion-planning algorithm [(Mukadam et al 2018)](https://arxiv.org/abs/1707.07383). +- [Tutorial 5](https://github.com/facebookresearch/theseus/blob/main/tutorials/05_differentiable_motion_planning.ipynb) shows how to implement a differentiable motion planner, similar to dGPMP2 [(Bhardwaj et al 2020)](https://arxiv.org/pdf/1907.09591.pdf). diff --git a/tutorials/data/motion_planning_2d/im_sdf/tarpit/0_im.png b/tutorials/data/motion_planning_2d/im_sdf/tarpit/0_im.png new file mode 100644 index 000000000..275acb43d Binary files /dev/null and b/tutorials/data/motion_planning_2d/im_sdf/tarpit/0_im.png differ diff --git a/tutorials/data/motion_planning_2d/im_sdf/tarpit/0_sdf.npy b/tutorials/data/motion_planning_2d/im_sdf/tarpit/0_sdf.npy new file mode 100644 index 000000000..317ba42b6 Binary files /dev/null and b/tutorials/data/motion_planning_2d/im_sdf/tarpit/0_sdf.npy differ diff --git a/tutorials/data/motion_planning_2d/im_sdf/tarpit/1_im.png b/tutorials/data/motion_planning_2d/im_sdf/tarpit/1_im.png new file mode 100644 index 000000000..56cf1de89 Binary files /dev/null and b/tutorials/data/motion_planning_2d/im_sdf/tarpit/1_im.png differ diff --git a/tutorials/data/motion_planning_2d/im_sdf/tarpit/1_sdf.npy b/tutorials/data/motion_planning_2d/im_sdf/tarpit/1_sdf.npy new file mode 100644 index 000000000..4190b8f57 Binary files /dev/null and b/tutorials/data/motion_planning_2d/im_sdf/tarpit/1_sdf.npy differ diff --git a/tutorials/data/motion_planning_2d/meta.yaml b/tutorials/data/motion_planning_2d/meta.yaml new file mode 100644 index 000000000..710da9537 --- /dev/null +++ b/tutorials/data/motion_planning_2d/meta.yaml @@ -0,0 +1,12 @@ 
+env_params: + padlen: 11 + x_lims: + - -5.0 + - 5.0 + y_lims: + - -5.0 + - 5.0 +im_size: 128 +num_envs: 2 +probs_per_env: 1 +map_types: [tarpit] diff --git a/tutorials/data/motion_planning_2d/opt_trajs_gpmp2/tarpit/env_0_prob_0.npz b/tutorials/data/motion_planning_2d/opt_trajs_gpmp2/tarpit/env_0_prob_0.npz new file mode 100644 index 000000000..f39e6cbea Binary files /dev/null and b/tutorials/data/motion_planning_2d/opt_trajs_gpmp2/tarpit/env_0_prob_0.npz differ diff --git a/tutorials/data/motion_planning_2d/opt_trajs_gpmp2/tarpit/env_1_prob_0.npz b/tutorials/data/motion_planning_2d/opt_trajs_gpmp2/tarpit/env_1_prob_0.npz new file mode 100644 index 000000000..37b44c69c Binary files /dev/null and b/tutorials/data/motion_planning_2d/opt_trajs_gpmp2/tarpit/env_1_prob_0.npz differ diff --git a/tutorials/data/motion_planning_2d/opt_trajs_gpmp2/tarpit/image_env_0_prob_0.png b/tutorials/data/motion_planning_2d/opt_trajs_gpmp2/tarpit/image_env_0_prob_0.png new file mode 100644 index 000000000..ba32cc1a2 Binary files /dev/null and b/tutorials/data/motion_planning_2d/opt_trajs_gpmp2/tarpit/image_env_0_prob_0.png differ diff --git a/tutorials/data/motion_planning_2d/opt_trajs_gpmp2/tarpit/image_env_1_prob_0.png b/tutorials/data/motion_planning_2d/opt_trajs_gpmp2/tarpit/image_env_1_prob_0.png new file mode 100644 index 000000000..65d29b397 Binary files /dev/null and b/tutorials/data/motion_planning_2d/opt_trajs_gpmp2/tarpit/image_env_1_prob_0.png differ diff --git a/tutorials/fig/theseus_objective.png b/tutorials/fig/theseus_objective.png new file mode 100644 index 000000000..501487233 Binary files /dev/null and b/tutorials/fig/theseus_objective.png differ diff --git a/version.txt b/version.txt new file mode 100644 index 000000000..f74ea74a6 --- /dev/null +++ b/version.txt @@ -0,0 +1 @@ +0.1.0-b.1