From 086b36424d342d4345dfd06f8da57509a3eb8342 Mon Sep 17 00:00:00 2001 From: happinesslz Date: Wed, 21 Dec 2022 21:04:58 +0800 Subject: [PATCH] release code --- LICENSE | 21 + LICENSE.PointRCNN | 21 + README.PointRCNN.md | 175 + README_epnet_plus_plus.md | 115 + build_and_install.sh | 14 + data/KITTI/ImageSets/test.txt | 7518 +++++++++++++++++ data/KITTI/ImageSets/train.txt | 3712 ++++++++ data/KITTI/ImageSets/trainval.txt | 7481 ++++++++++++++++ data/KITTI/ImageSets/val.txt | 3769 +++++++++ img/framework.png | Bin 0 -> 205992 bytes lib/config.py | 320 + lib/datasets/kitti_dataset.py | 181 + lib/datasets/kitti_rcnn_dataset.py | 1474 ++++ lib/net/cross_entropy_loss.py | 196 + lib/net/dice_loss.py | 117 + lib/net/lovasz_loss.py | 300 + lib/net/point_rcnn.py | 112 + lib/net/pointnet2_msg.py | 413 + lib/net/rcnn_net.py | 443 + lib/net/rpn.py | 145 + lib/net/sa_block.py | 144 + lib/net/seg_utils.py | 101 + lib/net/self_attention.py | 40 + lib/net/train_functions.py | 382 + lib/rpn/proposal_layer.py | 142 + lib/rpn/proposal_target_layer.py | 369 + lib/utils/bbox_transform.py | 260 + lib/utils/calibration.py | 140 + lib/utils/iou3d/iou3d_utils.py | 91 + lib/utils/iou3d/setup.py | 14 + lib/utils/iou3d/src/iou3d.cpp | 180 + lib/utils/iou3d/src/iou3d_kernel.cu | 387 + lib/utils/kitti_utils.py | 238 + lib/utils/loss_utils.py | 350 + lib/utils/object3d.py | 103 + lib/utils/roipool3d/roipool3d_utils.py | 112 + lib/utils/roipool3d/setup.py | 14 + lib/utils/roipool3d/src/roipool3d.cpp | 204 + lib/utils/roipool3d/src/roipool3d_kernel.cu | 237 + lib/utils/sample2grid/Bilinear_voxel_gpu.cu | 228 + lib/utils/sample2grid/Gaussian_voxel_gpu.cu | 215 + lib/utils/sample2grid/Voxel_gpu.cu | 203 + lib/utils/sample2grid/__init__.py | 9 + lib/utils/sample2grid/myGridSampler.cuh | 57 + lib/utils/sample2grid/setup.py | 44 + lib/utils/sample2grid/voxel_cuda.py | 190 + pointnet2_lib/.gitignore | 4 + pointnet2_lib/LICENSE | 21 + pointnet2_lib/README.md | 51 + pointnet2_lib/pointnet2/pointnet2_modules.py | 177 + pointnet2_lib/pointnet2/pointnet2_utils.py | 290 + pointnet2_lib/pointnet2/pytorch_utils.py | 235 + pointnet2_lib/pointnet2/setup.py | 23 + pointnet2_lib/pointnet2/src/ball_query.cpp | 25 + pointnet2_lib/pointnet2/src/ball_query_gpu.cu | 67 + pointnet2_lib/pointnet2/src/ball_query_gpu.h | 15 + pointnet2_lib/pointnet2/src/cuda_utils.h | 15 + pointnet2_lib/pointnet2/src/group_points.cpp | 36 + .../pointnet2/src/group_points_gpu.cu | 86 + .../pointnet2/src/group_points_gpu.h | 22 + pointnet2_lib/pointnet2/src/interpolate.cpp | 54 + .../pointnet2/src/interpolate_gpu.cu | 161 + pointnet2_lib/pointnet2/src/interpolate_gpu.h | 30 + pointnet2_lib/pointnet2/src/pointnet2_api.cpp | 24 + pointnet2_lib/pointnet2/src/sampling.cpp | 46 + pointnet2_lib/pointnet2/src/sampling_gpu.cu | 253 + pointnet2_lib/pointnet2/src/sampling_gpu.h | 29 + pointnet2_lib/tools/_init_path.py | 3 + .../tools/data/KITTI/ImageSets/test.txt | 7518 +++++++++++++++++ .../tools/data/KITTI/ImageSets/train.txt | 3712 ++++++++ .../tools/data/KITTI/ImageSets/trainval.txt | 7481 ++++++++++++++++ .../tools/data/KITTI/ImageSets/val.txt | 3769 +++++++++ pointnet2_lib/tools/dataset.py | 187 + pointnet2_lib/tools/kitti_utils.py | 231 + pointnet2_lib/tools/pointnet2_msg.py | 90 + pointnet2_lib/tools/train_and_eval.py | 216 + requirements.txt | 17 + tools/_init_path.py | 5 + tools/cfgs/CAR_EPNet_plus_plus.yaml | 221 + tools/cfgs/CYC_EPNet_plus_plus.yaml | 215 + tools/cfgs/PED_EPNet_plus_plus.yaml | 216 + tools/eval_rcnn.py | 1026 +++ tools/generate_aug_scene.py | 325 
+ tools/generate_gt_database.py | 121 + tools/kitti_object_eval_python/LICENSE | 21 + tools/kitti_object_eval_python/README.md | 32 + tools/kitti_object_eval_python/eval.py | 744 ++ tools/kitti_object_eval_python/evaluate.py | 32 + .../kitti_object_eval_python/kitti_common.py | 416 + tools/kitti_object_eval_python/rotate_iou.py | 332 + tools/run_all_eval_epnet_plus_plus_models.sh | 23 + .../run_train_and_eval_epnet_plus_plus_car.sh | 22 + .../run_train_and_eval_epnet_plus_plus_cyc.sh | 22 + .../run_train_and_eval_epnet_plus_plus_ped.sh | 22 + tools/run_train_epnet_plus_plus_car.sh | 23 + tools/run_train_epnet_plus_plus_car_2.sh | 23 + tools/run_train_epnet_plus_plus_car_3.sh | 23 + tools/run_train_epnet_plus_plus_car_4.sh | 23 + ...n_train_epnet_plus_plus_car_del_mc_loss.sh | 23 + ...rain_epnet_plus_plus_car_only_cb_fusion.sh | 18 + ...epnet_plus_plus_car_only_cb_fusion_run2.sh | 18 + .../run_train_epnet_plus_plus_car_run_80ep.sh | 24 + tools/run_train_epnet_plus_plus_cyc.sh | 44 + tools/run_train_epnet_plus_plus_cyc_run2.sh | 26 + tools/run_train_epnet_plus_plus_ped.sh | 21 + tools/run_train_epnet_plus_plus_ped_2.sh | 21 + tools/run_train_epnet_plus_plus_ped_3.sh | 21 + ...rain_epnet_plus_plus_ped_only_cb_fusion.sh | 79 + tools/train_rcnn.py | 279 + tools/train_utils/fastai_optim.py | 261 + .../train_utils/learning_schedules_fastai.py | 98 + tools/train_utils/train_utils.py | 242 + 112 files changed, 60931 insertions(+) create mode 100644 LICENSE create mode 100644 LICENSE.PointRCNN create mode 100644 README.PointRCNN.md create mode 100644 README_epnet_plus_plus.md create mode 100644 build_and_install.sh create mode 100644 data/KITTI/ImageSets/test.txt create mode 100644 data/KITTI/ImageSets/train.txt create mode 100644 data/KITTI/ImageSets/trainval.txt create mode 100644 data/KITTI/ImageSets/val.txt create mode 100644 img/framework.png create mode 100644 lib/config.py create mode 100644 lib/datasets/kitti_dataset.py create mode 100644 lib/datasets/kitti_rcnn_dataset.py create mode 100644 lib/net/cross_entropy_loss.py create mode 100644 lib/net/dice_loss.py create mode 100644 lib/net/lovasz_loss.py create mode 100644 lib/net/point_rcnn.py create mode 100644 lib/net/pointnet2_msg.py create mode 100644 lib/net/rcnn_net.py create mode 100644 lib/net/rpn.py create mode 100644 lib/net/sa_block.py create mode 100644 lib/net/seg_utils.py create mode 100644 lib/net/self_attention.py create mode 100644 lib/net/train_functions.py create mode 100644 lib/rpn/proposal_layer.py create mode 100644 lib/rpn/proposal_target_layer.py create mode 100644 lib/utils/bbox_transform.py create mode 100644 lib/utils/calibration.py create mode 100644 lib/utils/iou3d/iou3d_utils.py create mode 100644 lib/utils/iou3d/setup.py create mode 100644 lib/utils/iou3d/src/iou3d.cpp create mode 100644 lib/utils/iou3d/src/iou3d_kernel.cu create mode 100644 lib/utils/kitti_utils.py create mode 100644 lib/utils/loss_utils.py create mode 100644 lib/utils/object3d.py create mode 100644 lib/utils/roipool3d/roipool3d_utils.py create mode 100644 lib/utils/roipool3d/setup.py create mode 100644 lib/utils/roipool3d/src/roipool3d.cpp create mode 100644 lib/utils/roipool3d/src/roipool3d_kernel.cu create mode 100644 lib/utils/sample2grid/Bilinear_voxel_gpu.cu create mode 100644 lib/utils/sample2grid/Gaussian_voxel_gpu.cu create mode 100644 lib/utils/sample2grid/Voxel_gpu.cu create mode 100644 lib/utils/sample2grid/__init__.py create mode 100644 lib/utils/sample2grid/myGridSampler.cuh create mode 100644 lib/utils/sample2grid/setup.py create 
mode 100644 lib/utils/sample2grid/voxel_cuda.py create mode 100644 pointnet2_lib/.gitignore create mode 100644 pointnet2_lib/LICENSE create mode 100644 pointnet2_lib/README.md create mode 100644 pointnet2_lib/pointnet2/pointnet2_modules.py create mode 100644 pointnet2_lib/pointnet2/pointnet2_utils.py create mode 100644 pointnet2_lib/pointnet2/pytorch_utils.py create mode 100644 pointnet2_lib/pointnet2/setup.py create mode 100644 pointnet2_lib/pointnet2/src/ball_query.cpp create mode 100644 pointnet2_lib/pointnet2/src/ball_query_gpu.cu create mode 100644 pointnet2_lib/pointnet2/src/ball_query_gpu.h create mode 100644 pointnet2_lib/pointnet2/src/cuda_utils.h create mode 100644 pointnet2_lib/pointnet2/src/group_points.cpp create mode 100644 pointnet2_lib/pointnet2/src/group_points_gpu.cu create mode 100644 pointnet2_lib/pointnet2/src/group_points_gpu.h create mode 100644 pointnet2_lib/pointnet2/src/interpolate.cpp create mode 100644 pointnet2_lib/pointnet2/src/interpolate_gpu.cu create mode 100644 pointnet2_lib/pointnet2/src/interpolate_gpu.h create mode 100644 pointnet2_lib/pointnet2/src/pointnet2_api.cpp create mode 100644 pointnet2_lib/pointnet2/src/sampling.cpp create mode 100644 pointnet2_lib/pointnet2/src/sampling_gpu.cu create mode 100644 pointnet2_lib/pointnet2/src/sampling_gpu.h create mode 100644 pointnet2_lib/tools/_init_path.py create mode 100644 pointnet2_lib/tools/data/KITTI/ImageSets/test.txt create mode 100644 pointnet2_lib/tools/data/KITTI/ImageSets/train.txt create mode 100644 pointnet2_lib/tools/data/KITTI/ImageSets/trainval.txt create mode 100644 pointnet2_lib/tools/data/KITTI/ImageSets/val.txt create mode 100644 pointnet2_lib/tools/dataset.py create mode 100644 pointnet2_lib/tools/kitti_utils.py create mode 100644 pointnet2_lib/tools/pointnet2_msg.py create mode 100644 pointnet2_lib/tools/train_and_eval.py create mode 100644 requirements.txt create mode 100644 tools/_init_path.py create mode 100644 tools/cfgs/CAR_EPNet_plus_plus.yaml create mode 100644 tools/cfgs/CYC_EPNet_plus_plus.yaml create mode 100644 tools/cfgs/PED_EPNet_plus_plus.yaml create mode 100644 tools/eval_rcnn.py create mode 100644 tools/generate_aug_scene.py create mode 100644 tools/generate_gt_database.py create mode 100644 tools/kitti_object_eval_python/LICENSE create mode 100644 tools/kitti_object_eval_python/README.md create mode 100644 tools/kitti_object_eval_python/eval.py create mode 100644 tools/kitti_object_eval_python/evaluate.py create mode 100644 tools/kitti_object_eval_python/kitti_common.py create mode 100644 tools/kitti_object_eval_python/rotate_iou.py create mode 100644 tools/run_all_eval_epnet_plus_plus_models.sh create mode 100644 tools/run_train_and_eval_epnet_plus_plus_car.sh create mode 100644 tools/run_train_and_eval_epnet_plus_plus_cyc.sh create mode 100644 tools/run_train_and_eval_epnet_plus_plus_ped.sh create mode 100644 tools/run_train_epnet_plus_plus_car.sh create mode 100644 tools/run_train_epnet_plus_plus_car_2.sh create mode 100644 tools/run_train_epnet_plus_plus_car_3.sh create mode 100644 tools/run_train_epnet_plus_plus_car_4.sh create mode 100644 tools/run_train_epnet_plus_plus_car_del_mc_loss.sh create mode 100644 tools/run_train_epnet_plus_plus_car_only_cb_fusion.sh create mode 100644 tools/run_train_epnet_plus_plus_car_only_cb_fusion_run2.sh create mode 100644 tools/run_train_epnet_plus_plus_car_run_80ep.sh create mode 100644 tools/run_train_epnet_plus_plus_cyc.sh create mode 100644 tools/run_train_epnet_plus_plus_cyc_run2.sh create mode 100644 
tools/run_train_epnet_plus_plus_ped.sh create mode 100644 tools/run_train_epnet_plus_plus_ped_2.sh create mode 100644 tools/run_train_epnet_plus_plus_ped_3.sh create mode 100644 tools/run_train_epnet_plus_plus_ped_only_cb_fusion.sh create mode 100644 tools/train_rcnn.py create mode 100644 tools/train_utils/fastai_optim.py create mode 100644 tools/train_utils/learning_schedules_fastai.py create mode 100644 tools/train_utils/train_utils.py
diff --git a/LICENSE b/LICENSE
new file mode 100644
index 0000000..d718874
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2020 Zhe Liu
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/LICENSE.PointRCNN b/LICENSE.PointRCNN
new file mode 100644
index 0000000..77c8ebe
--- /dev/null
+++ b/LICENSE.PointRCNN
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2019 Shaoshuai Shi
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/README.PointRCNN.md b/README.PointRCNN.md
new file mode 100644
index 0000000..0d033bc
--- /dev/null
+++ b/README.PointRCNN.md
@@ -0,0 +1,175 @@
+# PointRCNN
+
+## PointRCNN: 3D Object Proposal Generation and Detection from Point Cloud
+![teaser](https://github.com/sshaoshuai/PointRCNN/blob/master/doc/teaser.png)
+
+Code release for the paper **PointRCNN: 3D Object Proposal Generation and Detection from Point Cloud**, CVPR 2019.
+
+**Authors**: [Shaoshuai Shi](https://sshaoshuai.github.io/), [Xiaogang Wang](http://www.ee.cuhk.edu.hk/~xgwang/), [Hongsheng Li](http://www.ee.cuhk.edu.hk/~hsli/).
+
+[[arXiv]](https://arxiv.org/abs/1812.04244)  [[Project Page]](#)
+
+
+**New:** We have provided another implementation of PointRCNN, which supports joint multi-class training, in the general 3D object detection toolbox [[OpenPCDet]](https://github.com/open-mmlab/OpenPCDet).
+
+
+## Introduction
+In this work, we propose the PointRCNN 3D object detector, which directly generates accurate 3D box proposals from the raw point cloud in a bottom-up manner; the proposals are then refined in canonical coordinates with the proposed bin-based 3D box regression loss.
+To the best of our knowledge, PointRCNN is **the first two-stage 3D object detector** that uses only the raw point cloud as input. PointRCNN is evaluated on the KITTI dataset and achieves state-of-the-art performance on the KITTI 3D object detection [leaderboard](http://www.cvlibs.net/datasets/kitti/eval_object.php?obj_benchmark=3d) among all published works at the time of submission.
+
+For more details of PointRCNN, please refer to [our paper](https://arxiv.org/abs/1812.04244) or the [project page](#).
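+
+To make the bin-based localization idea concrete, here is a minimal, hypothetical sketch of the loss (function names and bin settings are illustrative only, not the repository's actual implementation): the offset of the box center along an axis is classified into a discretized bin, and a small residual is regressed within that bin.
+
+```python
+import torch
+import torch.nn.functional as F
+
+# Illustrative bin settings: search +-3m around the foreground point,
+# split into bins of 0.5m (assumed values, not the shipped config).
+SEARCH_RANGE, BIN_SIZE = 3.0, 0.5
+NUM_BINS = int(2 * SEARCH_RANGE / BIN_SIZE)  # 12 bins per axis
+
+def encode_bin_targets(offset):
+    """Encode a 1D center offset (e.g., along x or z) as (bin id, residual)."""
+    shifted = torch.clamp(offset + SEARCH_RANGE, 0, 2 * SEARCH_RANGE - 1e-4)
+    bin_id = (shifted / BIN_SIZE).floor().long()            # classification target
+    residual = shifted - (bin_id.float() + 0.5) * BIN_SIZE  # in-bin regression target
+    return bin_id, residual / BIN_SIZE                      # normalized residual
+
+def bin_based_loss(bin_logits, residual_pred, offset_gt):
+    """Cross-entropy over bins plus smooth-L1 on the residual of the GT bin."""
+    bin_gt, residual_gt = encode_bin_targets(offset_gt)     # (N,), (N,)
+    cls_loss = F.cross_entropy(bin_logits, bin_gt)          # bin_logits: (N, NUM_BINS)
+    # Only the residual predicted for the ground-truth bin is supervised.
+    picked = residual_pred.gather(1, bin_gt.unsqueeze(1)).squeeze(1)
+    reg_loss = F.smooth_l1_loss(picked, residual_gt)
+    return cls_loss + reg_loss
+```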
+
+### Supported features and ToDo list
+- [x] Multiple GPUs for training
+- [x] GPU version rotated NMS
+- [x] Faster PointNet++ inference and training supported by [Pointnet2.PyTorch](https://github.com/sshaoshuai/Pointnet2.PyTorch)
+- [x] PyTorch 1.0
+- [x] TensorboardX
+- [ ] Still in progress
+
+## Installation
+### Requirements
+All the code is tested in the following environment:
+* Linux (tested on Ubuntu 14.04/16.04)
+* Python 3.6+
+* PyTorch 1.0
+
+### Install PointRCNN
+
+a. Clone the PointRCNN repository.
+```shell
+git clone --recursive https://github.com/sshaoshuai/PointRCNN.git
+```
+If you forget to add the `--recursive` parameter, just run the following command to clone the `Pointnet2.PyTorch` submodule:
+```shell
+git submodule update --init --recursive
+```
+
+b. Install the dependent Python libraries such as `easydict`, `tqdm`, `tensorboardX`, etc.
+
+c. Build and install the `pointnet2_lib`, `iou3d`, and `roipool3d` libraries by executing the following command:
+```shell
+sh build_and_install.sh
+```
+
+## Dataset preparation
+Please download the official [KITTI 3D object detection](http://www.cvlibs.net/datasets/kitti/eval_object.php?obj_benchmark=3d) dataset and organize the downloaded files as follows:
+```
+PointRCNN
+├── data
+│   ├── KITTI
+│   │   ├── ImageSets
+│   │   ├── object
+│   │   │   ├──training
+│   │   │   │   ├──calib & velodyne & label_2 & image_2 & (optional: planes)
+│   │   │   ├──testing
+│   │   │   │   ├──calib & velodyne & image_2
+├── lib
+├── pointnet2_lib
+├── tools
+```
+Here the images are only used for visualization, and the [road planes](https://drive.google.com/file/d/1d5mq0RXRnvHPVeKx6Q612z0YRO1t2wAp/view?usp=sharing) are optional for data augmentation during training.
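+
+As a quick sanity check of the layout above, a short hypothetical helper (illustrative only, not shipped with this repo) can verify that every sample id listed in an ImageSets split file has its calib, LiDAR, label, and image files in place:
+
+```python
+import os
+
+ROOT = 'data/KITTI/object'  # assumed location; adjust to your checkout
+
+def check_split(split='train'):
+    """Report samples in a split whose KITTI files are missing on disk."""
+    ids = [line.strip() for line in open('data/KITTI/ImageSets/%s.txt' % split)]
+    required = [('calib', '.txt'), ('velodyne', '.bin'),
+                ('label_2', '.txt'), ('image_2', '.png')]
+    missing = [(idx, sub) for idx in ids for sub, ext in required
+               if not os.path.exists(os.path.join(ROOT, 'training', sub, idx + ext))]
+    print('%s: %d samples, %d missing files' % (split, len(ids), len(missing)))
+    return missing
+
+if __name__ == '__main__':
+    check_split('train')
+    check_split('val')
+```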
+
+
+## Pretrained model
+You could download the pretrained model (Car) of PointRCNN from [here (~15MB)](https://drive.google.com/file/d/1aapMXBkSn5c5hNTDdRNI74Ptxfny7PuC/view?usp=sharing), which is trained on the *train* split (3712 samples) and evaluated on the *val* split (3769 samples) and the *test* split (7518 samples). The performance on the validation set is as follows:
+```
+Car AP@0.70, 0.70, 0.70:
+bbox AP:96.91, 89.53, 88.74
+bev  AP:90.21, 87.89, 85.51
+3d   AP:89.19, 78.85, 77.91
+aos  AP:96.90, 89.41, 88.54
+```
+### Quick demo
+You could run the following command to evaluate the pretrained model (set `RPN.LOC_XZ_FINE=False`, since this checkpoint differs slightly from the default configuration):
+```
+python eval_rcnn.py --cfg_file cfgs/default.yaml --ckpt PointRCNN.pth --batch_size 1 --eval_mode rcnn --set RPN.LOC_XZ_FINE False
+```
+
+## Inference
+* To evaluate a single checkpoint, run the following command with `--ckpt` to specify the checkpoint to be evaluated:
+```
+python eval_rcnn.py --cfg_file cfgs/default.yaml --ckpt ../output/rpn/ckpt/checkpoint_epoch_200.pth --batch_size 4 --eval_mode rcnn
+```
+
+* To evaluate all the checkpoints of a specific training config file, add the `--eval_all` argument and run the command as follows:
+```
+python eval_rcnn.py --cfg_file cfgs/default.yaml --eval_mode rcnn --eval_all
+```
+
+* To generate the results on the *test* split, please set `TEST.SPLIT=TEST` and add the `--test` argument.
+
+Here you could specify a bigger `--batch_size` for faster inference, depending on your GPU memory. Note that the `--eval_mode` argument should be consistent with the `--train_mode` used in the training process. If you are using `--eval_mode=rcnn_offline`, then you should use `--rcnn_eval_roi_dir` and `--rcnn_eval_feature_dir` to specify the saved features and proposals of the validation set. Please refer to the training section for more details.
+
+## Training
+Currently, the two stages of PointRCNN are trained separately. First, to use the ground-truth sampling data augmentation for training, we should generate the ground-truth database as follows:
+```
+python generate_gt_database.py --class_name 'Car' --split train
+```
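+
+For intuition, ground-truth sampling augmentation later pastes object points (with their boxes) from this database into training scenes. Below is a simplified, hypothetical sketch of the idea; the field names are assumptions rather than the repo's actual database format, and collision checks and ground alignment are omitted:
+
+```python
+import numpy as np
+
+def gt_sampling_augment(scene_pts, scene_boxes, gt_database, num_extra=5, rng=None):
+    """Paste `num_extra` database objects into the current scene.
+
+    Each database entry is assumed to hold 'points' (the LiDAR points inside
+    the object's box) and 'box3d' (a 7-dim box); a real implementation also
+    rejects samples whose boxes collide with existing objects.
+    """
+    rng = rng or np.random.default_rng()
+    picks = rng.choice(len(gt_database), size=min(num_extra, len(gt_database)),
+                       replace=False)
+    extra_pts = [gt_database[i]['points'] for i in picks]
+    extra_boxes = [gt_database[i]['box3d'] for i in picks]
+    scene_pts = np.concatenate([scene_pts] + extra_pts, axis=0)
+    scene_boxes = np.concatenate([scene_boxes, np.stack(extra_boxes)], axis=0)
+    return scene_pts, scene_boxes
+```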
+
+### Training of RPN stage
+* To train the first proposal-generation stage of PointRCNN with a single GPU, run the following command:
+```
+python train_rcnn.py --cfg_file cfgs/default.yaml --batch_size 16 --train_mode rpn --epochs 200
+```
+
+* To use **multiple GPUs for training**, simply add the `--mgpus` argument as follows:
+```
+CUDA_VISIBLE_DEVICES=0,1 python train_rcnn.py --cfg_file cfgs/default.yaml --batch_size 16 --train_mode rpn --epochs 200 --mgpus
+```
+
+After training, the checkpoints and training logs will be saved to the directory corresponding to the name of your configuration file. For example, for `default.yaml`, you could find the checkpoints and logs in the following directory:
+```
+PointRCNN/output/rpn/default/
+```
+which will be used for the training of the RCNN stage.
+
+### Training of RCNN stage
+Suppose you have a well-trained RPN model saved at `output/rpn/default/ckpt/checkpoint_epoch_200.pth`;
+then there are two strategies to train the second stage of PointRCNN.
+
+(a) Train the RCNN network with a fixed RPN network and online GT augmentation: use `--rpn_ckpt` to specify the path of the well-trained RPN model and run the command as follows:
+```
+python train_rcnn.py --cfg_file cfgs/default.yaml --batch_size 4 --train_mode rcnn --epochs 70 --ckpt_save_interval 2 --rpn_ckpt ../output/rpn/default/ckpt/checkpoint_epoch_200.pth
+```
+(b) Train the RCNN network with offline GT augmentation:
+1. Generate the augmented offline scenes by running the following command:
+```
+python generate_aug_scene.py --class_name Car --split train --aug_times 4
+```
+2. Save the RPN features and proposals by adding `--save_rpn_feature`:
+
+* To save features and proposals for the training, we set `TEST.RPN_POST_NMS_TOP_N=300` and `TEST.RPN_NMS_THRESH=0.85` as follows:
+```
+python eval_rcnn.py --cfg_file cfgs/default.yaml --batch_size 4 --eval_mode rpn --ckpt ../output/rpn/default/ckpt/checkpoint_epoch_200.pth --save_rpn_feature --set TEST.SPLIT train_aug TEST.RPN_POST_NMS_TOP_N 300 TEST.RPN_NMS_THRESH 0.85
+```
+
+* To save features and proposals for the evaluation, we keep `TEST.RPN_POST_NMS_TOP_N=100` and `TEST.RPN_NMS_THRESH=0.8` as default:
+```
+python eval_rcnn.py --cfg_file cfgs/default.yaml --batch_size 4 --eval_mode rpn --ckpt ../output/rpn/default/ckpt/checkpoint_epoch_200.pth --save_rpn_feature
+```
+3. Now we could train the RCNN network. Note that you should set `TRAIN.SPLIT=train_aug` to use the augmented scenes for training, and use `--rcnn_training_roi_dir` and `--rcnn_training_feature_dir` to specify the features and proposals saved in the step above:
+```
+python train_rcnn.py --cfg_file cfgs/default.yaml --batch_size 4 --train_mode rcnn_offline --epochs 30 --ckpt_save_interval 1 --rcnn_training_roi_dir ../output/rpn/default/eval/epoch_200/train_aug/detections/data --rcnn_training_feature_dir ../output/rpn/default/eval/epoch_200/train_aug/features
+```
+For the offline GT sampling augmentation, the default setting for training the RCNN network is `RCNN.ROI_SAMPLE_JIT=True`, which means that we sample the RoIs and compute their GTs on the GPU. A CPU version of proposal sampling, implemented in the dataloader, is also provided; you could enable it by setting `RCNN.ROI_SAMPLE_JIT=False`. Typically the CPU version is faster but costs more CPU resources, since it uses multiple workers.
+
+All the code supports **multiple GPUs**; simply add the `--mgpus` argument as above. You could also increase `--batch_size` when using multiple GPUs for training.
+
+**Note**:
+* Strategy (a), online augmentation, is more elegant and easier to train.
+* The best model is trained by the offline augmentation strategy with CPU proposal sampling (set `RCNN.ROI_SAMPLE_JIT=False`).
+* Theoretically, the online augmentation should be better, but currently its results are a bit lower than the offline augmentation, and I still don't know why. All discussions are welcome.
+* I am still working on this code to make it more stable.
+
+## Citation
+If you find this work useful in your research, please consider citing:
+```
+@InProceedings{Shi_2019_CVPR,
+    author = {Shi, Shaoshuai and Wang, Xiaogang and Li, Hongsheng},
+    title = {PointRCNN: 3D Object Proposal Generation and Detection From Point Cloud},
+    booktitle = {The IEEE Conference on Computer Vision and Pattern Recognition (CVPR)},
+    month = {June},
+    year = {2019}
+}
+```
diff --git a/README_epnet_plus_plus.md b/README_epnet_plus_plus.md
new file mode 100644
index 0000000..f58487f
--- /dev/null
+++ b/README_epnet_plus_plus.md
@@ -0,0 +1,115 @@
+# EPNet++
+
+EPNet++: Cascade Bi-directional Fusion for Multi-Modal 3D Object Detection (TPAMI 2022).
+
+The paper is available on [IEEE Xplore](https://ieeexplore.ieee.org/abstract/document/9983516) or on arXiv: [EPNet++](https://arxiv.org/pdf/2112.11088v4.pdf); the code is based on [EPNet](https://github.com/happinesslz/EPNet) and [PointRCNN](https://github.com/sshaoshuai/PointRCNN).
+
+## Abstract
+
+Recently, fusing the LiDAR point cloud and camera image to improve the performance and robustness of 3D object detection has received more and more attention, as these two modalities naturally possess strong complementarity. In this paper, we propose EPNet++ for multi-modal 3D object detection by introducing a novel Cascade Bi-directional Fusion (CB-Fusion) module and a Multi-Modal Consistency (MC) loss. More concretely, the proposed CB-Fusion module enhances point features with plentiful semantic information absorbed from the image features in a cascade bi-directional interaction fusion manner, leading to more powerful and discriminative feature representations. The MC loss explicitly guarantees the consistency between the predicted scores from the two modalities to obtain more comprehensive and reliable confidence scores. The experimental results on the KITTI, JRDB and SUN-RGBD datasets demonstrate the superiority of EPNet++ over the state-of-the-art methods. Besides, we emphasize a critical but easily overlooked problem, which is to explore the performance and robustness of a 3D detector in sparser scenes. Extensive experiments show that EPNet++ outperforms the existing SOTA methods by remarkable margins in highly sparse point cloud cases, which might be a viable direction for reducing the expensive cost of LiDAR sensors.
+
+![image](img/framework.png)
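+
+To make the two components above concrete, here is a minimal, hypothetical sketch of one cascade bi-directional fusion step and of a simple consistency term between the two modalities' foreground scores. Module names, channel sizes, the gating scheme, and the exact consistency formulation are illustrative assumptions; see the paper and `lib/net` for the real designs.
+
+```python
+import torch
+import torch.nn as nn
+import torch.nn.functional as F
+
+class CBFusionStep(nn.Module):
+    """One bi-directional exchange: image features enrich point features,
+    and the enhanced point features are fed back to the image branch."""
+    def __init__(self, c_pt, c_img):
+        super().__init__()
+        self.img2pt = nn.Linear(c_img, c_pt)    # image -> point enhancement
+        self.pt2img = nn.Linear(c_pt, c_img)    # point -> image feedback
+        self.gate = nn.Linear(c_pt + c_img, 1)  # how much image info to absorb
+
+    def forward(self, pt_feat, img_feat_per_pt):
+        # pt_feat: (N, c_pt); img_feat_per_pt: (N, c_img), image features
+        # already sampled at each point's projected 2D location.
+        w = torch.sigmoid(self.gate(torch.cat([pt_feat, img_feat_per_pt], dim=1)))
+        pt_out = pt_feat + w * self.img2pt(img_feat_per_pt)  # image -> point
+        img_out = img_feat_per_pt + self.pt2img(pt_out)      # point -> image
+        return pt_out, img_out
+
+def mc_loss(pt_logits, img_logits):
+    """Penalize disagreement between per-point scores of the two modalities."""
+    return F.mse_loss(torch.sigmoid(pt_logits), torch.sigmoid(img_logits))
+```
+
+Stacking several such steps across feature scales gives the "cascade" behaviour, with each stage exchanging progressively richer features.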
+
+
+
+## Install (same as [PointRCNN](https://github.com/sshaoshuai/PointRCNN))
+
+The environment:
+* Linux (tested on Ubuntu 16.04)
+* Python 3.7.6
+* PyTorch 1.2.0 + CUDA 10.0/10.1
+
+a. Clone the EPNet++ repository.
+```shell
+git clone https://github.com/happinesslz/EPNetV2.git
+```
+
+b. Create the conda environment.
+
+```shell
+conda create -n epnet_plus_plus_open python==3.7.6
+conda activate epnet_plus_plus_open
+conda install pytorch==1.2.0 torchvision==0.4.0 cudatoolkit=10.0 -c pytorch
+pip install -r requirements.txt
+```
+
+c. Build and install the `pointnet2_lib`, `iou3d`, and `roipool3d` libraries by executing the following command:
+```shell
+sh build_and_install.sh
+```
+
+## Dataset preparation
+Please download the official [KITTI 3D object detection](http://www.cvlibs.net/datasets/kitti/eval_object.php?obj_benchmark=3d) dataset and our provided [train mask](https://drive.google.com/file/d/1XqqErkAl8t72HM65PJfZC6SQ1oeBF4X0/view?usp=share_link) based on the [KINS dataset](https://github.com/qqlu/Amodal-Instance-Segmentation-through-KINS-Dataset). Then organize the downloaded files as follows:
+
+```
+EPNetV2
+├── data
+│   ├── KITTI
+│   │   ├── ImageSets
+│   │   ├── object
+│   │   │   ├──training
+│   │   │   │   ├──calib & velodyne & label_2 & image_2 & (optional: planes) & train_mask
+│   │   │   ├──testing
+│   │   │   │   ├──calib & velodyne & image_2
+├── lib
+├── pointnet2_lib
+├── tools
+```
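+
+Since the fusion modules need per-point lookups into the image (and the train mask above), the standard KITTI projection is the glue. Here is a small, hypothetical sketch of that projection; the matrices come from the calib files, and the helper name is illustrative, mirroring only the spirit of `lib/utils/calibration.py`:
+
+```python
+import numpy as np
+
+def lidar_to_image(pts_lidar, P2, R0_rect, Tr_velo_to_cam):
+    """Project Nx3 LiDAR points to pixel coordinates with KITTI calib matrices.
+
+    P2: 3x4 camera projection; R0_rect: 3x3 rectification;
+    Tr_velo_to_cam: 3x4 LiDAR-to-camera transform.
+    """
+    n = pts_lidar.shape[0]
+    pts_h = np.hstack([pts_lidar, np.ones((n, 1))])       # Nx4 homogeneous
+    pts_cam = R0_rect @ (Tr_velo_to_cam @ pts_h.T)        # 3xN rectified camera coords
+    pts_img = P2 @ np.vstack([pts_cam, np.ones((1, n))])  # 3xN homogeneous pixels
+    uv = (pts_img[:2] / pts_img[2]).T                     # Nx2 (u, v) pixel coords
+    return uv, pts_cam[2]                                 # pixels and camera-frame depth
+```
+
+Per-point image features or mask labels can then be gathered at the rounded `(u, v)` locations of the points with positive depth that fall inside the image.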
+
+
+## Trained model
+The results for Car, Pedestrian, and Cyclist at Recall 40:
+
+| Class | Easy | Moderate | Hard |
+| :--: | :--: | :--: | :--: |
+| Car | 92.98 | 83.45 | 82.44 |
+| Pedestrian | 77.70 | 70.20 | 63.80 |
+| Cyclist | 86.86 | 64.11 | 60.24 |
+
+To evaluate all these models, please download them from [Google](https://drive.google.com/file/d/1k7Ugvz5AYJhmrLAvnOt-ZFhNntmB8DZe/view?usp=share_link) or [Baidu Pan](https://pan.baidu.com/s/1GhGgekaob1b_P3d4vWtpbg) (1rw2). Unzip the models and move them to "./tools". Then run:
+```shell
+bash run_all_eval_epnet_plus_plus_models.sh
+```
+## Implementation
+### Training & Inference
+```shell
+bash run_train_and_eval_epnet_plus_plus_car.sh
+bash run_train_and_eval_epnet_plus_plus_ped.sh
+bash run_train_and_eval_epnet_plus_plus_cyc.sh
+```
+## Acknowledgement
+Thanks to the superior open-source project [PointRCNN](https://github.com/sshaoshuai/PointRCNN), and thanks to all co-authors.
+
+## Citation
+If you find this work useful in your research, please consider citing:
+```
+@article{liu2022epnet++,
+  title={EPNet++: Cascade bi-directional fusion for multi-modal 3D object detection},
+  author={Liu, Zhe and Huang, Tengteng and Li, Bingling and Chen, Xiwu and Wang, Xi and Bai, Xiang},
+  journal={IEEE Transactions on Pattern Analysis and Machine Intelligence},
+  year={2022},
+  publisher={IEEE}
+}
+```
+
+```
+@inproceedings{Huang2020EPNetEP,
+  title={EPNet: Enhancing Point Features with Image Semantics for 3D Object Detection},
+  author={Tengteng Huang and Zhe Liu and Xiwu Chen and Xiang Bai},
+  booktitle = {ECCV},
+  month = {July},
+  year={2020}
+}
+```
+```
+@InProceedings{Shi_2019_CVPR,
+  author = {Shi, Shaoshuai and Wang, Xiaogang and Li, Hongsheng},
+  title = {PointRCNN: 3D Object Proposal Generation and Detection From Point Cloud},
+  booktitle = {The IEEE Conference on Computer Vision and Pattern Recognition (CVPR)},
+  month = {June},
+  year = {2019}
+}
+```
+
+
diff --git a/build_and_install.sh b/build_and_install.sh
new file mode 100644
index 0000000..815b35b
--- /dev/null
+++ b/build_and_install.sh
@@ -0,0 +1,14 @@
+cd pointnet2_lib/pointnet2
+python setup.py install
+cd ../../
+
+cd lib/utils/iou3d/
+python setup.py install
+
+cd ../roipool3d/
+python setup.py install
+
+cd ../sample2grid/
+python setup.py install
+
+cd ../../../tools
diff --git a/data/KITTI/ImageSets/test.txt b/data/KITTI/ImageSets/test.txt
new file mode 100644
index 0000000..5d39002
--- /dev/null
+++ b/data/KITTI/ImageSets/test.txt
@@ -0,0 +1,7518 @@
+000000 +000001 +000002 +000003 +000004 +000005 +000006 +000007 +000008 +000009 +000010 +000011 +000012 +000013 +000014 +000015 +000016 +000017 +000018 +000019 +000020 +000021 +000022 +000023 +000024 +000025 +000026 +000027 +000028 +000029 +000030 +000031 +000032 +000033 +000034 +000035 +000036 +000037 +000038 +000039 +000040 +000041 +000042 +000043 +000044 +000045 +000046 +000047 +000048 +000049 +000050 +000051 +000052 +000053 +000054 +000055 +000056 +000057 +000058 +000059 +000060 +000061 +000062 +000063 +000064 +000065 +000066 +000067 +000068 +000069 +000070 +000071 +000072 +000073 +000074 +000075 +000076 +000077 +000078 +000079 +000080 +000081 +000082 +000083 +000084 +000085 +000086 +000087 +000088 +000089 +000090 +000091 +000092 +000093 +000094 +000095 +000096 +000097 +000098 +000099 +000100 +000101 +000102 +000103 +000104 +000105 +000106 +000107 +000108 +000109 +000110 +000111 +000112 +000113 +000114 +000115 +000116 +000117 +000118 +000119 +000120 +000121 +000122 +000123 +000124 +000125 +000126 +000127 +000128 +000129 +000130 +000131 +000132 +000133 +000134 +000135 +000136 +000137 +000138 +000139 +000140 +000141 +000142 +000143 +000144 +000145 +000146 +000147 +000148 +000149 +000150 +000151 +000152 +000153 +000154 +000155 +000156 +000157 +000158 +000159 +000160 +000161 +000162 +000163 +000164 +000165 +000166 +000167 +000168 +000169 +000170 +000171 +000172 +000173 +000174 +000175 +000176 +000177 +000178 +000179 +000180 +000181 +000182 +000183 +000184 +000185 +000186 +000187 +000188 +000189 +000190 +000191 +000192 +000193 +000194 +000195
+000196 +000197 +000198 +000199 +000200 +000201 +000202 +000203 +000204 +000205 +000206 +000207 +000208 +000209 +000210 +000211 +000212 +000213 +000214 +000215 +000216 +000217 +000218 +000219 +000220 +000221 +000222 +000223 +000224 +000225 +000226 +000227 +000228 +000229 +000230 +000231 +000232 +000233 +000234 +000235 +000236 +000237 +000238 +000239 +000240 +000241 +000242 +000243 +000244 +000245 +000246 +000247 +000248 +000249 +000250 +000251 +000252 +000253 +000254 +000255 +000256 +000257 +000258 +000259 +000260 +000261 +000262 +000263 +000264 +000265 +000266 +000267 +000268 +000269 +000270 +000271 +000272 +000273 +000274 +000275 +000276 +000277 +000278 +000279 +000280 +000281 +000282 +000283 +000284 +000285 +000286 +000287 +000288 +000289 +000290 +000291 +000292 +000293 +000294 +000295 +000296 +000297 +000298 +000299 +000300 +000301 +000302 +000303 +000304 +000305 +000306 +000307 +000308 +000309 +000310 +000311 +000312 +000313 +000314 +000315 +000316 +000317 +000318 +000319 +000320 +000321 +000322 +000323 +000324 +000325 +000326 +000327 +000328 +000329 +000330 +000331 +000332 +000333 +000334 +000335 +000336 +000337 +000338 +000339 +000340 +000341 +000342 +000343 +000344 +000345 +000346 +000347 +000348 +000349 +000350 +000351 +000352 +000353 +000354 +000355 +000356 +000357 +000358 +000359 +000360 +000361 +000362 +000363 +000364 +000365 +000366 +000367 +000368 +000369 +000370 +000371 +000372 +000373 +000374 +000375 +000376 +000377 +000378 +000379 +000380 +000381 +000382 +000383 +000384 +000385 +000386 +000387 +000388 +000389 +000390 +000391 +000392 +000393 +000394 +000395 +000396 +000397 +000398 +000399 +000400 +000401 +000402 +000403 +000404 +000405 +000406 +000407 +000408 +000409 +000410 +000411 +000412 +000413 +000414 +000415 +000416 +000417 +000418 +000419 +000420 +000421 +000422 +000423 +000424 +000425 +000426 +000427 +000428 +000429 +000430 +000431 +000432 +000433 +000434 +000435 +000436 +000437 +000438 +000439 +000440 +000441 +000442 +000443 +000444 +000445 +000446 +000447 +000448 +000449 +000450 +000451 +000452 +000453 +000454 +000455 +000456 +000457 +000458 +000459 +000460 +000461 +000462 +000463 +000464 +000465 +000466 +000467 +000468 +000469 +000470 +000471 +000472 +000473 +000474 +000475 +000476 +000477 +000478 +000479 +000480 +000481 +000482 +000483 +000484 +000485 +000486 +000487 +000488 +000489 +000490 +000491 +000492 +000493 +000494 +000495 +000496 +000497 +000498 +000499 +000500 +000501 +000502 +000503 +000504 +000505 +000506 +000507 +000508 +000509 +000510 +000511 +000512 +000513 +000514 +000515 +000516 +000517 +000518 +000519 +000520 +000521 +000522 +000523 +000524 +000525 +000526 +000527 +000528 +000529 +000530 +000531 +000532 +000533 +000534 +000535 +000536 +000537 +000538 +000539 +000540 +000541 +000542 +000543 +000544 +000545 +000546 +000547 +000548 +000549 +000550 +000551 +000552 +000553 +000554 +000555 +000556 +000557 +000558 +000559 +000560 +000561 +000562 +000563 +000564 +000565 +000566 +000567 +000568 +000569 +000570 +000571 +000572 +000573 +000574 +000575 +000576 +000577 +000578 +000579 +000580 +000581 +000582 +000583 +000584 +000585 +000586 +000587 +000588 +000589 +000590 +000591 +000592 +000593 +000594 +000595 +000596 +000597 +000598 +000599 +000600 +000601 +000602 +000603 +000604 +000605 +000606 +000607 +000608 +000609 +000610 +000611 +000612 +000613 +000614 +000615 +000616 +000617 +000618 +000619 +000620 +000621 +000622 +000623 +000624 +000625 +000626 +000627 +000628 +000629 +000630 +000631 +000632 +000633 +000634 +000635 +000636 +000637 +000638 +000639 
+000640 +000641 +000642 +000643 +000644 +000645 +000646 +000647 +000648 +000649 +000650 +000651 +000652 +000653 +000654 +000655 +000656 +000657 +000658 +000659 +000660 +000661 +000662 +000663 +000664 +000665 +000666 +000667 +000668 +000669 +000670 +000671 +000672 +000673 +000674 +000675 +000676 +000677 +000678 +000679 +000680 +000681 +000682 +000683 +000684 +000685 +000686 +000687 +000688 +000689 +000690 +000691 +000692 +000693 +000694 +000695 +000696 +000697 +000698 +000699 +000700 +000701 +000702 +000703 +000704 +000705 +000706 +000707 +000708 +000709 +000710 +000711 +000712 +000713 +000714 +000715 +000716 +000717 +000718 +000719 +000720 +000721 +000722 +000723 +000724 +000725 +000726 +000727 +000728 +000729 +000730 +000731 +000732 +000733 +000734 +000735 +000736 +000737 +000738 +000739 +000740 +000741 +000742 +000743 +000744 +000745 +000746 +000747 +000748 +000749 +000750 +000751 +000752 +000753 +000754 +000755 +000756 +000757 +000758 +000759 +000760 +000761 +000762 +000763 +000764 +000765 +000766 +000767 +000768 +000769 +000770 +000771 +000772 +000773 +000774 +000775 +000776 +000777 +000778 +000779 +000780 +000781 +000782 +000783 +000784 +000785 +000786 +000787 +000788 +000789 +000790 +000791 +000792 +000793 +000794 +000795 +000796 +000797 +000798 +000799 +000800 +000801 +000802 +000803 +000804 +000805 +000806 +000807 +000808 +000809 +000810 +000811 +000812 +000813 +000814 +000815 +000816 +000817 +000818 +000819 +000820 +000821 +000822 +000823 +000824 +000825 +000826 +000827 +000828 +000829 +000830 +000831 +000832 +000833 +000834 +000835 +000836 +000837 +000838 +000839 +000840 +000841 +000842 +000843 +000844 +000845 +000846 +000847 +000848 +000849 +000850 +000851 +000852 +000853 +000854 +000855 +000856 +000857 +000858 +000859 +000860 +000861 +000862 +000863 +000864 +000865 +000866 +000867 +000868 +000869 +000870 +000871 +000872 +000873 +000874 +000875 +000876 +000877 +000878 +000879 +000880 +000881 +000882 +000883 +000884 +000885 +000886 +000887 +000888 +000889 +000890 +000891 +000892 +000893 +000894 +000895 +000896 +000897 +000898 +000899 +000900 +000901 +000902 +000903 +000904 +000905 +000906 +000907 +000908 +000909 +000910 +000911 +000912 +000913 +000914 +000915 +000916 +000917 +000918 +000919 +000920 +000921 +000922 +000923 +000924 +000925 +000926 +000927 +000928 +000929 +000930 +000931 +000932 +000933 +000934 +000935 +000936 +000937 +000938 +000939 +000940 +000941 +000942 +000943 +000944 +000945 +000946 +000947 +000948 +000949 +000950 +000951 +000952 +000953 +000954 +000955 +000956 +000957 +000958 +000959 +000960 +000961 +000962 +000963 +000964 +000965 +000966 +000967 +000968 +000969 +000970 +000971 +000972 +000973 +000974 +000975 +000976 +000977 +000978 +000979 +000980 +000981 +000982 +000983 +000984 +000985 +000986 +000987 +000988 +000989 +000990 +000991 +000992 +000993 +000994 +000995 +000996 +000997 +000998 +000999 +001000 +001001 +001002 +001003 +001004 +001005 +001006 +001007 +001008 +001009 +001010 +001011 +001012 +001013 +001014 +001015 +001016 +001017 +001018 +001019 +001020 +001021 +001022 +001023 +001024 +001025 +001026 +001027 +001028 +001029 +001030 +001031 +001032 +001033 +001034 +001035 +001036 +001037 +001038 +001039 +001040 +001041 +001042 +001043 +001044 +001045 +001046 +001047 +001048 +001049 +001050 +001051 +001052 +001053 +001054 +001055 +001056 +001057 +001058 +001059 +001060 +001061 +001062 +001063 +001064 +001065 +001066 +001067 +001068 +001069 +001070 +001071 +001072 +001073 +001074 +001075 +001076 +001077 +001078 +001079 +001080 +001081 +001082 +001083 
+001084 +001085 +001086 +001087 +001088 +001089 +001090 +001091 +001092 +001093 +001094 +001095 +001096 +001097 +001098 +001099 +001100 +001101 +001102 +001103 +001104 +001105 +001106 +001107 +001108 +001109 +001110 +001111 +001112 +001113 +001114 +001115 +001116 +001117 +001118 +001119 +001120 +001121 +001122 +001123 +001124 +001125 +001126 +001127 +001128 +001129 +001130 +001131 +001132 +001133 +001134 +001135 +001136 +001137 +001138 +001139 +001140 +001141 +001142 +001143 +001144 +001145 +001146 +001147 +001148 +001149 +001150 +001151 +001152 +001153 +001154 +001155 +001156 +001157 +001158 +001159 +001160 +001161 +001162 +001163 +001164 +001165 +001166 +001167 +001168 +001169 +001170 +001171 +001172 +001173 +001174 +001175 +001176 +001177 +001178 +001179 +001180 +001181 +001182 +001183 +001184 +001185 +001186 +001187 +001188 +001189 +001190 +001191 +001192 +001193 +001194 +001195 +001196 +001197 +001198 +001199 +001200 +001201 +001202 +001203 +001204 +001205 +001206 +001207 +001208 +001209 +001210 +001211 +001212 +001213 +001214 +001215 +001216 +001217 +001218 +001219 +001220 +001221 +001222 +001223 +001224 +001225 +001226 +001227 +001228 +001229 +001230 +001231 +001232 +001233 +001234 +001235 +001236 +001237 +001238 +001239 +001240 +001241 +001242 +001243 +001244 +001245 +001246 +001247 +001248 +001249 +001250 +001251 +001252 +001253 +001254 +001255 +001256 +001257 +001258 +001259 +001260 +001261 +001262 +001263 +001264 +001265 +001266 +001267 +001268 +001269 +001270 +001271 +001272 +001273 +001274 +001275 +001276 +001277 +001278 +001279 +001280 +001281 +001282 +001283 +001284 +001285 +001286 +001287 +001288 +001289 +001290 +001291 +001292 +001293 +001294 +001295 +001296 +001297 +001298 +001299 +001300 +001301 +001302 +001303 +001304 +001305 +001306 +001307 +001308 +001309 +001310 +001311 +001312 +001313 +001314 +001315 +001316 +001317 +001318 +001319 +001320 +001321 +001322 +001323 +001324 +001325 +001326 +001327 +001328 +001329 +001330 +001331 +001332 +001333 +001334 +001335 +001336 +001337 +001338 +001339 +001340 +001341 +001342 +001343 +001344 +001345 +001346 +001347 +001348 +001349 +001350 +001351 +001352 +001353 +001354 +001355 +001356 +001357 +001358 +001359 +001360 +001361 +001362 +001363 +001364 +001365 +001366 +001367 +001368 +001369 +001370 +001371 +001372 +001373 +001374 +001375 +001376 +001377 +001378 +001379 +001380 +001381 +001382 +001383 +001384 +001385 +001386 +001387 +001388 +001389 +001390 +001391 +001392 +001393 +001394 +001395 +001396 +001397 +001398 +001399 +001400 +001401 +001402 +001403 +001404 +001405 +001406 +001407 +001408 +001409 +001410 +001411 +001412 +001413 +001414 +001415 +001416 +001417 +001418 +001419 +001420 +001421 +001422 +001423 +001424 +001425 +001426 +001427 +001428 +001429 +001430 +001431 +001432 +001433 +001434 +001435 +001436 +001437 +001438 +001439 +001440 +001441 +001442 +001443 +001444 +001445 +001446 +001447 +001448 +001449 +001450 +001451 +001452 +001453 +001454 +001455 +001456 +001457 +001458 +001459 +001460 +001461 +001462 +001463 +001464 +001465 +001466 +001467 +001468 +001469 +001470 +001471 +001472 +001473 +001474 +001475 +001476 +001477 +001478 +001479 +001480 +001481 +001482 +001483 +001484 +001485 +001486 +001487 +001488 +001489 +001490 +001491 +001492 +001493 +001494 +001495 +001496 +001497 +001498 +001499 +001500 +001501 +001502 +001503 +001504 +001505 +001506 +001507 +001508 +001509 +001510 +001511 +001512 +001513 +001514 +001515 +001516 +001517 +001518 +001519 +001520 +001521 +001522 +001523 +001524 +001525 +001526 +001527 
+001528 +001529 +001530 +001531 +001532 +001533 +001534 +001535 +001536 +001537 +001538 +001539 +001540 +001541 +001542 +001543 +001544 +001545 +001546 +001547 +001548 +001549 +001550 +001551 +001552 +001553 +001554 +001555 +001556 +001557 +001558 +001559 +001560 +001561 +001562 +001563 +001564 +001565 +001566 +001567 +001568 +001569 +001570 +001571 +001572 +001573 +001574 +001575 +001576 +001577 +001578 +001579 +001580 +001581 +001582 +001583 +001584 +001585 +001586 +001587 +001588 +001589 +001590 +001591 +001592 +001593 +001594 +001595 +001596 +001597 +001598 +001599 +001600 +001601 +001602 +001603 +001604 +001605 +001606 +001607 +001608 +001609 +001610 +001611 +001612 +001613 +001614 +001615 +001616 +001617 +001618 +001619 +001620 +001621 +001622 +001623 +001624 +001625 +001626 +001627 +001628 +001629 +001630 +001631 +001632 +001633 +001634 +001635 +001636 +001637 +001638 +001639 +001640 +001641 +001642 +001643 +001644 +001645 +001646 +001647 +001648 +001649 +001650 +001651 +001652 +001653 +001654 +001655 +001656 +001657 +001658 +001659 +001660 +001661 +001662 +001663 +001664 +001665 +001666 +001667 +001668 +001669 +001670 +001671 +001672 +001673 +001674 +001675 +001676 +001677 +001678 +001679 +001680 +001681 +001682 +001683 +001684 +001685 +001686 +001687 +001688 +001689 +001690 +001691 +001692 +001693 +001694 +001695 +001696 +001697 +001698 +001699 +001700 +001701 +001702 +001703 +001704 +001705 +001706 +001707 +001708 +001709 +001710 +001711 +001712 +001713 +001714 +001715 +001716 +001717 +001718 +001719 +001720 +001721 +001722 +001723 +001724 +001725 +001726 +001727 +001728 +001729 +001730 +001731 +001732 +001733 +001734 +001735 +001736 +001737 +001738 +001739 +001740 +001741 +001742 +001743 +001744 +001745 +001746 +001747 +001748 +001749 +001750 +001751 +001752 +001753 +001754 +001755 +001756 +001757 +001758 +001759 +001760 +001761 +001762 +001763 +001764 +001765 +001766 +001767 +001768 +001769 +001770 +001771 +001772 +001773 +001774 +001775 +001776 +001777 +001778 +001779 +001780 +001781 +001782 +001783 +001784 +001785 +001786 +001787 +001788 +001789 +001790 +001791 +001792 +001793 +001794 +001795 +001796 +001797 +001798 +001799 +001800 +001801 +001802 +001803 +001804 +001805 +001806 +001807 +001808 +001809 +001810 +001811 +001812 +001813 +001814 +001815 +001816 +001817 +001818 +001819 +001820 +001821 +001822 +001823 +001824 +001825 +001826 +001827 +001828 +001829 +001830 +001831 +001832 +001833 +001834 +001835 +001836 +001837 +001838 +001839 +001840 +001841 +001842 +001843 +001844 +001845 +001846 +001847 +001848 +001849 +001850 +001851 +001852 +001853 +001854 +001855 +001856 +001857 +001858 +001859 +001860 +001861 +001862 +001863 +001864 +001865 +001866 +001867 +001868 +001869 +001870 +001871 +001872 +001873 +001874 +001875 +001876 +001877 +001878 +001879 +001880 +001881 +001882 +001883 +001884 +001885 +001886 +001887 +001888 +001889 +001890 +001891 +001892 +001893 +001894 +001895 +001896 +001897 +001898 +001899 +001900 +001901 +001902 +001903 +001904 +001905 +001906 +001907 +001908 +001909 +001910 +001911 +001912 +001913 +001914 +001915 +001916 +001917 +001918 +001919 +001920 +001921 +001922 +001923 +001924 +001925 +001926 +001927 +001928 +001929 +001930 +001931 +001932 +001933 +001934 +001935 +001936 +001937 +001938 +001939 +001940 +001941 +001942 +001943 +001944 +001945 +001946 +001947 +001948 +001949 +001950 +001951 +001952 +001953 +001954 +001955 +001956 +001957 +001958 +001959 +001960 +001961 +001962 +001963 +001964 +001965 +001966 +001967 +001968 +001969 +001970 +001971 
+001972 +001973 +001974 +001975 +001976 +001977 +001978 +001979 +001980 +001981 +001982 +001983 +001984 +001985 +001986 +001987 +001988 +001989 +001990 +001991 +001992 +001993 +001994 +001995 +001996 +001997 +001998 +001999 +002000 +002001 +002002 +002003 +002004 +002005 +002006 +002007 +002008 +002009 +002010 +002011 +002012 +002013 +002014 +002015 +002016 +002017 +002018 +002019 +002020 +002021 +002022 +002023 +002024 +002025 +002026 +002027 +002028 +002029 +002030 +002031 +002032 +002033 +002034 +002035 +002036 +002037 +002038 +002039 +002040 +002041 +002042 +002043 +002044 +002045 +002046 +002047 +002048 +002049 +002050 +002051 +002052 +002053 +002054 +002055 +002056 +002057 +002058 +002059 +002060 +002061 +002062 +002063 +002064 +002065 +002066 +002067 +002068 +002069 +002070 +002071 +002072 +002073 +002074 +002075 +002076 +002077 +002078 +002079 +002080 +002081 +002082 +002083 +002084 +002085 +002086 +002087 +002088 +002089 +002090 +002091 +002092 +002093 +002094 +002095 +002096 +002097 +002098 +002099 +002100 +002101 +002102 +002103 +002104 +002105 +002106 +002107 +002108 +002109 +002110 +002111 +002112 +002113 +002114 +002115 +002116 +002117 +002118 +002119 +002120 +002121 +002122 +002123 +002124 +002125 +002126 +002127 +002128 +002129 +002130 +002131 +002132 +002133 +002134 +002135 +002136 +002137 +002138 +002139 +002140 +002141 +002142 +002143 +002144 +002145 +002146 +002147 +002148 +002149 +002150 +002151 +002152 +002153 +002154 +002155 +002156 +002157 +002158 +002159 +002160 +002161 +002162 +002163 +002164 +002165 +002166 +002167 +002168 +002169 +002170 +002171 +002172 +002173 +002174 +002175 +002176 +002177 +002178 +002179 +002180 +002181 +002182 +002183 +002184 +002185 +002186 +002187 +002188 +002189 +002190 +002191 +002192 +002193 +002194 +002195 +002196 +002197 +002198 +002199 +002200 +002201 +002202 +002203 +002204 +002205 +002206 +002207 +002208 +002209 +002210 +002211 +002212 +002213 +002214 +002215 +002216 +002217 +002218 +002219 +002220 +002221 +002222 +002223 +002224 +002225 +002226 +002227 +002228 +002229 +002230 +002231 +002232 +002233 +002234 +002235 +002236 +002237 +002238 +002239 +002240 +002241 +002242 +002243 +002244 +002245 +002246 +002247 +002248 +002249 +002250 +002251 +002252 +002253 +002254 +002255 +002256 +002257 +002258 +002259 +002260 +002261 +002262 +002263 +002264 +002265 +002266 +002267 +002268 +002269 +002270 +002271 +002272 +002273 +002274 +002275 +002276 +002277 +002278 +002279 +002280 +002281 +002282 +002283 +002284 +002285 +002286 +002287 +002288 +002289 +002290 +002291 +002292 +002293 +002294 +002295 +002296 +002297 +002298 +002299 +002300 +002301 +002302 +002303 +002304 +002305 +002306 +002307 +002308 +002309 +002310 +002311 +002312 +002313 +002314 +002315 +002316 +002317 +002318 +002319 +002320 +002321 +002322 +002323 +002324 +002325 +002326 +002327 +002328 +002329 +002330 +002331 +002332 +002333 +002334 +002335 +002336 +002337 +002338 +002339 +002340 +002341 +002342 +002343 +002344 +002345 +002346 +002347 +002348 +002349 +002350 +002351 +002352 +002353 +002354 +002355 +002356 +002357 +002358 +002359 +002360 +002361 +002362 +002363 +002364 +002365 +002366 +002367 +002368 +002369 +002370 +002371 +002372 +002373 +002374 +002375 +002376 +002377 +002378 +002379 +002380 +002381 +002382 +002383 +002384 +002385 +002386 +002387 +002388 +002389 +002390 +002391 +002392 +002393 +002394 +002395 +002396 +002397 +002398 +002399 +002400 +002401 +002402 +002403 +002404 +002405 +002406 +002407 +002408 +002409 +002410 +002411 +002412 +002413 +002414 +002415 
+002416 +002417 +002418 +002419 +002420 +002421 +002422 +002423 +002424 +002425 +002426 +002427 +002428 +002429 +002430 +002431 +002432 +002433 +002434 +002435 +002436 +002437 +002438 +002439 +002440 +002441 +002442 +002443 +002444 +002445 +002446 +002447 +002448 +002449 +002450 +002451 +002452 +002453 +002454 +002455 +002456 +002457 +002458 +002459 +002460 +002461 +002462 +002463 +002464 +002465 +002466 +002467 +002468 +002469 +002470 +002471 +002472 +002473 +002474 +002475 +002476 +002477 +002478 +002479 +002480 +002481 +002482 +002483 +002484 +002485 +002486 +002487 +002488 +002489 +002490 +002491 +002492 +002493 +002494 +002495 +002496 +002497 +002498 +002499 +002500 +002501 +002502 +002503 +002504 +002505 +002506 +002507 +002508 +002509 +002510 +002511 +002512 +002513 +002514 +002515 +002516 +002517 +002518 +002519 +002520 +002521 +002522 +002523 +002524 +002525 +002526 +002527 +002528 +002529 +002530 +002531 +002532 +002533 +002534 +002535 +002536 +002537 +002538 +002539 +002540 +002541 +002542 +002543 +002544 +002545 +002546 +002547 +002548 +002549 +002550 +002551 +002552 +002553 +002554 +002555 +002556 +002557 +002558 +002559 +002560 +002561 +002562 +002563 +002564 +002565 +002566 +002567 +002568 +002569 +002570 +002571 +002572 +002573 +002574 +002575 +002576 +002577 +002578 +002579 +002580 +002581 +002582 +002583 +002584 +002585 +002586 +002587 +002588 +002589 +002590 +002591 +002592 +002593 +002594 +002595 +002596 +002597 +002598 +002599 +002600 +002601 +002602 +002603 +002604 +002605 +002606 +002607 +002608 +002609 +002610 +002611 +002612 +002613 +002614 +002615 +002616 +002617 +002618 +002619 +002620 +002621 +002622 +002623 +002624 +002625 +002626 +002627 +002628 +002629 +002630 +002631 +002632 +002633 +002634 +002635 +002636 +002637 +002638 +002639 +002640 +002641 +002642 +002643 +002644 +002645 +002646 +002647 +002648 +002649 +002650 +002651 +002652 +002653 +002654 +002655 +002656 +002657 +002658 +002659 +002660 +002661 +002662 +002663 +002664 +002665 +002666 +002667 +002668 +002669 +002670 +002671 +002672 +002673 +002674 +002675 +002676 +002677 +002678 +002679 +002680 +002681 +002682 +002683 +002684 +002685 +002686 +002687 +002688 +002689 +002690 +002691 +002692 +002693 +002694 +002695 +002696 +002697 +002698 +002699 +002700 +002701 +002702 +002703 +002704 +002705 +002706 +002707 +002708 +002709 +002710 +002711 +002712 +002713 +002714 +002715 +002716 +002717 +002718 +002719 +002720 +002721 +002722 +002723 +002724 +002725 +002726 +002727 +002728 +002729 +002730 +002731 +002732 +002733 +002734 +002735 +002736 +002737 +002738 +002739 +002740 +002741 +002742 +002743 +002744 +002745 +002746 +002747 +002748 +002749 +002750 +002751 +002752 +002753 +002754 +002755 +002756 +002757 +002758 +002759 +002760 +002761 +002762 +002763 +002764 +002765 +002766 +002767 +002768 +002769 +002770 +002771 +002772 +002773 +002774 +002775 +002776 +002777 +002778 +002779 +002780 +002781 +002782 +002783 +002784 +002785 +002786 +002787 +002788 +002789 +002790 +002791 +002792 +002793 +002794 +002795 +002796 +002797 +002798 +002799 +002800 +002801 +002802 +002803 +002804 +002805 +002806 +002807 +002808 +002809 +002810 +002811 +002812 +002813 +002814 +002815 +002816 +002817 +002818 +002819 +002820 +002821 +002822 +002823 +002824 +002825 +002826 +002827 +002828 +002829 +002830 +002831 +002832 +002833 +002834 +002835 +002836 +002837 +002838 +002839 +002840 +002841 +002842 +002843 +002844 +002845 +002846 +002847 +002848 +002849 +002850 +002851 +002852 +002853 +002854 +002855 +002856 +002857 +002858 +002859 
+002860 +002861 +002862 +002863 +002864 +002865 +002866 +002867 +002868 +002869 +002870 +002871 +002872 +002873 +002874 +002875 +002876 +002877 +002878 +002879 +002880 +002881 +002882 +002883 +002884 +002885 +002886 +002887 +002888 +002889 +002890 +002891 +002892 +002893 +002894 +002895 +002896 +002897 +002898 +002899 +002900 +002901 +002902 +002903 +002904 +002905 +002906 +002907 +002908 +002909 +002910 +002911 +002912 +002913 +002914 +002915 +002916 +002917 +002918 +002919 +002920 +002921 +002922 +002923 +002924 +002925 +002926 +002927 +002928 +002929 +002930 +002931 +002932 +002933 +002934 +002935 +002936 +002937 +002938 +002939 +002940 +002941 +002942 +002943 +002944 +002945 +002946 +002947 +002948 +002949 +002950 +002951 +002952 +002953 +002954 +002955 +002956 +002957 +002958 +002959 +002960 +002961 +002962 +002963 +002964 +002965 +002966 +002967 +002968 +002969 +002970 +002971 +002972 +002973 +002974 +002975 +002976 +002977 +002978 +002979 +002980 +002981 +002982 +002983 +002984 +002985 +002986 +002987 +002988 +002989 +002990 +002991 +002992 +002993 +002994 +002995 +002996 +002997 +002998 +002999 +003000 +003001 +003002 +003003 +003004 +003005 +003006 +003007 +003008 +003009 +003010 +003011 +003012 +003013 +003014 +003015 +003016 +003017 +003018 +003019 +003020 +003021 +003022 +003023 +003024 +003025 +003026 +003027 +003028 +003029 +003030 +003031 +003032 +003033 +003034 +003035 +003036 +003037 +003038 +003039 +003040 +003041 +003042 +003043 +003044 +003045 +003046 +003047 +003048 +003049 +003050 +003051 +003052 +003053 +003054 +003055 +003056 +003057 +003058 +003059 +003060 +003061 +003062 +003063 +003064 +003065 +003066 +003067 +003068 +003069 +003070 +003071 +003072 +003073 +003074 +003075 +003076 +003077 +003078 +003079 +003080 +003081 +003082 +003083 +003084 +003085 +003086 +003087 +003088 +003089 +003090 +003091 +003092 +003093 +003094 +003095 +003096 +003097 +003098 +003099 +003100 +003101 +003102 +003103 +003104 +003105 +003106 +003107 +003108 +003109 +003110 +003111 +003112 +003113 +003114 +003115 +003116 +003117 +003118 +003119 +003120 +003121 +003122 +003123 +003124 +003125 +003126 +003127 +003128 +003129 +003130 +003131 +003132 +003133 +003134 +003135 +003136 +003137 +003138 +003139 +003140 +003141 +003142 +003143 +003144 +003145 +003146 +003147 +003148 +003149 +003150 +003151 +003152 +003153 +003154 +003155 +003156 +003157 +003158 +003159 +003160 +003161 +003162 +003163 +003164 +003165 +003166 +003167 +003168 +003169 +003170 +003171 +003172 +003173 +003174 +003175 +003176 +003177 +003178 +003179 +003180 +003181 +003182 +003183 +003184 +003185 +003186 +003187 +003188 +003189 +003190 +003191 +003192 +003193 +003194 +003195 +003196 +003197 +003198 +003199 +003200 +003201 +003202 +003203 +003204 +003205 +003206 +003207 +003208 +003209 +003210 +003211 +003212 +003213 +003214 +003215 +003216 +003217 +003218 +003219 +003220 +003221 +003222 +003223 +003224 +003225 +003226 +003227 +003228 +003229 +003230 +003231 +003232 +003233 +003234 +003235 +003236 +003237 +003238 +003239 +003240 +003241 +003242 +003243 +003244 +003245 +003246 +003247 +003248 +003249 +003250 +003251 +003252 +003253 +003254 +003255 +003256 +003257 +003258 +003259 +003260 +003261 +003262 +003263 +003264 +003265 +003266 +003267 +003268 +003269 +003270 +003271 +003272 +003273 +003274 +003275 +003276 +003277 +003278 +003279 +003280 +003281 +003282 +003283 +003284 +003285 +003286 +003287 +003288 +003289 +003290 +003291 +003292 +003293 +003294 +003295 +003296 +003297 +003298 +003299 +003300 +003301 +003302 +003303 
+003304 +003305 +003306 +003307 +003308 +003309 +003310 +003311 +003312 +003313 +003314 +003315 +003316 +003317 +003318 +003319 +003320 +003321 +003322 +003323 +003324 +003325 +003326 +003327 +003328 +003329 +003330 +003331 +003332 +003333 +003334 +003335 +003336 +003337 +003338 +003339 +003340 +003341 +003342 +003343 +003344 +003345 +003346 +003347 +003348 +003349 +003350 +003351 +003352 +003353 +003354 +003355 +003356 +003357 +003358 +003359 +003360 +003361 +003362 +003363 +003364 +003365 +003366 +003367 +003368 +003369 +003370 +003371 +003372 +003373 +003374 +003375 +003376 +003377 +003378 +003379 +003380 +003381 +003382 +003383 +003384 +003385 +003386 +003387 +003388 +003389 +003390 +003391 +003392 +003393 +003394 +003395 +003396 +003397 +003398 +003399 +003400 +003401 +003402 +003403 +003404 +003405 +003406 +003407 +003408 +003409 +003410 +003411 +003412 +003413 +003414 +003415 +003416 +003417 +003418 +003419 +003420 +003421 +003422 +003423 +003424 +003425 +003426 +003427 +003428 +003429 +003430 +003431 +003432 +003433 +003434 +003435 +003436 +003437 +003438 +003439 +003440 +003441 +003442 +003443 +003444 +003445 +003446 +003447 +003448 +003449 +003450 +003451 +003452 +003453 +003454 +003455 +003456 +003457 +003458 +003459 +003460 +003461 +003462 +003463 +003464 +003465 +003466 +003467 +003468 +003469 +003470 +003471 +003472 +003473 +003474 +003475 +003476 +003477 +003478 +003479 +003480 +003481 +003482 +003483 +003484 +003485 +003486 +003487 +003488 +003489 +003490 +003491 +003492 +003493 +003494 +003495 +003496 +003497 +003498 +003499 +003500 +003501 +003502 +003503 +003504 +003505 +003506 +003507 +003508 +003509 +003510 +003511 +003512 +003513 +003514 +003515 +003516 +003517 +003518 +003519 +003520 +003521 +003522 +003523 +003524 +003525 +003526 +003527 +003528 +003529 +003530 +003531 +003532 +003533 +003534 +003535 +003536 +003537 +003538 +003539 +003540 +003541 +003542 +003543 +003544 +003545 +003546 +003547 +003548 +003549 +003550 +003551 +003552 +003553 +003554 +003555 +003556 +003557 +003558 +003559 +003560 +003561 +003562 +003563 +003564 +003565 +003566 +003567 +003568 +003569 +003570 +003571 +003572 +003573 +003574 +003575 +003576 +003577 +003578 +003579 +003580 +003581 +003582 +003583 +003584 +003585 +003586 +003587 +003588 +003589 +003590 +003591 +003592 +003593 +003594 +003595 +003596 +003597 +003598 +003599 +003600 +003601 +003602 +003603 +003604 +003605 +003606 +003607 +003608 +003609 +003610 +003611 +003612 +003613 +003614 +003615 +003616 +003617 +003618 +003619 +003620 +003621 +003622 +003623 +003624 +003625 +003626 +003627 +003628 +003629 +003630 +003631 +003632 +003633 +003634 +003635 +003636 +003637 +003638 +003639 +003640 +003641 +003642 +003643 +003644 +003645 +003646 +003647 +003648 +003649 +003650 +003651 +003652 +003653 +003654 +003655 +003656 +003657 +003658 +003659 +003660 +003661 +003662 +003663 +003664 +003665 +003666 +003667 +003668 +003669 +003670 +003671 +003672 +003673 +003674 +003675 +003676 +003677 +003678 +003679 +003680 +003681 +003682 +003683 +003684 +003685 +003686 +003687 +003688 +003689 +003690 +003691 +003692 +003693 +003694 +003695 +003696 +003697 +003698 +003699 +003700 +003701 +003702 +003703 +003704 +003705 +003706 +003707 +003708 +003709 +003710 +003711 +003712 +003713 +003714 +003715 +003716 +003717 +003718 +003719 +003720 +003721 +003722 +003723 +003724 +003725 +003726 +003727 +003728 +003729 +003730 +003731 +003732 +003733 +003734 +003735 +003736 +003737 +003738 +003739 +003740 +003741 +003742 +003743 +003744 +003745 +003746 +003747 
+003748 +003749 +003750 +003751 +003752 +003753 +003754 +003755 +003756 +003757 +003758 +003759 +003760 +003761 +003762 +003763 +003764 +003765 +003766 +003767 +003768 +003769 +003770 +003771 +003772 +003773 +003774 +003775 +003776 +003777 +003778 +003779 +003780 +003781 +003782 +003783 +003784 +003785 +003786 +003787 +003788 +003789 +003790 +003791 +003792 +003793 +003794 +003795 +003796 +003797 +003798 +003799 +003800 +003801 +003802 +003803 +003804 +003805 +003806 +003807 +003808 +003809 +003810 +003811 +003812 +003813 +003814 +003815 +003816 +003817 +003818 +003819 +003820 +003821 +003822 +003823 +003824 +003825 +003826 +003827 +003828 +003829 +003830 +003831 +003832 +003833 +003834 +003835 +003836 +003837 +003838 +003839 +003840 +003841 +003842 +003843 +003844 +003845 +003846 +003847 +003848 +003849 +003850 +003851 +003852 +003853 +003854 +003855 +003856 +003857 +003858 +003859 +003860 +003861 +003862 +003863 +003864 +003865 +003866 +003867 +003868 +003869 +003870 +003871 +003872 +003873 +003874 +003875 +003876 +003877 +003878 +003879 +003880 +003881 +003882 +003883 +003884 +003885 +003886 +003887 +003888 +003889 +003890 +003891 +003892 +003893 +003894 +003895 +003896 +003897 +003898 +003899 +003900 +003901 +003902 +003903 +003904 +003905 +003906 +003907 +003908 +003909 +003910 +003911 +003912 +003913 +003914 +003915 +003916 +003917 +003918 +003919 +003920 +003921 +003922 +003923 +003924 +003925 +003926 +003927 +003928 +003929 +003930 +003931 +003932 +003933 +003934 +003935 +003936 +003937 +003938 +003939 +003940 +003941 +003942 +003943 +003944 +003945 +003946 +003947 +003948 +003949 +003950 +003951 +003952 +003953 +003954 +003955 +003956 +003957 +003958 +003959 +003960 +003961 +003962 +003963 +003964 +003965 +003966 +003967 +003968 +003969 +003970 +003971 +003972 +003973 +003974 +003975 +003976 +003977 +003978 +003979 +003980 +003981 +003982 +003983 +003984 +003985 +003986 +003987 +003988 +003989 +003990 +003991 +003992 +003993 +003994 +003995 +003996 +003997 +003998 +003999 +004000 +004001 +004002 +004003 +004004 +004005 +004006 +004007 +004008 +004009 +004010 +004011 +004012 +004013 +004014 +004015 +004016 +004017 +004018 +004019 +004020 +004021 +004022 +004023 +004024 +004025 +004026 +004027 +004028 +004029 +004030 +004031 +004032 +004033 +004034 +004035 +004036 +004037 +004038 +004039 +004040 +004041 +004042 +004043 +004044 +004045 +004046 +004047 +004048 +004049 +004050 +004051 +004052 +004053 +004054 +004055 +004056 +004057 +004058 +004059 +004060 +004061 +004062 +004063 +004064 +004065 +004066 +004067 +004068 +004069 +004070 +004071 +004072 +004073 +004074 +004075 +004076 +004077 +004078 +004079 +004080 +004081 +004082 +004083 +004084 +004085 +004086 +004087 +004088 +004089 +004090 +004091 +004092 +004093 +004094 +004095 +004096 +004097 +004098 +004099 +004100 +004101 +004102 +004103 +004104 +004105 +004106 +004107 +004108 +004109 +004110 +004111 +004112 +004113 +004114 +004115 +004116 +004117 +004118 +004119 +004120 +004121 +004122 +004123 +004124 +004125 +004126 +004127 +004128 +004129 +004130 +004131 +004132 +004133 +004134 +004135 +004136 +004137 +004138 +004139 +004140 +004141 +004142 +004143 +004144 +004145 +004146 +004147 +004148 +004149 +004150 +004151 +004152 +004153 +004154 +004155 +004156 +004157 +004158 +004159 +004160 +004161 +004162 +004163 +004164 +004165 +004166 +004167 +004168 +004169 +004170 +004171 +004172 +004173 +004174 +004175 +004176 +004177 +004178 +004179 +004180 +004181 +004182 +004183 +004184 +004185 +004186 +004187 +004188 +004189 +004190 +004191 
+004192 +004193 +004194 +004195 +004196 +004197 +004198 +004199 +004200 +004201 +004202 +004203 +004204 +004205 +004206 +004207 +004208 +004209 +004210 +004211 +004212 +004213 +004214 +004215 +004216 +004217 +004218 +004219 +004220 +004221 +004222 +004223 +004224 +004225 +004226 +004227 +004228 +004229 +004230 +004231 +004232 +004233 +004234 +004235 +004236 +004237 +004238 +004239 +004240 +004241 +004242 +004243 +004244 +004245 +004246 +004247 +004248 +004249 +004250 +004251 +004252 +004253 +004254 +004255 +004256 +004257 +004258 +004259 +004260 +004261 +004262 +004263 +004264 +004265 +004266 +004267 +004268 +004269 +004270 +004271 +004272 +004273 +004274 +004275 +004276 +004277 +004278 +004279 +004280 +004281 +004282 +004283 +004284 +004285 +004286 +004287 +004288 +004289 +004290 +004291 +004292 +004293 +004294 +004295 +004296 +004297 +004298 +004299 +004300 +004301 +004302 +004303 +004304 +004305 +004306 +004307 +004308 +004309 +004310 +004311 +004312 +004313 +004314 +004315 +004316 +004317 +004318 +004319 +004320 +004321 +004322 +004323 +004324 +004325 +004326 +004327 +004328 +004329 +004330 +004331 +004332 +004333 +004334 +004335 +004336 +004337 +004338 +004339 +004340 +004341 +004342 +004343 +004344 +004345 +004346 +004347 +004348 +004349 +004350 +004351 +004352 +004353 +004354 +004355 +004356 +004357 +004358 +004359 +004360 +004361 +004362 +004363 +004364 +004365 +004366 +004367 +004368 +004369 +004370 +004371 +004372 +004373 +004374 +004375 +004376 +004377 +004378 +004379 +004380 +004381 +004382 +004383 +004384 +004385 +004386 +004387 +004388 +004389 +004390 +004391 +004392 +004393 +004394 +004395 +004396 +004397 +004398 +004399 +004400 +004401 +004402 +004403 +004404 +004405 +004406 +004407 +004408 +004409 +004410 +004411 +004412 +004413 +004414 +004415 +004416 +004417 +004418 +004419 +004420 +004421 +004422 +004423 +004424 +004425 +004426 +004427 +004428 +004429 +004430 +004431 +004432 +004433 +004434 +004435 +004436 +004437 +004438 +004439 +004440 +004441 +004442 +004443 +004444 +004445 +004446 +004447 +004448 +004449 +004450 +004451 +004452 +004453 +004454 +004455 +004456 +004457 +004458 +004459 +004460 +004461 +004462 +004463 +004464 +004465 +004466 +004467 +004468 +004469 +004470 +004471 +004472 +004473 +004474 +004475 +004476 +004477 +004478 +004479 +004480 +004481 +004482 +004483 +004484 +004485 +004486 +004487 +004488 +004489 +004490 +004491 +004492 +004493 +004494 +004495 +004496 +004497 +004498 +004499 +004500 +004501 +004502 +004503 +004504 +004505 +004506 +004507 +004508 +004509 +004510 +004511 +004512 +004513 +004514 +004515 +004516 +004517 +004518 +004519 +004520 +004521 +004522 +004523 +004524 +004525 +004526 +004527 +004528 +004529 +004530 +004531 +004532 +004533 +004534 +004535 +004536 +004537 +004538 +004539 +004540 +004541 +004542 +004543 +004544 +004545 +004546 +004547 +004548 +004549 +004550 +004551 +004552 +004553 +004554 +004555 +004556 +004557 +004558 +004559 +004560 +004561 +004562 +004563 +004564 +004565 +004566 +004567 +004568 +004569 +004570 +004571 +004572 +004573 +004574 +004575 +004576 +004577 +004578 +004579 +004580 +004581 +004582 +004583 +004584 +004585 +004586 +004587 +004588 +004589 +004590 +004591 +004592 +004593 +004594 +004595 +004596 +004597 +004598 +004599 +004600 +004601 +004602 +004603 +004604 +004605 +004606 +004607 +004608 +004609 +004610 +004611 +004612 +004613 +004614 +004615 +004616 +004617 +004618 +004619 +004620 +004621 +004622 +004623 +004624 +004625 +004626 +004627 +004628 +004629 +004630 +004631 +004632 +004633 +004634 +004635 
+004636 +004637 +004638 +004639 +004640 +004641 +004642 +004643 +004644 +004645 +004646 +004647 +004648 +004649 +004650 +004651 +004652 +004653 +004654 +004655 +004656 +004657 +004658 +004659 +004660 +004661 +004662 +004663 +004664 +004665 +004666 +004667 +004668 +004669 +004670 +004671 +004672 +004673 +004674 +004675 +004676 +004677 +004678 +004679 +004680 +004681 +004682 +004683 +004684 +004685 +004686 +004687 +004688 +004689 +004690 +004691 +004692 +004693 +004694 +004695 +004696 +004697 +004698 +004699 +004700 +004701 +004702 +004703 +004704 +004705 +004706 +004707 +004708 +004709 +004710 +004711 +004712 +004713 +004714 +004715 +004716 +004717 +004718 +004719 +004720 +004721 +004722 +004723 +004724 +004725 +004726 +004727 +004728 +004729 +004730 +004731 +004732 +004733 +004734 +004735 +004736 +004737 +004738 +004739 +004740 +004741 +004742 +004743 +004744 +004745 +004746 +004747 +004748 +004749 +004750 +004751 +004752 +004753 +004754 +004755 +004756 +004757 +004758 +004759 +004760 +004761 +004762 +004763 +004764 +004765 +004766 +004767 +004768 +004769 +004770 +004771 +004772 +004773 +004774 +004775 +004776 +004777 +004778 +004779 +004780 +004781 +004782 +004783 +004784 +004785 +004786 +004787 +004788 +004789 +004790 +004791 +004792 +004793 +004794 +004795 +004796 +004797 +004798 +004799 +004800 +004801 +004802 +004803 +004804 +004805 +004806 +004807 +004808 +004809 +004810 +004811 +004812 +004813 +004814 +004815 +004816 +004817 +004818 +004819 +004820 +004821 +004822 +004823 +004824 +004825 +004826 +004827 +004828 +004829 +004830 +004831 +004832 +004833 +004834 +004835 +004836 +004837 +004838 +004839 +004840 +004841 +004842 +004843 +004844 +004845 +004846 +004847 +004848 +004849 +004850 +004851 +004852 +004853 +004854 +004855 +004856 +004857 +004858 +004859 +004860 +004861 +004862 +004863 +004864 +004865 +004866 +004867 +004868 +004869 +004870 +004871 +004872 +004873 +004874 +004875 +004876 +004877 +004878 +004879 +004880 +004881 +004882 +004883 +004884 +004885 +004886 +004887 +004888 +004889 +004890 +004891 +004892 +004893 +004894 +004895 +004896 +004897 +004898 +004899 +004900 +004901 +004902 +004903 +004904 +004905 +004906 +004907 +004908 +004909 +004910 +004911 +004912 +004913 +004914 +004915 +004916 +004917 +004918 +004919 +004920 +004921 +004922 +004923 +004924 +004925 +004926 +004927 +004928 +004929 +004930 +004931 +004932 +004933 +004934 +004935 +004936 +004937 +004938 +004939 +004940 +004941 +004942 +004943 +004944 +004945 +004946 +004947 +004948 +004949 +004950 +004951 +004952 +004953 +004954 +004955 +004956 +004957 +004958 +004959 +004960 +004961 +004962 +004963 +004964 +004965 +004966 +004967 +004968 +004969 +004970 +004971 +004972 +004973 +004974 +004975 +004976 +004977 +004978 +004979 +004980 +004981 +004982 +004983 +004984 +004985 +004986 +004987 +004988 +004989 +004990 +004991 +004992 +004993 +004994 +004995 +004996 +004997 +004998 +004999 +005000 +005001 +005002 +005003 +005004 +005005 +005006 +005007 +005008 +005009 +005010 +005011 +005012 +005013 +005014 +005015 +005016 +005017 +005018 +005019 +005020 +005021 +005022 +005023 +005024 +005025 +005026 +005027 +005028 +005029 +005030 +005031 +005032 +005033 +005034 +005035 +005036 +005037 +005038 +005039 +005040 +005041 +005042 +005043 +005044 +005045 +005046 +005047 +005048 +005049 +005050 +005051 +005052 +005053 +005054 +005055 +005056 +005057 +005058 +005059 +005060 +005061 +005062 +005063 +005064 +005065 +005066 +005067 +005068 +005069 +005070 +005071 +005072 +005073 +005074 +005075 +005076 +005077 +005078 +005079 
+005080 +005081 +005082 +005083 +005084 +005085 +005086 +005087 +005088 +005089 +005090 +005091 +005092 +005093 +005094 +005095 +005096 +005097 +005098 +005099 +005100 +005101 +005102 +005103 +005104 +005105 +005106 +005107 +005108 +005109 +005110 +005111 +005112 +005113 +005114 +005115 +005116 +005117 +005118 +005119 +005120 +005121 +005122 +005123 +005124 +005125 +005126 +005127 +005128 +005129 +005130 +005131 +005132 +005133 +005134 +005135 +005136 +005137 +005138 +005139 +005140 +005141 +005142 +005143 +005144 +005145 +005146 +005147 +005148 +005149 +005150 +005151 +005152 +005153 +005154 +005155 +005156 +005157 +005158 +005159 +005160 +005161 +005162 +005163 +005164 +005165 +005166 +005167 +005168 +005169 +005170 +005171 +005172 +005173 +005174 +005175 +005176 +005177 +005178 +005179 +005180 +005181 +005182 +005183 +005184 +005185 +005186 +005187 +005188 +005189 +005190 +005191 +005192 +005193 +005194 +005195 +005196 +005197 +005198 +005199 +005200 +005201 +005202 +005203 +005204 +005205 +005206 +005207 +005208 +005209 +005210 +005211 +005212 +005213 +005214 +005215 +005216 +005217 +005218 +005219 +005220 +005221 +005222 +005223 +005224 +005225 +005226 +005227 +005228 +005229 +005230 +005231 +005232 +005233 +005234 +005235 +005236 +005237 +005238 +005239 +005240 +005241 +005242 +005243 +005244 +005245 +005246 +005247 +005248 +005249 +005250 +005251 +005252 +005253 +005254 +005255 +005256 +005257 +005258 +005259 +005260 +005261 +005262 +005263 +005264 +005265 +005266 +005267 +005268 +005269 +005270 +005271 +005272 +005273 +005274 +005275 +005276 +005277 +005278 +005279 +005280 +005281 +005282 +005283 +005284 +005285 +005286 +005287 +005288 +005289 +005290 +005291 +005292 +005293 +005294 +005295 +005296 +005297 +005298 +005299 +005300 +005301 +005302 +005303 +005304 +005305 +005306 +005307 +005308 +005309 +005310 +005311 +005312 +005313 +005314 +005315 +005316 +005317 +005318 +005319 +005320 +005321 +005322 +005323 +005324 +005325 +005326 +005327 +005328 +005329 +005330 +005331 +005332 +005333 +005334 +005335 +005336 +005337 +005338 +005339 +005340 +005341 +005342 +005343 +005344 +005345 +005346 +005347 +005348 +005349 +005350 +005351 +005352 +005353 +005354 +005355 +005356 +005357 +005358 +005359 +005360 +005361 +005362 +005363 +005364 +005365 +005366 +005367 +005368 +005369 +005370 +005371 +005372 +005373 +005374 +005375 +005376 +005377 +005378 +005379 +005380 +005381 +005382 +005383 +005384 +005385 +005386 +005387 +005388 +005389 +005390 +005391 +005392 +005393 +005394 +005395 +005396 +005397 +005398 +005399 +005400 +005401 +005402 +005403 +005404 +005405 +005406 +005407 +005408 +005409 +005410 +005411 +005412 +005413 +005414 +005415 +005416 +005417 +005418 +005419 +005420 +005421 +005422 +005423 +005424 +005425 +005426 +005427 +005428 +005429 +005430 +005431 +005432 +005433 +005434 +005435 +005436 +005437 +005438 +005439 +005440 +005441 +005442 +005443 +005444 +005445 +005446 +005447 +005448 +005449 +005450 +005451 +005452 +005453 +005454 +005455 +005456 +005457 +005458 +005459 +005460 +005461 +005462 +005463 +005464 +005465 +005466 +005467 +005468 +005469 +005470 +005471 +005472 +005473 +005474 +005475 +005476 +005477 +005478 +005479 +005480 +005481 +005482 +005483 +005484 +005485 +005486 +005487 +005488 +005489 +005490 +005491 +005492 +005493 +005494 +005495 +005496 +005497 +005498 +005499 +005500 +005501 +005502 +005503 +005504 +005505 +005506 +005507 +005508 +005509 +005510 +005511 +005512 +005513 +005514 +005515 +005516 +005517 +005518 +005519 +005520 +005521 +005522 +005523 
+005524 +005525 +005526 +005527 +005528 +005529 +005530 +005531 +005532 +005533 +005534 +005535 +005536 +005537 +005538 +005539 +005540 +005541 +005542 +005543 +005544 +005545 +005546 +005547 +005548 +005549 +005550 +005551 +005552 +005553 +005554 +005555 +005556 +005557 +005558 +005559 +005560 +005561 +005562 +005563 +005564 +005565 +005566 +005567 +005568 +005569 +005570 +005571 +005572 +005573 +005574 +005575 +005576 +005577 +005578 +005579 +005580 +005581 +005582 +005583 +005584 +005585 +005586 +005587 +005588 +005589 +005590 +005591 +005592 +005593 +005594 +005595 +005596 +005597 +005598 +005599 +005600 +005601 +005602 +005603 +005604 +005605 +005606 +005607 +005608 +005609 +005610 +005611 +005612 +005613 +005614 +005615 +005616 +005617 +005618 +005619 +005620 +005621 +005622 +005623 +005624 +005625 +005626 +005627 +005628 +005629 +005630 +005631 +005632 +005633 +005634 +005635 +005636 +005637 +005638 +005639 +005640 +005641 +005642 +005643 +005644 +005645 +005646 +005647 +005648 +005649 +005650 +005651 +005652 +005653 +005654 +005655 +005656 +005657 +005658 +005659 +005660 +005661 +005662 +005663 +005664 +005665 +005666 +005667 +005668 +005669 +005670 +005671 +005672 +005673 +005674 +005675 +005676 +005677 +005678 +005679 +005680 +005681 +005682 +005683 +005684 +005685 +005686 +005687 +005688 +005689 +005690 +005691 +005692 +005693 +005694 +005695 +005696 +005697 +005698 +005699 +005700 +005701 +005702 +005703 +005704 +005705 +005706 +005707 +005708 +005709 +005710 +005711 +005712 +005713 +005714 +005715 +005716 +005717 +005718 +005719 +005720 +005721 +005722 +005723 +005724 +005725 +005726 +005727 +005728 +005729 +005730 +005731 +005732 +005733 +005734 +005735 +005736 +005737 +005738 +005739 +005740 +005741 +005742 +005743 +005744 +005745 +005746 +005747 +005748 +005749 +005750 +005751 +005752 +005753 +005754 +005755 +005756 +005757 +005758 +005759 +005760 +005761 +005762 +005763 +005764 +005765 +005766 +005767 +005768 +005769 +005770 +005771 +005772 +005773 +005774 +005775 +005776 +005777 +005778 +005779 +005780 +005781 +005782 +005783 +005784 +005785 +005786 +005787 +005788 +005789 +005790 +005791 +005792 +005793 +005794 +005795 +005796 +005797 +005798 +005799 +005800 +005801 +005802 +005803 +005804 +005805 +005806 +005807 +005808 +005809 +005810 +005811 +005812 +005813 +005814 +005815 +005816 +005817 +005818 +005819 +005820 +005821 +005822 +005823 +005824 +005825 +005826 +005827 +005828 +005829 +005830 +005831 +005832 +005833 +005834 +005835 +005836 +005837 +005838 +005839 +005840 +005841 +005842 +005843 +005844 +005845 +005846 +005847 +005848 +005849 +005850 +005851 +005852 +005853 +005854 +005855 +005856 +005857 +005858 +005859 +005860 +005861 +005862 +005863 +005864 +005865 +005866 +005867 +005868 +005869 +005870 +005871 +005872 +005873 +005874 +005875 +005876 +005877 +005878 +005879 +005880 +005881 +005882 +005883 +005884 +005885 +005886 +005887 +005888 +005889 +005890 +005891 +005892 +005893 +005894 +005895 +005896 +005897 +005898 +005899 +005900 +005901 +005902 +005903 +005904 +005905 +005906 +005907 +005908 +005909 +005910 +005911 +005912 +005913 +005914 +005915 +005916 +005917 +005918 +005919 +005920 +005921 +005922 +005923 +005924 +005925 +005926 +005927 +005928 +005929 +005930 +005931 +005932 +005933 +005934 +005935 +005936 +005937 +005938 +005939 +005940 +005941 +005942 +005943 +005944 +005945 +005946 +005947 +005948 +005949 +005950 +005951 +005952 +005953 +005954 +005955 +005956 +005957 +005958 +005959 +005960 +005961 +005962 +005963 +005964 +005965 +005966 +005967 
+005968 +005969 +005970 +005971 +005972 +005973 +005974 +005975 +005976 +005977 +005978 +005979 +005980 +005981 +005982 +005983 +005984 +005985 +005986 +005987 +005988 +005989 +005990 +005991 +005992 +005993 +005994 +005995 +005996 +005997 +005998 +005999 +006000 +006001 +006002 +006003 +006004 +006005 +006006 +006007 +006008 +006009 +006010 +006011 +006012 +006013 +006014 +006015 +006016 +006017 +006018 +006019 +006020 +006021 +006022 +006023 +006024 +006025 +006026 +006027 +006028 +006029 +006030 +006031 +006032 +006033 +006034 +006035 +006036 +006037 +006038 +006039 +006040 +006041 +006042 +006043 +006044 +006045 +006046 +006047 +006048 +006049 +006050 +006051 +006052 +006053 +006054 +006055 +006056 +006057 +006058 +006059 +006060 +006061 +006062 +006063 +006064 +006065 +006066 +006067 +006068 +006069 +006070 +006071 +006072 +006073 +006074 +006075 +006076 +006077 +006078 +006079 +006080 +006081 +006082 +006083 +006084 +006085 +006086 +006087 +006088 +006089 +006090 +006091 +006092 +006093 +006094 +006095 +006096 +006097 +006098 +006099 +006100 +006101 +006102 +006103 +006104 +006105 +006106 +006107 +006108 +006109 +006110 +006111 +006112 +006113 +006114 +006115 +006116 +006117 +006118 +006119 +006120 +006121 +006122 +006123 +006124 +006125 +006126 +006127 +006128 +006129 +006130 +006131 +006132 +006133 +006134 +006135 +006136 +006137 +006138 +006139 +006140 +006141 +006142 +006143 +006144 +006145 +006146 +006147 +006148 +006149 +006150 +006151 +006152 +006153 +006154 +006155 +006156 +006157 +006158 +006159 +006160 +006161 +006162 +006163 +006164 +006165 +006166 +006167 +006168 +006169 +006170 +006171 +006172 +006173 +006174 +006175 +006176 +006177 +006178 +006179 +006180 +006181 +006182 +006183 +006184 +006185 +006186 +006187 +006188 +006189 +006190 +006191 +006192 +006193 +006194 +006195 +006196 +006197 +006198 +006199 +006200 +006201 +006202 +006203 +006204 +006205 +006206 +006207 +006208 +006209 +006210 +006211 +006212 +006213 +006214 +006215 +006216 +006217 +006218 +006219 +006220 +006221 +006222 +006223 +006224 +006225 +006226 +006227 +006228 +006229 +006230 +006231 +006232 +006233 +006234 +006235 +006236 +006237 +006238 +006239 +006240 +006241 +006242 +006243 +006244 +006245 +006246 +006247 +006248 +006249 +006250 +006251 +006252 +006253 +006254 +006255 +006256 +006257 +006258 +006259 +006260 +006261 +006262 +006263 +006264 +006265 +006266 +006267 +006268 +006269 +006270 +006271 +006272 +006273 +006274 +006275 +006276 +006277 +006278 +006279 +006280 +006281 +006282 +006283 +006284 +006285 +006286 +006287 +006288 +006289 +006290 +006291 +006292 +006293 +006294 +006295 +006296 +006297 +006298 +006299 +006300 +006301 +006302 +006303 +006304 +006305 +006306 +006307 +006308 +006309 +006310 +006311 +006312 +006313 +006314 +006315 +006316 +006317 +006318 +006319 +006320 +006321 +006322 +006323 +006324 +006325 +006326 +006327 +006328 +006329 +006330 +006331 +006332 +006333 +006334 +006335 +006336 +006337 +006338 +006339 +006340 +006341 +006342 +006343 +006344 +006345 +006346 +006347 +006348 +006349 +006350 +006351 +006352 +006353 +006354 +006355 +006356 +006357 +006358 +006359 +006360 +006361 +006362 +006363 +006364 +006365 +006366 +006367 +006368 +006369 +006370 +006371 +006372 +006373 +006374 +006375 +006376 +006377 +006378 +006379 +006380 +006381 +006382 +006383 +006384 +006385 +006386 +006387 +006388 +006389 +006390 +006391 +006392 +006393 +006394 +006395 +006396 +006397 +006398 +006399 +006400 +006401 +006402 +006403 +006404 +006405 +006406 +006407 +006408 +006409 +006410 +006411 
+006412 +006413 +006414 +006415 +006416 +006417 +006418 +006419 +006420 +006421 +006422 +006423 +006424 +006425 +006426 +006427 +006428 +006429 +006430 +006431 +006432 +006433 +006434 +006435 +006436 +006437 +006438 +006439 +006440 +006441 +006442 +006443 +006444 +006445 +006446 +006447 +006448 +006449 +006450 +006451 +006452 +006453 +006454 +006455 +006456 +006457 +006458 +006459 +006460 +006461 +006462 +006463 +006464 +006465 +006466 +006467 +006468 +006469 +006470 +006471 +006472 +006473 +006474 +006475 +006476 +006477 +006478 +006479 +006480 +006481 +006482 +006483 +006484 +006485 +006486 +006487 +006488 +006489 +006490 +006491 +006492 +006493 +006494 +006495 +006496 +006497 +006498 +006499 +006500 +006501 +006502 +006503 +006504 +006505 +006506 +006507 +006508 +006509 +006510 +006511 +006512 +006513 +006514 +006515 +006516 +006517 +006518 +006519 +006520 +006521 +006522 +006523 +006524 +006525 +006526 +006527 +006528 +006529 +006530 +006531 +006532 +006533 +006534 +006535 +006536 +006537 +006538 +006539 +006540 +006541 +006542 +006543 +006544 +006545 +006546 +006547 +006548 +006549 +006550 +006551 +006552 +006553 +006554 +006555 +006556 +006557 +006558 +006559 +006560 +006561 +006562 +006563 +006564 +006565 +006566 +006567 +006568 +006569 +006570 +006571 +006572 +006573 +006574 +006575 +006576 +006577 +006578 +006579 +006580 +006581 +006582 +006583 +006584 +006585 +006586 +006587 +006588 +006589 +006590 +006591 +006592 +006593 +006594 +006595 +006596 +006597 +006598 +006599 +006600 +006601 +006602 +006603 +006604 +006605 +006606 +006607 +006608 +006609 +006610 +006611 +006612 +006613 +006614 +006615 +006616 +006617 +006618 +006619 +006620 +006621 +006622 +006623 +006624 +006625 +006626 +006627 +006628 +006629 +006630 +006631 +006632 +006633 +006634 +006635 +006636 +006637 +006638 +006639 +006640 +006641 +006642 +006643 +006644 +006645 +006646 +006647 +006648 +006649 +006650 +006651 +006652 +006653 +006654 +006655 +006656 +006657 +006658 +006659 +006660 +006661 +006662 +006663 +006664 +006665 +006666 +006667 +006668 +006669 +006670 +006671 +006672 +006673 +006674 +006675 +006676 +006677 +006678 +006679 +006680 +006681 +006682 +006683 +006684 +006685 +006686 +006687 +006688 +006689 +006690 +006691 +006692 +006693 +006694 +006695 +006696 +006697 +006698 +006699 +006700 +006701 +006702 +006703 +006704 +006705 +006706 +006707 +006708 +006709 +006710 +006711 +006712 +006713 +006714 +006715 +006716 +006717 +006718 +006719 +006720 +006721 +006722 +006723 +006724 +006725 +006726 +006727 +006728 +006729 +006730 +006731 +006732 +006733 +006734 +006735 +006736 +006737 +006738 +006739 +006740 +006741 +006742 +006743 +006744 +006745 +006746 +006747 +006748 +006749 +006750 +006751 +006752 +006753 +006754 +006755 +006756 +006757 +006758 +006759 +006760 +006761 +006762 +006763 +006764 +006765 +006766 +006767 +006768 +006769 +006770 +006771 +006772 +006773 +006774 +006775 +006776 +006777 +006778 +006779 +006780 +006781 +006782 +006783 +006784 +006785 +006786 +006787 +006788 +006789 +006790 +006791 +006792 +006793 +006794 +006795 +006796 +006797 +006798 +006799 +006800 +006801 +006802 +006803 +006804 +006805 +006806 +006807 +006808 +006809 +006810 +006811 +006812 +006813 +006814 +006815 +006816 +006817 +006818 +006819 +006820 +006821 +006822 +006823 +006824 +006825 +006826 +006827 +006828 +006829 +006830 +006831 +006832 +006833 +006834 +006835 +006836 +006837 +006838 +006839 +006840 +006841 +006842 +006843 +006844 +006845 +006846 +006847 +006848 +006849 +006850 +006851 +006852 +006853 +006854 +006855 
+006856 +006857 +006858 +006859 +006860 +006861 +006862 +006863 +006864 +006865 +006866 +006867 +006868 +006869 +006870 +006871 +006872 +006873 +006874 +006875 +006876 +006877 +006878 +006879 +006880 +006881 +006882 +006883 +006884 +006885 +006886 +006887 +006888 +006889 +006890 +006891 +006892 +006893 +006894 +006895 +006896 +006897 +006898 +006899 +006900 +006901 +006902 +006903 +006904 +006905 +006906 +006907 +006908 +006909 +006910 +006911 +006912 +006913 +006914 +006915 +006916 +006917 +006918 +006919 +006920 +006921 +006922 +006923 +006924 +006925 +006926 +006927 +006928 +006929 +006930 +006931 +006932 +006933 +006934 +006935 +006936 +006937 +006938 +006939 +006940 +006941 +006942 +006943 +006944 +006945 +006946 +006947 +006948 +006949 +006950 +006951 +006952 +006953 +006954 +006955 +006956 +006957 +006958 +006959 +006960 +006961 +006962 +006963 +006964 +006965 +006966 +006967 +006968 +006969 +006970 +006971 +006972 +006973 +006974 +006975 +006976 +006977 +006978 +006979 +006980 +006981 +006982 +006983 +006984 +006985 +006986 +006987 +006988 +006989 +006990 +006991 +006992 +006993 +006994 +006995 +006996 +006997 +006998 +006999 +007000 +007001 +007002 +007003 +007004 +007005 +007006 +007007 +007008 +007009 +007010 +007011 +007012 +007013 +007014 +007015 +007016 +007017 +007018 +007019 +007020 +007021 +007022 +007023 +007024 +007025 +007026 +007027 +007028 +007029 +007030 +007031 +007032 +007033 +007034 +007035 +007036 +007037 +007038 +007039 +007040 +007041 +007042 +007043 +007044 +007045 +007046 +007047 +007048 +007049 +007050 +007051 +007052 +007053 +007054 +007055 +007056 +007057 +007058 +007059 +007060 +007061 +007062 +007063 +007064 +007065 +007066 +007067 +007068 +007069 +007070 +007071 +007072 +007073 +007074 +007075 +007076 +007077 +007078 +007079 +007080 +007081 +007082 +007083 +007084 +007085 +007086 +007087 +007088 +007089 +007090 +007091 +007092 +007093 +007094 +007095 +007096 +007097 +007098 +007099 +007100 +007101 +007102 +007103 +007104 +007105 +007106 +007107 +007108 +007109 +007110 +007111 +007112 +007113 +007114 +007115 +007116 +007117 +007118 +007119 +007120 +007121 +007122 +007123 +007124 +007125 +007126 +007127 +007128 +007129 +007130 +007131 +007132 +007133 +007134 +007135 +007136 +007137 +007138 +007139 +007140 +007141 +007142 +007143 +007144 +007145 +007146 +007147 +007148 +007149 +007150 +007151 +007152 +007153 +007154 +007155 +007156 +007157 +007158 +007159 +007160 +007161 +007162 +007163 +007164 +007165 +007166 +007167 +007168 +007169 +007170 +007171 +007172 +007173 +007174 +007175 +007176 +007177 +007178 +007179 +007180 +007181 +007182 +007183 +007184 +007185 +007186 +007187 +007188 +007189 +007190 +007191 +007192 +007193 +007194 +007195 +007196 +007197 +007198 +007199 +007200 +007201 +007202 +007203 +007204 +007205 +007206 +007207 +007208 +007209 +007210 +007211 +007212 +007213 +007214 +007215 +007216 +007217 +007218 +007219 +007220 +007221 +007222 +007223 +007224 +007225 +007226 +007227 +007228 +007229 +007230 +007231 +007232 +007233 +007234 +007235 +007236 +007237 +007238 +007239 +007240 +007241 +007242 +007243 +007244 +007245 +007246 +007247 +007248 +007249 +007250 +007251 +007252 +007253 +007254 +007255 +007256 +007257 +007258 +007259 +007260 +007261 +007262 +007263 +007264 +007265 +007266 +007267 +007268 +007269 +007270 +007271 +007272 +007273 +007274 +007275 +007276 +007277 +007278 +007279 +007280 +007281 +007282 +007283 +007284 +007285 +007286 +007287 +007288 +007289 +007290 +007291 +007292 +007293 +007294 +007295 +007296 +007297 +007298 +007299 
+007300 +007301 +007302 +007303 +007304 +007305 +007306 +007307 +007308 +007309 +007310 +007311 +007312 +007313 +007314 +007315 +007316 +007317 +007318 +007319 +007320 +007321 +007322 +007323 +007324 +007325 +007326 +007327 +007328 +007329 +007330 +007331 +007332 +007333 +007334 +007335 +007336 +007337 +007338 +007339 +007340 +007341 +007342 +007343 +007344 +007345 +007346 +007347 +007348 +007349 +007350 +007351 +007352 +007353 +007354 +007355 +007356 +007357 +007358 +007359 +007360 +007361 +007362 +007363 +007364 +007365 +007366 +007367 +007368 +007369 +007370 +007371 +007372 +007373 +007374 +007375 +007376 +007377 +007378 +007379 +007380 +007381 +007382 +007383 +007384 +007385 +007386 +007387 +007388 +007389 +007390 +007391 +007392 +007393 +007394 +007395 +007396 +007397 +007398 +007399 +007400 +007401 +007402 +007403 +007404 +007405 +007406 +007407 +007408 +007409 +007410 +007411 +007412 +007413 +007414 +007415 +007416 +007417 +007418 +007419 +007420 +007421 +007422 +007423 +007424 +007425 +007426 +007427 +007428 +007429 +007430 +007431 +007432 +007433 +007434 +007435 +007436 +007437 +007438 +007439 +007440 +007441 +007442 +007443 +007444 +007445 +007446 +007447 +007448 +007449 +007450 +007451 +007452 +007453 +007454 +007455 +007456 +007457 +007458 +007459 +007460 +007461 +007462 +007463 +007464 +007465 +007466 +007467 +007468 +007469 +007470 +007471 +007472 +007473 +007474 +007475 +007476 +007477 +007478 +007479 +007480 +007481 +007482 +007483 +007484 +007485 +007486 +007487 +007488 +007489 +007490 +007491 +007492 +007493 +007494 +007495 +007496 +007497 +007498 +007499 +007500 +007501 +007502 +007503 +007504 +007505 +007506 +007507 +007508 +007509 +007510 +007511 +007512 +007513 +007514 +007515 +007516 +007517 \ No newline at end of file diff --git a/data/KITTI/ImageSets/train.txt b/data/KITTI/ImageSets/train.txt new file mode 100644 index 0000000..505b1e2 --- /dev/null +++ b/data/KITTI/ImageSets/train.txt @@ -0,0 +1,3712 @@ +000000 +000003 +000007 +000009 +000010 +000011 +000012 +000013 +000014 +000016 +000017 +000018 +000022 +000026 +000029 +000030 +000032 +000034 +000036 +000038 +000041 +000043 +000044 +000045 +000046 +000049 +000051 +000054 +000055 +000056 +000057 +000060 +000064 +000067 +000068 +000069 +000070 +000071 +000072 +000073 +000074 +000075 +000079 +000080 +000082 +000083 +000084 +000085 +000086 +000087 +000088 +000091 +000092 +000095 +000096 +000097 +000099 +000100 +000101 +000103 +000105 +000109 +000110 +000111 +000112 +000113 +000114 +000115 +000119 +000120 +000121 +000123 +000125 +000127 +000129 +000130 +000131 +000133 +000136 +000138 +000141 +000142 +000144 +000145 +000146 +000148 +000149 +000150 +000154 +000155 +000157 +000158 +000160 +000162 +000163 +000164 +000165 +000166 +000171 +000172 +000176 +000177 +000178 +000179 +000180 +000184 +000185 +000189 +000193 +000198 +000200 +000202 +000205 +000206 +000208 +000209 +000210 +000214 +000215 +000217 +000219 +000220 +000221 +000222 +000225 +000227 +000228 +000232 +000233 +000238 +000240 +000241 +000243 +000244 +000245 +000253 +000254 +000255 +000256 +000257 +000258 +000259 +000261 +000264 +000267 +000271 +000274 +000275 +000276 +000277 +000280 +000282 +000285 +000286 +000287 +000288 +000292 +000294 +000295 +000296 +000298 +000299 +000300 +000303 +000304 +000306 +000310 +000313 +000316 +000317 +000318 +000322 +000325 +000326 +000330 +000331 +000334 +000337 +000338 +000339 +000342 +000344 +000348 +000349 +000353 +000358 +000363 +000364 +000367 +000368 +000371 +000374 +000375 +000380 +000384 +000387 +000389 +000390 +000400 
+000405 +000406 +000410 +000411 +000412 +000416 +000417 +000418 +000421 +000423 +000424 +000425 +000426 +000431 +000432 +000433 +000434 +000435 +000438 +000439 +000441 +000442 +000444 +000445 +000447 +000449 +000456 +000458 +000460 +000461 +000462 +000464 +000465 +000466 +000467 +000470 +000471 +000474 +000482 +000483 +000484 +000487 +000488 +000490 +000497 +000500 +000501 +000502 +000505 +000507 +000511 +000513 +000514 +000516 +000518 +000520 +000522 +000523 +000525 +000526 +000529 +000531 +000532 +000534 +000535 +000537 +000538 +000539 +000540 +000544 +000547 +000549 +000550 +000552 +000553 +000556 +000557 +000562 +000563 +000565 +000570 +000573 +000574 +000575 +000576 +000577 +000578 +000579 +000580 +000582 +000584 +000585 +000586 +000587 +000592 +000593 +000594 +000596 +000597 +000598 +000599 +000602 +000603 +000605 +000606 +000607 +000608 +000609 +000616 +000617 +000621 +000622 +000623 +000627 +000629 +000631 +000632 +000633 +000637 +000638 +000640 +000641 +000643 +000646 +000649 +000651 +000652 +000653 +000654 +000656 +000661 +000662 +000663 +000664 +000665 +000666 +000668 +000671 +000672 +000673 +000675 +000676 +000678 +000680 +000681 +000685 +000686 +000687 +000688 +000689 +000690 +000693 +000695 +000697 +000701 +000703 +000705 +000707 +000709 +000710 +000711 +000712 +000713 +000714 +000715 +000719 +000720 +000723 +000724 +000726 +000730 +000732 +000733 +000735 +000738 +000739 +000742 +000743 +000744 +000747 +000749 +000753 +000755 +000757 +000758 +000759 +000760 +000762 +000763 +000764 +000770 +000775 +000776 +000777 +000780 +000781 +000783 +000784 +000785 +000786 +000787 +000788 +000789 +000791 +000793 +000794 +000796 +000797 +000799 +000808 +000813 +000814 +000815 +000817 +000818 +000820 +000821 +000822 +000824 +000825 +000827 +000828 +000829 +000830 +000832 +000833 +000834 +000835 +000836 +000839 +000842 +000845 +000846 +000851 +000853 +000855 +000856 +000857 +000858 +000860 +000861 +000864 +000865 +000866 +000867 +000868 +000870 +000871 +000872 +000880 +000882 +000883 +000886 +000887 +000888 +000890 +000891 +000892 +000895 +000896 +000898 +000900 +000901 +000902 +000903 +000905 +000906 +000908 +000910 +000913 +000914 +000918 +000919 +000921 +000924 +000925 +000927 +000929 +000933 +000934 +000935 +000936 +000937 +000941 +000945 +000946 +000947 +000950 +000951 +000954 +000955 +000957 +000959 +000960 +000962 +000965 +000968 +000972 +000975 +000977 +000978 +000980 +000982 +000987 +000989 +000990 +000992 +000993 +000994 +000995 +000996 +000997 +000998 +001000 +001001 +001003 +001004 +001005 +001009 +001016 +001017 +001020 +001023 +001024 +001028 +001029 +001030 +001031 +001032 +001033 +001034 +001036 +001038 +001040 +001041 +001044 +001045 +001047 +001048 +001049 +001052 +001056 +001057 +001059 +001060 +001061 +001062 +001064 +001072 +001073 +001074 +001079 +001080 +001081 +001082 +001085 +001087 +001090 +001091 +001092 +001093 +001098 +001100 +001103 +001105 +001109 +001110 +001112 +001117 +001119 +001121 +001122 +001124 +001126 +001128 +001130 +001137 +001142 +001146 +001151 +001156 +001157 +001159 +001160 +001161 +001164 +001165 +001166 +001168 +001169 +001170 +001171 +001174 +001175 +001181 +001184 +001185 +001186 +001190 +001196 +001197 +001200 +001201 +001202 +001204 +001205 +001208 +001209 +001210 +001211 +001212 +001215 +001219 +001220 +001223 +001227 +001229 +001231 +001233 +001238 +001240 +001247 +001248 +001250 +001256 +001258 +001262 +001264 +001276 +001277 +001278 +001279 +001280 +001282 +001283 +001285 +001288 +001290 +001293 +001297 +001298 +001299 +001300 +001301 
+001302 +001309 +001310 +001311 +001312 +001313 +001315 +001316 +001319 +001320 +001321 +001322 +001323 +001324 +001325 +001326 +001327 +001328 +001335 +001338 +001340 +001341 +001343 +001348 +001349 +001351 +001354 +001357 +001358 +001360 +001361 +001362 +001364 +001366 +001367 +001368 +001369 +001370 +001371 +001373 +001378 +001379 +001383 +001385 +001390 +001392 +001393 +001394 +001396 +001399 +001400 +001401 +001402 +001403 +001404 +001405 +001406 +001408 +001409 +001413 +001414 +001417 +001418 +001420 +001422 +001423 +001425 +001426 +001428 +001429 +001430 +001433 +001434 +001436 +001440 +001444 +001447 +001449 +001452 +001453 +001454 +001455 +001456 +001457 +001459 +001460 +001462 +001464 +001465 +001467 +001468 +001470 +001472 +001473 +001474 +001475 +001476 +001479 +001482 +001483 +001484 +001486 +001490 +001491 +001492 +001493 +001494 +001496 +001498 +001499 +001500 +001503 +001504 +001505 +001506 +001509 +001510 +001512 +001515 +001518 +001519 +001520 +001523 +001529 +001530 +001531 +001532 +001534 +001539 +001540 +001541 +001543 +001544 +001548 +001550 +001551 +001553 +001554 +001556 +001558 +001559 +001561 +001563 +001566 +001568 +001570 +001571 +001572 +001575 +001578 +001580 +001581 +001584 +001593 +001595 +001598 +001599 +001601 +001604 +001607 +001608 +001609 +001611 +001612 +001614 +001618 +001620 +001622 +001623 +001624 +001626 +001628 +001630 +001632 +001636 +001637 +001638 +001639 +001641 +001642 +001644 +001646 +001648 +001649 +001651 +001652 +001653 +001655 +001657 +001659 +001661 +001663 +001668 +001669 +001671 +001672 +001673 +001674 +001676 +001677 +001678 +001679 +001681 +001685 +001686 +001687 +001688 +001690 +001691 +001692 +001695 +001696 +001698 +001700 +001703 +001708 +001715 +001716 +001720 +001723 +001724 +001725 +001728 +001730 +001731 +001734 +001735 +001736 +001737 +001738 +001739 +001743 +001744 +001747 +001748 +001753 +001754 +001756 +001757 +001759 +001760 +001761 +001763 +001766 +001767 +001769 +001770 +001773 +001775 +001777 +001779 +001784 +001785 +001788 +001789 +001790 +001791 +001792 +001793 +001796 +001798 +001799 +001803 +001805 +001806 +001809 +001810 +001811 +001812 +001815 +001816 +001819 +001821 +001826 +001827 +001829 +001830 +001832 +001833 +001834 +001836 +001837 +001838 +001839 +001841 +001842 +001843 +001845 +001847 +001849 +001850 +001857 +001860 +001864 +001865 +001866 +001870 +001871 +001873 +001874 +001876 +001879 +001882 +001883 +001889 +001891 +001894 +001895 +001896 +001899 +001901 +001902 +001903 +001906 +001907 +001908 +001910 +001911 +001912 +001913 +001914 +001915 +001916 +001917 +001918 +001921 +001922 +001930 +001935 +001938 +001939 +001944 +001947 +001948 +001949 +001950 +001951 +001953 +001955 +001956 +001957 +001958 +001961 +001962 +001963 +001964 +001965 +001968 +001970 +001971 +001973 +001974 +001975 +001976 +001981 +001987 +001988 +001990 +001992 +001993 +001994 +001998 +002003 +002005 +002006 +002007 +002009 +002015 +002016 +002018 +002020 +002023 +002024 +002026 +002030 +002031 +002032 +002033 +002039 +002040 +002041 +002047 +002051 +002053 +002055 +002059 +002060 +002061 +002063 +002064 +002065 +002066 +002067 +002069 +002070 +002072 +002077 +002080 +002083 +002084 +002088 +002090 +002092 +002095 +002096 +002097 +002098 +002099 +002104 +002105 +002106 +002109 +002110 +002114 +002116 +002117 +002119 +002122 +002125 +002126 +002129 +002132 +002133 +002134 +002141 +002143 +002144 +002145 +002146 +002147 +002148 +002149 +002150 +002154 +002155 +002156 +002157 +002162 +002164 +002167 +002171 +002172 +002174 +002175 
+002176 +002178 +002180 +002181 +002184 +002186 +002189 +002190 +002191 +002192 +002194 +002195 +002197 +002198 +002199 +002203 +002204 +002205 +002208 +002210 +002211 +002212 +002213 +002214 +002217 +002221 +002222 +002223 +002226 +002227 +002230 +002231 +002235 +002236 +002237 +002238 +002240 +002241 +002242 +002244 +002247 +002249 +002252 +002253 +002256 +002259 +002261 +002263 +002264 +002265 +002267 +002268 +002269 +002270 +002271 +002273 +002274 +002275 +002278 +002281 +002285 +002288 +002289 +002296 +002297 +002301 +002302 +002305 +002309 +002311 +002312 +002313 +002316 +002317 +002318 +002321 +002322 +002323 +002324 +002326 +002328 +002331 +002333 +002335 +002339 +002342 +002343 +002349 +002350 +002351 +002352 +002354 +002355 +002358 +002360 +002361 +002363 +002364 +002368 +002371 +002373 +002374 +002375 +002377 +002379 +002381 +002388 +002389 +002390 +002394 +002395 +002396 +002400 +002401 +002402 +002403 +002406 +002407 +002408 +002409 +002410 +002412 +002413 +002416 +002417 +002421 +002426 +002427 +002430 +002431 +002435 +002436 +002437 +002438 +002441 +002443 +002444 +002445 +002447 +002448 +002449 +002451 +002452 +002453 +002456 +002459 +002464 +002465 +002466 +002467 +002468 +002469 +002470 +002471 +002472 +002475 +002480 +002481 +002482 +002484 +002485 +002487 +002489 +002491 +002493 +002494 +002496 +002498 +002501 +002507 +002508 +002510 +002512 +002513 +002514 +002515 +002517 +002518 +002522 +002523 +002524 +002527 +002533 +002535 +002536 +002537 +002542 +002544 +002545 +002547 +002549 +002550 +002551 +002553 +002554 +002555 +002559 +002560 +002561 +002566 +002567 +002571 +002573 +002576 +002578 +002579 +002582 +002587 +002588 +002589 +002591 +002592 +002593 +002595 +002596 +002597 +002605 +002607 +002608 +002609 +002610 +002611 +002614 +002616 +002617 +002618 +002620 +002622 +002623 +002624 +002627 +002629 +002632 +002634 +002637 +002639 +002642 +002643 +002647 +002648 +002649 +002650 +002652 +002654 +002655 +002658 +002659 +002660 +002662 +002664 +002665 +002667 +002668 +002670 +002671 +002672 +002676 +002678 +002679 +002682 +002683 +002684 +002687 +002688 +002689 +002691 +002697 +002698 +002700 +002701 +002703 +002704 +002705 +002708 +002714 +002716 +002718 +002719 +002723 +002731 +002732 +002733 +002734 +002736 +002738 +002739 +002741 +002743 +002750 +002751 +002754 +002756 +002759 +002762 +002766 +002768 +002769 +002770 +002771 +002774 +002776 +002777 +002778 +002779 +002780 +002781 +002782 +002784 +002785 +002788 +002790 +002791 +002792 +002795 +002798 +002799 +002802 +002803 +002807 +002808 +002813 +002816 +002817 +002819 +002821 +002822 +002823 +002824 +002825 +002829 +002832 +002834 +002835 +002837 +002838 +002842 +002843 +002849 +002850 +002851 +002852 +002854 +002855 +002857 +002859 +002860 +002862 +002864 +002865 +002868 +002869 +002870 +002871 +002872 +002873 +002874 +002882 +002884 +002886 +002887 +002888 +002897 +002898 +002899 +002904 +002906 +002907 +002909 +002910 +002912 +002913 +002915 +002918 +002920 +002921 +002922 +002923 +002926 +002927 +002929 +002931 +002932 +002933 +002936 +002938 +002939 +002940 +002941 +002943 +002946 +002949 +002950 +002952 +002954 +002956 +002965 +002967 +002968 +002969 +002970 +002972 +002973 +002975 +002980 +002981 +002983 +002986 +002987 +002989 +002990 +002992 +002996 +002998 +003002 +003008 +003009 +003012 +003013 +003014 +003015 +003016 +003017 +003018 +003020 +003021 +003023 +003026 +003028 +003036 +003037 +003039 +003040 +003041 +003044 +003045 +003049 +003051 +003057 +003059 +003060 +003063 +003064 +003068 +003069 
+003070 +003072 +003075 +003077 +003078 +003079 +003081 +003083 +003084 +003085 +003086 +003089 +003091 +003092 +003093 +003095 +003097 +003098 +003100 +003104 +003105 +003108 +003111 +003113 +003115 +003117 +003119 +003120 +003121 +003122 +003123 +003125 +003128 +003130 +003132 +003138 +003139 +003140 +003143 +003147 +003149 +003151 +003152 +003154 +003155 +003157 +003158 +003160 +003163 +003164 +003166 +003168 +003169 +003171 +003173 +003176 +003178 +003184 +003185 +003186 +003188 +003189 +003191 +003193 +003195 +003196 +003198 +003200 +003201 +003205 +003206 +003208 +003209 +003212 +003213 +003215 +003218 +003220 +003223 +003227 +003230 +003234 +003235 +003237 +003238 +003241 +003243 +003244 +003245 +003246 +003248 +003249 +003253 +003256 +003258 +003260 +003261 +003262 +003263 +003264 +003267 +003268 +003270 +003271 +003273 +003274 +003277 +003278 +003279 +003282 +003284 +003285 +003286 +003287 +003289 +003290 +003291 +003293 +003294 +003297 +003299 +003303 +003307 +003309 +003311 +003314 +003317 +003320 +003321 +003326 +003327 +003328 +003329 +003332 +003333 +003334 +003335 +003336 +003339 +003340 +003342 +003344 +003345 +003348 +003349 +003354 +003356 +003359 +003360 +003361 +003362 +003363 +003369 +003371 +003372 +003374 +003376 +003377 +003378 +003380 +003381 +003382 +003383 +003384 +003387 +003388 +003389 +003390 +003391 +003392 +003398 +003400 +003413 +003414 +003415 +003416 +003418 +003420 +003423 +003424 +003427 +003431 +003433 +003436 +003437 +003438 +003439 +003440 +003441 +003442 +003444 +003445 +003446 +003451 +003452 +003454 +003455 +003457 +003458 +003459 +003460 +003462 +003463 +003468 +003472 +003473 +003475 +003476 +003477 +003479 +003485 +003486 +003493 +003494 +003498 +003499 +003500 +003501 +003505 +003507 +003508 +003509 +003510 +003512 +003513 +003514 +003516 +003518 +003522 +003523 +003525 +003526 +003532 +003533 +003534 +003536 +003537 +003538 +003540 +003541 +003542 +003545 +003546 +003548 +003549 +003551 +003555 +003556 +003560 +003561 +003564 +003565 +003566 +003567 +003569 +003570 +003572 +003575 +003576 +003577 +003578 +003579 +003581 +003585 +003586 +003587 +003589 +003590 +003591 +003592 +003593 +003594 +003595 +003596 +003597 +003598 +003599 +003602 +003603 +003606 +003610 +003612 +003613 +003615 +003617 +003619 +003625 +003626 +003628 +003636 +003637 +003638 +003639 +003640 +003641 +003642 +003644 +003646 +003648 +003650 +003651 +003654 +003656 +003657 +003660 +003663 +003664 +003665 +003666 +003670 +003672 +003673 +003674 +003675 +003680 +003681 +003685 +003686 +003687 +003693 +003694 +003695 +003696 +003697 +003698 +003699 +003700 +003701 +003704 +003706 +003709 +003710 +003713 +003714 +003717 +003720 +003721 +003722 +003724 +003725 +003727 +003729 +003730 +003731 +003732 +003733 +003734 +003740 +003741 +003742 +003743 +003744 +003745 +003749 +003752 +003754 +003757 +003758 +003759 +003760 +003761 +003765 +003766 +003767 +003768 +003770 +003772 +003773 +003774 +003776 +003780 +003783 +003784 +003785 +003786 +003789 +003790 +003791 +003792 +003795 +003796 +003797 +003799 +003801 +003803 +003806 +003810 +003813 +003815 +003816 +003817 +003818 +003819 +003821 +003823 +003824 +003825 +003829 +003831 +003832 +003833 +003836 +003838 +003839 +003840 +003842 +003843 +003844 +003845 +003846 +003848 +003849 +003850 +003851 +003853 +003855 +003857 +003858 +003861 +003862 +003863 +003865 +003867 +003868 +003871 +003875 +003876 +003877 +003882 +003884 +003887 +003888 +003889 +003893 +003895 +003896 +003900 +003903 +003904 +003906 +003908 +003910 +003911 +003912 
+003913 +003917 +003918 +003919 +003921 +003922 +003925 +003927 +003928 +003929 +003930 +003933 +003935 +003936 +003939 +003940 +003941 +003942 +003944 +003947 +003949 +003951 +003952 +003953 +003954 +003955 +003957 +003959 +003960 +003963 +003966 +003967 +003968 +003971 +003973 +003974 +003976 +003978 +003979 +003983 +003985 +003987 +003988 +003989 +003990 +003991 +003993 +003994 +003995 +003997 +003999 +004005 +004006 +004012 +004013 +004014 +004015 +004017 +004018 +004019 +004020 +004022 +004023 +004024 +004025 +004029 +004030 +004031 +004035 +004037 +004039 +004043 +004044 +004046 +004047 +004050 +004052 +004053 +004054 +004056 +004057 +004058 +004060 +004062 +004066 +004067 +004069 +004070 +004071 +004073 +004075 +004076 +004078 +004080 +004084 +004086 +004088 +004090 +004093 +004094 +004097 +004099 +004102 +004103 +004106 +004112 +004114 +004115 +004123 +004127 +004133 +004134 +004135 +004139 +004141 +004144 +004145 +004146 +004147 +004151 +004159 +004165 +004166 +004167 +004169 +004170 +004176 +004177 +004178 +004179 +004180 +004181 +004182 +004183 +004184 +004186 +004192 +004193 +004194 +004197 +004198 +004199 +004200 +004201 +004203 +004204 +004208 +004211 +004212 +004216 +004217 +004218 +004219 +004225 +004227 +004229 +004230 +004231 +004233 +004234 +004235 +004236 +004238 +004240 +004244 +004245 +004247 +004252 +004253 +004257 +004258 +004261 +004262 +004264 +004265 +004266 +004267 +004268 +004269 +004272 +004273 +004274 +004276 +004279 +004283 +004286 +004287 +004292 +004296 +004297 +004302 +004304 +004308 +004310 +004313 +004315 +004316 +004317 +004320 +004322 +004325 +004328 +004331 +004332 +004333 +004334 +004339 +004341 +004344 +004346 +004347 +004351 +004354 +004355 +004356 +004357 +004358 +004359 +004361 +004365 +004366 +004371 +004372 +004375 +004376 +004378 +004379 +004380 +004381 +004382 +004386 +004387 +004389 +004390 +004394 +004395 +004399 +004400 +004405 +004408 +004409 +004410 +004411 +004412 +004413 +004416 +004417 +004427 +004428 +004431 +004432 +004436 +004441 +004442 +004445 +004446 +004448 +004449 +004451 +004453 +004455 +004457 +004459 +004461 +004463 +004464 +004466 +004467 +004468 +004471 +004473 +004476 +004477 +004478 +004479 +004484 +004488 +004492 +004495 +004497 +004498 +004499 +004500 +004503 +004504 +004505 +004506 +004507 +004509 +004510 +004512 +004514 +004515 +004518 +004522 +004523 +004524 +004525 +004533 +004535 +004536 +004537 +004538 +004539 +004543 +004544 +004545 +004546 +004550 +004552 +004554 +004555 +004558 +004559 +004560 +004561 +004563 +004564 +004565 +004571 +004572 +004575 +004577 +004579 +004580 +004583 +004584 +004586 +004590 +004592 +004593 +004594 +004595 +004597 +004600 +004601 +004602 +004604 +004605 +004606 +004607 +004613 +004614 +004616 +004617 +004619 +004621 +004623 +004625 +004627 +004628 +004631 +004635 +004637 +004639 +004641 +004642 +004643 +004645 +004646 +004653 +004654 +004656 +004659 +004661 +004662 +004663 +004664 +004670 +004671 +004674 +004675 +004676 +004677 +004678 +004681 +004684 +004690 +004696 +004701 +004702 +004703 +004704 +004707 +004712 +004719 +004723 +004727 +004728 +004729 +004731 +004733 +004736 +004741 +004747 +004749 +004750 +004751 +004754 +004755 +004757 +004758 +004760 +004761 +004765 +004767 +004771 +004772 +004774 +004775 +004778 +004779 +004780 +004781 +004784 +004785 +004786 +004789 +004793 +004794 +004795 +004796 +004798 +004801 +004802 +004803 +004805 +004808 +004809 +004812 +004818 +004819 +004820 +004823 +004824 +004826 +004827 +004828 +004833 +004834 +004836 +004837 +004838 +004840 
+004841 +004842 +004844 +004845 +004847 +004853 +004854 +004855 +004856 +004857 +004865 +004866 +004869 +004870 +004872 +004876 +004877 +004878 +004879 +004880 +004882 +004883 +004884 +004886 +004889 +004890 +004894 +004897 +004899 +004900 +004901 +004906 +004908 +004910 +004911 +004912 +004913 +004915 +004916 +004919 +004922 +004923 +004925 +004930 +004933 +004936 +004937 +004939 +004940 +004945 +004950 +004951 +004952 +004955 +004957 +004961 +004964 +004965 +004967 +004968 +004969 +004970 +004971 +004972 +004973 +004975 +004977 +004978 +004980 +004982 +004984 +004987 +004991 +004992 +004997 +005000 +005003 +005005 +005006 +005007 +005009 +005011 +005012 +005016 +005018 +005020 +005022 +005023 +005025 +005027 +005029 +005030 +005031 +005033 +005035 +005039 +005042 +005043 +005044 +005046 +005047 +005048 +005051 +005059 +005060 +005061 +005066 +005069 +005071 +005076 +005083 +005084 +005085 +005087 +005088 +005089 +005091 +005092 +005096 +005097 +005098 +005099 +005100 +005102 +005104 +005106 +005107 +005111 +005114 +005115 +005116 +005117 +005118 +005119 +005123 +005126 +005129 +005130 +005131 +005132 +005134 +005137 +005142 +005146 +005148 +005150 +005151 +005152 +005154 +005159 +005160 +005165 +005169 +005171 +005173 +005177 +005178 +005183 +005186 +005187 +005192 +005193 +005195 +005196 +005200 +005202 +005203 +005204 +005205 +005207 +005208 +005209 +005210 +005211 +005212 +005215 +005216 +005220 +005223 +005224 +005225 +005228 +005231 +005232 +005235 +005238 +005239 +005243 +005245 +005247 +005248 +005250 +005252 +005253 +005254 +005257 +005258 +005259 +005261 +005263 +005264 +005265 +005266 +005269 +005270 +005272 +005277 +005278 +005281 +005283 +005285 +005286 +005288 +005290 +005291 +005293 +005294 +005295 +005300 +005301 +005302 +005303 +005305 +005306 +005310 +005314 +005317 +005320 +005324 +005326 +005327 +005331 +005332 +005339 +005340 +005344 +005346 +005348 +005351 +005352 +005353 +005354 +005355 +005356 +005357 +005358 +005361 +005362 +005364 +005367 +005370 +005373 +005374 +005376 +005380 +005382 +005383 +005384 +005387 +005388 +005392 +005393 +005394 +005395 +005396 +005397 +005398 +005399 +005400 +005401 +005402 +005403 +005406 +005407 +005408 +005409 +005410 +005411 +005412 +005414 +005416 +005417 +005418 +005419 +005420 +005421 +005424 +005425 +005428 +005432 +005433 +005435 +005436 +005438 +005439 +005440 +005442 +005446 +005451 +005454 +005455 +005456 +005457 +005462 +005463 +005464 +005468 +005469 +005470 +005475 +005478 +005480 +005483 +005485 +005488 +005490 +005491 +005492 +005493 +005496 +005497 +005499 +005500 +005501 +005502 +005503 +005504 +005506 +005507 +005508 +005509 +005512 +005513 +005516 +005517 +005518 +005519 +005520 +005521 +005522 +005524 +005526 +005527 +005529 +005530 +005533 +005535 +005537 +005539 +005541 +005543 +005547 +005548 +005549 +005550 +005553 +005554 +005561 +005562 +005563 +005564 +005567 +005568 +005569 +005574 +005575 +005578 +005579 +005583 +005585 +005591 +005592 +005593 +005594 +005597 +005598 +005599 +005604 +005605 +005606 +005607 +005608 +005609 +005611 +005612 +005614 +005615 +005620 +005621 +005622 +005624 +005626 +005627 +005628 +005629 +005632 +005636 +005637 +005641 +005644 +005645 +005646 +005647 +005648 +005651 +005654 +005655 +005657 +005661 +005663 +005665 +005666 +005667 +005670 +005671 +005674 +005675 +005678 +005679 +005681 +005682 +005684 +005686 +005688 +005690 +005691 +005692 +005693 +005694 +005696 +005697 +005701 +005702 +005705 +005710 +005711 +005715 +005716 +005718 +005719 +005720 +005721 +005722 +005723 
+005726 +005730 +005732 +005733 +005734 +005737 +005738 +005742 +005748 +005749 +005750 +005752 +005753 +005755 +005756 +005758 +005759 +005761 +005764 +005766 +005767 +005768 +005769 +005770 +005771 +005772 +005773 +005774 +005775 +005776 +005778 +005779 +005780 +005781 +005788 +005789 +005791 +005792 +005795 +005797 +005798 +005799 +005802 +005804 +005808 +005809 +005810 +005813 +005814 +005815 +005816 +005817 +005823 +005824 +005825 +005828 +005830 +005831 +005832 +005833 +005835 +005836 +005837 +005838 +005842 +005844 +005845 +005846 +005847 +005848 +005849 +005850 +005851 +005853 +005858 +005860 +005861 +005862 +005863 +005865 +005866 +005867 +005868 +005870 +005871 +005872 +005874 +005875 +005877 +005880 +005884 +005886 +005888 +005890 +005891 +005895 +005896 +005897 +005898 +005902 +005904 +005908 +005915 +005920 +005924 +005928 +005929 +005930 +005932 +005934 +005936 +005937 +005940 +005941 +005942 +005943 +005945 +005946 +005950 +005951 +005953 +005954 +005956 +005957 +005959 +005960 +005964 +005966 +005967 +005968 +005971 +005973 +005974 +005976 +005977 +005979 +005980 +005983 +005987 +005989 +005990 +005991 +005992 +005993 +005995 +005998 +006000 +006004 +006006 +006007 +006011 +006015 +006017 +006018 +006019 +006020 +006021 +006022 +006025 +006032 +006035 +006037 +006040 +006049 +006051 +006053 +006055 +006056 +006059 +006064 +006065 +006069 +006072 +006073 +006076 +006079 +006080 +006081 +006082 +006084 +006089 +006090 +006091 +006092 +006094 +006099 +006101 +006104 +006105 +006108 +006109 +006111 +006112 +006113 +006119 +006120 +006124 +006128 +006129 +006131 +006132 +006134 +006135 +006137 +006138 +006140 +006141 +006142 +006143 +006145 +006147 +006149 +006150 +006153 +006155 +006157 +006158 +006159 +006160 +006162 +006164 +006166 +006170 +006171 +006172 +006174 +006175 +006178 +006179 +006180 +006181 +006183 +006184 +006188 +006189 +006191 +006192 +006193 +006197 +006199 +006200 +006201 +006203 +006205 +006206 +006207 +006209 +006211 +006212 +006214 +006216 +006217 +006218 +006220 +006221 +006223 +006224 +006225 +006226 +006230 +006231 +006234 +006235 +006236 +006237 +006239 +006241 +006242 +006243 +006245 +006248 +006251 +006252 +006253 +006254 +006255 +006256 +006257 +006259 +006260 +006261 +006262 +006264 +006268 +006271 +006277 +006279 +006281 +006283 +006284 +006285 +006289 +006290 +006291 +006292 +006293 +006294 +006295 +006296 +006298 +006299 +006303 +006304 +006307 +006308 +006309 +006310 +006311 +006313 +006318 +006319 +006320 +006323 +006325 +006326 +006327 +006328 +006329 +006330 +006335 +006336 +006337 +006341 +006346 +006347 +006350 +006352 +006358 +006359 +006361 +006362 +006363 +006365 +006367 +006373 +006374 +006375 +006376 +006378 +006382 +006383 +006384 +006387 +006389 +006390 +006392 +006397 +006398 +006399 +006400 +006401 +006402 +006404 +006408 +006412 +006413 +006414 +006418 +006419 +006421 +006422 +006428 +006429 +006430 +006431 +006432 +006438 +006443 +006447 +006448 +006449 +006450 +006455 +006456 +006457 +006458 +006459 +006460 +006461 +006463 +006466 +006467 +006471 +006476 +006479 +006480 +006485 +006487 +006489 +006490 +006492 +006494 +006495 +006499 +006500 +006501 +006502 +006504 +006509 +006510 +006511 +006513 +006518 +006522 +006523 +006526 +006527 +006528 +006536 +006538 +006539 +006541 +006543 +006544 +006545 +006546 +006547 +006550 +006552 +006554 +006557 +006559 +006562 +006564 +006566 +006567 +006571 +006572 +006573 +006575 +006579 +006580 +006584 +006585 +006587 +006589 +006591 +006594 +006598 +006599 +006600 +006601 +006605 +006606 
+006607 +006608 +006609 +006610 +006615 +006616 +006617 +006619 +006620 +006621 +006622 +006627 +006630 +006631 +006635 +006639 +006640 +006642 +006644 +006645 +006646 +006648 +006652 +006653 +006654 +006657 +006661 +006662 +006663 +006665 +006668 +006671 +006672 +006673 +006675 +006680 +006681 +006683 +006684 +006687 +006688 +006689 +006690 +006691 +006697 +006699 +006700 +006702 +006704 +006705 +006706 +006707 +006708 +006716 +006717 +006718 +006721 +006722 +006724 +006727 +006728 +006730 +006735 +006736 +006739 +006740 +006742 +006743 +006746 +006748 +006749 +006750 +006757 +006763 +006766 +006769 +006774 +006775 +006776 +006779 +006784 +006787 +006788 +006790 +006793 +006795 +006799 +006801 +006802 +006805 +006809 +006810 +006814 +006817 +006820 +006821 +006823 +006824 +006825 +006826 +006827 +006830 +006831 +006834 +006835 +006838 +006839 +006840 +006842 +006845 +006846 +006848 +006851 +006857 +006859 +006861 +006864 +006865 +006867 +006869 +006871 +006875 +006877 +006878 +006880 +006883 +006886 +006888 +006890 +006892 +006893 +006894 +006896 +006902 +006904 +006905 +006909 +006911 +006912 +006915 +006916 +006918 +006919 +006920 +006921 +006923 +006924 +006926 +006927 +006929 +006931 +006932 +006933 +006934 +006935 +006939 +006940 +006941 +006946 +006947 +006949 +006951 +006952 +006957 +006958 +006961 +006963 +006965 +006966 +006967 +006969 +006970 +006972 +006974 +006975 +006976 +006979 +006983 +006984 +006985 +006986 +006988 +006991 +006993 +006995 +006996 +006998 +007001 +007002 +007004 +007007 +007009 +007013 +007017 +007018 +007020 +007021 +007024 +007025 +007035 +007036 +007039 +007040 +007041 +007044 +007045 +007046 +007050 +007051 +007054 +007057 +007058 +007060 +007062 +007064 +007066 +007070 +007073 +007075 +007077 +007086 +007090 +007092 +007093 +007094 +007096 +007097 +007099 +007101 +007102 +007104 +007105 +007106 +007107 +007108 +007111 +007113 +007114 +007116 +007118 +007121 +007123 +007124 +007126 +007127 +007128 +007129 +007134 +007137 +007140 +007141 +007142 +007143 +007147 +007148 +007150 +007151 +007152 +007153 +007155 +007156 +007159 +007160 +007167 +007170 +007171 +007173 +007175 +007179 +007181 +007184 +007185 +007186 +007188 +007189 +007190 +007191 +007192 +007193 +007195 +007196 +007197 +007203 +007206 +007209 +007211 +007213 +007216 +007218 +007220 +007222 +007223 +007224 +007226 +007228 +007231 +007234 +007236 +007237 +007239 +007241 +007243 +007245 +007248 +007249 +007250 +007251 +007254 +007257 +007259 +007263 +007264 +007268 +007269 +007270 +007276 +007281 +007282 +007285 +007286 +007293 +007295 +007296 +007297 +007298 +007301 +007305 +007306 +007307 +007308 +007312 +007313 +007314 +007316 +007317 +007320 +007321 +007324 +007328 +007332 +007333 +007334 +007335 +007338 +007340 +007341 +007346 +007348 +007354 +007355 +007356 +007357 +007358 +007361 +007362 +007363 +007365 +007366 +007367 +007368 +007370 +007372 +007373 +007378 +007379 +007386 +007387 +007388 +007390 +007392 +007393 +007394 +007399 +007400 +007404 +007406 +007408 +007414 +007417 +007418 +007425 +007427 +007428 +007429 +007431 +007432 +007438 +007441 +007443 +007444 +007446 +007451 +007452 +007454 +007455 +007457 +007459 +007460 +007461 +007465 +007471 +007472 +007474 +007476 +007479 \ No newline at end of file diff --git a/data/KITTI/ImageSets/trainval.txt b/data/KITTI/ImageSets/trainval.txt new file mode 100644 index 0000000..43467b5 --- /dev/null +++ b/data/KITTI/ImageSets/trainval.txt @@ -0,0 +1,7481 @@ +000000 +000001 +000002 +000003 +000004 +000005 +000006 +000007 +000008 +000009 
+000010 +000011 +000012 +000013 +000014 +000015 +000016 +000017 +000018 +000019 +000020 +000021 +000022 +000023 +000024 +000025 +000026 +000027 +000028 +000029 +000030 +000031 +000032 +000033 +000034 +000035 +000036 +000037 +000038 +000039 +000040 +000041 +000042 +000043 +000044 +000045 +000046 +000047 +000048 +000049 +000050 +000051 +000052 +000053 +000054 +000055 +000056 +000057 +000058 +000059 +000060 +000061 +000062 +000063 +000064 +000065 +000066 +000067 +000068 +000069 +000070 +000071 +000072 +000073 +000074 +000075 +000076 +000077 +000078 +000079 +000080 +000081 +000082 +000083 +000084 +000085 +000086 +000087 +000088 +000089 +000090 +000091 +000092 +000093 +000094 +000095 +000096 +000097 +000098 +000099 +000100 +000101 +000102 +000103 +000104 +000105 +000106 +000107 +000108 +000109 +000110 +000111 +000112 +000113 +000114 +000115 +000116 +000117 +000118 +000119 +000120 +000121 +000122 +000123 +000124 +000125 +000126 +000127 +000128 +000129 +000130 +000131 +000132 +000133 +000134 +000135 +000136 +000137 +000138 +000139 +000140 +000141 +000142 +000143 +000144 +000145 +000146 +000147 +000148 +000149 +000150 +000151 +000152 +000153 +000154 +000155 +000156 +000157 +000158 +000159 +000160 +000161 +000162 +000163 +000164 +000165 +000166 +000167 +000168 +000169 +000170 +000171 +000172 +000173 +000174 +000175 +000176 +000177 +000178 +000179 +000180 +000181 +000182 +000183 +000184 +000185 +000186 +000187 +000188 +000189 +000190 +000191 +000192 +000193 +000194 +000195 +000196 +000197 +000198 +000199 +000200 +000201 +000202 +000203 +000204 +000205 +000206 +000207 +000208 +000209 +000210 +000211 +000212 +000213 +000214 +000215 +000216 +000217 +000218 +000219 +000220 +000221 +000222 +000223 +000224 +000225 +000226 +000227 +000228 +000229 +000230 +000231 +000232 +000233 +000234 +000235 +000236 +000237 +000238 +000239 +000240 +000241 +000242 +000243 +000244 +000245 +000246 +000247 +000248 +000249 +000250 +000251 +000252 +000253 +000254 +000255 +000256 +000257 +000258 +000259 +000260 +000261 +000262 +000263 +000264 +000265 +000266 +000267 +000268 +000269 +000270 +000271 +000272 +000273 +000274 +000275 +000276 +000277 +000278 +000279 +000280 +000281 +000282 +000283 +000284 +000285 +000286 +000287 +000288 +000289 +000290 +000291 +000292 +000293 +000294 +000295 +000296 +000297 +000298 +000299 +000300 +000301 +000302 +000303 +000304 +000305 +000306 +000307 +000308 +000309 +000310 +000311 +000312 +000313 +000314 +000315 +000316 +000317 +000318 +000319 +000320 +000321 +000322 +000323 +000324 +000325 +000326 +000327 +000328 +000329 +000330 +000331 +000332 +000333 +000334 +000335 +000336 +000337 +000338 +000339 +000340 +000341 +000342 +000343 +000344 +000345 +000346 +000347 +000348 +000349 +000350 +000351 +000352 +000353 +000354 +000355 +000356 +000357 +000358 +000359 +000360 +000361 +000362 +000363 +000364 +000365 +000366 +000367 +000368 +000369 +000370 +000371 +000372 +000373 +000374 +000375 +000376 +000377 +000378 +000379 +000380 +000381 +000382 +000383 +000384 +000385 +000386 +000387 +000388 +000389 +000390 +000391 +000392 +000393 +000394 +000395 +000396 +000397 +000398 +000399 +000400 +000401 +000402 +000403 +000404 +000405 +000406 +000407 +000408 +000409 +000410 +000411 +000412 +000413 +000414 +000415 +000416 +000417 +000418 +000419 +000420 +000421 +000422 +000423 +000424 +000425 +000426 +000427 +000428 +000429 +000430 +000431 +000432 +000433 +000434 +000435 +000436 +000437 +000438 +000439 +000440 +000441 +000442 +000443 +000444 +000445 +000446 +000447 +000448 +000449 +000450 +000451 +000452 +000453 
+000454 +000455 +000456 +000457 +000458 +000459 +000460 +000461 +000462 +000463 +000464 +000465 +000466 +000467 +000468 +000469 +000470 +000471 +000472 +000473 +000474 +000475 +000476 +000477 +000478 +000479 +000480 +000481 +000482 +000483 +000484 +000485 +000486 +000487 +000488 +000489 +000490 +000491 +000492 +000493 +000494 +000495 +000496 +000497 +000498 +000499 +000500 +000501 +000502 +000503 +000504 +000505 +000506 +000507 +000508 +000509 +000510 +000511 +000512 +000513 +000514 +000515 +000516 +000517 +000518 +000519 +000520 +000521 +000522 +000523 +000524 +000525 +000526 +000527 +000528 +000529 +000530 +000531 +000532 +000533 +000534 +000535 +000536 +000537 +000538 +000539 +000540 +000541 +000542 +000543 +000544 +000545 +000546 +000547 +000548 +000549 +000550 +000551 +000552 +000553 +000554 +000555 +000556 +000557 +000558 +000559 +000560 +000561 +000562 +000563 +000564 +000565 +000566 +000567 +000568 +000569 +000570 +000571 +000572 +000573 +000574 +000575 +000576 +000577 +000578 +000579 +000580 +000581 +000582 +000583 +000584 +000585 +000586 +000587 +000588 +000589 +000590 +000591 +000592 +000593 +000594 +000595 +000596 +000597 +000598 +000599 +000600 +000601 +000602 +000603 +000604 +000605 +000606 +000607 +000608 +000609 +000610 +000611 +000612 +000613 +000614 +000615 +000616 +000617 +000618 +000619 +000620 +000621 +000622 +000623 +000624 +000625 +000626 +000627 +000628 +000629 +000630 +000631 +000632 +000633 +000634 +000635 +000636 +000637 +000638 +000639 +000640 +000641 +000642 +000643 +000644 +000645 +000646 +000647 +000648 +000649 +000650 +000651 +000652 +000653 +000654 +000655 +000656 +000657 +000658 +000659 +000660 +000661 +000662 +000663 +000664 +000665 +000666 +000667 +000668 +000669 +000670 +000671 +000672 +000673 +000674 +000675 +000676 +000677 +000678 +000679 +000680 +000681 +000682 +000683 +000684 +000685 +000686 +000687 +000688 +000689 +000690 +000691 +000692 +000693 +000694 +000695 +000696 +000697 +000698 +000699 +000700 +000701 +000702 +000703 +000704 +000705 +000706 +000707 +000708 +000709 +000710 +000711 +000712 +000713 +000714 +000715 +000716 +000717 +000718 +000719 +000720 +000721 +000722 +000723 +000724 +000725 +000726 +000727 +000728 +000729 +000730 +000731 +000732 +000733 +000734 +000735 +000736 +000737 +000738 +000739 +000740 +000741 +000742 +000743 +000744 +000745 +000746 +000747 +000748 +000749 +000750 +000751 +000752 +000753 +000754 +000755 +000756 +000757 +000758 +000759 +000760 +000761 +000762 +000763 +000764 +000765 +000766 +000767 +000768 +000769 +000770 +000771 +000772 +000773 +000774 +000775 +000776 +000777 +000778 +000779 +000780 +000781 +000782 +000783 +000784 +000785 +000786 +000787 +000788 +000789 +000790 +000791 +000792 +000793 +000794 +000795 +000796 +000797 +000798 +000799 +000800 +000801 +000802 +000803 +000804 +000805 +000806 +000807 +000808 +000809 +000810 +000811 +000812 +000813 +000814 +000815 +000816 +000817 +000818 +000819 +000820 +000821 +000822 +000823 +000824 +000825 +000826 +000827 +000828 +000829 +000830 +000831 +000832 +000833 +000834 +000835 +000836 +000837 +000838 +000839 +000840 +000841 +000842 +000843 +000844 +000845 +000846 +000847 +000848 +000849 +000850 +000851 +000852 +000853 +000854 +000855 +000856 +000857 +000858 +000859 +000860 +000861 +000862 +000863 +000864 +000865 +000866 +000867 +000868 +000869 +000870 +000871 +000872 +000873 +000874 +000875 +000876 +000877 +000878 +000879 +000880 +000881 +000882 +000883 +000884 +000885 +000886 +000887 +000888 +000889 +000890 +000891 +000892 +000893 +000894 +000895 +000896 +000897 
+000898 +000899 +000900 +000901 +000902 +000903 +000904 +000905 +000906 +000907 +000908 +000909 +000910 +000911 +000912 +000913 +000914 +000915 +000916 +000917 +000918 +000919 +000920 +000921 +000922 +000923 +000924 +000925 +000926 +000927 +000928 +000929 +000930 +000931 +000932 +000933 +000934 +000935 +000936 +000937 +000938 +000939 +000940 +000941 +000942 +000943 +000944 +000945 +000946 +000947 +000948 +000949 +000950 +000951 +000952 +000953 +000954 +000955 +000956 +000957 +000958 +000959 +000960 +000961 +000962 +000963 +000964 +000965 +000966 +000967 +000968 +000969 +000970 +000971 +000972 +000973 +000974 +000975 +000976 +000977 +000978 +000979 +000980 +000981 +000982 +000983 +000984 +000985 +000986 +000987 +000988 +000989 +000990 +000991 +000992 +000993 +000994 +000995 +000996 +000997 +000998 +000999 +001000 +001001 +001002 +001003 +001004 +001005 +001006 +001007 +001008 +001009 +001010 +001011 +001012 +001013 +001014 +001015 +001016 +001017 +001018 +001019 +001020 +001021 +001022 +001023 +001024 +001025 +001026 +001027 +001028 +001029 +001030 +001031 +001032 +001033 +001034 +001035 +001036 +001037 +001038 +001039 +001040 +001041 +001042 +001043 +001044 +001045 +001046 +001047 +001048 +001049 +001050 +001051 +001052 +001053 +001054 +001055 +001056 +001057 +001058 +001059 +001060 +001061 +001062 +001063 +001064 +001065 +001066 +001067 +001068 +001069 +001070 +001071 +001072 +001073 +001074 +001075 +001076 +001077 +001078 +001079 +001080 +001081 +001082 +001083 +001084 +001085 +001086 +001087 +001088 +001089 +001090 +001091 +001092 +001093 +001094 +001095 +001096 +001097 +001098 +001099 +001100 +001101 +001102 +001103 +001104 +001105 +001106 +001107 +001108 +001109 +001110 +001111 +001112 +001113 +001114 +001115 +001116 +001117 +001118 +001119 +001120 +001121 +001122 +001123 +001124 +001125 +001126 +001127 +001128 +001129 +001130 +001131 +001132 +001133 +001134 +001135 +001136 +001137 +001138 +001139 +001140 +001141 +001142 +001143 +001144 +001145 +001146 +001147 +001148 +001149 +001150 +001151 +001152 +001153 +001154 +001155 +001156 +001157 +001158 +001159 +001160 +001161 +001162 +001163 +001164 +001165 +001166 +001167 +001168 +001169 +001170 +001171 +001172 +001173 +001174 +001175 +001176 +001177 +001178 +001179 +001180 +001181 +001182 +001183 +001184 +001185 +001186 +001187 +001188 +001189 +001190 +001191 +001192 +001193 +001194 +001195 +001196 +001197 +001198 +001199 +001200 +001201 +001202 +001203 +001204 +001205 +001206 +001207 +001208 +001209 +001210 +001211 +001212 +001213 +001214 +001215 +001216 +001217 +001218 +001219 +001220 +001221 +001222 +001223 +001224 +001225 +001226 +001227 +001228 +001229 +001230 +001231 +001232 +001233 +001234 +001235 +001236 +001237 +001238 +001239 +001240 +001241 +001242 +001243 +001244 +001245 +001246 +001247 +001248 +001249 +001250 +001251 +001252 +001253 +001254 +001255 +001256 +001257 +001258 +001259 +001260 +001261 +001262 +001263 +001264 +001265 +001266 +001267 +001268 +001269 +001270 +001271 +001272 +001273 +001274 +001275 +001276 +001277 +001278 +001279 +001280 +001281 +001282 +001283 +001284 +001285 +001286 +001287 +001288 +001289 +001290 +001291 +001292 +001293 +001294 +001295 +001296 +001297 +001298 +001299 +001300 +001301 +001302 +001303 +001304 +001305 +001306 +001307 +001308 +001309 +001310 +001311 +001312 +001313 +001314 +001315 +001316 +001317 +001318 +001319 +001320 +001321 +001322 +001323 +001324 +001325 +001326 +001327 +001328 +001329 +001330 +001331 +001332 +001333 +001334 +001335 +001336 +001337 +001338 +001339 +001340 +001341 
+001342 +001343 +001344 +001345 +001346 +001347 +001348 +001349 +001350 +001351 +001352 +001353 +001354 +001355 +001356 +001357 +001358 +001359 +001360 +001361 +001362 +001363 +001364 +001365 +001366 +001367 +001368 +001369 +001370 +001371 +001372 +001373 +001374 +001375 +001376 +001377 +001378 +001379 +001380 +001381 +001382 +001383 +001384 +001385 +001386 +001387 +001388 +001389 +001390 +001391 +001392 +001393 +001394 +001395 +001396 +001397 +001398 +001399 +001400 +001401 +001402 +001403 +001404 +001405 +001406 +001407 +001408 +001409 +001410 +001411 +001412 +001413 +001414 +001415 +001416 +001417 +001418 +001419 +001420 +001421 +001422 +001423 +001424 +001425 +001426 +001427 +001428 +001429 +001430 +001431 +001432 +001433 +001434 +001435 +001436 +001437 +001438 +001439 +001440 +001441 +001442 +001443 +001444 +001445 +001446 +001447 +001448 +001449 +001450 +001451 +001452 +001453 +001454 +001455 +001456 +001457 +001458 +001459 +001460 +001461 +001462 +001463 +001464 +001465 +001466 +001467 +001468 +001469 +001470 +001471 +001472 +001473 +001474 +001475 +001476 +001477 +001478 +001479 +001480 +001481 +001482 +001483 +001484 +001485 +001486 +001487 +001488 +001489 +001490 +001491 +001492 +001493 +001494 +001495 +001496 +001497 +001498 +001499 +001500 +001501 +001502 +001503 +001504 +001505 +001506 +001507 +001508 +001509 +001510 +001511 +001512 +001513 +001514 +001515 +001516 +001517 +001518 +001519 +001520 +001521 +001522 +001523 +001524 +001525 +001526 +001527 +001528 +001529 +001530 +001531 +001532 +001533 +001534 +001535 +001536 +001537 +001538 +001539 +001540 +001541 +001542 +001543 +001544 +001545 +001546 +001547 +001548 +001549 +001550 +001551 +001552 +001553 +001554 +001555 +001556 +001557 +001558 +001559 +001560 +001561 +001562 +001563 +001564 +001565 +001566 +001567 +001568 +001569 +001570 +001571 +001572 +001573 +001574 +001575 +001576 +001577 +001578 +001579 +001580 +001581 +001582 +001583 +001584 +001585 +001586 +001587 +001588 +001589 +001590 +001591 +001592 +001593 +001594 +001595 +001596 +001597 +001598 +001599 +001600 +001601 +001602 +001603 +001604 +001605 +001606 +001607 +001608 +001609 +001610 +001611 +001612 +001613 +001614 +001615 +001616 +001617 +001618 +001619 +001620 +001621 +001622 +001623 +001624 +001625 +001626 +001627 +001628 +001629 +001630 +001631 +001632 +001633 +001634 +001635 +001636 +001637 +001638 +001639 +001640 +001641 +001642 +001643 +001644 +001645 +001646 +001647 +001648 +001649 +001650 +001651 +001652 +001653 +001654 +001655 +001656 +001657 +001658 +001659 +001660 +001661 +001662 +001663 +001664 +001665 +001666 +001667 +001668 +001669 +001670 +001671 +001672 +001673 +001674 +001675 +001676 +001677 +001678 +001679 +001680 +001681 +001682 +001683 +001684 +001685 +001686 +001687 +001688 +001689 +001690 +001691 +001692 +001693 +001694 +001695 +001696 +001697 +001698 +001699 +001700 +001701 +001702 +001703 +001704 +001705 +001706 +001707 +001708 +001709 +001710 +001711 +001712 +001713 +001714 +001715 +001716 +001717 +001718 +001719 +001720 +001721 +001722 +001723 +001724 +001725 +001726 +001727 +001728 +001729 +001730 +001731 +001732 +001733 +001734 +001735 +001736 +001737 +001738 +001739 +001740 +001741 +001742 +001743 +001744 +001745 +001746 +001747 +001748 +001749 +001750 +001751 +001752 +001753 +001754 +001755 +001756 +001757 +001758 +001759 +001760 +001761 +001762 +001763 +001764 +001765 +001766 +001767 +001768 +001769 +001770 +001771 +001772 +001773 +001774 +001775 +001776 +001777 +001778 +001779 +001780 +001781 +001782 +001783 +001784 +001785 
+001786 +001787 +001788 +001789 +001790 +001791 +001792 +001793 +001794 +001795 +001796 +001797 +001798 +001799 +001800 +001801 +001802 +001803 +001804 +001805 +001806 +001807 +001808 +001809 +001810 +001811 +001812 +001813 +001814 +001815 +001816 +001817 +001818 +001819 +001820 +001821 +001822 +001823 +001824 +001825 +001826 +001827 +001828 +001829 +001830 +001831 +001832 +001833 +001834 +001835 +001836 +001837 +001838 +001839 +001840 +001841 +001842 +001843 +001844 +001845 +001846 +001847 +001848 +001849 +001850 +001851 +001852 +001853 +001854 +001855 +001856 +001857 +001858 +001859 +001860 +001861 +001862 +001863 +001864 +001865 +001866 +001867 +001868 +001869 +001870 +001871 +001872 +001873 +001874 +001875 +001876 +001877 +001878 +001879 +001880 +001881 +001882 +001883 +001884 +001885 +001886 +001887 +001888 +001889 +001890 +001891 +001892 +001893 +001894 +001895 +001896 +001897 +001898 +001899 +001900 +001901 +001902 +001903 +001904 +001905 +001906 +001907 +001908 +001909 +001910 +001911 +001912 +001913 +001914 +001915 +001916 +001917 +001918 +001919 +001920 +001921 +001922 +001923 +001924 +001925 +001926 +001927 +001928 +001929 +001930 +001931 +001932 +001933 +001934 +001935 +001936 +001937 +001938 +001939 +001940 +001941 +001942 +001943 +001944 +001945 +001946 +001947 +001948 +001949 +001950 +001951 +001952 +001953 +001954 +001955 +001956 +001957 +001958 +001959 +001960 +001961 +001962 +001963 +001964 +001965 +001966 +001967 +001968 +001969 +001970 +001971 +001972 +001973 +001974 +001975 +001976 +001977 +001978 +001979 +001980 +001981 +001982 +001983 +001984 +001985 +001986 +001987 +001988 +001989 +001990 +001991 +001992 +001993 +001994 +001995 +001996 +001997 +001998 +001999 +002000 +002001 +002002 +002003 +002004 +002005 +002006 +002007 +002008 +002009 +002010 +002011 +002012 +002013 +002014 +002015 +002016 +002017 +002018 +002019 +002020 +002021 +002022 +002023 +002024 +002025 +002026 +002027 +002028 +002029 +002030 +002031 +002032 +002033 +002034 +002035 +002036 +002037 +002038 +002039 +002040 +002041 +002042 +002043 +002044 +002045 +002046 +002047 +002048 +002049 +002050 +002051 +002052 +002053 +002054 +002055 +002056 +002057 +002058 +002059 +002060 +002061 +002062 +002063 +002064 +002065 +002066 +002067 +002068 +002069 +002070 +002071 +002072 +002073 +002074 +002075 +002076 +002077 +002078 +002079 +002080 +002081 +002082 +002083 +002084 +002085 +002086 +002087 +002088 +002089 +002090 +002091 +002092 +002093 +002094 +002095 +002096 +002097 +002098 +002099 +002100 +002101 +002102 +002103 +002104 +002105 +002106 +002107 +002108 +002109 +002110 +002111 +002112 +002113 +002114 +002115 +002116 +002117 +002118 +002119 +002120 +002121 +002122 +002123 +002124 +002125 +002126 +002127 +002128 +002129 +002130 +002131 +002132 +002133 +002134 +002135 +002136 +002137 +002138 +002139 +002140 +002141 +002142 +002143 +002144 +002145 +002146 +002147 +002148 +002149 +002150 +002151 +002152 +002153 +002154 +002155 +002156 +002157 +002158 +002159 +002160 +002161 +002162 +002163 +002164 +002165 +002166 +002167 +002168 +002169 +002170 +002171 +002172 +002173 +002174 +002175 +002176 +002177 +002178 +002179 +002180 +002181 +002182 +002183 +002184 +002185 +002186 +002187 +002188 +002189 +002190 +002191 +002192 +002193 +002194 +002195 +002196 +002197 +002198 +002199 +002200 +002201 +002202 +002203 +002204 +002205 +002206 +002207 +002208 +002209 +002210 +002211 +002212 +002213 +002214 +002215 +002216 +002217 +002218 +002219 +002220 +002221 +002222 +002223 +002224 +002225 +002226 +002227 +002228 +002229 
+002230 +002231 +002232 +002233 +002234 +002235 +002236 +002237 +002238 +002239 +002240 +002241 +002242 +002243 +002244 +002245 +002246 +002247 +002248 +002249 +002250 +002251 +002252 +002253 +002254 +002255 +002256 +002257 +002258 +002259 +002260 +002261 +002262 +002263 +002264 +002265 +002266 +002267 +002268 +002269 +002270 +002271 +002272 +002273 +002274 +002275 +002276 +002277 +002278 +002279 +002280 +002281 +002282 +002283 +002284 +002285 +002286 +002287 +002288 +002289 +002290 +002291 +002292 +002293 +002294 +002295 +002296 +002297 +002298 +002299 +002300 +002301 +002302 +002303 +002304 +002305 +002306 +002307 +002308 +002309 +002310 +002311 +002312 +002313 +002314 +002315 +002316 +002317 +002318 +002319 +002320 +002321 +002322 +002323 +002324 +002325 +002326 +002327 +002328 +002329 +002330 +002331 +002332 +002333 +002334 +002335 +002336 +002337 +002338 +002339 +002340 +002341 +002342 +002343 +002344 +002345 +002346 +002347 +002348 +002349 +002350 +002351 +002352 +002353 +002354 +002355 +002356 +002357 +002358 +002359 +002360 +002361 +002362 +002363 +002364 +002365 +002366 +002367 +002368 +002369 +002370 +002371 +002372 +002373 +002374 +002375 +002376 +002377 +002378 +002379 +002380 +002381 +002382 +002383 +002384 +002385 +002386 +002387 +002388 +002389 +002390 +002391 +002392 +002393 +002394 +002395 +002396 +002397 +002398 +002399 +002400 +002401 +002402 +002403 +002404 +002405 +002406 +002407 +002408 +002409 +002410 +002411 +002412 +002413 +002414 +002415 +002416 +002417 +002418 +002419 +002420 +002421 +002422 +002423 +002424 +002425 +002426 +002427 +002428 +002429 +002430 +002431 +002432 +002433 +002434 +002435 +002436 +002437 +002438 +002439 +002440 +002441 +002442 +002443 +002444 +002445 +002446 +002447 +002448 +002449 +002450 +002451 +002452 +002453 +002454 +002455 +002456 +002457 +002458 +002459 +002460 +002461 +002462 +002463 +002464 +002465 +002466 +002467 +002468 +002469 +002470 +002471 +002472 +002473 +002474 +002475 +002476 +002477 +002478 +002479 +002480 +002481 +002482 +002483 +002484 +002485 +002486 +002487 +002488 +002489 +002490 +002491 +002492 +002493 +002494 +002495 +002496 +002497 +002498 +002499 +002500 +002501 +002502 +002503 +002504 +002505 +002506 +002507 +002508 +002509 +002510 +002511 +002512 +002513 +002514 +002515 +002516 +002517 +002518 +002519 +002520 +002521 +002522 +002523 +002524 +002525 +002526 +002527 +002528 +002529 +002530 +002531 +002532 +002533 +002534 +002535 +002536 +002537 +002538 +002539 +002540 +002541 +002542 +002543 +002544 +002545 +002546 +002547 +002548 +002549 +002550 +002551 +002552 +002553 +002554 +002555 +002556 +002557 +002558 +002559 +002560 +002561 +002562 +002563 +002564 +002565 +002566 +002567 +002568 +002569 +002570 +002571 +002572 +002573 +002574 +002575 +002576 +002577 +002578 +002579 +002580 +002581 +002582 +002583 +002584 +002585 +002586 +002587 +002588 +002589 +002590 +002591 +002592 +002593 +002594 +002595 +002596 +002597 +002598 +002599 +002600 +002601 +002602 +002603 +002604 +002605 +002606 +002607 +002608 +002609 +002610 +002611 +002612 +002613 +002614 +002615 +002616 +002617 +002618 +002619 +002620 +002621 +002622 +002623 +002624 +002625 +002626 +002627 +002628 +002629 +002630 +002631 +002632 +002633 +002634 +002635 +002636 +002637 +002638 +002639 +002640 +002641 +002642 +002643 +002644 +002645 +002646 +002647 +002648 +002649 +002650 +002651 +002652 +002653 +002654 +002655 +002656 +002657 +002658 +002659 +002660 +002661 +002662 +002663 +002664 +002665 +002666 +002667 +002668 +002669 +002670 +002671 +002672 +002673 
+002674 +002675 +002676 +002677 +002678 +002679 +002680 +002681 +002682 +002683 +002684 +002685 +002686 +002687 +002688 +002689 +002690 +002691 +002692 +002693 +002694 +002695 +002696 +002697 +002698 +002699 +002700 +002701 +002702 +002703 +002704 +002705 +002706 +002707 +002708 +002709 +002710 +002711 +002712 +002713 +002714 +002715 +002716 +002717 +002718 +002719 +002720 +002721 +002722 +002723 +002724 +002725 +002726 +002727 +002728 +002729 +002730 +002731 +002732 +002733 +002734 +002735 +002736 +002737 +002738 +002739 +002740 +002741 +002742 +002743 +002744 +002745 +002746 +002747 +002748 +002749 +002750 +002751 +002752 +002753 +002754 +002755 +002756 +002757 +002758 +002759 +002760 +002761 +002762 +002763 +002764 +002765 +002766 +002767 +002768 +002769 +002770 +002771 +002772 +002773 +002774 +002775 +002776 +002777 +002778 +002779 +002780 +002781 +002782 +002783 +002784 +002785 +002786 +002787 +002788 +002789 +002790 +002791 +002792 +002793 +002794 +002795 +002796 +002797 +002798 +002799 +002800 +002801 +002802 +002803 +002804 +002805 +002806 +002807 +002808 +002809 +002810 +002811 +002812 +002813 +002814 +002815 +002816 +002817 +002818 +002819 +002820 +002821 +002822 +002823 +002824 +002825 +002826 +002827 +002828 +002829 +002830 +002831 +002832 +002833 +002834 +002835 +002836 +002837 +002838 +002839 +002840 +002841 +002842 +002843 +002844 +002845 +002846 +002847 +002848 +002849 +002850 +002851 +002852 +002853 +002854 +002855 +002856 +002857 +002858 +002859 +002860 +002861 +002862 +002863 +002864 +002865 +002866 +002867 +002868 +002869 +002870 +002871 +002872 +002873 +002874 +002875 +002876 +002877 +002878 +002879 +002880 +002881 +002882 +002883 +002884 +002885 +002886 +002887 +002888 +002889 +002890 +002891 +002892 +002893 +002894 +002895 +002896 +002897 +002898 +002899 +002900 +002901 +002902 +002903 +002904 +002905 +002906 +002907 +002908 +002909 +002910 +002911 +002912 +002913 +002914 +002915 +002916 +002917 +002918 +002919 +002920 +002921 +002922 +002923 +002924 +002925 +002926 +002927 +002928 +002929 +002930 +002931 +002932 +002933 +002934 +002935 +002936 +002937 +002938 +002939 +002940 +002941 +002942 +002943 +002944 +002945 +002946 +002947 +002948 +002949 +002950 +002951 +002952 +002953 +002954 +002955 +002956 +002957 +002958 +002959 +002960 +002961 +002962 +002963 +002964 +002965 +002966 +002967 +002968 +002969 +002970 +002971 +002972 +002973 +002974 +002975 +002976 +002977 +002978 +002979 +002980 +002981 +002982 +002983 +002984 +002985 +002986 +002987 +002988 +002989 +002990 +002991 +002992 +002993 +002994 +002995 +002996 +002997 +002998 +002999 +003000 +003001 +003002 +003003 +003004 +003005 +003006 +003007 +003008 +003009 +003010 +003011 +003012 +003013 +003014 +003015 +003016 +003017 +003018 +003019 +003020 +003021 +003022 +003023 +003024 +003025 +003026 +003027 +003028 +003029 +003030 +003031 +003032 +003033 +003034 +003035 +003036 +003037 +003038 +003039 +003040 +003041 +003042 +003043 +003044 +003045 +003046 +003047 +003048 +003049 +003050 +003051 +003052 +003053 +003054 +003055 +003056 +003057 +003058 +003059 +003060 +003061 +003062 +003063 +003064 +003065 +003066 +003067 +003068 +003069 +003070 +003071 +003072 +003073 +003074 +003075 +003076 +003077 +003078 +003079 +003080 +003081 +003082 +003083 +003084 +003085 +003086 +003087 +003088 +003089 +003090 +003091 +003092 +003093 +003094 +003095 +003096 +003097 +003098 +003099 +003100 +003101 +003102 +003103 +003104 +003105 +003106 +003107 +003108 +003109 +003110 +003111 +003112 +003113 +003114 +003115 +003116 +003117 
+003118 +003119 +003120 +003121 +003122 +003123 +003124 +003125 +003126 +003127 +003128 +003129 +003130 +003131 +003132 +003133 +003134 +003135 +003136 +003137 +003138 +003139 +003140 +003141 +003142 +003143 +003144 +003145 +003146 +003147 +003148 +003149 +003150 +003151 +003152 +003153 +003154 +003155 +003156 +003157 +003158 +003159 +003160 +003161 +003162 +003163 +003164 +003165 +003166 +003167 +003168 +003169 +003170 +003171 +003172 +003173 +003174 +003175 +003176 +003177 +003178 +003179 +003180 +003181 +003182 +003183 +003184 +003185 +003186 +003187 +003188 +003189 +003190 +003191 +003192 +003193 +003194 +003195 +003196 +003197 +003198 +003199 +003200 +003201 +003202 +003203 +003204 +003205 +003206 +003207 +003208 +003209 +003210 +003211 +003212 +003213 +003214 +003215 +003216 +003217 +003218 +003219 +003220 +003221 +003222 +003223 +003224 +003225 +003226 +003227 +003228 +003229 +003230 +003231 +003232 +003233 +003234 +003235 +003236 +003237 +003238 +003239 +003240 +003241 +003242 +003243 +003244 +003245 +003246 +003247 +003248 +003249 +003250 +003251 +003252 +003253 +003254 +003255 +003256 +003257 +003258 +003259 +003260 +003261 +003262 +003263 +003264 +003265 +003266 +003267 +003268 +003269 +003270 +003271 +003272 +003273 +003274 +003275 +003276 +003277 +003278 +003279 +003280 +003281 +003282 +003283 +003284 +003285 +003286 +003287 +003288 +003289 +003290 +003291 +003292 +003293 +003294 +003295 +003296 +003297 +003298 +003299 +003300 +003301 +003302 +003303 +003304 +003305 +003306 +003307 +003308 +003309 +003310 +003311 +003312 +003313 +003314 +003315 +003316 +003317 +003318 +003319 +003320 +003321 +003322 +003323 +003324 +003325 +003326 +003327 +003328 +003329 +003330 +003331 +003332 +003333 +003334 +003335 +003336 +003337 +003338 +003339 +003340 +003341 +003342 +003343 +003344 +003345 +003346 +003347 +003348 +003349 +003350 +003351 +003352 +003353 +003354 +003355 +003356 +003357 +003358 +003359 +003360 +003361 +003362 +003363 +003364 +003365 +003366 +003367 +003368 +003369 +003370 +003371 +003372 +003373 +003374 +003375 +003376 +003377 +003378 +003379 +003380 +003381 +003382 +003383 +003384 +003385 +003386 +003387 +003388 +003389 +003390 +003391 +003392 +003393 +003394 +003395 +003396 +003397 +003398 +003399 +003400 +003401 +003402 +003403 +003404 +003405 +003406 +003407 +003408 +003409 +003410 +003411 +003412 +003413 +003414 +003415 +003416 +003417 +003418 +003419 +003420 +003421 +003422 +003423 +003424 +003425 +003426 +003427 +003428 +003429 +003430 +003431 +003432 +003433 +003434 +003435 +003436 +003437 +003438 +003439 +003440 +003441 +003442 +003443 +003444 +003445 +003446 +003447 +003448 +003449 +003450 +003451 +003452 +003453 +003454 +003455 +003456 +003457 +003458 +003459 +003460 +003461 +003462 +003463 +003464 +003465 +003466 +003467 +003468 +003469 +003470 +003471 +003472 +003473 +003474 +003475 +003476 +003477 +003478 +003479 +003480 +003481 +003482 +003483 +003484 +003485 +003486 +003487 +003488 +003489 +003490 +003491 +003492 +003493 +003494 +003495 +003496 +003497 +003498 +003499 +003500 +003501 +003502 +003503 +003504 +003505 +003506 +003507 +003508 +003509 +003510 +003511 +003512 +003513 +003514 +003515 +003516 +003517 +003518 +003519 +003520 +003521 +003522 +003523 +003524 +003525 +003526 +003527 +003528 +003529 +003530 +003531 +003532 +003533 +003534 +003535 +003536 +003537 +003538 +003539 +003540 +003541 +003542 +003543 +003544 +003545 +003546 +003547 +003548 +003549 +003550 +003551 +003552 +003553 +003554 +003555 +003556 +003557 +003558 +003559 +003560 +003561 
+003562 +003563 +003564 +003565 +003566 +003567 +003568 +003569 +003570 +003571 +003572 +003573 +003574 +003575 +003576 +003577 +003578 +003579 +003580 +003581 +003582 +003583 +003584 +003585 +003586 +003587 +003588 +003589 +003590 +003591 +003592 +003593 +003594 +003595 +003596 +003597 +003598 +003599 +003600 +003601 +003602 +003603 +003604 +003605 +003606 +003607 +003608 +003609 +003610 +003611 +003612 +003613 +003614 +003615 +003616 +003617 +003618 +003619 +003620 +003621 +003622 +003623 +003624 +003625 +003626 +003627 +003628 +003629 +003630 +003631 +003632 +003633 +003634 +003635 +003636 +003637 +003638 +003639 +003640 +003641 +003642 +003643 +003644 +003645 +003646 +003647 +003648 +003649 +003650 +003651 +003652 +003653 +003654 +003655 +003656 +003657 +003658 +003659 +003660 +003661 +003662 +003663 +003664 +003665 +003666 +003667 +003668 +003669 +003670 +003671 +003672 +003673 +003674 +003675 +003676 +003677 +003678 +003679 +003680 +003681 +003682 +003683 +003684 +003685 +003686 +003687 +003688 +003689 +003690 +003691 +003692 +003693 +003694 +003695 +003696 +003697 +003698 +003699 +003700 +003701 +003702 +003703 +003704 +003705 +003706 +003707 +003708 +003709 +003710 +003711 +003712 +003713 +003714 +003715 +003716 +003717 +003718 +003719 +003720 +003721 +003722 +003723 +003724 +003725 +003726 +003727 +003728 +003729 +003730 +003731 +003732 +003733 +003734 +003735 +003736 +003737 +003738 +003739 +003740 +003741 +003742 +003743 +003744 +003745 +003746 +003747 +003748 +003749 +003750 +003751 +003752 +003753 +003754 +003755 +003756 +003757 +003758 +003759 +003760 +003761 +003762 +003763 +003764 +003765 +003766 +003767 +003768 +003769 +003770 +003771 +003772 +003773 +003774 +003775 +003776 +003777 +003778 +003779 +003780 +003781 +003782 +003783 +003784 +003785 +003786 +003787 +003788 +003789 +003790 +003791 +003792 +003793 +003794 +003795 +003796 +003797 +003798 +003799 +003800 +003801 +003802 +003803 +003804 +003805 +003806 +003807 +003808 +003809 +003810 +003811 +003812 +003813 +003814 +003815 +003816 +003817 +003818 +003819 +003820 +003821 +003822 +003823 +003824 +003825 +003826 +003827 +003828 +003829 +003830 +003831 +003832 +003833 +003834 +003835 +003836 +003837 +003838 +003839 +003840 +003841 +003842 +003843 +003844 +003845 +003846 +003847 +003848 +003849 +003850 +003851 +003852 +003853 +003854 +003855 +003856 +003857 +003858 +003859 +003860 +003861 +003862 +003863 +003864 +003865 +003866 +003867 +003868 +003869 +003870 +003871 +003872 +003873 +003874 +003875 +003876 +003877 +003878 +003879 +003880 +003881 +003882 +003883 +003884 +003885 +003886 +003887 +003888 +003889 +003890 +003891 +003892 +003893 +003894 +003895 +003896 +003897 +003898 +003899 +003900 +003901 +003902 +003903 +003904 +003905 +003906 +003907 +003908 +003909 +003910 +003911 +003912 +003913 +003914 +003915 +003916 +003917 +003918 +003919 +003920 +003921 +003922 +003923 +003924 +003925 +003926 +003927 +003928 +003929 +003930 +003931 +003932 +003933 +003934 +003935 +003936 +003937 +003938 +003939 +003940 +003941 +003942 +003943 +003944 +003945 +003946 +003947 +003948 +003949 +003950 +003951 +003952 +003953 +003954 +003955 +003956 +003957 +003958 +003959 +003960 +003961 +003962 +003963 +003964 +003965 +003966 +003967 +003968 +003969 +003970 +003971 +003972 +003973 +003974 +003975 +003976 +003977 +003978 +003979 +003980 +003981 +003982 +003983 +003984 +003985 +003986 +003987 +003988 +003989 +003990 +003991 +003992 +003993 +003994 +003995 +003996 +003997 +003998 +003999 +004000 +004001 +004002 +004003 +004004 +004005 
+004006 +004007 +004008 +004009 +004010 +004011 +004012 +004013 +004014 +004015 +004016 +004017 +004018 +004019 +004020 +004021 +004022 +004023 +004024 +004025 +004026 +004027 +004028 +004029 +004030 +004031 +004032 +004033 +004034 +004035 +004036 +004037 +004038 +004039 +004040 +004041 +004042 +004043 +004044 +004045 +004046 +004047 +004048 +004049 +004050 +004051 +004052 +004053 +004054 +004055 +004056 +004057 +004058 +004059 +004060 +004061 +004062 +004063 +004064 +004065 +004066 +004067 +004068 +004069 +004070 +004071 +004072 +004073 +004074 +004075 +004076 +004077 +004078 +004079 +004080 +004081 +004082 +004083 +004084 +004085 +004086 +004087 +004088 +004089 +004090 +004091 +004092 +004093 +004094 +004095 +004096 +004097 +004098 +004099 +004100 +004101 +004102 +004103 +004104 +004105 +004106 +004107 +004108 +004109 +004110 +004111 +004112 +004113 +004114 +004115 +004116 +004117 +004118 +004119 +004120 +004121 +004122 +004123 +004124 +004125 +004126 +004127 +004128 +004129 +004130 +004131 +004132 +004133 +004134 +004135 +004136 +004137 +004138 +004139 +004140 +004141 +004142 +004143 +004144 +004145 +004146 +004147 +004148 +004149 +004150 +004151 +004152 +004153 +004154 +004155 +004156 +004157 +004158 +004159 +004160 +004161 +004162 +004163 +004164 +004165 +004166 +004167 +004168 +004169 +004170 +004171 +004172 +004173 +004174 +004175 +004176 +004177 +004178 +004179 +004180 +004181 +004182 +004183 +004184 +004185 +004186 +004187 +004188 +004189 +004190 +004191 +004192 +004193 +004194 +004195 +004196 +004197 +004198 +004199 +004200 +004201 +004202 +004203 +004204 +004205 +004206 +004207 +004208 +004209 +004210 +004211 +004212 +004213 +004214 +004215 +004216 +004217 +004218 +004219 +004220 +004221 +004222 +004223 +004224 +004225 +004226 +004227 +004228 +004229 +004230 +004231 +004232 +004233 +004234 +004235 +004236 +004237 +004238 +004239 +004240 +004241 +004242 +004243 +004244 +004245 +004246 +004247 +004248 +004249 +004250 +004251 +004252 +004253 +004254 +004255 +004256 +004257 +004258 +004259 +004260 +004261 +004262 +004263 +004264 +004265 +004266 +004267 +004268 +004269 +004270 +004271 +004272 +004273 +004274 +004275 +004276 +004277 +004278 +004279 +004280 +004281 +004282 +004283 +004284 +004285 +004286 +004287 +004288 +004289 +004290 +004291 +004292 +004293 +004294 +004295 +004296 +004297 +004298 +004299 +004300 +004301 +004302 +004303 +004304 +004305 +004306 +004307 +004308 +004309 +004310 +004311 +004312 +004313 +004314 +004315 +004316 +004317 +004318 +004319 +004320 +004321 +004322 +004323 +004324 +004325 +004326 +004327 +004328 +004329 +004330 +004331 +004332 +004333 +004334 +004335 +004336 +004337 +004338 +004339 +004340 +004341 +004342 +004343 +004344 +004345 +004346 +004347 +004348 +004349 +004350 +004351 +004352 +004353 +004354 +004355 +004356 +004357 +004358 +004359 +004360 +004361 +004362 +004363 +004364 +004365 +004366 +004367 +004368 +004369 +004370 +004371 +004372 +004373 +004374 +004375 +004376 +004377 +004378 +004379 +004380 +004381 +004382 +004383 +004384 +004385 +004386 +004387 +004388 +004389 +004390 +004391 +004392 +004393 +004394 +004395 +004396 +004397 +004398 +004399 +004400 +004401 +004402 +004403 +004404 +004405 +004406 +004407 +004408 +004409 +004410 +004411 +004412 +004413 +004414 +004415 +004416 +004417 +004418 +004419 +004420 +004421 +004422 +004423 +004424 +004425 +004426 +004427 +004428 +004429 +004430 +004431 +004432 +004433 +004434 +004435 +004436 +004437 +004438 +004439 +004440 +004441 +004442 +004443 +004444 +004445 +004446 +004447 +004448 +004449 
+004450 +004451 +004452 +004453 +004454 +004455 +004456 +004457 +004458 +004459 +004460 +004461 +004462 +004463 +004464 +004465 +004466 +004467 +004468 +004469 +004470 +004471 +004472 +004473 +004474 +004475 +004476 +004477 +004478 +004479 +004480 +004481 +004482 +004483 +004484 +004485 +004486 +004487 +004488 +004489 +004490 +004491 +004492 +004493 +004494 +004495 +004496 +004497 +004498 +004499 +004500 +004501 +004502 +004503 +004504 +004505 +004506 +004507 +004508 +004509 +004510 +004511 +004512 +004513 +004514 +004515 +004516 +004517 +004518 +004519 +004520 +004521 +004522 +004523 +004524 +004525 +004526 +004527 +004528 +004529 +004530 +004531 +004532 +004533 +004534 +004535 +004536 +004537 +004538 +004539 +004540 +004541 +004542 +004543 +004544 +004545 +004546 +004547 +004548 +004549 +004550 +004551 +004552 +004553 +004554 +004555 +004556 +004557 +004558 +004559 +004560 +004561 +004562 +004563 +004564 +004565 +004566 +004567 +004568 +004569 +004570 +004571 +004572 +004573 +004574 +004575 +004576 +004577 +004578 +004579 +004580 +004581 +004582 +004583 +004584 +004585 +004586 +004587 +004588 +004589 +004590 +004591 +004592 +004593 +004594 +004595 +004596 +004597 +004598 +004599 +004600 +004601 +004602 +004603 +004604 +004605 +004606 +004607 +004608 +004609 +004610 +004611 +004612 +004613 +004614 +004615 +004616 +004617 +004618 +004619 +004620 +004621 +004622 +004623 +004624 +004625 +004626 +004627 +004628 +004629 +004630 +004631 +004632 +004633 +004634 +004635 +004636 +004637 +004638 +004639 +004640 +004641 +004642 +004643 +004644 +004645 +004646 +004647 +004648 +004649 +004650 +004651 +004652 +004653 +004654 +004655 +004656 +004657 +004658 +004659 +004660 +004661 +004662 +004663 +004664 +004665 +004666 +004667 +004668 +004669 +004670 +004671 +004672 +004673 +004674 +004675 +004676 +004677 +004678 +004679 +004680 +004681 +004682 +004683 +004684 +004685 +004686 +004687 +004688 +004689 +004690 +004691 +004692 +004693 +004694 +004695 +004696 +004697 +004698 +004699 +004700 +004701 +004702 +004703 +004704 +004705 +004706 +004707 +004708 +004709 +004710 +004711 +004712 +004713 +004714 +004715 +004716 +004717 +004718 +004719 +004720 +004721 +004722 +004723 +004724 +004725 +004726 +004727 +004728 +004729 +004730 +004731 +004732 +004733 +004734 +004735 +004736 +004737 +004738 +004739 +004740 +004741 +004742 +004743 +004744 +004745 +004746 +004747 +004748 +004749 +004750 +004751 +004752 +004753 +004754 +004755 +004756 +004757 +004758 +004759 +004760 +004761 +004762 +004763 +004764 +004765 +004766 +004767 +004768 +004769 +004770 +004771 +004772 +004773 +004774 +004775 +004776 +004777 +004778 +004779 +004780 +004781 +004782 +004783 +004784 +004785 +004786 +004787 +004788 +004789 +004790 +004791 +004792 +004793 +004794 +004795 +004796 +004797 +004798 +004799 +004800 +004801 +004802 +004803 +004804 +004805 +004806 +004807 +004808 +004809 +004810 +004811 +004812 +004813 +004814 +004815 +004816 +004817 +004818 +004819 +004820 +004821 +004822 +004823 +004824 +004825 +004826 +004827 +004828 +004829 +004830 +004831 +004832 +004833 +004834 +004835 +004836 +004837 +004838 +004839 +004840 +004841 +004842 +004843 +004844 +004845 +004846 +004847 +004848 +004849 +004850 +004851 +004852 +004853 +004854 +004855 +004856 +004857 +004858 +004859 +004860 +004861 +004862 +004863 +004864 +004865 +004866 +004867 +004868 +004869 +004870 +004871 +004872 +004873 +004874 +004875 +004876 +004877 +004878 +004879 +004880 +004881 +004882 +004883 +004884 +004885 +004886 +004887 +004888 +004889 +004890 +004891 +004892 +004893 
+004894 +004895 +004896 +004897 +004898 +004899 +004900 +004901 +004902 +004903 +004904 +004905 +004906 +004907 +004908 +004909 +004910 +004911 +004912 +004913 +004914 +004915 +004916 +004917 +004918 +004919 +004920 +004921 +004922 +004923 +004924 +004925 +004926 +004927 +004928 +004929 +004930 +004931 +004932 +004933 +004934 +004935 +004936 +004937 +004938 +004939 +004940 +004941 +004942 +004943 +004944 +004945 +004946 +004947 +004948 +004949 +004950 +004951 +004952 +004953 +004954 +004955 +004956 +004957 +004958 +004959 +004960 +004961 +004962 +004963 +004964 +004965 +004966 +004967 +004968 +004969 +004970 +004971 +004972 +004973 +004974 +004975 +004976 +004977 +004978 +004979 +004980 +004981 +004982 +004983 +004984 +004985 +004986 +004987 +004988 +004989 +004990 +004991 +004992 +004993 +004994 +004995 +004996 +004997 +004998 +004999 +005000 +005001 +005002 +005003 +005004 +005005 +005006 +005007 +005008 +005009 +005010 +005011 +005012 +005013 +005014 +005015 +005016 +005017 +005018 +005019 +005020 +005021 +005022 +005023 +005024 +005025 +005026 +005027 +005028 +005029 +005030 +005031 +005032 +005033 +005034 +005035 +005036 +005037 +005038 +005039 +005040 +005041 +005042 +005043 +005044 +005045 +005046 +005047 +005048 +005049 +005050 +005051 +005052 +005053 +005054 +005055 +005056 +005057 +005058 +005059 +005060 +005061 +005062 +005063 +005064 +005065 +005066 +005067 +005068 +005069 +005070 +005071 +005072 +005073 +005074 +005075 +005076 +005077 +005078 +005079 +005080 +005081 +005082 +005083 +005084 +005085 +005086 +005087 +005088 +005089 +005090 +005091 +005092 +005093 +005094 +005095 +005096 +005097 +005098 +005099 +005100 +005101 +005102 +005103 +005104 +005105 +005106 +005107 +005108 +005109 +005110 +005111 +005112 +005113 +005114 +005115 +005116 +005117 +005118 +005119 +005120 +005121 +005122 +005123 +005124 +005125 +005126 +005127 +005128 +005129 +005130 +005131 +005132 +005133 +005134 +005135 +005136 +005137 +005138 +005139 +005140 +005141 +005142 +005143 +005144 +005145 +005146 +005147 +005148 +005149 +005150 +005151 +005152 +005153 +005154 +005155 +005156 +005157 +005158 +005159 +005160 +005161 +005162 +005163 +005164 +005165 +005166 +005167 +005168 +005169 +005170 +005171 +005172 +005173 +005174 +005175 +005176 +005177 +005178 +005179 +005180 +005181 +005182 +005183 +005184 +005185 +005186 +005187 +005188 +005189 +005190 +005191 +005192 +005193 +005194 +005195 +005196 +005197 +005198 +005199 +005200 +005201 +005202 +005203 +005204 +005205 +005206 +005207 +005208 +005209 +005210 +005211 +005212 +005213 +005214 +005215 +005216 +005217 +005218 +005219 +005220 +005221 +005222 +005223 +005224 +005225 +005226 +005227 +005228 +005229 +005230 +005231 +005232 +005233 +005234 +005235 +005236 +005237 +005238 +005239 +005240 +005241 +005242 +005243 +005244 +005245 +005246 +005247 +005248 +005249 +005250 +005251 +005252 +005253 +005254 +005255 +005256 +005257 +005258 +005259 +005260 +005261 +005262 +005263 +005264 +005265 +005266 +005267 +005268 +005269 +005270 +005271 +005272 +005273 +005274 +005275 +005276 +005277 +005278 +005279 +005280 +005281 +005282 +005283 +005284 +005285 +005286 +005287 +005288 +005289 +005290 +005291 +005292 +005293 +005294 +005295 +005296 +005297 +005298 +005299 +005300 +005301 +005302 +005303 +005304 +005305 +005306 +005307 +005308 +005309 +005310 +005311 +005312 +005313 +005314 +005315 +005316 +005317 +005318 +005319 +005320 +005321 +005322 +005323 +005324 +005325 +005326 +005327 +005328 +005329 +005330 +005331 +005332 +005333 +005334 +005335 +005336 +005337 
+005338 +005339 +005340 +005341 +005342 +005343 +005344 +005345 +005346 +005347 +005348 +005349 +005350 +005351 +005352 +005353 +005354 +005355 +005356 +005357 +005358 +005359 +005360 +005361 +005362 +005363 +005364 +005365 +005366 +005367 +005368 +005369 +005370 +005371 +005372 +005373 +005374 +005375 +005376 +005377 +005378 +005379 +005380 +005381 +005382 +005383 +005384 +005385 +005386 +005387 +005388 +005389 +005390 +005391 +005392 +005393 +005394 +005395 +005396 +005397 +005398 +005399 +005400 +005401 +005402 +005403 +005404 +005405 +005406 +005407 +005408 +005409 +005410 +005411 +005412 +005413 +005414 +005415 +005416 +005417 +005418 +005419 +005420 +005421 +005422 +005423 +005424 +005425 +005426 +005427 +005428 +005429 +005430 +005431 +005432 +005433 +005434 +005435 +005436 +005437 +005438 +005439 +005440 +005441 +005442 +005443 +005444 +005445 +005446 +005447 +005448 +005449 +005450 +005451 +005452 +005453 +005454 +005455 +005456 +005457 +005458 +005459 +005460 +005461 +005462 +005463 +005464 +005465 +005466 +005467 +005468 +005469 +005470 +005471 +005472 +005473 +005474 +005475 +005476 +005477 +005478 +005479 +005480 +005481 +005482 +005483 +005484 +005485 +005486 +005487 +005488 +005489 +005490 +005491 +005492 +005493 +005494 +005495 +005496 +005497 +005498 +005499 +005500 +005501 +005502 +005503 +005504 +005505 +005506 +005507 +005508 +005509 +005510 +005511 +005512 +005513 +005514 +005515 +005516 +005517 +005518 +005519 +005520 +005521 +005522 +005523 +005524 +005525 +005526 +005527 +005528 +005529 +005530 +005531 +005532 +005533 +005534 +005535 +005536 +005537 +005538 +005539 +005540 +005541 +005542 +005543 +005544 +005545 +005546 +005547 +005548 +005549 +005550 +005551 +005552 +005553 +005554 +005555 +005556 +005557 +005558 +005559 +005560 +005561 +005562 +005563 +005564 +005565 +005566 +005567 +005568 +005569 +005570 +005571 +005572 +005573 +005574 +005575 +005576 +005577 +005578 +005579 +005580 +005581 +005582 +005583 +005584 +005585 +005586 +005587 +005588 +005589 +005590 +005591 +005592 +005593 +005594 +005595 +005596 +005597 +005598 +005599 +005600 +005601 +005602 +005603 +005604 +005605 +005606 +005607 +005608 +005609 +005610 +005611 +005612 +005613 +005614 +005615 +005616 +005617 +005618 +005619 +005620 +005621 +005622 +005623 +005624 +005625 +005626 +005627 +005628 +005629 +005630 +005631 +005632 +005633 +005634 +005635 +005636 +005637 +005638 +005639 +005640 +005641 +005642 +005643 +005644 +005645 +005646 +005647 +005648 +005649 +005650 +005651 +005652 +005653 +005654 +005655 +005656 +005657 +005658 +005659 +005660 +005661 +005662 +005663 +005664 +005665 +005666 +005667 +005668 +005669 +005670 +005671 +005672 +005673 +005674 +005675 +005676 +005677 +005678 +005679 +005680 +005681 +005682 +005683 +005684 +005685 +005686 +005687 +005688 +005689 +005690 +005691 +005692 +005693 +005694 +005695 +005696 +005697 +005698 +005699 +005700 +005701 +005702 +005703 +005704 +005705 +005706 +005707 +005708 +005709 +005710 +005711 +005712 +005713 +005714 +005715 +005716 +005717 +005718 +005719 +005720 +005721 +005722 +005723 +005724 +005725 +005726 +005727 +005728 +005729 +005730 +005731 +005732 +005733 +005734 +005735 +005736 +005737 +005738 +005739 +005740 +005741 +005742 +005743 +005744 +005745 +005746 +005747 +005748 +005749 +005750 +005751 +005752 +005753 +005754 +005755 +005756 +005757 +005758 +005759 +005760 +005761 +005762 +005763 +005764 +005765 +005766 +005767 +005768 +005769 +005770 +005771 +005772 +005773 +005774 +005775 +005776 +005777 +005778 +005779 +005780 +005781 
+005782 +005783 +005784 +005785 +005786 +005787 +005788 +005789 +005790 +005791 +005792 +005793 +005794 +005795 +005796 +005797 +005798 +005799 +005800 +005801 +005802 +005803 +005804 +005805 +005806 +005807 +005808 +005809 +005810 +005811 +005812 +005813 +005814 +005815 +005816 +005817 +005818 +005819 +005820 +005821 +005822 +005823 +005824 +005825 +005826 +005827 +005828 +005829 +005830 +005831 +005832 +005833 +005834 +005835 +005836 +005837 +005838 +005839 +005840 +005841 +005842 +005843 +005844 +005845 +005846 +005847 +005848 +005849 +005850 +005851 +005852 +005853 +005854 +005855 +005856 +005857 +005858 +005859 +005860 +005861 +005862 +005863 +005864 +005865 +005866 +005867 +005868 +005869 +005870 +005871 +005872 +005873 +005874 +005875 +005876 +005877 +005878 +005879 +005880 +005881 +005882 +005883 +005884 +005885 +005886 +005887 +005888 +005889 +005890 +005891 +005892 +005893 +005894 +005895 +005896 +005897 +005898 +005899 +005900 +005901 +005902 +005903 +005904 +005905 +005906 +005907 +005908 +005909 +005910 +005911 +005912 +005913 +005914 +005915 +005916 +005917 +005918 +005919 +005920 +005921 +005922 +005923 +005924 +005925 +005926 +005927 +005928 +005929 +005930 +005931 +005932 +005933 +005934 +005935 +005936 +005937 +005938 +005939 +005940 +005941 +005942 +005943 +005944 +005945 +005946 +005947 +005948 +005949 +005950 +005951 +005952 +005953 +005954 +005955 +005956 +005957 +005958 +005959 +005960 +005961 +005962 +005963 +005964 +005965 +005966 +005967 +005968 +005969 +005970 +005971 +005972 +005973 +005974 +005975 +005976 +005977 +005978 +005979 +005980 +005981 +005982 +005983 +005984 +005985 +005986 +005987 +005988 +005989 +005990 +005991 +005992 +005993 +005994 +005995 +005996 +005997 +005998 +005999 +006000 +006001 +006002 +006003 +006004 +006005 +006006 +006007 +006008 +006009 +006010 +006011 +006012 +006013 +006014 +006015 +006016 +006017 +006018 +006019 +006020 +006021 +006022 +006023 +006024 +006025 +006026 +006027 +006028 +006029 +006030 +006031 +006032 +006033 +006034 +006035 +006036 +006037 +006038 +006039 +006040 +006041 +006042 +006043 +006044 +006045 +006046 +006047 +006048 +006049 +006050 +006051 +006052 +006053 +006054 +006055 +006056 +006057 +006058 +006059 +006060 +006061 +006062 +006063 +006064 +006065 +006066 +006067 +006068 +006069 +006070 +006071 +006072 +006073 +006074 +006075 +006076 +006077 +006078 +006079 +006080 +006081 +006082 +006083 +006084 +006085 +006086 +006087 +006088 +006089 +006090 +006091 +006092 +006093 +006094 +006095 +006096 +006097 +006098 +006099 +006100 +006101 +006102 +006103 +006104 +006105 +006106 +006107 +006108 +006109 +006110 +006111 +006112 +006113 +006114 +006115 +006116 +006117 +006118 +006119 +006120 +006121 +006122 +006123 +006124 +006125 +006126 +006127 +006128 +006129 +006130 +006131 +006132 +006133 +006134 +006135 +006136 +006137 +006138 +006139 +006140 +006141 +006142 +006143 +006144 +006145 +006146 +006147 +006148 +006149 +006150 +006151 +006152 +006153 +006154 +006155 +006156 +006157 +006158 +006159 +006160 +006161 +006162 +006163 +006164 +006165 +006166 +006167 +006168 +006169 +006170 +006171 +006172 +006173 +006174 +006175 +006176 +006177 +006178 +006179 +006180 +006181 +006182 +006183 +006184 +006185 +006186 +006187 +006188 +006189 +006190 +006191 +006192 +006193 +006194 +006195 +006196 +006197 +006198 +006199 +006200 +006201 +006202 +006203 +006204 +006205 +006206 +006207 +006208 +006209 +006210 +006211 +006212 +006213 +006214 +006215 +006216 +006217 +006218 +006219 +006220 +006221 +006222 +006223 +006224 +006225 
+006226 +006227 +006228 +006229 +006230 +006231 +006232 +006233 +006234 +006235 +006236 +006237 +006238 +006239 +006240 +006241 +006242 +006243 +006244 +006245 +006246 +006247 +006248 +006249 +006250 +006251 +006252 +006253 +006254 +006255 +006256 +006257 +006258 +006259 +006260 +006261 +006262 +006263 +006264 +006265 +006266 +006267 +006268 +006269 +006270 +006271 +006272 +006273 +006274 +006275 +006276 +006277 +006278 +006279 +006280 +006281 +006282 +006283 +006284 +006285 +006286 +006287 +006288 +006289 +006290 +006291 +006292 +006293 +006294 +006295 +006296 +006297 +006298 +006299 +006300 +006301 +006302 +006303 +006304 +006305 +006306 +006307 +006308 +006309 +006310 +006311 +006312 +006313 +006314 +006315 +006316 +006317 +006318 +006319 +006320 +006321 +006322 +006323 +006324 +006325 +006326 +006327 +006328 +006329 +006330 +006331 +006332 +006333 +006334 +006335 +006336 +006337 +006338 +006339 +006340 +006341 +006342 +006343 +006344 +006345 +006346 +006347 +006348 +006349 +006350 +006351 +006352 +006353 +006354 +006355 +006356 +006357 +006358 +006359 +006360 +006361 +006362 +006363 +006364 +006365 +006366 +006367 +006368 +006369 +006370 +006371 +006372 +006373 +006374 +006375 +006376 +006377 +006378 +006379 +006380 +006381 +006382 +006383 +006384 +006385 +006386 +006387 +006388 +006389 +006390 +006391 +006392 +006393 +006394 +006395 +006396 +006397 +006398 +006399 +006400 +006401 +006402 +006403 +006404 +006405 +006406 +006407 +006408 +006409 +006410 +006411 +006412 +006413 +006414 +006415 +006416 +006417 +006418 +006419 +006420 +006421 +006422 +006423 +006424 +006425 +006426 +006427 +006428 +006429 +006430 +006431 +006432 +006433 +006434 +006435 +006436 +006437 +006438 +006439 +006440 +006441 +006442 +006443 +006444 +006445 +006446 +006447 +006448 +006449 +006450 +006451 +006452 +006453 +006454 +006455 +006456 +006457 +006458 +006459 +006460 +006461 +006462 +006463 +006464 +006465 +006466 +006467 +006468 +006469 +006470 +006471 +006472 +006473 +006474 +006475 +006476 +006477 +006478 +006479 +006480 +006481 +006482 +006483 +006484 +006485 +006486 +006487 +006488 +006489 +006490 +006491 +006492 +006493 +006494 +006495 +006496 +006497 +006498 +006499 +006500 +006501 +006502 +006503 +006504 +006505 +006506 +006507 +006508 +006509 +006510 +006511 +006512 +006513 +006514 +006515 +006516 +006517 +006518 +006519 +006520 +006521 +006522 +006523 +006524 +006525 +006526 +006527 +006528 +006529 +006530 +006531 +006532 +006533 +006534 +006535 +006536 +006537 +006538 +006539 +006540 +006541 +006542 +006543 +006544 +006545 +006546 +006547 +006548 +006549 +006550 +006551 +006552 +006553 +006554 +006555 +006556 +006557 +006558 +006559 +006560 +006561 +006562 +006563 +006564 +006565 +006566 +006567 +006568 +006569 +006570 +006571 +006572 +006573 +006574 +006575 +006576 +006577 +006578 +006579 +006580 +006581 +006582 +006583 +006584 +006585 +006586 +006587 +006588 +006589 +006590 +006591 +006592 +006593 +006594 +006595 +006596 +006597 +006598 +006599 +006600 +006601 +006602 +006603 +006604 +006605 +006606 +006607 +006608 +006609 +006610 +006611 +006612 +006613 +006614 +006615 +006616 +006617 +006618 +006619 +006620 +006621 +006622 +006623 +006624 +006625 +006626 +006627 +006628 +006629 +006630 +006631 +006632 +006633 +006634 +006635 +006636 +006637 +006638 +006639 +006640 +006641 +006642 +006643 +006644 +006645 +006646 +006647 +006648 +006649 +006650 +006651 +006652 +006653 +006654 +006655 +006656 +006657 +006658 +006659 +006660 +006661 +006662 +006663 +006664 +006665 +006666 +006667 +006668 +006669 
+006670 +006671 +006672 +006673 +006674 +006675 +006676 +006677 +006678 +006679 +006680 +006681 +006682 +006683 +006684 +006685 +006686 +006687 +006688 +006689 +006690 +006691 +006692 +006693 +006694 +006695 +006696 +006697 +006698 +006699 +006700 +006701 +006702 +006703 +006704 +006705 +006706 +006707 +006708 +006709 +006710 +006711 +006712 +006713 +006714 +006715 +006716 +006717 +006718 +006719 +006720 +006721 +006722 +006723 +006724 +006725 +006726 +006727 +006728 +006729 +006730 +006731 +006732 +006733 +006734 +006735 +006736 +006737 +006738 +006739 +006740 +006741 +006742 +006743 +006744 +006745 +006746 +006747 +006748 +006749 +006750 +006751 +006752 +006753 +006754 +006755 +006756 +006757 +006758 +006759 +006760 +006761 +006762 +006763 +006764 +006765 +006766 +006767 +006768 +006769 +006770 +006771 +006772 +006773 +006774 +006775 +006776 +006777 +006778 +006779 +006780 +006781 +006782 +006783 +006784 +006785 +006786 +006787 +006788 +006789 +006790 +006791 +006792 +006793 +006794 +006795 +006796 +006797 +006798 +006799 +006800 +006801 +006802 +006803 +006804 +006805 +006806 +006807 +006808 +006809 +006810 +006811 +006812 +006813 +006814 +006815 +006816 +006817 +006818 +006819 +006820 +006821 +006822 +006823 +006824 +006825 +006826 +006827 +006828 +006829 +006830 +006831 +006832 +006833 +006834 +006835 +006836 +006837 +006838 +006839 +006840 +006841 +006842 +006843 +006844 +006845 +006846 +006847 +006848 +006849 +006850 +006851 +006852 +006853 +006854 +006855 +006856 +006857 +006858 +006859 +006860 +006861 +006862 +006863 +006864 +006865 +006866 +006867 +006868 +006869 +006870 +006871 +006872 +006873 +006874 +006875 +006876 +006877 +006878 +006879 +006880 +006881 +006882 +006883 +006884 +006885 +006886 +006887 +006888 +006889 +006890 +006891 +006892 +006893 +006894 +006895 +006896 +006897 +006898 +006899 +006900 +006901 +006902 +006903 +006904 +006905 +006906 +006907 +006908 +006909 +006910 +006911 +006912 +006913 +006914 +006915 +006916 +006917 +006918 +006919 +006920 +006921 +006922 +006923 +006924 +006925 +006926 +006927 +006928 +006929 +006930 +006931 +006932 +006933 +006934 +006935 +006936 +006937 +006938 +006939 +006940 +006941 +006942 +006943 +006944 +006945 +006946 +006947 +006948 +006949 +006950 +006951 +006952 +006953 +006954 +006955 +006956 +006957 +006958 +006959 +006960 +006961 +006962 +006963 +006964 +006965 +006966 +006967 +006968 +006969 +006970 +006971 +006972 +006973 +006974 +006975 +006976 +006977 +006978 +006979 +006980 +006981 +006982 +006983 +006984 +006985 +006986 +006987 +006988 +006989 +006990 +006991 +006992 +006993 +006994 +006995 +006996 +006997 +006998 +006999 +007000 +007001 +007002 +007003 +007004 +007005 +007006 +007007 +007008 +007009 +007010 +007011 +007012 +007013 +007014 +007015 +007016 +007017 +007018 +007019 +007020 +007021 +007022 +007023 +007024 +007025 +007026 +007027 +007028 +007029 +007030 +007031 +007032 +007033 +007034 +007035 +007036 +007037 +007038 +007039 +007040 +007041 +007042 +007043 +007044 +007045 +007046 +007047 +007048 +007049 +007050 +007051 +007052 +007053 +007054 +007055 +007056 +007057 +007058 +007059 +007060 +007061 +007062 +007063 +007064 +007065 +007066 +007067 +007068 +007069 +007070 +007071 +007072 +007073 +007074 +007075 +007076 +007077 +007078 +007079 +007080 +007081 +007082 +007083 +007084 +007085 +007086 +007087 +007088 +007089 +007090 +007091 +007092 +007093 +007094 +007095 +007096 +007097 +007098 +007099 +007100 +007101 +007102 +007103 +007104 +007105 +007106 +007107 +007108 +007109 +007110 +007111 +007112 +007113 
+007114 +007115 +007116 +007117 +007118 +007119 +007120 +007121 +007122 +007123 +007124 +007125 +007126 +007127 +007128 +007129 +007130 +007131 +007132 +007133 +007134 +007135 +007136 +007137 +007138 +007139 +007140 +007141 +007142 +007143 +007144 +007145 +007146 +007147 +007148 +007149 +007150 +007151 +007152 +007153 +007154 +007155 +007156 +007157 +007158 +007159 +007160 +007161 +007162 +007163 +007164 +007165 +007166 +007167 +007168 +007169 +007170 +007171 +007172 +007173 +007174 +007175 +007176 +007177 +007178 +007179 +007180 +007181 +007182 +007183 +007184 +007185 +007186 +007187 +007188 +007189 +007190 +007191 +007192 +007193 +007194 +007195 +007196 +007197 +007198 +007199 +007200 +007201 +007202 +007203 +007204 +007205 +007206 +007207 +007208 +007209 +007210 +007211 +007212 +007213 +007214 +007215 +007216 +007217 +007218 +007219 +007220 +007221 +007222 +007223 +007224 +007225 +007226 +007227 +007228 +007229 +007230 +007231 +007232 +007233 +007234 +007235 +007236 +007237 +007238 +007239 +007240 +007241 +007242 +007243 +007244 +007245 +007246 +007247 +007248 +007249 +007250 +007251 +007252 +007253 +007254 +007255 +007256 +007257 +007258 +007259 +007260 +007261 +007262 +007263 +007264 +007265 +007266 +007267 +007268 +007269 +007270 +007271 +007272 +007273 +007274 +007275 +007276 +007277 +007278 +007279 +007280 +007281 +007282 +007283 +007284 +007285 +007286 +007287 +007288 +007289 +007290 +007291 +007292 +007293 +007294 +007295 +007296 +007297 +007298 +007299 +007300 +007301 +007302 +007303 +007304 +007305 +007306 +007307 +007308 +007309 +007310 +007311 +007312 +007313 +007314 +007315 +007316 +007317 +007318 +007319 +007320 +007321 +007322 +007323 +007324 +007325 +007326 +007327 +007328 +007329 +007330 +007331 +007332 +007333 +007334 +007335 +007336 +007337 +007338 +007339 +007340 +007341 +007342 +007343 +007344 +007345 +007346 +007347 +007348 +007349 +007350 +007351 +007352 +007353 +007354 +007355 +007356 +007357 +007358 +007359 +007360 +007361 +007362 +007363 +007364 +007365 +007366 +007367 +007368 +007369 +007370 +007371 +007372 +007373 +007374 +007375 +007376 +007377 +007378 +007379 +007380 +007381 +007382 +007383 +007384 +007385 +007386 +007387 +007388 +007389 +007390 +007391 +007392 +007393 +007394 +007395 +007396 +007397 +007398 +007399 +007400 +007401 +007402 +007403 +007404 +007405 +007406 +007407 +007408 +007409 +007410 +007411 +007412 +007413 +007414 +007415 +007416 +007417 +007418 +007419 +007420 +007421 +007422 +007423 +007424 +007425 +007426 +007427 +007428 +007429 +007430 +007431 +007432 +007433 +007434 +007435 +007436 +007437 +007438 +007439 +007440 +007441 +007442 +007443 +007444 +007445 +007446 +007447 +007448 +007449 +007450 +007451 +007452 +007453 +007454 +007455 +007456 +007457 +007458 +007459 +007460 +007461 +007462 +007463 +007464 +007465 +007466 +007467 +007468 +007469 +007470 +007471 +007472 +007473 +007474 +007475 +007476 +007477 +007478 +007479 +007480 \ No newline at end of file diff --git a/data/KITTI/ImageSets/val.txt b/data/KITTI/ImageSets/val.txt new file mode 100644 index 0000000..258ca11 --- /dev/null +++ b/data/KITTI/ImageSets/val.txt @@ -0,0 +1,3769 @@ +000001 +000002 +000004 +000005 +000006 +000008 +000015 +000019 +000020 +000021 +000023 +000024 +000025 +000027 +000028 +000031 +000033 +000035 +000037 +000039 +000040 +000042 +000047 +000048 +000050 +000052 +000053 +000058 +000059 +000061 +000062 +000063 +000065 +000066 +000076 +000077 +000078 +000081 +000089 +000090 +000093 +000094 +000098 +000102 +000104 +000106 +000107 +000108 +000116 +000117 
+000118 +000122 +000124 +000126 +000128 +000132 +000134 +000135 +000137 +000139 +000140 +000143 +000147 +000151 +000152 +000153 +000156 +000159 +000161 +000167 +000168 +000169 +000170 +000173 +000174 +000175 +000181 +000182 +000183 +000186 +000187 +000188 +000190 +000191 +000192 +000194 +000195 +000196 +000197 +000199 +000201 +000203 +000204 +000207 +000211 +000212 +000213 +000216 +000218 +000223 +000224 +000226 +000229 +000230 +000231 +000234 +000235 +000236 +000237 +000239 +000242 +000246 +000247 +000248 +000249 +000250 +000251 +000252 +000260 +000262 +000263 +000265 +000266 +000268 +000269 +000270 +000272 +000273 +000278 +000279 +000281 +000283 +000284 +000289 +000290 +000291 +000293 +000297 +000301 +000302 +000305 +000307 +000308 +000309 +000311 +000312 +000314 +000315 +000319 +000320 +000321 +000323 +000324 +000327 +000328 +000329 +000332 +000333 +000335 +000336 +000340 +000341 +000343 +000345 +000346 +000347 +000350 +000351 +000352 +000354 +000355 +000356 +000357 +000359 +000360 +000361 +000362 +000365 +000366 +000369 +000370 +000372 +000373 +000376 +000377 +000378 +000379 +000381 +000382 +000383 +000385 +000386 +000388 +000391 +000392 +000393 +000394 +000395 +000396 +000397 +000398 +000399 +000401 +000402 +000403 +000404 +000407 +000408 +000409 +000413 +000414 +000415 +000419 +000420 +000422 +000427 +000428 +000429 +000430 +000436 +000437 +000440 +000443 +000446 +000448 +000450 +000451 +000452 +000453 +000454 +000455 +000457 +000459 +000463 +000468 +000469 +000472 +000473 +000475 +000476 +000477 +000478 +000479 +000480 +000481 +000485 +000486 +000489 +000491 +000492 +000493 +000494 +000495 +000496 +000498 +000499 +000503 +000504 +000506 +000508 +000509 +000510 +000512 +000515 +000517 +000519 +000521 +000524 +000527 +000528 +000530 +000533 +000536 +000541 +000542 +000543 +000545 +000546 +000548 +000551 +000554 +000555 +000558 +000559 +000560 +000561 +000564 +000566 +000567 +000568 +000569 +000571 +000572 +000581 +000583 +000588 +000589 +000590 +000591 +000595 +000600 +000601 +000604 +000610 +000611 +000612 +000613 +000614 +000615 +000618 +000619 +000620 +000624 +000625 +000626 +000628 +000630 +000634 +000635 +000636 +000639 +000642 +000644 +000645 +000647 +000648 +000650 +000655 +000657 +000658 +000659 +000660 +000667 +000669 +000670 +000674 +000677 +000679 +000682 +000683 +000684 +000691 +000692 +000694 +000696 +000698 +000699 +000700 +000702 +000704 +000706 +000708 +000716 +000717 +000718 +000721 +000722 +000725 +000727 +000728 +000729 +000731 +000734 +000736 +000737 +000740 +000741 +000745 +000746 +000748 +000750 +000751 +000752 +000754 +000756 +000761 +000765 +000766 +000767 +000768 +000769 +000771 +000772 +000773 +000774 +000778 +000779 +000782 +000790 +000792 +000795 +000798 +000800 +000801 +000802 +000803 +000804 +000805 +000806 +000807 +000809 +000810 +000811 +000812 +000816 +000819 +000823 +000826 +000831 +000837 +000838 +000840 +000841 +000843 +000844 +000847 +000848 +000849 +000850 +000852 +000854 +000859 +000862 +000863 +000869 +000873 +000874 +000875 +000876 +000877 +000878 +000879 +000881 +000884 +000885 +000889 +000893 +000894 +000897 +000899 +000904 +000907 +000909 +000911 +000912 +000915 +000916 +000917 +000920 +000922 +000923 +000926 +000928 +000930 +000931 +000932 +000938 +000939 +000940 +000942 +000943 +000944 +000948 +000949 +000952 +000953 +000956 +000958 +000961 +000963 +000964 +000966 +000967 +000969 +000970 +000971 +000973 +000974 +000976 +000979 +000981 +000983 +000984 +000985 +000986 +000988 +000991 +000999 +001002 +001006 +001007 +001008 +001010 +001011 
+001012 +001013 +001014 +001015 +001018 +001019 +001021 +001022 +001025 +001026 +001027 +001035 +001037 +001039 +001042 +001043 +001046 +001050 +001051 +001053 +001054 +001055 +001058 +001063 +001065 +001066 +001067 +001068 +001069 +001070 +001071 +001075 +001076 +001077 +001078 +001083 +001084 +001086 +001088 +001089 +001094 +001095 +001096 +001097 +001099 +001101 +001102 +001104 +001106 +001107 +001108 +001111 +001113 +001114 +001115 +001116 +001118 +001120 +001123 +001125 +001127 +001129 +001131 +001132 +001133 +001134 +001135 +001136 +001138 +001139 +001140 +001141 +001143 +001144 +001145 +001147 +001148 +001149 +001150 +001152 +001153 +001154 +001155 +001158 +001162 +001163 +001167 +001172 +001173 +001176 +001177 +001178 +001179 +001180 +001182 +001183 +001187 +001188 +001189 +001191 +001192 +001193 +001194 +001195 +001198 +001199 +001203 +001206 +001207 +001213 +001214 +001216 +001217 +001218 +001221 +001222 +001224 +001225 +001226 +001228 +001230 +001232 +001234 +001235 +001236 +001237 +001239 +001241 +001242 +001243 +001244 +001245 +001246 +001249 +001251 +001252 +001253 +001254 +001255 +001257 +001259 +001260 +001261 +001263 +001265 +001266 +001267 +001268 +001269 +001270 +001271 +001272 +001273 +001274 +001275 +001281 +001284 +001286 +001287 +001289 +001291 +001292 +001294 +001295 +001296 +001303 +001304 +001305 +001306 +001307 +001308 +001314 +001317 +001318 +001329 +001330 +001331 +001332 +001333 +001334 +001336 +001337 +001339 +001342 +001344 +001345 +001346 +001347 +001350 +001352 +001353 +001355 +001356 +001359 +001363 +001365 +001372 +001374 +001375 +001376 +001377 +001380 +001381 +001382 +001384 +001386 +001387 +001388 +001389 +001391 +001395 +001397 +001398 +001407 +001410 +001411 +001412 +001415 +001416 +001419 +001421 +001424 +001427 +001431 +001432 +001435 +001437 +001438 +001439 +001441 +001442 +001443 +001445 +001446 +001448 +001450 +001451 +001458 +001461 +001463 +001466 +001469 +001471 +001477 +001478 +001480 +001481 +001485 +001487 +001488 +001489 +001495 +001497 +001501 +001502 +001507 +001508 +001511 +001513 +001514 +001516 +001517 +001521 +001522 +001524 +001525 +001526 +001527 +001528 +001533 +001535 +001536 +001537 +001538 +001542 +001545 +001546 +001547 +001549 +001552 +001555 +001557 +001560 +001562 +001564 +001565 +001567 +001569 +001573 +001574 +001576 +001577 +001579 +001582 +001583 +001585 +001586 +001587 +001588 +001589 +001590 +001591 +001592 +001594 +001596 +001597 +001600 +001602 +001603 +001605 +001606 +001610 +001613 +001615 +001616 +001617 +001619 +001621 +001625 +001627 +001629 +001631 +001633 +001634 +001635 +001640 +001643 +001645 +001647 +001650 +001654 +001656 +001658 +001660 +001662 +001664 +001665 +001666 +001667 +001670 +001675 +001680 +001682 +001683 +001684 +001689 +001693 +001694 +001697 +001699 +001701 +001702 +001704 +001705 +001706 +001707 +001709 +001710 +001711 +001712 +001713 +001714 +001717 +001718 +001719 +001721 +001722 +001726 +001727 +001729 +001732 +001733 +001740 +001741 +001742 +001745 +001746 +001749 +001750 +001751 +001752 +001755 +001758 +001762 +001764 +001765 +001768 +001771 +001772 +001774 +001776 +001778 +001780 +001781 +001782 +001783 +001786 +001787 +001794 +001795 +001797 +001800 +001801 +001802 +001804 +001807 +001808 +001813 +001814 +001817 +001818 +001820 +001822 +001823 +001824 +001825 +001828 +001831 +001835 +001840 +001844 +001846 +001848 +001851 +001852 +001853 +001854 +001855 +001856 +001858 +001859 +001861 +001862 +001863 +001867 +001868 +001869 +001872 +001875 +001877 +001878 +001880 +001881 +001884 
+001885 +001886 +001887 +001888 +001890 +001892 +001893 +001897 +001898 +001900 +001904 +001905 +001909 +001919 +001920 +001923 +001924 +001925 +001926 +001927 +001928 +001929 +001931 +001932 +001933 +001934 +001936 +001937 +001940 +001941 +001942 +001943 +001945 +001946 +001952 +001954 +001959 +001960 +001966 +001967 +001969 +001972 +001977 +001978 +001979 +001980 +001982 +001983 +001984 +001985 +001986 +001989 +001991 +001995 +001996 +001997 +001999 +002000 +002001 +002002 +002004 +002008 +002010 +002011 +002012 +002013 +002014 +002017 +002019 +002021 +002022 +002025 +002027 +002028 +002029 +002034 +002035 +002036 +002037 +002038 +002042 +002043 +002044 +002045 +002046 +002048 +002049 +002050 +002052 +002054 +002056 +002057 +002058 +002062 +002068 +002071 +002073 +002074 +002075 +002076 +002078 +002079 +002081 +002082 +002085 +002086 +002087 +002089 +002091 +002093 +002094 +002100 +002101 +002102 +002103 +002107 +002108 +002111 +002112 +002113 +002115 +002118 +002120 +002121 +002123 +002124 +002127 +002128 +002130 +002131 +002135 +002136 +002137 +002138 +002139 +002140 +002142 +002151 +002152 +002153 +002158 +002159 +002160 +002161 +002163 +002165 +002166 +002168 +002169 +002170 +002173 +002177 +002179 +002182 +002183 +002185 +002187 +002188 +002193 +002196 +002200 +002201 +002202 +002206 +002207 +002209 +002215 +002216 +002218 +002219 +002220 +002224 +002225 +002228 +002229 +002232 +002233 +002234 +002239 +002243 +002245 +002246 +002248 +002250 +002251 +002254 +002255 +002257 +002258 +002260 +002262 +002266 +002272 +002276 +002277 +002279 +002280 +002282 +002283 +002284 +002286 +002287 +002290 +002291 +002292 +002293 +002294 +002295 +002298 +002299 +002300 +002303 +002304 +002306 +002307 +002308 +002310 +002314 +002315 +002319 +002320 +002325 +002327 +002329 +002330 +002332 +002334 +002336 +002337 +002338 +002340 +002341 +002344 +002345 +002346 +002347 +002348 +002353 +002356 +002357 +002359 +002362 +002365 +002366 +002367 +002369 +002370 +002372 +002376 +002378 +002380 +002382 +002383 +002384 +002385 +002386 +002387 +002391 +002392 +002393 +002397 +002398 +002399 +002404 +002405 +002411 +002414 +002415 +002418 +002419 +002420 +002422 +002423 +002424 +002425 +002428 +002429 +002432 +002433 +002434 +002439 +002440 +002442 +002446 +002450 +002454 +002455 +002457 +002458 +002460 +002461 +002462 +002463 +002473 +002474 +002476 +002477 +002478 +002479 +002483 +002486 +002488 +002490 +002492 +002495 +002497 +002499 +002500 +002502 +002503 +002504 +002505 +002506 +002509 +002511 +002516 +002519 +002520 +002521 +002525 +002526 +002528 +002529 +002530 +002531 +002532 +002534 +002538 +002539 +002540 +002541 +002543 +002546 +002548 +002552 +002556 +002557 +002558 +002562 +002563 +002564 +002565 +002568 +002569 +002570 +002572 +002574 +002575 +002577 +002580 +002581 +002583 +002584 +002585 +002586 +002590 +002594 +002598 +002599 +002600 +002601 +002602 +002603 +002604 +002606 +002612 +002613 +002615 +002619 +002621 +002625 +002626 +002628 +002630 +002631 +002633 +002635 +002636 +002638 +002640 +002641 +002644 +002645 +002646 +002651 +002653 +002656 +002657 +002661 +002663 +002666 +002669 +002673 +002674 +002675 +002677 +002680 +002681 +002685 +002686 +002690 +002692 +002693 +002694 +002695 +002696 +002699 +002702 +002706 +002707 +002709 +002710 +002711 +002712 +002713 +002715 +002717 +002720 +002721 +002722 +002724 +002725 +002726 +002727 +002728 +002729 +002730 +002735 +002737 +002740 +002742 +002744 +002745 +002746 +002747 +002748 +002749 +002752 +002753 +002755 +002757 +002758 +002760 +002761 
+002763 +002764 +002765 +002767 +002772 +002773 +002775 +002783 +002786 +002787 +002789 +002793 +002794 +002796 +002797 +002800 +002801 +002804 +002805 +002806 +002809 +002810 +002811 +002812 +002814 +002815 +002818 +002820 +002826 +002827 +002828 +002830 +002831 +002833 +002836 +002839 +002840 +002841 +002844 +002845 +002846 +002847 +002848 +002853 +002856 +002858 +002861 +002863 +002866 +002867 +002875 +002876 +002877 +002878 +002879 +002880 +002881 +002883 +002885 +002889 +002890 +002891 +002892 +002893 +002894 +002895 +002896 +002900 +002901 +002902 +002903 +002905 +002908 +002911 +002914 +002916 +002917 +002919 +002924 +002925 +002928 +002930 +002934 +002935 +002937 +002942 +002944 +002945 +002947 +002948 +002951 +002953 +002955 +002957 +002958 +002959 +002960 +002961 +002962 +002963 +002964 +002966 +002971 +002974 +002976 +002977 +002978 +002979 +002982 +002984 +002985 +002988 +002991 +002993 +002994 +002995 +002997 +002999 +003000 +003001 +003003 +003004 +003005 +003006 +003007 +003010 +003011 +003019 +003022 +003024 +003025 +003027 +003029 +003030 +003031 +003032 +003033 +003034 +003035 +003038 +003042 +003043 +003046 +003047 +003048 +003050 +003052 +003053 +003054 +003055 +003056 +003058 +003061 +003062 +003065 +003066 +003067 +003071 +003073 +003074 +003076 +003080 +003082 +003087 +003088 +003090 +003094 +003096 +003099 +003101 +003102 +003103 +003106 +003107 +003109 +003110 +003112 +003114 +003116 +003118 +003124 +003126 +003127 +003129 +003131 +003133 +003134 +003135 +003136 +003137 +003141 +003142 +003144 +003145 +003146 +003148 +003150 +003153 +003156 +003159 +003161 +003162 +003165 +003167 +003170 +003172 +003174 +003175 +003177 +003179 +003180 +003181 +003182 +003183 +003187 +003190 +003192 +003194 +003197 +003199 +003202 +003203 +003204 +003207 +003210 +003211 +003214 +003216 +003217 +003219 +003221 +003222 +003224 +003225 +003226 +003228 +003229 +003231 +003232 +003233 +003236 +003239 +003240 +003242 +003247 +003250 +003251 +003252 +003254 +003255 +003257 +003259 +003265 +003266 +003269 +003272 +003275 +003276 +003280 +003281 +003283 +003288 +003292 +003295 +003296 +003298 +003300 +003301 +003302 +003304 +003305 +003306 +003308 +003310 +003312 +003313 +003315 +003316 +003318 +003319 +003322 +003323 +003324 +003325 +003330 +003331 +003337 +003338 +003341 +003343 +003346 +003347 +003350 +003351 +003352 +003353 +003355 +003357 +003358 +003364 +003365 +003366 +003367 +003368 +003370 +003373 +003375 +003379 +003385 +003386 +003393 +003394 +003395 +003396 +003397 +003399 +003401 +003402 +003403 +003404 +003405 +003406 +003407 +003408 +003409 +003410 +003411 +003412 +003417 +003419 +003421 +003422 +003425 +003426 +003428 +003429 +003430 +003432 +003434 +003435 +003443 +003447 +003448 +003449 +003450 +003453 +003456 +003461 +003464 +003465 +003466 +003467 +003469 +003470 +003471 +003474 +003478 +003480 +003481 +003482 +003483 +003484 +003487 +003488 +003489 +003490 +003491 +003492 +003495 +003496 +003497 +003502 +003503 +003504 +003506 +003511 +003515 +003517 +003519 +003520 +003521 +003524 +003527 +003528 +003529 +003530 +003531 +003535 +003539 +003543 +003544 +003547 +003550 +003552 +003553 +003554 +003557 +003558 +003559 +003562 +003563 +003568 +003571 +003573 +003574 +003580 +003582 +003583 +003584 +003588 +003600 +003601 +003604 +003605 +003607 +003608 +003609 +003611 +003614 +003616 +003618 +003620 +003621 +003622 +003623 +003624 +003627 +003629 +003630 +003631 +003632 +003633 +003634 +003635 +003643 +003645 +003647 +003649 +003652 +003653 +003655 +003658 +003659 +003661 
+003662 +003667 +003668 +003669 +003671 +003676 +003677 +003678 +003679 +003682 +003683 +003684 +003688 +003689 +003690 +003691 +003692 +003702 +003703 +003705 +003707 +003708 +003711 +003712 +003715 +003716 +003718 +003719 +003723 +003726 +003728 +003735 +003736 +003737 +003738 +003739 +003746 +003747 +003748 +003750 +003751 +003753 +003755 +003756 +003762 +003763 +003764 +003769 +003771 +003775 +003777 +003778 +003779 +003781 +003782 +003787 +003788 +003793 +003794 +003798 +003800 +003802 +003804 +003805 +003807 +003808 +003809 +003811 +003812 +003814 +003820 +003822 +003826 +003827 +003828 +003830 +003834 +003835 +003837 +003841 +003847 +003852 +003854 +003856 +003859 +003860 +003864 +003866 +003869 +003870 +003872 +003873 +003874 +003878 +003879 +003880 +003881 +003883 +003885 +003886 +003890 +003891 +003892 +003894 +003897 +003898 +003899 +003901 +003902 +003905 +003907 +003909 +003914 +003915 +003916 +003920 +003923 +003924 +003926 +003931 +003932 +003934 +003937 +003938 +003943 +003945 +003946 +003948 +003950 +003956 +003958 +003961 +003962 +003964 +003965 +003969 +003970 +003972 +003975 +003977 +003980 +003981 +003982 +003984 +003986 +003992 +003996 +003998 +004000 +004001 +004002 +004003 +004004 +004007 +004008 +004009 +004010 +004011 +004016 +004021 +004026 +004027 +004028 +004032 +004033 +004034 +004036 +004038 +004040 +004041 +004042 +004045 +004048 +004049 +004051 +004055 +004059 +004061 +004063 +004064 +004065 +004068 +004072 +004074 +004077 +004079 +004081 +004082 +004083 +004085 +004087 +004089 +004091 +004092 +004095 +004096 +004098 +004100 +004101 +004104 +004105 +004107 +004108 +004109 +004110 +004111 +004113 +004116 +004117 +004118 +004119 +004120 +004121 +004122 +004124 +004125 +004126 +004128 +004129 +004130 +004131 +004132 +004136 +004137 +004138 +004140 +004142 +004143 +004148 +004149 +004150 +004152 +004153 +004154 +004155 +004156 +004157 +004158 +004160 +004161 +004162 +004163 +004164 +004168 +004171 +004172 +004173 +004174 +004175 +004185 +004187 +004188 +004189 +004190 +004191 +004195 +004196 +004202 +004205 +004206 +004207 +004209 +004210 +004213 +004214 +004215 +004220 +004221 +004222 +004223 +004224 +004226 +004228 +004232 +004237 +004239 +004241 +004242 +004243 +004246 +004248 +004249 +004250 +004251 +004254 +004255 +004256 +004259 +004260 +004263 +004270 +004271 +004275 +004277 +004278 +004280 +004281 +004282 +004284 +004285 +004288 +004289 +004290 +004291 +004293 +004294 +004295 +004298 +004299 +004300 +004301 +004303 +004305 +004306 +004307 +004309 +004311 +004312 +004314 +004318 +004319 +004321 +004323 +004324 +004326 +004327 +004329 +004330 +004335 +004336 +004337 +004338 +004340 +004342 +004343 +004345 +004348 +004349 +004350 +004352 +004353 +004360 +004362 +004363 +004364 +004367 +004368 +004369 +004370 +004373 +004374 +004377 +004383 +004384 +004385 +004388 +004391 +004392 +004393 +004396 +004397 +004398 +004401 +004402 +004403 +004404 +004406 +004407 +004414 +004415 +004418 +004419 +004420 +004421 +004422 +004423 +004424 +004425 +004426 +004429 +004430 +004433 +004434 +004435 +004437 +004438 +004439 +004440 +004443 +004444 +004447 +004450 +004452 +004454 +004456 +004458 +004460 +004462 +004465 +004469 +004470 +004472 +004474 +004475 +004480 +004481 +004482 +004483 +004485 +004486 +004487 +004489 +004490 +004491 +004493 +004494 +004496 +004501 +004502 +004508 +004511 +004513 +004516 +004517 +004519 +004520 +004521 +004526 +004527 +004528 +004529 +004530 +004531 +004532 +004534 +004540 +004541 +004542 +004547 +004548 +004549 +004551 +004553 +004556 
+004557 +004562 +004566 +004567 +004568 +004569 +004570 +004573 +004574 +004576 +004578 +004581 +004582 +004585 +004587 +004588 +004589 +004591 +004596 +004598 +004599 +004603 +004608 +004609 +004610 +004611 +004612 +004615 +004618 +004620 +004622 +004624 +004626 +004629 +004630 +004632 +004633 +004634 +004636 +004638 +004640 +004644 +004647 +004648 +004649 +004650 +004651 +004652 +004655 +004657 +004658 +004660 +004665 +004666 +004667 +004668 +004669 +004672 +004673 +004679 +004680 +004682 +004683 +004685 +004686 +004687 +004688 +004689 +004691 +004692 +004693 +004694 +004695 +004697 +004698 +004699 +004700 +004705 +004706 +004708 +004709 +004710 +004711 +004713 +004714 +004715 +004716 +004717 +004718 +004720 +004721 +004722 +004724 +004725 +004726 +004730 +004732 +004734 +004735 +004737 +004738 +004739 +004740 +004742 +004743 +004744 +004745 +004746 +004748 +004752 +004753 +004756 +004759 +004762 +004763 +004764 +004766 +004768 +004769 +004770 +004773 +004776 +004777 +004782 +004783 +004787 +004788 +004790 +004791 +004792 +004797 +004799 +004800 +004804 +004806 +004807 +004810 +004811 +004813 +004814 +004815 +004816 +004817 +004821 +004822 +004825 +004829 +004830 +004831 +004832 +004835 +004839 +004843 +004846 +004848 +004849 +004850 +004851 +004852 +004858 +004859 +004860 +004861 +004862 +004863 +004864 +004867 +004868 +004871 +004873 +004874 +004875 +004881 +004885 +004887 +004888 +004891 +004892 +004893 +004895 +004896 +004898 +004902 +004903 +004904 +004905 +004907 +004909 +004914 +004917 +004918 +004920 +004921 +004924 +004926 +004927 +004928 +004929 +004931 +004932 +004934 +004935 +004938 +004941 +004942 +004943 +004944 +004946 +004947 +004948 +004949 +004953 +004954 +004956 +004958 +004959 +004960 +004962 +004963 +004966 +004974 +004976 +004979 +004981 +004983 +004985 +004986 +004988 +004989 +004990 +004993 +004994 +004995 +004996 +004998 +004999 +005001 +005002 +005004 +005008 +005010 +005013 +005014 +005015 +005017 +005019 +005021 +005024 +005026 +005028 +005032 +005034 +005036 +005037 +005038 +005040 +005041 +005045 +005049 +005050 +005052 +005053 +005054 +005055 +005056 +005057 +005058 +005062 +005063 +005064 +005065 +005067 +005068 +005070 +005072 +005073 +005074 +005075 +005077 +005078 +005079 +005080 +005081 +005082 +005086 +005090 +005093 +005094 +005095 +005101 +005103 +005105 +005108 +005109 +005110 +005112 +005113 +005120 +005121 +005122 +005124 +005125 +005127 +005128 +005133 +005135 +005136 +005138 +005139 +005140 +005141 +005143 +005144 +005145 +005147 +005149 +005153 +005155 +005156 +005157 +005158 +005161 +005162 +005163 +005164 +005166 +005167 +005168 +005170 +005172 +005174 +005175 +005176 +005179 +005180 +005181 +005182 +005184 +005185 +005188 +005189 +005190 +005191 +005194 +005197 +005198 +005199 +005201 +005206 +005213 +005214 +005217 +005218 +005219 +005221 +005222 +005226 +005227 +005229 +005230 +005233 +005234 +005236 +005237 +005240 +005241 +005242 +005244 +005246 +005249 +005251 +005255 +005256 +005260 +005262 +005267 +005268 +005271 +005273 +005274 +005275 +005276 +005279 +005280 +005282 +005284 +005287 +005289 +005292 +005296 +005297 +005298 +005299 +005304 +005307 +005308 +005309 +005311 +005312 +005313 +005315 +005316 +005318 +005319 +005321 +005322 +005323 +005325 +005328 +005329 +005330 +005333 +005334 +005335 +005336 +005337 +005338 +005341 +005342 +005343 +005345 +005347 +005349 +005350 +005359 +005360 +005363 +005365 +005366 +005368 +005369 +005371 +005372 +005375 +005377 +005378 +005379 +005381 +005385 +005386 +005389 +005390 +005391 +005404 
+005405 +005413 +005415 +005422 +005423 +005426 +005427 +005429 +005430 +005431 +005434 +005437 +005441 +005443 +005444 +005445 +005447 +005448 +005449 +005450 +005452 +005453 +005458 +005459 +005460 +005461 +005465 +005466 +005467 +005471 +005472 +005473 +005474 +005476 +005477 +005479 +005481 +005482 +005484 +005486 +005487 +005489 +005494 +005495 +005498 +005505 +005510 +005511 +005514 +005515 +005523 +005525 +005528 +005531 +005532 +005534 +005536 +005538 +005540 +005542 +005544 +005545 +005546 +005551 +005552 +005555 +005556 +005557 +005558 +005559 +005560 +005565 +005566 +005570 +005571 +005572 +005573 +005576 +005577 +005580 +005581 +005582 +005584 +005586 +005587 +005588 +005589 +005590 +005595 +005596 +005600 +005601 +005602 +005603 +005610 +005613 +005616 +005617 +005618 +005619 +005623 +005625 +005630 +005631 +005633 +005634 +005635 +005638 +005639 +005640 +005642 +005643 +005649 +005650 +005652 +005653 +005656 +005658 +005659 +005660 +005662 +005664 +005668 +005669 +005672 +005673 +005676 +005677 +005680 +005683 +005685 +005687 +005689 +005695 +005698 +005699 +005700 +005703 +005704 +005706 +005707 +005708 +005709 +005712 +005713 +005714 +005717 +005724 +005725 +005727 +005728 +005729 +005731 +005735 +005736 +005739 +005740 +005741 +005743 +005744 +005745 +005746 +005747 +005751 +005754 +005757 +005760 +005762 +005763 +005765 +005777 +005782 +005783 +005784 +005785 +005786 +005787 +005790 +005793 +005794 +005796 +005800 +005801 +005803 +005805 +005806 +005807 +005811 +005812 +005818 +005819 +005820 +005821 +005822 +005826 +005827 +005829 +005834 +005839 +005840 +005841 +005843 +005852 +005854 +005855 +005856 +005857 +005859 +005864 +005869 +005873 +005876 +005878 +005879 +005881 +005882 +005883 +005885 +005887 +005889 +005892 +005893 +005894 +005899 +005900 +005901 +005903 +005905 +005906 +005907 +005909 +005910 +005911 +005912 +005913 +005914 +005916 +005917 +005918 +005919 +005921 +005922 +005923 +005925 +005926 +005927 +005931 +005933 +005935 +005938 +005939 +005944 +005947 +005948 +005949 +005952 +005955 +005958 +005961 +005962 +005963 +005965 +005969 +005970 +005972 +005975 +005978 +005981 +005982 +005984 +005985 +005986 +005988 +005994 +005996 +005997 +005999 +006001 +006002 +006003 +006005 +006008 +006009 +006010 +006012 +006013 +006014 +006016 +006023 +006024 +006026 +006027 +006028 +006029 +006030 +006031 +006033 +006034 +006036 +006038 +006039 +006041 +006042 +006043 +006044 +006045 +006046 +006047 +006048 +006050 +006052 +006054 +006057 +006058 +006060 +006061 +006062 +006063 +006066 +006067 +006068 +006070 +006071 +006074 +006075 +006077 +006078 +006083 +006085 +006086 +006087 +006088 +006093 +006095 +006096 +006097 +006098 +006100 +006102 +006103 +006106 +006107 +006110 +006114 +006115 +006116 +006117 +006118 +006121 +006122 +006123 +006125 +006126 +006127 +006130 +006133 +006136 +006139 +006144 +006146 +006148 +006151 +006152 +006154 +006156 +006161 +006163 +006165 +006167 +006168 +006169 +006173 +006176 +006177 +006182 +006185 +006186 +006187 +006190 +006194 +006195 +006196 +006198 +006202 +006204 +006208 +006210 +006213 +006215 +006219 +006222 +006227 +006228 +006229 +006232 +006233 +006238 +006240 +006244 +006246 +006247 +006249 +006250 +006258 +006263 +006265 +006266 +006267 +006269 +006270 +006272 +006273 +006274 +006275 +006276 +006278 +006280 +006282 +006286 +006287 +006288 +006297 +006300 +006301 +006302 +006305 +006306 +006312 +006314 +006315 +006316 +006317 +006321 +006322 +006324 +006331 +006332 +006333 +006334 +006338 +006339 +006340 +006342 +006343 
+006344 +006345 +006348 +006349 +006351 +006353 +006354 +006355 +006356 +006357 +006360 +006364 +006366 +006368 +006369 +006370 +006371 +006372 +006377 +006379 +006380 +006381 +006385 +006386 +006388 +006391 +006393 +006394 +006395 +006396 +006403 +006405 +006406 +006407 +006409 +006410 +006411 +006415 +006416 +006417 +006420 +006423 +006424 +006425 +006426 +006427 +006433 +006434 +006435 +006436 +006437 +006439 +006440 +006441 +006442 +006444 +006445 +006446 +006451 +006452 +006453 +006454 +006462 +006464 +006465 +006468 +006469 +006470 +006472 +006473 +006474 +006475 +006477 +006478 +006481 +006482 +006483 +006484 +006486 +006488 +006491 +006493 +006496 +006497 +006498 +006503 +006505 +006506 +006507 +006508 +006512 +006514 +006515 +006516 +006517 +006519 +006520 +006521 +006524 +006525 +006529 +006530 +006531 +006532 +006533 +006534 +006535 +006537 +006540 +006542 +006548 +006549 +006551 +006553 +006555 +006556 +006558 +006560 +006561 +006563 +006565 +006568 +006569 +006570 +006574 +006576 +006577 +006578 +006581 +006582 +006583 +006586 +006588 +006590 +006592 +006593 +006595 +006596 +006597 +006602 +006603 +006604 +006611 +006612 +006613 +006614 +006618 +006623 +006624 +006625 +006626 +006628 +006629 +006632 +006633 +006634 +006636 +006637 +006638 +006641 +006643 +006647 +006649 +006650 +006651 +006655 +006656 +006658 +006659 +006660 +006664 +006666 +006667 +006669 +006670 +006674 +006676 +006677 +006678 +006679 +006682 +006685 +006686 +006692 +006693 +006694 +006695 +006696 +006698 +006701 +006703 +006709 +006710 +006711 +006712 +006713 +006714 +006715 +006719 +006720 +006723 +006725 +006726 +006729 +006731 +006732 +006733 +006734 +006737 +006738 +006741 +006744 +006745 +006747 +006751 +006752 +006753 +006754 +006755 +006756 +006758 +006759 +006760 +006761 +006762 +006764 +006765 +006767 +006768 +006770 +006771 +006772 +006773 +006777 +006778 +006780 +006781 +006782 +006783 +006785 +006786 +006789 +006791 +006792 +006794 +006796 +006797 +006798 +006800 +006803 +006804 +006806 +006807 +006808 +006811 +006812 +006813 +006815 +006816 +006818 +006819 +006822 +006828 +006829 +006832 +006833 +006836 +006837 +006841 +006843 +006844 +006847 +006849 +006850 +006852 +006853 +006854 +006855 +006856 +006858 +006860 +006862 +006863 +006866 +006868 +006870 +006872 +006873 +006874 +006876 +006879 +006881 +006882 +006884 +006885 +006887 +006889 +006891 +006895 +006897 +006898 +006899 +006900 +006901 +006903 +006906 +006907 +006908 +006910 +006913 +006914 +006917 +006922 +006925 +006928 +006930 +006936 +006937 +006938 +006942 +006943 +006944 +006945 +006948 +006950 +006953 +006954 +006955 +006956 +006959 +006960 +006962 +006964 +006968 +006971 +006973 +006977 +006978 +006980 +006981 +006982 +006987 +006989 +006990 +006992 +006994 +006997 +006999 +007000 +007003 +007005 +007006 +007008 +007010 +007011 +007012 +007014 +007015 +007016 +007019 +007022 +007023 +007026 +007027 +007028 +007029 +007030 +007031 +007032 +007033 +007034 +007037 +007038 +007042 +007043 +007047 +007048 +007049 +007052 +007053 +007055 +007056 +007059 +007061 +007063 +007065 +007067 +007068 +007069 +007071 +007072 +007074 +007076 +007078 +007079 +007080 +007081 +007082 +007083 +007084 +007085 +007087 +007088 +007089 +007091 +007095 +007098 +007100 +007103 +007109 +007110 +007112 +007115 +007117 +007119 +007120 +007122 +007125 +007130 +007131 +007132 +007133 +007135 +007136 +007138 +007139 +007144 +007145 +007146 +007149 +007154 +007157 +007158 +007161 +007162 +007163 +007164 +007165 +007166 +007168 +007169 +007172 +007174 +007176 
+007177 +007178 +007180 +007182 +007183 +007187 +007194 +007198 +007199 +007200 +007201 +007202 +007204 +007205 +007207 +007208 +007210 +007212 +007214 +007215 +007217 +007219 +007221 +007225 +007227 +007229 +007230 +007232 +007233 +007235 +007238 +007240 +007242 +007244 +007246 +007247 +007252 +007253 +007255 +007256 +007258 +007260 +007261 +007262 +007265 +007266 +007267 +007271 +007272 +007273 +007274 +007275 +007277 +007278 +007279 +007280 +007283 +007284 +007287 +007288 +007289 +007290 +007291 +007292 +007294 +007299 +007300 +007302 +007303 +007304 +007309 +007310 +007311 +007315 +007318 +007319 +007322 +007323 +007325 +007326 +007327 +007329 +007330 +007331 +007336 +007337 +007339 +007342 +007343 +007344 +007345 +007347 +007349 +007350 +007351 +007352 +007353 +007359 +007360 +007364 +007369 +007371 +007374 +007375 +007376 +007377 +007380 +007381 +007382 +007383 +007384 +007385 +007389 +007391 +007395 +007396 +007397 +007398 +007401 +007402 +007403 +007405 +007407 +007409 +007410 +007411 +007412 +007413 +007415 +007416 +007419 +007420 +007421 +007422 +007423 +007424 +007426 +007430 +007433 +007434 +007435 +007436 +007437 +007439 +007440 +007442 +007445 +007447 +007448 +007449 +007450 +007453 +007456 +007458 +007462 +007463 +007464 +007466 +007467 +007468 +007469 +007470 +007473 +007475 +007477 +007478 +007480 \ No newline at end of file diff --git a/img/framework.png b/img/framework.png new file mode 100644 index 0000000000000000000000000000000000000000..ec36ab30bc0730755804d5ae5f9be17a120eedc7 GIT binary patch literal 205992
[GIT binary patch payload for img/framework.png (205992 bytes of base85-encoded image data) omitted — not human-readable]
zLC-4^sy`E*azA}3H@$on6 zsgtwe_g{Qq>qTem!C`1P>BMQ*|8&hgPj3BK+hhX(w3gFPZ2!TpH{J7G|HVy>l|jnP zihFYlKGAAcU5UMfkvj6Mqn3Z^QoU_0{~EJW-dOe>&i2J;eYaL*Ut(mJw`AR1eIpnyD0{uC~ZgA5{OJ^dD~MtA>EdFNz^MP`F8FU z^eUg7DONpag!`AT^rq16o`8P*$*Xy8P}yqoKB+^iJjhLe`v6yiXfY=}i^Ten_Y$B9d1 zp75Dw=!W6v3ufbF-}iY?K`0~BG~JSp18&I`z7~%0VgX7e@;!m%nx61BmRZr|-F|XZFX>YTUT=jz6y)jn94e#mBVU%o}*= z*3Vr3%aw)NWe?v`2sa$Rcwyr%c6g=Xh|>@M`I_e*`P=$)CLf{!fEsUXuwc{5haTB- z$_G1C0FZ+({B8Kq4^J_6H2~?Xf9lV-ujnqBsp(Atwd3S-+bho>y@8cWe)rf5TO6%< z);rFB+uWM}uu`B(Wa^?e{pP{Qktcodqf-rd&pD_3?85Wzz2SoIB$izL&iXxWx=PKv zet6@%_VPseRlodV`rL0E6Wa|jwCK{)JN|UjhJrJufC^7}_qAu;b<*wIeA=woa@`V3FbIqg$dojc=zG+Rs5#18(8AX7_Jb<8$75 zjQ!hdKmOxQ%B-_K_MS1gkq_0XwKM<$7I|L1VhZK=B57& zu}?(IMC^;;6;{41-FR+*)f^QL)ai_N_i=iJmNYzn?v#&cNMbZw>gnfaJ^wX9f|Wwx z8$n2z5VAcrqy&3mrQdnv{WY@ld1jqyt76G$YG$oeQIQOZ ztBNxHZ5`ly>z>|BO{15MZ(P?f52JyaHkP5Yp2AtNF|A6r=C0v z01$yRC8%|rXy!I&)-UO?>Wunn5mFfkN$~2_lgi~lV9{nZ+#*H6>v?&EGu28myD7jx z0?1vHC%6xCje~-YK>~@=>)WIyngtR8?%o#n0bP&w^=}%rgGRCJ*%8w%mg4R4K+H62 zH7NO7kQ0Dpl1n52LSj^@nOouh04okGq6A=sm69nHYH;jN+xa;HlvHVNG|maq7_C^T z&#Qxt0`}GyyBZH`LZIA5C>dvp zS1NK-M^)vnD};xjfNgVN08&+SRPHd z;vM{)(U$DKO$_MfbMd|A&~Ev=O?%Je^;$R7K?j<-|>p&Wer zu9bH7ChtyZ9}qKYaGR>Ts~NmE-7Iz^1T<j4-o)_M|9mD%Txy+$nRmmp(hOO@1f0S4nb( z)wg_S^sLVx{YIL}PRHc6Cw=-CCsnWPzX*z=%%4AhABmW;_!qLd;$TmyJSjdWrKdRC zy>Q*dat1P`{@}*-Zaj?ZS9;bnb!QF}EFb zj=)G|lNYs>v*nG;yA_`$rzS$FAR!PC0~jx;3XD*#F{hl^OrG4lY(Qhc!o0pCMy7)F zDnUi1KQg!5!ggEO8!ByBHjF;TCt0x;YrO9;aY=$#)H)M{7~__jnjZn+BO9FZ^1LI; zbu+?Rh$A2o?V>T%zY!fHm?W=MN{{46(`s~9s5wsm^!Gs^RB_7j&34i(j+V@j-oI{O z+q!LB8XfcMa>M!204W9!1Z^{HFkN8+^G>l`ESr`gq_9H)!H6LeRhk)*Ktkd`k_1UE zQK08~!V~3U$u9~`QKX<;Hg89bq3+RWJTlswW+n?K!;Vud4Lf#CNH=t^i(9T(JX6!6^>p6kTJ*Tq%?HW!|hyIKv~{Hr>HAF)$-xXCSRx zy1%)PR80@t>A_)5*G)T6cFQGS$h>1TBuYbBGhdcMc!i>tj5|_Vb*WOuEjpcThj#p@ z-;eXXt5&R6t#NN{k664|L8BT z{mCIWeWs)1?DId_|I7!z=>q`p$lMDq`k&QLef;*3-lnU*`mtI~49+>{qU)DFx<0>P z!IVRfE3kp{JHOlYcJUJ$s1q%^W$4T z+%j1mdHSBhkykb80KlxB1^{B{u_ezHkN8-P#`UImTy@EdU-ke1N}GRo<=-2=^1U+- zHvt+xdd)Krf9T1d{AkVb-#Fwyx+$?5NZ0@%J#SnBmEZQ%pO!9LHR6XG4?q3H;|~dq zOQ4FKk3Ib0^Rd$}J1V7sKf39O-#^+l@0|Cwb}hO2;TO}9c^^FUgsB#D(<^@V-K)R5 zG2igg{l6(jo9Cancvl)ECAsLl=|6aB^^&_g|7%8v4ggxXx!HiV8}EOx=RKD;sQ@6; z%kLhY|LK-71yrH?nL8d{S@W(7-c_RjfGlqL>wR}W-dhgTbaYtCthb*w(*yu;M_&BP z0}nmZQ_$6XO%L3@ zWV17#seoL!;|)^}Kl<>t;6ZGf!EIKH9`v2$~ORlhQ|km5;AffKOg91uc#f2b>6H0qQ5!QwM=B z%lXp!<(p(#PCjN57z!sag4UEplZdE){Sa9Y3I~WtB2^SOl`|r71OSC~98(WK<$SSk zL#8M^x$au`gGfI!H`hscQo0FMVI9~%IW!yD48{+bOXw7SDW zO1Yfx+i11ULf{kSdn-~iAO--*Nj7zmUlgO8v&H8|jRrM7HN*)4-k81rO`u~Kv2tHG z>*E6&6 zj0J|ehU=PZyj&SQ;klxec5*`wNead+H|LU1MRX%Udo+{LRl|%JnW1d5F5whQp@x}dMk2&Ik_d=^v2nEx0!Wl=x;CzUbZfLOrfY^86uPDkcji&yoS~l; zsQA)>0)P^;IZOmDCHh*EQ1^_%z*KWg$>B7F$d|-(=XB=xj{kJFYf8~Oi zZ7(nBne~C=j_g|Six>a#$lunV(;j{9VQ0Z7S|-_z9TkllV@(u|CPt&imS{9;5=-pmv&F86 zAgB~UP$>dZmtD59eQ(*@Pnr4txZ6OmM5Ac#!|%uL-Fs)w+&Od3nR(`UpZ8tIRo7f} zqf+zYgEyS;+Pi=J%bzbQk)k#4{qw1Z9{W60c*eBL#)%sjJ^trs-rk%Vc%`M`!5t|# zSO4Ckg$ws8A6zq@>oN+554CMv!^;kr-Dc$vauX!p=|>;N@7TCtMfLo$!7@N_`_i4N z0?F#vK5n?+%n}jIw$DEZjk=^v0HCoQ%bxkm(=VLU;>#bX6{N=Wj z?TxGdalu1Pf0%m05Sd1IJ$J>^%`?wE`^vG~9{$^(&senPulLhTxiotGFu{K6pwYm$~m`@S{R1Irf}WKKSj}nGe*7BQLma?m+nB?K>|xz0QBq zU!H&Jn(@vI(5xwA7tZ2zOAf;sNv|+II`3G-*tR|Md z_5D|q_PkL?j#C;x`fyqOz|&8FmCvngDV%ZEoH7dy?^yc&%FS&NgZB*`fBf-7J-xb6 zGrDKRhaY}gA4bjr(`Fqz-Y4k6&HL3k!DG~Iet%J=`{dImX7_9q#TwTw{AgWez_jHZ zHfzS@if#tk4A-t*u&U{>Q%)_|yX=(}Uxe%>rI8DrfFDQwqQp@EY5eY|J|Uk<)WTvvtfLPP=Zy``Z%}C;P|FNUpsTz&T*T>`7k{VNZOc z=S&bbu3Yur_Hzc07J{EFYnpn|rIq*G_t~-!t4}EzOgdJ66rKJ{zXSkSt(bA~&4G_+ 
z{$DreX9hm|_vYlR3)~VB%BKGM)@=`H0H9Sp^1yrkUq5+vg#u7`{jXNPd+Un3U*3BB z{E?PJrRQVY7p>5z-GASC6``nQ#>u~TWP+43I5fcdM$Z_oJW%JEe(81BjaA_HSDi6` z!$lZDLdzi%csT`-+#^gcUR57`;HS?|2l1oS-0@{Cm(owV=(K?BTr7# zFSIc#8JaYSaZ!MXB>&^2DQlBq*_8-qJ+OCyds5U;eOLK9J%a<(q zpjm$9$|IbaDd6`GWTv9)HzcD2OvEGA64;c?nIarSUV#8ubCnS3V#x%2u^~Bn26CDn zpbQYs18brs;q1s|31c9P2##slo!lAjh=&r%4NaBDew4e5Y^9750V9N&Mj2wFDki&P zm=HrVI3!S@CNpJ8j>Y1HBa+=>by%>#rGl|oG{!KK1U}Xk!-OC}KpDe;5z^l?88QJl)VIn<2uOMnRjDqSv%+GVe51eEoosb9tNH#glMZLM!u&k|reMfeY#Z~5xhD@7> z*HaW3Ali_r#d$?#0JdyZ%_+hRGsEO8g5ISy)a^wCx%_sk%f=ubrp9_{9|lYicxHm+ zcX@MNghMQ;BMi22r7X!Pha3QmF#tvwF@Pe2VS&@B!4b}uGMIn}CZ=u@jO>g8N5C{O z$H@b7fkQwrqYP8VfN>ZrlpzGbV=8ox^!gT#<$u>UNUM*-OvX<+q`r^{VxXD zK3SNWcynGaU4bRFPlAO+y9?^?yZHwFqRTIuP%`oC3*ssyYd(0pHeB%DgExE#06>%R zg4{ea(cWc(I(+y&vDWu&E*UoT z?Ij-snteCjeM+`qwvD*?ygN3os7xF&PR*R+?;D97TX^@Nm*aPrjh6OAUY|Yn zw-#pGR~~?R3{FkxR0(>dEZXKjU>zCnDPb>#!0> zS4*sELpx8fX_!2Zh)4Dn+Pm{bAp{IzhLF=MTM{{gag1_ZjnOWYM8KkL3BkeV4fY@& zeV;xthXNf#krA!>f@ujN*tHl{8NuJn_hU>?>YOAYU;u~$#%US?5EDeRJO35yjt4^& z(s|MUJ5&r}U}ALaa#Ky)yR9|Wn$SWiDNAw`I|xQ;l@?}1DzWmi6kNZTAPz9*EPNso;|w5H zj&OW5sv|}@ibdfa3NLnI1gb22y91Hf8*L>YX(YyIb~XO1CwoI2~TH(dGH%6GP$ccZ@|7d~Ba-@BDZ%pIrz0MO1&|MxF? z=8a?Q|9j_CDj0sug@g7B29H;7Zw7EleKC8ozdRdO?B3Op0#E?$t9jb@6-O|z11X>& zy88Zk=Y81GZ3HNkU48H4^NWS_dhdv1j<9{YZ0U#FFF9;!>tW|tDDHw|MoXJNS+KnF z)cobzxU2pB&NA+IMC_P++i#Bj&oAE8q`S(GJoCcSM!VBv+X!rV=h+33W3E0?f9Js`KB(me z&A#Na69-v(uFQj#0~+1&)3e}X_MDsloVN0%(JimuwQ!%w!+>90ao+0R zJ^k<<*AE_h(Y=!ox-W)!i32DaKQ5OixNxu?fDXU(&a1jAiY1z8+PHp40DwwH4496IzCS@kVExY-;MU!Tar14I@DTIbk zHxy>;Gx6Pbd3vB#KLG^1%H@ykl@`-D zyHGT=l$X*P6J&EaY%Yh1O_nk{nmU_SG!UM5`(3hAk?kTy7yuF0ZOxQBI2QmwGq-(M z?Jl!W3i>El501WY%0)p?Knfamb z%E&N~p>e7$xMr(!!Zi4Pq%mNcbnmu0)uvE`C1MF{q1WnEA}w74#|c(ZkOT@qIE)yE zUOMl;L~}7}A_hq_8jFj9NUWlNcp>JAi7?`U6Zt}~iYbl#@RH?yOzRe=>h=5$=#u6qBTpl&n8FviLMGPU4VM=t(a48%!nI%_bF(zfJ zlp-6)h=DPMBsMt0B0%TPwyvi7&-VJKk8vEl*Ol@! zCV5_7XghUMLHE?#I_#`5wnZ!RP9K}S?pW=V&!#o$y z7fqda)XTSj_}t}ZG@p0D*aFtH=B2gPKir;K-S(qYI0DdB7uLX;)}#GRt{OzWlEq6S94dyU3uC9rm>FG@yZN|e%!LA+()AUs1$`#FH zZn$p@s{deP>FiOSZ`G*wKxp&M>MW0QXjyTFl*!WexGgIqnDg|_+Ov8ciHQ|tq3go`}J4< z;l)b(iCXCKhhJ|)fy%nBC;zqJ?$4I|ZkPg)+(77v!D~IWuUvKf zWqa;A>%tKhWADrJ#$Mcb)r$8XDcbtynNy}ewPE?TN2h)1DmZZI%$fWdBl^j8$IX7A zk^+F`h+CHb`^q6wrc=I0U`zRw<68B7W@)1Tl$+yueZf7fiQ|#M8lJW=nU{!T>M+!hX zK+yaFcZ@RlPV+_v1~O7Rx1_=jEh<`uxMK1vUgllSpy z$IhNi5d{PQ7*XhI4TqY82%AqO7AAybJfI;R)xh@2jv`2CNsWr4fH=Yc5JHSGgwlKn zFi59FV%F;90csyiQNo#GW^8G!E z+fpzx7l09l{8RG~!^wbFy|NtxRdS`AGB?K&KtK`e{kI>_zz}kmT0JEe!(=U+gF(H^ zTA*5TB|TtL$(P)*Ni84SFCi0V5-XeJ(G#qBb2t*$22RdK=s(!U{Cd*ihn{`w?JY)A zy-7J~P?^EZZ|mc0V%=y?ChG&XVL3<=()~;k5RT*x$R~CQF+vz>uWL>u4ACOR1Ch42 zc9|D3MO}d|#$!nlIi8Dl#$-v>nW0B@i$@hCE)q*Pvz>&)7FjXCqz2PO#orUFhf8nvK82E zg;rwXuFh!Px(>o~k(3c^7F?s97;}=HGgFMkbqEgdt#FKss0e9@cko8kknDUasu2s(!c??lEe?jE8!03^O=<$z ze==4MoxyNxAUMFE&vV~>v}=tYeE9z2g&TE%z{gMAUD$H;R0);QZxy z$JP&C`Y;3#d-cJ^-pgl=Dmr53@YkPu{F*a1o^j_B!@l}iE)PC(R(X~C=)V0I>&RJC zN;)d0_jl~uw*gllR}QJl2XFj!?>^~A>?JaWwk z0Pe}R+;Y_zCG#}xg{6G7AHLYL{EPN0it;!B0KF?118(rKrQfcbdK6IUhmJU4=H-8x zd9Y&+o_mDr&846HJNT<5={#BXk!Oy!EM2|oRnJu`4jzw210O$8HRxAkM_n-gu@k2~ za@HSTT(ETK)n^SYgjI`Qcw_6Z^M^_R05rV*p;ygc{c6IYke*e{Kk~FQzhfP(9l_R) zV1|^*;J=4R^=E6>idt?jH-F`**LR;a)Cn-{nx`N8wBodD&b({thC7b?{XgSNUp@W9FSb-H++H(S4z0ay z#*|xE|LK}H<}E(Qv-{zLl^fBv^oeD$Id|tsL@Yn+?gv&~lL7z;uYK&zCi~@+2U?dx#g-A%j;6phVLhkF`EupI)k4tlGRJ!siZy3%xf!KFCXI1NMycO2ssmG0)Ux} zQ9^0=D-5{`T_ld(JKVrnBZ$e#MK_ou7&^8! 
zMretI)W)F>Zj-;tm@I z%EO2Q_NAOh3@8Ki_LBAqlU0UHpkz%$Wl3}-0Pc(GK<3hgTStvA6T`Sptj&s#wItol zmiVG=$MIxLl~IETVblc8)OB5wq-ZRT31KGVvB1D&)Fgywj1iI3l1ZKybd%cc)_6Rr zr(%*K#luNL2qqXI1PBb%Bm&W53E5`Vbq#~ECphlpz-I?vxbzQW`W>?&q z=XDq7aufrA>b6%*8jel2dqcIJXeb+3ie&-yBveKWV=P!bg4JUOV8MpK?hkegPN`&c z0U**dQ$~O}Ux~+4=7>~Bwl1tJotP)MWsYJ1qPXPpqw*QBXnVYNZ6n5rw+i_~y_5sO zdh>ciUsXLnIS%qidl5DFZtZB=)+II?$!exnG|AWiM$#FkaF3pdmm%+PFJtsS2v{<| zS&<(+SSc2wkwB=E*aw%3S)OmJQDJdYi=-Tf=O;r6Q4)Xw29|2+gd8{RN?;9$$HEY} zinA!9m=VRRdb>m0m=7Vu00A-)!!*sjr2~s7uM>C5&wpELJ}c^!#qc@;Ifqt z#*H6`L|Fid*>V|U2&+sFQ^17kj3OM5>L>{v^>M|KvX=am_EmJl+*(;vS<^7Es9=CU zpXa}miA(;BV=tI->;?CJ#m0s*@cipvKE17KYg0_}o_1wk4|B!m9C_09N1k+j?1yZhvM{VFQE@U2%)Yl}-hyZE(KtXz7|L)YN`5i6F8i~luW%m$1J~BNFRqdhhFf))2HmN+TF%Hg9j8j`TxOZCr!jPfK)j3WeF5t za@0k0-n{oq& zL6rd7Yg@wYwJY07jvwMYIP0bkpE*5i>FbUe(~G-Lhi&xH6CJCU`e#nu=PUy=G!wwk z3|9=WBk?k{Ni6n zKDGAsjgeEbpFU{0hX#5)uoungIqH_1qjmsr&ba@T7du9eQfah$#XD*NVBgWyhfTRE96Vp!C7O$5ojusFD0QT;t?Ux-khyle6 zQ_N^WClcAdK5*;|wU*m^*y1C1?x1qm9;pGHpE&IH!M!SnzM5H|PfTxm>$G44GsY-0 z4aOKHgixRu6GjlQbb4N30GL7V#v%02tJEuCLFJfXU_dc2U^`cAXxY)2U7qhN$^}4} zA(Nnzab>Yk*X~cMqJe0^h$5HQAy}n!@}_iBCqj{}&}l1lgldBwbsPv@$hi&E-}efo(xE$A8KN~ioz%ZK{JGCh@ibMArx@IWmb``UciKO7yy zfH7vKDz}=l-InVz_XLxQUGclW>FNjpVvq`_1h-5v6`Tr13^6d;!<54`95)g=QdE(e z8X<&XoaRRu$9e}I?Gc<%41EDhj8H-eBPy>jL9tmNP15wTDgYEQ3~V@ESF*U}Kuy#$#G`c>iic}iA5Jo) zIHBaUtYk;hDG5XZeU~mdM8+{CR{{ef!KCU@Vj&&zoW%zwLb(%NG>#M>Zr&cE37iU< zfOxB&v*t?ih+(zKhD9&&tH_GUPtep=(~YgWYj)K%3@k1f_+3evZ*b>TeFMk&zH=?S zJ^LF@lZm}_KQ|xVJ?LxveTZt60pvjy-UGbJ+Xqw}KHz`yv6*aM{rZX(&wgP5gcd#j z=zv*MW=}mlx4#6+IqA>0U#0!!!3%#EI(KqWym~`T>9x=NVweRW)w<^SR~GC7psQYZ zs(jug`srh9Isk&t{rgq-oGGrVMUQ_L1c*KT@>^M_&mNyMY0iKb{{H60Q+A&B=uIQP z`d+ORA3LXLmv4NZXRm$G>{)rOhaH~PZ!BvK>)w25$sPc@@QqhS%{y&|2#M-v=l!Ai zmorDY+CTm<`@B2nlt~1ic-1`(ci(v5rbTz(zX-rH>ymr_G~`?P_F+Q$6CZ~%dP$kX z%F7(qOh`rs-w%}HBhS5R?3V>``|qAbc79w7kR=Pf7r3QtydPfJw6!7ePd&**jKy zZk!x^D;pR4A&wn(63|L6kAF6kWOOb!6M{T=|I$ZXltoJ!HW;n|!-fDzoS zi68@~7Ma)x)rRUe)koV+Srz<8WC0-vFfeS%u~>2}CZfKS(NQ0++t9{wLf&wv;uR?X zAdFf6r!>Y?Ohsdg&)V5ycMR}yDz|5Oi)7<@H}B4~U;+2b--H0cH{oI^0tSp@OyL*; zi0H^C!XZP)Ac)uQN(DQyr2t_U%5`HZeshWmF@_n#8W@pODw?qPY{WD}d-igKS4(q4 zyBh363L`t)L3N23rH35qN91*rg!>F5)Nh2LeFxS)joLr7uOGFB7=eBX-1`?y^Ck?T z?16a@*Fix5wlR&^gc zsM^}v=H}+^yLkV8+V7>7=<9yA&(*x&2m8Iz$K~yJ@v{T`ci;oJ2SUBS>xQvycUmcv zKcJ{U;4@0i488;@XVSd0Ce1tR#;+hFEG4J^>#^e-YbxvFV$KN{l-Ro28Zm3ah4Uv| zIKTIk6^~9nzpq@ug-=euuwOmpSKYhhyg*ELIi;^vKuH5G`@>DRe_WC>?Aq(Em%RNa z)_09WO*1cI*wpu-~1L?@Awt*hXA=$D8Li?A+eTd?ghFY(H-W zlq?FlK95!T@ni1HaP5Z8d%`4Z$jFLZC6h?>e*l>02|%E_Ed^QK>Gous1fY7X`~h~e zd5c{H#p@7p;;YJuWxsf4^NjN!z4`Zldh)eD&RXC2!Nv!Vk}DsXJL6XKre&}EtwL&f z_L+mP&JR>Nlh^iV48cG5o<(!-$#lebhTgGp;j$HPuMGphiidxHNxqds0E{LhO*`s3 ziq3t!fKOI0{l|5WFEgf&2Cq&7 znMCOh66ssyG5`d9zA>o(a;$H$&i3=0InYZ42$hT-0;bupqqcr)jmPiH&hrsTpok&H zx&1y@kq0vdfc2E#ysM>UdyDLrDkcr)WUjlxl;jN0PKC6pWi^b4L6gg{fV-na5W|df z7rI@3kBJREpx1ud8gA(-8JBO*vttG(Vh9iudG7$HcYu@XW~4U0cTuF12Dr zaJNxcsWCN=zU z%TdA@4z)368XYmprKru2hsq35#K<@Jf{@(apvU6Dot^e#JE5dum3|pZP+!4bf*IAVkEF8lIL4=wHFpwNlJQ%TNNwEeUiy&J0cq)lZr0Xba zkVES*I%~Axo+xl8F-Xyo_O{Ow1E%*8eSturwzl@CbXzoxZB@0EwT4WaKX59U+e$?fYQr8UU_pSmFUkO_Tk4=z>2MlR_?uV zSP2IJpoLl@AdWkAlog;00Dzfh&vea!(?zLIh^}KQ%Hs`kY=k$Mnz(sexae6Q=I~^j1$-`z`vTt!q`T0*heL_>1WMw-9 z0PVhKE(CXX*t>Jw{J*?>cKxn8&0bPm=<3}^5%;MRX8(55?0s6O7N2z2gGV>lRy8Dq ztmEeSt?80V>G-qn9)EV<=km|JZ|b?3v-~}S0gY6xPqbic=ICH0il|4rFn8d^?{>6V z*<0wz$EX1rIt(pzSXUNqxwT!)f`a3mVv$56X6FWD%+S&%BVOP__S{rFL{Vvy)-_9! 
zsx8a+@|Xh*j6t-k%kA}q!x6-gAc=w~bcMSVRW%F)Ft%7#%p;54YH{0$fD#c61BwJg zU0o3~K^bF)p5i%9*K~|A!2}Sbl1YviI046F36U2N#*9%0j510nB9dTGLlDIQV}_*p zARwH7Xc-&Ky4r(Ztlf?^(<=lm{ya-lSUB8Ya9AOY>$^VLRWPi;>ahVZ ziUBcBvU3&34AjDjJ!@*I#)>EVd4(ayBk1;@A(ST$zIcMdz--yk(Xg?VQ>EM?E+tzg z4AP1(j^mtVqO;7&sL@^>s9d~Pu<)u^^i|jy0tSR>Om&DUUl3-AS{1!`{*?4MmWqelSJd4%mJC z4#GXe6T_f-9ID4*VjV(SW7S?QVkmaOWXzsrwdS}0fKbE$V+Mf1Hz+&Z6Yfx~etRo5 zG=&q9m3Njo92~(EBAqF{Q|BbkqFTZo39F5SS~U*LBxW{I>1s*=rq)79$q}13bjd1C zgms!QQeC_yk5ha^GjzqqceNzc961uuuxUyvhi%y9vH*Zm0FIFWKq)giDm0ZWvdb z5$OH&V8*(V0EuAifO>vvGJsS#-V=Ht3_S0)v+q0k#lPOY;_QcKxG-q-OW$l5`0GdK zPQgb)U6wsTmP z6NBFJ&%5@lM0mfH73NzE)KDoQhlom|-f|#nS1; zT34!OWwpj&@bnT+;wd29(;fzZ0A~;O=2Uo0leKLNcI=7JWJMPlN3EL1kKEZ7!~#Ug@;n|C)Gx`A~hSVTf`B%ZWMGLcCy3be)$t*{ZZR zcH|ArxMnaEAxACF5ViVc#3;jz0fv1{j4(pj zU2b(0t6Jwiv(Vh!Tv6?p?~JI?w@Es3K1RPzrGjy_9=odMeO$; zx~(%S?QPya&OdvMn*hl^@sTC} z)K0zj#7XU!olz0nxU6#2!;Ahr!P-}C?PC}Hs`HcyuDwg1S(Nve1@|85!T@maawb6h z^FPm=T08%jQ#}m}-zmP~v8#qlj^b;t8ngJ0f6VYaTYmC&_g;E(_R^)yluaM|hJ1C`F4@eDG^AcOzHZ49(zY}YoeZA_AgnoeQ?s;E(NfaR~5 z!0`{31fDzAnrheGIZ}OffE1S^3R*nXw6m!$ z6f?A>GsjantdJ3$7Df~wUffpQw*J-63rFYqO1%srjv``}2Oc{p6--vGuG3O@(3Bi( z7X=eBlurKMZTi6x$Q|CzR~kmDWlJDf8;Qpa|1cNt61!;!gdGKnBfr?t%*N05wl``v z&DFUx?i+_Cmkqw?Il|sq6IDyH zC`RNfxfq5RV~j;LQs`3he7xOMF!P`3KtLE!0AR8f-WEu;bt!|q)>0=B;wbXCSQa3# zW_^uGaC&IuR}^bniWw$wM2jQ?bsdRtQgq1#A%d{%P_rzOtV#$WvApnW1xE})Ke+{~ z*|2lVvd;&NE$1x)LI~5ewyLj^9&r$t_G#uMQZS;30*f{Tw=LhU*d*TopX^BUFX^}^ z(rbw0F7&ty0WlV83L!uUNGuo?-JEI_FvAR)9#KtoC5QxC7vW_N14STkwD0@#ir}kI zeH8$V1sa2Ot6KJkJAwrfENNJEYPRF1hK1OKy7RfbSCL=y|ukIq%l}|BkIAu6T9#1$CP@R&~h* zSKK^szfEk%%)h;Gn|If)@R+-o-%;uyUt5mk&0ExQff%aXysHhS{p-T=EC~aEeZuW) z+b^qW;|fZAB0y?O^^K5M;uQfZp4fiH6MYZk1C~A%kxHe~r#^!WGRWYk2*glq{YOSp zRfh}?Hrn-+3R0wSoP!~Azcn~TXf>?NZylGF2gu;{jzqZJ+&fh;3;8|u$mzv)#2R)d zj5b%NsBJvDwQCt!Z*P^QuKI0Vv5p;nA+hO$&s0Uxk{ZI;G)=}>H?@KgNBBf4DXWrU z=tSTYyT$Ib10c88jX7+X6fw|JsboBvN~Pjm(PSdoZ843IfG`kjY8ns>0H$eh9G8eE z2qB8yVw$EbiIh@6AX*e(c9vwfe`Haw!pLj1bJDuOGxf zF^EMD12F)sON=D}GztPqORk7pad>5nbJAwC095NriZ=P1&{DnJ9+5-_%x;l^fj!sD z5M#*1l=O-?Ow^Nz>05s9!ovr$m=S1d>)6|A#!XL|*PY`QR1pyQiu277QvJ3;g;tuR z?NzNETiOf@_U1X1EE_ox0c6X!r_)GDBHy|vmoCNF&?0gn_WXn&Ll*zYDgJ|v45e`Nw8C#*j#ykOfI4(S-k09U zE$fkBiQbZu10U^Rr4KC_hEZQ%KY8-x%=pjX$GKsXOhWK|SBxaHh48ImVisNY9!h$; z+9fR^c10;y6z<%k+44k>e;-pn%-Ev(&t~QRmQZN>JmJu1s5=cZWk*|*m=Q;GI|o3K zBOKM3*2%A1uvs&TQ4QO>Sb{RHXv^tm|DkB)G%IEtr1L@_$TuvlI5d-bGMhtlk%AX0>)y|C`UM|82}K8 zL_S}+Zp4%^yhA1k^Sm%*`Y4u4Rj%8$dtIfdh}`E<_gHtbB_L(H3FTN%HXWZo3;bCO zSk0E|uI6az;iXcxiWtr=^JbNK6P;c48*8JHxVJDXyW9uZz&u}ZTp6Ke&AJ9)j3IQu zYZWt`_ORh4QaCa<-TY`c-dY`L+Z~c^qQz&i=35d$UDuGDB`8iGe6t)w6aitTA^>J6 zrl(C!;JaKNKZrU8nX#Rp1J$r@N1Qdb%ZTh{*{7`ZkBX-D{PYGY0@D zA{+&VeqzrjtEwD3^$0aX#AJ{`1{wS`fe;!$e*E&~%T3cH<+IAHK5I9% z(UUEx6*q5QY@0t)Z2L?z6Ev{XecGk%3A8s1p%hB!D6TVO&x(ZChs5;-8*s;{%c|pV z*^17rEATlkP=DcIj8riG$MoNq62PKnSqHtFmBmJJR`>yVLsug&uPm27NLF z6JgANA)pAOp7!>oTS7>&+tNO&2tab#Y;H&G);)<}M5lDbv@wDrf+&_x91dWxxUq3< zXJ=#B!qthkR!@P??e_rc%Swy@6^$wbYVO%s6Wq9lS&AmAn6Z{^otxg= zo;$?n8|1}|X{i(#0>wI_0-*HjygS(r>;206Gc=~^P)eufBcOUX6>N@I{kuLZ-`&*~ z)01v}RjTL$L$DrHGm-w{1ek>|3Mi!^9ii`qr2Juml`z-Qf;cJJnd0)*mNkj8%gew- z`WbRzR^2cO zM`Gcq#is%#h#{;HU?wq9M{_{6D3VhOw*<3tvy)8IV;V2>{_XYqw-bjMqdJ#hDG4dToGcE>~Tn_nGQlrc2xhld}2SP+CcbLM2aB7+|V*;#7C z-bkh&((rm@p3A6XzU4C?nP6z~P80E2iFS7~SK@e0b`Uj7WuI6e5C8xm07*naR16rR z=^QT%7^3h#S}>~PSpOIq0cs0$N^8AG!T4?hfB^tdV6X03n;&&H2s>(bm!4x8xp2jb zrcoXF^}c4>T$8Lp0Fo>xk_nD*2q7({V}cN1Uf>PGFbrK1CCaGD7(zH6j;MBvEJ=xY z+%QbTG$cvZQ<|zuCWB-$$%&jGh|xfopoq+*q9P*>1v>*?pC=RuI_>tZNLNZrdA#0~ 
zu3?^2Rh1V6SH7=e+;9Mh2g2Jv+QcEwJ0LsDpMwd6cSeOaXYs^r&$Ju^(fW;h+Uwin zUGd>Z4(AlUdj-o7Ag*-u0F!Cet7|(O!ljdn0i}ImNY;=n?=W}!*7gmrY#DO=K+(br zR({7@O|n}Q6yz?m^Hv^X4pCs39(cLRI0Qf#p@{Gnk~Kid9^|cD)?gUSb{I7}?2*=p z(`Lg5OZtqMf*Hn;Fu-IO;G1Q!{?X}}G@F2<;kGZ<%W8>1R7JK*d4j2O)&l-N`3h`* zO$T8pP3SS8oPeSox;;nX05OD$hW|f%XC5F~Ri6F#J?HMVSJ%?r)%)H(-7`H4Gs6rZ zpn!si3y44rF68qI8e@!Gj9-E=#x03ijK(Y)4N43O5&?k$*#`z@VfL+O>F()$t^IE2 zyx$+yJ%FMCblj-lUtLwVZ`D0_sXEVn-sf$v#10Y4a4J@5!k%cNrO2TrPf4Y;uHx)k z>R4To*yL&N(^41p=cXE=tfVwo)(_Oszhnw-?0GilXnth+@Y9F9g<`NV5sOExWLRNb z0wHik(=vSxnZ5>zlKE)o%ExydS6pzotVzY=tu@FPA>y*|guo;q0;npBwMJttv5HXR z7QMOQxpZF&kdXLuv_QrH37BdEw=kHtQay3$%DLg}@b*bv=Z%APJgf>}j4E67yn&U7 z44C9*G?DeQGtrfoO+Gy<7+^rp^u$2HpU;!^+l!W1^M><+U;X&IJ~yPSzVeTLXDQ=R zy7}QPcii<*%{gaUj`4FN!wX|ucaBaE$m;#pDph`+a8G>ay&wCJy#N64z>lU^z53Q` ztMM@}nrpAU_NJR|`p8E*r^ghHX;{0(Bk-Zy{n%WwYRm#h2s1$gkm2fy>3@7#Ff zjfq5}IvQ1eB?JRnG@>tNo$62kKtp|~ItVX;QWm+v6?CRXnOQ4}wT@7VV+_;fu1nL5 z_`-B9Ke^bnvH^e-g3p|7$Hy*g%~U3pcRo10p}84JN*5V|y&w>=#LI!OHB%_d%}W36 z{oB7=>btUSw0+N&9oI2Xn8edg6N%3@Kw8psaQP(&FndfQsH*6ZCW|Qu#PE5V#OW;} zV65_F=XrHH&z-op;4fNKGJdRZ=X5tIjg!HkEXeiPPxO!Z^_F80z-LrCyfc((%}p{r z5id-6!G^Fq>5G-bUd{^fCmy17ZD-L7#LetjDVz#200M>trw0p(FCT@^^+Z+Pe>q&2 zPVc3q1%6WTdhUBOdiI&h$N0%*YnswEO=*Eb6x_M-nd##brMzuKtaNiaQWuj*1jLC0 zM>7`eSl>+`NYd=k^w8r+gd^HEwyQxEK&gDB<=A7y=Zu^dP|c9O@3#Fd>odW|ums@5 z;dD_|rI?Mh$^))JvNIX&u2F1xYR^=8rrffzR*e~egwNj_j4G+a9FlF$lUw5N4AdqA z0J1b$o;^OFJyz_zs-=3d{VBh(t%kK7aTszMR=xTytDn95eR-Ssi=Sef`9+@4(cV{| z>)iLv@13abi8F&e`NR))PL!X$I-sH&!B9=6p>sv%m(?q$>Vf)=#LvxSpM5YtRg_g- z`^94z##g@bmDj)i^>2OaTW`PpcEhOFpZ?Zz@ZiC#ufDpWq2W`X`c!o^s{9IR%!I0P z3;=|JS|nUG@4W=ZZ~m&{IT(&|^8MVR%R;G(Vw1N^H&)^X?D@gIP<s=MInFsnthnWY+9#sUhFowe3_#x^CC#u_x~JfA>=D@Z8jz zcem~hnv66CC~&#y3jmS?sFW8~^cQ3#X{matQiO!E(tZgf0Rkdb3Ni>8fB<5Em3g%A zg#_YirRNON(i$rTOUM}#*DOuNP^dD`PBhk@+;{Bw-lI;z>D2-h+LY^akSr0Jc;=MG&ctoORV;d7-rL&Vxok z?bzH55k(aWSNOms9XoG3>K1%8q;OP1DJ7H&004n3&XkI?_RyoVOyRo1j5@2PF`zIe zmRn;BWQqiNx=u=kNQ)L}W!4H-(;vVepZ=4|NN3;?<~#&ABXO!{xLApRkr2Zt{9@4? zRam1^n&d^h+UR30RN)s+qKA}*V3mR=!}Y-z6}aW$aVb~R8JB1!P% zHCOTQY)*;e`N!G>M>L)4Ey`G<9IMxNX)*^K?(G zVlo0&8M4X=hqi5>9424L*(sK&MSxJ+4AdGmU9sw=_GgKPTzP?WHQ002^$X@N<^5T; zzV{4H3Tw}P)8-w2^}+XlZHjlC^R_ql)`rB~p@Uog?yt8U*SoK{@_nDUVomZ4kAKwz z^&5KZG@Z3B{FRX$007jjTbop>WAc)??6S)~``OQa>|-Ch@WKnf`OR;3baYh5U7#T!jk`EAa-P1IOzEz#pA z7hlOolxZjfeF?;Bu<=VE+`R#vY(*OJ-A(;yn5=O7&}rd$BtXQwi?J5pQ_K!PWiDZEsO4s#$h<*>baXAd9*ef z-N&D_J)aRU#!BU~Wm$|PL(FD#mSwrFYXt(u*{l@{JC1Eymg6}JGC~MMLaM6fa(Pu% zi`krEnZDyWw!;}$GzB@@3jKYfDa9RTKg; zOj8dS^-WFfgS{z+G@ z^cvoGUK?l%lPH-)0xM-%DI?JrV+6vZ@>J>QLsN(%(HOCXt#e~xIBV6Zv1E-AGf+V! 
z)KjXN(Cast%SkYm^chf48VU)*yntW0Bj%SV$1vedO_mNnLt4AE(vh}0FK?Q)fncXW z@G>LI3CPZR^ZSd*)%xWAe7Za4&AP{qA0ND^H=Cc^|KzUBWSvhE0kSjMU@|0xG;}>z zC|Chgm8_62GL0jma0OQx0r*1bx=tKJ;gA4QI1@sUEfk$+O9e()n97Ay5t6VlyAX+o z6sEYg8%{<9Np9-oxZ!k-?|9M|j_3NWr*dU{Vxq0FDcVxUBuGv`j4iouIHX=?O&`JeDe+Gwgs2(lZ(R8z4zVo$Z(d$x-WU%<*!OF-;$8Up&fUBfBVrX z5pG}knyWYW#Gak@%fjemx7_!{;T+a=b>!vq-rb$w{rw-_|I~O{b)QM zfB*a6-`Cf7%{A9tb=6g?SFeu6VvJQUUcbrTkiI`QHumVFkAD5@U*EcQYez@NZMWSv zFfdRZnU~7$BknUteo6h{XBXuwVMv=B!vOG#Rb)&(!eFFOA&cNs%oH&zEJ7 z^iC)?9{7e9yR4k*i`Ng<0WrVi9o)8$%6_u7COJ@x5><{DU0UN8z1?>g-oEj?HS4(f z4&98P&lw@)R+S$5*1>jY+i#qF#IRf1S|RE_@rGSjnSaP2@N9-C5CFY!Yzm@WS_S|J zC3(eisaUFQt!Ic<$Px??6abK?+tv^vU*i9G5h#E z53Rpovl+I4Vf(t?WJ_&vF>lW}P5o`5MD*}&i*A8u;*n5Yq@SZi$t5QQMjTYW`i%p* zk;NU~eI}3ywybL~LOKx;GDaK$tZ;DE>$-iU@x9{*?>?&6=q(#EtFG?$7QDj`4Rf7K zq{dbCMnnPNl@3!r^Zi^|4f5_+H4tZ&hCC7>(H@KNacC+#TUv2hCo73Vw=c9^ z94BN*AdxikOoa=8lFNPe3(JW4)yOfD(B8Y%Z0KCv>zEy}!m)x7tT0Zp7*GN)GeZLb zjWBOGD_EHKntEao;7T~qV|C4*S|Hb4fAzUY6`==l*-b@D$+*wYf1%UKFi7MxUl#I3 zr)X=s#&yLhJF2eKh8il>aiFkDag#(qDM*s%c&_7s1ixgL@|#P}jKwDCN+$p%Z&gCekt6b{hH`12qu>Kw^=WM5HxI zj(2L`MDgijG##oNtOt659f!n$LnIRpH3XTU(t>kx$C$t1Ye6N}7>+f@UQkFu03Na$ z*BCyRv8+G7cSaP27GasSneved;YG=e6;A98uDeh`)pPMOaZYUg?gLF9J3pg~#b<85 z`AeU<=b+vG&dAs|cDONh?4keM@}m>xx4yVO#sJDEZ~enBJl6A$4}bK6{B57V@%kS; z_@`g|;_o$S;0}NL3m?5X^6oFb{r$n=?|=NS-u;y=@4MyW*YueH;7>jAH^1|*Ggtoc zpZsO>;vHZ9;6E+^Ygm?P4*%yLe0}1gw_p4I_4~j4*Pnjt-CIBRw>Mol^Mk+n+CTr- zQM>z%u;=I_5A7eN`N7Nn?3_q-U%j}ikgERpS(-dEvCC!ZAhJwF9=ES@J?IGSJuEbH->`&_0(8?e$jsAY_+PgOkFTj7t*Rf zHeSZ;I?>Eh%v2Qn$Sf3RK%?*qIdv-Aer4pykB)k-kW%z+>{R0h5K2j>6MM^!ICRv1 z|JzKbc$6E4WtyA;NpdCmuJ0F$p6e6~g;KdJ zd{J7+YPykI%qHqnk!Um!4%xO141@8ArYgXYE2@;Bg+s^~5a{8+x=>(v-{GmznOI$- z|Lj%4R3ws!3L*pvf*JLC#6&cZh(+6L0YFwFTTvn*Ax0V#>l$jx+2ZhyF*olirq=bU zW>ggfAf!rxRbzi0^wY%9+|%FMv-ZscT1@M_xE+wZ+0xklDOFK3>(hkDBtv4dOlsOt z2=cQO$~^3Nj+b^FBymzMHs9c44=TFs$AIxxC#WO96= z+i&Ff+>u+V@+FakgYIp~(ESAH(k{FGl6?M;cs z+U(4{Q!YPq$JR()tbJ{ljQ%bn=N~$%a@s6O{~*Dw^Mt^Mw-whJhNby(RN z^QAwyb@iRU+oR1s_=&%NAoKg*zF|cThng$j^`6@w|MXLpPvH*z^FQ9x{N8`Rw$}n^ z`jZGk$O)(p%MHeok0$<^Orxs&Do2M^|Qx$2nwW?wrQOQ+Lo)~wmEVS}Pn&y1f-i&@*N^|xz3 zxd1Qj=7nd3wL%2fzC6>tko3qaPm8Hg6E|z(W~t;&P8V%Q_`dw@JsCk&cqFW+k``lC zH~h=QFPSPRzGR@M@}Va}Ko0tw6IvK}xQXQ=XEC>V@{49n6DP4DSI zl_A0G*bEVkjgE!F0mpGp4jpqB3R=)2AvIkCM9*~%!^qCgXWE;(&RNq9(#hLT-~E_w zTAk;t2m}H^NZ?N&b&a5s%=&6rqvfb!>H4FGMvGIW#{Nv8E`$ussMT|RCje!qvqv8p z&Q53Rx>8N6GYq90wm+NTIoJQyKUSiAWq!vk zpS<;1FQFb%0pe$EOlttZ19fdd*u7Ac0HqT@xOFu2hW@k$0Kih`edfXTIjg>o+XrvG z^PrQyyk{wVhNlMKbV2apJMRC%zN^}!4}Eh>q3N?L;*~wggAJWgsK_z)$o+Sm%r@Th zrH?;QnF^K~>ocM}lM@hA3>Bc`oK20Y!kP!Faj?G#cyh%#ZLs^u@MC)m*R)59&)oXw zpP4%MLvMMl|v-K4>V}c99Dk!x>O{p zElv9fq?808T>%_D;T7lek$6;+V4O=S6;;zrBT}p4QaKVFRM~@>>W_4{{R|WyYsZi)OK_r|# zw(!iId!zBFG@*U4i8v8Zx+j)w3OxSpp-pe?m55r9OO>^so2l4sqSKb=`jaPeAz{E& z7JP*ney*5PNL3>kt&8Pmi}Bj9#3e_J$w6mO^0DM2fbvV063&zygMgBG&e21P8Cf_* zuvnOT#^jxTQ1?CGbEj;rKJb#&9T40Hp)djoAW@R8E5L*#oH;gj>_?-8g(<cV zB@~NwoE_$pmlg_#p4_j_&!-wKv##E#Zecqe>vEy~3*Ip9KC62@?`IHl&Oj0oKxz8eG;sBFtP(Tp8yf%sa>LuF0HV5jdL2f? z+7#Sz{P?(An%*%C%G#*Teo~EdW{*!p-Fx5up|>S|T;eTPW7*CCTDY!;0m#Mi0@(B4 z`q+;$pZ>cm)|vpB-~Yr@54`K~uYBpy1)uL6t9qbbnZM7D-G9g7rF*J{t+zci^?&-C zt44q-RjQm>Eru8Ooz`LiK%e=xTk{q_y!qv6F&Ca?)Yeq1SE^L`SvW;R-4I;$G+(U` z&g|8pZl?FOkyB5P4)2|;?MH5C!b5*fS0~K-{S2Of- zv7|6X)il?2HBECp*KzDfBw8rsb=?rc0|3ssl)`qs6&nVc*YxarXoovnYUpnzf$Oi` z@bs-ax;D1fG$n$aps;`%WD-c%wb_})Kz%SAj{pEN*0i#rX=Q@|fB5OqZU41BS{rI! 
z+n|MY07$ealI;lz{@lsM?LRnR=}OzV87*WnOJDn@j@kL_si)>r&B6LrHC$H#5Q&G9 zLAT&GpJPNCOj(xYoTG(d`k_*=-_TMTp>SqOGbCe~n2AE=;7Q0mr-L>AavNowx!;14 z^5y)|or>DU)v&L}jh1-Lg`vq^c_S4z8w`>!)dMGrM;F|(7amN}bU8mb8Q`d1pqe37M2^c4m2m}%dTk%!;&(AIB!c#am3`1x# zl8iL;wQ|mAxe*f~FaknEdbXbl1CT;GWk>h|8LB+ZgC+r;dS+;3_mJ&-P2J5}$RJP{ zaY8WT!Tt-^$n4zAmODy99er}FuD#9?c2jGkkqlHKZz?)vr3DvLS!PoUUs7Sjsl!v` zi`u?A;7lRLkacWs2b4owjs+$HxrI{u>N*}b2?(DPmW<0Sq$>4EilxVzbO|EX9%2$| z1ul$dN48IkS=VX_rhB6dH2?@DL(?-T^r#0apd^K(9F!0EAtT zM)0RE#yRIOy=x>d+fR!t-rRIKqAMx`$dAo=kgQa@a6PC27?B|N7hM5bKAZL9uB=74i3-WY7! z7_m%sVz@MYtkfJ#0(xexuovC2L-XN!9$eixSm-P>{SXUq2Dywf}$Ym@-@Pq@$kVBz9ZPR)RkzAU6jE)f~=)T*R{805m1Snv>^-Hu-Y zXGD&vWY8A&ZVSXRg_C}1p31hg8cZHgxDpNY5|ir|g0BDoAOJ~3K~&1m*lHvajQoPD zSsI}z47q9q=C@4;GJ){g1ecf}w`GBHM?AmiL^}0Td&+&6*3_8m#rCIB#E?H6?*P<@%k z4oxjSHD;(>iyDmsb)+GyoMONL2!SdJQl%f0B2l754mFL@cw+e(DldW>q8XeB$=Ij%>cZ zM*{$GC+_>FCptfTdB>|7;mP5x+vcwCsZ{^~y!=cVwC?l!O>?BR1`Z$p;r{unQ_pr- zCMf~Htm|)rM}PRO+x88-qf!?kv)ljaR&m1(o2pe3XM*y~(FpSR*L}uVfFmPGcPt! z{3o@T>abU-^1oses7Yq1x?dtShw<*xXx@3n5FmuADqQ8|a+zGu7n12pI2w zBXWLmu6*#zQ?VYdhqc38 zBq0Ll92iOfMd8b$9uv!%xRtDCLLdZ5fIx(Zm4@ShRMBE25RzQh%@`0a>x6`WK=Ro= zi70^)NZ)5Fmnu^gv+sg601{mP*q))II}TWu)pO2(S1xH?n?fsB5wJ?gb8~;2L?f5=Q*wYb!LsGaPBi$PB2>1HJ8@Bd<-T#lQrE@DtVKK7Dw)zbN9<+TFLEM zjK8^Vsj(UUDv#IS2&7~C0K)a3dyI?OXa40QkEA|-34rj+0zkMzR(1qQ0O@%?fZp)B z8wS4hr+fe9`p@})a_L}rVb^!=Jh@3%kNzDZ=c@smGADl;>NS04BVNg?wx7; z-S?fF=2qm=E4uIb+M{3n+&7wTdPloe9^ZQ?2V|a_w!Mz}i~jHWZ~W0y|N7RCjlJ*k z&8>9m!FwNxyyJ^8fU=YX;1(TO&1L?@eE|6$No|C%pW2ck-qD!(Kx1^`50jg$NxedgxMLpT0LKYVUgi>XrO%uyZ_ zVnJIh3a-P{cD<~Y+uj&g18mQI`|SlMRBsMk+K-%%A02!8`%n0e6CQ}k0sr8r`GpTB z_ujSp@E)(WqiueFInbyL9hf`tpChHb8xD7n)Q7f?>1lh!ZvBgQZ{i9OFkEUQT)B-+1vyR`1gTOo z#HsOd&ahl6tD3ejJ*%l|KAX!lH+F0oOt#b=+jj8r??2`&kXLqdkA_mQ^*5{r zL`5=zQu_Yv@Z!j}F|z#bOzG%8JXOC z@vA^rfFvTKs&=gHCdBe=cK-vrd{5}d$E^Ca-q+3~EgNls5tkaKx*Jp788=rxv3*!G z)YeVy1W@U+O_gR$z`4m9*EUJdKe2P%d1jmkpm{}u7BoOxijA?-VJjU$lIY3FWAoEP zbHxQaQg0>d<5t21g##c0NkBrXIA|*36q6iGs2@xzt~ysJl962WApg)zv`&wo)`g`}^|6h+S0>Hh0{|9ev3M&Xe($1gSdt#>jo!@Tsj^F)*Z{ITquz1_w z|4-~)XGizlf6D`l0L5Fs^uy$h7q7Yc!=FC+=|BJ1ZD0GqHh|!&_k8e^*JV@yNNxD= zH{av^-+%wemDx96)>z)RXLQAlfBm7}000KmWzM0TLI#=WqUeg^?Y)_up^5e+&S=|E0f=T>q|D z^~S0<>VIxZ$G`jM-yeg(=8s=H7y)YTt%q$7{q0>xHeTIj0RYIkXTJFr`L+*NJy2Du zRH;(sm!ZV~03eoHzTnopg%@`VYlR;^=f&jsR<)QaRbDnMt{3ObXuT}Vp;A{o`J>`% z-=pogWp~fTUBPt7FL=A|+be80(VSR+#Rfv0S(s1`?CaU{9`y~2uf1ulrUn*=7ro8l zXP%yj)Im;;)D6@f*}6}g(=lyNX4u(*F0OzCrBaXt0EEDN=?f2#qy!*hmD8Mv005C8 zDvXiJh=IaXAVh>jz=Y+RBLI*{l?o34kjr&SjH{ZZs+PszblnmWLZY=1mugCu-U%h_ zc;K;Z-@8|j1XrK8v9`7Fz*l}WerQZ6&~{EUCuQH=N4=bF)S8LfNUSNIZcEp;rE>GR znd4LA2PZTAnRHj0bDn5U)HEm27el*G?tA2*8M6`%@y702-B8-jYM~9_&6W>6a#%3l ze?iyT*F}b(o;v)v!CR;Mu1mJQAvkxqWC>q2l#)jR zUchVqn<2+YBnAZZzt6)7sc>k&+1a=7$h^=~b6aiKwK#LNI1j|j5+$RbHuobYyi<2h zg&UOO5z?91`Gqkv(*ToH9IW(l%vNZt!D*icV{ObKGAhe#%vClww@Wx zaC*ry0g(idh(ODp9RvjYdF6(do{n7)ZF4;@*^q8p(V?gc0wA$KO>E$j_4$eE1w|j) zeiBa18>Xp8jc76+u8jh(1k?Znu7`s?7xpc!&i8lSzq@@+dn6h38B^p^tsqmi_6;o( zBo*z2xkB#HlqmRQvWEV2AR3fN2pm+UuBR3%wN{=OJ32Fdcv4ju*VK4NjS(}DD*3?- z02x&(2^k|`s;~<*e}C~cf3lGoypS)Q+&N|{Jk%I~wrV2i3!x>?y5^d*uDRwOC&{i9#})=qQXhyLsnZ%Qcu(DJJ5Uj3a}1nmSD{*HT5gk4-_VIPu;U$jq$z<`}JUe5smB} zIdx!+65i~1*SlY{Y3&t3&u1fhjx3hv7}1{L6Mg3g)|_2m&SuX)ySrR0vXpYmiCdG0 z2e;id>XbcIQ>7$i=oQLVD9ChGNXd{95lYDz2a>)oK;c|bq$|t$VmKNumr4M@6^@*F zj;kuFZQESolB5~5@{8Gk6%ZsPNze04%izcg`GR8VQjit2>_X8BhCJbdFNE)_nu;2i zl8jJb^{Y2C#Nzb)k8ZuEp`*F${LZ$GohtAnPaL!7%glhLb2EW(;Mfzx<42~=fTe}h zL}#piAk(lSGc`QF^UmGO11oNHZt7+xYg^aazP6pDoIWP?J7CCZv!hX%}<3VVfIDYF*s*TrFxGet2@#wJGV@?nDVS$zGP@ 
zh!_K?1RE_0d`m8rpJP}FKr!@HuRR-q7RTrIJ+R%EvUQ+6(U`8RI2e!m##FGsHMY8u zh!tk@Qzs{9N2Z16JFZ{XRhMi`(Q_mrsVD>I_aAw3Xe3WoLTx>(1D~_&7$60Vgk{8= z6G&8^FXhIwCm$bGLt5kNdLtZImdOA#mRXUIkU{`2XOA720iViqcDgqbZ;Er>I6Z<= zN+W4F60~THRPZ@kQKMx;hJm2!#O@5M>Uz3uF#UqBn6>@=wJ&_u1sYbbTIze;(G`0B zi)yTKU~@Hq`4@pb@!0nsdgvRwU4Yqd|J4n98d4GNWf!IwmCX7#fAW);U(gp^5}TFe zMSuP06Yu-%zde5cKmYOl05#{{bkhgUvH&1c^*~jrQl(0jm&L&o-s!(bCdC9#V8$_f_GaY#vxAnW6&sku%?O zBf)T)q2qg$D|5y{Q8?#PN)G6ILI|NtEM&7lq-cummw}VQRbTp=f;uRrLdmjB+jba& zUn*OnfT-km_yhoqGmo=mOM|L#O*c^C456W=(UXD!deT=l4SXS_oE;tCa`Sh4)~;;q z>3sFImrV~(?znxMShUg7ntC&l`nd0SqkG2oZ#mS|Ra^8tJpzS7mJQRw@Z7@0eDCI- z#2Z&3F?+VS@BTe@&TU%NP}g5i3Z^^LwVkyfMQNdYeA|gq$*t{-H}p2BDjvD-m@lwy zWhzn|WDLx>^{lEuj1W3540WxwN*r9Nq5QQIVqUTwW1k%Ia*N2AQ!Z=K1TM}A!{mm+ zoRU+r6^+YSiZjN9kCCu*=%BY)Q$7jSKn;1AUsM+sDH197pOYqh0i^?h1PUfmA;0Y{ zv9WDAd(KfT<}(0b;^dAa6Guip5;dtreRG|^$O8ijBu>bJiD)Ph ztArr?wmWrv?C^to!_76d?F|T00w5qa)V}jqNT9`$*`5EhTMw8`gLUClNHARbC`gqc z7%LvK;-R{}2G=bgxbFnlK+}0N6AQ!*D;@x33;+rBkkPiG34lpSxtYb`9TUY_H`W|U zbw;_xQ4$e&E~|Zg0y#rM0EGb%AXc+us=Q(`>dv|DnsctZ<`XYsSsv?u=eNH8=A%PL zrzz3b*>c*@q*OgnRjO2}Qsw2-+p0!`XX@Tqp)B_fc>qw=VyaYm8Pso7$m8W)DV7Qx zowI{|ejsLbY;G^+%G>YUAv`ydj;_9Rt)glBAKo>z^Pm|E{Kci$*0m%6fG$uxL5H^= z-1fu~W5k!=&?uJDy6aB)Gmg5r;1}O__C=d67*sSBq*N5Ol79-7LU^|8lx??AE|yCq$@k?$cRvuXiFuwQ zeZd*eW*0f4YrE%NdeOwG@rVEWu23Yr?xJ(kz3G*gtR2}qTwE+4-L=cjmW_~+=}ZUe zLrrU10D*HpGdvaVNY(c??7Djw_<|d1`^N57uV2N0M)pnY_^+L+$rA0!#=a(C+>8f$ zFX?4cPMw&2>^~mwyQr_GA?A7R+_BujgA@H1b`hR#0)-la#iiQ>;|15UEadcS8-Dr{ zuCtR{wg};*v1@u&=@=UWq(k~Ph#X=BvZow?Ycp#a3;Ar^#x5<*QR>rf# zvgtg1Za!a>r6{~Wr(Xh~vgZjDIl+=@dQ+H56buw1pOKrYg0)Ujc+5F;kas0q^VZ?4+n~JoARAZ^$m@$Z?BcXH{f&9hN@b)iO7Gz+MYee20UP#=8FfD%+o^Tl2FK8-%L zZ0KHb;Tl3(7@2+Q?x#eVeBV(`UfbFTo^j#2?(SruVW3?{YarC4DvDFK{Q>vX(G$Mw z5mBjR`?e!JAtf=+6rysu>`USb*G!#=T*uW_)ptDI)QK}q)61n&ARI)6Yl>PZ6cB(R zP!{e(iTD|d& z>peu@c8+X4lp9$beDk_YPhI0cMiNc!pM2uKpR|I3M0cXLBTdMX?dfD&^6`J&cFqm! zHC3;&w-da|a@C&Y-0G%9` zCdvzkotD?dQQ?vSBanbx05KVe>V*X-+8S8cRW7#sO=s8S_Ijyv<8^~(DO*l9*M*~D z!3Y7{m)F>|;VE!OJT$k0RR{gAua_R5xBO)Ee!#e>@6s<;*P;3Q`1sj(bB`E zN0k8Fr8Z3jfQ%{@9E9=xCk{RGOfg^317_Qr9xH4iDUy+75NoYn_s+_;7pVHgv|?;{eDd3X6B2(=C`> z9L^rR`>@opb3=<7(g~3fATT5*xMUIlP;wSc#v&;~00A^VlAS&_m!B>$04rvtx{}3g zFx{wQ*RMma0^#Ql(0jDpjgfsZyoNkKu_d zV`g?@UZ@JW_Ut3W(fU=JuU>E4{{BY}6z1|))LMJVdbBiIaCdBZ+Mde=(-B?M3Qium zB&JNB7`ykb1E2l!o5GR6V|U%Za`T#r>=*5ug-vv-57LJUu>6 zz9ufArDej`P~s($pUqP)w1MaCA7kI8kWYP*0%5 zQ;Fq{HNW|CjC**ju>A;FT5fz=*9|3N*|YOTFrY?)exVdh#DuD=i53~q(W9Ad4=Js6 z_OMcaQ8YX5%}uh%S<2jWVN++n#3jk&xgda<*3GnHrnp7|72_^WBcXZb=_25M31uG;-CKw=SL8JRMeJFAKz}TMe z@6iKVvOQJPTti$i$zsjXSW^UDapcL9CgWtWrq#`rkz;b{&IwyGZ&g|%VWKHU7^%3{ zyrS!t-RyXA=Ey8#dduo01vFq>4=Qb&lE+7EXVf$-St-_f)&f?gN|h>As#K{`<<}K- zr?rJ20B|>cIjJ?(0jW~umqY!UsH$o;!s3ieS9EPW`_%EVt>1k@F_fOOdqcG$Mz}D! 
zczbPy21K}RrSTG(%uL5mlxgs^Ex|T)dqk9h!a@~DM$-tDLF=(Lea+f z>9)|n=CvI!L5{J`S{ZO$JsL^~!F=EW6?|U^a*Jj1JRv1>z0%ygZc8K#u397f^4!yk zI;e}Rro|jRR1RC}-n;ely(COf#P^SfQZny2AGj)9^f>Nd$!>ktaMuQS-Tc zGMSv2p0!NNc5ESp!chxaPSF;=7YK(`Rdrlf3R+mq*0(muvO7C98;*vg5WtzLD!ve= zse8U(&J{UBJ!ByO2@%8l{8Y- z`+XoXA0u8w+`P}+f1LCGD_w`$7jLdqbtV8ng2aatP(C^diGYZaw_>U3FaoM!LyhR+ zbOZ?9thF?;Mu@BTb9L>NrM#;)Doh&1^(+WU3de`)R6syLKtRB+hLn=Az#Bdw;G>7< zFGcTs8D$9ojM;x**!DdU>JEk_AmBH^&@+R?h=^7vR_G9H3{sY2tk{{H1u$xUH)c%^Belj5Sz^`><9lPAG``|Zh53(E0FZ1mYn^RQaY37k8WM2!0anaJAUBT_pa=H92moUjZw>$O6e_%b-*zn)0>HYi z=DMy%0=Y50vNAuvvbvFfH zK)`Q+;U5kQS8}ehC>wlwkQtm2jGno*dVfvV^vbQ(W>hb(6&m(7^&Hz?T-iAD+$qS5x}Im4nvl|Sd>}AXRaF(+b%YSUkkM$!vP!zDyN)MOYKE@qnxZIn(Y0*bFjXOi zq9~km*K+|G2%+j4QxOo2kP%N;=%G+Nl{CU301{;n5I_k8z;wer@VQR|fSu3Zx_B+W zys9W_yteAdXHKG_*oETM&GG56S;nBExtg`8H{Q6s^9%ctXlZ=u!Vk`)PkLPMKHOLH zm0GXl-Fx@m=-EQDKH0guIn~^UlI2%&w_d-cscO$--SJdBgi_&X3oN<}0T@6G2LQ)$836%sMIk8#$z(DvgwVqwK3USA)4qTftyI(QPNEyufnU}0qW^50$4>qC7b)0gyBqsnZAl zp#7cyFxGlBWu$ckFgTcuY@&?EDz)Hb8xRl>5b!HOM00ai(@Y?`5N7SteYTFnr`gKyl406_wk^Ms}Edyeb-zVCah z3(NUTc4}l)dO`{zJs%i4j#J;-ymjwRLShmGLp>VZe*6F+0LlE~>iJjR7LMm4?Krw` z+n!!PxpIH$^gp~2i^a}=`<;$~&YG^8>W&&tU}uY$U%E(ygU5!poYgkW zWWH~|GxN?f8A=~G5YgOB205TWW2J6hq_&2nxU5E3v`!NlBkS!?GS)TqDQ(^kUWZ)} z{UBVbV>@XUHvG&Z!Mjb8f{()0?C=p>zx}Y8ZzBFQJ!K-gbI7I z0+4}(3z<$vXRd{OnSssrskb%XbFO~MuQI)gg<8R?X|?Hk8UAxkt_X{-cGbO+R^hwO z7$Ht$b2avLZcxu+RW;480xQ0sBmgg4dc3D{jZw!q;#00xr~1i@O7kuZ8^7NQ*Q!Hy zTxIC^D=7UdEz=N^f>%EJAy%5Gk z`{FYjbfok-f>IQkZ^(!sNkFm}Jpr1mhbVLDS;V2xViN|SXyn~*U_TAoru7tkPMqgJ zM4HdkQilW{>LdzaAS+GtTiPL+x{i*lP8I|KcZOGb4iK93FP*@8e3ua_2kE$Vd!_1JG|Ob(bDa=!X``&pu%`niu^xe;@2GAXY*#A(_#w+MkZvB z%k73sC7KvagtQT&%J-c-akF<`;x}har~& z^s}E1jvOpq(H~d6X7AuZ&x;9_#v!@Az>pq(W>iQz77o+4^U^%Yl|Z)0aN0s%phCM^H_D9*U6 zfVDQjCU50^6gEGGK^q@It+v@~cOCiD%mm~a2}@Q&?(R<(5fyKrqRa}0#(H4BY6LbV zOH_Ec!X6I*Sq_&nvR2$_90?Gb<5kn#NdiEP`S2X--l%vgS&i$0mWm5>DeU4E7OVyS zDM`LrUH+-val3$x8-|^|ef>1Obm#b?{IXUKeqrjcHN`?w!JO38g5LfecI^ROt$25| zI!xgzu;oM-OWe!R#oW4ecaM)?zQy@ZQI?$$B+^tuo^;n42rK3YG?_78$BLkV95PH-c|_sh*Y=ELhGBkkET zaC!g5KBt$+Sc}-Q_ZF6+M_`p>v(P*)pd>k@u7N-39Rp0JXGzWT9Oq0SP=vVaz?=rR zQ%aNCoYvS4vgf=b8Z00^F(P_Q46t`Iw<0{`nDXCkXvkw^Kg`c-5!kB>?&h(AWmrNu z(|Tf%q)v-Pw*$7|{AS;-YeQLS>&Khg4FzJI{%Bckeg_u8{A%x7?^GBgBmv7c#DR)~ zC@yUJwXImQ`SWwxq3*ySe)OEonX{1>NE%BD|izCn)iFOaWd4 z-pWq?cOU3b%-zaY9Dnh=!@@+cWZgvH?e@+QSrnOdzbnj>vBJWVPVAWC(aW?UJTxT5 z3_cNHAqUhrZ9L3&^1trL(WMK#9Pvbxhf>;0FI{Ca8D(a)pu#RiumGiLUE4b_p5AkT z-98ck?6)DfT#3dmXGDO*7C{+NLYtUK$_c8AEqNb68Hu(VTS9I7C7nXhj;Am)R2>l; z?ZSAHJ^jNxj7X2LV&DWa!7loOVx}uy{30Cy00eURx*qi2nzFRHI8J6H{G$9D8mfSS zpu#izp|#fB=rQ87tLgMG(VJo+CwF`$@HS(pJ^xWCza;3vRZXv1*H!e-9JI66I{+nZ z<$gKUfq5ba{KgINcUz7%UuV9Qzy#@DSj1dRrE$9Bdqj^ZB8I0ijqPuu*gAXT1+kgX z)RbI~wgM{ED34LUhLUi{l9`I^rOpj#|6Q}2tg=!{w@{n^>ynB-OmxGSVr|5`DKOxW zEg+kOh=3+?h?l#J@VCDFP~~5_GBl{V(tDnb)j+>RjH)v0b_>1t_z8ePZPl_}zb+0h z6Hie!{`iLpLMw%EnNp^)TY70^MEIPLA{?A<{A)aukfB_;jRrvhQB_ZR0~L69L1=aQs^gL|~P%wo#>=|0;J-^z;fS8EOF@ZZmA)L6OR0`ZW$6SVttM6$AcR&ZWlMoM_y>lDC^wAlDnfl|um zT0mJ=GRVtkNaVOzWtn5>av=pkB&<`!2yCEwW7N@kx5gh;Edfg(*t5_O4dvbE8Q{GItfzo~NlF(G++$Zm>x3-Tq!f8-elpJug)@#q8rIEu^+r>YsTnEfGxAOG%qLF?-3nwvjl z1t1`FWxXac*`zs)~1UtaLIqeu_$G zTrU33Fo_xAXvrZKR5<&cF11H<|*4 za`3@{;gT@&*Rq($)4)?%W*Gq|mqoEGCvf%R{90?7bV?o>iTLwzW>)EhNsx|-YTgG| zGkj@B(mm+ixK^>Y)#$I6)yX4C#qXzUrmc;CxMyv`r7B(@^1%rs#g?lg1L^rDhzd`4 zIcYf3&%R=&JJLg3Hr34Zc4O$UL!W_N(iy;%Oh~$9oS)yHLq6%-wyh0yAgk1&Ug^Ar zWve>&{fON0g=c0{QVO_s<$e}(Hp@y38y8h-AUgfM-2zd*Q0h5v0(lT~wzA``#{qJS zrwaOgisRJV=577UzQaq+O=0nF9lN2Tywf)l)XB1x+M(1^pw#p)(VieWJ%T~c>m++p 
zQbeUMpPMk}s+3Zqoo4!@DYXM#T_mZgrPeP+)-NcqaPYj--NB!7r_2F>@MvvJI~Jn& znXf$@P``uDmhR`2D=SAcTTeAcmJZi-pCpgQRZP2w6@Tcm%56V!kt+?YVzfk>*<^9J zn=C#~RxyUyyiv2EVXiTi-*$#usj0coiHlyZ!=i{jPV6V`86zW@R=~sL1h)<^<_B!f zUrgErCn8fHcse-U?b*TAGv8VB+sV*a)rqFkiCKhhBRHJ9>yLQKLFzJ~HwGV*=(9eB z_Ye=MrdEW)j~b;P$%WOu1GU(m%H1;kU=rPg1bj5uJkX*!i31mx^W4*CYHn#^UjO!x zOoH&yZ?t20c=!-jha#l{FT~(_KB$et4O~q9tUFhQ8B4Ds#2KNXiLp5+Ip^q`Gqkt2 zfi7kOI6qiOh1VzNXSg!gUU*( zsZmR>$kn!U9pP0Va@ckrElp+7evac}o~N)aLa5EbP`$mQo~$>42cdFbG$FHbeSpa5lQUwLO&l%=(! zp{=&=kl|f*m?jL`DX*2jQ_vi_W@FpwwH>wlEn@oS`&Sc`fc6_#GEq<4jhmyJ!S+;* z%L_*s+NkdOOpqMi!=AwA?+`IT4;A+eUgST&OezLOgkST2r6= zrEp2hzD#1w_;}>h-GMu;)m*`BY#Q8%4DJ_8RxTh*BOgaIM{wg?fVR%Z?p#%uciEwB zIxY2o@7Pg3;>JX&({O4SwyPa(vL0S$Y+KdkxBWH11S`=IjzU}%KsqL@283{dV;8BK zUY+NjLZ=8e#_9Tm5!-zH8Di!^0Eea!OnjqfB~5eyI~X+Y`o(e43@3?zynizZ`LN_` z^ARKy{1EgIfqDN|S1`<|bz=`yoAMs#bY8Z|yYHGLw#-umwP0Ldk*^|%)u?h92W~k3 z%>tISf5dR3k@~4r0%k0cO+vs^kx=B$$CI*g;WN#LdiwB+7h0F&*2ePR^4g*1Cu>gW zCc7x3(YelbJ^jv|iat^3ZfF;qD?qfsAO=u8qy~}s!X}>JOCTzsUeY|Q7_#-{`PTr= zlA4oh$%#N;%YRpSDFAkA9@+F~aQ*YzbG+l_;YS^ZK!=7Bv3{?7=U^Q2E2cNd$bp(o z#Ad8_GCc@yiYE7pHAdBh`vaLyiJwT;xb8MOErXr(kcUD+)4eN7V!5e9Kx2v5-9R}V z4Q<)`>v#}WMDW^=KfFjFxcY~oH4e;;IhK~MY-#oU1{B$-y)GzH2e1&A!5@kw^mPGP zL*3zC0Ui)Y4R?6rO3+q7YE{_hot;;1n>NX_T-#u#V(lU=vqB5rcej-C<@madu0flA z@=M;%s_kmz{H&lU%UqH_URP>bp`~vcu3Z6%V?)BUCdg89`nh(^RJPjS|ZYGzg0(k zt6O(in49%3jSUKfl~Vm!R8+aih2aWM%UnW6cNK(Su(E{ zWDn`j<)fkyox?Jn?sC(~37PNqBIn}QGLDuWT6oJpk>yeVp`lFtsh!cFp>Z;uX1{hN zc##J;EGa9k_5RG`%R;$%lMGX4fGE?KFpp0kK!9NnRHA@Z;S$wX89+?G3JNr${m8JJ(@-= zzOaEnSiXR0mE718#H&V-0YQMvh9$1v!$@Qvn?{PK48|+eHf%9D>{XDZk$yqESpCs- zm@y~ruN5g=$SB&B5N`t|(5Pwfki~GLsBXCjQ99?l1)@h##Nb*!oi1a1$5^ogm`M|3dq@JLGO- zQY`hw(EKi5*=1_6+1Kc_I5@F!Wve&cm)%{JN~kU1Obg|ygy-pEaW0|^JaF*UEHKc| z$9(Hw`ne-N)$2oM?-VftH!`Y3O6e}r(AWr6>xZtM?vAZZesamFe|XV8)HNWsH$KVU zAOrg}w~BLsSb76}hBH6qaubywmlD|+34mP{xg(HJ0**89pwZDYs~EHtovO6ii{lv{ zVO|WC*n0dp%#yAY2}v~V>Jkxsy@Pf45%|2$=j*-hEH^v9J6UhHt_(SxKsehTzs!{9 zSp6w#`xv&W!1Q%=w}ooitg)6bluHWY5QKt~_WOrGyU6|8YR&cy`QGyR zQCH$~_&vO612#>STJY1f6v5TuMu#N(v`r4*(3+npuTSu}F2+adM2URQ(K(6OcArgk zd7c=7;~RQ$5n!e0qy{1<&)X*nEQZl@?HKkR!mA`wmBrI#osiSAaBc~1?IWK z52i*k@J$Tz*k-SDD_;Lm2Ka#t?NbKIHmi-9@zKCwG|6QBG#(d%Z_S4XIdD@4`k}$( zZCF@I6|~qdVK7O!*>Vqh7LlgO=iQ=im4}XrrboP~3A~`Uv@`G#!MO~a^bC@P1x})8 z8?qP5CdSFg7_#rjnv-U$M0&oVFRmg3kF=G@kJ-KVKe{cv@tDuvHM0#r&q-ss${2(m z1dzGF_w+nW8*ffTUdLP?OthMK#b;^2*8luj4-dBvDIZT_8W_u2XkHSN`)i|2yDQ)M zCYPoosEB?__ZP4g(sac??^-8sw${z-8FJTEz}rl|T+;vU>)6_)Z?%|7NPva2SGFtx zedz<;VUAX872A3}1`?AtuT*3T_k1Y+(kywdZ_C??DY(7?P~##4DAn(M%AU4=xe9V; zj@Cff0W31P-W_*F7A%;-N;WUy%b3H1ug~)+R_9w6toadaws8_*A5_ z)*7(g)L3e{-j$D3ys9^88u)x#P;Fl$d~?rO!N54tbrc&$I{poq0Vb;t+>-pUb9J3q?Q4Nn41Z*d z{lg_b{$RvV{va&YVl>68hC|)JnV;P%mSMyinNmDaoG@XM52)|bZ9N~+mq8l!HNxWk zPG)Lmru-pdHoUO#S|=ZMR3lr6TSRZ2Xz}%^h!B0#CcJM)B)|+NO$}fVb8DnQr}{ji zE|~p*gGViuB5_7tLy7<-5gmH4*fzRA;h-4}3c-4c8fiC2#=xe>4n6BIW>1t+zTI^R z+4S&KgZ<(NBF#4L?!H(Jc{RR`L0~RE-D|e!`g~c2!!{iH=*tmT-GiIn0KRc3e#n7v zTl5m?nYM!ssZ|VM^XjWQ)&0`agD7+*$Zee5A=_=NDk%l?M*g|d1YD5|&}xGdds07l zuZ!cDogg+99h|Jcvq2@QMekt?nY!q#p7*_5a?aA>>M=Lk<0h+F!q)G89%Ac!ZM6O0 zzP#%d;TCL>kpP~6M)`dkGYj!j7E~Qp7ArV?Np5~{2Dq_?sD-0^X+Q(s0lAW0%yP53 zxpEzps2*(xQ3VBF78aA+8Xn&vQ4Dr_OkZ!GIiBuhaMvQ`2Z;MxT#cnSu7~iMns9yo zq_@`>HdPHZtKiAq9?Cft${)FUvGR?FEuDjG55-_ePI)oOTRmPEJAyf#25oLl9gPg%v#^P;00p{a z1>DcUHkKwze)q>by{YozuRs4HC-4jUpT@gBhq5X;y2>l7!4}TOTV7vK;u;{GU%#UG zKA`yC-A}Ww-|+;97~Tz}Bvg9+L!SoQ#jnXgyl3zJYUK_}tHS6a19 z+7iWdhB`*jlOGD?>NO?mG)f)$==l*&O)u1lo8Mr8meZ8oCQBDhUdtQoC!2lo=SNR+ z_P>?*${s&rfT2736DytB4a&ZB(e;G7CB1Oh;3pKr`asu&!N2p>3A}mVLt1y2l)v9Z 
zj;6|0{nU6bu<1MaTvE$qtUv?GDiMItnyg<}mW&%kyEwvdgQ>QrN89)&L0n6oPbLdIb^DK*X$Mk!gom z3N+Y7W=ubIA z&e1l+#{GUwyeoU$#Y@+V-?{2^VQK@=ku?J|Y8UCQ*{lbC^rC@une%~PNUu$lTOW!G$NlilEQK4q#=3+3Ju~gVDbvySAIDsr1mIeY+{@eIZeqVX zNKltV{I1_JwPj$k`^-kx{?T-yP>1wN9#{0bmUD28VRcY%Fv&c_lS|IGb{$N<+3F>g$MuwyG*}+gqqZ)5lGPJl70I?aV#*7JfOmM=52m9HYb7JN&`6 zx9YFIOxl{pzP+BdR3ADd1n;*F5lsZ0s{Hug+)&ZUzBabS=-wL$6qHL$-4!sl*7THS zuKkH%{#E(xmrfkh1AFlxjrV(=?J|d-R%BE`=A!dJ#PTPpgJ|oe$svHTp@14OY zyKkP_v|0QQkDRfmIUa1L?T+jdbOvBVJeIqIeCe{9IFp%k#{}@X@BIY!_=(kEU*tgE zUjM;0%uX8PWi7Mr+AwUHU-=aIZI?GD1Ab7D&YTX3k}RqJA!)z?5oCPb&#~svDoTUG z84{;N_jMRsFFcV9F?zuUh{E!0RtK{r1zBQsBq1RN6BrLZ9*3lkeye87kN-u*`d^&> zO%2Tq6cKh)9Pd(8T2dxQ!B|3qfA!e-Zh~ZESlznyweJrhGc1$G)(rXw+PL{AQ_cAE zeLk$~{QE@M#HLxu&du31JlB^h<#xZz9?U964{bj+A?X$z(KcN+hD|r}HA0~R;=+E9 z%_WB|9Pg@)Wo4^e?IF%CClpIAi@IE9a2Ll=t*tR^uJvClCcv+tvEjrnhan;@k&e}4 z!WhOU4T8DjgtCs^zM)OaFq+WuFwS&Ohy#U^yrZA40!J>%2=!~2m>3ka@!K0Ur?cL5 zDJv*w(qzXdaOb-?IHEfiQ$5e|SLYn$LWK^Ry~sa5xJ!9m-dDR8vN~D)uIlSj%TH3! zg2|h`E}tI83~f#p#0^RjnF_21S(v@rVk&|>mC5Fnipwh^5PmV%sQi`A%M_KHdT^`3 zL91>qsUp2sLgdxFEW@=>Xmz23;1e42^nPQd6r?d4xjlg_K---UE zHIw`Tv;4fv2(z&riz*o|Ga#ddZt+&;k-Er%Qn5!mmp1R5k1?RCDrZxmSV5qWdEg(; zx+j6oq>~js>M-~xN-y(5zmphw{GC)5^q&AbF4sg&O6_5ev+#y~cr-E&>c&C>4k{}e z$Cl@G=9HBR_U8<|k8)+$hC@u5!DXy0H{y>12+?_-=W|-foX=~#4u*6%&g+XK1oZ7m zt62u^0VfGMKbKLAY$^z66lwd~Xai@W*;n)J_Z;^Z=0M2?_tSk50m1X`lFg?MwN912 z_46H#o%XiEpJ?%{~BY;UPsYYTiCNWbKVHh;J{kG4uMvwt+kMxu5l104W`QTN z>(@}1?t&98ps;aU@!6Z#ktM7AjxnpcO7)q*4U!6n|+~eQy2=*is%41Srr9P%;bvHZceaRLTI!o(bVHhR0s6!~x{YxX#J>*XJ_O8A{=u6`@7kFnvsWy zybP{Q2X_NkqtEHnbwW_k5#CPmIcO_u%wb|UO@yH+JUI>IGh{f-nsV~u`W}(igOZYNy8L_BLozKI8uAS zobp9kWtjJJbBENhyDTpW>>W>cPAY`aYn3HEu)9Z)b+Or$g(WYb**PaNO>A?_$9Omk z>;^*S;p4-<0FZAaBme%Rpm(hwPUb*|t0>vzLTp2mr%*vYs_8xF8#JL`Wn~{{Le(Yq zA&^JIXz@qT@-JH|yy&g&sSpploq?my8Oza2);wtelImr{Lh!fTb2rrhXEC$5GRu~= zE{z#uA~i;N(>4pif|5guueF_w4+6=Xqr;cC18Wpo-fRY$E);eaEU=jks8_2^J%W;x z)90rh(M2cNt&#dhyX0A}rVB(XH0lTgOfIq8X77^md z&`_+u1J#-6WEt6*ry)0gj`FiT*F^}N6^>r7)+SDNs!!wf{Txpl8RHeA1EJ>=+uK!( z`a1vqRwQ5*w!1xE%nd-Ai-Q6tJFj>8_J&r;__bG~%e1`T#Tz{YXlS509#$)lj*haH zY;AquzHhqjk6~8Fzj&iZ0Phy!o(HOFXfu}{EcY5_$R2GCB2jJpQUPVX?mr1;NW6W z&&;znrKDQ77{5-1gKaF)q#8U z?>`DDmXV&ZeR-@?EN$BFzt5eq8B%DT|9NzT{CU0QE)@y)xdd?*t2+QC`$~jcB_1b3 z2|}qcMgME4L1dj^8b`CJ1hmNly*P>3mwxr&G|6oBJMeL_7fnm>J~~jDWsn$YVD*KS zXJ)54^zDgp77eI;ltT2qyt!KFo1TSd7?)++(C@o-6tBB#Q8XGGk6pWt#p>?~-zXNl zDd1M+un;5wqk+ibU(%&TQ327|bcnP`KqsE7Z=iIZjL;FxW|^8mK`C4y(`dp}?v&y- zR#$eL%%1)STt!ghVvVh!!cU?0zfg95-blOUW@`hq-H&34$ns*ZrMpo62~+l(?UZ+X zYq9V> z^}5|eTG37hzG0iJnJ~>gH(8I4YCbyPP09Hbr_A}Q`|-w3RSmCBi8Dsr;DG*#RUEq7 z&_b_Ca!y`RUmzbd{4io>vF0TscBLg_bKY_WxTAVG?tWAYc`59E88my^47pVMdN|0z z_={Yy^>y#pvpnX>shy+jQ>|Fk&SgX+5qp7<=gDlgXC}lr_2QXuP9o?2&HX^g%SqvD zVaUB&*G)jkx$W!z=CdLrTJm~-OQ(Ri874FX83R?}C#*cc@U>dizMv8{At!X5lWcx^ z@NOpPaIWNd+BPv9cgo|Xf~vhav7P7CY`H_a+8xl?WGJWjEM6-UW(!+>yF~i1 zyzBZgzM#9np+@APPfXk1{`=M1w~ErF)qs{d=bDekWK+D}u$Zs3FxM^EhgKKz*D*XY zvg`(hMBDtfs&MtJ+$+=Q<${9OjHC6S*Yt}fosJNM>Wfwz`}L+5J&mwD!3wcw4J zqAEW`hS5@}@3+>js zjVJ=eUeD*Ow}ht?sQcmnGzLH#=@6HiMg4VZwFB}S_7?jsxz_uC5O;{DR)cz z2-Xg(#l4ZHZ5_Z_kQvPT&42Z(iIN5&0Bsbf%PQwz0HvidzxSZ`xcU%_Ar$hTOu7U^ z?h81Ae(Agsq=iqPCKYRWNX0~33K!Q8^Ud9cgYB!pXdqbuLR8saxWZ!XX#O7IiK$>s zuSSEdX!UnXiO5Vm*JyGtb}7gWMX^=PzpI(1#sO{SZu;(viCftz0Z)Y6`!NEST@Qj< zzN&**D`{}OMSO>QdgU3mE-D|*@<04eE)RQnI2jn3N29ZG z#&_-T95rD-_6NhF?YVIw%gasNsZ|hUi>t~mrsO2cSDsZS1OVTv!u-Ki*B(Ch*?HP0 zN_x`xz=y4KwL+1XnQ0YuKD;w`WSpcgseuwbgG8ns=aq2$=D`D2uwjA*S|;V31wdl7 z!89iV-6j8mssps!Bjs2O8}aIm#OT0XlPBDxPmTs1hAJye>#0U!ctbT;TJ 
zpXNFxI92kiV^1Gqa&i=ya=_JSgESj%`bl0+tc_h_{9^V*${6)l!g;jRV3z8tYLCw8 ze=6{5P>XNAfTod=@AlDsd|A4c^W5LHRibr~-^Q-HVK3k%Fe}U@(TueAz>ZV^n(4_9 z=0m>ZdAhTAb=)45cr5<Y(*y)XTfSxg1RqJ0U8s zgeTZgW6O%zq6?ruwY992}DSX($NlyFYO?7a-y^ z?^>lti{`J-f*(!?scstgzNUzGps7J9co?M054k^<@s<_K<bGypPgD8dhv%f!%71cf9(&j--}Dxqsc#A%zo>92QXd!97;it_49psZcq{3A9-Y`(V!Mv&<}5|9zL;ogoXKvw&iGZn#HI=qFK<*R zHF90wTbhE%s#xXPaOp$n;8ko++_B>4fOerJ@wd0;VZ3-}=aJEHsNt5#9uwStct^P} za-)T&dR0NikY3mktk}fzzu~HiAsY0_#p>T&cd+}dR#ZuoA|S8b|MceGvB@OfL_U%| zp9EqD@w;d&>=G9Y+WvR_Fh77-=yx%_@0qzSKvAcd+DnH-1;YR4i-j41hx#cFX=k2C z`O>oy53kV@ulJf*4;hDMh8CWU8Jp-CwFFeAaio#Ar*VXG0PNh&HFUHm&-pXE$StRU zo5nC+>QAvWa8RzcY9hU5UV}b&`uf6Xg?zC(?lUKa7pDs<5`t1}N2Ba?p`Qal289gi?qMI-I(bBPtk^~Z zC`S&O!3PNrjoh(Js04Ib?V_DR4f@y5LbUa#EjFF(Wd!9&>$ssY=0eK#7xX{;xS~1bP zz2F5Y;VRn1qVrV8cdBmBo|+{W8SLZH{}QSBf}>dwe7cluf)p3zL{aoLvivo%^D!Y2 zJVFTwJ_&O6XRw|QkU|R&_rmi}htTLAtzPdg5qQJAO?;hO4^NiJZV+S96g2!9>B0bE zbabH3%>C=mfRGRs3H2OSmhIrzAzSXti?v)H4xZC|LHJ5K%dhDZZc!+#!)Q34YtrvF zvSV;7o=hKqzh&j>?!0uY#|?$}Cwwvxb^8(PJs+ndL-0ByK0Ck0a>FK=_pJf*z94px zQv2fjFQxZHzn|Va)oa$ShA1j9N$H1)lKFV?{glB{ERi7f)r*piihB2PrKL^#E%!-p zL-d1>6Nl&GY95{bgTKy0C=O9V=1)UqHCJmtgAiA7hS;GPG{ksGB0>@ZQj(2&L$bpI zlaKsEG8$IMJ)R=z)^Df&#GBS2*@Cbq3j$r9QpdAgCFTNi(wrL`#w?qS%@@9&|G0dOkNw6p}Abd(7)5^ zWJe~Q$YCH6Iv4f8hkA^N&1cj%UqUf|1|c99k>}_vnT`Ha%elLx=0;RumWNx4(A1G% z*tq~mwIkTrJ!zC9txUwAqNq{n`lC~Eq!$n3S2|df7UxT0luh#hekkorO_KQHMqQ1d z$P}N__%QsViLKuboeyr5Y7picJp2+bn9BYq_X|@Bh3I@iIV5rfi8OxTz{jIAa3H#L zqWR?FREhC*y+7-de4_R4Kz>P2?(!q87OfULab6aQ8bZsF%gW7uRucboL>A5fQd?ILXZNd8`|;bXXdQZ@H?t}Z@!;D zeQ&eLmWA+2XeZ|W)`LlQYUHpHG18dG%O&5cO8Wl{#U~_;6pUI-UDs}UHmZj(YSsu} ztXb|@0D6+|Vry8pYRO{ocTu~KKeuI3nYR%~-mg>lL`%8)>M3Y5$^I-Zhj0587SYkcJe2y4V9}YB8&{qiTShX;{U#!so&J`iMLr73{{qQX8+syZ2 z)F*1$$+21cmbZoEx9YIsuqxZQOVn={9}s{_>WiV~(M=b`<&C7PS{;ZE+p7QY%8h*Y z+W$>h;cNMAq_DO&L)0zS=Og!pcEj;-b%PA95_}Dgb5wFh)^Ll``AU`YKY*_dwN1`u z_b%;oBj*bAXfl6$E%8#Pr|E0K-^*$(Mt7bG+FjaCB~5Xc92}%EG%dA+qYrAy#I@|3 z?k(lPv(x4DtgB+KYpolgWE>;Cn%jk=2p)#dW4gKHEwuKV{s`nl zrZ;2=N|l2L4hsSG-BnSVbPluStw}>a6>2g7I7eDzwHdK<+Z$Cl6ez-}JSj^D%PW|S z+tiLKJ`ys~%iAlB&13VH$yF)h4MW5vA8{m+h6#{?oNpF)#ag=uRu*Yl(FTT&Z(5Hr z!|Qcx8?Uqd(!+6>(A=Rb13dsqTz_GIKt0zGHQpPf1!qjfzoMqudgm!c7k*ZX-~S;` zIQue-$)4XKr&m(5#H7>=kz?=Qk^W&vOwH8?4KGPGx2As+Mv$IQ6HoBG4ft+DSJT@ zjjtt~j5KJfbKchhSfRgBw9{r&tfp-fhIepuIjPY^uPF#veccT~j>d9|(KU?N4Wj1>eqv&N{P9A@QEwk3w?sX|VAH+{Fbq2Sv zlJ(gm@7wgfLRs7hWI31W>(_)xEV z2wz!|X0<3K2cYt&gA|igk#2SV$J<@1TnMy071Ovx^VJQzWmFYGATCIZ#50s>NM?XM zkrNXQb$V}>rK!BOY0jn@n}#+n=KbMU#7p`355B(qbFiHD2YmHF8Ps?0F0ZaDFh4fv zliNqg0;TttRLA+erpM|i+F|j z5oE-GSU8Wr{OT}0Zfd`&SzlaUs?t>SOZ|ock_-*)>3NcW8Lka5T0XpmcklyD*!AQz z3dH^Z(JH-{;7%Siq;K!Md`XxB1bHvuEn_7FKyg?kH!xK5h$BrB>316En^*t0Bc@F7 zrM+PdiZ64YLI}o-&*cF{)IIq1Ax!ufWAlB(rH$3f8e1=Gl%4@CZQbH@OqJD5qJQu` zew8-p;{*TUW01cA^lI>OR^v&DRaCgZ3I=lIFq7shsq%!ishofKT%X)|%p`FMcJ!St z*XQo)EMc$KyBko0;@SWa0yPuYW|^$fRtcjR(cCl=_yV5FYR5N*73FE9N5(AOR!+%$_bu;>H44m{Z!#oX-*`F8#zO*R{3;y_2lMjF^Yb zK4+x#{l6C=EwEdR^L=2iHIW8&zIXhiPuieO`I`fzuO+mcp-0z<)Ta28g_|2*n7S-x zKUe2n@n)sG9IT8r7_hYIEo3Tx%R9wCqbNDn1`#&tQed0!IrDistQaiE2y}h6NiM<* zV)wg+ay#mYXWZ4ew;^bKL*b^FEmYR%pme>4j7&V(pcTFQU`4oPkMs@=QH20a0!IoT zAIubuo0+L8h1z8JzBw2@u?=#=p18cJ=6&Vmjb74&F7n^oXItRJjr+@{psWTUbcj2d zV6h^aC7C81fL$h5QpxAgJ`T~XCXm&zdIQAabBIf_Qg`Gf{W?W$&zGWG&5wL0LMVd+ zQsI1;z=zUccE3u1);Zp8CTW}&@T2#I4*d)maq&XipElkw>D?kIDU6Qc0aq&^n?D&HH=IqLlRoPdN-^o{arMz`0?w_#+ zy+$0iEZPn-Vu@)kk2P}1?9^)azot5y6+vMxegdGwnhEQIb6A*E;#_TF^up5|VcjUd zm9dTl*c){DV1W5F40O3VES#$A}orw!YxP(3k!F<;;;9OlVX?mfdFb?uU{^y z7~=~?CG7Bey+Fk8Lj2`C@#x5jFe7zI>+tbibFT*%#--6k;=>tEJ#Oj0O35OhY+Qe# 
za`{qVk_XF78@WrhklWW6yW6+Agj^cEYKn*z0{ArmxvOX~iWjL^D&1)2FTeZ6_M%le zqolk>)|n~lN5wK*AT^Nc=G5H=u?6f*0u*u~b&t>m;}~h$d`wrbdFm1zLtnnxj5#+E zF`oZ9Yz{*r_)}R-7fh zibx&AMFujJ42c)SAiCj?N{D=5EbCixZOE=IV$yBww{FCXP!A8Jtb-lD0i-Kk+*=jw zav>&ca`uw(N7>p^0D8y=;UH6B@z&W?m;mOZR;WY19K#Q$m=tRN6-rb3NSVmo-rvaa zO0}>DRu^n+G<+;_joEHXF?D!g!(dkbI9iUN>Gf zU8O9b6s)8+;C0{xNJi7%@xN4B!4t)({}fBUREG0{kCo6{z{Dgbcg2Y@#UHO14Bdxo zawq#!zW-tDEu-RUf_Blt-GaLWf(3VX2oAyBA-KD{yE}p4?(PJ4cXt@v^-SLHe&_zX z>&}{Av)1h1U0q%E)YH{fTDQzUr$Q+`S7B(qKc0^`w!G?x_R?9*CKuH7bQBcyS@Bkc zoK+pUv)f&OFT0Tpn;vI$#dFV~yGg(q3F3E(T~ zdUo}21D}*!MM9+q73%5(LnR}n+k{7}<+JnVOTGH3G>kMh`phMG0IT-z_xs9?2^~4y z*w$xZQ8W@!G_cdlb773wI?MZqst_0IAE|uoQ#e@rxum6?P3BWYK=?wu0{V8&DI7du zRO?l}`it^H`sPdzFF&rFeq~Z+)D9w7KUEllDd`R-sxWLZ$>MN0q}k&JSjPmaFq)CA zxiIkGK|cY)R8x*YRBJTf=WK?-N1y9sKXM`1$YK2w{7IET;2?KN8qV3XgFw(y$4RlHM$VDj_l)rTA!4wMB8FIuKt!s-27cLvs{<68oCkOIFd^{XBdvM84 z8NE>MMa#p}PbF7RKcE!FNEg;<9;{cx!N{|V>{f9UF~Ahuloe*J6M5kYmgf|jop^Rz zmwP4cxu~z*B~y^Ko*&x@Hkt+kf`*^2PNQEjb2(g9nX2_8VsjcZxg_3GsM89nh+^rZ zuZ^IXDF@3;(~QW1&dV+BT6s?-?KK-Cp!Utu1$;k1O>>jd)1Oa@bCHR7)1da(^JacU zME?5_gL-gg+O(Zq4-sEpUi$6?W2+m-m^&eRZt3HMa52m={T>mbOSXRHxKx2TNpMg? zB=r!)p>ny?dazxdTe3M6#ajkv^6Ma3^nhkIZ4C>lf5 z%KQ37+@niaj?w&tOV)PC>(=L1bn0vZTRUkz&r6aF(nTT>m{dB5^bTTlFf*GY0|ec& z1T0!MjR`cfGWmJ&0V``B4_50t=RkY`YYc4t8mc4(Qqvy}cX5!r@hBHuExg8>Jo?@*3yN{pe@@GHKeOKD%Tj|l(4xMyP4CTH#} z8^E&Ol3NbOa29=GVPW%zio_6cP{bNBRXb87Ew z9eg#f;-97NIck33e&i9*MuG~zO2o@C7igm8#w5S=eLRTc01V&-dyiZ)Crsf5y>+#0 zbGu<`mJU*sEi_0kyGmO>khK(kgi)0kuSB}XKf%;10z@~6ic4mnax2KC63EM(G<^#Z z#e#kc`tj1I)^8cstNTq~T-X$^Sk$I5K{cvMhmn<#XD&o=(YLq!Yh3JJGh$Hx>_yIR ztzc?Tip3caRzFl%Z45f_rQ9I_{U~l9L*j?5SS~EBmdd8OsOPk#0{L3(n-37I0Wlc7 zUD>qKbd{ZfgJ$A?{yGc@5OV(ZySj{zGyoY=AjpBj7S>e&{FaWintU=pCvlA;xndvy zIY*4;#vgjw(GVHS`h|LTTcX1^(QEG~I8oC{GuT5L%r})%UU5P4UFn|Gc@-|It9eAg1g?|{Q=~hci z$uB@1izTma5w8lQHgz_eJ)7yB+`k;p|LK@Fw)Xp7U+WI!kfsCj6{UsUbr`rS4ZTnz z)Gvavh>4c1~@4(xN)3Rx+)dL z1Yp}tU{0dGrlN7N!=9=@rw4U+MI85g&hKzx`TUuVTOnWKHh3AZCJ3lcovM6p4})pF ziJDi9hjW=0kL=Z~=+|0)aS4&T?v`k{ZU&+0y#L*$i%cD?HSaR(>!7 z_6@J^UNlFYLKez5*^H#PxEi}1;glEykZ{$x4;A{L;<(4dsDvS@0N1YFP!WRW)fOAw zmC&tTxN{D|h9Y37*R9PAo7+IwuXG&7#;OXx!p3YY*SgBf3zoj#V9O94T(vdRY1NzW zI7k*x;ByFLR-9R%tv6RXZ8b5MuV}Mc3o9FvL2bj54GZvb0Z{XX*O!dWE-Y=$8#OB) z_{%jhIwRyORog4%l68TOp5DxuD*18+&fGe>zMqPs)Fh&c?>R};TUDyzU%=x;N6zkl z(c0L_wFx_nJ-mY#sRf5ZZiH5 zUMc=_Xk2t)Ci=ETAfdHYEZ^-olbu@L7qv1+KirPmN_g>gjvfQ7=&h%`w*~{T|0m0L zHpd6fI#yE>p?;8lYe-5;%E`&;>yse-eFl)Sx|}b|^FIfn_N^u|fjr1}LdWCV8G-WK z-(G^YT1);&o|UF*ng*k3mf{exV0p!*keFS=42 z9dSh{*!{8-{P9D^;isDu`H;QtU$c`7=hbsQpbk;0R*Lrq;YP%{Gn%eA{9);LbkR3- zStVh(pT@arXrh}GV_ANk1AnDq9S5W_qPXB80e(kp{@jg<#fv4;Xa>|=BJ&=E!%|v~ z<&9hG>N;x+3(>IgqRB(RU3VhRAV_}3X&fv}%rAIlEKs^mXVI{6h4sZ&dsvPV%~I9! 
z1PRP%8If*LF9a8r_+u}4+@jeQ=Y?~B1a*{5fV&(5kDNN_Rh`EXtsE)zTZT55W7)bG z1o)-1mNAoIAP3)HGY^Wkvb=pX@%Dy5l2^2}JR6SxszESGY;SLG)7;+OEx^f{CKmWl z=}*#(wy7q&oyalKBk^zgZxO1!qK3tdVXPTfgBMH9~(VMpo?nEJW^L( z+}XdrGPks`F*mcauxfDuEVX~-%qjLm^`mAMu1x65FK|Of&~)~j)CZo`YgRy#22d3Z zui73m4jB+Mpcn3&mCjpe#JhvQRxq}klt1_`tPm`DG7TrQOp1x6JW6jERYO5(>7Kr{ ztwiY1UNX6ms<9lYMdPv4O9N?1MO z<5T7KP$Bj0R^h6@RO{ZmWaB{O3)pohtN)KJF zV3U!+Z+E@Klo2M2NE{va2Q0gafJD#uPw7vOPaxnm8%K9uehUuf2Af*}ak2=egq;lQ zzDpwO$=Q*7CI@b0k&M=zJ9bl+}$ zb~UAW7VWNS2z9%3h$-H6Uz~57{f~viLG@#z2_k7bdi@e^NGd9DkpUMbks^1y7<{Jf zVkTwt5~Rgov8vSUse-Gx(-$tromgwAbTSw`<{$d|>kT?IB{JQpD!?TLSh@O>UWtN+ zy2?&e@h^RCdBL3O=9+Tr)Q&^>U0cocblz1>(s_lKh@hpTlA2{^zlE#16^SR9keN>Q zpSeX~xoWuzd?8>S1Cv^c22eIr#+26+ryo^IC56|hMK>~Ej#_;lr;|_a;mse~59bF^ z1YEVOo2)gV=aNa1#hVn*P1ye^Vvc1hijc8Wu(PvM4q_swn#ZuI*Z6+;5i?knjnAPZ z6)!9VfM;bUr6*(cxE*C1IQfp3e#xkn97U>LPRQ=w#~4JwcQ|{wG)ANWKZ}yfOysuj z!g)6K=>7S(8Lo8XtEpa#<3vnePfuFZK%S_Qjh%sncR4>V>az0U|FhPM5r|0Ph)7_5 zr-8DVD$W2@LK~XNN&R>(%aSj}+ANj}?@;KUYg8s%Ps5nhHAk6dHXRjA7rVY*cNY+1 z{*w`kzIilZDU zjpx-&K{$i5cISM&@V%dW>jRv1dGk%&V-!4|2 zP=EX58j>eV#X-`bi=}iDp(scJ<~`^rA~sI9RRW@@DD8y&e-}-Iycle!=GlXLdwcup z86Hhj5+|XH0tR3kD#%R|p8%B*n=m(P-K*xe)ka3T+N4Ff?X-mWee3=~t`E?$EwE4K zc~AT-0762KD-m41&2!18b$rh$rq${tPBB4<3zUy`OhRr1m$f4+pYZeuTuf zAI^C%W$mU|W#)7=?`^u$3miJtPHr&BG5E9?uCJLy+Q@x*e>~8++g{rf=VLRSowaGt zm&Eg3S-_lnGl@QG;D7EoN#*Hwe>;FU7X-z{25Btc2u09Hf~ewae|pVONgYHaQ{ZD& zAcw!p zzW$1d7>VuEX*Bn(lZga$NN#TDF+GeXmswfa+FD#bwYsq9+>VDb&%Q7Z0WS0fO7c(A z>AiUK1Qu%E$?5s7n14Wk;0z8#1y<&g;r&u44)`5bWb)5H2-93vs#0M+2t=u4XAaMl zroymr3ETHAK)G@$3Ii$3l>&|uuoChTFF7@lgAe}tl`|*mx+th|M_$US(>%Ylpr^1j z^?4zT=;pEYPWzSS8H1uXv|we+I5w~?pVs9$+XX0GN^Q2WgL@m@uF(}UqzL#4A{7D* z!wkE>cm6~@7~C0}nh#4RvBYwa{CvJY^dxNce~{#VJoKCknoxs8Sc)6+oJ}gkyW0Q< zgSFvfG&&}~_}$ad`!UP1Pku?2z~r;-^1jUTlJDVYYBSX)t-Uw0%%JOz#H zap`K;KCPvkLJ-~~`IVL|;(&vb$IP8B9NNGnh?0%vOF6jnk-!@m{I&?>XR}Sq`@&|y z7V0kcpkeJIN)0%*vPw$!x92wiXpJc=N@jdb#E10*{A#2zas!nnj^ISLyw{9!7u09b z>fqOt5HXUrtlzLI4AK{bq`Ia~J96^yuAyEwVDoj|5}FZJba>of!Cv9cSbe37U+uv} z=q9tN{*!QHrj3L1rNdyQY_lmQET|;_P6;iKuzYdtwW4UaCTpYs1i19^aPh@3H6MU& zX#+u5q;P~VnSuQpuiXE;mspZfQOyB>zk59MN+JJz*5k~Lj&)aFgT2}&FAs+siNDY} z+g9giZ`1JFhm+)Y6bHS(yRQ7DgxAaOBPzt*Z@q&#)7P$>zG5Lno;-^a?FJ8eqZ9U) zTfTp7q%ye89MfYq$E%lMs^uUS5AW;r1y#0-{p5I-s9 zvRSy0GgQ5AtfLdjbY8}3hs47ne4Bkov@Yq_=AsSZSt^4%k3r@X(Xpt|h!F|^2`jF{ z*J`RI_8dV+{a0iNX^IS%TF!N(PId#ynlv1(-zXm(>w*;%1dY3Y1D5Ed#3BfZ24FPR*Sq*Bfr5$VAF3Pts16kb?84l*ci_x7^Z zw~FkviO02a0}+ENKjOSz3kWH=U85FHA=x9?2_LU~TJof>lt#_TD^oecy+L38cTrVq zX=%nHB1%dU&NjC6#bu=~u4Fh5buI@;k)WE%l0S@rS!=cyLs!0Np^_00UhtKSY>dP9 z?Bw{1%FmDec!I-4@1`U-UZHqU*gPoqZ6KXYSgqsnJ}PRTRE+EVn73bv(MTNmu~`4h z-kv)Ae|wQ@iz+)4TMa_+PzpjOPE zHZ|e}HIbTgTLNg5^@>$}3LZ8-#Y$K%Cl*2Ie11|K(` zDndt`e5&Vw3L$J`(lP?7>Z;YBlhV6TN)qMHhDs*y$o0<4VSS4sA%m+g6Jx3eQA9au zDav&&O3$mZtAc^#H=TUhx(W?^A@k=3gVSNMK==4m1p;7 zO&vf%eqbO9i&oCi)ScUVs#w4oj4Pf}DDQL0ZTA})733E{N}isaT3B1qZ`Eag0T25r z2y)_3!TnyPn~R0Q4+lyP4cdAVYabh)>qL4|4|zq0hkO(!qN-#C`3{58ATQHsxaT5e z{&3Y7HqcV%oY|hS7FD#ptC5&ti4Q6kpw*=r@4ZuU;;KDT)WB~9(gcZU9KH^>yD&14 zF#u7I29xqlZ3&fy300P3?Y?ccrF3qy2$vgtqTpAmogVLeXO3OGb}Hf|J!vq9-K~7y z4-jhnJZ*Hs+KBd+t+e00Ip5U0bZueUkUzXF#hmar!?1|{qq{H|EbQ|4|6Z*Bg zaFK75H4L6OB%HRZ2lc)QDt65mXOMn=$AUX%=U3qmGO2L_9Hz}Cq91MDF zoVMyO#~q#cP1YJ+G?i3l0|T|0-CFI=*YHL%qBYciDmz#9TuctIVkM(&%}dJHntkVH z55mY|*AjEsjMc?KO~wY=7GqBNJo#Ljmw=Egr7W3HiiI4|qC-PxtRBs4)=e4{gxUF0 zPMqfe5~=(pAO((c!v3vW7+GKxM(on;T&O=4s8SI4Re+znjDJ0sbr?9M19G#{Pf1;= zC~j|kJB1&{B|{fhC8a>n`3aDHHxW%-(A>q|U+6E~DuBk^g`vv%3GcK^z3}ICL?2AL zjN1G4D2x<6i1MPH1J)q8`D7VvaDEhQFW zvh79YzHNA^Xvq?qwx(@mE$ywfT2GZK_s(-PzZ!E3>+2 
[GIT binary patch data elided: base85-encoded payload of the image added by this commit (img/framework.png, per the diffstat).]
zF%Tac3*w3j9s=?6QLAw2=CgQslpp9>qXfzw8E=zl1K+P!1$O=njD3Lg77bqWKPi{&!QbnZ&QhLZsCyF1-^`P&a~Y7L_OQH zmMJu2u7B+w&j&0;S|SrPHP^V)TP6OHM`4=n^VXXHt%t+>cP)J*fk>xvfq_y!PpIHv z_hAN3p7N%okqK*IlH`OSCwr4S`CFmJOJCz;`^|xGK@LND9Iam7jxMUpjE7;XRR^ES zwgcjs0t|*a-Q9aAA6BKGQ9>cFizu?BzK3`PC|EMZc{ZLehbZE4TPVw$Vd^qsu-jfw zjP{HcX)x+XJ&ybbIKkv>f7S9VFzyvon*7H1QxTD`Pf=3?O*&2)>52)NQ!g7EN8~AD zm(lN=TpVoy>gA{whV}-VlsrGIq7g!ZE)k;gM#x%41*?^^g1FFykoVp5`+q25P+}*q z<}!Wjv|CpAA5Li^W-E1nwpGoXWfOJ+*P^ZopAxPdqb1^pmLw62kCvPd`tF z*m^2H5w|Ta$}dX@l9XfBkD*o+K9(P?8kU|CROxMm?%}#Xiwi&hym6=bmY+Z09^#Ak zZss%d_qZP@LXl=O8} z7P6`_(@mr`0i_V~r$E<*5oebGY->v?)T{**Ls^mWTYI)Yi?|2z;B*0gLac%~pOAZ` zICKvrAk zd;u;>1b0Z^`6n&P{J*o)gdkZcxO{bfaZD3Am(o~3W3-(ria3%x(s_-{)hxF6vAfnG^&+D(!hJEWqS`#XMCg4vd!Ac@JW-~ z)X1}z58chrYhQ`pZtu5dy*)3lJVohp%kmC&qT+7rX9nFnG^!-}zP6ILNXJ6**E_un zjN+}BGR$kyKa(HJh7qWP@f{DvUC~wGoeGIQ3i3NNx)Z09gVC%t=?-XU$30x%{#OZyCLX)fMXD6&n3-Q_%n8THcp0h0T=;*O$I+ zom|*iS!8v2U%u5^QQUOZ1DH-34+-(a-Kd-s$%=bak^!F-Iy9d_NN6VFbalTYO)#t; zq*079j}n0QL+XAiKlalIn6TM&E+TN5>iky=P-VvLgr{IuWLA`%6y$l6DB+OE8=|9= z_)e)dsXjJNZc#EJer+yXj2ANi{tecedFtw3U^Ho-vu3K7GZ%eqx((u%JNe$fgzj!b zGP#S3X-o(fhJP%TkunY{6F}P=P;UpU*`smWx~A^B|1@83{YIGhI31-+;PUQo2hN+* zVyVhw@8_a_@vqJ*$T2r5Ec8Ay6HRPn3Cl1H47XCS$_j;TI~LZ>9OvXn4CXt>*VN@n zxHvrzYo+!QubSU7_|!bXgz(4L%x7@4q3xGgZVgd`Cv%oAj?e> z@>kcp`xCKgI1iMySfe=$@3fX7a39ZuQ505zBEH@W+?$Ylkr;=0xYzE6axN z$Aq{9@VaeOkY`ogJLFn7fBSLmaLK;qRH@JNkj3!9>F-N2hc?{LvjskjpB}s0E4CF^ ziPe`!tM}g>9q|3D^`XcJM|jun%%{yin>JwkzI^$B1=iC!Ctq$uG7*-@K>D2n`oM|+ z1&DJhlE0R4FJK}gCx{*c-qbOHa?;`j#mVVZ#AN|@@=6ddz=@38dV5*v^Z(>A`c&Gi&-N}SLOe*x->$m+9 zpgTglBd>&4rF}xhj~603l)jHzGJwB)%GCxdbU(2A_C$Wvfw*8as%Yd}GR12FJP}zkrCBTYjK5ph)5{ z1Tr1l)C1@035Nmj*EiH4|MAeD1t#X&#=>i- zg*6?|%v~kK&AxF67aYfWWaWJ4&COnqNyA;YuC#T|*axz+r167yz|1TO;PPcfF16M1 z{?4~j`G?55uwGywL=e@fq1qk$NpKyKu>JP(yy@9K0Y)Hb4GU|^(Mz1?y&RF_v|U5k z_{^JI!DI*P&A|07xz3;nE+{#j3uL}?>*QR29!idtC8aSSBiZ#t-MULtT$z<~`Is*+ zz~l!pQND=IQ~32$sVVE$lA^lpnmd|NU<`rE*J^d*^fI6}%vicEig(d9Nm`7G3*xQim}>d$z#kYzj^_^-q3Nb0x6q zPTml6CV5T$Y@Q51)|eL{39tuhfR;kS-HCLtOJgcExiQhUR#TXnbvwC?t_N;#j`l~n z+3m4~p>=JpYb!e8Ta+Ah!Zkc+YrAE-?4_mH2+ai}8yeEARtcRbMF*j%$>lODsho;t z?-%SVdb_@`(Uq0nqn{Dw)~~2xZdJX=k=3sTydK+u74QqnZJV&a$FUcwg+-jLX4{q- z>+>6F3nGILgl7`Yyrm#L{kqacjW@S3B;QkWG>RD>&j#*9_I)DZ;Y+wsDRI*i8`fk2 zIK*w_KbjdJZLqcj(qQZZXDke!{dD@^f2NE93kraA#G`4iW2Js%Xq!e)zKKNilt{&k zjLXu!vE_1TL_BG(%?I^HAi&fwC*+1%gewAyOSA^3NDlu+Xo_zj>*P_s`=1n?;~Jk! 
z@we`5eGE6sx_V(gYkO7w{PzdDkLL$R1p@zu%nI5;KI7afNeK2I1Dr6<(vBm0l>eg z;chaxD-($E|6b!~4rx?`XHomkSzkLAG%iJ~BWA4)vzh*4ZQI&Dgoy8E&)v(E(-NuX z9fKb^+y>u1X!JnmTHz2{baB1XT!$#uldi%;#QoTk@`W--*F(qfSGfvPL`6_gnWe{U zkyCUg(c8E)Nrt}|HKhw!W5!%U-Jppqv21cYiga{9m2_Be@i(f_PpIcR?W<9?%7z3oW`{j*Sd zxs&S0C&Mt$%Fk>{e|zZdLg|%%^)5wx&$szfIugSNzv@M= z{1GWOoxi3y|MV6)t@foh6<;9w*@?{sFdqQ0u`Hu>QaWba(KXbneDdw??{8A~Uq0_2 z(3H7cjou+}1RGz4(~eA-C)pI^y`S9V`126kOZy@>W4IeJnl}~dEwO%~m z;dyTFBm47fe#pq8-^zMCH={1Xc`^-CdR7Ne?mnLUcs~7B3%z#c zyET6qv@|gmenxBf>!x_J#q;yOuRny%^3I3OO+Efo-|x+|_4?tz`s9Ou3!T(I+6beX zN41%dKB(&uuv7hgb#bpEzw0c&9T?=C%Dhn5NGO`{6qU66Mu}A@Cps(}afu=h6mf)v z^Y(#1%Z)XT$T8RZ{HU_n08Fsi_zE@ZDR@Boh-s`Z2BgAFy8q$T%c@!)Jr-tJX21t; zQ#+Z;0WSd74Zw>PoB_Pj_~tXq9}OApd)kao>AS}-Ov@D~RoYS>1E!0f=y|$kJ$3i6 z(~a!u(97(vxIp4W#>?6ZqEe?fD8=PnwMmD=DGoEu4CQuMNpYz6#?e=Ch>cUjtX{9B z1ZnO~yvd|E)qEKd6zOu!Za{F1Y|up_@GHZ^pT*wQ3oS6_N-#i?q%u2ulEiX1Y|73l zJ0(sXDw~gBt=_@p=aah4-^(yw=W+RCGix7=+o~SJ>RAkd>muJxbUd9n&rOC3YXFZL zV-hV}M%$%tNC->P85p}|w*pBZ+;I+-pq>ukvXeO0W`(}?S}79Wr6l>nOxmk05NVY4F&%2%nd0H0^WQqRuw zmi+&YzSxZzQfP?0SmmGnc)@(-yJYhCIMd>F@ZT?Y&r<|0zc63@8Q#5G_quw~`knd1 zpOduqxuhrTuKUJ$hT`b?34`8dCn}OTty38R73V+jS)`!daTj?6W=T zhR~_U3**ImnIV6#J#PQ`qidz7|8nH<>(=d}yO)2v#xBZV{l!ttqAx=4MxKY?ecnIA z8F7+o>iqBd&aW|cH6ZP>7f?h!F;(I^1JFUNGKnEw9P5k`GEQ<<NigEm& z7m;Ut=37OFQ(Lg$TTjQWO~j}xdoH}&=3951CAWgQ_;L_^ASe)2b_%-ln1s)KOU12Z zLGX)YzKhjM<2KZI>jcl7OO(2{Qhj0L{MMsi5c9y9*FSE?vG?R@Gn6L^S}ME^A-9sO zMwig>*>NJqADTi6iwpASL0W1*Y=KH#s<~ug_i#1l8dB_SQd8Z6O|8aCCA3jom0U;z zW^|SNZ9=L+fx(M))c1TvaVw^ZF0*VY4PpXo^?vKw<7=^80r%Js>K7Crv<17koh=_6 z>G>$yN_0M9p~b%dRf3ebw)ElEH5C=tCY|Cw2^IY~ZpPhr@O?Wg)EtQ8B4I^++}SAn zXES~AnheiN>+Nn3;$_*-TP|0hR;<5eG>+UclNinuB#Br|6@sXMxzm>GC9(m~04ymt zH#Huj@$%mG8b1F&-PE(Yv3jrlC${aoE)EWtBrNB`Ca(Szex@*G&aJlp{UPm7;NLUl zk4L{VH+MNMe+!_$PhGWNv>C|Sr#4)UyCkCtuRk<>9@0`I5Yo=dm4BaG>`uUr`RK0B zK7RZ)ywCDF^uQ7EY4B<%7#lv{^zrwb*~`x^$};pnHm>#$KF;r-3_m}X;QNGa>;9hn z{Iq`TvSJOP6&!%#Sbrm?!|~ z!xuf2FY*b`egr;`IG(ac`@dF`|DFdgwtal^0w5(&bahmXB?CC8$p_H{Eq2Mv2Ts>$ zz48SCHvepWJV=$<`S4SSa}GY=Y8bhZq&y$FdfB11wiG(A!MSu&+@N^ceEpMA!?SH> zvBBait(YM3IXy}eo2SrB-)%#*)7 zPmQ-Zj*FLb(?~Vm%O=qhXf*yzGuAxF4mGT|!EUyAKZBB#ZUOTah@0O`_I6ceVYA4! 
zfZZQC+PI(kBfZH@m$avNtM}Y`F(dTNi(=5BEB{ylw7_)3^Qo+wWtwboqVuZ(AA}AkMWh zl5k&HPWEUV-SGK;HFlj*O=ZzKh>cM|#zAnTt5|45(MU%D5d;ww5m166f^?*pB#uat zI@ACGB3;3NfV4meJ@noo2~t82HH3temx1-xdOzmJJAdxI``&fVK6{;WcR71~o0}Kf z8(6>BmT(4urnt$`nm1(3s#xpS++M0~=U{f=P+td`xywFlXGfa89wds&=R(gw-B}Dq z?BjW%nej7y6S9KJdnSrsilKd1fxI#M4Nw*ia}02rJRhD@bhwYI$rZz`;EKd z4}MfG*Nk-fTNt8bo3& z7C_s@ysWWh?crz->mt(!yuI0{g2-cI}ub)w2PmMdZedvS@$1xH)N?zMx%tF z@OAIr)wY3fUCfjTzFqCi-JGkr?cI}|uT*d6gyvtgyB?N=NEP>Fq(1vpDlzmCX)f+~ zrmZsf>hI`pz+NLG+Zy|pIq4g5k3O1q-YZcZb*V2^w0&pBZGLLi0z+68cxJI~=%gl| zspKU6<6g2m4{PpT}x!`wEMJE#YK9*_qdvv}G7ZKW@sYp0}+!pDfNheU*XRIIf zMbzFA>7IQWtm%UXioPVPI#0qZNX={LY`o}rDc@c&?+uRdFiG`Pm0#1I#jKVh>-~kK z=Dxa_{b=`&O}yyWOOMpNIfFJ)ZlYcVq&GI$uJcP8q{N zB3-3U%@ZJX@_^$k>+HBH8KWDRMSnQdPbLgU`3KVNvZ)zL|WxT|dR zSN4LKXW7OKj9y@vhE-cj*{7bH5p>K6U(oH%aJvp0eV%IfX;AQN-oT`!JO0%b6#?l) z(_-VZ^Wd{ayZO@Z%@dVQ7X108XS0EHt|YdJ|!hzPwNS`5NLbHl|-I-eH{IE z>HeD-UuogNJN&!XY2GFMEnA^wKxgUxApfs6$z_6^s3v|RF;!sqT6u($;CpP=Z(cF( z-_k-F{|vbuZc%1!^;yNo9&S=DzTqA<3e5RCBkPUDhf1E0j*>hoJI-Yg_w|ODW#Q8= z{J-8Dd*X7o)QsmJ-#2SSh@IBGH%XU=bjMJg2j^aYzcyl6bvEv#fnmQ|nxXso?9zr4 zxSy|fhIjL8eQ;W*?M{p64=J6t^pK7Fa6$}!9xjj4c$WU@ThyP&7t!7`4P}C8+rNL# zG<^MF(Lbu!#sxmTka8538Pw~UKIJ_Hvi2giGDl$bEF|{LDb2mlPzIc2Yt71xbfqO# zz4Y1&HAlSDWaUFA=^xp6Ky}K;!bq^6iw_AGG45T%6pg8|B4qk19RWE@c?722ARMG9 z9P@>clLvNg8$$_}e@-ZG#<71CETPVGB-D2PmYgeN%mgSEJ>G9b89OrZYeYS};jBeO z-a6Sem*=<2@JP+_Mrk%s0cWers(4 z%6cL&B~ryLi{3>-_tuA)GcB|Imi4YQ`Wl#A|i4*Lb1>7K)%jrL{{gi+O*+j>1xiBuHb@6#$_3KrU#V#+LH@!BDrK zTFpwD+q8X}|Bt@?fbnCW@+&z$0JOX&IsIHV7@uY&w0@BA)qM8=?rj`^=j%4RJ#t{r z&3$Y%a~X}8=GID1cEzkkJ~38vAy>#j#%|fyuhiWwF~2}-SO+dOi$HdXJSR%$nh5^e zwUFgA0toL$yk_-Mdd^&CA9TKKS(Dzf299$6&S8jb#t2u8Grz+!cJc+F>~aa3U^XP$ z=Et1b3>b?{Fh;MOd6K}({3@Q2N>=!gH$@T=*(bp$2=`wdJF4~#IFAXSIbtt&ZPXZZ zfCdgfmYG&GMJ3!l&8HPR6vHx$PXTyaHydoac9hyJ?VxPoP#aZ!h>uG%;46_Xmu=xq~wYb2G za%+(RQG5|JdKiI*mqT(2dEnjZms0z{#AaXUnoEfvBhPn^?Mvb|7^8h66{`swf$L5z z;sp#jwS*qb29N9LLilg2f+%sTea-UiK7rXAwO?nBEJ9u{ zZx*l?2$&szD*+bURS6uhhe2nkm6@yp7=1Z1CK=*RhJ!Y<$E-_#0|4Ned&wB)rY@Gz z!0!Ra!637Whya2ri{`_dk$_+gg8W83A*8it$Y$ms#C>6k?biZXJ6Vh4@0pi04(bIm z$1g$|(V9?#_9UqU+NIc6w|`&Vdqs-1qFtg2mRvg+_Uxrr7|Uwku3$`?*O4d8HCYp& zB?KF|tZuh}=t`S`6E>;AKd5!)XySt0!9;@n4Zway31dr+zHNnJ7fuDh5%gjVBLSqE zMW?vAs3h$nj#2ZJ9UVa zPK3Mpt@N$=j~GF>5*Qm+C}xs5dcIeH-cv#fa&S#dv^`pQkVN*O^L$qE)VkWLeP(8* zcKj9A4G1IIi8K!Fak8fWs1V4cwAN4^&hnYcTji zwa2kBY#J5QgnY4s2iC8vBdDNtod45AvIur-Ls~jJyzB0OiHVjdH%R{pDtuY_S2_;kf?Y0X4@P7RQ~iRCtp`)xp{-?R~6aZ>P2>H;$hsgNza zq2%d@VFqUUhi*_PbE%v=%6%LDF_~i#;XhZG6@6$@8)%ib=5OR)E^{mZ07%gm;yfcB z!_jXb$Q_VifP5_xZp$Wj+m`+g08rvhTM5tDk3V;fFF#>l+0913MgHjSg02wf5y|8~ zNR{N>Q#r+(TKAgpYdbJ0{tUIysB{I}7ntpgsY?iUwF~5}y&}0D1DyYgch(QPbm(!GjFBG=s@+l#MZy(=AZ^}b@AR+DN>;17_j3H(O@HUa1ED7rMxjTnG@VpxH1SQ&Z;~*+sQRig zAA5`-EWc7zW*J;v_`TdSaIEoiaDe{(ddfK8o1+Of+$RYSxd*J5XN9eR@fPYkFlpjj zO0TxIM&JY*h3bf?HUGMnC}E%)$X{^jkn11OMYfJHNah{v^?Er&nk^eOSi(g#{Q$+h zUzp3DYt$o+fYR)a(lKpT`a$>U=-}}1tebAWFM{w4x6u8hJl$53)MK9XZQ3-{;c1ex z{^~3b-2v#ZLZ89j!Z4TGiu7iI&Z=JZlLLyIpvLl0?{Wb?Z97*>!ysm}c%S~Z9_YoA zYG8*m!DlQDIhI}P)ZYD7M+X$2bgp(aY^6;yuw3r>vfT&wYGdr{xW5{jRVmh#l(gS!}Fqn{0}DnR-=% zr-FQvkIxLg7b77I2BW4T!+$O-_Z#IpKF}ig81Q+x*Y=r-YbHC8CGzWXTc)=)2F?&& z`1s^k3J?4OFBV1?o*kUPcRb1^49(HTi;_6$s-H_?H32fHolv3fXaf+0l>RKy#~>s; z{7wz;%FQXcdB*g0Tnk++y(WjJmuvq{b7F5}4XR#=B<&Ox};<0R1 z*LG)qZ*xS}%d5Ag4(u71GacUzS|j-cSYZLaSO%A}gvgYLw za*v3OlA!JiG(0Vc^+^~EbHNF1z5YThQ8%^Bd&c(CF+)sO*j@8yIc~)MK+_W4w?%g} zQ4+4q;KgR5v~lkpK}^su!nh`r_{DLm6o|l$lkDo?5QsL0P*Hu1CGCYnFfvFF@wif#4?PxXJh;|G>RJhT&;{o|T}5=EL0-mW&Mh zlB@s#PC4^Ui{EtIILMHCPg~JTfRGz;4;p8%2X7>3aww$M+0L+ILry3$)O~K$vsn$}b 
zzSV>CKKX5s{dX&N8;hW&_hL5`cP`=D{h2Ue7&I~7+jP~#6Ra1z#!^tGc9oOFr!@R6bGjbA;IF^E z9+nng_i%Pa+3!nRM~5LAXixQ_twYpk@wQX9b&F|!eIcu+yWqcjzr6RXQoTUNXWX6_ zop(~{cjia<*rn>jt6T_i@ri;M@2ps@_c=6k5BmbX;zX_E z&??IIa>lZ)c7W2uS18IuXLfa!q!R^?GGq#${oEKJEONy1U@-ipS(W>*zo82B1Wv_; zmYvbf-g0L=qPeBwMTP$*x7zo8-mU_~%GJZ>mGsc3H&Aq)J95 z-N1EnazAF(bBspl5tGI~CMzO<00*K`SuS(-xo)qqLA8ukBC(vT*(RKzrl!!T0(10J;*`FV_$p0YC o-W@x7d7*XW|7CRihaJM=>@SE<3y;Wg`L`>5-A6jb_iR4?4{H 0: + plane = -plane + + norm = np.linalg.norm(plane[0:3]) + plane = plane / norm + return plane + + def __len__(self): + raise NotImplementedError + + def __getitem__(self, item): + raise NotImplementedError diff --git a/lib/datasets/kitti_rcnn_dataset.py b/lib/datasets/kitti_rcnn_dataset.py new file mode 100644 index 0000000..38a79ed --- /dev/null +++ b/lib/datasets/kitti_rcnn_dataset.py @@ -0,0 +1,1474 @@ +import numpy as np +import os +import pickle +import torch + +from lib.datasets.kitti_dataset import KittiDataset +import lib.utils.kitti_utils as kitti_utils +import lib.utils.roipool3d.roipool3d_utils as roipool3d_utils +from lib.config import cfg +from torch.nn.functional import grid_sample + + +from lib.utils.sample2grid import sample2grid_F,sample2GaussianGrid_F, sample2BilinearGrid_F + +def grid_sample_reverse(point_feature, xy, img_shape): + ''' + :param point_feature: (B,C,N) + :param xy: (B,N,2) --> [-1,1] + :param img_shape: [B,C,H,W] + :return: + ''' + + # print('#######point_feature:', point_feature.shape) + # print('#######xy:', xy.shape) + # print('#######size:', size) + # size = [i for i in img_shape] + # size[1] = point_feature.shape[1] + size = [1, 4, 384, 1280] + project_point2img = sample2BilinearGrid_F(point_feature, xy, size) + + return project_point2img + +def interpolate_img_by_xy(img, xy, normal_shape): + """ + :param img:(H,W,c) + :param xy:(N,2) (x,y)->(w,h) + :param normal_shape:(2),H_size and W_size + :return:interpolated features (N,3) + """ + # (B,3,H,W) + channel = img.shape[-1] + img = torch.from_numpy(img).unsqueeze(0).permute(0, 3, 1, 2) + # print(xy.min(),xy.max()) + xy = xy * 2 / (normal_shape - 1.) - 1. 
+ # print(xy.min(), xy.max()) + + # print(xy) + xy = torch.from_numpy(xy).view(1, 1, -1, 2) + # xy=torch.cat([xy[:,:,:,1:2],xy[:,:,:,0:1]],dim = 3) + # (1,3,1,N) + ret_img = grid_sample(img, xy, padding_mode = 'zeros', mode = 'bilinear') + # (N,3) + ret_img = ret_img.view(channel, -1).permute(1, 0).numpy() + + return ret_img + +def sigmoid_func(x): + return 1 / (1 + np.exp(-x)) + +class KittiRCNNDataset(KittiDataset): + def __init__(self, root_dir, npoints = 16384, split = 'train', classes = 'Car', mode = 'TRAIN', + random_select = True, + logger = None, rcnn_training_roi_dir = None, rcnn_training_feature_dir = None, + rcnn_eval_roi_dir = None, + rcnn_eval_feature_dir = None, gt_database_dir = None): + super().__init__(root_dir = root_dir, split = split) + if classes == 'Car': + self.classes = ('Background', 'Car') + aug_scene_root_dir = os.path.join(root_dir, 'KITTI', 'aug_scene') + elif classes == 'People': + self.classes = ('Background', 'Pedestrian', 'Cyclist') + elif classes == 'Pedestrian': + self.classes = ('Background', 'Pedestrian') + aug_scene_root_dir = os.path.join(root_dir, 'KITTI', 'aug_scene_ped') + elif classes == 'Cyclist': + self.classes = ('Background', 'Cyclist') + aug_scene_root_dir = os.path.join(root_dir, 'KITTI', 'aug_scene_cyclist') + else: + assert False, "Invalid classes: %s" % classes + + self.num_class = self.classes.__len__() + + self.npoints = npoints + self.sample_id_list = [] + self.random_select = random_select + self.logger = logger + + if split == 'train_aug': + self.aug_label_dir = os.path.join(aug_scene_root_dir, 'training', 'aug_label') + self.aug_pts_dir = os.path.join(aug_scene_root_dir, 'training', 'rectified_data') + else: + self.aug_label_dir = os.path.join(aug_scene_root_dir, 'training', 'aug_label') + self.aug_pts_dir = os.path.join(aug_scene_root_dir, 'training', 'rectified_data') + + # for rcnn training + self.rcnn_training_bbox_list = [] + self.rpn_feature_list = { } + self.pos_bbox_list = [] + self.neg_bbox_list = [] + self.far_neg_bbox_list = [] + self.rcnn_eval_roi_dir = rcnn_eval_roi_dir + self.rcnn_eval_feature_dir = rcnn_eval_feature_dir + self.rcnn_training_roi_dir = rcnn_training_roi_dir + self.rcnn_training_feature_dir = rcnn_training_feature_dir + + self.gt_database = None + + if not self.random_select: + self.logger.warning('random select is False') + + assert mode in ['TRAIN', 'EVAL', 'TEST'], 'Invalid mode: %s' % mode + self.mode = mode + + if cfg.RPN.ENABLED: + if gt_database_dir is not None: + self.gt_database = pickle.load(open(gt_database_dir, 'rb')) + + if cfg.GT_AUG_HARD_RATIO > 0: + easy_list, hard_list = [], [] + for k in range(self.gt_database.__len__()): + obj = self.gt_database[k] + if obj['points'].shape[0] > 100: + easy_list.append(obj) + else: + hard_list.append(obj) + self.gt_database = [easy_list, hard_list] + logger.info('Loading gt_database(easy(pt_num>100): %d, hard(pt_num<=100): %d) from %s' + % (len(easy_list), len(hard_list), gt_database_dir)) + else: + logger.info('Loading gt_database(%d) from %s' % (len(self.gt_database), gt_database_dir)) + + if mode == 'TRAIN': + self.preprocess_rpn_training_data() + else: + self.sample_id_list = [int(sample_id) for sample_id in self.image_idx_list] + self.logger.info('Load testing samples from %s' % self.imageset_dir) + self.logger.info('Done: total test samples %d' % len(self.sample_id_list)) + elif cfg.RCNN.ENABLED: + self.sample_id_list = [int(sample_id) for sample_id in self.image_idx_list] + self.logger.info('Load testing samples from %s' % self.imageset_dir) + 
self.logger.info('Done: total test samples %d' % len(self.sample_id_list)) + # for idx in range(0, self.num_sample): + # sample_id = int(self.image_idx_list[idx]) + # obj_list = self.filtrate_objects(self.get_label(sample_id)) + # if len(obj_list) == 0: + # # logger.info('No gt classes: %06d' % sample_id) + # continue + # self.sample_id_list.append(sample_id) + # + # print('Done: filter %s results for rcnn training: %d / %d\n' % + # (self.mode, len(self.sample_id_list), len(self.image_idx_list))) + + def preprocess_rpn_training_data(self): + """ + Discard samples which don't have current classes, which will not be used for training. + Valid sample_id is stored in self.sample_id_list + """ + self.logger.info('Loading %s samples from %s ...' % (self.mode, self.label_dir)) + for idx in range(0, self.num_sample): + sample_id = int(self.image_idx_list[idx]) + obj_list = self.filtrate_objects(self.get_label(sample_id)) + #if cfg.LI_FUSION.ENABLED: ##### + if len(obj_list) == 0: + # self.logger.info('No gt classes: %06d' % sample_id) + continue + self.sample_id_list.append(sample_id) + + self.logger.info('Done: filter %s results: %d / %d\n' % (self.mode, len(self.sample_id_list), + len(self.image_idx_list))) + + def get_label(self, idx): + if idx < 10000: + label_file = os.path.join(self.label_dir, '%06d.txt' % idx) + else: + label_file = os.path.join(self.aug_label_dir, '%06d.txt' % idx) + + assert os.path.exists(label_file) + return kitti_utils.get_objects_from_label(label_file) + + def get_image(self, idx): + return super().get_image(idx % 10000) + + def get_image_shape(self, idx): + return super().get_image_shape(idx % 10000) + + def get_calib(self, idx): + return super().get_calib(idx % 10000) + + def get_road_plane(self, idx): + return super().get_road_plane(idx % 10000) + + @staticmethod + def get_rpn_features(rpn_feature_dir, idx): + rpn_feature_file = os.path.join(rpn_feature_dir, '%06d.npy' % idx) + rpn_xyz_file = os.path.join(rpn_feature_dir, '%06d_xyz.npy' % idx) + rpn_intensity_file = os.path.join(rpn_feature_dir, '%06d_intensity.npy' % idx) + if cfg.RCNN.USE_SEG_SCORE: + rpn_seg_file = os.path.join(rpn_feature_dir, '%06d_rawscore.npy' % idx) + rpn_seg_score = np.load(rpn_seg_file).reshape(-1) + rpn_seg_score = torch.sigmoid(torch.from_numpy(rpn_seg_score)).numpy() + else: + rpn_seg_file = os.path.join(rpn_feature_dir, '%06d_seg.npy' % idx) + rpn_seg_score = np.load(rpn_seg_file).reshape(-1) + return np.load(rpn_xyz_file), np.load(rpn_feature_file), np.load(rpn_intensity_file).reshape(-1), rpn_seg_score + + def filtrate_objects(self, obj_list): + """ + Discard objects which are not in self.classes (or its similar classes) + :param obj_list: list + :return: list + """ + type_whitelist = self.classes + if self.mode == 'TRAIN' and cfg.INCLUDE_SIMILAR_TYPE: + type_whitelist = list(self.classes) + if 'Car' in self.classes: + type_whitelist.append('Van') + if 'Pedestrian' in self.classes: # or 'Cyclist' in self.classes: + type_whitelist.append('Person_sitting') + + valid_obj_list = [] + for obj in obj_list: + if obj.cls_type not in type_whitelist: + continue + if self.mode == 'TRAIN' and cfg.PC_REDUCE_BY_RANGE and (self.check_pc_range(obj.pos) is False): + continue + valid_obj_list.append(obj) + return valid_obj_list + + @staticmethod + def filtrate_dc_objects(obj_list): + valid_obj_list = [] + for obj in obj_list: + if obj.cls_type in ['DontCare']: + continue + valid_obj_list.append(obj) + + return valid_obj_list + + @staticmethod + def check_pc_range(xyz): + """ + :param xyz: [x, y, z] + 
:return: + """ + x_range, y_range, z_range = cfg.PC_AREA_SCOPE + if (x_range[0] <= xyz[0] <= x_range[1]) and (y_range[0] <= xyz[1] <= y_range[1]) and \ + (z_range[0] <= xyz[2] <= z_range[1]): + return True + return False + + @staticmethod + def get_valid_flag(pts_rect, pts_img, pts_rect_depth, img_shape): + """ + Valid point should be in the image (and in the PC_AREA_SCOPE) + :param pts_rect: + :param pts_img: + :param pts_rect_depth: + :param img_shape: + :return: + """ + val_flag_1 = np.logical_and(pts_img[:, 0] >= 0, pts_img[:, 0] < img_shape[1]) + val_flag_2 = np.logical_and(pts_img[:, 1] >= 0, pts_img[:, 1] < img_shape[0]) + val_flag_merge = np.logical_and(val_flag_1, val_flag_2) + pts_valid_flag = np.logical_and(val_flag_merge, pts_rect_depth >= 0) + + if cfg.PC_REDUCE_BY_RANGE: + x_range, y_range, z_range = cfg.PC_AREA_SCOPE + pts_x, pts_y, pts_z = pts_rect[:, 0], pts_rect[:, 1], pts_rect[:, 2] + range_flag = (pts_x >= x_range[0]) & (pts_x <= x_range[1]) \ + & (pts_y >= y_range[0]) & (pts_y <= y_range[1]) \ + & (pts_z >= z_range[0]) & (pts_z <= z_range[1]) + pts_valid_flag = pts_valid_flag & range_flag + return pts_valid_flag + + def __len__(self): + if cfg.RPN.ENABLED: + return len(self.sample_id_list) + elif cfg.RCNN.ENABLED: + if self.mode == 'TRAIN': + return len(self.sample_id_list) + else: + return len(self.image_idx_list) + else: + raise NotImplementedError + + def __getitem__(self, index): + if cfg.LI_FUSION.ENABLED: + return self.get_rpn_with_li_fusion(index) + + if cfg.RPN.ENABLED: + return self.get_rpn_sample(index) + elif cfg.RCNN.ENABLED: + if self.mode == 'TRAIN': + if cfg.RCNN.ROI_SAMPLE_JIT: + return self.get_rcnn_sample_jit(index) + else: + return self.get_rcnn_training_sample_batch(index) + else: + return self.get_proposal_from_file(index) + else: + raise NotImplementedError + + def get_rpn_with_li_fusion(self, index): + sample_id = int(self.sample_id_list[index]) + ######sample_id = 3186 + #print('sample_id:', sample_id) + if sample_id < 10000: + calib = self.get_calib(sample_id) + img = self.get_image_rgb_with_normal(sample_id) + img_shape = self.get_image_shape(sample_id) + pts_lidar = self.get_lidar(sample_id) + + # get valid point (projected points should be in image) + pts_rect = calib.lidar_to_rect(pts_lidar[:, 0:3]) + pts_intensity = pts_lidar[:, 3] + + img_seg_mask = self.get_KINS_car_mask(sample_id) + + if cfg.USE_PAINTING_SCORE: + painting_score_map = self.get_painting_score_lidar(sample_id) + + if cfg.USE_PAINTING_FEAT: + painting_feat_map = self.get_painting_feat_lidar(sample_id) + + if cfg.USE_PSEUDO_LIDAR: + #print('###################USE_PSEUDO_LIDAR ') + pts_pseudo_lidar = self.get_pseudo_lidar(sample_id) + pts_pseudo_rect = calib.lidar_to_rect(pts_pseudo_lidar[:, 0:3]) + pts_pseudo_intensity = np.zeros_like(pts_pseudo_lidar[:, 0]) + + else: + assert False, print('unable to use aug data with img align') + calib = self.get_calib(sample_id % 10000) + # img = self.get_image_by_python(sample_id % 10000) + img_shape = self.get_image_shape(sample_id % 10000) + + pts_file = os.path.join(self.aug_pts_dir, '%06d.bin' % sample_id) + assert os.path.exists(pts_file), '%s' % pts_file + aug_pts = np.fromfile(pts_file, dtype = np.float32).reshape(-1, 4) + pts_rect, pts_intensity = aug_pts[:, 0:3], aug_pts[:, 3] + + pts_img, pts_rect_depth = calib.rect_to_img(pts_rect) + pts_valid_flag = self.get_valid_flag(pts_rect, pts_img, pts_rect_depth, img_shape) + + pts_rect = pts_rect[pts_valid_flag][:, 0:3] + pts_intensity = pts_intensity[pts_valid_flag] + pts_origin_xy = 
pts_img[pts_valid_flag]
+
+        if cfg.USE_PSEUDO_LIDAR:
+            pts_pseudo_img, pts_rect_pseudo_depth = calib.rect_to_img(pts_pseudo_rect)
+            pts_valid_flag = self.get_valid_flag(pts_pseudo_rect, pts_pseudo_img, pts_rect_pseudo_depth, img_shape)
+
+            pts_pseudo_rect = pts_pseudo_rect[pts_valid_flag][:, 0:3]
+            pts_pseudo_intensity = pts_pseudo_intensity[pts_valid_flag]
+            pts_pseudo_origin_xy = pts_pseudo_img[pts_valid_flag]
+
+            select_points_num = pts_rect.shape[0]
+            all_pseudo_choice = np.arange(0, len(pts_pseudo_rect), dtype=np.int32)
+            all_pseudo_choice = np.random.choice(all_pseudo_choice, select_points_num, replace=False)
+
+            pts_pseudo_rect = pts_pseudo_rect[all_pseudo_choice, :]
+            pts_pseudo_intensity = pts_pseudo_intensity[all_pseudo_choice]
+            pts_pseudo_origin_xy = pts_pseudo_origin_xy[all_pseudo_choice, :]
+
+            ### concat pseudo and real lidar points:
+            pts_rect = np.concatenate((pts_rect, pts_pseudo_rect), axis=0)
+            pts_intensity = np.concatenate((pts_intensity, pts_pseudo_intensity), axis=0)
+            pts_origin_xy = np.concatenate((pts_origin_xy, pts_pseudo_origin_xy), axis=0)
+
+        if cfg.USE_PAINTING_SCORE:
+            pts_paint_scores = painting_score_map[pts_origin_xy[:, 1].astype(int), pts_origin_xy[:, 0].astype(int), :]
+            pts_paint_ori_scores = sigmoid_func(pts_paint_scores)
+            # pts_rect = np.concatenate((pts_rect, pts_paint_scores), axis=-1)
+
+        if cfg.USE_PAINTING_FEAT:
+            pts_paint_ori_feats = painting_feat_map[pts_origin_xy[:, 1].astype(int), pts_origin_xy[:, 0].astype(int), :]
+            # pts_rect = np.concatenate((pts_rect, pts_paint_feats), axis=-1)
+
+        # if cfg.USE_IM_DEPTH:  ## shape: 384, 1280, 4
+        #     img[pts_origin_xy[:, 1].astype(int), pts_origin_xy[:, 0].astype(int), 3] = pts_rect_depth[pts_valid_flag] / 100.0  ### divide by 100 because the input depth is also divided by 100 for normalization
+
+        # TODO use GT_AUG_ENABLE, failed to align img2d to random box from other scene
+        # if cfg.GT_AUG_ENABLED and self.mode == 'TRAIN':
+        #     # all labels for checking overlapping
+        #     all_gt_obj_list = self.filtrate_dc_objects(self.get_label(sample_id))
+        #     all_gt_boxes3d = kitti_utils.objs_to_boxes3d(all_gt_obj_list)
+        #
+        #     gt_aug_flag = False
+        #     if np.random.rand() < cfg.GT_AUG_APPLY_PROB:
+        #         # augment one scene
+        #         gt_aug_flag, pts_rect, pts_intensity, extra_gt_boxes3d, extra_gt_obj_list = \
+        #             self.apply_gt_aug_to_one_scene(sample_id, pts_rect, pts_intensity, all_gt_boxes3d)
+
+        # generate inputs
+        if self.mode == 'TRAIN' or self.random_select:
+            # make sure len(pts_rect) == self.npoints
+            if self.npoints < len(pts_rect):
+                #########################################
+                if self.npoints > 4096:
+                    pts_depth = pts_rect[:, 2]
+                    pts_near_flag = pts_depth < 40.0
+                    far_idxs_choice = np.where(pts_near_flag == 0)[0]
+                    near_idxs = np.where(pts_near_flag == 1)[0]
+                    near_idxs_choice = np.random.choice(near_idxs, self.npoints - len(far_idxs_choice), replace = False)
+
+                    choice = np.concatenate((near_idxs_choice, far_idxs_choice), axis = 0) \
+                        if len(far_idxs_choice) > 0 else near_idxs_choice
+                    np.random.shuffle(choice)
+                else:
+                    all_choice = np.arange(0, len(pts_rect), dtype = np.int32)
+                    choice = np.random.choice(all_choice, self.npoints, replace=False)
+                ######################################### zliu sample 2020.10.20
+            else:
+                choice = np.arange(0, len(pts_rect), dtype = np.int32)
+                if self.npoints > len(pts_rect):
+                    extra_choice = np.random.choice(choice, self.npoints - len(pts_rect), replace = False)
+                    choice = np.concatenate((choice, extra_choice), axis = 0)
+                np.random.shuffle(choice)
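+            # Sampling recap: with a budget above 4096 points, every far point
+            # (depth >= 40 m) is kept and the remainder is drawn randomly from
+            # the near points, so the sparse far range is never dropped; smaller
+            # budgets use a plain uniform draw. If the cloud has fewer than
+            # self.npoints points, indices are duplicated to pad the sample.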
+            ret_pts_rect = pts_rect[choice, :]
+            ret_pts_intensity = pts_intensity[choice] - 0.5  # translate intensity to [-0.5, 0.5]
+            ret_pts_origin_xy = pts_origin_xy[choice, :]
+            if cfg.USE_PAINTING_SCORE:
+                pts_paint_scores = pts_paint_ori_scores[choice, :]
+            if cfg.USE_PAINTING_FEAT:
+                pts_paint_feats = pts_paint_ori_feats[choice, :]  # keep painting features aligned with the sampled points
+
+        else:
+            ret_pts_rect = pts_rect
+            ret_pts_intensity = pts_intensity - 0.5
+            ret_pts_origin_xy = pts_origin_xy  # no subsampling in this branch
+            if cfg.USE_PAINTING_SCORE:
+                pts_paint_scores = pts_paint_ori_scores
+            if cfg.USE_PAINTING_FEAT:
+                pts_paint_feats = pts_paint_ori_feats
+
+        pts_features = [ret_pts_intensity.reshape(-1, 1)]
+        ret_pts_features = np.concatenate(pts_features, axis=1) if pts_features.__len__() > 1 else pts_features[0]
+
+        # if cfg.INPUT_CROSS_FUSION:  ERROR!!
+        #     cross_input_feat = np.concatenate([ret_pts_rect, ret_pts_features], axis=1)
+        #     xy_cor_ori = ret_pts_origin_xy.copy()
+        #     size_range = [1280.0, 384.0]
+        #     x_cor_ori = xy_cor_ori[:, 0, np.newaxis] / (size_range[0] - 1.0) * 2.0 - 1.0
+        #     y_cor_ori = xy_cor_ori[:, 1, np.newaxis] / (size_range[1] - 1.0) * 2.0 - 1.0
+        #     xy_cor_concat = np.concatenate([x_cor_ori, y_cor_ori], axis=-1)
+        #     cross_input_feat_torch = torch.from_numpy(cross_input_feat).unsqueeze(0).permute(0, 2, 1)
+        #     xy_cor_concat_torch = torch.from_numpy(xy_cor_concat).unsqueeze(0)
+        #     project_point2img_feat = grid_sample_reverse(cross_input_feat_torch, xy_cor_concat_torch, img_shape=img.shape)
+
+        if cfg.INPUT_CROSS_FUSION:  ## shape: 384, 1280, 4
+            point2img = np.zeros([384, 1280, 4], dtype=np.float32)
+            point2img[ret_pts_origin_xy[:, 1].astype(int), ret_pts_origin_xy[:, 0].astype(int), 0:3] = ret_pts_rect / (np.abs(ret_pts_rect).max() + 0.0001)
+            point2img[ret_pts_origin_xy[:, 1].astype(int), ret_pts_origin_xy[:, 0].astype(int), 3] = ret_pts_intensity  ### divide by 100 because the input depth is also divided by 100 for normalization
+
+            sample_info = {'sample_id': sample_id, 'random_select': self.random_select, 'img': np.concatenate([img, point2img], axis=-1),
+                           'pts_origin_xy': ret_pts_origin_xy}
+
+        else:
+            if cfg.USE_PAINTING_SCORE:
+                sample_info = {'sample_id': sample_id, 'random_select': self.random_select, 'img': img,
+                               'pts_origin_xy': ret_pts_origin_xy, 'pts_paint_scores': pts_paint_scores}
+            elif cfg.USE_PAINTING_FEAT:
+                sample_info = {'sample_id': sample_id, 'random_select': self.random_select, 'img': img,
+                               'pts_origin_xy': ret_pts_origin_xy, 'pts_paint_feats': pts_paint_feats}
+            else:
+                sample_info = {'sample_id': sample_id, 'random_select': self.random_select, 'img': img,
+                               'pts_origin_xy': ret_pts_origin_xy}
+
+        if self.mode == 'TEST':
+            if cfg.RPN.USE_INTENSITY:
+                pts_input = np.concatenate((ret_pts_rect, ret_pts_features), axis = 1)  # (N, C)
+            else:
+                pts_input = ret_pts_rect
+            sample_info['pts_input'] = pts_input
+            sample_info['pts_rect'] = ret_pts_rect
+            sample_info['pts_features'] = ret_pts_features
+
+            return sample_info
+
+        gt_obj_list = self.filtrate_objects(self.get_label(sample_id))
+        # if cfg.GT_AUG_ENABLED and self.mode == 'TRAIN' and gt_aug_flag:
+        #     gt_obj_list.extend(extra_gt_obj_list)
+        gt_boxes3d = kitti_utils.objs_to_boxes3d(gt_obj_list)
+
+        gt_alpha = np.zeros((gt_obj_list.__len__()), dtype = np.float32)
+        for k, obj in enumerate(gt_obj_list):
+            gt_alpha[k] = obj.alpha
+
+        # data augmentation
+        aug_pts_rect = ret_pts_rect.copy()
+        aug_gt_boxes3d = gt_boxes3d.copy()
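+        # The augmentation below operates on these copies, so the raw points and
+        # labels stay intact; rotation, scaling and flip are applied jointly to
+        # the point cloud and the boxes inside data_augmentation().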
+        if cfg.AUG_DATA and self.mode == 'TRAIN':
+            aug_pts_rect, aug_gt_boxes3d, aug_method = self.data_augmentation(aug_pts_rect, aug_gt_boxes3d, gt_alpha,
+                                                                              sample_id)
+            sample_info['aug_method'] = aug_method
+
+        # prepare input
+        if cfg.RPN.USE_INTENSITY:
+            pts_input = np.concatenate((aug_pts_rect, ret_pts_features), axis = 1)  # (N, C)
+        else:
+            pts_input = aug_pts_rect
+
+        if cfg.RPN.FIXED:
+            sample_info['pts_input'] = pts_input
+            sample_info['pts_rect'] = aug_pts_rect
+            sample_info['pts_features'] = ret_pts_features
+            sample_info['gt_boxes3d'] = aug_gt_boxes3d
+            return sample_info
+
+        # generate training labels
+        rpn_cls_label, rpn_reg_label = self.generate_rpn_training_labels(aug_pts_rect, aug_gt_boxes3d)
+        sample_info['pts_input'] = pts_input
+        sample_info['pts_rect'] = aug_pts_rect
+        sample_info['pts_features'] = ret_pts_features
+        sample_info['rpn_cls_label'] = rpn_cls_label
+        sample_info['rpn_reg_label'] = rpn_reg_label
+        sample_info['gt_boxes3d'] = aug_gt_boxes3d
+        if cfg.USE_IMAGE_LOSS:
+            sample_info['img_seg_mask'] = img_seg_mask
+        # if cfg.USE_IMAGE_LOSS:
+        #     rpn_image_seg_label = np.zeros([384, 1280, 1], dtype=np.float32)
+        #     rpn_image_seg_label[ret_pts_origin_xy[:, 1].astype(int), ret_pts_origin_xy[:, 0].astype(int), 0] = rpn_cls_label
+        #     sample_info['rpn_image_seg_label'] = rpn_image_seg_label
+        return sample_info
+
+    def get_rpn_sample(self, index):
+        sample_id = int(self.sample_id_list[index])
+        if sample_id < 10000:
+            calib = self.get_calib(sample_id)
+            img = self.get_image_rgb_with_normal(sample_id)
+            img_shape = self.get_image_shape(sample_id)
+            pts_lidar = self.get_lidar(sample_id)
+
+            # get valid point (projected points should be in image)
+            pts_rect = calib.lidar_to_rect(pts_lidar[:, 0:3])
+            pts_intensity = pts_lidar[:, 3]
+        else:
+            calib = self.get_calib(sample_id % 10000)
+            img = self.get_image_rgb_with_normal(sample_id % 10000)
+            img_shape = self.get_image_shape(sample_id % 10000)
+
+            pts_file = os.path.join(self.aug_pts_dir, '%06d.bin' % sample_id)
+            assert os.path.exists(pts_file), '%s' % pts_file
+            aug_pts = np.fromfile(pts_file, dtype = np.float32).reshape(-1, 4)
+            pts_rect, pts_intensity = aug_pts[:, 0:3], aug_pts[:, 3]
+
+        pts_img, pts_rect_depth = calib.rect_to_img(pts_rect)
+        pts_valid_flag = self.get_valid_flag(pts_rect, pts_img, pts_rect_depth, img_shape)
+
+        pts_rect = pts_rect[pts_valid_flag][:, 0:3]
+        pts_intensity = pts_intensity[pts_valid_flag]
+        pts_in_img = pts_img[pts_valid_flag]
+
+        # Add RGB information here.
+        shape = self.image_hw_with_padding_np
+        pts_rgb = interpolate_img_by_xy(img, pts_in_img, shape)
+
+        if cfg.GT_AUG_ENABLED and self.mode == 'TRAIN':
+            # print(pts_intensity.shape, pts_rgb.shape)
+            pts_features = np.concatenate([pts_intensity.reshape(-1, 1), pts_rgb], axis = 1)
+            # all labels for checking overlapping
+            all_gt_obj_list = self.filtrate_dc_objects(self.get_label(sample_id))
+            all_gt_boxes3d = kitti_utils.objs_to_boxes3d(all_gt_obj_list)
+
+            gt_aug_flag = False
+            if np.random.rand() < cfg.GT_AUG_APPLY_PROB:
+                # augment one scene
+                gt_aug_flag, pts_rect, pts_features, extra_gt_boxes3d, extra_gt_obj_list = \
+                    self.apply_gt_aug_to_one_scene(sample_id, pts_rect, pts_features, all_gt_boxes3d)
+            pts_intensity, pts_rgb = pts_features[:, 0], pts_features[:, 1:]
+        # generate inputs
+        if self.mode == 'TRAIN' or self.random_select:
+            if self.npoints < len(pts_rect):
+                #########################################
+                if self.npoints > 4096:
+                    pts_depth = pts_rect[:, 2]
+                    pts_near_flag = pts_depth < 40.0
+                    far_idxs_choice = np.where(pts_near_flag == 0)[0]
+                    near_idxs = np.where(pts_near_flag == 1)[0]
+                    near_idxs_choice = np.random.choice(near_idxs, self.npoints - len(far_idxs_choice), replace=False)
+
+                    choice = np.concatenate((near_idxs_choice, far_idxs_choice), axis=0) \
+                        if len(far_idxs_choice) > 0 else near_idxs_choice
+                    np.random.shuffle(choice)
+                else:
+                    all_choice = np.arange(0, len(pts_rect), dtype=np.int32)
+                    choice = np.random.choice(all_choice, self.npoints, replace=False)
+                ######################################### zliu sample 2020.10.20
+            else:
+                choice = np.arange(0, len(pts_rect), dtype = np.int32)
+                if self.npoints > len(pts_rect):
+                    extra_choice = np.random.choice(choice, self.npoints - len(pts_rect), replace = False)
+                    choice = np.concatenate((choice, extra_choice), axis = 0)
+                np.random.shuffle(choice)
+
+            ret_pts_rect = pts_rect[choice, :]
+            ret_pts_intensity = pts_intensity[choice] - 0.5  # translate intensity to [-0.5, 0.5]
+            ret_pts_rgb = pts_rgb[choice, :]
+
+        else:
+            ret_pts_rect = pts_rect
+            ret_pts_intensity = pts_intensity - 0.5
+            ret_pts_rgb = pts_rgb
+
+        pts_features = []
+        if cfg.RPN.USE_INTENSITY:
+            pts_features.append(ret_pts_intensity.reshape(-1, 1))
+        if cfg.RPN.USE_RGB:
+            pts_features.append(ret_pts_rgb)
+        ret_pts_features = None
+        if pts_features.__len__() >= 1:
+            ret_pts_features = np.concatenate(pts_features, axis = 1)
+
+        sample_info = { 'sample_id': sample_id, 'random_select': self.random_select, 'rgb': ret_pts_rgb }
+
+        if self.mode == 'TEST':
+            if ret_pts_features is not None:
+                pts_input = np.concatenate((ret_pts_rect, ret_pts_features), axis = 1)  # (N, C)
+            else:
+                pts_input = ret_pts_rect
+
+            sample_info['pts_input'] = pts_input
+            sample_info['pts_rect'] = ret_pts_rect
+            sample_info['pts_features'] = ret_pts_features
+            return sample_info
+
+        gt_obj_list = self.filtrate_objects(self.get_label(sample_id))
+        if cfg.GT_AUG_ENABLED and self.mode == 'TRAIN' and gt_aug_flag:
+            gt_obj_list.extend(extra_gt_obj_list)
+        gt_boxes3d = kitti_utils.objs_to_boxes3d(gt_obj_list)
+
+        gt_alpha = np.zeros((gt_obj_list.__len__()), dtype = np.float32)
+        for k, obj in enumerate(gt_obj_list):
+            gt_alpha[k] = obj.alpha
+
+        # data augmentation
+        aug_pts_rect = ret_pts_rect.copy()
+        aug_gt_boxes3d = gt_boxes3d.copy()
+        if cfg.AUG_DATA and self.mode == 'TRAIN':
+            aug_pts_rect, aug_gt_boxes3d, aug_method = self.data_augmentation(aug_pts_rect, aug_gt_boxes3d, gt_alpha,
+                                                                              sample_id)
+            sample_info['aug_method'] = aug_method
+
+        # prepare input
+        if cfg.RPN.USE_INTENSITY or cfg.RPN.USE_RGB:
+            pts_input = np.concatenate((aug_pts_rect, ret_pts_features), axis = 1)  # (N, C)
+        else:
+            pts_input = aug_pts_rect
+
+        if cfg.RPN.FIXED:
+            sample_info['pts_input'] = pts_input
+            sample_info['pts_rect'] = aug_pts_rect
+            sample_info['pts_features'] = ret_pts_features
+            sample_info['gt_boxes3d'] = aug_gt_boxes3d
+            return sample_info
+
+        # generate training labels
+        rpn_cls_label, rpn_reg_label = self.generate_rpn_training_labels(aug_pts_rect, aug_gt_boxes3d)
+        sample_info['pts_input'] = pts_input
+        sample_info['pts_rect'] = aug_pts_rect
+        sample_info['pts_features'] = ret_pts_features
+        sample_info['rpn_cls_label'] = rpn_cls_label
+        sample_info['rpn_reg_label'] = rpn_reg_label
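+        # rpn_cls_label: 1 = foreground, 0 = background, -1 = ignored points in
+        # the enlarged-box margin; rpn_reg_label holds per-point regression
+        # targets (dx, dy, dz, h, w, l, ry).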
+        sample_info['gt_boxes3d'] = aug_gt_boxes3d
+        return sample_info
+
+    @staticmethod
+    def generate_rpn_training_labels(pts_rect, gt_boxes3d):
+        cls_label = np.zeros((pts_rect.shape[0]), dtype = np.int32)
+        reg_label = np.zeros((pts_rect.shape[0], 7), dtype = np.float32)  # dx, dy, dz, h, w, l, ry
+        gt_corners = kitti_utils.boxes3d_to_corners3d(gt_boxes3d, rotate = True)
+        extend_gt_boxes3d = kitti_utils.enlarge_box3d(gt_boxes3d, extra_width = 0.2)
+        extend_gt_corners = kitti_utils.boxes3d_to_corners3d(extend_gt_boxes3d, rotate = True)
+        for k in range(gt_boxes3d.shape[0]):
+            box_corners = gt_corners[k]
+            fg_pt_flag = kitti_utils.in_hull(pts_rect, box_corners)
+            fg_pts_rect = pts_rect[fg_pt_flag]
+            cls_label[fg_pt_flag] = 1
+
+            # enlarge the bbox3d, ignore nearby points
+            extend_box_corners = extend_gt_corners[k]
+            fg_enlarge_flag = kitti_utils.in_hull(pts_rect, extend_box_corners)
+            ignore_flag = np.logical_xor(fg_pt_flag, fg_enlarge_flag)
+            cls_label[ignore_flag] = -1
+
+            # offset of object center
+            center3d = gt_boxes3d[k][0:3].copy()  # (x, y, z)
+            center3d[1] -= gt_boxes3d[k][3] / 2
+            reg_label[fg_pt_flag, 0:3] = center3d - fg_pts_rect  # Now y is the true center of 3d box 20180928
+
+            # size and angle encoding
+            reg_label[fg_pt_flag, 3] = gt_boxes3d[k][3]  # h
+            reg_label[fg_pt_flag, 4] = gt_boxes3d[k][4]  # w
+            reg_label[fg_pt_flag, 5] = gt_boxes3d[k][5]  # l
+            reg_label[fg_pt_flag, 6] = gt_boxes3d[k][6]  # ry
+
+        return cls_label, reg_label
+
+    def rotate_box3d_along_y(self, box3d, rot_angle):
+        old_x, old_z, ry = box3d[0], box3d[2], box3d[6]
+        old_beta = np.arctan2(old_z, old_x)
+        alpha = -np.sign(old_beta) * np.pi / 2 + old_beta + ry
+
+        box3d = kitti_utils.rotate_pc_along_y(box3d.reshape(1, 7), rot_angle = rot_angle)[0]
+        new_x, new_z = box3d[0], box3d[2]
+        new_beta = np.arctan2(new_z, new_x)
+        box3d[6] = np.sign(new_beta) * np.pi / 2 + alpha - new_beta
+
+        return box3d
+
+    def apply_gt_aug_to_one_scene(self, sample_id, pts_rect, pts_features_offline, all_gt_boxes3d):
+        """
+        :param pts_rect: (N, 3)
+        :param all_gt_boxes3d: (M2, 7)
+        :return:
+        """
+        assert self.gt_database is not None
+        # extra_gt_num = np.random.randint(10, 15)
+        # try_times = 50
+        if cfg.GT_AUG_RAND_NUM:
+            extra_gt_num = np.random.randint(10, cfg.GT_EXTRA_NUM)
+        else:
+            extra_gt_num = cfg.GT_EXTRA_NUM
+        try_times = 100
+        cnt = 0
+        cur_gt_boxes3d = all_gt_boxes3d.copy()
+        cur_gt_boxes3d[:, 4] += 0.5  # TODO: consider different objects
+        cur_gt_boxes3d[:, 5] += 0.5  # enlarge new added box to avoid too nearby boxes
+        cur_gt_corners = kitti_utils.boxes3d_to_corners3d(cur_gt_boxes3d)
+
+        extra_gt_obj_list = []
+        extra_gt_boxes3d_list = []
+        new_pts_list, new_pts_intensity_list = [], []
+        src_pts_flag = np.ones(pts_rect.shape[0], dtype = np.int32)
+
+        road_plane = self.get_road_plane(sample_id)
+        a, b, c, d = road_plane
+
+        while try_times > 0:
+            if cnt > extra_gt_num:
+                break
+
+            try_times -= 1
+            if cfg.GT_AUG_HARD_RATIO > 0:
+                p = np.random.rand()
+                if p > cfg.GT_AUG_HARD_RATIO:
+                    # use easy sample
+                    rand_idx = np.random.randint(0, len(self.gt_database[0]))
+                    new_gt_dict = self.gt_database[0][rand_idx]
+                else:
+                    # use hard sample
+                    rand_idx = np.random.randint(0, len(self.gt_database[1]))
+                    new_gt_dict = self.gt_database[1][rand_idx]
+            else:
+                rand_idx = np.random.randint(0, self.gt_database.__len__())
+                new_gt_dict = self.gt_database[rand_idx]
+
+            new_gt_box3d = new_gt_dict['gt_box3d'].copy()
+            new_gt_points = new_gt_dict['points'].copy()
+            new_gt_intensity = new_gt_dict['intensity'].copy()
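+            # Road-plane placement (below): given the fitted plane
+            # a*x + b*y + c*z + d = 0, the road height at the box centre (x, z)
+            # is y = (-d - a*x - c*z) / b; the pasted box, its points and its
+            # label are all shifted vertically onto that height.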
new_gt_rgb = new_gt_dict['rgb'].copy() + new_gt_features = np.concatenate([new_gt_intensity.reshape(-1, 1), new_gt_rgb], axis = 1) + + new_gt_obj = new_gt_dict['obj'] + center = new_gt_box3d[0:3] + if cfg.PC_REDUCE_BY_RANGE and (self.check_pc_range(center) is False): + continue + + if new_gt_points.__len__() < 5: # too few points + continue + + # put it on the road plane + cur_height = (-d - a * center[0] - c * center[2]) / b + move_height = new_gt_box3d[1] - cur_height + new_gt_box3d[1] -= move_height + new_gt_points[:, 1] -= move_height + new_gt_obj.pos[1] -= move_height + + new_enlarged_box3d = new_gt_box3d.copy() + new_enlarged_box3d[4] += 0.5 + new_enlarged_box3d[5] += 0.5 # enlarge new added box to avoid too nearby boxes + + cnt += 1 + new_corners = kitti_utils.boxes3d_to_corners3d(new_enlarged_box3d.reshape(1, 7)) + iou3d = kitti_utils.get_iou3d(new_corners, cur_gt_corners) + valid_flag = iou3d.max() < 1e-8 + if not valid_flag: + continue + + enlarged_box3d = new_gt_box3d.copy() + enlarged_box3d[3] += 2 # remove the points above and below the object + + boxes_pts_mask_list = roipool3d_utils.pts_in_boxes3d_cpu( + torch.from_numpy(pts_rect), torch.from_numpy(enlarged_box3d.reshape(1, 7))) + pt_mask_flag = (boxes_pts_mask_list[0].numpy() == 1) + src_pts_flag[pt_mask_flag] = 0 # remove the original points which are inside the new box + + new_pts_list.append(new_gt_points) + new_pts_intensity_list.append(new_gt_features) + cur_gt_boxes3d = np.concatenate((cur_gt_boxes3d, new_enlarged_box3d.reshape(1, 7)), axis = 0) + cur_gt_corners = np.concatenate((cur_gt_corners, new_corners), axis = 0) + extra_gt_boxes3d_list.append(new_gt_box3d.reshape(1, 7)) + extra_gt_obj_list.append(new_gt_obj) + + if new_pts_list.__len__() == 0: + return False, pts_rect, pts_features_offline, None, None + + extra_gt_boxes3d = np.concatenate(extra_gt_boxes3d_list, axis = 0) + # remove original points and add new points + pts_rect = pts_rect[src_pts_flag == 1] + pts_features_offline = pts_features_offline[src_pts_flag == 1] + new_pts_rect = np.concatenate(new_pts_list, axis = 0) + new_pts_intensity = np.concatenate(new_pts_intensity_list, axis = 0) + pts_rect = np.concatenate((pts_rect, new_pts_rect), axis = 0) + pts_features_offline = np.concatenate((pts_features_offline, new_pts_intensity), axis = 0) + + return True, pts_rect, pts_features_offline, extra_gt_boxes3d, extra_gt_obj_list + + def data_augmentation(self, aug_pts_rect, aug_gt_boxes3d, gt_alpha, sample_id = None, mustaug = False, stage = 1): + """ + :param aug_pts_rect: (N, 3) + :param aug_gt_boxes3d: (N, 7) + :param gt_alpha: (N) + :return: + """ + aug_list = cfg.AUG_METHOD_LIST + aug_enable = 1 - np.random.rand(3) + if mustaug is True: + aug_enable[0] = -1 + aug_enable[1] = -1 + aug_method = [] + if 'rotation' in aug_list and aug_enable[0] < cfg.AUG_METHOD_PROB[0]: + angle = np.random.uniform(-np.pi / cfg.AUG_ROT_RANGE, np.pi / cfg.AUG_ROT_RANGE) + aug_pts_rect = kitti_utils.rotate_pc_along_y(aug_pts_rect, rot_angle = angle) + if stage == 1: + # xyz change, hwl unchange + aug_gt_boxes3d = kitti_utils.rotate_pc_along_y(aug_gt_boxes3d, rot_angle = angle) + + # calculate the ry after rotation + x, z = aug_gt_boxes3d[:, 0], aug_gt_boxes3d[:, 2] + beta = np.arctan2(z, x) + new_ry = np.sign(beta) * np.pi / 2 + gt_alpha - beta + aug_gt_boxes3d[:, 6] = new_ry # TODO: not in [-np.pi / 2, np.pi / 2] + elif stage == 2: + # for debug stage-2, this implementation has little float precision difference with the above one + assert aug_gt_boxes3d.shape[0] == 2 + 
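+            # rotate_box3d_along_y rotates the box around the camera Y axis and
+            # recomputes ry so that the observation angle alpha (heading relative
+            # to the viewing ray beta = arctan2(z, x)) is preserved.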
aug_gt_boxes3d[0] = self.rotate_box3d_along_y(aug_gt_boxes3d[0], angle) + aug_gt_boxes3d[1] = self.rotate_box3d_along_y(aug_gt_boxes3d[1], angle) + else: + raise NotImplementedError + + aug_method.append(['rotation', angle]) + + if 'scaling' in aug_list and aug_enable[1] < cfg.AUG_METHOD_PROB[1]: + scale = np.random.uniform(0.95, 1.05) + aug_pts_rect = aug_pts_rect * scale + aug_gt_boxes3d[:, 0:6] = aug_gt_boxes3d[:, 0:6] * scale + aug_method.append(['scaling', scale]) + + if 'flip' in aug_list and aug_enable[2] < cfg.AUG_METHOD_PROB[2]: + # flip horizontal + aug_pts_rect[:, 0] = -aug_pts_rect[:, 0] + aug_gt_boxes3d[:, 0] = -aug_gt_boxes3d[:, 0] + # flip orientation: ry > 0: pi - ry, ry < 0: -pi - ry + if stage == 1: + aug_gt_boxes3d[:, 6] = np.sign(aug_gt_boxes3d[:, 6]) * np.pi - aug_gt_boxes3d[:, 6] + elif stage == 2: + assert aug_gt_boxes3d.shape[0] == 2 + aug_gt_boxes3d[0, 6] = np.sign(aug_gt_boxes3d[0, 6]) * np.pi - aug_gt_boxes3d[0, 6] + aug_gt_boxes3d[1, 6] = np.sign(aug_gt_boxes3d[1, 6]) * np.pi - aug_gt_boxes3d[1, 6] + else: + raise NotImplementedError + + aug_method.append('flip') + + return aug_pts_rect, aug_gt_boxes3d, aug_method + + def get_rcnn_sample_info(self, roi_info): + sample_id, gt_box3d = roi_info['sample_id'], roi_info['gt_box3d'] + rpn_xyz, rpn_features, rpn_intensity, seg_mask = self.rpn_feature_list[sample_id] + + # augmentation original roi by adding noise + roi_box3d = self.aug_roi_by_noise(roi_info) + + # point cloud pooling based on roi_box3d + pooled_boxes3d = kitti_utils.enlarge_box3d(roi_box3d.reshape(1, 7), cfg.RCNN.POOL_EXTRA_WIDTH) + + boxes_pts_mask_list = roipool3d_utils.pts_in_boxes3d_cpu(torch.from_numpy(rpn_xyz), + torch.from_numpy(pooled_boxes3d)) + pt_mask_flag = (boxes_pts_mask_list[0].numpy() == 1) + cur_pts = rpn_xyz[pt_mask_flag].astype(np.float32) + + # data augmentation + aug_pts = cur_pts.copy() + aug_gt_box3d = gt_box3d.copy().astype(np.float32) + aug_roi_box3d = roi_box3d.copy() + if cfg.AUG_DATA and self.mode == 'TRAIN': + # calculate alpha by ry + temp_boxes3d = np.concatenate([aug_roi_box3d.reshape(1, 7), aug_gt_box3d.reshape(1, 7)], axis = 0) + temp_x, temp_z, temp_ry = temp_boxes3d[:, 0], temp_boxes3d[:, 2], temp_boxes3d[:, 6] + temp_beta = np.arctan2(temp_z, temp_x).astype(np.float64) + temp_alpha = -np.sign(temp_beta) * np.pi / 2 + temp_beta + temp_ry + + # data augmentation + aug_pts, aug_boxes3d, aug_method = self.data_augmentation(aug_pts, temp_boxes3d, temp_alpha, mustaug = True, + stage = 2) + aug_roi_box3d, aug_gt_box3d = aug_boxes3d[0], aug_boxes3d[1] + aug_gt_box3d = aug_gt_box3d.astype(gt_box3d.dtype) + + # Pool input points + valid_mask = 1 # whether the input is valid + + if aug_pts.shape[0] == 0: + pts_features = np.zeros((1, 128), dtype = np.float32) + input_channel = 3 + int(cfg.RCNN.USE_INTENSITY) + int(cfg.RCNN.USE_MASK) + int(cfg.RCNN.USE_DEPTH) + pts_input = np.zeros((1, input_channel), dtype = np.float32) + valid_mask = 0 + else: + pts_features = rpn_features[pt_mask_flag].astype(np.float32) + pts_intensity = rpn_intensity[pt_mask_flag].astype(np.float32) + + pts_input_list = [aug_pts, pts_intensity.reshape(-1, 1)] + if cfg.RCNN.USE_INTENSITY: + pts_input_list = [aug_pts, pts_intensity.reshape(-1, 1)] + else: + pts_input_list = [aug_pts] + + if cfg.RCNN.USE_MASK: + if cfg.RCNN.MASK_TYPE == 'seg': + pts_mask = seg_mask[pt_mask_flag].astype(np.float32) + elif cfg.RCNN.MASK_TYPE == 'roi': + pts_mask = roipool3d_utils.pts_in_boxes3d_cpu(torch.from_numpy(aug_pts), + torch.from_numpy(aug_roi_box3d.reshape(1, 7))) + 
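+                # pts_in_boxes3d_cpu returns one 0/1 membership mask per box; the
+                # single ROI's mask is binarized into a float per-point feature.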
pts_mask = (pts_mask[0].numpy() == 1).astype(np.float32) + else: + raise NotImplementedError + + pts_input_list.append(pts_mask.reshape(-1, 1)) + + if cfg.RCNN.USE_DEPTH: + pts_depth = np.linalg.norm(aug_pts, axis = 1, ord = 2) + pts_depth_norm = (pts_depth / 70.0) - 0.5 + pts_input_list.append(pts_depth_norm.reshape(-1, 1)) + + pts_input = np.concatenate(pts_input_list, axis = 1) # (N, C) + + aug_gt_corners = kitti_utils.boxes3d_to_corners3d(aug_gt_box3d.reshape(-1, 7)) + aug_roi_corners = kitti_utils.boxes3d_to_corners3d(aug_roi_box3d.reshape(-1, 7)) + iou3d = kitti_utils.get_iou3d(aug_roi_corners, aug_gt_corners) + cur_iou = iou3d[0][0] + + # regression valid mask + reg_valid_mask = 1 if cur_iou >= cfg.RCNN.REG_FG_THRESH and valid_mask == 1 else 0 + + # classification label + cls_label = 1 if cur_iou > cfg.RCNN.CLS_FG_THRESH else 0 + if cfg.RCNN.CLS_BG_THRESH < cur_iou < cfg.RCNN.CLS_FG_THRESH or valid_mask == 0: + cls_label = -1 + + # canonical transform and sampling + pts_input_ct, gt_box3d_ct = self.canonical_transform(pts_input, aug_roi_box3d, aug_gt_box3d) + pts_input_ct, pts_features = self.rcnn_input_sample(pts_input_ct, pts_features) + + sample_info = { 'sample_id' : sample_id, + 'pts_input' : pts_input_ct, + 'pts_features' : pts_features, + 'cls_label' : cls_label, + 'reg_valid_mask': reg_valid_mask, + 'gt_boxes3d_ct' : gt_box3d_ct, + 'roi_boxes3d' : aug_roi_box3d, + 'roi_size' : aug_roi_box3d[3:6], + 'gt_boxes3d' : aug_gt_box3d } + + return sample_info + + @staticmethod + def canonical_transform(pts_input, roi_box3d, gt_box3d): + roi_ry = roi_box3d[6] % (2 * np.pi) # 0 ~ 2pi + roi_center = roi_box3d[0:3] + # shift to center + pts_input[:, [0, 1, 2]] = pts_input[:, [0, 1, 2]] - roi_center + gt_box3d_ct = np.copy(gt_box3d) + gt_box3d_ct[0:3] = gt_box3d_ct[0:3] - roi_center + # rotate to the direction of head + gt_box3d_ct = kitti_utils.rotate_pc_along_y(gt_box3d_ct.reshape(1, 7), roi_ry).reshape(7) + gt_box3d_ct[6] = gt_box3d_ct[6] - roi_ry + pts_input = kitti_utils.rotate_pc_along_y(pts_input, roi_ry) + + return pts_input, gt_box3d_ct + + @staticmethod + def canonical_transform_batch(pts_input, roi_boxes3d, gt_boxes3d): + """ + :param pts_input: (N, npoints, 3 + C) + :param roi_boxes3d: (N, 7) + :param gt_boxes3d: (N, 7) + :return: + """ + roi_ry = roi_boxes3d[:, 6] % (2 * np.pi) # 0 ~ 2pi + roi_center = roi_boxes3d[:, 0:3] + # shift to center + pts_input[:, :, [0, 1, 2]] = pts_input[:, :, [0, 1, 2]] - roi_center.reshape(-1, 1, 3) + gt_boxes3d_ct = np.copy(gt_boxes3d) + gt_boxes3d_ct[:, 0:3] = gt_boxes3d_ct[:, 0:3] - roi_center + # rotate to the direction of head + gt_boxes3d_ct = kitti_utils.rotate_pc_along_y_torch(torch.from_numpy(gt_boxes3d_ct.reshape(-1, 1, 7)), + torch.from_numpy(roi_ry)).numpy().reshape(-1, 7) + gt_boxes3d_ct[:, 6] = gt_boxes3d_ct[:, 6] - roi_ry + pts_input = kitti_utils.rotate_pc_along_y_torch(torch.from_numpy(pts_input), torch.from_numpy(roi_ry)).numpy() + + return pts_input, gt_boxes3d_ct + + @staticmethod + def rcnn_input_sample(pts_input, pts_features): + choice = np.random.choice(pts_input.shape[0], cfg.RCNN.NUM_POINTS, replace = True) + + if pts_input.shape[0] < cfg.RCNN.NUM_POINTS: + choice[:pts_input.shape[0]] = np.arange(pts_input.shape[0]) + np.random.shuffle(choice) + pts_input = pts_input[choice] + pts_features = pts_features[choice] + + return pts_input, pts_features + + def aug_roi_by_noise(self, roi_info): + """ + add noise to original roi to get aug_box3d + :param roi_info: + :return: + """ + roi_box3d, gt_box3d = roi_info['roi_box3d'], 
roi_info['gt_box3d'] + original_iou = roi_info['iou3d'] + temp_iou = cnt = 0 + pos_thresh = min(cfg.RCNN.REG_FG_THRESH, cfg.RCNN.CLS_FG_THRESH) + gt_corners = kitti_utils.boxes3d_to_corners3d(gt_box3d.reshape(-1, 7)) + aug_box3d = roi_box3d + while temp_iou < pos_thresh and cnt < 10: + if roi_info['type'] == 'gt': + aug_box3d = self.random_aug_box3d(roi_box3d) # GT, must random + else: + if np.random.rand() < 0.2: + aug_box3d = roi_box3d # p=0.2 to keep the original roi box + else: + aug_box3d = self.random_aug_box3d(roi_box3d) + aug_corners = kitti_utils.boxes3d_to_corners3d(aug_box3d.reshape(-1, 7)) + iou3d = kitti_utils.get_iou3d(aug_corners, gt_corners) + temp_iou = iou3d[0][0] + cnt += 1 + if original_iou < pos_thresh: # original bg, break + break + return aug_box3d + + @staticmethod + def random_aug_box3d(box3d): + """ + :param box3d: (7) [x, y, z, h, w, l, ry] + random shift, scale, orientation + """ + if cfg.RCNN.REG_AUG_METHOD == 'single': + pos_shift = (np.random.rand(3) - 0.5) # [-0.5 ~ 0.5] + hwl_scale = (np.random.rand(3) - 0.5) / (0.5 / 0.15) + 1.0 # + angle_rot = (np.random.rand(1) - 0.5) / (0.5 / (np.pi / 12)) # [-pi/12 ~ pi/12] + + aug_box3d = np.concatenate([box3d[0:3] + pos_shift, box3d[3:6] * hwl_scale, + box3d[6:7] + angle_rot]) + return aug_box3d + elif cfg.RCNN.REG_AUG_METHOD == 'multiple': + # pos_range, hwl_range, angle_range, mean_iou + range_config = [[0.2, 0.1, np.pi / 12, 0.7], + [0.3, 0.15, np.pi / 12, 0.6], + [0.5, 0.15, np.pi / 9, 0.5], + [0.8, 0.15, np.pi / 6, 0.3], + [1.0, 0.15, np.pi / 3, 0.2]] + idx = np.random.randint(len(range_config)) + + pos_shift = ((np.random.rand(3) - 0.5) / 0.5) * range_config[idx][0] + hwl_scale = ((np.random.rand(3) - 0.5) / 0.5) * range_config[idx][1] + 1.0 + angle_rot = ((np.random.rand(1) - 0.5) / 0.5) * range_config[idx][2] + + aug_box3d = np.concatenate([box3d[0:3] + pos_shift, box3d[3:6] * hwl_scale, box3d[6:7] + angle_rot]) + return aug_box3d + elif cfg.RCNN.REG_AUG_METHOD == 'normal': + x_shift = np.random.normal(loc = 0, scale = 0.3) + y_shift = np.random.normal(loc = 0, scale = 0.2) + z_shift = np.random.normal(loc = 0, scale = 0.3) + h_shift = np.random.normal(loc = 0, scale = 0.25) + w_shift = np.random.normal(loc = 0, scale = 0.15) + l_shift = np.random.normal(loc = 0, scale = 0.5) + ry_shift = ((np.random.rand() - 0.5) / 0.5) * np.pi / 12 + + aug_box3d = np.array([box3d[0] + x_shift, box3d[1] + y_shift, box3d[2] + z_shift, box3d[3] + h_shift, + box3d[4] + w_shift, box3d[5] + l_shift, box3d[6] + ry_shift]) + return aug_box3d + else: + raise NotImplementedError + + def get_proposal_from_file(self, index): + sample_id = int(self.image_idx_list[index]) + proposal_file = os.path.join(self.rcnn_eval_roi_dir, '%06d.txt' % sample_id) + roi_obj_list = kitti_utils.get_objects_from_label(proposal_file) + + rpn_xyz, rpn_features, rpn_intensity, seg_mask = self.get_rpn_features(self.rcnn_eval_feature_dir, sample_id) + pts_rect, pts_rpn_features, pts_intensity = rpn_xyz, rpn_features, rpn_intensity + + roi_box3d_list, roi_scores = [], [] + for obj in roi_obj_list: + box3d = np.array([obj.pos[0], obj.pos[1], obj.pos[2], obj.h, obj.w, obj.l, obj.ry], dtype = np.float32) + roi_box3d_list.append(box3d.reshape(1, 7)) + roi_scores.append(obj.score) + + roi_boxes3d = np.concatenate(roi_box3d_list, axis = 0) # (N, 7) + roi_scores = np.array(roi_scores, dtype = np.float32) # (N) + + if cfg.RCNN.ROI_SAMPLE_JIT: + sample_dict = { 'sample_id' : sample_id, + 'rpn_xyz' : rpn_xyz, + 'rpn_features': rpn_features, + 'seg_mask' : seg_mask, + 
'roi_boxes3d' : roi_boxes3d, + 'roi_scores' : roi_scores, + 'pts_depth' : np.linalg.norm(rpn_xyz, ord = 2, axis = 1) } + + if self.mode != 'TEST': + gt_obj_list = self.filtrate_objects(self.get_label(sample_id)) + gt_boxes3d = kitti_utils.objs_to_boxes3d(gt_obj_list) + + roi_corners = kitti_utils.boxes3d_to_corners3d(roi_boxes3d) + gt_corners = kitti_utils.boxes3d_to_corners3d(gt_boxes3d) + iou3d = kitti_utils.get_iou3d(roi_corners, gt_corners) + if gt_boxes3d.shape[0] > 0: + gt_iou = iou3d.max(axis = 1) + else: + gt_iou = np.zeros(roi_boxes3d.shape[0]).astype(np.float32) + + sample_dict['gt_boxes3d'] = gt_boxes3d + sample_dict['gt_iou'] = gt_iou + return sample_dict + + if cfg.RCNN.USE_INTENSITY: + pts_extra_input_list = [pts_intensity.reshape(-1, 1), seg_mask.reshape(-1, 1)] + else: + pts_extra_input_list = [seg_mask.reshape(-1, 1)] + + if cfg.RCNN.USE_DEPTH: + cur_depth = np.linalg.norm(pts_rect, axis = 1, ord = 2) + cur_depth_norm = (cur_depth / 70.0) - 0.5 + pts_extra_input_list.append(cur_depth_norm.reshape(-1, 1)) + + pts_extra_input = np.concatenate(pts_extra_input_list, axis = 1) + pts_input, pts_features = roipool3d_utils.roipool3d_cpu(roi_boxes3d, pts_rect, pts_rpn_features, + pts_extra_input, cfg.RCNN.POOL_EXTRA_WIDTH, + sampled_pt_num = cfg.RCNN.NUM_POINTS) + + sample_dict = { 'sample_id' : sample_id, + 'pts_input' : pts_input, + 'pts_features': pts_features, + 'roi_boxes3d' : roi_boxes3d, + 'roi_scores' : roi_scores, + 'roi_size' : roi_boxes3d[:, 3:6] } + + if self.mode == 'TEST': + return sample_dict + + gt_obj_list = self.filtrate_objects(self.get_label(sample_id)) + gt_boxes3d = np.zeros((gt_obj_list.__len__(), 7), dtype = np.float32) + + for k, obj in enumerate(gt_obj_list): + gt_boxes3d[k, 0:3], gt_boxes3d[k, 3], gt_boxes3d[k, 4], gt_boxes3d[k, 5], gt_boxes3d[k, 6] \ + = obj.pos, obj.h, obj.w, obj.l, obj.ry + + if gt_boxes3d.__len__() == 0: + gt_iou = np.zeros((roi_boxes3d.shape[0]), dtype = np.float32) + else: + roi_corners = kitti_utils.boxes3d_to_corners3d(roi_boxes3d) + gt_corners = kitti_utils.boxes3d_to_corners3d(gt_boxes3d) + iou3d = kitti_utils.get_iou3d(roi_corners, gt_corners) + gt_iou = iou3d.max(axis = 1) + sample_dict['gt_boxes3d'] = gt_boxes3d + sample_dict['gt_iou'] = gt_iou + + return sample_dict + + def get_rcnn_training_sample_batch(self, index): + sample_id = int(self.sample_id_list[index]) + rpn_xyz, rpn_features, rpn_intensity, seg_mask = \ + self.get_rpn_features(self.rcnn_training_feature_dir, sample_id) + + # load rois and gt_boxes3d for this sample + roi_file = os.path.join(self.rcnn_training_roi_dir, '%06d.txt' % sample_id) + roi_obj_list = kitti_utils.get_objects_from_label(roi_file) + roi_boxes3d = kitti_utils.objs_to_boxes3d(roi_obj_list) + # roi_scores = kitti_utils.objs_to_scores(roi_obj_list) + + gt_obj_list = self.filtrate_objects(self.get_label(sample_id)) + gt_boxes3d = kitti_utils.objs_to_boxes3d(gt_obj_list) + + # calculate original iou + iou3d = kitti_utils.get_iou3d(kitti_utils.boxes3d_to_corners3d(roi_boxes3d), + kitti_utils.boxes3d_to_corners3d(gt_boxes3d)) + max_overlaps, gt_assignment = iou3d.max(axis = 1), iou3d.argmax(axis = 1) + max_iou_of_gt, roi_assignment = iou3d.max(axis = 0), iou3d.argmax(axis = 0) + roi_assignment = roi_assignment[max_iou_of_gt > 0].reshape(-1) + + # sample fg, easy_bg, hard_bg + fg_rois_per_image = int(np.round(cfg.RCNN.FG_RATIO * cfg.RCNN.ROI_PER_IMAGE)) + fg_thresh = min(cfg.RCNN.REG_FG_THRESH, cfg.RCNN.CLS_FG_THRESH) + fg_inds = np.nonzero(max_overlaps >= fg_thresh)[0] + fg_inds = 
np.concatenate((fg_inds, roi_assignment),
+                                 axis = 0)  # consider the roi which has max_overlaps with gt as fg
+
+        easy_bg_inds = np.nonzero((max_overlaps < cfg.RCNN.CLS_BG_THRESH_LO))[0]
+        hard_bg_inds = np.nonzero((max_overlaps < cfg.RCNN.CLS_BG_THRESH) &
+                                  (max_overlaps >= cfg.RCNN.CLS_BG_THRESH_LO))[0]
+
+        fg_num_rois = fg_inds.size
+        bg_num_rois = hard_bg_inds.size + easy_bg_inds.size
+
+        if fg_num_rois > 0 and bg_num_rois > 0:
+            # sampling fg
+            fg_rois_per_this_image = min(fg_rois_per_image, fg_num_rois)
+            rand_num = np.random.permutation(fg_num_rois)
+            fg_inds = fg_inds[rand_num[:fg_rois_per_this_image]]
+
+            # sampling bg
+            bg_rois_per_this_image = cfg.RCNN.ROI_PER_IMAGE - fg_rois_per_this_image
+            bg_inds = self.sample_bg_inds(hard_bg_inds, easy_bg_inds, bg_rois_per_this_image)
+
+        elif fg_num_rois > 0 and bg_num_rois == 0:
+            # sampling fg (keep the index math in numpy: fg_inds and
+            # gt_boxes3d are numpy arrays at this point, so a torch
+            # type_as() round-trip would fail here)
+            rand_num = np.floor(np.random.rand(cfg.RCNN.ROI_PER_IMAGE) * fg_num_rois).astype(np.int64)
+            fg_inds = fg_inds[rand_num]
+            fg_rois_per_this_image = cfg.RCNN.ROI_PER_IMAGE
+            bg_rois_per_this_image = 0
+        elif bg_num_rois > 0 and fg_num_rois == 0:
+            # sampling bg
+            bg_rois_per_this_image = cfg.RCNN.ROI_PER_IMAGE
+            bg_inds = self.sample_bg_inds(hard_bg_inds, easy_bg_inds, bg_rois_per_this_image)
+            fg_rois_per_this_image = 0
+        else:
+            import pdb
+            pdb.set_trace()
+            raise NotImplementedError
+
+        # augment the rois by noise
+        roi_list, roi_iou_list, roi_gt_list = [], [], []
+        if fg_rois_per_this_image > 0:
+            fg_rois_src = roi_boxes3d[fg_inds].copy()
+            gt_of_fg_rois = gt_boxes3d[gt_assignment[fg_inds]]
+            fg_rois, fg_iou3d = self.aug_roi_by_noise_batch(fg_rois_src, gt_of_fg_rois, aug_times = 10)
+            roi_list.append(fg_rois)
+            roi_iou_list.append(fg_iou3d)
+            roi_gt_list.append(gt_of_fg_rois)
+
+        if bg_rois_per_this_image > 0:
+            bg_rois_src = roi_boxes3d[bg_inds].copy()
+            gt_of_bg_rois = gt_boxes3d[gt_assignment[bg_inds]]
+            bg_rois, bg_iou3d = self.aug_roi_by_noise_batch(bg_rois_src, gt_of_bg_rois, aug_times = 1)
+            roi_list.append(bg_rois)
+            roi_iou_list.append(bg_iou3d)
+            roi_gt_list.append(gt_of_bg_rois)
+
+        rois = np.concatenate(roi_list, axis = 0)
+        iou_of_rois = np.concatenate(roi_iou_list, axis = 0)
+        gt_of_rois = np.concatenate(roi_gt_list, axis = 0)
+
+        # collect extra features for point cloud pooling
+        if cfg.RCNN.USE_INTENSITY:
+            pts_extra_input_list = [rpn_intensity.reshape(-1, 1), seg_mask.reshape(-1, 1)]
+        else:
+            pts_extra_input_list = [seg_mask.reshape(-1, 1)]
+
+        if cfg.RCNN.USE_DEPTH:
+            pts_depth = (np.linalg.norm(rpn_xyz, ord = 2, axis = 1) / 70.0) - 0.5
+            pts_extra_input_list.append(pts_depth.reshape(-1, 1))
+        pts_extra_input = np.concatenate(pts_extra_input_list, axis = 1)
+
+        pts_input, pts_features, pts_empty_flag = roipool3d_utils.roipool3d_cpu(rois, rpn_xyz, rpn_features,
+                                                                                pts_extra_input,
+                                                                                cfg.RCNN.POOL_EXTRA_WIDTH,
+                                                                                sampled_pt_num = cfg.RCNN.NUM_POINTS,
+                                                                                canonical_transform = False)
+
+        # data augmentation
+        if cfg.AUG_DATA and self.mode == 'TRAIN':
+            for k in range(rois.__len__()):
+                aug_pts = pts_input[k, :, 0:3].copy()
+                aug_gt_box3d = gt_of_rois[k].copy()
+                aug_roi_box3d = rois[k].copy()
+
+                # calculate alpha by ry
+                temp_boxes3d = np.concatenate([aug_roi_box3d.reshape(1, 7), aug_gt_box3d.reshape(1, 7)], axis = 0)
+                temp_x, temp_z, temp_ry = temp_boxes3d[:, 0], temp_boxes3d[:, 2], temp_boxes3d[:, 6]
+                temp_beta = np.arctan2(temp_z, temp_x).astype(np.float64)
+                temp_alpha = -np.sign(temp_beta) * np.pi / 2 + temp_beta + temp_ry
+
+                # data augmentation
+                aug_pts, aug_boxes3d, 
aug_method = self.data_augmentation(aug_pts, temp_boxes3d, temp_alpha, + mustaug = True, stage = 2) + + # assign to original data + pts_input[k, :, 0:3] = aug_pts + rois[k] = aug_boxes3d[0] + gt_of_rois[k] = aug_boxes3d[1] + + valid_mask = (pts_empty_flag == 0).astype(np.int32) + + # regression valid mask + reg_valid_mask = (iou_of_rois > cfg.RCNN.REG_FG_THRESH).astype(np.int32) & valid_mask + + # classification label + cls_label = (iou_of_rois > cfg.RCNN.CLS_FG_THRESH).astype(np.int32) + invalid_mask = (iou_of_rois > cfg.RCNN.CLS_BG_THRESH) & (iou_of_rois < cfg.RCNN.CLS_FG_THRESH) + cls_label[invalid_mask] = -1 + cls_label[valid_mask == 0] = -1 + + # canonical transform and sampling + pts_input_ct, gt_boxes3d_ct = self.canonical_transform_batch(pts_input, rois, gt_of_rois) + + sample_info = { 'sample_id' : sample_id, + 'pts_input' : pts_input_ct, + 'pts_features' : pts_features, + 'cls_label' : cls_label, + 'reg_valid_mask': reg_valid_mask, + 'gt_boxes3d_ct' : gt_boxes3d_ct, + 'roi_boxes3d' : rois, + 'roi_size' : rois[:, 3:6], + 'gt_boxes3d' : gt_of_rois } + + return sample_info + + def sample_bg_inds(self, hard_bg_inds, easy_bg_inds, bg_rois_per_this_image): + if hard_bg_inds.size > 0 and easy_bg_inds.size > 0: + hard_bg_rois_num = int(bg_rois_per_this_image * cfg.RCNN.HARD_BG_RATIO) + easy_bg_rois_num = bg_rois_per_this_image - hard_bg_rois_num + + # sampling hard bg + rand_num = np.floor(np.random.rand(hard_bg_rois_num) * hard_bg_inds.size).astype(np.int32) + hard_bg_inds = hard_bg_inds[rand_num] + # sampling easy bg + rand_num = np.floor(np.random.rand(easy_bg_rois_num) * easy_bg_inds.size).astype(np.int32) + easy_bg_inds = easy_bg_inds[rand_num] + + bg_inds = np.concatenate([hard_bg_inds, easy_bg_inds], axis = 0) + elif hard_bg_inds.size > 0 and easy_bg_inds.size == 0: + hard_bg_rois_num = bg_rois_per_this_image + # sampling hard bg + rand_num = np.floor(np.random.rand(hard_bg_rois_num) * hard_bg_inds.size).astype(np.int32) + bg_inds = hard_bg_inds[rand_num] + elif hard_bg_inds.size == 0 and easy_bg_inds.size > 0: + easy_bg_rois_num = bg_rois_per_this_image + # sampling easy bg + rand_num = np.floor(np.random.rand(easy_bg_rois_num) * easy_bg_inds.size).astype(np.int32) + bg_inds = easy_bg_inds[rand_num] + else: + raise NotImplementedError + + return bg_inds + + def aug_roi_by_noise_batch(self, roi_boxes3d, gt_boxes3d, aug_times = 10): + """ + :param roi_boxes3d: (N, 7) + :param gt_boxes3d: (N, 7) + :return: + """ + iou_of_rois = np.zeros(roi_boxes3d.shape[0], dtype = np.float32) + for k in range(roi_boxes3d.__len__()): + temp_iou = cnt = 0 + roi_box3d = roi_boxes3d[k] + gt_box3d = gt_boxes3d[k] + pos_thresh = min(cfg.RCNN.REG_FG_THRESH, cfg.RCNN.CLS_FG_THRESH) + gt_corners = kitti_utils.boxes3d_to_corners3d(gt_box3d.reshape(1, 7)) + aug_box3d = roi_box3d + while temp_iou < pos_thresh and cnt < aug_times: + if np.random.rand() < 0.2: + aug_box3d = roi_box3d # p=0.2 to keep the original roi box + else: + aug_box3d = self.random_aug_box3d(roi_box3d) + aug_corners = kitti_utils.boxes3d_to_corners3d(aug_box3d.reshape(1, 7)) + iou3d = kitti_utils.get_iou3d(aug_corners, gt_corners) + temp_iou = iou3d[0][0] + cnt += 1 + roi_boxes3d[k] = aug_box3d + iou_of_rois[k] = temp_iou + return roi_boxes3d, iou_of_rois + + def get_rcnn_sample_jit(self, index): + sample_id = int(self.sample_id_list[index]) + rpn_xyz, rpn_features, rpn_intensity, seg_mask = \ + self.get_rpn_features(self.rcnn_training_feature_dir, sample_id) + + # load rois and gt_boxes3d for this sample + roi_file = 
os.path.join(self.rcnn_training_roi_dir, '%06d.txt' % sample_id) + roi_obj_list = kitti_utils.get_objects_from_label(roi_file) + roi_boxes3d = kitti_utils.objs_to_boxes3d(roi_obj_list) + # roi_scores = kitti_utils.objs_to_scores(roi_obj_list) + + gt_obj_list = self.filtrate_objects(self.get_label(sample_id)) + gt_boxes3d = kitti_utils.objs_to_boxes3d(gt_obj_list) + + sample_info = { 'sample_id' : sample_id, + 'rpn_xyz' : rpn_xyz, + 'rpn_features' : rpn_features, + 'rpn_intensity': rpn_intensity, + 'seg_mask' : seg_mask, + 'roi_boxes3d' : roi_boxes3d, + 'gt_boxes3d' : gt_boxes3d, + 'pts_depth' : np.linalg.norm(rpn_xyz, ord = 2, axis = 1) } + + return sample_info + + def collate_batch(self, batch): + if self.mode != 'TRAIN' and cfg.RCNN.ENABLED and not cfg.RPN.ENABLED: + assert batch.__len__() == 1 + return batch[0] + + batch_size = batch.__len__() + ans_dict = { } + + for key in batch[0].keys(): + if cfg.RPN.ENABLED and key == 'gt_boxes3d' or \ + (cfg.RCNN.ENABLED and cfg.RCNN.ROI_SAMPLE_JIT and key in ['gt_boxes3d', 'roi_boxes3d']): + max_gt = 0 + for k in range(batch_size): + max_gt = max(max_gt, batch[k][key].__len__()) + batch_gt_boxes3d = np.zeros((batch_size, max_gt, 7), dtype = np.float32) + for i in range(batch_size): + batch_gt_boxes3d[i, :batch[i][key].__len__(), :] = batch[i][key] + ans_dict[key] = batch_gt_boxes3d + continue + + if isinstance(batch[0][key], np.ndarray): + if batch_size == 1: + ans_dict[key] = batch[0][key][np.newaxis, ...] + else: + # for kk in range(batch_size): + # print('key', key) + # print('batch[k][key]:', batch[kk][key].shape) + ans_dict[key] = np.concatenate([batch[k][key][np.newaxis, ...] for k in range(batch_size)], + axis = 0) + + else: + ans_dict[key] = [batch[k][key] for k in range(batch_size)] + if isinstance(batch[0][key], int): + ans_dict[key] = np.array(ans_dict[key], dtype = np.int32) + elif isinstance(batch[0][key], float): + ans_dict[key] = np.array(ans_dict[key], dtype = np.float32) + + return ans_dict + + +if __name__ == '__main__': + img = np.array([0, 0, 0, 0, 5, 2.]).reshape(3, 2, 1) + print(img[2, 0]) + + xy = np.array([2., 0.5]).reshape(1, 2) + y = interpolate_img_by_xy(img, xy, np.array([3., 2.])) + print(y) diff --git a/lib/net/cross_entropy_loss.py b/lib/net/cross_entropy_loss.py new file mode 100644 index 0000000..2ffcc53 --- /dev/null +++ b/lib/net/cross_entropy_loss.py @@ -0,0 +1,196 @@ +import torch +import torch.nn as nn +import torch.nn.functional as F + +from .seg_utils import weight_reduce_loss + + +def cross_entropy(pred, + label, + weight=None, + class_weight=None, + reduction='mean', + avg_factor=None, + ignore_index=-100): + """The wrapper function for :func:`F.cross_entropy`""" + # class_weight is a manual rescaling weight given to each class. 
+    # If given, has to be a Tensor of size C.
+    # element-wise losses
+    loss = F.cross_entropy(
+        pred,
+        label,
+        weight=class_weight,
+        reduction='none',
+        ignore_index=ignore_index)
+
+    # apply weights and do the reduction
+    if weight is not None:
+        weight = weight.float()
+    loss = weight_reduce_loss(
+        loss, weight=weight, reduction=reduction, avg_factor=avg_factor)
+
+    return loss
+
+
+def _expand_onehot_labels(labels, label_weights, target_shape, ignore_index):
+    """Expand onehot labels to match the size of prediction."""
+    bin_labels = labels.new_zeros(target_shape)
+    valid_mask = (labels >= 0) & (labels != ignore_index)
+    inds = torch.nonzero(valid_mask, as_tuple=True)
+
+    if inds[0].numel() > 0:
+        if labels.dim() == 3:
+            bin_labels[inds[0], labels[valid_mask], inds[1], inds[2]] = 1
+        else:
+            bin_labels[inds[0], labels[valid_mask]] = 1
+
+    valid_mask = valid_mask.unsqueeze(1).expand(target_shape).float()
+    if label_weights is None:
+        bin_label_weights = valid_mask
+    else:
+        bin_label_weights = label_weights.unsqueeze(1).expand(target_shape)
+        bin_label_weights *= valid_mask
+
+    return bin_labels, bin_label_weights
+
+
+def binary_cross_entropy(pred,
+                         label,
+                         weight=None,
+                         reduction='mean',
+                         avg_factor=None,
+                         class_weight=None,
+                         ignore_index=255):
+    """Calculate the binary CrossEntropy loss.
+
+    Args:
+        pred (torch.Tensor): The prediction with shape (N, 1).
+        label (torch.Tensor): The learning label of the prediction.
+        weight (torch.Tensor, optional): Sample-wise loss weight.
+        reduction (str, optional): The method used to reduce the loss.
+            Options are "none", "mean" and "sum".
+        avg_factor (int, optional): Average factor that is used to average
+            the loss. Defaults to None.
+        class_weight (list[float], optional): The weight for each class.
+        ignore_index (int | None): The label index to be ignored. Default: 255
+
+    Returns:
+        torch.Tensor: The calculated loss
+    """
+    if pred.dim() != label.dim():
+        assert (pred.dim() == 2 and label.dim() == 1) or (
+            pred.dim() == 4 and label.dim() == 3), \
+            'Only pred shape [N, C], label shape [N] or pred shape [N, C, ' \
+            'H, W], label shape [N, H, W] are supported'
+        label, weight = _expand_onehot_labels(label, weight, pred.shape,
+                                              ignore_index)
+
+    # weighted element-wise losses
+    if weight is not None:
+        weight = weight.float()
+    loss = F.binary_cross_entropy_with_logits(
+        pred, label.float(), pos_weight=class_weight, reduction='none')
+    # do the reduction for the weighted loss
+    loss = weight_reduce_loss(
+        loss, weight, reduction=reduction, avg_factor=avg_factor)
+
+    return loss
+
+
+def mask_cross_entropy(pred,
+                       target,
+                       label,
+                       reduction='mean',
+                       avg_factor=None,
+                       class_weight=None,
+                       ignore_index=None):
+    """Calculate the CrossEntropy loss for masks.
+
+    Args:
+        pred (torch.Tensor): The prediction with shape (N, C), C is the number
+            of classes.
+        target (torch.Tensor): The learning label of the prediction.
+        label (torch.Tensor): ``label`` indicates the class label of the mask's
+            corresponding object. It is used to select the mask of the class
+            the object belongs to when the mask prediction is not
+            class-agnostic.
+        reduction (str, optional): The method used to reduce the loss.
+            Options are "none", "mean" and "sum".
+        avg_factor (int, optional): Average factor that is used to average
+            the loss. Defaults to None.
+        class_weight (list[float], optional): The weight for each class.
+        ignore_index (None): Placeholder, to be consistent with other loss.
+            Default: None.
+
+    Returns:
+        torch.Tensor: The calculated loss
+    """
+    assert ignore_index is None, 'BCE loss does not support ignore_index'
+    # TODO: handle these two reserved arguments
+    assert reduction == 'mean' and avg_factor is None
+    num_rois = pred.size()[0]
+    inds = torch.arange(0, num_rois, dtype=torch.long, device=pred.device)
+    pred_slice = pred[inds, label].squeeze(1)
+    return F.binary_cross_entropy_with_logits(
+        pred_slice, target, weight=class_weight, reduction='mean')[None]
+
+
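+# Usage sketch for the point-wise criteria above (toy shapes, assuming only
+# this file's imports): with logits pred of shape (N, C) and integer labels
+# of shape (N,),
+#   pred = torch.randn(4, 3)
+#   label = torch.tensor([0, 2, 1, 2])
+#   loss_softmax = cross_entropy(pred, label)         # mean-reduced scalar
+#   loss_sigmoid = binary_cross_entropy(pred, label)  # labels expanded to
+#                                                     # one-hot internally
+
+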
+class CrossEntropyLoss(nn.Module):
+    """CrossEntropyLoss.
+
+    Args:
+        use_sigmoid (bool, optional): Whether the prediction uses sigmoid
+            instead of softmax. Defaults to False.
+        use_mask (bool, optional): Whether to use mask cross entropy loss.
+            Defaults to False.
+        reduction (str, optional): The method used to reduce the loss.
+            Options are "none", "mean" and "sum". Defaults to 'mean'.
+        class_weight (list[float], optional): Weight of each class.
+            Defaults to None.
+        loss_weight (float, optional): Weight of the loss. Defaults to 1.0.
+    """
+
+    def __init__(self,
+                 use_sigmoid=False,
+                 use_mask=False,
+                 reduction='mean',
+                 class_weight=None,
+                 loss_weight=1.0):
+        super(CrossEntropyLoss, self).__init__()
+        assert (use_sigmoid is False) or (use_mask is False)
+        self.use_sigmoid = use_sigmoid
+        self.use_mask = use_mask
+        self.reduction = reduction
+        self.loss_weight = loss_weight
+        self.class_weight = class_weight
+
+        if self.use_sigmoid:
+            self.cls_criterion = binary_cross_entropy
+        elif self.use_mask:
+            self.cls_criterion = mask_cross_entropy
+        else:
+            self.cls_criterion = cross_entropy
+
+    def forward(self,
+                cls_score,
+                label,
+                weight=None,
+                avg_factor=None,
+                reduction_override=None,
+                **kwargs):
+        """Forward function."""
+        assert reduction_override in (None, 'none', 'mean', 'sum')
+        reduction = (
+            reduction_override if reduction_override else self.reduction)
+        if self.class_weight is not None:
+            class_weight = cls_score.new_tensor(self.class_weight)
+        else:
+            class_weight = None
+        loss_cls = self.loss_weight * self.cls_criterion(
+            cls_score,
+            label,
+            weight,
+            class_weight=class_weight,
+            reduction=reduction,
+            avg_factor=avg_factor,
+            **kwargs)
+        return loss_cls
diff --git a/lib/net/dice_loss.py b/lib/net/dice_loss.py
new file mode 100644
index 0000000..d0f3550
--- /dev/null
+++ b/lib/net/dice_loss.py
@@ -0,0 +1,117 @@
+"""Modified from https://github.com/LikeLy-Journey/SegmenTron/blob/master/
+segmentron/solver/loss.py (Apache-2.0 License)"""
+import torch
+import torch.nn as nn
+import torch.nn.functional as F
+
+from .seg_utils import weighted_loss
+
+
+@weighted_loss
+def dice_loss(pred,
+              target,
+              valid_mask,
+              smooth=1,
+              exponent=2,
+              class_weight=None,
+              ignore_index=255):
+    assert pred.shape[0] == target.shape[0]
+    total_loss = 0
+    num_classes = pred.shape[1]
+    for i in range(num_classes):
+        if i != ignore_index:
+            dice_loss = binary_dice_loss(
+                pred[:, i],
+                target[..., i],
+                valid_mask=valid_mask,
+                smooth=smooth,
+                exponent=exponent)
+            if class_weight is not None:
+                dice_loss *= class_weight[i]
+            total_loss += dice_loss
+    return total_loss / num_classes
+
+
+@weighted_loss
+def binary_dice_loss(pred, target, valid_mask, smooth=1, exponent=2, **kwargs):
+    assert pred.shape[0] == target.shape[0]
+    pred = pred.reshape(pred.shape[0], -1)
+    target = target.reshape(target.shape[0], -1)
+    valid_mask = valid_mask.reshape(valid_mask.shape[0], -1)
+
+    num = torch.sum(torch.mul(pred, target) * valid_mask, dim=1) * 2 + smooth
+    den = torch.sum(pred.pow(exponent) + target.pow(exponent), dim=1) + smooth
+
+    return 1 - num / den
+
+
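+# Worked sketch of the binary dice term above, with smooth=1, exponent=2:
+# for one sample where pred == target == [1., 0.] and valid_mask == [1, 1],
+#   num = 2 * sum(pred * target * valid_mask) + smooth = 2 * 1 + 1 = 3
+#   den = sum(pred**2 + target**2) + smooth            = (1 + 1) + 1 = 3
+#   loss = 1 - num / den = 0, i.e. perfect overlap gives zero loss.
+
+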
+class DiceLoss(nn.Module):
+    """DiceLoss.
+
+    This loss is proposed in `V-Net: Fully Convolutional Neural Networks for
+    Volumetric Medical Image Segmentation <https://arxiv.org/abs/1606.04797>`_.
+
+    Args:
+        smooth (float): A float number to smooth loss, and avoid NaN error.
+            Default: 1
+        exponent (float): A float number to calculate the denominator
+            value: \\sum{x^exponent} + \\sum{y^exponent}. Default: 2.
+        reduction (str, optional): The method used to reduce the loss. Options
+            are "none", "mean" and "sum". Default: 'mean'.
+        class_weight (list[float], optional): The weight for each class.
+            Default: None.
+        loss_weight (float, optional): Weight of the loss. Default to 1.0.
+        ignore_index (int | None): The label index to be ignored. Default: 255.
+    """
+
+    def __init__(self,
+                 smooth=1,
+                 exponent=2,
+                 reduction='mean',
+                 class_weight=None,
+                 loss_weight=1.0,
+                 ignore_index=255,
+                 **kwargs):
+        super(DiceLoss, self).__init__()
+        self.smooth = smooth
+        self.exponent = exponent
+        self.reduction = reduction
+        self.class_weight = class_weight
+        self.loss_weight = loss_weight
+        self.ignore_index = ignore_index
+
+    def forward(self,
+                pred,
+                target,
+                avg_factor=None,
+                reduction_override=None,
+                **kwargs):
+        assert reduction_override in (None, 'none', 'mean', 'sum')
+        reduction = (
+            reduction_override if reduction_override else self.reduction)
+        if self.class_weight is not None:
+            class_weight = pred.new_tensor(self.class_weight)
+        else:
+            class_weight = None
+
+        pred = F.softmax(pred, dim=1)
+        num_classes = pred.shape[1]
+        one_hot_target = F.one_hot(
+            torch.clamp(target.long(), 0, num_classes - 1),
+            num_classes=num_classes)
+        valid_mask = (target != self.ignore_index).long()
+
+        loss = self.loss_weight * dice_loss(
+            pred,
+            one_hot_target,
+            valid_mask=valid_mask,
+            reduction=reduction,
+            avg_factor=avg_factor,
+            smooth=self.smooth,
+            exponent=self.exponent,
+            class_weight=class_weight,
+            ignore_index=self.ignore_index)
+        return loss
diff --git a/lib/net/lovasz_loss.py b/lib/net/lovasz_loss.py
new file mode 100644
index 0000000..e6b9381
--- /dev/null
+++ b/lib/net/lovasz_loss.py
@@ -0,0 +1,300 @@
+"""Modified from https://github.com/bermanmaxim/LovaszSoftmax/blob/master/pytor
+ch/lovasz_losses.py Lovasz-Softmax and Jaccard hinge loss in PyTorch Maxim
+Berman 2018 ESAT-PSI KU Leuven (MIT License)"""
+
+import torch
+import torch.nn as nn
+import torch.nn.functional as F
+
+from .seg_utils import weight_reduce_loss
+
+
+def lovasz_grad(gt_sorted):
+    """Computes gradient of the Lovasz extension w.r.t sorted errors.
+
+    See Alg. 1 in paper.
+    """
+    p = len(gt_sorted)
+    gts = gt_sorted.sum()
+    intersection = gts - gt_sorted.float().cumsum(0)
+    union = gts + (1 - gt_sorted).float().cumsum(0)
+    jaccard = 1. 
- intersection / union + if p > 1: # cover 1-pixel case + jaccard[1:p] = jaccard[1:p] - jaccard[0:-1] + return jaccard + + +def flatten_binary_logits(logits, labels, ignore_index=None): + """Flattens predictions in the batch (binary case) Remove labels equal to + 'ignore_index'.""" + logits = logits.view(-1) + labels = labels.view(-1) + if ignore_index is None: + return logits, labels + valid = (labels != ignore_index) + vlogits = logits[valid] + vlabels = labels[valid] + return vlogits, vlabels + + +def flatten_probs(probs, labels, ignore_index=None): + """Flattens predictions in the batch.""" + if probs.dim() == 3: + # assumes output of a sigmoid layer + B, H, W = probs.size() + probs = probs.view(B, 1, H, W) + B, C, H, W = probs.size() + probs = probs.permute(0, 2, 3, 1).contiguous().view(-1, C) # B*H*W, C=P,C + labels = labels.view(-1) + if ignore_index is None: + return probs, labels + valid = (labels != ignore_index) + vprobs = probs[valid.nonzero().squeeze()] + vlabels = labels[valid] + return vprobs, vlabels + + +def lovasz_hinge_flat(logits, labels): + """Binary Lovasz hinge loss. + + Args: + logits (torch.Tensor): [P], logits at each prediction + (between -infty and +infty). + labels (torch.Tensor): [P], binary ground truth labels (0 or 1). + + Returns: + torch.Tensor: The calculated loss. + """ + if len(labels) == 0: + # only void pixels, the gradients should be 0 + return logits.sum() * 0. + signs = 2. * labels.float() - 1. + errors = (1. - logits * signs) + errors_sorted, perm = torch.sort(errors, dim=0, descending=True) + perm = perm.data + gt_sorted = labels[perm] + grad = lovasz_grad(gt_sorted) + loss = torch.dot(F.relu(errors_sorted), grad) + return loss + + +def lovasz_hinge(logits, + labels, + classes='present', + per_image=False, + class_weight=None, + reduction='mean', + avg_factor=None, + ignore_index=255): + """Binary Lovasz hinge loss. + + Args: + logits (torch.Tensor): [B, H, W], logits at each pixel + (between -infty and +infty). + labels (torch.Tensor): [B, H, W], binary ground truth masks (0 or 1). + classes (str | list[int], optional): Placeholder, to be consistent with + other loss. Default: None. + per_image (bool, optional): If per_image is True, compute the loss per + image instead of per batch. Default: False. + class_weight (list[float], optional): Placeholder, to be consistent + with other loss. Default: None. + reduction (str, optional): The method used to reduce the loss. Options + are "none", "mean" and "sum". This parameter only works when + per_image is True. Default: 'mean'. + avg_factor (int, optional): Average factor that is used to average + the loss. This parameter only works when per_image is True. + Default: None. + ignore_index (int | None): The label index to be ignored. Default: 255. + + Returns: + torch.Tensor: The calculated loss. + """ + if per_image: + loss = [ + lovasz_hinge_flat(*flatten_binary_logits( + logit.unsqueeze(0), label.unsqueeze(0), ignore_index)) + for logit, label in zip(logits, labels) + ] + loss = weight_reduce_loss( + torch.stack(loss), None, reduction, avg_factor) + else: + loss = lovasz_hinge_flat( + *flatten_binary_logits(logits, labels, ignore_index)) + return loss + + +def lovasz_softmax_flat(probs, labels, classes='present', class_weight=None): + """Multi-class Lovasz-Softmax loss. + + Args: + probs (torch.Tensor): [P, C], class probabilities at each prediction + (between 0 and 1). + labels (torch.Tensor): [P], ground truth labels (between 0 and C - 1). 
+ classes (str | list[int], optional): Classes chosen to calculate loss. + 'all' for all classes, 'present' for classes present in labels, or + a list of classes to average. Default: 'present'. + class_weight (list[float], optional): The weight for each class. + Default: None. + + Returns: + torch.Tensor: The calculated loss. + """ + if probs.numel() == 0: + # only void pixels, the gradients should be 0 + return probs * 0. + C = probs.size(1) + losses = [] + class_to_sum = list(range(C)) if classes in ['all', 'present'] else classes + for c in class_to_sum: + fg = (labels == c).float() # foreground for class c + if (classes == 'present' and fg.sum() == 0): + continue + if C == 1: + if len(classes) > 1: + raise ValueError('Sigmoid output possible only with 1 class') + class_pred = probs[:, 0] + else: + class_pred = probs[:, c] + errors = (fg - class_pred).abs() + errors_sorted, perm = torch.sort(errors, 0, descending=True) + perm = perm.data + fg_sorted = fg[perm] + loss = torch.dot(errors_sorted, lovasz_grad(fg_sorted)) + if class_weight is not None: + loss *= class_weight[c] + losses.append(loss) + return torch.stack(losses).mean() + + +def lovasz_softmax(probs, + labels, + classes='present', + per_image=False, + class_weight=None, + reduction='mean', + avg_factor=None, + ignore_index=255): + """Multi-class Lovasz-Softmax loss. + + Args: + probs (torch.Tensor): [B, C, H, W], class probabilities at each + prediction (between 0 and 1). + labels (torch.Tensor): [B, H, W], ground truth labels (between 0 and + C - 1). + classes (str | list[int], optional): Classes chosen to calculate loss. + 'all' for all classes, 'present' for classes present in labels, or + a list of classes to average. Default: 'present'. + per_image (bool, optional): If per_image is True, compute the loss per + image instead of per batch. Default: False. + class_weight (list[float], optional): The weight for each class. + Default: None. + reduction (str, optional): The method used to reduce the loss. Options + are "none", "mean" and "sum". This parameter only works when + per_image is True. Default: 'mean'. + avg_factor (int, optional): Average factor that is used to average + the loss. This parameter only works when per_image is True. + Default: None. + ignore_index (int | None): The label index to be ignored. Default: 255. + + Returns: + torch.Tensor: The calculated loss. + """ + + if per_image: + loss = [ + lovasz_softmax_flat( + *flatten_probs( + prob.unsqueeze(0), label.unsqueeze(0), ignore_index), + classes=classes, + class_weight=class_weight) + for prob, label in zip(probs, labels) + ] + loss = weight_reduce_loss( + torch.stack(loss), None, reduction, avg_factor) + else: + loss = lovasz_softmax_flat( + *flatten_probs(probs, labels, ignore_index), + classes=classes, + class_weight=class_weight) + return loss + + +class LovaszLoss(nn.Module): + """LovaszLoss. + + This loss is proposed in `The Lovasz-Softmax loss: A tractable surrogate + for the optimization of the intersection-over-union measure in neural + networks `_. + + Args: + loss_type (str, optional): Binary or multi-class loss. + Default: 'multi_class'. Options are "binary" and "multi_class". + classes (str | list[int], optional): Classes chosen to calculate loss. + 'all' for all classes, 'present' for classes present in labels, or + a list of classes to average. Default: 'present'. + per_image (bool, optional): If per_image is True, compute the loss per + image instead of per batch. Default: False. 
+ reduction (str, optional): The method used to reduce the loss. Options + are "none", "mean" and "sum". This parameter only works when + per_image is True. Default: 'mean'. + class_weight (list[float], optional): The weight for each class. + Default: None. + loss_weight (float, optional): Weight of the loss. Defaults to 1.0. + """ + + def __init__(self, + loss_type='multi_class', + classes='present', + per_image=False, + reduction='mean', + class_weight=None, + loss_weight=1.0): + super(LovaszLoss, self).__init__() + assert loss_type in ('binary', 'multi_class'), "loss_type should be \ + 'binary' or 'multi_class'." + + if loss_type == 'binary': + self.cls_criterion = lovasz_hinge + else: + self.cls_criterion = lovasz_softmax + #assert classes in ('all', 'present') or mmcv.is_list_of(classes, int) + if not per_image: + assert reduction == 'none', "reduction should be 'none' when \ + per_image is False." + + self.classes = classes + self.per_image = per_image + self.reduction = reduction + self.loss_weight = loss_weight + self.class_weight = class_weight + + def forward(self, + cls_score, + label, + weight=None, + avg_factor=None, + reduction_override=None, + **kwargs): + """Forward function.""" + assert reduction_override in (None, 'none', 'mean', 'sum') + reduction = ( + reduction_override if reduction_override else self.reduction) + if self.class_weight is not None: + class_weight = cls_score.new_tensor(self.class_weight) + else: + class_weight = None + + # if multi-class loss, transform logits to probs + if self.cls_criterion == lovasz_softmax: + cls_score = F.softmax(cls_score, dim=1) + + loss_cls = self.loss_weight * self.cls_criterion( + cls_score, + label, + self.classes, + self.per_image, + class_weight=class_weight, + reduction=reduction, + avg_factor=avg_factor, + **kwargs) + return loss_cls diff --git a/lib/net/point_rcnn.py b/lib/net/point_rcnn.py new file mode 100644 index 0000000..cafbb52 --- /dev/null +++ b/lib/net/point_rcnn.py @@ -0,0 +1,112 @@ +import torch +import torch.nn as nn +from lib.net.rpn import RPN +from lib.net.rcnn_net import RCNNNet +from lib.config import cfg + +from torch.nn.functional import grid_sample + + +def Feature_Gather(feature_map, xy): + """ + :param xy:(B,N,2) normalize to [-1,1] + :param feature_map:(B,C,H,W) + :return: + """ + + # use grid_sample for this. 
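+    # grid_sample bilinearly interpolates feature_map at each (x, y) location;
+    # the coordinates are assumed to be already normalized to [-1, 1] by the
+    # caller (see the xy normalization in Pointnet2MSG.forward).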
+ # xy(B,N,2)->(B,1,N,2) + xy = xy.unsqueeze(1) + + interpolate_feature = grid_sample(feature_map, xy) # (B,C,1,N) + + return interpolate_feature.squeeze(2) # (B,C,N) + + +class PointRCNN(nn.Module): + def __init__(self, num_classes, use_xyz = True, mode = 'TRAIN'): + super().__init__() + + assert cfg.RPN.ENABLED or cfg.RCNN.ENABLED + + if cfg.RPN.ENABLED: + self.rpn = RPN(use_xyz = use_xyz, mode = mode) + + if cfg.RCNN.ENABLED: + rcnn_input_channels = 128 # channels of rpn features + if cfg.RCNN.BACKBONE == 'pointnet': + self.rcnn_net = RCNNNet(num_classes = num_classes, input_channels = rcnn_input_channels, + use_xyz = use_xyz) + elif cfg.RCNN.BACKBONE == 'pointsift': + pass + else: + raise NotImplementedError + + def forward(self, input_data): + + if cfg.RPN.ENABLED: + output = { } + # rpn inference + with torch.set_grad_enabled((not cfg.RPN.FIXED) and self.training): + if cfg.RPN.FIXED: + self.rpn.eval() + rpn_output = self.rpn(input_data) + + output.update(rpn_output) + backbone_xyz = rpn_output['backbone_xyz'] + backbone_features = rpn_output['backbone_features'] + ####print('##########xyz.shape:', backbone_xyz.shape) + + # rcnn inference + if cfg.RCNN.ENABLED: + with torch.no_grad(): + rpn_cls, rpn_reg = rpn_output['rpn_cls'], rpn_output['rpn_reg'] + + rpn_scores_raw = rpn_cls[:, :, 0] + + if cfg.USE_IMAGE_SCORE: + rpn_point_scores = rpn_scores_raw + rpn_image_scores = Feature_Gather(rpn_output['rpn_image_seg'], rpn_output['l_xy_cor']).squeeze(1) + output['rpn_point_scores'] = rpn_point_scores + output['rpn_image_scores'] = rpn_image_scores + rpn_scores_raw = (rpn_image_scores + rpn_point_scores) + + + rpn_scores_norm = torch.sigmoid(rpn_scores_raw) + seg_mask = (rpn_scores_norm > cfg.RPN.SCORE_THRESH).float() + pts_depth = torch.norm(backbone_xyz, p = 2, dim = 2) + + # proposal layer + rois, roi_scores_raw = self.rpn.proposal_layer(rpn_scores_raw, rpn_reg, backbone_xyz) # (B, M, 7) + + output['rois'] = rois + output['roi_scores_raw'] = roi_scores_raw + output['seg_result'] = seg_mask + + rcnn_input_info = { 'rpn_xyz' : backbone_xyz, + 'rpn_features': backbone_features.permute((0, 2, 1)), + 'seg_mask' : seg_mask, + 'roi_boxes3d' : rois, + 'pts_depth' : pts_depth + } + + if cfg.DEEP_RCNN_FUSION: + rcnn_input_info['img_feature'] = rpn_output['img_feature'] + rcnn_input_info['l_xy_cor'] = rpn_output['l_xy_cor'] + + + if self.training: + rcnn_input_info['gt_boxes3d'] = input_data['gt_boxes3d'] + + rcnn_output = self.rcnn_net(rcnn_input_info) + output.update(rcnn_output) + + elif cfg.RCNN.ENABLED: + output = self.rcnn_net(input_data) + else: + raise NotImplementedError + + return output + +if __name__=='__main__': + pass \ No newline at end of file diff --git a/lib/net/pointnet2_msg.py b/lib/net/pointnet2_msg.py new file mode 100644 index 0000000..16a395a --- /dev/null +++ b/lib/net/pointnet2_msg.py @@ -0,0 +1,413 @@ +import torch +import torch.nn as nn +import torch.nn.functional as F +from pointnet2_lib.pointnet2.pointnet2_modules import PointnetFPModule, PointnetSAModuleMSG +from lib.config import cfg +from torch.nn.functional import grid_sample + +from lib.utils.sample2grid import sample2grid_F,sample2GaussianGrid_F, sample2BilinearGrid_F +from lib.net.self_attention import PointContext3D + + +BatchNorm2d = nn.BatchNorm2d + +def conv3x3(in_planes, out_planes, stride = 1): + """3x3 convolution with padding""" + return nn.Conv2d(in_planes, out_planes, kernel_size = 3, stride = stride, + padding = 1, bias = False) + +def conv1x1(in_planes, out_planes, stride = 1): + """3x3 convolution 
helper reduced to kernel_size = 1 (a 1x1 pointwise convolution, no padding)"""
+    return nn.Conv2d(in_planes, out_planes, kernel_size = 1, stride = stride,
+                     padding = 0, bias = False)
+
+class BasicBlock(nn.Module):
+    def __init__(self, inplanes, outplanes, stride = 1):
+        super(BasicBlock, self).__init__()
+        self.conv1 = conv3x3(inplanes, outplanes, stride)
+        self.bn1 = BatchNorm2d(outplanes)
+        self.relu = nn.ReLU(inplace = True)
+        self.conv2 = conv3x3(outplanes, outplanes, 2*stride)
+
+    def forward(self, x):
+        out = self.conv1(x)
+        out = self.bn1(out)
+        out = self.relu(out)
+        out = self.conv2(out)
+
+        return out
+
+class Fusion_Conv(nn.Module):
+    def __init__(self, inplanes, outplanes):
+
+        super(Fusion_Conv, self).__init__()
+
+        self.conv1 = torch.nn.Conv1d(inplanes, outplanes, 1)
+        self.bn1 = torch.nn.BatchNorm1d(outplanes)
+
+    def forward(self, point_features, img_features):
+        #print(point_features.shape, img_features.shape)
+        fusion_features = torch.cat([point_features, img_features], dim=1)
+        fusion_features = F.relu(self.bn1(self.conv1(fusion_features)))
+
+        return fusion_features
+
+
+class Fusion_Cross_Conv(nn.Module):
+    def __init__(self, inplanes, outplanes):
+
+        super(Fusion_Cross_Conv, self).__init__()
+        self.inplanes = inplanes
+        self.outplanes = outplanes
+        self.conv1 = conv3x3(inplanes, outplanes, stride=1)
+        self.bn1 = BatchNorm2d(outplanes)
+
+    def forward(self, point_features, img_features):
+        fusion_features = torch.cat([point_features, img_features], dim=1)
+        fusion_features = F.relu(self.bn1(self.conv1(fusion_features)))
+
+        return fusion_features
+
+
+class P2IA_Layer(nn.Module):
+    def __init__(self, channels):
+        print('##############ADDITION P2I ATTENTION#########')
+        super(P2IA_Layer, self).__init__()
+        self.ic, self.pc = channels
+        rc = self.ic // 4
+        self.conv1 = nn.Sequential(nn.Conv1d(self.pc, self.pc, 1),
+                                   nn.BatchNorm1d(self.pc),
+                                   nn.ReLU())
+        self.fc1 = nn.Linear(self.ic, rc)
+        self.fc2 = nn.Linear(self.pc, rc)
+        self.fc3 = nn.Linear(rc, 1)
+
+
+    def forward(self, img_feas, point_feas):
+        batch = img_feas.size(0)
+        img_feas_f = img_feas.transpose(1,2).contiguous().view(-1, self.ic)  #BCN->BNC->(BN)C
+        point_feas_f = point_feas.transpose(1,2).contiguous().view(-1, self.pc)  #BCN->BNC->(BN)C'
+        # print(img_feas)
+        ri = self.fc1(img_feas_f)
+        rp = self.fc2(point_feas_f)
+        att = F.sigmoid(self.fc3(F.tanh(ri + rp)))  #BNx1
+        att = att.squeeze(1)
+        att = att.view(batch, 1, -1)  #B1N
+        # print(img_feas.size(), att.size())
+
+        point_feas_new = self.conv1(point_feas)
+        out = point_feas_new * att
+
+        return out
+
+
+class Fusion_Cross_Conv_Gate(nn.Module):
+    def __init__(self, inplanes_I, inplanes_P, outplanes):
+        print('##############USE Fusion_Cross_Conv_Gate(ADD)#########')
+        super(Fusion_Cross_Conv_Gate, self).__init__()
+        self.P2IA_Layer = P2IA_Layer(channels=[inplanes_I, inplanes_P])
+        self.inplanes = inplanes_I + inplanes_P
+        self.outplanes = outplanes
+        self.conv1 = conv3x3(self.inplanes, self.outplanes, stride=1)
+        self.bn1 = BatchNorm2d(self.outplanes)
+
+    def forward(self, point_features, img_features, li_xy_cor, image):
+
+        point_features = self.P2IA_Layer(img_features, point_features)
+
+        project_point2img_feature = grid_sample_reverse(point_features, li_xy_cor, img_shape=image.shape)
+
+        fusion_features = torch.cat([project_point2img_feature, image], dim=1)
+
+        fusion_features = F.relu(self.bn1(self.conv1(fusion_features)))
+
+        return fusion_features
+
+
+class IA_Layer(nn.Module):
+    def __init__(self, channels):
+        super(IA_Layer, self).__init__()
+        self.ic, self.pc = channels
+        rc = self.pc // 4
+        self.conv1 = 
nn.Sequential(nn.Conv1d(self.ic, self.pc, 1), ##### + nn.BatchNorm1d(self.pc), #### + nn.ReLU()) + self.fc1 = nn.Linear(self.ic, rc) + self.fc2 = nn.Linear(self.pc, rc) + self.fc3 = nn.Linear(rc, 1) + + + def forward(self, img_feas, point_feas): + batch = img_feas.size(0) + img_feas_f = img_feas.transpose(1,2).contiguous().view(-1, self.ic) #BCN->BNC->(BN)C + point_feas_f = point_feas.transpose(1,2).contiguous().view(-1, self.pc) #BCN->BNC->(BN)C' + # print(img_feas) + ri = self.fc1(img_feas_f) + rp = self.fc2(point_feas_f) + att = F.sigmoid(self.fc3(F.tanh(ri + rp))) # BNx1 + att = att.squeeze(1) + att = att.view(batch, 1, -1) # B1N + # print(img_feas.size(), att.size()) + + img_feas_new = self.conv1(img_feas) + out = img_feas_new * att + + return out + + +class Atten_Fusion_Conv(nn.Module): + def __init__(self, inplanes_I, inplanes_P, outplanes): + super(Atten_Fusion_Conv, self).__init__() + + self.IA_Layer = IA_Layer(channels = [inplanes_I, inplanes_P]) + self.conv1 = torch.nn.Conv1d(inplanes_P + inplanes_P, outplanes, 1) + self.bn1 = torch.nn.BatchNorm1d(outplanes) + + + def forward(self, point_features, img_features): + img_features = self.IA_Layer(img_features, point_features) + + fusion_features = torch.cat([point_features, img_features], dim=1) + fusion_features = F.relu(self.bn1(self.conv1(fusion_features))) + + return fusion_features + + +def Feature_Gather(feature_map, xy): + """ + :param xy:(B,N,2) normalize to [-1,1] + :param feature_map:(B,C,H,W) + :return: + """ + + # use grid_sample for this. + # xy(B,N,2)->(B,1,N,2) + xy = xy.unsqueeze(1) + + interpolate_feature = grid_sample(feature_map, xy) # (B,C,1,N) + + return interpolate_feature.squeeze(2) # (B,C,N) + + +def grid_sample_reverse(point_feature, xy, img_shape): + + # print('#######point_feature:', point_feature.shape) + # print('#######xy:', xy.shape) + # print('#######size:', size) + size = [i for i in img_shape] + size[1] = point_feature.shape[1] + project_point2img = sample2BilinearGrid_F(point_feature, xy, size) + + return project_point2img + + +def get_model(input_channels = 6, use_xyz = True): + return Pointnet2MSG(input_channels = input_channels, use_xyz = use_xyz) + + +class Pointnet2MSG(nn.Module): + def __init__(self, input_channels = 6, use_xyz = True): + super().__init__() + + self.SA_modules = nn.ModuleList() + channel_in = input_channels + + skip_channel_list = [input_channels] + for k in range(cfg.RPN.SA_CONFIG.NPOINTS.__len__()): + mlps = cfg.RPN.SA_CONFIG.MLPS[k].copy() + channel_out = 0 + for idx in range(mlps.__len__()): + mlps[idx] = [channel_in] + mlps[idx] + channel_out += mlps[idx][-1] + + # if cfg.USE_SELF_ATTENTION: + # channel_out += cfg.RPN.SA_CONFIG.ATTN[k] + + self.SA_modules.append( + PointnetSAModuleMSG( + npoint = cfg.RPN.SA_CONFIG.NPOINTS[k], + radii = cfg.RPN.SA_CONFIG.RADIUS[k], + nsamples = cfg.RPN.SA_CONFIG.NSAMPLE[k], + mlps = mlps, + use_xyz = use_xyz, + bn = cfg.RPN.USE_BN + ) + ) + skip_channel_list.append(channel_out) + channel_in = channel_out + + ################## + if cfg.LI_FUSION.ENABLED: + self.Img_Block = nn.ModuleList() + self.Fusion_Conv = nn.ModuleList() + self.DeConv = nn.ModuleList() + if cfg.CROSS_FUSION: + self.Cross_Fusion = nn.ModuleList() + if cfg.USE_IM_DEPTH: + cfg.LI_FUSION.IMG_CHANNELS[0] = cfg.LI_FUSION.IMG_CHANNELS[0] + 1 + + if cfg.INPUT_CROSS_FUSION: + cfg.LI_FUSION.IMG_CHANNELS[0] = cfg.LI_FUSION.IMG_CHANNELS[0] + 4 + + for i in range(len(cfg.LI_FUSION.IMG_CHANNELS) - 1): + self.Img_Block.append(BasicBlock(cfg.LI_FUSION.IMG_CHANNELS[i], 
cfg.LI_FUSION.IMG_CHANNELS[i+1], stride=1)) + if cfg.LI_FUSION.ADD_Image_Attention: + self.Fusion_Conv.append( + Atten_Fusion_Conv(cfg.LI_FUSION.IMG_CHANNELS[i + 1], cfg.LI_FUSION.POINT_CHANNELS[i], + cfg.LI_FUSION.POINT_CHANNELS[i])) + else: + self.Fusion_Conv.append(Fusion_Conv(cfg.LI_FUSION.IMG_CHANNELS[i + 1] + cfg.LI_FUSION.POINT_CHANNELS[i], + cfg.LI_FUSION.POINT_CHANNELS[i])) + + if cfg.CROSS_FUSION: + if cfg.USE_P2I_GATE: + self.Cross_Fusion.append(Fusion_Cross_Conv_Gate(cfg.LI_FUSION.IMG_CHANNELS[i + 1], cfg.LI_FUSION.POINT_CHANNELS[i], + cfg.LI_FUSION.IMG_CHANNELS[i + 1])) + else: + self.Cross_Fusion.append(Fusion_Cross_Conv(cfg.LI_FUSION.IMG_CHANNELS[i + 1] + cfg.LI_FUSION.POINT_CHANNELS[i], + cfg.LI_FUSION.IMG_CHANNELS[i + 1])) + + self.DeConv.append(nn.ConvTranspose2d(cfg.LI_FUSION.IMG_CHANNELS[i + 1], cfg.LI_FUSION.DeConv_Reduce[i], + kernel_size=cfg.LI_FUSION.DeConv_Kernels[i], + stride=cfg.LI_FUSION.DeConv_Kernels[i])) + + self.image_fusion_conv = nn.Conv2d(sum(cfg.LI_FUSION.DeConv_Reduce), cfg.LI_FUSION.IMG_FEATURES_CHANNEL//4, kernel_size = 1) + self.image_fusion_bn = torch.nn.BatchNorm2d(cfg.LI_FUSION.IMG_FEATURES_CHANNEL//4) + + if cfg.LI_FUSION.ADD_Image_Attention: + self.final_fusion_img_point = Atten_Fusion_Conv(cfg.LI_FUSION.IMG_FEATURES_CHANNEL//4, cfg.LI_FUSION.IMG_FEATURES_CHANNEL, cfg.LI_FUSION.IMG_FEATURES_CHANNEL) + else: + self.final_fusion_img_point = Fusion_Conv(cfg.LI_FUSION.IMG_FEATURES_CHANNEL + cfg.LI_FUSION.IMG_FEATURES_CHANNEL//4, cfg.LI_FUSION.IMG_FEATURES_CHANNEL) + + if cfg.USE_SELF_ATTENTION: ## set as False + # ref: https://github.com/AutoVision-cloud/SA-Det3D/blob/main/src/models/backbones_3d/pointnet2_backbone.py + # point-fsa from cfe + print('##################USE_SELF_ATTENTION!!!!!!!! ') + self.context_conv3 = PointContext3D(cfg.RPN.SA_CONFIG, IN_DIM=cfg.RPN.SA_CONFIG.MLPS[2][0][-1] + cfg.RPN.SA_CONFIG.MLPS[2][1][-1]) + self.context_conv4 = PointContext3D(cfg.RPN.SA_CONFIG, IN_DIM=cfg.RPN.SA_CONFIG.MLPS[3][0][-1] + cfg.RPN.SA_CONFIG.MLPS[3][1][-1]) + self.context_fusion_3 = Fusion_Conv(cfg.RPN.SA_CONFIG.ATTN[2] + cfg.RPN.SA_CONFIG.MLPS[2][0][-1] + cfg.RPN.SA_CONFIG.MLPS[2][1][-1], + cfg.RPN.SA_CONFIG.MLPS[2][0][-1] + cfg.RPN.SA_CONFIG.MLPS[2][1][-1] ) + self.context_fusion_4 = Fusion_Conv(cfg.RPN.SA_CONFIG.ATTN[3] + cfg.RPN.SA_CONFIG.MLPS[3][0][-1] + cfg.RPN.SA_CONFIG.MLPS[3][1][-1], + cfg.RPN.SA_CONFIG.MLPS[3][0][-1] + cfg.RPN.SA_CONFIG.MLPS[3][1][-1]) + + self.FP_modules = nn.ModuleList() + + for k in range(cfg.RPN.FP_MLPS.__len__()): + pre_channel = cfg.RPN.FP_MLPS[k + 1][-1] if k + 1 < len(cfg.RPN.FP_MLPS) else channel_out + self.FP_modules.append( + PointnetFPModule(mlp = [pre_channel + skip_channel_list[k]] + cfg.RPN.FP_MLPS[k]) + ) + #self.Cross_Fusion_Final = Fusion_Cross_Conv(cfg.LI_FUSION.IMG_FEATURES_CHANNEL//4 + cfg.LI_FUSION.IMG_FEATURES_CHANNEL, cfg.LI_FUSION.IMG_FEATURES_CHANNEL//4) + + def _break_up_pc(self, pc): + xyz = pc[..., 0:3].contiguous() + features = ( + pc[..., 3:].transpose(1, 2).contiguous() + if pc.size(-1) > 3 else None + ) + + return xyz, features + + + def forward(self, pointcloud: torch.cuda.FloatTensor, image=None, xy=None): + xyz, features = self._break_up_pc(pointcloud) + + l_xyz, l_features = [xyz], [features] + batch_size = xyz.shape[0] + + if cfg.LI_FUSION.ENABLED: + #### normalize xy to [-1,1] + size_range = [1280.0, 384.0] + + x = xy[:, :, 0] / (size_range[0] - 1.0) * 2.0 - 1.0 + y = xy[:, :, 1] / (size_range[1] - 1.0) * 2.0 - 1.0 + xy = torch.cat([x.unsqueeze(-1), y.unsqueeze(-1)],dim=-1) + l_xy_cor 
= [xy] + img = [image] + + for i in range(len(self.SA_modules)): + li_xyz, li_features, li_index = self.SA_modules[i](l_xyz[i], l_features[i]) + + + if cfg.LI_FUSION.ENABLED: + li_index = li_index.long().unsqueeze(-1).repeat(1,1,2) + li_xy_cor = torch.gather(l_xy_cor[i],1,li_index) + + image = self.Img_Block[i](img[i]) + + if cfg.CROSS_FUSION: + if cfg.USE_P2I_GATE: + first_img_gather_feature = Feature_Gather(image, li_xy_cor) # , scale= 2**(i+1)) + image = self.Cross_Fusion[i](li_features, first_img_gather_feature, li_xy_cor, image) + else: + img_shape = image.shape + project_point2img_feature = grid_sample_reverse(li_features, li_xy_cor, img_shape) + image = self.Cross_Fusion[i](project_point2img_feature, image) + + #print(image.shape) + img_gather_feature = Feature_Gather(image, li_xy_cor) #, scale= 2**(i+1)) + + li_features = self.Fusion_Conv[i](li_features, img_gather_feature) + + if cfg.USE_SELF_ATTENTION: + if i == 2: + # Get context visa self-attention + l_context_3 = self.context_conv3(batch_size, li_features, li_xyz) + # Concatenate + #li_features = torch.cat([li_features, l_context_3], dim=1) + li_features = self.context_fusion_3(li_features, l_context_3) + if i == 3: + # Get context via self-attention + l_context_4 = self.context_conv4(batch_size, li_features, li_xyz) + # Concatenate + #li_features = torch.cat([li_features, l_context_4], dim=1) + li_features = self.context_fusion_4(li_features, l_context_4) + + l_xy_cor.append(li_xy_cor) + img.append(image) + + l_xyz.append(li_xyz) + l_features.append(li_features) + + + for i in range(-1, -(len(self.FP_modules) + 1), -1): + l_features[i - 1] = self.FP_modules[i]( + l_xyz[i - 1], l_xyz[i], l_features[i - 1], l_features[i] + ) + + if cfg.LI_FUSION.ENABLED: + DeConv = [] + for i in range(len(cfg.LI_FUSION.IMG_CHANNELS) - 1): + DeConv.append(self.DeConv[i](img[i + 1])) + de_concat = torch.cat(DeConv,dim=1) + + img_fusion = F.relu(self.image_fusion_bn(self.image_fusion_conv(de_concat))) + img_fusion_gather_feature = Feature_Gather(img_fusion, xy) + l_features[0] = self.final_fusion_img_point(l_features[0], img_fusion_gather_feature) + + if cfg.LI_FUSION.ENABLED: + return l_xyz[0], l_features[0], img_fusion, l_xy_cor[0] + else: + return l_xyz[0], l_features[0], None, None + +class Pointnet2MSG_returnMiddleStages(Pointnet2MSG): + def __init__(self, input_channels = 6, use_xyz = True): + super().__init__(input_channels, use_xyz) + + def forward(self, pointcloud: torch.cuda.FloatTensor): + xyz, features = self._break_up_pc(pointcloud) + + l_xyz, l_features = [xyz], [features] + idxs = [] + for i in range(len(self.SA_modules)): + li_xyz, li_features, idx = self.SA_modules[i](l_xyz[i], l_features[i]) + l_xyz.append(li_xyz) + l_features.append(li_features) + idxs.append(idx) + + for i in range(-1, -(len(self.FP_modules) + 1), -1): + l_features[i - 1] = self.FP_modules[i]( + l_xyz[i - 1], l_xyz[i], l_features[i - 1], l_features[i] + ) + + return l_xyz, l_features, idxs diff --git a/lib/net/rcnn_net.py b/lib/net/rcnn_net.py new file mode 100644 index 0000000..d0addff --- /dev/null +++ b/lib/net/rcnn_net.py @@ -0,0 +1,443 @@ +import torch +import torch.nn as nn +import torch.nn.functional as F +from pointnet2_lib.pointnet2.pointnet2_modules import PointnetSAModule +from lib.rpn.proposal_target_layer import ProposalTargetLayer +import pointnet2_lib.pointnet2.pytorch_utils as pt_utils +import lib.utils.loss_utils as loss_utils +from lib.config import cfg + +import lib.utils.kitti_utils as kitti_utils +import lib.utils.roipool3d.roipool3d_utils 
as roipool3d_utils
+from torch.nn.functional import grid_sample
+from lib.utils.sample2grid import sample2grid_F,sample2GaussianGrid_F, sample2BilinearGrid_F
+
+
+BatchNorm2d = nn.BatchNorm2d
+def conv3x3(in_planes, out_planes, stride = 1):
+    """3x3 convolution with padding"""
+    return nn.Conv2d(in_planes, out_planes, kernel_size = 3, stride = stride,
+                     padding = 1, bias = False)
+
+
+class BasicBlock(nn.Module):
+    def __init__(self, inplanes, outplanes, stride = 1):
+        super(BasicBlock, self).__init__()
+        self.conv1 = conv3x3(inplanes, inplanes//2, 1)
+        self.bn1 = BatchNorm2d(inplanes//2)
+        self.relu = nn.ReLU(inplace = True)
+        self.conv2 = conv3x3(inplanes//2, outplanes, stride)
+
+    def forward(self, x):
+
+        out = self.conv1(x)
+        out = self.bn1(out)
+        out = self.relu(out)
+
+        out = self.conv2(out)
+
+        return out
+
+def Feature_Gather(feature_map, xy):
+    """
+    :param xy: (B,N,2) sampling locations, normalized to [-1,1]
+    :param feature_map: (B,C,H,W)
+    :return: gathered features of shape (B,C,N)
+    """
+    # use grid_sample for this.
+    # xy(B,N,2)->(B,1,N,2)
+    xy = xy.unsqueeze(1)
+
+    interpolate_feature = grid_sample(feature_map, xy)  # (B,C,1,N)
+
+    return interpolate_feature.squeeze(2)  # (B,C,N)
+
+class Fusion_Conv(nn.Module):
+    def __init__(self, inplanes, outplanes):
+
+        super(Fusion_Conv, self).__init__()
+
+        self.conv1 = torch.nn.Conv1d(inplanes, outplanes, 1)
+        self.bn1 = torch.nn.BatchNorm1d(outplanes)
+
+    def forward(self, point_features, img_features):
+        #print(point_features.shape, img_features.shape)
+        fusion_features = torch.cat([point_features, img_features], dim=1)
+        fusion_features = F.relu(self.bn1(self.conv1(fusion_features)))
+
+        return fusion_features
+
+
+#================addition attention (add)=======================#
+class IA_Layer(nn.Module):
+    def __init__(self, channels):
+        print('##############ADDITION ATTENTION(ADD) RCNN#########')
+        super(IA_Layer, self).__init__()
+        self.ic, self.pc = channels
+        rc = self.pc // 4
+        self.conv1 = nn.Sequential(nn.Conv1d(self.ic, self.pc, 1),
+                                   nn.BatchNorm1d(self.pc),
+                                   nn.ReLU())
+        self.fc1 = nn.Linear(self.ic, rc)
+        self.fc2 = nn.Linear(self.pc, rc)
+        self.fc3 = nn.Linear(rc, 1)
+
+
+    def forward(self, img_feas, point_feas):
+        batch = img_feas.size(0)
+        img_feas_f = img_feas.transpose(1,2).contiguous().view(-1, self.ic)  #BCN->BNC->(BN)C
+        point_feas_f = point_feas.transpose(1,2).contiguous().view(-1, self.pc)  #BCN->BNC->(BN)C'
+        # print(img_feas)
+        # one scalar attention weight per point, from the summed projections
+        ri = self.fc1(img_feas_f)
+        rp = self.fc2(point_feas_f)
+        att = F.sigmoid(self.fc3(F.tanh(ri + rp)))  #BNx1
+        att = att.squeeze(1)
+        att = att.view(batch, 1, -1)  #B1N
+        # print(img_feas.size(), att.size())
+
+        img_feas_new = self.conv1(img_feas)
+        out = img_feas_new * att
+
+        return out
+
+
+class Atten_Fusion_Conv(nn.Module):
+    def __init__(self, inplanes_I, inplanes_P, outplanes, num_points = None):
+        super(Atten_Fusion_Conv, self).__init__()
+
+        self.IA_Layer = IA_Layer(channels = [inplanes_I, inplanes_P])
+        #self.conv1 = torch.nn.Conv1d(inplanes_P, outplanes, 1)
+        self.conv1 = torch.nn.Conv1d(inplanes_P + inplanes_P, outplanes, 1)
+        self.bn1 = torch.nn.BatchNorm1d(outplanes)
+
+
+    def forward(self, point_features, img_features):
+        # print(point_features.shape, img_features.shape)
+
+        img_features = self.IA_Layer(img_features, point_features)
+        #print("img_features:", img_features.shape)
+
+        # fusion_features = img_features + point_features  ### ori
+        # fusion_features = F.relu(self.bn1(self.conv1(fusion_features)))  ### ori
+
+        fusion_features = torch.cat([point_features, img_features], dim=1)  ### new 7.12
+        fusion_features = 
F.relu(self.bn1(self.conv1(fusion_features))) ### new 7.12 + + return fusion_features + + +class Fusion_Cross_Conv(nn.Module): + def __init__(self, inplanes, outplanes): + + super(Fusion_Cross_Conv, self).__init__() + self.inplanes = inplanes + self.outplanes = outplanes + self.conv1 = conv3x3(inplanes, outplanes, stride=1) #torch.nn.Conv1d(inplanes, outplanes, 1) + self.bn1 = BatchNorm2d(outplanes) + print('############## USE RCNN CROSS FUSION!!') + print('############## USE RCNN CROSS FUSION!!') + print('############## USE RCNN CROSS FUSION!!') + #self.conv2 = conv3x3(outplanes, outplanes, stride=1) + + def forward(self, point_features, img_features): + #print(point_features.shape, img_features.shape) + fusion_features = torch.cat([point_features, img_features], dim=1) + + # print('##############fusion_features:', fusion_features.shape) + # print('##############inplanes:', self.inplanes) + # print('##############outplanes:', self.outplanes) + fusion_features = F.relu(self.bn1(self.conv1(fusion_features))) + #fusion_features = self.conv2(fusion_features) + + return fusion_features + +def grid_sample_reverse(point_feature, xy, img_shape): + + # print('#######point_feature:', point_feature.shape) + # print('#######xy:', xy.shape) + # print('#######size:', size) + size = [i for i in img_shape] + size[1] = point_feature.shape[1] + project_point2img = sample2BilinearGrid_F(point_feature, xy, size) + + return project_point2img + + +class RCNNNet(nn.Module): + def __init__(self, num_classes, input_channels=0, use_xyz=True): + super().__init__() + + self.SA_modules = nn.ModuleList() + channel_in = input_channels + + if cfg.RCNN.USE_RPN_FEATURES: + self.rcnn_input_channel = 3 + int(cfg.RCNN.USE_INTENSITY) + int(cfg.RCNN.USE_MASK) + int(cfg.RCNN.USE_DEPTH) + self.xyz_up_layer = pt_utils.SharedMLP([self.rcnn_input_channel] + cfg.RCNN.XYZ_UP_LAYER, + bn=cfg.RCNN.USE_BN) + c_out = cfg.RCNN.XYZ_UP_LAYER[-1] + self.merge_down_layer = pt_utils.SharedMLP([c_out * 2, c_out], bn=cfg.RCNN.USE_BN) + + for k in range(cfg.RCNN.SA_CONFIG.NPOINTS.__len__()): + mlps = [channel_in] + cfg.RCNN.SA_CONFIG.MLPS[k] + + npoint = cfg.RCNN.SA_CONFIG.NPOINTS[k] if cfg.RCNN.SA_CONFIG.NPOINTS[k] != -1 else None + self.SA_modules.append( + PointnetSAModule( + npoint=npoint, + radius=cfg.RCNN.SA_CONFIG.RADIUS[k], + nsample=cfg.RCNN.SA_CONFIG.NSAMPLE[k], + mlp=mlps, + use_xyz=use_xyz, + bn=cfg.RCNN.USE_BN + ) + ) + channel_in = mlps[-1] + + if cfg.DEEP_RCNN_FUSION: + self.Img_Block_RCNN = nn.ModuleList() + IMG_CHANNEL = int(cfg.RCNN_IMG_CHANNEL // 2) + self.Img_Block_RCNN.append(BasicBlock(32, IMG_CHANNEL, stride=2)) + self.Img_Block_RCNN.append(BasicBlock(IMG_CHANNEL, IMG_CHANNEL*2, stride=2)) + + if cfg.LI_FUSION.ENABLED: + self.Fusion_Conv_RCNN = nn.ModuleList() + self.Fusion_Conv_RCNN.append(Atten_Fusion_Conv(IMG_CHANNEL,128,128)) + self.Fusion_Conv_RCNN.append(Atten_Fusion_Conv(IMG_CHANNEL*2,256,256)) + + else: + self.Fusion_Conv_RCNN = nn.ModuleList() + self.Fusion_Conv_RCNN.append(Fusion_Conv(IMG_CHANNEL+128,128)) + self.Fusion_Conv_RCNN.append(Fusion_Conv(IMG_CHANNEL*2+256,256)) + + if cfg.CROSS_FUSION: + self.Cross_Fusion = nn.ModuleList() + self.Cross_Fusion.append(Fusion_Cross_Conv(IMG_CHANNEL+128, IMG_CHANNEL)) + self.Cross_Fusion.append(Fusion_Cross_Conv(IMG_CHANNEL*2+256, IMG_CHANNEL*2)) + + # classification layer + cls_channel = 1 if num_classes == 2 else num_classes + cls_layers = [] + pre_channel = channel_in + for k in range(0, cfg.RCNN.CLS_FC.__len__()): + cls_layers.append(pt_utils.Conv1d(pre_channel, 
cfg.RCNN.CLS_FC[k], bn=cfg.RCNN.USE_BN)) + pre_channel = cfg.RCNN.CLS_FC[k] + cls_layers.append(pt_utils.Conv1d(pre_channel, cls_channel, activation=None)) + if cfg.RCNN.DP_RATIO >= 0: + cls_layers.insert(1, nn.Dropout(cfg.RCNN.DP_RATIO)) + self.cls_layer = nn.Sequential(*cls_layers) + + if cfg.RCNN.LOSS_CLS == 'SigmoidFocalLoss': + self.cls_loss_func = loss_utils.SigmoidFocalClassificationLoss(alpha=cfg.RCNN.FOCAL_ALPHA[0], + gamma=cfg.RCNN.FOCAL_GAMMA) + elif cfg.RCNN.LOSS_CLS == 'BinaryCrossEntropy': + self.cls_loss_func = F.binary_cross_entropy + elif cfg.RCNN.LOSS_CLS == 'CrossEntropy': + cls_weight = torch.from_numpy(cfg.RCNN.CLS_WEIGHT).float() + self.cls_loss_func = nn.CrossEntropyLoss(ignore_index=-1, reduction='none', weight=cls_weight) # reduction='none' replaces the deprecated reduce=False + else: + raise NotImplementedError + + if cfg.USE_IOU_BRANCH: + iou_branch = [] + iou_branch.append(pt_utils.Conv1d(channel_in, cfg.RCNN.REG_FC[0], bn=cfg.RCNN.USE_BN)) + iou_branch.append(pt_utils.Conv1d(cfg.RCNN.REG_FC[0], cfg.RCNN.REG_FC[1], bn=cfg.RCNN.USE_BN)) + iou_branch.append(pt_utils.Conv1d(cfg.RCNN.REG_FC[1], 1, activation=None)) + if cfg.RCNN.DP_RATIO >= 0: + iou_branch.insert(1, nn.Dropout(cfg.RCNN.DP_RATIO)) + self.iou_branch = nn.Sequential(*iou_branch) + + # regression layer + per_loc_bin_num = int(cfg.RCNN.LOC_SCOPE / cfg.RCNN.LOC_BIN_SIZE) * 2 + loc_y_bin_num = int(cfg.RCNN.LOC_Y_SCOPE / cfg.RCNN.LOC_Y_BIN_SIZE) * 2 + reg_channel = per_loc_bin_num * 4 + cfg.RCNN.NUM_HEAD_BIN * 2 + 3 + reg_channel += (1 if not cfg.RCNN.LOC_Y_BY_BIN else loc_y_bin_num * 2) + + reg_layers = [] + pre_channel = channel_in + for k in range(len(cfg.RCNN.REG_FC)): + reg_layers.append(pt_utils.Conv1d(pre_channel, cfg.RCNN.REG_FC[k], bn=cfg.RCNN.USE_BN)) + pre_channel = cfg.RCNN.REG_FC[k] + reg_layers.append(pt_utils.Conv1d(pre_channel, reg_channel, activation=None)) + if cfg.RCNN.DP_RATIO >= 0: + reg_layers.insert(1, nn.Dropout(cfg.RCNN.DP_RATIO)) + self.reg_layer = nn.Sequential(*reg_layers) + + self.proposal_target_layer = ProposalTargetLayer() + self.init_weights(weight_init='xavier') + + def init_weights(self, weight_init='xavier'): + if weight_init == 'kaiming': + init_func = nn.init.kaiming_normal_ + elif weight_init == 'xavier': + init_func = nn.init.xavier_normal_ + elif weight_init == 'normal': + init_func = nn.init.normal_ + else: + raise NotImplementedError + + for m in self.modules(): + if isinstance(m, nn.Conv2d) or isinstance(m, nn.Conv1d): + if weight_init == 'normal': + init_func(m.weight, mean=0, std=0.001) + else: + init_func(m.weight) + if m.bias is not None: + nn.init.constant_(m.bias, 0) + nn.init.normal_(self.reg_layer[-1].conv.weight, mean=0, std=0.001) + + def _break_up_pc(self, pc): + xyz = pc[..., 0:3].contiguous() + features = ( + pc[..., 3:].transpose(1, 2).contiguous() + if pc.size(-1) > 3 else None + ) + + return xyz, features + + def forward(self, input_data): + """ + :param input_data: input dict + :return: + """ + if cfg.RCNN.ROI_SAMPLE_JIT: + if self.training: + with torch.no_grad(): + target_dict = self.proposal_target_layer(input_data) + + pts_input = torch.cat((target_dict['sampled_pts'], target_dict['pts_feature']), dim=2) + target_dict['pts_input'] = pts_input + if cfg.DEEP_RCNN_FUSION: + sampled_xy_cor = target_dict['sampled_xy_cor'] ## B,N,2 [B, 64, 512, 2] + sampled_xy_cor = sampled_xy_cor.view(sampled_xy_cor.shape[0], + sampled_xy_cor.shape[1] * sampled_xy_cor.shape[2], 2) + else: + rpn_xyz, rpn_features = input_data['rpn_xyz'], input_data['rpn_features'] + batch_rois = input_data['roi_boxes3d'] + + 
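# Sketch of the extra-input layout assembled below (assuming the shipped YAML configs, where USE_INTENSITY is False): + # l_xy_cor (2 normalized image-xy channels, DEEP_RCNN_FUSION only) | seg_mask (1) | pts_depth/70-0.5 (1), + # concatenated with the RPN features and handed to roipool3d_gpu per RoI. + 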
pts_extra_input_list = [] + if cfg.DEEP_RCNN_FUSION: + pts_extra_input_list.append(input_data['l_xy_cor']) ######### + + if cfg.RCNN.USE_INTENSITY: + pts_extra_input_list.extend([input_data['rpn_intensity'].unsqueeze(dim=2), + input_data['seg_mask'].unsqueeze(dim=2)]) # extend, not append: torch.cat below needs a flat list of tensors + else: + pts_extra_input_list.append(input_data['seg_mask'].unsqueeze(dim=2)) + + # if cfg.RCNN.USE_INTENSITY: + # pts_extra_input_list = [input_data['rpn_intensity'].unsqueeze(dim=2), + # input_data['seg_mask'].unsqueeze(dim=2)] + # else: + # pts_extra_input_list = [input_data['seg_mask'].unsqueeze(dim=2)] + + if cfg.RCNN.USE_DEPTH: + pts_depth = input_data['pts_depth'] / 70.0 - 0.5 + pts_extra_input_list.append(pts_depth.unsqueeze(dim=2)) + pts_extra_input = torch.cat(pts_extra_input_list, dim=2) + + pts_feature = torch.cat((pts_extra_input, rpn_features), dim=2) + pooled_features, pooled_empty_flag = \ + roipool3d_utils.roipool3d_gpu(rpn_xyz, pts_feature, batch_rois, cfg.RCNN.POOL_EXTRA_WIDTH, + sampled_pt_num=cfg.RCNN.NUM_POINTS) + + if cfg.DEEP_RCNN_FUSION: + sampled_pts, sampled_xy_cor, sampled_features = \ + pooled_features[:, :, :, 0:3], pooled_features[:, :, :, 3:5], pooled_features[:, :, :, 5:] + sampled_xy_cor = sampled_xy_cor.view(sampled_pts.shape[0], + sampled_pts.shape[1] * sampled_pts.shape[2], 2) + pooled_features = torch.cat((sampled_pts, sampled_features), dim=-1) + + # canonical transformation + batch_size = batch_rois.shape[0] + roi_center = batch_rois[:, :, 0:3] + pooled_features[:, :, :, 0:3] -= roi_center.unsqueeze(dim=2) + for k in range(batch_size): + pooled_features[k, :, :, 0:3] = kitti_utils.rotate_pc_along_y_torch(pooled_features[k, :, :, 0:3], + batch_rois[k, :, 6]) + + pts_input = pooled_features.view(-1, pooled_features.shape[2], pooled_features.shape[3]) + else: + pts_input = input_data['pts_input'] + target_dict = {} + target_dict['pts_input'] = input_data['pts_input'] + target_dict['roi_boxes3d'] = input_data['roi_boxes3d'] + if self.training: + target_dict['cls_label'] = input_data['cls_label'] + target_dict['reg_valid_mask'] = input_data['reg_valid_mask'] + target_dict['gt_of_rois'] = input_data['gt_boxes3d_ct'] + + xyz, features = self._break_up_pc(pts_input) + + if cfg.RCNN.USE_RPN_FEATURES: ## True + xyz_input = pts_input[..., 0:self.rcnn_input_channel].transpose(1, 2).unsqueeze(dim=3) + xyz_feature = self.xyz_up_layer(xyz_input) + + rpn_feature = pts_input[..., self.rcnn_input_channel:].transpose(1, 2).unsqueeze(dim=3) + + merged_feature = torch.cat((xyz_feature, rpn_feature), dim=1) + merged_feature = self.merge_down_layer(merged_feature) + l_xyz, l_features = [xyz], [merged_feature.squeeze(dim=3)] + else: + l_xyz, l_features = [xyz], [features] + + + if cfg.DEEP_RCNN_FUSION: + batch_size = sampled_xy_cor.shape[0] + l_xy_cor = [sampled_xy_cor] ## torch.Size([1, 51200, 2]) + img = [input_data['img_feature']] # [1, 32, 384, 1280] + + + for i in range(len(self.SA_modules)): + li_xyz, li_features, li_index = self.SA_modules[i](l_xyz[i], l_features[i]) + + if cfg.DEEP_RCNN_FUSION and (i < len(self.SA_modules) - 1): ### + if cfg.RCNN.SA_CONFIG.NPOINTS[i] == -1: + #print("####cfg.RCNN.SA_CONFIG.NPOINTS[i]###:", cfg.RCNN.SA_CONFIG.NPOINTS[i]) + #print("#######cfg.RCNN.SA_CONFIG.NPOINTS[i]==-1!!!#########") + NUM_POINTS = 1 + else: + #print("####cfg.RCNN.SA_CONFIG.NPOINTS[i]###:", cfg.RCNN.SA_CONFIG.NPOINTS[i]) + NUM_POINTS = cfg.RCNN.SA_CONFIG.NPOINTS[i] + + # print('\n') + #print("#######USE DEEP_RCNN_FUSION!!!#########i=:", i) + li_index = li_index.view(batch_size, -1) + li_index = 
li_index.long().unsqueeze(-1).repeat(1,1,2) ## [1, 12800, 2] + li_xy_cor = torch.gather(l_xy_cor[i],1,li_index) + image = self.Img_Block_RCNN[i](img[i]) + + if cfg.CROSS_FUSION: + img_shape = image.shape + cross_feat = li_features.clone() + cross_feat = cross_feat.contiguous().view(batch_size, -1, cross_feat.shape[1], NUM_POINTS).permute(0, 2, 1, 3) # (B,ROIS,C,N) + cross_feat = cross_feat.contiguous().view(batch_size, cross_feat.shape[1], -1) + project_point2img_feature = grid_sample_reverse(cross_feat, li_xy_cor, img_shape) + # print('#######project_point2img_feature:', project_point2img_feature.shape) + # print('#######image:', image.shape) + image = self.Cross_Fusion[i](project_point2img_feature, image) + # l_xy_cor_ori.append(li_xy_cor_ori) + + img_gather_feature = Feature_Gather(image, li_xy_cor) + + img_gather_feature = img_gather_feature.contiguous().view(batch_size,image.shape[1], -1, NUM_POINTS).permute(0, 2, 1, 3) # [1, 100, 32, 128] + img_gather_feature = img_gather_feature.contiguous().view(-1, image.shape[1], NUM_POINTS) # [100, 32, 128] + + li_features = self.Fusion_Conv_RCNN[i](li_features, img_gather_feature) ## [100, 128, 128] + l_xy_cor.append(li_xy_cor) ## [1, 12800, 2] + img.append(image) + + l_xyz.append(li_xyz) + l_features.append(li_features) + + rcnn_cls = self.cls_layer(l_features[-1]).transpose(1, 2).contiguous().squeeze(dim=1) # (B, 1 or 2) + rcnn_reg = self.reg_layer(l_features[-1]).transpose(1, 2).contiguous().squeeze(dim=1) # (B, C) + if cfg.USE_IOU_BRANCH: + rcnn_iou_branch = self.iou_branch(l_features[-1]).transpose(1, 2).contiguous().squeeze(dim=1) # (B,1) + ret_dict = {'rcnn_cls': rcnn_cls, 'rcnn_reg': rcnn_reg, 'rcnn_iou_branch': rcnn_iou_branch} + else: + ret_dict = {'rcnn_cls': rcnn_cls, 'rcnn_reg': rcnn_reg} + + if self.training: + ret_dict.update(target_dict) + return ret_dict \ No newline at end of file diff --git a/lib/net/rpn.py b/lib/net/rpn.py new file mode 100644 index 0000000..e630c68 --- /dev/null +++ b/lib/net/rpn.py @@ -0,0 +1,145 @@ +import torch.nn as nn +import torch.nn.functional as F +import numpy as np +from lib.rpn.proposal_layer import ProposalLayer +import pointnet2_lib.pointnet2.pytorch_utils as pt_utils +import lib.utils.loss_utils as loss_utils +from lib.config import cfg +import importlib +from pointnet2_msg import Pointnet2MSG + +from lib.net.cross_entropy_loss import CrossEntropyLoss +from lib.net.lovasz_loss import LovaszLoss + +BatchNorm2d = nn.BatchNorm2d +def conv3x3(in_planes, out_planes, stride = 1): + """3x3 convolution with padding""" + return nn.Conv2d(in_planes, out_planes, kernel_size = 3, stride = stride, + padding = 1, bias = False) + +class Image_Seg(nn.Module): + def __init__(self, inplanes, outplanes, stride = 1): + super(Image_Seg, self).__init__() + self.conv1 = conv3x3(inplanes, inplanes, stride) + self.bn1 = BatchNorm2d(inplanes) + self.relu = nn.ReLU(inplace = True) + self.conv2 = conv3x3(inplanes, outplanes, stride) + + def forward(self, x): + + out = self.conv1(x) + out = self.bn1(out) + out = self.relu(out) + + out = self.conv2(out) + + return out + + +class RPN(nn.Module): + def __init__(self, use_xyz = True, mode = 'TRAIN'): + super().__init__() + self.training_mode = (mode == 'TRAIN') + + # MODEL = importlib.import_module(cfg.RPN.BACKBONE) + # self.backbone_net = MODEL.get_model(input_channels=int(cfg.RPN.USE_INTENSITY), use_xyz=use_xyz) + input_channels = int(cfg.RPN.USE_INTENSITY) + 3 * int(cfg.RPN.USE_RGB) + if cfg.RPN.BACKBONE == 
'pointnet2_msg': + self.backbone_net = Pointnet2MSG(input_channels = input_channels, use_xyz = use_xyz) + # elif cfg.RPN.BACKBONE == 'pointformer': + # self.backbone_net = Pointformer(input_channels =input_channels, use_xyz = use_xyz) + # classification branch + cls_layers = [] + pre_channel = cfg.RPN.FP_MLPS[0][-1] + for k in range(len(cfg.RPN.CLS_FC)): + cls_layers.append(pt_utils.Conv1d(pre_channel, cfg.RPN.CLS_FC[k], bn = cfg.RPN.USE_BN)) + pre_channel = cfg.RPN.CLS_FC[k] + cls_layers.append(pt_utils.Conv1d(pre_channel, 1, activation = None)) + if cfg.RPN.DP_RATIO >= 0: + cls_layers.insert(1, nn.Dropout(cfg.RPN.DP_RATIO)) + self.rpn_cls_layer = nn.Sequential(*cls_layers) + + # regression branch + per_loc_bin_num = int(cfg.RPN.LOC_SCOPE / cfg.RPN.LOC_BIN_SIZE) * 2 + if cfg.RPN.LOC_XZ_FINE: + reg_channel = per_loc_bin_num * 4 + cfg.RPN.NUM_HEAD_BIN * 2 + 3 + else: + reg_channel = per_loc_bin_num * 2 + cfg.RPN.NUM_HEAD_BIN * 2 + 3 + reg_channel += 1 # reg y + + reg_layers = [] + pre_channel = cfg.RPN.FP_MLPS[0][-1] + for k in range(len(cfg.RPN.REG_FC)): + reg_layers.append(pt_utils.Conv1d(pre_channel, cfg.RPN.REG_FC[k], bn = cfg.RPN.USE_BN)) + pre_channel = cfg.RPN.REG_FC[k] + reg_layers.append(pt_utils.Conv1d(pre_channel, reg_channel, activation = None)) + if cfg.RPN.DP_RATIO >= 0: + reg_layers.insert(1, nn.Dropout(cfg.RPN.DP_RATIO)) + self.rpn_reg_layer = nn.Sequential(*reg_layers) + + if cfg.RPN.LOSS_CLS == 'DiceLoss': + self.rpn_cls_loss_func = loss_utils.DiceLoss(ignore_target = -1) + elif cfg.RPN.LOSS_CLS == 'SigmoidFocalLoss': + self.rpn_cls_loss_func = loss_utils.SigmoidFocalClassificationLoss(alpha = cfg.RPN.FOCAL_ALPHA[0], + gamma = cfg.RPN.FOCAL_GAMMA) + elif cfg.RPN.LOSS_CLS == 'BinaryCrossEntropy': + self.rpn_cls_loss_func = F.binary_cross_entropy + else: + raise NotImplementedError + + # image segmentation loss, defined outside the branch above so it exists for every LOSS_CLS + # choice (train_functions.get_rpn_loss fetches rpn_img_seg_loss_func unconditionally) + self.rpn_img_seg_loss_func = CrossEntropyLoss(use_sigmoid=True, reduction='none') + # if cfg.USE_IMAGE_LOSS_TYPE=='CrossEntropyLoss': + # self.rpn_img_seg_loss_func = CrossEntropyLoss(use_sigmoid=True) + # elif cfg.USE_IMAGE_LOSS_TYPE=='LovaszLoss': + # self.rpn_img_seg_loss_func = LovaszLoss(loss_type='binary',per_image=True) + + if cfg.USE_IMAGE_LOSS: + self.rpn_image_cls_layer = Image_Seg(inplanes=32, outplanes=1) + + self.proposal_layer = ProposalLayer(mode = mode) + self.init_weights() + + def init_weights(self): + if cfg.RPN.LOSS_CLS in ['SigmoidFocalLoss']: + pi = 0.01 + nn.init.constant_(self.rpn_cls_layer[2].conv.bias, -np.log((1 - pi) / pi)) + + nn.init.normal_(self.rpn_reg_layer[-1].conv.weight, mean = 0, std = 0.001) + + def forward(self, input_data): + """ + :param input_data: dict (point_cloud) + :return: + """ + pts_input = input_data['pts_input'] + if cfg.LI_FUSION.ENABLED: + img_input = input_data['img'] + xy_input = input_data['pts_origin_xy'] + if cfg.USE_PAINTING_SCORE: + pts_paint_scores = input_data['pts_paint_scores'] #(B, N,1) + backbone_xyz, backbone_features, img_feature, l_xy_cor = self.backbone_net(pts_input, img_input, xy_input, pts_paint_scores) + elif cfg.USE_PAINTING_FEAT: + pts_paint_feats = input_data['pts_paint_feats'] #(B, N,1) + backbone_xyz, backbone_features, img_feature, l_xy_cor = self.backbone_net(pts_input, img_input, xy_input, pts_paint_feats) + else: + backbone_xyz, backbone_features, img_feature, l_xy_cor = self.backbone_net(pts_input, img_input, xy_input) # (B, N, 3), (B, C, N) + else: + backbone_xyz, backbone_features, img_feature, l_xy_cor = self.backbone_net(pts_input) # (B, N, 3), (B, C, N) + + rpn_cls = 
self.rpn_cls_layer(backbone_features).transpose(1, 2).contiguous() # (B, N, 1) + rpn_reg = self.rpn_reg_layer(backbone_features).transpose(1, 2).contiguous() # (B, N, C) + #print('rpn_cls:', rpn_cls.shape) + + ret_dict = { 'rpn_cls' : rpn_cls, 'rpn_reg': rpn_reg, + 'backbone_xyz': backbone_xyz, 'backbone_features': backbone_features, + 'img_feature': img_feature, 'l_xy_cor': l_xy_cor # img_feature.shape: [2, 32, 384, 1280] + } + + if cfg.USE_IMAGE_LOSS: + rpn_image_seg = self.rpn_image_cls_layer(img_feature) + ret_dict['rpn_image_seg'] = rpn_image_seg # [2, 1, 384, 1280] + # print('#####rpn_image_seg', ret_dict['rpn_image_seg'].shape) + # print('#####img_feature', ret_dict['img_feature'].shape) + + return ret_dict diff --git a/lib/net/sa_block.py b/lib/net/sa_block.py new file mode 100644 index 0000000..080e4d1 --- /dev/null +++ b/lib/net/sa_block.py @@ -0,0 +1,144 @@ +import torch +from torch import nn +# ref: https://github.com/AutoVision-cloud/SA-Det3D + +class SA_block(nn.Module): + """Self-Attention block with dot product for point/voxel/pillar context. + A part of the code is from MLCVNet (CVPR 2020). + """ + def __init__(self, inplanes, planes, groups=4): + super().__init__() + self.groups = groups + + # linear transform to get values + self.t = nn.Conv1d(inplanes, planes, kernel_size=1, stride=1, bias=False) + # linear transform to get keys + self.p = nn.Conv1d(inplanes, planes, kernel_size=1, stride=1, bias=False) + # linear transform to get query + self.g = nn.Conv1d(inplanes, planes, kernel_size=1, stride=1, bias=False) + # conv linear + self.z = nn.Conv1d(planes, inplanes, kernel_size=1, stride=1, groups=self.groups, bias=False) + + # norm (essentially LayerNorm per group) + self.gn = nn.GroupNorm(num_groups=self.groups, num_channels=inplanes) + + # softmax + self.softmax = nn.Softmax(dim=-1) + + def kernel(self, t, p, g, b, c, h): + """Return the output after dot product per head + Args: + t: output of linear value + p: output of linear query + g: output of linear keys + b: batch size + c: no of channels + h: spatial breadth of feature maps + """ + proj_query = p.view(b, c, h).permute(0, 2, 1) # B X H x C + proj_key = g # B X C x (H) + energy = torch.bmm(proj_query, proj_key) # transpose check + total_energy = energy + attention = self.softmax(total_energy) # BX (N) X (N) + proj_value = t + out = torch.bmm(proj_value, attention.permute(0, 2, 1)) + out = out.view(b, c, h) + return out + + def forward(self, x): + residual = x + + t = self.t(x) + p = self.p(x) + g = self.g(x) + + b, c, h = t.size() + + if self.groups and self.groups > 1: + _c = int(c / self.groups) + + ts = torch.split(t, split_size_or_sections=_c, dim=1) + ps = torch.split(p, split_size_or_sections=_c, dim=1) + gs = torch.split(g, split_size_or_sections=_c, dim=1) + + _t_sequences = [] + for i in range(self.groups): + _x = self.kernel(ts[i], ps[i], gs[i], b, _c, h) + _t_sequences.append(_x) + x = torch.cat(_t_sequences, dim=1) + else: + x = self.kernel(t, p, g, b, c, h) + x = self.z(x) + x = self.gn(x) + residual + return x + + +class SA_block_def(nn.Module): + """Self-Attention block with dot product for point/voxel/pillar context. 
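+ Cross-attention variant of SA_block: in forward(x, y) below, queries come from x while keys and values come from y. 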
+ """ + + def __init__(self, inplanes, planes, groups=4): + super().__init__() + self.groups = groups + + # linear transform to get values + self.t = nn.Conv1d(inplanes, planes, kernel_size=1, stride=1, bias=False) + # linear transform to get keys + self.p = nn.Conv1d(inplanes, planes, kernel_size=1, stride=1, bias=False) + # linear transform to get query + self.g = nn.Conv1d(inplanes, planes, kernel_size=1, stride=1, bias=False) + # conv linear + self.z = nn.Conv1d(planes, inplanes, kernel_size=1, stride=1, groups=self.groups, bias=False) + + # norm (essentially LayerNorm per group) + self.gn = nn.GroupNorm(num_groups=self.groups, num_channels=inplanes) + + # softmax + self.softmax = nn.Softmax(dim=-1) + + def kernel(self, t, p, g, b, c, h): + """Return the output after dot product per head + Args: + t: output of linear value + p: output of linear query + g: output of linear keys + b: batch size + c: no of channels + h: spatial breadth of feature maps + """ + proj_query = p.permute(0, 2, 1) # B X H x C + proj_key = g # B X C x (H) + energy = torch.bmm(proj_query, proj_key) # transpose check + total_energy = energy + attention = self.softmax(total_energy) # BX (N) X (N) + proj_value = t + out = torch.bmm(proj_value, attention.permute(0, 2, 1)) + return out + + def forward(self, x, y): + residual = x + + t = self.t(y) + p = self.p(x) + g = self.g(y) + + b, c, h = t.size() + + if self.groups and self.groups > 1: + _c = int(c / self.groups) + + ts = torch.split(t, split_size_or_sections=_c, dim=1) + ps = torch.split(p, split_size_or_sections=_c, dim=1) + gs = torch.split(g, split_size_or_sections=_c, dim=1) + + _t_sequences = [] + for i in range(self.groups): + _x = self.kernel(ts[i], ps[i], gs[i], b, _c, h) + _t_sequences.append(_x) + x = torch.cat(_t_sequences, dim=1) + else: + x = self.kernel(t, p, g, b, c, h) + x = self.z(x) + x = self.gn(x) + residual + return x + diff --git a/lib/net/seg_utils.py b/lib/net/seg_utils.py new file mode 100644 index 0000000..a1153fa --- /dev/null +++ b/lib/net/seg_utils.py @@ -0,0 +1,101 @@ +import functools + +import torch.nn.functional as F + + +def reduce_loss(loss, reduction): + """Reduce loss as specified. + + Args: + loss (Tensor): Elementwise loss tensor. + reduction (str): Options are "none", "mean" and "sum". + + Return: + Tensor: Reduced loss tensor. + """ + reduction_enum = F._Reduction.get_enum(reduction) + # none: 0, elementwise_mean:1, sum: 2 + if reduction_enum == 0: + return loss + elif reduction_enum == 1: + return loss.mean() + elif reduction_enum == 2: + return loss.sum() + + +def weight_reduce_loss(loss, weight=None, reduction='mean', avg_factor=None): + """Apply element-wise weight and reduce loss. + + Args: + loss (Tensor): Element-wise loss. + weight (Tensor): Element-wise weights. + reduction (str): Same as built-in losses of PyTorch. + avg_factor (float): Avarage factor when computing the mean of losses. + + Returns: + Tensor: Processed loss values. 
+ """ + # if weight is specified, apply element-wise weight + if weight is not None: + assert weight.dim() == loss.dim() + if weight.dim() > 1: + assert weight.size(1) == 1 or weight.size(1) == loss.size(1) + loss = loss * weight + + # if avg_factor is not specified, just reduce the loss + if avg_factor is None: + loss = reduce_loss(loss, reduction) + else: + # if reduction is mean, then average the loss by avg_factor + if reduction == 'mean': + loss = loss.sum() / avg_factor + # if reduction is 'none', then do nothing, otherwise raise an error + elif reduction != 'none': + raise ValueError('avg_factor can not be used with reduction="sum"') + return loss + + +def weighted_loss(loss_func): + """Create a weighted version of a given loss function. + + To use this decorator, the loss function must have the signature like + `loss_func(pred, target, **kwargs)`. The function only needs to compute + element-wise loss without any reduction. This decorator will add weight + and reduction arguments to the function. The decorated function will have + the signature like `loss_func(pred, target, weight=None, reduction='mean', + avg_factor=None, **kwargs)`. + + :Example: + + >>> import torch + >>> @weighted_loss + >>> def l1_loss(pred, target): + >>> return (pred - target).abs() + + >>> pred = torch.Tensor([0, 2, 3]) + >>> target = torch.Tensor([1, 1, 1]) + >>> weight = torch.Tensor([1, 0, 1]) + + >>> l1_loss(pred, target) + tensor(1.3333) + >>> l1_loss(pred, target, weight) + tensor(1.) + >>> l1_loss(pred, target, reduction='none') + tensor([1., 1., 2.]) + >>> l1_loss(pred, target, weight, avg_factor=2) + tensor(1.5000) + """ + + @functools.wraps(loss_func) + def wrapper(pred, + target, + weight=None, + reduction='mean', + avg_factor=None, + **kwargs): + # get element-wise loss + loss = loss_func(pred, target, **kwargs) + loss = weight_reduce_loss(loss, weight, reduction, avg_factor) + return loss + + return wrapper diff --git a/lib/net/self_attention.py b/lib/net/self_attention.py new file mode 100644 index 0000000..e312b58 --- /dev/null +++ b/lib/net/self_attention.py @@ -0,0 +1,40 @@ +import torch.nn as nn +from lib.net.sa_block import SA_block +# ref: https://github.com/AutoVision-cloud/SA-Det3D + +class PointContext3D(nn.Module): + def __init__(self, model_cfg, IN_DIM, dropout=0.1): + super().__init__() + self.model_cfg = model_cfg + self.IN_DIM = IN_DIM + + # Self attention layers + self.self_attn1 = SA_block(inplanes=self.model_cfg.ATTN_DIM, planes=self.model_cfg.ATTN_DIM) + self.self_attn2 = SA_block(inplanes=self.model_cfg.ATTN_DIM, planes=self.model_cfg.ATTN_DIM) + # MLP layer + self.reduce_dim = nn.Sequential(nn.Conv1d(IN_DIM, self.model_cfg.ATTN_DIM, kernel_size=1), + nn.BatchNorm1d(self.model_cfg.ATTN_DIM), + nn.ReLU(inplace=True), + nn.Conv1d(self.model_cfg.ATTN_DIM, self.model_cfg.ATTN_DIM, kernel_size=1), + nn.BatchNorm1d(self.model_cfg.ATTN_DIM), + nn.ReLU(inplace=True) + ) + + def add_context_to_points(self, point_feats): + """Full pairwise self-attention for all point features""" + context_points = self.self_attn1(point_feats) + context_points = self.self_attn2(context_points) + return context_points + + def forward(self, batch_size, l_features, l_xyz): + """ + Args: + :param batch_size: + :param l_xyz: + :param l_features: + """ + # reduce dim of point features + l_features_red = self.reduce_dim(l_features) + # get context for every point feature + point_context_features = self.add_context_to_points(l_features_red) + return point_context_features \ No newline at end of file diff 
--git a/lib/net/train_functions.py b/lib/net/train_functions.py new file mode 100644 index 0000000..9f9392d --- /dev/null +++ b/lib/net/train_functions.py @@ -0,0 +1,382 @@ +import torch +import torch.nn as nn +import torch.nn.functional as F +import lib.utils.loss_utils as loss_utils +from lib.config import cfg +from collections import namedtuple +from torch.nn.functional import grid_sample + + +def Feature_Gather(feature_map, xy): + """ + :param xy:(B,N,2) normalize to [-1,1] + :param feature_map:(B,C,H,W) + :return: + """ + + # use grid_sample for this. + # xy(B,N,2)->(B,1,N,2) + xy = xy.unsqueeze(1) + + interpolate_feature = grid_sample(feature_map, xy) # (B,C,1,N) + + return interpolate_feature.squeeze(2) # (B,C,N) + +def model_joint_fn_decorator(): + ModelReturn = namedtuple("ModelReturn", ['loss', 'tb_dict', 'disp_dict']) + MEAN_SIZE = torch.from_numpy(cfg.CLS_MEAN_SIZE[0]).cuda() + + def model_fn(model, data): + if cfg.RPN.ENABLED: + pts_rect, pts_features, pts_input = data['pts_rect'], data['pts_features'], data['pts_input'] + gt_boxes3d = data['gt_boxes3d'] + # pts_rgb = data['pts_rgb'] + + if not cfg.RPN.FIXED: + rpn_cls_label, rpn_reg_label = data['rpn_cls_label'], data['rpn_reg_label'] # (2, 4096) (2, 4096, 7) + rpn_cls_label = torch.from_numpy(rpn_cls_label).cuda(non_blocking = True).long() + rpn_reg_label = torch.from_numpy(rpn_reg_label).cuda(non_blocking = True).float() + + inputs = torch.from_numpy(pts_input).cuda(non_blocking = True).float() + gt_boxes3d = torch.from_numpy(gt_boxes3d).cuda(non_blocking = True).float() + input_data = { 'pts_input': inputs, 'gt_boxes3d': gt_boxes3d } + else: + input_data = { } + for key, val in data.items(): + if key != 'sample_id': + input_data[key] = torch.from_numpy(val).contiguous().cuda(non_blocking = True).float() + if not cfg.RCNN.ROI_SAMPLE_JIT: + pts_input = torch.cat((input_data['pts_input'], input_data['pts_features']), dim = -1) + input_data['pts_input'] = pts_input + # input() + if cfg.LI_FUSION.ENABLED: + img = torch.from_numpy(data['img']).cuda(non_blocking = True).float().permute((0, 3, 1, 2)) + pts_origin_xy = torch.from_numpy(data['pts_origin_xy']).cuda(non_blocking = True).float() + input_data['img'] = img + input_data['pts_origin_xy'] = pts_origin_xy # [2, 4096, 2] + if cfg.RPN.USE_RGB or cfg.RCNN.USE_RGB: + pts_rgb = data['rgb'] + # print(pts_rgb.shape) + pts_rgb = torch.from_numpy(pts_rgb).cuda(non_blocking = True).float() + input_data['pts_rgb'] = pts_rgb + if cfg.USE_PAINTING_SCORE: + pts_paint_scores = data['pts_paint_scores'] + input_data['pts_paint_scores'] = torch.from_numpy(pts_paint_scores).cuda(non_blocking = True).float() + if cfg.USE_PAINTING_FEAT: + pts_paint_feats = data['pts_paint_feats'] + input_data['pts_paint_feats'] = torch.from_numpy(pts_paint_feats).cuda(non_blocking = True).float() + ret_dict = model(input_data) + + tb_dict = { } + disp_dict = { } + loss = 0 + if cfg.RPN.ENABLED and not cfg.RPN.FIXED: + rpn_cls, rpn_reg = ret_dict['rpn_cls'], ret_dict['rpn_reg'] + if cfg.USE_IMAGE_LOSS: + #rpn_image_seg_label = rpn_cls_label # [2, 4096] + if cfg.USE_IMG_DENSE_LOSS: + img_seg_predict = ret_dict['rpn_image_seg'] # [2, 1, 384, 1280] + img_seg_sparse_predict = Feature_Gather(img_seg_predict, ret_dict['l_xy_cor']) # [2, 1, 4096] + rpn_image_seg_label = torch.from_numpy(data['img_seg_mask']).cuda(non_blocking=True).float() + else: + rpn_image_seg = ret_dict['rpn_image_seg'] # [2, 1, 384, 1280] + img_seg_predict = Feature_Gather(rpn_image_seg, ret_dict['l_xy_cor']) # [2, 1, 4096] + rpn_image_seg_label = 
torch.from_numpy(data['rpn_cls_label']).cuda(non_blocking = True).float() + img_seg_sparse_predict = img_seg_predict + + else: + rpn_image_seg_label = None + img_seg_predict = None + img_seg_sparse_predict = None + + rpn_loss, rpn_loss_cls, rpn_loss_loc, rpn_loss_angle, rpn_loss_size, rpn_loss_iou = get_rpn_loss(model, + rpn_cls, + rpn_reg, + rpn_cls_label, + rpn_reg_label, + tb_dict, + img_seg_predict=img_seg_predict, + rpn_image_seg_label = rpn_image_seg_label, + img_seg_sparse_predict = img_seg_sparse_predict + ) + rpn_loss = rpn_loss * cfg.TRAIN.RPN_TRAIN_WEIGHT + loss += rpn_loss + disp_dict['rpn_loss'] = rpn_loss.item() + disp_dict['rpn_loss_cls'] = rpn_loss_cls.item() + disp_dict['rpn_loss_loc'] = rpn_loss_loc.item() + disp_dict['rpn_loss_angle'] = rpn_loss_angle.item() + disp_dict['rpn_loss_size'] = rpn_loss_size.item() + disp_dict['rpn_loss_iou'] = rpn_loss_iou.item() + # if cfg.USE_IMAGE_LOSS: + # disp_dict['rpn_loss_iou'] = rpn_loss_iou.item() + + + if cfg.RCNN.ENABLED: + if cfg.USE_IOU_BRANCH: + rcnn_loss,iou_loss,iou_branch_loss = get_rcnn_loss(model, ret_dict, tb_dict) + disp_dict['reg_fg_sum'] = tb_dict['rcnn_reg_fg'] + + rcnn_loss = rcnn_loss * cfg.TRAIN.RCNN_TRAIN_WEIGHT + disp_dict['rcnn_loss'] = rcnn_loss.item() + loss += rcnn_loss + disp_dict['loss'] = loss.item() + disp_dict['rcnn_iou_loss'] = iou_loss.item() + disp_dict['iou_branch_loss'] = iou_branch_loss.item() + else: + rcnn_loss = get_rcnn_loss(model, ret_dict, tb_dict) + disp_dict['reg_fg_sum'] = tb_dict['rcnn_reg_fg'] + + rcnn_loss = rcnn_loss * cfg.TRAIN.RCNN_TRAIN_WEIGHT + disp_dict['rcnn_loss'] = rcnn_loss.item() + loss += rcnn_loss + disp_dict['loss'] = loss.item() + + return ModelReturn(loss, tb_dict, disp_dict) + + def get_rpn_loss(model, rpn_cls, rpn_reg, rpn_cls_label, rpn_reg_label, tb_dict, img_seg_predict=None, rpn_image_seg_label=None,img_seg_sparse_predict=None): + batch_size = rpn_cls.shape[0] + if isinstance(model, nn.DataParallel): + rpn_cls_loss_func = model.module.rpn.rpn_cls_loss_func + rpn_img_seg_loss_func = model.module.rpn.rpn_img_seg_loss_func + else: + rpn_cls_loss_func = model.rpn.rpn_cls_loss_func + rpn_img_seg_loss_func = model.rpn.rpn_img_seg_loss_func + + rpn_cls_label_flat = rpn_cls_label.view(-1) + rpn_cls_flat = rpn_cls.view(-1) + fg_mask = (rpn_cls_label_flat > 0) + + # RPN classification loss + if cfg.RPN.LOSS_CLS == 'DiceLoss': + rpn_loss_cls = rpn_cls_loss_func(rpn_cls, rpn_cls_label_flat) + + elif cfg.RPN.LOSS_CLS == 'SigmoidFocalLoss': + rpn_cls_target = (rpn_cls_label_flat > 0).float() + pos = (rpn_cls_label_flat > 0).float() + neg = (rpn_cls_label_flat == 0).float() + cls_weights = pos + neg + pos_normalizer = pos.sum() + cls_weights = cls_weights / torch.clamp(pos_normalizer, min = 1.0) + rpn_loss_cls = rpn_cls_loss_func(rpn_cls_flat, rpn_cls_target, cls_weights) + rpn_loss_cls_pos = (rpn_loss_cls * pos).sum() + rpn_loss_cls_neg = (rpn_loss_cls * neg).sum() + rpn_loss_cls = rpn_loss_cls.sum() + tb_dict['rpn_loss_cls_pos'] = rpn_loss_cls_pos.item() + tb_dict['rpn_loss_cls_neg'] = rpn_loss_cls_neg.item() + + if cfg.USE_IMAGE_LOSS: + ## full image seg loss: + rpn_image_seg_label = rpn_image_seg_label.view(-1) + rpn_image_seg_target = (rpn_image_seg_label > 0).float() + + img_pos = (rpn_image_seg_label>0).float() + img_neg = (rpn_image_seg_label==0).float() + img_weight = (img_pos + img_neg) / (torch.clamp(img_pos.sum(), min = 1.0)) + rpn_img_seg_loss = rpn_cls_loss_func(img_seg_predict.view(-1), rpn_image_seg_target, img_weight) + rpn_img_seg_loss = rpn_img_seg_loss.sum() + 
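# NOTE: the released code scores the image branch with rpn_cls_loss_func (the focal loss) above, even + # though rpn_img_seg_loss_func is fetched earlier; kept as-is to match the shipped training recipe. + # The USE_MC_LOSS block below builds the mean distribution P = (P1 + P2) / 2 from the image and point + # foreground softmaxes and penalizes each branch's divergence from it (a Jensen-Shannon-style + # consistency term), optionally masked by MC_MASK_THRES. + 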
# print('#######rpn_img_seg_loss:', rpn_img_seg_loss) + + if cfg.USE_MC_LOSS: + P1 = F.log_softmax(img_seg_sparse_predict.view(batch_size, -1), dim=-1) # img prop + P2 = F.log_softmax(rpn_cls_flat.view(batch_size, -1), dim=-1) # point prop + + P1_1 = F.softmax(img_seg_sparse_predict.view(batch_size, -1), dim=-1) # img prop + P2_1 = F.softmax(rpn_cls_flat.view(batch_size, -1), dim=-1) # point prop + P = (P1_1.clone() + P2_1.clone()) / 2.0 + + kl_loss_i2p = F.kl_div(P1, P.detach(), reduction='none') + kl_loss_p2i = F.kl_div(P2, P.detach(), reduction='none') + + mc_loss = cfg.I2P_Weight * kl_loss_i2p + cfg.P2I_Weight * kl_loss_p2i + + p1_score = torch.sigmoid(img_seg_sparse_predict.view(batch_size, -1)) + p2_score = torch.sigmoid(rpn_cls_flat.view(batch_size, -1)) + + if cfg.ADD_MC_MASK: + kl_element_weight = (torch.max(p1_score, p2_score) >= cfg.MC_MASK_THRES).float() + else: + kl_element_weight = torch.ones_like(p1_score) + mc_loss = (kl_element_weight.contiguous().view(-1) * mc_loss.contiguous().view(-1) * (pos + neg)).sum() + + + if cfg.USE_IMAGE_SCORE: + # rpn_cls_flat = (rpn_cls_flat + img_seg_predict.view(-1)) / 2.0 #### img score divide 2 + rpn_cls_flat = rpn_cls_flat + img_seg_sparse_predict.view(-1) + + + elif cfg.RPN.LOSS_CLS == 'BinaryCrossEntropy': + weight = rpn_cls_flat.new(rpn_cls_flat.shape[0]).fill_(1.0) + weight[fg_mask] = cfg.RPN.FG_WEIGHT + rpn_cls_label_target = (rpn_cls_label_flat > 0).float() + batch_loss_cls = F.binary_cross_entropy(torch.sigmoid(rpn_cls_flat), rpn_cls_label_target, + weight=weight, reduction='none') + cls_valid_mask = (rpn_cls_label_flat >= 0).float() + rpn_loss_cls = (batch_loss_cls * cls_valid_mask).sum() / torch.clamp(cls_valid_mask.sum(), min = 1.0) + else: + raise NotImplementedError + + # RPN regression loss + point_num = rpn_reg.size(0) * rpn_reg.size(1) + fg_sum = fg_mask.long().sum().item() + # print('#######rpn average fg_sum:', fg_sum / batch_size) + if fg_sum != 0: + loss_loc, loss_angle, loss_size, loss_iou, reg_loss_dict = \ + loss_utils.get_reg_loss(torch.sigmoid(rpn_cls_flat)[fg_mask], torch.sigmoid(rpn_cls_flat)[fg_mask], + rpn_reg.view(point_num, -1)[fg_mask], + rpn_reg_label.view(point_num, 7)[fg_mask], + loc_scope = cfg.RPN.LOC_SCOPE, + loc_bin_size = cfg.RPN.LOC_BIN_SIZE, + num_head_bin = cfg.RPN.NUM_HEAD_BIN, + anchor_size = MEAN_SIZE, + get_xz_fine = cfg.RPN.LOC_XZ_FINE, + use_cls_score = True, + use_mask_score = False) + + loss_size = 3 * loss_size # consistent with old codes + # print('#######rpn loss_iou:', loss_iou) + loss_iou = cfg.TRAIN.RPN_CE_WEIGHT * loss_iou + rpn_loss_reg = loss_loc + loss_angle + loss_size + loss_iou + else: + # loss_loc = loss_angle = loss_size = rpn_loss_reg = rpn_loss_cls * 0 + loss_loc = loss_angle = loss_size = loss_iou = rpn_loss_reg = rpn_loss_cls * 0 + + rpn_loss = rpn_loss_cls * cfg.RPN.LOSS_WEIGHT[0] + rpn_loss_reg * cfg.RPN.LOSS_WEIGHT[1] + if cfg.USE_IMAGE_LOSS: + rpn_loss = rpn_loss + cfg.IMAGE_WEIGHT * rpn_img_seg_loss + tb_dict['rpn_img_seg_loss'] = rpn_img_seg_loss.item() + if cfg.USE_MC_LOSS: + rpn_loss = rpn_loss + cfg.MC_LOSS_WEIGHT * mc_loss + tb_dict['mc_loss'] = mc_loss.item() + + + tb_dict.update({ 'rpn_loss_cls' : rpn_loss_cls.item(), 'rpn_loss_reg': rpn_loss_reg.item(), + 'rpn_loss' : rpn_loss.item(), 'rpn_fg_sum': fg_sum, 'rpn_loss_loc': loss_loc.item(), + 'rpn_loss_angle': loss_angle.item(), 'rpn_loss_size': loss_size.item(), + 'rpn_loss_iou' : loss_iou.item() }) + + # return rpn_loss + return rpn_loss, rpn_loss_cls, loss_loc, loss_angle, loss_size, loss_iou + + def 
get_rcnn_loss(model, ret_dict, tb_dict): + rcnn_cls, rcnn_reg = ret_dict['rcnn_cls'], ret_dict['rcnn_reg'] + cls_label = ret_dict['cls_label'].float() + reg_valid_mask = ret_dict['reg_valid_mask'] + roi_boxes3d = ret_dict['roi_boxes3d'] + roi_size = roi_boxes3d[:, 3:6] + gt_boxes3d_ct = ret_dict['gt_of_rois'] + pts_input = ret_dict['pts_input'] + mask_score = ret_dict['mask_score'] + + gt_iou_weight = ret_dict['gt_iou'] + + # rcnn classification loss + if isinstance(model, nn.DataParallel): + cls_loss_func = model.module.rcnn_net.cls_loss_func + else: + cls_loss_func = model.rcnn_net.cls_loss_func + + cls_label_flat = cls_label.view(-1) + + if cfg.RCNN.LOSS_CLS == 'SigmoidFocalLoss': + rcnn_cls_flat = rcnn_cls.view(-1) + + cls_target = (cls_label_flat > 0).float() + pos = (cls_label_flat > 0).float() + neg = (cls_label_flat == 0).float() + cls_weights = pos + neg + pos_normalizer = pos.sum() + cls_weights = cls_weights / torch.clamp(pos_normalizer, min = 1.0) + + rcnn_loss_cls = cls_loss_func(rcnn_cls_flat, cls_target, cls_weights) + rcnn_loss_cls_pos = (rcnn_loss_cls * pos).sum() + rcnn_loss_cls_neg = (rcnn_loss_cls * neg).sum() + rcnn_loss_cls = rcnn_loss_cls.sum() + tb_dict['rpn_loss_cls_pos'] = rcnn_loss_cls_pos.item() + tb_dict['rpn_loss_cls_neg'] = rcnn_loss_cls_neg.item() + + elif cfg.RCNN.LOSS_CLS == 'BinaryCrossEntropy': + rcnn_cls_flat = rcnn_cls.view(-1) + batch_loss_cls = F.binary_cross_entropy(torch.sigmoid(rcnn_cls_flat), cls_label, reduction = 'none') + cls_valid_mask = (cls_label_flat >= 0).float() + rcnn_loss_cls = (batch_loss_cls * cls_valid_mask).sum() / torch.clamp(cls_valid_mask.sum(), min = 1.0) + + elif cfg.RCNN.LOSS_CLS == 'CrossEntropy': # was cfg.TRAIN.LOSS_CLS; the sibling branches dispatch on cfg.RCNN.LOSS_CLS + rcnn_cls_reshape = rcnn_cls.view(rcnn_cls.shape[0], -1) + cls_target = cls_label_flat.long() + cls_valid_mask = (cls_label_flat >= 0).float() + + batch_loss_cls = cls_loss_func(rcnn_cls_reshape, cls_target) + normalizer = torch.clamp(cls_valid_mask.sum(), min = 1.0) + rcnn_loss_cls = (batch_loss_cls.mean(dim = 1) * cls_valid_mask).sum() / normalizer + + else: + raise NotImplementedError + + # rcnn regression loss + batch_size = pts_input.shape[0] + fg_mask = (reg_valid_mask > 0) + fg_sum = fg_mask.long().sum().item() + if fg_sum != 0: + # print('#######rcnn average fg_sum:', fg_sum / batch_size) + if cfg.USE_IOU_BRANCH: + iou_branch_pred = ret_dict['rcnn_iou_branch'] + iou_branch_pred_fg_mask = iou_branch_pred[fg_mask] + else: + iou_branch_pred_fg_mask = None + + all_anchor_size = roi_size + anchor_size = all_anchor_size[fg_mask] if cfg.RCNN.SIZE_RES_ON_ROI else MEAN_SIZE + + loss_loc, loss_angle, loss_size, loss_iou, reg_loss_dict = \ + loss_utils.get_reg_loss(torch.sigmoid(rcnn_cls_flat)[fg_mask], mask_score[fg_mask], + rcnn_reg.view(batch_size, -1)[fg_mask], + gt_boxes3d_ct.view(batch_size, 7)[fg_mask], + loc_scope = cfg.RCNN.LOC_SCOPE, + loc_bin_size = cfg.RCNN.LOC_BIN_SIZE, + num_head_bin = cfg.RCNN.NUM_HEAD_BIN, + anchor_size = anchor_size, + get_xz_fine = True, get_y_by_bin = cfg.RCNN.LOC_Y_BY_BIN, + loc_y_scope = cfg.RCNN.LOC_Y_SCOPE, loc_y_bin_size = cfg.RCNN.LOC_Y_BIN_SIZE, + get_ry_fine = True, + use_cls_score = True, + use_mask_score = True, + gt_iou_weight = gt_iou_weight[fg_mask], + use_iou_branch = cfg.USE_IOU_BRANCH, + iou_branch_pred = iou_branch_pred_fg_mask) + + loss_size = 3 * loss_size # consistent with the old code + # rcnn_loss_reg = loss_loc + loss_angle + loss_size + # print('#######rcnn loss_iou:', loss_iou) + loss_iou = cfg.TRAIN.CE_WEIGHT * loss_iou + if cfg.USE_IOU_BRANCH: + iou_branch_loss = 
reg_loss_dict['iou_branch_loss'] + rcnn_loss_reg = loss_loc + loss_angle + loss_size + loss_iou + iou_branch_loss + else: + rcnn_loss_reg = loss_loc + loss_angle + loss_size + loss_iou + tb_dict.update(reg_loss_dict) + else: + loss_loc = loss_angle = loss_size = loss_iou = rcnn_loss_reg = iou_branch_loss = rcnn_loss_cls * 0 + + rcnn_loss = rcnn_loss_cls + rcnn_loss_reg + tb_dict['rcnn_loss_cls'] = rcnn_loss_cls.item() + tb_dict['rcnn_loss_reg'] = rcnn_loss_reg.item() + tb_dict['rcnn_loss'] = rcnn_loss.item() + + tb_dict['rcnn_loss_loc'] = loss_loc.item() + tb_dict['rcnn_loss_angle'] = loss_angle.item() + tb_dict['rcnn_loss_size'] = loss_size.item() + tb_dict['rcnn_loss_iou'] = loss_iou.item() + tb_dict['rcnn_cls_fg'] = (cls_label > 0).sum().item() + tb_dict['rcnn_cls_bg'] = (cls_label == 0).sum().item() + tb_dict['rcnn_reg_fg'] = reg_valid_mask.sum().item() + + if cfg.USE_IOU_BRANCH: + tb_dict['iou_branch_loss'] = iou_branch_loss.item() + # print('\n') + # print('iou_branch_loss:',iou_branch_loss.item()) + return rcnn_loss, loss_iou, iou_branch_loss + else: + return rcnn_loss + + + return model_fn diff --git a/lib/rpn/proposal_layer.py b/lib/rpn/proposal_layer.py new file mode 100644 index 0000000..ef0b127 --- /dev/null +++ b/lib/rpn/proposal_layer.py @@ -0,0 +1,142 @@ +import torch +import torch.nn as nn +from lib.utils.bbox_transform import decode_bbox_target +from lib.config import cfg +import lib.utils.kitti_utils as kitti_utils +import lib.utils.iou3d.iou3d_utils as iou3d_utils + + +class ProposalLayer(nn.Module): + def __init__(self, mode = 'TRAIN'): + super().__init__() + self.mode = mode + self.MEAN_SIZE = torch.from_numpy(cfg.CLS_MEAN_SIZE[0]).cuda() + + def forward(self, rpn_scores, rpn_reg, xyz): + """ + :param rpn_scores: (B, N) + :param rpn_reg: (B, N, 8) + :param xyz: (B, N, 3) + :return bbox3d: (B, M, 7) + """ + batch_size = xyz.shape[0] + proposals = decode_bbox_target(xyz.view(-1, 3), rpn_reg.view(-1, rpn_reg.shape[-1]), + anchor_size = self.MEAN_SIZE, + loc_scope = cfg.RPN.LOC_SCOPE, + loc_bin_size = cfg.RPN.LOC_BIN_SIZE, + num_head_bin = cfg.RPN.NUM_HEAD_BIN, + get_xz_fine = cfg.RPN.LOC_XZ_FINE, + get_y_by_bin = False, + get_ry_fine = False) # (N, 7) + proposals[:, 1] += proposals[:, 3] / 2 # shift y to the bottom center of the box + proposals = proposals.view(batch_size, -1, 7) + + scores = rpn_scores + _, sorted_idxs = torch.sort(scores, dim = 1, descending = True) + + batch_size = scores.size(0) + ret_bbox3d = scores.new(batch_size, cfg[self.mode].RPN_POST_NMS_TOP_N, 7).zero_() + ret_scores = scores.new(batch_size, cfg[self.mode].RPN_POST_NMS_TOP_N).zero_() + for k in range(batch_size): + scores_single = scores[k] + proposals_single = proposals[k] + order_single = sorted_idxs[k] + + if cfg.TEST.RPN_DISTANCE_BASED_PROPOSE: + scores_single, proposals_single = self.distance_based_proposal(scores_single, proposals_single, + order_single) + else: + scores_single, proposals_single = self.score_based_proposal(scores_single, proposals_single, + order_single) + + proposals_tot = proposals_single.size(0) + ret_bbox3d[k, :proposals_tot] = proposals_single + ret_scores[k, :proposals_tot] = scores_single + + return ret_bbox3d, ret_scores + + def distance_based_proposal(self, scores, proposals, order): + """ + propose rois in two areas based on the distance + :param scores: (N) + :param proposals: (N, 7) + :param order: (N) + """ + nms_range_list = [0, 40.0, 80.0] + pre_tot_top_n = cfg[self.mode].RPN_PRE_NMS_TOP_N + pre_top_n_list = [0, int(pre_tot_top_n * 0.7), pre_tot_top_n - 
int(pre_tot_top_n * 0.7)] + post_tot_top_n = cfg[self.mode].RPN_POST_NMS_TOP_N + post_top_n_list = [0, int(post_tot_top_n * 0.7), post_tot_top_n - int(post_tot_top_n * 0.7)] + + scores_single_list, proposals_single_list = [], [] + + # sort by score + scores_ordered = scores[order] + proposals_ordered = proposals[order] + + dist = proposals_ordered[:, 2] + first_mask = (dist > nms_range_list[0]) & (dist <= nms_range_list[1]) + for i in range(1, len(nms_range_list)): + # get proposal distance mask + dist_mask = ((dist > nms_range_list[i - 1]) & (dist <= nms_range_list[i])) + + if dist_mask.sum() != 0: + # this area has points + # reduce by mask + cur_scores = scores_ordered[dist_mask] + cur_proposals = proposals_ordered[dist_mask] + + # fetch pre nms top K + cur_scores = cur_scores[:pre_top_n_list[i]] + cur_proposals = cur_proposals[:pre_top_n_list[i]] + else: + assert i == 2, '%d' % i + # this area doesn't have any points, so use rois of first area + cur_scores = scores_ordered[first_mask] + cur_proposals = proposals_ordered[first_mask] + + # fetch top K of first area + cur_scores = cur_scores[pre_top_n_list[i - 1]:][:pre_top_n_list[i]] + cur_proposals = cur_proposals[pre_top_n_list[i - 1]:][:pre_top_n_list[i]] + + # oriented nms + boxes_bev = kitti_utils.boxes3d_to_bev_torch(cur_proposals) + if cfg.RPN.NMS_TYPE == 'rotate': + keep_idx = iou3d_utils.nms_gpu(boxes_bev, cur_scores, cfg[self.mode].RPN_NMS_THRESH) + elif cfg.RPN.NMS_TYPE == 'normal': + keep_idx = iou3d_utils.nms_normal_gpu(boxes_bev, cur_scores, cfg[self.mode].RPN_NMS_THRESH) + else: + raise NotImplementedError + + # Fetch post nms top k + keep_idx = keep_idx[:post_top_n_list[i]] + + scores_single_list.append(cur_scores[keep_idx]) + proposals_single_list.append(cur_proposals[keep_idx]) + + scores_single = torch.cat(scores_single_list, dim = 0) + proposals_single = torch.cat(proposals_single_list, dim = 0) + return scores_single, proposals_single + + def score_based_proposal(self, scores, proposals, order): + """ + propose rois purely by classification score (single NMS over the whole range) + :param scores: (N) + :param proposals: (N, 7) + :param order: (N) + """ + # sort by score + scores_ordered = scores[order] + proposals_ordered = proposals[order] + + # pre nms top K + cur_scores = scores_ordered[:cfg[self.mode].RPN_PRE_NMS_TOP_N] + cur_proposals = proposals_ordered[:cfg[self.mode].RPN_PRE_NMS_TOP_N] + + boxes_bev = kitti_utils.boxes3d_to_bev_torch(cur_proposals) + keep_idx = iou3d_utils.nms_gpu(boxes_bev, cur_scores, cfg[self.mode].RPN_NMS_THRESH) + + # Fetch post nms top k + keep_idx = keep_idx[:cfg[self.mode].RPN_POST_NMS_TOP_N] + + return cur_scores[keep_idx], cur_proposals[keep_idx] diff --git a/lib/rpn/proposal_target_layer.py b/lib/rpn/proposal_target_layer.py new file mode 100644 index 0000000..3c0c3b4 --- /dev/null +++ b/lib/rpn/proposal_target_layer.py @@ -0,0 +1,369 @@ +import torch +import torch.nn as nn +import numpy as np +from lib.config import cfg +import lib.utils.kitti_utils as kitti_utils +import lib.utils.roipool3d.roipool3d_utils as roipool3d_utils +import lib.utils.iou3d.iou3d_utils as iou3d_utils + + +class ProposalTargetLayer(nn.Module): + def __init__(self): + super().__init__() + + def forward(self, input_dict): + roi_boxes3d, gt_boxes3d = input_dict['roi_boxes3d'], input_dict['gt_boxes3d'] + + batch_rois, batch_gt_of_rois, batch_roi_iou = self.sample_rois_for_rcnn(roi_boxes3d, gt_boxes3d) + + rpn_xyz, rpn_features = input_dict['rpn_xyz'], input_dict['rpn_features'] + + + pts_extra_input_list = [] + if cfg.DEEP_RCNN_FUSION: + 
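# carry each point's normalized image-plane xy alongside its features through RoI pooling, so the + # pooled tensor can be split back into xyz | xy | features (channels 0:3 | 3:5 | 5:) further below + 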
pts_extra_input_list.append(input_dict['l_xy_cor']) + + if cfg.RCNN.USE_INTENSITY: + pts_extra_input_list.extend([input_dict['rpn_intensity'].unsqueeze(dim = 2), + input_dict['seg_mask'].unsqueeze(dim = 2)]) # extend, not append: torch.cat below needs a flat list of tensors + else: + pts_extra_input_list.append(input_dict['seg_mask'].unsqueeze(dim = 2)) + + # if cfg.RCNN.USE_INTENSITY: + # pts_extra_input_list = [input_dict['rpn_intensity'].unsqueeze(dim = 2), + # input_dict['seg_mask'].unsqueeze(dim = 2)] + # else: + # pts_extra_input_list = [input_dict['seg_mask'].unsqueeze(dim = 2)] + + if cfg.RCNN.USE_DEPTH: + pts_depth = input_dict['pts_depth'] / 70.0 - 0.5 + pts_extra_input_list.append(pts_depth.unsqueeze(dim = 2)) + + if cfg.RCNN.USE_RGB: + pts_rgb = input_dict['pts_rgb'] + pts_extra_input_list.append(pts_rgb) + + pts_extra_input = torch.cat(pts_extra_input_list, dim = 2) + + # point cloud pooling + pts_feature = torch.cat((pts_extra_input, rpn_features), dim = 2) + pooled_features, pooled_empty_flag = \ + roipool3d_utils.roipool3d_gpu(rpn_xyz, pts_feature, batch_rois, cfg.RCNN.POOL_EXTRA_WIDTH, + sampled_pt_num = cfg.RCNN.NUM_POINTS) + + if cfg.DEEP_RCNN_FUSION: + sampled_pts, sampled_xy_cor, sampled_features = pooled_features[:, :, :, 0:3], \ + pooled_features[:, :, :, 3:5], pooled_features[:, :, :, 5:] + else: + sampled_pts, sampled_features = pooled_features[:, :, :, 0:3], pooled_features[:, :, :, 3:] + sampled_xy_cor = None + + # sampled_pts, sampled_features = pooled_features[:, :, :, 0:3], pooled_features[:, :, :, 3:] + mask_score = pooled_features[:, :, :, 3].sum(-1) / cfg.RCNN.NUM_POINTS + + # data augmentation + if cfg.AUG_DATA: + sampled_pts, batch_rois, batch_gt_of_rois = \ + self.data_augmentation(sampled_pts, batch_rois, batch_gt_of_rois) + + # canonical transformation + batch_size = batch_rois.shape[0] + roi_ry = batch_rois[:, :, 6] % (2 * np.pi) + roi_center = batch_rois[:, :, 0:3] + sampled_pts = sampled_pts - roi_center.unsqueeze(dim = 2) # (B, M, 512, 3) + batch_gt_of_rois[:, :, 0:3] = batch_gt_of_rois[:, :, 0:3] - roi_center + batch_gt_of_rois[:, :, 6] = batch_gt_of_rois[:, :, 6] - roi_ry + + for k in range(batch_size): + sampled_pts[k] = kitti_utils.rotate_pc_along_y_torch(sampled_pts[k], batch_rois[k, :, 6]) + batch_gt_of_rois[k] = kitti_utils.rotate_pc_along_y_torch(batch_gt_of_rois[k].unsqueeze(dim = 1), + roi_ry[k]).squeeze(dim = 1) + + # regression valid mask + valid_mask = (pooled_empty_flag == 0) + reg_valid_mask = ((batch_roi_iou > cfg.RCNN.REG_FG_THRESH) & valid_mask).long() + + # classification label + batch_cls_label = (batch_roi_iou > cfg.RCNN.CLS_FG_THRESH).long() + invalid_mask = (batch_roi_iou > cfg.RCNN.CLS_BG_THRESH) & (batch_roi_iou < cfg.RCNN.CLS_FG_THRESH) + batch_cls_label[valid_mask == 0] = -1 + batch_cls_label[invalid_mask > 0] = -1 + + output_dict = { 'sampled_pts' : sampled_pts.view(-1, cfg.RCNN.NUM_POINTS, 3), + 'pts_feature' : sampled_features.view(-1, cfg.RCNN.NUM_POINTS, sampled_features.shape[3]), + 'cls_label' : batch_cls_label.view(-1), + 'mask_score' : mask_score.view(-1), + 'reg_valid_mask': reg_valid_mask.view(-1), + 'gt_of_rois' : batch_gt_of_rois.view(-1, 7), + 'gt_iou' : batch_roi_iou.view(-1), + 'roi_boxes3d' : batch_rois.view(-1, 7), + 'sampled_xy_cor': sampled_xy_cor} + + return output_dict + + def sample_rois_for_rcnn(self, roi_boxes3d, gt_boxes3d): + """ + :param roi_boxes3d: (B, M, 7) + :param gt_boxes3d: (B, N, 8) [x, y, z, h, w, l, ry, cls] + :return + batch_rois: (B, N, 7) + batch_gt_of_rois: (B, N, 8) + batch_roi_iou: (B, N) + """ + batch_size = 
roi_boxes3d.size(0) + + fg_rois_per_image = int(np.round(cfg.RCNN.FG_RATIO * cfg.RCNN.ROI_PER_IMAGE)) + + batch_rois = gt_boxes3d.new(batch_size, cfg.RCNN.ROI_PER_IMAGE, 7).zero_() + batch_gt_of_rois = gt_boxes3d.new(batch_size, cfg.RCNN.ROI_PER_IMAGE, 7).zero_() + batch_roi_iou = gt_boxes3d.new(batch_size, cfg.RCNN.ROI_PER_IMAGE).zero_() + + for idx in range(batch_size): + cur_roi, cur_gt = roi_boxes3d[idx], gt_boxes3d[idx] + + k = cur_gt.__len__() - 1 + while cur_gt[k].sum() == 0: + k -= 1 + cur_gt = cur_gt[:k + 1] + + # include gt boxes in the candidate rois + iou3d = iou3d_utils.boxes_iou3d_gpu(cur_roi, cur_gt[:, 0:7]) # (M, N) + + max_overlaps, gt_assignment = torch.max(iou3d, dim = 1) + + # sample fg, easy_bg, hard_bg + fg_thresh = min(cfg.RCNN.REG_FG_THRESH, cfg.RCNN.CLS_FG_THRESH) + fg_inds = torch.nonzero((max_overlaps >= fg_thresh)).view(-1) + + # TODO: this will mix the fg and bg when CLS_BG_THRESH_LO < iou < CLS_BG_THRESH + # fg_inds = torch.cat((fg_inds, roi_assignment), dim=0) # consider the roi which has max_iou with gt as fg + + easy_bg_inds = torch.nonzero((max_overlaps < cfg.RCNN.CLS_BG_THRESH_LO)).view(-1) + hard_bg_inds = torch.nonzero((max_overlaps < cfg.RCNN.CLS_BG_THRESH) & + (max_overlaps >= cfg.RCNN.CLS_BG_THRESH_LO)).view(-1) + + fg_num_rois = fg_inds.numel() + bg_num_rois = hard_bg_inds.numel() + easy_bg_inds.numel() + + if fg_num_rois > 0 and bg_num_rois > 0: + # sampling fg + fg_rois_per_this_image = min(fg_rois_per_image, fg_num_rois) + + rand_num = torch.from_numpy(np.random.permutation(fg_num_rois)).type_as(gt_boxes3d).long() + fg_inds = fg_inds[rand_num[:fg_rois_per_this_image]] + + # sampling bg + bg_rois_per_this_image = cfg.RCNN.ROI_PER_IMAGE - fg_rois_per_this_image + bg_inds = self.sample_bg_inds(hard_bg_inds, easy_bg_inds, bg_rois_per_this_image) + + elif fg_num_rois > 0 and bg_num_rois == 0: + # sampling fg + rand_num = np.floor(np.random.rand(cfg.RCNN.ROI_PER_IMAGE) * fg_num_rois) + rand_num = torch.from_numpy(rand_num).type_as(gt_boxes3d).long() + fg_inds = fg_inds[rand_num] + fg_rois_per_this_image = cfg.RCNN.ROI_PER_IMAGE + bg_rois_per_this_image = 0 + elif bg_num_rois > 0 and fg_num_rois == 0: + # sampling bg + bg_rois_per_this_image = cfg.RCNN.ROI_PER_IMAGE + bg_inds = self.sample_bg_inds(hard_bg_inds, easy_bg_inds, bg_rois_per_this_image) + + fg_rois_per_this_image = 0 + else: + import pdb + pdb.set_trace() + raise NotImplementedError + + # augment the rois by noise + roi_list, roi_iou_list, roi_gt_list = [], [], [] + if fg_rois_per_this_image > 0: + fg_rois_src = cur_roi[fg_inds] + gt_of_fg_rois = cur_gt[gt_assignment[fg_inds]] + iou3d_src = max_overlaps[fg_inds] + fg_rois, fg_iou3d = self.aug_roi_by_noise_torch(fg_rois_src, gt_of_fg_rois, iou3d_src, + aug_times = cfg.RCNN.ROI_FG_AUG_TIMES) + roi_list.append(fg_rois) + roi_iou_list.append(fg_iou3d) + roi_gt_list.append(gt_of_fg_rois) + + if bg_rois_per_this_image > 0: + bg_rois_src = cur_roi[bg_inds] + gt_of_bg_rois = cur_gt[gt_assignment[bg_inds]] + iou3d_src = max_overlaps[bg_inds] + aug_times = 1 if cfg.RCNN.ROI_FG_AUG_TIMES > 0 else 0 + bg_rois, bg_iou3d = self.aug_roi_by_noise_torch(bg_rois_src, gt_of_bg_rois, iou3d_src, + aug_times = aug_times) + roi_list.append(bg_rois) + roi_iou_list.append(bg_iou3d) + roi_gt_list.append(gt_of_bg_rois) + + rois = torch.cat(roi_list, dim = 0) + iou_of_rois = torch.cat(roi_iou_list, dim = 0) + gt_of_rois = torch.cat(roi_gt_list, dim = 0) + + batch_rois[idx] = rois + batch_gt_of_rois[idx] = gt_of_rois + batch_roi_iou[idx] = iou_of_rois + + return 
batch_rois, batch_gt_of_rois, batch_roi_iou + + def sample_bg_inds(self, hard_bg_inds, easy_bg_inds, bg_rois_per_this_image): + if hard_bg_inds.numel() > 0 and easy_bg_inds.numel() > 0: + hard_bg_rois_num = int(bg_rois_per_this_image * cfg.RCNN.HARD_BG_RATIO) + easy_bg_rois_num = bg_rois_per_this_image - hard_bg_rois_num + + # sampling hard bg + rand_idx = torch.randint(low = 0, high = hard_bg_inds.numel(), size = (hard_bg_rois_num,)).long() + hard_bg_inds = hard_bg_inds[rand_idx] + + # sampling easy bg + rand_idx = torch.randint(low = 0, high = easy_bg_inds.numel(), size = (easy_bg_rois_num,)).long() + easy_bg_inds = easy_bg_inds[rand_idx] + + bg_inds = torch.cat([hard_bg_inds, easy_bg_inds], dim = 0) + elif hard_bg_inds.numel() > 0 and easy_bg_inds.numel() == 0: + hard_bg_rois_num = bg_rois_per_this_image + # sampling hard bg + rand_idx = torch.randint(low = 0, high = hard_bg_inds.numel(), size = (hard_bg_rois_num,)).long() + bg_inds = hard_bg_inds[rand_idx] + elif hard_bg_inds.numel() == 0 and easy_bg_inds.numel() > 0: + easy_bg_rois_num = bg_rois_per_this_image + # sampling easy bg + rand_idx = torch.randint(low = 0, high = easy_bg_inds.numel(), size = (easy_bg_rois_num,)).long() + bg_inds = easy_bg_inds[rand_idx] + else: + raise NotImplementedError + + return bg_inds + + def aug_roi_by_noise_torch(self, roi_boxes3d, gt_boxes3d, iou3d_src, aug_times = 10): + iou_of_rois = torch.zeros(roi_boxes3d.shape[0]).type_as(gt_boxes3d) + pos_thresh = min(cfg.RCNN.REG_FG_THRESH, cfg.RCNN.CLS_FG_THRESH) + + for k in range(roi_boxes3d.shape[0]): + temp_iou = cnt = 0 + roi_box3d = roi_boxes3d[k] + + gt_box3d = gt_boxes3d[k].view(1, 7) + aug_box3d = roi_box3d + keep = True + while temp_iou < pos_thresh and cnt < aug_times: + if np.random.rand() < 0.2: + aug_box3d = roi_box3d # p=0.2 to keep the original roi box + keep = True + else: + aug_box3d = self.random_aug_box3d(roi_box3d) + keep = False + aug_box3d = aug_box3d.view((1, 7)) + iou3d = iou3d_utils.boxes_iou3d_gpu(aug_box3d, gt_box3d) + temp_iou = iou3d[0][0] + cnt += 1 + roi_boxes3d[k] = aug_box3d.view(-1) + if cnt == 0 or keep: + iou_of_rois[k] = iou3d_src[k] + else: + iou_of_rois[k] = temp_iou + return roi_boxes3d, iou_of_rois + + @staticmethod + def random_aug_box3d(box3d): + """ + :param box3d: (7) [x, y, z, h, w, l, ry] + random shift, scale, orientation + """ + if cfg.RCNN.REG_AUG_METHOD == 'single': + pos_shift = (torch.rand(3, device = box3d.device) - 0.5) # [-0.5 ~ 0.5] + hwl_scale = (torch.rand(3, device = box3d.device) - 0.5) / (0.5 / 0.15) + 1.0 # + angle_rot = (torch.rand(1, device = box3d.device) - 0.5) / (0.5 / (np.pi / 12)) # [-pi/12 ~ pi/12] + aug_box3d = torch.cat([box3d[0:3] + pos_shift, box3d[3:6] * hwl_scale, box3d[6:7] + angle_rot], dim = 0) + return aug_box3d + elif cfg.RCNN.REG_AUG_METHOD == 'multiple': + # pos_range, hwl_range, angle_range, mean_iou + range_config = [[0.2, 0.1, np.pi / 12, 0.7], + [0.3, 0.15, np.pi / 12, 0.6], + [0.5, 0.15, np.pi / 9, 0.5], + [0.8, 0.15, np.pi / 6, 0.3], + [1.0, 0.15, np.pi / 3, 0.2]] + idx = torch.randint(low = 0, high = len(range_config), size = (1,))[0].long() + + pos_shift = ((torch.rand(3, device = box3d.device) - 0.5) / 0.5) * range_config[idx][0] + hwl_scale = ((torch.rand(3, device = box3d.device) - 0.5) / 0.5) * range_config[idx][1] + 1.0 + angle_rot = ((torch.rand(1, device = box3d.device) - 0.5) / 0.5) * range_config[idx][2] + + aug_box3d = torch.cat([box3d[0:3] + pos_shift, box3d[3:6] * hwl_scale, box3d[6:7] + angle_rot], dim = 0) + return aug_box3d + elif 
cfg.RCNN.REG_AUG_METHOD == 'normal': + x_shift = np.random.normal(loc = 0, scale = 0.3) + y_shift = np.random.normal(loc = 0, scale = 0.2) + z_shift = np.random.normal(loc = 0, scale = 0.3) + h_shift = np.random.normal(loc = 0, scale = 0.25) + w_shift = np.random.normal(loc = 0, scale = 0.15) + l_shift = np.random.normal(loc = 0, scale = 0.5) + ry_shift = ((np.random.rand() - 0.5) / 0.5) * np.pi / 12 # np.random like the other shifts; torch.rand needs an explicit size + + aug_box3d = np.array([box3d[0] + x_shift, box3d[1] + y_shift, box3d[2] + z_shift, box3d[3] + h_shift, + box3d[4] + w_shift, box3d[5] + l_shift, box3d[6] + ry_shift], dtype = np.float32) + aug_box3d = torch.from_numpy(aug_box3d).type_as(box3d) + return aug_box3d + else: + raise NotImplementedError + + def data_augmentation(self, pts, rois, gt_of_rois): + """ + :param pts: (B, M, 512, 3) + :param rois: (B, M, 7) + :param gt_of_rois: (B, M, 7) + :return: + """ + batch_size, boxes_num = pts.shape[0], pts.shape[1] + + # rotation augmentation + angles = ((torch.rand((batch_size, boxes_num), device = pts.device) - 0.5) / 0.5) * (np.pi / cfg.AUG_ROT_RANGE) # parenthesized so the noise spans [-1, 1], matching the scaling/flip lines below + + # calculate gt alpha from gt_of_rois + temp_x, temp_z, temp_ry = gt_of_rois[:, :, 0], gt_of_rois[:, :, 2], gt_of_rois[:, :, 6] + temp_beta = torch.atan2(temp_z, temp_x) + gt_alpha = -torch.sign(temp_beta) * np.pi / 2 + temp_beta + temp_ry # (B, M) + + temp_x, temp_z, temp_ry = rois[:, :, 0], rois[:, :, 2], rois[:, :, 6] + temp_beta = torch.atan2(temp_z, temp_x) + roi_alpha = -torch.sign(temp_beta) * np.pi / 2 + temp_beta + temp_ry # (B, M) + + for k in range(batch_size): + pts[k] = kitti_utils.rotate_pc_along_y_torch(pts[k], angles[k]) + gt_of_rois[k] = kitti_utils.rotate_pc_along_y_torch(gt_of_rois[k].unsqueeze(dim = 1), angles[k]).squeeze( + dim = 1) + rois[k] = kitti_utils.rotate_pc_along_y_torch(rois[k].unsqueeze(dim = 1), angles[k]).squeeze(dim = 1) + + # calculate the ry after rotation + temp_x, temp_z = gt_of_rois[:, :, 0], gt_of_rois[:, :, 2] + temp_beta = torch.atan2(temp_z, temp_x) + gt_of_rois[:, :, 6] = torch.sign(temp_beta) * np.pi / 2 + gt_alpha - temp_beta + + temp_x, temp_z = rois[:, :, 0], rois[:, :, 2] + temp_beta = torch.atan2(temp_z, temp_x) + rois[:, :, 6] = torch.sign(temp_beta) * np.pi / 2 + roi_alpha - temp_beta + + # scaling augmentation + scales = 1 + ((torch.rand((batch_size, boxes_num), device = pts.device) - 0.5) / 0.5) * 0.05 + pts = pts * scales.unsqueeze(dim = 2).unsqueeze(dim = 3) + gt_of_rois[:, :, 0:6] = gt_of_rois[:, :, 0:6] * scales.unsqueeze(dim = 2) + rois[:, :, 0:6] = rois[:, :, 0:6] * scales.unsqueeze(dim = 2) + + # flip augmentation + flip_flag = torch.sign(torch.rand((batch_size, boxes_num), device = pts.device) - 0.5) + pts[:, :, :, 0] = pts[:, :, :, 0] * flip_flag.unsqueeze(dim = 2) + gt_of_rois[:, :, 0] = gt_of_rois[:, :, 0] * flip_flag + # flip orientation: ry > 0: pi - ry, ry < 0: -pi - ry + src_ry = gt_of_rois[:, :, 6] + ry = (flip_flag == 1).float() * src_ry + (flip_flag == -1).float() * (torch.sign(src_ry) * np.pi - src_ry) + gt_of_rois[:, :, 6] = ry + + rois[:, :, 0] = rois[:, :, 0] * flip_flag + # flip orientation: ry > 0: pi - ry, ry < 0: -pi - ry + src_ry = rois[:, :, 6] + ry = (flip_flag == 1).float() * src_ry + (flip_flag == -1).float() * (torch.sign(src_ry) * np.pi - src_ry) + rois[:, :, 6] = ry + + return pts, rois, gt_of_rois diff --git a/lib/utils/bbox_transform.py b/lib/utils/bbox_transform.py new file mode 100644 index 0000000..fb4023b --- /dev/null +++ b/lib/utils/bbox_transform.py @@ -0,0 +1,260 @@ +import torch +import numpy as np +from lib.config import cfg +import 
torch.nn.functional as F + +def rotate_pc_along_y_torch(pc, rot_angle): + """ + :param pc: (N, 3 + C) + :param rot_angle: (N) + :return: + """ + cosa = torch.cos(rot_angle).view(-1, 1) + sina = torch.sin(rot_angle).view(-1, 1) + + raw_1 = torch.cat([cosa, -sina], dim=1) + raw_2 = torch.cat([sina, cosa], dim=1) + R = torch.cat((raw_1.unsqueeze(dim=1), raw_2.unsqueeze(dim=1)), dim=1) # (N, 2, 2) + + pc_temp = pc[:, [0, 2]].unsqueeze(dim=1) # (N, 1, 2) + + pc[:, [0, 2]] = torch.matmul(pc_temp, R.permute(0, 2, 1)).squeeze(dim=1) + return pc + + +def decode_bbox_target(roi_box3d, pred_reg, loc_scope, loc_bin_size, num_head_bin, anchor_size, + get_xz_fine=True, get_y_by_bin=False, loc_y_scope=0.5, loc_y_bin_size=0.25, get_ry_fine=False): + """ + :param roi_box3d: (N, 7) + :param pred_reg: (N, C) + :param loc_scope: + :param loc_bin_size: + :param num_head_bin: + :param anchor_size: + :param get_xz_fine: + :param get_y_by_bin: + :param loc_y_scope: + :param loc_y_bin_size: + :param get_ry_fine: + :return: + """ + anchor_size = anchor_size.to(roi_box3d.get_device()) + per_loc_bin_num = int(loc_scope / loc_bin_size) * 2 + loc_y_bin_num = int(loc_y_scope / loc_y_bin_size) * 2 + + # recover xz localization + assert cfg.TRAIN.BBOX_AVG_BY_BIN == cfg.TEST.BBOX_AVG_BY_BIN + + if not cfg.TRAIN.BBOX_AVG_BY_BIN: + # default: cfg.bbox_avg_by_bin = False + x_bin_l, x_bin_r = 0, per_loc_bin_num + z_bin_l, z_bin_r = per_loc_bin_num, per_loc_bin_num * 2 + start_offset = z_bin_r + + x_bin = torch.argmax(pred_reg[:, x_bin_l: x_bin_r], dim=1) + z_bin = torch.argmax(pred_reg[:, z_bin_l: z_bin_r], dim=1) + + pos_x = x_bin.float() * loc_bin_size + loc_bin_size / 2 - loc_scope + pos_z = z_bin.float() * loc_bin_size + loc_bin_size / 2 - loc_scope + + if get_xz_fine: + x_res_l, x_res_r = per_loc_bin_num * 2, per_loc_bin_num * 3 + z_res_l, z_res_r = per_loc_bin_num * 3, per_loc_bin_num * 4 + start_offset = z_res_r + + x_res_norm = torch.gather(pred_reg[:, x_res_l: x_res_r], dim=1, index=x_bin.unsqueeze(dim=1)).squeeze(dim=1) + z_res_norm = torch.gather(pred_reg[:, z_res_l: z_res_r], dim=1, index=z_bin.unsqueeze(dim=1)).squeeze(dim=1) + x_res = x_res_norm * loc_bin_size + z_res = z_res_norm * loc_bin_size + + pos_x += x_res + pos_z += z_res + else: + # print('BBOX_AVG_BY_BIN: True') + + x_bin_l, x_bin_r = 0, per_loc_bin_num + z_bin_l, z_bin_r = per_loc_bin_num, per_loc_bin_num * 2 + start_offset = z_bin_r + + pred_x_bin = F.softmax(pred_reg[:, x_bin_l: x_bin_r], 1) # N x num_bin + pred_z_bin = F.softmax(pred_reg[:, z_bin_l: z_bin_r], 1) + + # print(pred_x_bin[:10, :]) + # input() + + xz_bin_ind = torch.arange(per_loc_bin_num).float() + xz_bin_center = xz_bin_ind * loc_bin_size + loc_bin_size / 2 - loc_scope # num_bin + xz_bin_center = xz_bin_center.to(pred_x_bin.device) + + pred_x_abs = xz_bin_center + pred_z_abs = xz_bin_center + + assert get_xz_fine, 'only the bin format is supported for now!'
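The two branches above differ only in how the bin distribution is collapsed: the default path takes the argmax bin plus its gathered residual, while BBOX_AVG_BY_BIN takes a softmax-weighted average of the per-bin absolute positions. A minimal self-contained sketch of both modes for a single coordinate, with hypothetical loc_scope/loc_bin_size values (illustration only, not part of the patch):

```python
import torch
import torch.nn.functional as F

loc_scope, loc_bin_size = 3.0, 0.5                    # hypothetical values; cfg supplies the real ones
per_loc_bin_num = int(loc_scope / loc_bin_size) * 2   # 12 bins covering [-3, 3)

bin_logits = torch.randn(1, per_loc_bin_num)          # stand-in for pred_reg[:, x_bin_l: x_bin_r]
bin_res = torch.randn(1, per_loc_bin_num) * 0.1       # normalized per-bin residuals

bin_center = torch.arange(per_loc_bin_num).float() * loc_bin_size + loc_bin_size / 2 - loc_scope

# default mode: commit to the argmax bin, add the residual gathered from that bin
x_bin = bin_logits.argmax(dim=1)
pos_hard = bin_center[x_bin] + bin_res.gather(1, x_bin.unsqueeze(1)).squeeze(1) * loc_bin_size

# BBOX_AVG_BY_BIN mode: expectation of the per-bin absolute positions under the softmax
prob = F.softmax(bin_logits, dim=1)
pos_soft = ((bin_center + bin_res * loc_bin_size) * prob).sum(dim=1)

print(pos_hard, pos_soft)  # the soft estimate blends neighboring bins instead of committing to one
```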
+ if get_xz_fine: + x_res_l, x_res_r = per_loc_bin_num * 2, per_loc_bin_num * 3 + z_res_l, z_res_r = per_loc_bin_num * 3, per_loc_bin_num * 4 + start_offset = z_res_r + + pred_x_reg = pred_reg[:, x_res_l: x_res_r] * loc_bin_size # N x num_bin + pred_z_reg = pred_reg[:, z_res_l: z_res_r] * loc_bin_size + + pred_x_abs = pred_x_abs + pred_x_reg + pred_z_abs = pred_z_abs + pred_z_reg + + pos_x = (pred_x_abs * pred_x_bin).sum(dim=1) + pos_z = (pred_z_abs * pred_z_bin).sum(dim=1) + + + # recover y localization + if get_y_by_bin: + y_bin_l, y_bin_r = start_offset, start_offset + loc_y_bin_num + y_res_l, y_res_r = y_bin_r, y_bin_r + loc_y_bin_num + start_offset = y_res_r + + y_bin = torch.argmax(pred_reg[:, y_bin_l: y_bin_r], dim=1) + y_res_norm = torch.gather(pred_reg[:, y_res_l: y_res_r], dim=1, index=y_bin.unsqueeze(dim=1)).squeeze(dim=1) + y_res = y_res_norm * loc_y_bin_size + pos_y = y_bin.float() * loc_y_bin_size + loc_y_bin_size / 2 - loc_y_scope + y_res + pos_y = pos_y + roi_box3d[:, 1] + else: + y_offset_l, y_offset_r = start_offset, start_offset + 1 + start_offset = y_offset_r + + pos_y = roi_box3d[:, 1] + pred_reg[:, y_offset_l] + + # recover ry rotation + ry_bin_l, ry_bin_r = start_offset, start_offset + num_head_bin + ry_res_l, ry_res_r = ry_bin_r, ry_bin_r + num_head_bin + + assert cfg.TRAIN.RY_WITH_BIN == cfg.TEST.RY_WITH_BIN + if not cfg.TEST.RY_WITH_BIN: + ry_bin = torch.argmax(pred_reg[:, ry_bin_l: ry_bin_r], dim=1) + ry_res_norm = torch.gather(pred_reg[:, ry_res_l: ry_res_r], dim=1, index=ry_bin.unsqueeze(dim=1)).squeeze(dim=1) + if get_ry_fine: + # divide pi/2 into several bins + angle_per_class = (np.pi / 2) / num_head_bin + ry_res = ry_res_norm * (angle_per_class / 2) + ry = (ry_bin.float() * angle_per_class + angle_per_class / 2) + ry_res - np.pi / 4 + else: + angle_per_class = (2 * np.pi) / num_head_bin + ry_res = ry_res_norm * (angle_per_class / 2) + + # bin_center is (0, 30, 60, 90, 120, ..., 270, 300, 330) + ry = (ry_bin.float() * angle_per_class + ry_res) % (2 * np.pi) + ry[ry > np.pi] -= 2 * np.pi + else: + # print("RY with BIN") + ry_bin = F.softmax(pred_reg[:, ry_bin_l: ry_bin_r], 1) + # print(ry_bin[:10, :]) + # input() + ry_res_norm = pred_reg[:, ry_res_l: ry_res_r] + if get_ry_fine: + # divide pi/2 into several bins + angle_per_class = (np.pi / 2) / num_head_bin + ry_res = ry_res_norm * (angle_per_class / 2) + # ry = (ry_bin.float() * angle_per_class + angle_per_class / 2) + ry_res - np.pi / 4 + ry_bin_ind = torch.arange(num_head_bin).float().to(ry_res_norm.device) + ry = (ry_bin_ind * angle_per_class + angle_per_class / 2) + ry_res - np.pi / 4 + # [way1] + # ry = (ry * ry_bin).sum(dim=1) + + # [way2] + ry_bin_r = ry_bin.clone() + ry_bin_r[ry<0] = 0 # [0, pi/4] + p_rside = ry_bin_r.sum(dim=1, keepdim=True) + 1e-7 # B + ry_bin_r =ry_bin_r/p_rside + + ry_bin_l = ry_bin.clone() + ry_bin_l[ry>=0] = 0 #[-pi/4, 0] + p_lside = ry_bin_l.sum(dim=1, keepdim=True) + 1e-7 + ry_bin_l =ry_bin_l/p_lside + + # assert 1 - (p_rside + p_lside) < p_lside.new().resize_(p_lside.size()).fill_(1e-4) + ry_r = ry.clone() + ry_r[ry_r<0] = 0 + ry_r = (ry_r * ry_bin_r).sum(dim=1) + + ry_l = ry.clone() + ry_l[ry_l>=0] = 0 + ry_l = (ry_l * ry_bin_l).sum(dim=1) + + # flags + use_r = p_rside.squeeze() >= p_lside.squeeze() + use_l = p_rside.squeeze() < p_lside.squeeze() + ry = ry_r * use_r.float() + ry_l * use_l.float() + + else: + angle_per_class = (2 * np.pi) / num_head_bin + ry_res = ry_res_norm * (angle_per_class / 2) + + # bin_center is (0, 30, 60, 90, 120, ..., 270, 300, 330) + # ry = 
(ry_bin.float() * angle_per_class + ry_res) % (2 * np.pi) + ry_bin_ind = torch.arange(num_head_bin).float().to(ry_res_norm.device) + ry = (ry_bin_ind * angle_per_class + ry_res) % (2*np.pi) + + # [way1] to [0, pi] + # ry[ry > np.pi] -= np.pi + # ry = (ry * ry_bin).sum(dim=1) + # ry[ry > np.pi] -= 2 * np.pi + + # [way2] ry [0, 2pi] + ry_bin_r = ry_bin.clone() + ry_bin_r[ry > np.pi] = 0 # [0, pi] + p_rside = ry_bin_r.sum(dim=1, keepdim=True) + 1e-7 # B + ry_bin_r =ry_bin_r/p_rside + + ry_bin_l = ry_bin.clone() + ry_bin_l[ry <= np.pi] = 0 # (pi, 2*pi] + p_lside = ry_bin_l.sum(dim=1, keepdim=True) + 1e-7 + ry_bin_l =ry_bin_l/p_lside + + ry_r = ry.clone() + ry_r[ry_r > np.pi] = 0 + ry_r = (ry_r * ry_bin_r).sum(dim=1) # [0, pi] + # print('ry_r', ry_r.size()) + + ry_l = ry.clone() + ry_l[ry_l <= np.pi] = 0 + ry_l = (ry_l * ry_bin_l).sum(dim=1) # (pi, 2*pi] + # print('ry_l', ry_l.size()) + + # flags + use_r = p_rside.squeeze() >= p_lside.squeeze() + use_l = p_rside.squeeze() < p_lside.squeeze() + # print('use_r', use_r.size()) + ry = ry_r * use_r.float() + ry_l * use_l.float() + + # p_rside = ry_bin[ry <= np.pi].sum() + # p_lside = ry_bin[ry > np.pi].sum() + # assert 1 - (p_rside + p_lside).sum().data < 1e-4 + # if p_rside > p_lside: + # ws_r = ry_bin[ry <= np.pi]/ry_bin[ry <= np.pi].sum(dim=1, keepdim=True) + # ry_r = ry[ry<=np.pi] + # ry = (ry_r * ws_r).sum(dim=1) # [0, np.pi] + # else: + # ws_l = ry_bin[ry>np.pi]/ry_bin[ry>np.pi].sum(dim=1, keepdim=True) + # ry_l = ry[ry>np.pi] + # ry = (ry_l * ws_l).sum(dim=1) # [np.pi, 2*np.pi] + ry[ry>np.pi] -= 2*np.pi + + # print(ry.size()) + + + # recover size + size_res_l, size_res_r = ry_res_r, ry_res_r + 3 + assert size_res_r == pred_reg.shape[1] + + size_res_norm = pred_reg[:, size_res_l: size_res_r] + hwl = size_res_norm * anchor_size + anchor_size + + # shift to original coords + roi_center = roi_box3d[:, 0:3] + shift_ret_box3d = torch.cat((pos_x.view(-1, 1), pos_y.view(-1, 1), pos_z.view(-1, 1), hwl, ry.view(-1, 1)), dim=1) + ret_box3d = shift_ret_box3d + if roi_box3d.shape[1] == 7: + roi_ry = roi_box3d[:, 6] + ret_box3d = rotate_pc_along_y_torch(shift_ret_box3d, - roi_ry) + ret_box3d[:, 6] += roi_ry + ret_box3d[:, [0, 2]] += roi_center[:, [0, 2]] + + return ret_box3d \ No newline at end of file diff --git a/lib/utils/calibration.py b/lib/utils/calibration.py new file mode 100644 index 0000000..e05655f --- /dev/null +++ b/lib/utils/calibration.py @@ -0,0 +1,140 @@ +import numpy as np +import os + + +def get_calib_from_file(calib_file): + with open(calib_file) as f: + lines = f.readlines() + + obj = lines[2].strip().split(' ')[1:] + P2 = np.array(obj, dtype = np.float32) + obj = lines[3].strip().split(' ')[1:] + P3 = np.array(obj, dtype = np.float32) + obj = lines[4].strip().split(' ')[1:] + R0 = np.array(obj, dtype = np.float32) + obj = lines[5].strip().split(' ')[1:] + Tr_velo_to_cam = np.array(obj, dtype = np.float32) + + return { 'P2' : P2.reshape(3, 4), + 'P3' : P3.reshape(3, 4), + 'R0' : R0.reshape(3, 3), + 'Tr_velo2cam': Tr_velo_to_cam.reshape(3, 4) } + + +class Calibration(object): + def __init__(self, calib_file): + if isinstance(calib_file, str): + calib = get_calib_from_file(calib_file) + else: + calib = calib_file + + self.P2 = calib['P2'] # 3 x 4 + self.R0 = calib['R0'] # 3 x 3 + self.V2C = calib['Tr_velo2cam'] # 3 x 4 + + # Camera intrinsics and extrinsics + self.cu = self.P2[0, 2] + self.cv = self.P2[1, 2] + self.fu = self.P2[0, 0] + self.fv = self.P2[1, 1] + self.tx = self.P2[0, 3] / (-self.fu) + self.ty = self.P2[1, 3] / (-self.fv) + 
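The intrinsics cached in __init__ above (fu, fv, cu, cv and the tx, ty baseline terms) drive the projection helpers that follow. A minimal round-trip sketch of the same pinhole relations, assuming KITTI-like but hypothetical intrinsic values and a zero baseline so the forward and inverse maps invert exactly:

```python
# hypothetical KITTI-like intrinsics; a real Calibration object reads them from P2
fu = fv = 721.5377
cu, cv = 609.5593, 172.8540
tx = ty = 0.0  # baseline terms; zero here so the round trip is exact

# one point in the rectified camera frame (x right, y down, z forward), in meters
x, y, z = 1.0, 1.5, 20.0

# forward projection, as in rect_to_img: pixel = focal * coordinate / depth + principal point
u = fu * x / z + cu
v = fv * y / z + cv

# inverse mapping, as in img_to_rect: recover the 3D point given the pixel and its depth
x_back = (u - cu) * z / fu + tx
y_back = (v - cv) * z / fv + ty
assert abs(x_back - x) < 1e-9 and abs(y_back - y) < 1e-9
```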
+ def cart_to_hom(self, pts): + """ + :param pts: (N, 3 or 2) + :return pts_hom: (N, 4 or 3) + """ + pts_hom = np.hstack((pts, np.ones((pts.shape[0], 1), dtype = np.float32))) + return pts_hom + + def lidar_to_rect(self, pts_lidar): + """ + :param pts_lidar: (N, 3) + :return pts_rect: (N, 3) + """ + pts_lidar_hom = self.cart_to_hom(pts_lidar) + pts_rect = np.dot(pts_lidar_hom, np.dot(self.V2C.T, self.R0.T)) + # pts_rect = reduce(np.dot, (pts_lidar_hom, self.V2C.T, self.R0.T)) + return pts_rect + + def rect_to_img(self, pts_rect): + """ + :param pts_rect: (N, 3) + :return pts_img: (N, 2) + """ + pts_rect_hom = self.cart_to_hom(pts_rect) + pts_2d_hom = np.dot(pts_rect_hom, self.P2.T) + pts_img = (pts_2d_hom[:, 0:2].T / pts_rect_hom[:, 2]).T # (N, 2) + pts_rect_depth = pts_2d_hom[:, 2] - self.P2.T[3, 2] # depth in rect camera coord + return pts_img, pts_rect_depth + + def lidar_to_img(self, pts_lidar): + """ + :param pts_lidar: (N, 3) + :return pts_img: (N, 2) + """ + pts_rect = self.lidar_to_rect(pts_lidar) + pts_img, pts_depth = self.rect_to_img(pts_rect) + return pts_img, pts_depth + + def img_to_rect(self, u, v, depth_rect): + """ + :param u: (N) + :param v: (N) + :param depth_rect: (N) + :return: + """ + x = ((u - self.cu) * depth_rect) / self.fu + self.tx + y = ((v - self.cv) * depth_rect) / self.fv + self.ty + pts_rect = np.concatenate((x.reshape(-1, 1), y.reshape(-1, 1), depth_rect.reshape(-1, 1)), axis = 1) + return pts_rect + + def depthmap_to_rect(self, depth_map): + """ + :param depth_map: (H, W), depth_map + :return: + """ + x_range = np.arange(0, depth_map.shape[1]) + y_range = np.arange(0, depth_map.shape[0]) + x_idxs, y_idxs = np.meshgrid(x_range, y_range) + x_idxs, y_idxs = x_idxs.reshape(-1), y_idxs.reshape(-1) + depth = depth_map[y_idxs, x_idxs] + pts_rect = self.img_to_rect(x_idxs, y_idxs, depth) + return pts_rect, x_idxs, y_idxs + + def corners3d_to_img_boxes(self, corners3d): + """ + :param corners3d: (N, 8, 3) corners in rect coordinate + :return: boxes: (None, 4) [x1, y1, x2, y2] in rgb coordinate + :return: boxes_corner: (None, 8) [xi, yi] in rgb coordinate + """ + sample_num = corners3d.shape[0] + corners3d_hom = np.concatenate((corners3d, np.ones((sample_num, 8, 1))), axis = 2) # (N, 8, 4) + + img_pts = np.matmul(corners3d_hom, self.P2.T) # (N, 8, 3) + + x, y = img_pts[:, :, 0] / img_pts[:, :, 2], img_pts[:, :, 1] / img_pts[:, :, 2] + x1, y1 = np.min(x, axis = 1), np.min(y, axis = 1) + x2, y2 = np.max(x, axis = 1), np.max(y, axis = 1) + + boxes = np.concatenate((x1.reshape(-1, 1), y1.reshape(-1, 1), x2.reshape(-1, 1), y2.reshape(-1, 1)), axis = 1) + boxes_corner = np.concatenate((x.reshape(-1, 8, 1), y.reshape(-1, 8, 1)), axis = 2) + + return boxes, boxes_corner + + def camera_dis_to_rect(self, u, v, d): + """ + Can only process valid u, v, d, which means u, v can not beyond the image shape, reprojection error 0.02 + :param u: (N) + :param v: (N) + :param d: (N), the distance between camera and 3d points, d^2 = x^2 + y^2 + z^2 + :return: + """ + assert self.fu == self.fv, '%.8f != %.8f' % (self.fu, self.fv) + fd = np.sqrt((u - self.cu) ** 2 + (v - self.cv) ** 2 + self.fu ** 2) + x = ((u - self.cu) * d) / fd + self.tx + y = ((v - self.cv) * d) / fd + self.ty + z = np.sqrt(d ** 2 - x ** 2 - y ** 2) + pts_rect = np.concatenate((x.reshape(-1, 1), y.reshape(-1, 1), z.reshape(-1, 1)), axis = 1) + return pts_rect diff --git a/lib/utils/iou3d/iou3d_utils.py b/lib/utils/iou3d/iou3d_utils.py new file mode 100644 index 0000000..dfdb57a --- /dev/null +++ 
b/lib/utils/iou3d/iou3d_utils.py @@ -0,0 +1,91 @@ +import torch +import iou3d_cuda +import lib.utils.kitti_utils as kitti_utils + + +def boxes_iou_bev(boxes_a, boxes_b): + """ + :param boxes_a: (M, 5) + :param boxes_b: (N, 5) + :return: + ans_iou: (M, N) + """ + + ans_iou = torch.cuda.FloatTensor(torch.Size((boxes_a.shape[0], boxes_b.shape[0]))).zero_() + + iou3d_cuda.boxes_iou_bev_gpu(boxes_a.contiguous(), boxes_b.contiguous(), ans_iou) + + return ans_iou + + +def boxes_iou3d_gpu(boxes_a, boxes_b): + """ + :param boxes_a: (N, 7) [x, y, z, h, w, l, ry] + :param boxes_b: (M, 7) [x, y, z, h, w, l, ry] + :return: + ans_iou: (N, M) + """ + boxes_a_bev = kitti_utils.boxes3d_to_bev_torch(boxes_a) + boxes_b_bev = kitti_utils.boxes3d_to_bev_torch(boxes_b) + + # bev overlap + overlaps_bev = torch.cuda.FloatTensor(torch.Size((boxes_a.shape[0], boxes_b.shape[0]))).zero_() # (N, M) + iou3d_cuda.boxes_overlap_bev_gpu(boxes_a_bev.contiguous(), boxes_b_bev.contiguous(), overlaps_bev) + + # height overlap + boxes_a_height_min = (boxes_a[:, 1] - boxes_a[:, 3]).view(-1, 1) + boxes_a_height_max = boxes_a[:, 1].view(-1, 1) + boxes_b_height_min = (boxes_b[:, 1] - boxes_b[:, 3]).view(1, -1) + boxes_b_height_max = boxes_b[:, 1].view(1, -1) + + max_of_min = torch.max(boxes_a_height_min, boxes_b_height_min) + min_of_max = torch.min(boxes_a_height_max, boxes_b_height_max) + overlaps_h = torch.clamp(min_of_max - max_of_min, min = 0) + + # 3d iou + overlaps_3d = overlaps_bev * overlaps_h + + vol_a = (boxes_a[:, 3] * boxes_a[:, 4] * boxes_a[:, 5]).view(-1, 1) + vol_b = (boxes_b[:, 3] * boxes_b[:, 4] * boxes_b[:, 5]).view(1, -1) + + iou3d = overlaps_3d / torch.clamp(vol_a + vol_b - overlaps_3d, min = 1e-7) + + return iou3d + + +def nms_gpu(boxes, scores, thresh): + """ + :param boxes: (N, 5) [x1, y1, x2, y2, ry] + :param scores: (N) + :param thresh: + :return: + """ + # areas = (x2 - x1) * (y2 - y1) + order = scores.sort(0, descending = True)[1] + + boxes = boxes[order].contiguous() + + keep = torch.LongTensor(boxes.size(0)) + num_out = iou3d_cuda.nms_gpu(boxes, keep, thresh) + return order[keep[:num_out].cuda()].contiguous() + + +def nms_normal_gpu(boxes, scores, thresh): + """ + :param boxes: (N, 5) [x1, y1, x2, y2, ry] + :param scores: (N) + :param thresh: + :return: + """ + # areas = (x2 - x1) * (y2 - y1) + order = scores.sort(0, descending = True)[1] + + boxes = boxes[order].contiguous() + + keep = torch.LongTensor(boxes.size(0)) + num_out = iou3d_cuda.nms_normal_gpu(boxes, keep, thresh) + return order[keep[:num_out].cuda()].contiguous() + + +if __name__ == '__main__': + pass diff --git a/lib/utils/iou3d/setup.py b/lib/utils/iou3d/setup.py new file mode 100644 index 0000000..025d75c --- /dev/null +++ b/lib/utils/iou3d/setup.py @@ -0,0 +1,14 @@ +from setuptools import setup +from torch.utils.cpp_extension import BuildExtension, CUDAExtension + +setup( + name = 'iou3d', + ext_modules = [ + CUDAExtension('iou3d_cuda', [ + 'src/iou3d.cpp', + 'src/iou3d_kernel.cu', + ], + extra_compile_args = { 'cxx' : ['-g'], + 'nvcc': ['-O2'] }) + ], + cmdclass = { 'build_ext': BuildExtension }) diff --git a/lib/utils/iou3d/src/iou3d.cpp b/lib/utils/iou3d/src/iou3d.cpp new file mode 100644 index 0000000..7ac6272 --- /dev/null +++ b/lib/utils/iou3d/src/iou3d.cpp @@ -0,0 +1,180 @@ +#include <torch/serialize/tensor.h> +#include <torch/extension.h> +#include <vector> +#include <cuda.h> +#include <cuda_runtime_api.h> + +#define CHECK_CUDA(x) AT_CHECK(x.type().is_cuda(), #x, " must be a CUDAtensor ") +#define CHECK_CONTIGUOUS(x) AT_CHECK(x.is_contiguous(), #x, " must be contiguous ") +#define CHECK_INPUT(x) CHECK_CUDA(x);CHECK_CONTIGUOUS(x)
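Once this extension is built (for example with `python setup.py install` run inside `lib/utils/iou3d`), the Python wrappers above are callable directly. A usage sketch on hypothetical car-sized boxes; the import path assumes the repo layout shown in this patch, and a CUDA device is required:

```python
import torch
from lib.utils.iou3d import iou3d_utils  # assumed import path; matches the file added above

# two hypothetical boxes in (x, y, z, h, w, l, ry) camera-frame format
boxes_a = torch.tensor([[0.0, 1.0, 20.0, 1.5, 1.6, 3.9, 0.0]]).cuda()
boxes_b = torch.tensor([[0.5, 1.0, 20.5, 1.5, 1.6, 3.9, 0.3]]).cuda()

iou = iou3d_utils.boxes_iou3d_gpu(boxes_a, boxes_b)  # (1, 1) pairwise 3D IoU matrix
print(iou)
```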
+ +#define DIVUP(m,n) ((m) / (n) + ((m) % (n) > 0)) + +#define CHECK_ERROR(ans) { gpuAssert((ans), __FILE__, __LINE__); } +inline void gpuAssert(cudaError_t code, const char *file, int line, bool abort=true) +{ + if (code != cudaSuccess) + { + fprintf(stderr,"GPUassert: %s %s %d\n", cudaGetErrorString(code), file, line); + if (abort) exit(code); + } +} + +const int THREADS_PER_BLOCK_NMS = sizeof(unsigned long long) * 8; + + +void boxesoverlapLauncher(const int num_a, const float *boxes_a, const int num_b, const float *boxes_b, float *ans_overlap); +void boxesioubevLauncher(const int num_a, const float *boxes_a, const int num_b, const float *boxes_b, float *ans_iou); +void nmsLauncher(const float *boxes, unsigned long long * mask, int boxes_num, float nms_overlap_thresh); +void nmsNormalLauncher(const float *boxes, unsigned long long * mask, int boxes_num, float nms_overlap_thresh); + +int boxes_overlap_bev_gpu(at::Tensor boxes_a, at::Tensor boxes_b, at::Tensor ans_overlap){ + // params boxes_a: (N, 5) [x1, y1, x2, y2, ry] + // params boxes_b: (M, 5) + // params ans_overlap: (N, M) + + CHECK_INPUT(boxes_a); + CHECK_INPUT(boxes_b); + CHECK_INPUT(ans_overlap); + + int num_a = boxes_a.size(0); + int num_b = boxes_b.size(0); + + const float * boxes_a_data = boxes_a.data<float>(); + const float * boxes_b_data = boxes_b.data<float>(); + float * ans_overlap_data = ans_overlap.data<float>(); + + boxesoverlapLauncher(num_a, boxes_a_data, num_b, boxes_b_data, ans_overlap_data); + + return 1; +} + +int boxes_iou_bev_gpu(at::Tensor boxes_a, at::Tensor boxes_b, at::Tensor ans_iou){ + // params boxes_a: (N, 5) [x1, y1, x2, y2, ry] + // params boxes_b: (M, 5) + // params ans_iou: (N, M) + + CHECK_INPUT(boxes_a); + CHECK_INPUT(boxes_b); + CHECK_INPUT(ans_iou); + + int num_a = boxes_a.size(0); + int num_b = boxes_b.size(0); + + const float * boxes_a_data = boxes_a.data<float>(); + const float * boxes_b_data = boxes_b.data<float>(); + float * ans_iou_data = ans_iou.data<float>(); + + boxesioubevLauncher(num_a, boxes_a_data, num_b, boxes_b_data, ans_iou_data); + + return 1; +} + +int nms_gpu(at::Tensor boxes, at::Tensor keep, float nms_overlap_thresh){ + // params boxes: (N, 5) [x1, y1, x2, y2, ry] + // params keep: (N) + + CHECK_INPUT(boxes); + CHECK_CONTIGUOUS(keep); + + int boxes_num = boxes.size(0); + const float * boxes_data = boxes.data<float>(); + long * keep_data = keep.data<long>(); + + const int col_blocks = DIVUP(boxes_num, THREADS_PER_BLOCK_NMS); + + unsigned long long *mask_data = NULL; + CHECK_ERROR(cudaMalloc((void**)&mask_data, boxes_num * col_blocks * sizeof(unsigned long long))); + nmsLauncher(boxes_data, mask_data, boxes_num, nms_overlap_thresh); + + // unsigned long long mask_cpu[boxes_num * col_blocks]; + // unsigned long long *mask_cpu = new unsigned long long [boxes_num * col_blocks]; + std::vector<unsigned long long> mask_cpu(boxes_num * col_blocks); + +// printf("boxes_num=%d, col_blocks=%d\n", boxes_num, col_blocks); + CHECK_ERROR(cudaMemcpy(&mask_cpu[0], mask_data, boxes_num * col_blocks * sizeof(unsigned long long), + cudaMemcpyDeviceToHost)); + + cudaFree(mask_data); + + unsigned long long remv_cpu[col_blocks]; + memset(remv_cpu, 0, col_blocks * sizeof(unsigned long long)); + + int num_to_keep = 0; + + for (int i = 0; i < boxes_num; i++){ + int nblock = i / THREADS_PER_BLOCK_NMS; + int inblock = i % THREADS_PER_BLOCK_NMS; + + if (!(remv_cpu[nblock] & (1ULL << inblock))){ + keep_data[num_to_keep++] = i; + unsigned long long *p = &mask_cpu[0] + i * col_blocks; + for (int j = nblock; j < col_blocks; j++){ + remv_cpu[j]
|= p[j]; + } + } + } + if ( cudaSuccess != cudaGetLastError() ) printf( "Error!\n" ); + + return num_to_keep; +} + + +int nms_normal_gpu(at::Tensor boxes, at::Tensor keep, float nms_overlap_thresh){ + // params boxes: (N, 5) [x1, y1, x2, y2, ry] + // params keep: (N) + + CHECK_INPUT(boxes); + CHECK_CONTIGUOUS(keep); + + int boxes_num = boxes.size(0); + const float * boxes_data = boxes.data<float>(); + long * keep_data = keep.data<long>(); + + const int col_blocks = DIVUP(boxes_num, THREADS_PER_BLOCK_NMS); + + unsigned long long *mask_data = NULL; + CHECK_ERROR(cudaMalloc((void**)&mask_data, boxes_num * col_blocks * sizeof(unsigned long long))); + nmsNormalLauncher(boxes_data, mask_data, boxes_num, nms_overlap_thresh); + + // unsigned long long mask_cpu[boxes_num * col_blocks]; + // unsigned long long *mask_cpu = new unsigned long long [boxes_num * col_blocks]; + std::vector<unsigned long long> mask_cpu(boxes_num * col_blocks); + +// printf("boxes_num=%d, col_blocks=%d\n", boxes_num, col_blocks); + CHECK_ERROR(cudaMemcpy(&mask_cpu[0], mask_data, boxes_num * col_blocks * sizeof(unsigned long long), + cudaMemcpyDeviceToHost)); + + cudaFree(mask_data); + + unsigned long long remv_cpu[col_blocks]; + memset(remv_cpu, 0, col_blocks * sizeof(unsigned long long)); + + int num_to_keep = 0; + + for (int i = 0; i < boxes_num; i++){ + int nblock = i / THREADS_PER_BLOCK_NMS; + int inblock = i % THREADS_PER_BLOCK_NMS; + + if (!(remv_cpu[nblock] & (1ULL << inblock))){ + keep_data[num_to_keep++] = i; + unsigned long long *p = &mask_cpu[0] + i * col_blocks; + for (int j = nblock; j < col_blocks; j++){ + remv_cpu[j] |= p[j]; + } + } + } + if ( cudaSuccess != cudaGetLastError() ) printf( "Error!\n" ); + + return num_to_keep; +} + + + +PYBIND11_MODULE(TORCH_EXTENSION_NAME, m) { + m.def("boxes_overlap_bev_gpu", &boxes_overlap_bev_gpu, "oriented boxes overlap"); + m.def("boxes_iou_bev_gpu", &boxes_iou_bev_gpu, "oriented boxes iou"); + m.def("nms_gpu", &nms_gpu, "oriented nms gpu"); + m.def("nms_normal_gpu", &nms_normal_gpu, "nms gpu"); +} + diff --git a/lib/utils/iou3d/src/iou3d_kernel.cu b/lib/utils/iou3d/src/iou3d_kernel.cu new file mode 100644 index 0000000..328a557 --- /dev/null +++ b/lib/utils/iou3d/src/iou3d_kernel.cu @@ -0,0 +1,387 @@ +/* +3D IoU Calculation and Rotated NMS (modified from 2D NMS written by others) +Written by Shaoshuai Shi +All Rights Reserved 2018. +*/
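Both NMS entry points above follow the same two-stage pattern: the kernel fills, for each box i, one 64-bit word per column block whose set bits mark the boxes that i suppresses, and the host then makes a single sweep over the score-sorted list. A toy NumPy re-enactment of that host-side sweep with a hypothetical hand-built mask (illustration only, not the CUDA path):

```python
import numpy as np

boxes_num, block = 5, 64                       # THREADS_PER_BLOCK_NMS is 64 bits per word
col_blocks = (boxes_num + block - 1) // block  # DIVUP(boxes_num, 64)

# hypothetical suppression pattern: box 0 overlaps boxes 1 and 3, box 2 overlaps box 4
mask = np.zeros((boxes_num, col_blocks), dtype=np.uint64)
mask[0, 0] = (1 << 1) | (1 << 3)
mask[2, 0] = 1 << 4

remv = np.zeros(col_blocks, dtype=np.uint64)    # bits of already-suppressed boxes
keep = []
for i in range(boxes_num):                      # boxes assumed pre-sorted by score
    nblock, inblock = divmod(i, block)
    if not (int(remv[nblock]) >> inblock) & 1:  # box i not suppressed by any kept box
        keep.append(i)
        remv |= mask[i]                         # fold in everything box i suppresses
print(keep)  # [0, 2]; boxes 1, 3 and 4 are suppressed
```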
+ +#include <stdio.h> +#define THREADS_PER_BLOCK 16 +#define DIVUP(m, n) ((m) / (n) + ((m) % (n) > 0)) + +//#define DEBUG +const int THREADS_PER_BLOCK_NMS = sizeof(unsigned long long) * 8; +const float EPS = 1e-8; +struct Point { + float x, y; + __device__ Point() {} + __device__ Point(double _x, double _y){ + x = _x, y = _y; + } + + __device__ void set(float _x, float _y){ + x = _x; y = _y; + } + + __device__ Point operator +(const Point &b)const{ + return Point(x + b.x, y + b.y); + } + + __device__ Point operator -(const Point &b)const{ + return Point(x - b.x, y - b.y); + } +}; + +__device__ inline float cross(const Point &a, const Point &b){ + return a.x * b.y - a.y * b.x; +} + +__device__ inline float cross(const Point &p1, const Point &p2, const Point &p0){ + return (p1.x - p0.x) * (p2.y - p0.y) - (p2.x - p0.x) * (p1.y - p0.y); +} + +__device__ int check_rect_cross(const Point &p1, const Point &p2, const Point &q1, const Point &q2){ + int ret = min(p1.x,p2.x) <= max(q1.x,q2.x) && + min(q1.x,q2.x) <= max(p1.x,p2.x) && + min(p1.y,p2.y) <= max(q1.y,q2.y) && + min(q1.y,q2.y) <= max(p1.y,p2.y); + return ret; +} + +__device__ inline int check_in_box2d(const float *box, const Point &p){ + //params: box (5) [x1, y1, x2, y2, angle] + const float MARGIN = 1e-5; + + float center_x = (box[0] + box[2]) / 2; + float center_y = (box[1] + box[3]) / 2; + float angle_cos = cos(-box[4]), angle_sin = sin(-box[4]); // rotate the point in the opposite direction of box + float rot_x = (p.x - center_x) * angle_cos + (p.y - center_y) * angle_sin + center_x; + float rot_y = -(p.x - center_x) * angle_sin + (p.y - center_y) * angle_cos + center_y; +#ifdef DEBUG + printf("box: (%.3f, %.3f, %.3f, %.3f, %.3f)\n", box[0], box[1], box[2], box[3], box[4]); + printf("center: (%.3f, %.3f), cossin(%.3f, %.3f), src(%.3f, %.3f), rot(%.3f, %.3f)\n", center_x, center_y, + angle_cos, angle_sin, p.x, p.y, rot_x, rot_y); +#endif + return (rot_x > box[0] - MARGIN && rot_x < box[2] + MARGIN && rot_y > box[1] - MARGIN && rot_y < box[3] + MARGIN); +} + +__device__ inline int intersection(const Point &p1, const Point &p0, const Point &q1, const Point &q0, Point &ans){ + // fast exclusion + if (check_rect_cross(p0, p1, q0, q1) == 0) return 0; + + // check cross standing + float s1 = cross(q0, p1, p0); + float s2 = cross(p1, q1, p0); + float s3 = cross(p0, q1, q0); + float s4 = cross(q1, p1, q0); + + if (!(s1 * s2 > 0 && s3 * s4 > 0)) return 0; + + // calculate intersection of two lines + float s5 = cross(q1, p1, p0); + if(fabs(s5 - s1) > EPS){ + ans.x = (s5 * q0.x - s1 * q1.x) / (s5 - s1); + ans.y = (s5 * q0.y - s1 * q1.y) / (s5 - s1); + + } + else{ + float a0 = p0.y - p1.y, b0 = p1.x - p0.x, c0 = p0.x * p1.y - p1.x * p0.y; + float a1 = q0.y - q1.y, b1 = q1.x - q0.x, c1 = q0.x * q1.y - q1.x * q0.y; + float D = a0 * b1 - a1 * b0; + + ans.x = (b0 * c1 - b1 * c0) / D; + ans.y = (a1 * c0 - a0 * c1) / D; + } + + return 1; +} + +__device__ inline void rotate_around_center(const Point &center, const float angle_cos, const float angle_sin, Point &p){ + float new_x = (p.x - center.x) * angle_cos + (p.y - center.y) * angle_sin + center.x; + float new_y = -(p.x - center.x) * angle_sin + (p.y - center.y) * angle_cos + center.y; + p.set(new_x, new_y); +} + +__device__ inline int point_cmp(const Point &a, const Point &b, const Point &center){ + return atan2(a.y - center.y, a.x - center.x) > atan2(b.y - center.y, b.x - center.x); +} + +__device__ inline float box_overlap(const float *box_a, const float *box_b){ + // params: box_a (5) [x1, y1, x2, y2, angle] + //
params: box_b (5) [x1, y1, x2, y2, angle] + + float a_x1 = box_a[0], a_y1 = box_a[1], a_x2 = box_a[2], a_y2 = box_a[3], a_angle = box_a[4]; + float b_x1 = box_b[0], b_y1 = box_b[1], b_x2 = box_b[2], b_y2 = box_b[3], b_angle = box_b[4]; + + Point center_a((a_x1 + a_x2) / 2, (a_y1 + a_y2) / 2); + Point center_b((b_x1 + b_x2) / 2, (b_y1 + b_y2) / 2); +#ifdef DEBUG + printf("a: (%.3f, %.3f, %.3f, %.3f, %.3f), b: (%.3f, %.3f, %.3f, %.3f, %.3f)\n", a_x1, a_y1, a_x2, a_y2, a_angle, + b_x1, b_y1, b_x2, b_y2, b_angle); + printf("center a: (%.3f, %.3f), b: (%.3f, %.3f)\n", center_a.x, center_a.y, center_b.x, center_b.y); +#endif + + Point box_a_corners[5]; + box_a_corners[0].set(a_x1, a_y1); + box_a_corners[1].set(a_x2, a_y1); + box_a_corners[2].set(a_x2, a_y2); + box_a_corners[3].set(a_x1, a_y2); + + Point box_b_corners[5]; + box_b_corners[0].set(b_x1, b_y1); + box_b_corners[1].set(b_x2, b_y1); + box_b_corners[2].set(b_x2, b_y2); + box_b_corners[3].set(b_x1, b_y2); + + // get oriented corners + float a_angle_cos = cos(a_angle), a_angle_sin = sin(a_angle); + float b_angle_cos = cos(b_angle), b_angle_sin = sin(b_angle); + + for (int k = 0; k < 4; k++){ +#ifdef DEBUG + printf("before corner %d: a(%.3f, %.3f), b(%.3f, %.3f) \n", k, box_a_corners[k].x, box_a_corners[k].y, box_b_corners[k].x, box_b_corners[k].y); +#endif + rotate_around_center(center_a, a_angle_cos, a_angle_sin, box_a_corners[k]); + rotate_around_center(center_b, b_angle_cos, b_angle_sin, box_b_corners[k]); +#ifdef DEBUG + printf("corner %d: a(%.3f, %.3f), b(%.3f, %.3f) \n", k, box_a_corners[k].x, box_a_corners[k].y, box_b_corners[k].x, box_b_corners[k].y); +#endif + } + + box_a_corners[4] = box_a_corners[0]; + box_b_corners[4] = box_b_corners[0]; + + // get intersection of lines + Point cross_points[16]; + Point poly_center; + int cnt = 0, flag = 0; + + poly_center.set(0, 0); + for (int i = 0; i < 4; i++){ + for (int j = 0; j < 4; j++){ + flag = intersection(box_a_corners[i + 1], box_a_corners[i], box_b_corners[j + 1], box_b_corners[j], cross_points[cnt]); + if (flag){ + poly_center = poly_center + cross_points[cnt]; + cnt++; + } + } + } + + // check corners + for (int k = 0; k < 4; k++){ + if (check_in_box2d(box_a, box_b_corners[k])){ + poly_center = poly_center + box_b_corners[k]; + cross_points[cnt] = box_b_corners[k]; + cnt++; + } + if (check_in_box2d(box_b, box_a_corners[k])){ + poly_center = poly_center + box_a_corners[k]; + cross_points[cnt] = box_a_corners[k]; + cnt++; + } + } + + poly_center.x /= cnt; + poly_center.y /= cnt; + + // sort the points of polygon + Point temp; + for (int j = 0; j < cnt - 1; j++){ + for (int i = 0; i < cnt - j - 1; i++){ + if (point_cmp(cross_points[i], cross_points[i + 1], poly_center)){ + temp = cross_points[i]; + cross_points[i] = cross_points[i + 1]; + cross_points[i + 1] = temp; + } + } + } + +#ifdef DEBUG + printf("cnt=%d\n", cnt); + for (int i = 0; i < cnt; i++){ + printf("All cross point %d: (%.3f, %.3f)\n", i, cross_points[i].x, cross_points[i].y); + } +#endif + + // get the overlap areas + float area = 0; + for (int k = 0; k < cnt - 1; k++){ + area += cross(cross_points[k] - cross_points[0], cross_points[k + 1] - cross_points[0]); + } + + return fabs(area) / 2.0; +} + +__device__ inline float iou_bev(const float *box_a, const float *box_b){ + // params: box_a (5) [x1, y1, x2, y2, angle] + // params: box_b (5) [x1, y1, x2, y2, angle] + float sa = (box_a[2] - box_a[0]) * (box_a[3] - box_a[1]); + float sb = (box_b[2] - box_b[0]) * (box_b[3] - box_b[1]); + float s_overlap = box_overlap(box_a, 
box_b); + return s_overlap / fmaxf(sa + sb - s_overlap, EPS); +} + +__global__ void boxes_overlap_kernel(const int num_a, const float *boxes_a, const int num_b, const float *boxes_b, float *ans_overlap){ + const int a_idx = blockIdx.y * THREADS_PER_BLOCK + threadIdx.y; + const int b_idx = blockIdx.x * THREADS_PER_BLOCK + threadIdx.x; + + if (a_idx >= num_a || b_idx >= num_b){ + return; + } + const float * cur_box_a = boxes_a + a_idx * 5; + const float * cur_box_b = boxes_b + b_idx * 5; + float s_overlap = box_overlap(cur_box_a, cur_box_b); + ans_overlap[a_idx * num_b + b_idx] = s_overlap; +} + +__global__ void boxes_iou_bev_kernel(const int num_a, const float *boxes_a, const int num_b, const float *boxes_b, float *ans_iou){ + const int a_idx = blockIdx.y * THREADS_PER_BLOCK + threadIdx.y; + const int b_idx = blockIdx.x * THREADS_PER_BLOCK + threadIdx.x; + + if (a_idx >= num_a || b_idx >= num_b){ + return; + } + + const float * cur_box_a = boxes_a + a_idx * 5; + const float * cur_box_b = boxes_b + b_idx * 5; + float cur_iou_bev = iou_bev(cur_box_a, cur_box_b); + ans_iou[a_idx * num_b + b_idx] = cur_iou_bev; +} + +__global__ void nms_kernel(const int boxes_num, const float nms_overlap_thresh, + const float *boxes, unsigned long long *mask){ + //params: boxes (N, 5) [x1, y1, x2, y2, ry] + //params: mask (N, N/THREADS_PER_BLOCK_NMS) + + const int row_start = blockIdx.y; + const int col_start = blockIdx.x; + + // if (row_start > col_start) return; + + const int row_size = fminf(boxes_num - row_start * THREADS_PER_BLOCK_NMS, THREADS_PER_BLOCK_NMS); + const int col_size = fminf(boxes_num - col_start * THREADS_PER_BLOCK_NMS, THREADS_PER_BLOCK_NMS); + + __shared__ float block_boxes[THREADS_PER_BLOCK_NMS * 5]; + + if (threadIdx.x < col_size) { + block_boxes[threadIdx.x * 5 + 0] = boxes[(THREADS_PER_BLOCK_NMS * col_start + threadIdx.x) * 5 + 0]; + block_boxes[threadIdx.x * 5 + 1] = boxes[(THREADS_PER_BLOCK_NMS * col_start + threadIdx.x) * 5 + 1]; + block_boxes[threadIdx.x * 5 + 2] = boxes[(THREADS_PER_BLOCK_NMS * col_start + threadIdx.x) * 5 + 2]; + block_boxes[threadIdx.x * 5 + 3] = boxes[(THREADS_PER_BLOCK_NMS * col_start + threadIdx.x) * 5 + 3]; + block_boxes[threadIdx.x * 5 + 4] = boxes[(THREADS_PER_BLOCK_NMS * col_start + threadIdx.x) * 5 + 4]; + } + __syncthreads(); + + if (threadIdx.x < row_size) { + const int cur_box_idx = THREADS_PER_BLOCK_NMS * row_start + threadIdx.x; + const float *cur_box = boxes + cur_box_idx * 5; + + int i = 0; + unsigned long long t = 0; + int start = 0; + if (row_start == col_start) { + start = threadIdx.x + 1; + } + for (i = start; i < col_size; i++) { + if (iou_bev(cur_box, block_boxes + i * 5) > nms_overlap_thresh){ + t |= 1ULL << i; + } + } + const int col_blocks = DIVUP(boxes_num, THREADS_PER_BLOCK_NMS); + mask[cur_box_idx * col_blocks + col_start] = t; + } +} + + +__device__ inline float iou_normal(float const * const a, float const * const b) { + float left = fmaxf(a[0], b[0]), right = fminf(a[2], b[2]); + float top = fmaxf(a[1], b[1]), bottom = fminf(a[3], b[3]); + float width = fmaxf(right - left, 0.f), height = fmaxf(bottom - top, 0.f); + float interS = width * height; + float Sa = (a[2] - a[0]) * (a[3] - a[1]); + float Sb = (b[2] - b[0]) * (b[3] - b[1]); + return interS / fmaxf(Sa + Sb - interS, EPS); +} + + +__global__ void nms_normal_kernel(const int boxes_num, const float nms_overlap_thresh, + const float *boxes, unsigned long long *mask){ + //params: boxes (N, 5) [x1, y1, x2, y2, ry] + //params: mask (N, N/THREADS_PER_BLOCK_NMS) + + const int row_start 
= blockIdx.y; + const int col_start = blockIdx.x; + + // if (row_start > col_start) return; + + const int row_size = fminf(boxes_num - row_start * THREADS_PER_BLOCK_NMS, THREADS_PER_BLOCK_NMS); + const int col_size = fminf(boxes_num - col_start * THREADS_PER_BLOCK_NMS, THREADS_PER_BLOCK_NMS); + + __shared__ float block_boxes[THREADS_PER_BLOCK_NMS * 5]; + + if (threadIdx.x < col_size) { + block_boxes[threadIdx.x * 5 + 0] = boxes[(THREADS_PER_BLOCK_NMS * col_start + threadIdx.x) * 5 + 0]; + block_boxes[threadIdx.x * 5 + 1] = boxes[(THREADS_PER_BLOCK_NMS * col_start + threadIdx.x) * 5 + 1]; + block_boxes[threadIdx.x * 5 + 2] = boxes[(THREADS_PER_BLOCK_NMS * col_start + threadIdx.x) * 5 + 2]; + block_boxes[threadIdx.x * 5 + 3] = boxes[(THREADS_PER_BLOCK_NMS * col_start + threadIdx.x) * 5 + 3]; + block_boxes[threadIdx.x * 5 + 4] = boxes[(THREADS_PER_BLOCK_NMS * col_start + threadIdx.x) * 5 + 4]; + } + __syncthreads(); + + if (threadIdx.x < row_size) { + const int cur_box_idx = THREADS_PER_BLOCK_NMS * row_start + threadIdx.x; + const float *cur_box = boxes + cur_box_idx * 5; + + int i = 0; + unsigned long long t = 0; + int start = 0; + if (row_start == col_start) { + start = threadIdx.x + 1; + } + for (i = start; i < col_size; i++) { + if (iou_normal(cur_box, block_boxes + i * 5) > nms_overlap_thresh){ + t |= 1ULL << i; + } + } + const int col_blocks = DIVUP(boxes_num, THREADS_PER_BLOCK_NMS); + mask[cur_box_idx * col_blocks + col_start] = t; + } +} + + + + + +void boxesoverlapLauncher(const int num_a, const float *boxes_a, const int num_b, const float *boxes_b, float *ans_overlap){ + + dim3 blocks(DIVUP(num_b, THREADS_PER_BLOCK), DIVUP(num_a, THREADS_PER_BLOCK)); // blockIdx.x(col), blockIdx.y(row) + dim3 threads(THREADS_PER_BLOCK, THREADS_PER_BLOCK); + + boxes_overlap_kernel<<<blocks, threads>>>(num_a, boxes_a, num_b, boxes_b, ans_overlap); +#ifdef DEBUG + cudaDeviceSynchronize(); // for using printf in kernel function +#endif +} + +void boxesioubevLauncher(const int num_a, const float *boxes_a, const int num_b, const float *boxes_b, float *ans_iou){ + + dim3 blocks(DIVUP(num_b, THREADS_PER_BLOCK), DIVUP(num_a, THREADS_PER_BLOCK)); // blockIdx.x(col), blockIdx.y(row) + dim3 threads(THREADS_PER_BLOCK, THREADS_PER_BLOCK); + + boxes_iou_bev_kernel<<<blocks, threads>>>(num_a, boxes_a, num_b, boxes_b, ans_iou); +} + + +void nmsLauncher(const float *boxes, unsigned long long * mask, int boxes_num, float nms_overlap_thresh){ + dim3 blocks(DIVUP(boxes_num, THREADS_PER_BLOCK_NMS), + DIVUP(boxes_num, THREADS_PER_BLOCK_NMS)); + dim3 threads(THREADS_PER_BLOCK_NMS); + nms_kernel<<<blocks, threads>>>(boxes_num, nms_overlap_thresh, boxes, mask); +} + + +void nmsNormalLauncher(const float *boxes, unsigned long long * mask, int boxes_num, float nms_overlap_thresh){ + dim3 blocks(DIVUP(boxes_num, THREADS_PER_BLOCK_NMS), + DIVUP(boxes_num, THREADS_PER_BLOCK_NMS)); + dim3 threads(THREADS_PER_BLOCK_NMS); + nms_normal_kernel<<<blocks, threads>>>(boxes_num, nms_overlap_thresh, boxes, mask); +} diff --git a/lib/utils/kitti_utils.py b/lib/utils/kitti_utils.py new file mode 100644 index 0000000..c9a3e91 --- /dev/null +++ b/lib/utils/kitti_utils.py @@ -0,0 +1,238 @@ +import numpy as np +from scipy.spatial import Delaunay +import scipy +import lib.utils.object3d as object3d +import torch + + +def get_objects_from_label(label_file): + with open(label_file, 'r') as f: + lines = f.readlines() + objects = [object3d.Object3d(line) for line in lines] + return objects + + +def dist_to_plane(plane, points): + """ + Calculates the signed distance from a 3D plane to each point in a list of points +
:param plane: (a, b, c, d) + :param points: (N, 3) + :return: (N), signed distance of each point to the plane + """ + a, b, c, d = plane + + points = np.array(points) + x = points[:, 0] + y = points[:, 1] + z = points[:, 2] + + return (a * x + b * y + c * z + d) / np.sqrt(a ** 2 + b ** 2 + c ** 2) + + +def rotate_pc_along_y(pc, rot_angle): + """ + params pc: (N, 3+C), (N, 3) is in the rectified camera coordinate + params rot_angle: rad scalar + Output pc: updated pc with XYZ rotated + """ + cosval = np.cos(rot_angle) + sinval = np.sin(rot_angle) + rotmat = np.array([[cosval, -sinval], [sinval, cosval]]) + pc[:, [0, 2]] = np.dot(pc[:, [0, 2]], np.transpose(rotmat)) + return pc + + +def rotate_pc_along_y_torch(pc, rot_angle): + """ + :param pc: (N, 512, 3 + C) + :param rot_angle: (N) + :return: + TODO: merge with rotate_pc_along_y_torch in bbox_transform.py + """ + cosa = torch.cos(rot_angle).view(-1, 1) # (N, 1) + sina = torch.sin(rot_angle).view(-1, 1) # (N, 1) + + raw_1 = torch.cat([cosa, -sina], dim = 1) # (N, 2) + raw_2 = torch.cat([sina, cosa], dim = 1) # (N, 2) + R = torch.cat((raw_1.unsqueeze(dim = 1), raw_2.unsqueeze(dim = 1)), dim = 1) # (N, 2, 2) + + pc_temp = pc[:, :, [0, 2]] # (N, 512, 2) + + pc[:, :, [0, 2]] = torch.matmul(pc_temp, R.permute(0, 2, 1)) # (N, 512, 2) + + return pc + + +def boxes3d_to_corners3d(boxes3d, rotate = True): + """ + :param boxes3d: (N, 7) [x, y, z, h, w, l, ry] + :param rotate: + :return: corners3d: (N, 8, 3) + """ + boxes_num = boxes3d.shape[0] + h, w, l = boxes3d[:, 3], boxes3d[:, 4], boxes3d[:, 5] + x_corners = np.array([l / 2., l / 2., -l / 2., -l / 2., l / 2., l / 2., -l / 2., -l / 2.], + dtype = np.float32).T # (N, 8) + z_corners = np.array([w / 2., -w / 2., -w / 2., w / 2., w / 2., -w / 2., -w / 2., w / 2.], + dtype = np.float32).T # (N, 8) + + y_corners = np.zeros((boxes_num, 8), dtype = np.float32) + y_corners[:, 4:8] = -h.reshape(boxes_num, 1).repeat(4, axis = 1) # (N, 8) + + if rotate: + ry = boxes3d[:, 6] + zeros, ones = np.zeros(ry.size, dtype = np.float32), np.ones(ry.size, dtype = np.float32) + rot_list = np.array([[np.cos(ry), zeros, -np.sin(ry)], + [zeros, ones, zeros], + [np.sin(ry), zeros, np.cos(ry)]]) # (3, 3, N) + R_list = np.transpose(rot_list, (2, 0, 1)) # (N, 3, 3) + + temp_corners = np.concatenate((x_corners.reshape(-1, 8, 1), y_corners.reshape(-1, 8, 1), + z_corners.reshape(-1, 8, 1)), axis = 2) # (N, 8, 3) + rotated_corners = np.matmul(temp_corners, R_list) # (N, 8, 3) + x_corners, y_corners, z_corners = rotated_corners[:, :, 0], rotated_corners[:, :, 1], rotated_corners[:, :, 2] + + x_loc, y_loc, z_loc = boxes3d[:, 0], boxes3d[:, 1], boxes3d[:, 2] + + x = x_loc.reshape(-1, 1) + x_corners.reshape(-1, 8) + y = y_loc.reshape(-1, 1) + y_corners.reshape(-1, 8) + z = z_loc.reshape(-1, 1) + z_corners.reshape(-1, 8) + + corners = np.concatenate((x.reshape(-1, 8, 1), y.reshape(-1, 8, 1), z.reshape(-1, 8, 1)), axis = 2) + + return corners.astype(np.float32) + + +def boxes3d_to_corners3d_torch(boxes3d, flip = False): + """ + :param boxes3d: (N, 7) [x, y, z, h, w, l, ry] + :return: corners_rotated: (N, 8, 3) + """ + boxes_num = boxes3d.shape[0] + h, w, l, ry = boxes3d[:, 3:4], boxes3d[:, 4:5], boxes3d[:, 5:6], boxes3d[:, 6:7] + if flip: + ry = ry + np.pi + centers = boxes3d[:, 0:3] + zeros = torch.cuda.FloatTensor(boxes_num, 1).fill_(0) + ones = torch.cuda.FloatTensor(boxes_num, 1).fill_(1) + + x_corners = torch.cat([l / 2., l / 2., -l / 2., -l / 2., l / 2., l / 2., -l / 2., -l / 2.], dim = 1) # (N, 8) + y_corners = torch.cat([zeros, 
zeros, zeros, zeros, -h, -h, -h, -h], dim = 1) # (N, 8) + z_corners = torch.cat([w / 2., -w / 2., -w / 2., w / 2., w / 2., -w / 2., -w / 2., w / 2.], dim = 1) # (N, 8) + corners = torch.cat((x_corners.unsqueeze(dim = 1), y_corners.unsqueeze(dim = 1), z_corners.unsqueeze(dim = 1)), + dim = 1) # (N, 3, 8) + + cosa, sina = torch.cos(ry), torch.sin(ry) + raw_1 = torch.cat([cosa, zeros, sina], dim = 1) + raw_2 = torch.cat([zeros, ones, zeros], dim = 1) + raw_3 = torch.cat([-sina, zeros, cosa], dim = 1) + R = torch.cat((raw_1.unsqueeze(dim = 1), raw_2.unsqueeze(dim = 1), raw_3.unsqueeze(dim = 1)), dim = 1) # (N, 3, 3) + + corners_rotated = torch.matmul(R, corners) # (N, 3, 8) + corners_rotated = corners_rotated + centers.unsqueeze(dim = 2).expand(-1, -1, 8) + corners_rotated = corners_rotated.permute(0, 2, 1) + return corners_rotated + + +def boxes3d_to_bev_torch(boxes3d): + """ + :param boxes3d: (N, 7) [x, y, z, h, w, l, ry] + :return: + boxes_bev: (N, 5) [x1, y1, x2, y2, ry] + """ + boxes_bev = boxes3d.new(torch.Size((boxes3d.shape[0], 5))) + + cu, cv = boxes3d[:, 0], boxes3d[:, 2] + half_l, half_w = boxes3d[:, 5] / 2, boxes3d[:, 4] / 2 + boxes_bev[:, 0], boxes_bev[:, 1] = cu - half_l, cv - half_w + boxes_bev[:, 2], boxes_bev[:, 3] = cu + half_l, cv + half_w + boxes_bev[:, 4] = boxes3d[:, 6] + return boxes_bev + + +def enlarge_box3d(boxes3d, extra_width): + """ + :param boxes3d: (N, 7) [x, y, z, h, w, l, ry] + """ + if isinstance(boxes3d, np.ndarray): + large_boxes3d = boxes3d.copy() + else: + large_boxes3d = boxes3d.clone() + large_boxes3d[:, 3:6] += extra_width * 2 + large_boxes3d[:, 1] += extra_width + return large_boxes3d + + +def in_hull(p, hull): + """ + :param p: (N, K) test points + :param hull: (M, K) M corners of a box + :return (N) bool + """ + try: + if not isinstance(hull, Delaunay): + hull = Delaunay(hull) + flag = hull.find_simplex(p) >= 0 + except scipy.spatial.qhull.QhullError: + print('Warning: not a hull %s' % str(hull)) + flag = np.zeros(p.shape[0], dtype = np.bool) + + return flag + + +def objs_to_boxes3d(obj_list): + boxes3d = np.zeros((obj_list.__len__(), 7), dtype = np.float32) + for k, obj in enumerate(obj_list): + boxes3d[k, 0:3], boxes3d[k, 3], boxes3d[k, 4], boxes3d[k, 5], boxes3d[k, 6] \ + = obj.pos, obj.h, obj.w, obj.l, obj.ry + return boxes3d + + +def objs_to_scores(obj_list): + scores = np.zeros((obj_list.__len__()), dtype = np.float32) + for k, obj in enumerate(obj_list): + scores[k] = obj.score + return scores + + +def get_iou3d(corners3d, query_corners3d, need_bev = False): + """ + :param corners3d: (N, 8, 3) in rect coords + :param query_corners3d: (M, 8, 3) + :return: + """ + from shapely.geometry import Polygon + A, B = corners3d, query_corners3d + N, M = A.shape[0], B.shape[0] + iou3d = np.zeros((N, M), dtype = np.float32) + iou_bev = np.zeros((N, M), dtype = np.float32) + + # for height overlap, since y face down, use the negative y + min_h_a = -A[:, 0:4, 1].sum(axis = 1) / 4.0 + max_h_a = -A[:, 4:8, 1].sum(axis = 1) / 4.0 + min_h_b = -B[:, 0:4, 1].sum(axis = 1) / 4.0 + max_h_b = -B[:, 4:8, 1].sum(axis = 1) / 4.0 + + for i in range(N): + for j in range(M): + max_of_min = np.max([min_h_a[i], min_h_b[j]]) + min_of_max = np.min([max_h_a[i], max_h_b[j]]) + h_overlap = np.max([0, min_of_max - max_of_min]) + if h_overlap == 0: + continue + + bottom_a, bottom_b = Polygon(A[i, 0:4, [0, 2]].T), Polygon(B[j, 0:4, [0, 2]].T) + if bottom_a.is_valid and bottom_b.is_valid: + # check is valid, A valid Polygon may not possess any overlapping exterior or interior rings. 
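get_iou3d delegates the footprint overlap to shapely, as in the intersection call right after this validity check. A tiny standalone example of that BEV step with two hypothetical axis-aligned footprints in the x-z plane, using the same area arithmetic:

```python
from shapely.geometry import Polygon

# toy box footprints in the x-z ground plane (what A[i, 0:4, [0, 2]].T supplies above)
footprint_a = Polygon([(0.0, 0.0), (2.0, 0.0), (2.0, 1.0), (0.0, 1.0)])
footprint_b = Polygon([(1.0, 0.0), (3.0, 0.0), (3.0, 1.0), (1.0, 1.0)])

inter = footprint_a.intersection(footprint_b).area             # 1.0
iou_bev = inter / (footprint_a.area + footprint_b.area - inter)
print(iou_bev)  # 0.333..., the same formula get_iou3d uses for iou_bev

# multiplying the footprint overlap by the height overlap yields the 3D overlap volume
h_overlap = 1.5  # hypothetical overlapping height in meters
overlap3d = inter * h_overlap
```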
+ bottom_overlap = bottom_a.intersection(bottom_b).area + else: + bottom_overlap = 0. + overlap3d = bottom_overlap * h_overlap + union3d = bottom_a.area * (max_h_a[i] - min_h_a[i]) + bottom_b.area * (max_h_b[j] - min_h_b[j]) - overlap3d + iou3d[i][j] = overlap3d / union3d + iou_bev[i][j] = bottom_overlap / (bottom_a.area + bottom_b.area - bottom_overlap) + + if need_bev: + return iou3d, iou_bev + + return iou3d diff --git a/lib/utils/loss_utils.py b/lib/utils/loss_utils.py new file mode 100644 index 0000000..7f322b9 --- /dev/null +++ b/lib/utils/loss_utils.py @@ -0,0 +1,350 @@ +import torch +import torch.nn as nn +import torch.nn.functional as F +import numpy as np +from lib.config import cfg + + +class DiceLoss(nn.Module): + def __init__(self, ignore_target = -1): + super().__init__() + self.ignore_target = ignore_target + + def forward(self, input, target): + """ + :param input: (N), logit + :param target: (N), {0, 1} + :return: + """ + input = torch.sigmoid(input.view(-1)) + target = target.float().view(-1) + mask = (target != self.ignore_target).float() + return 1.0 - (torch.min(input, target) * mask).sum() / torch.clamp((torch.max(input, target) * mask).sum(), + min = 1.0) + + +class SigmoidFocalClassificationLoss(nn.Module): + """Sigmoid focal cross entropy loss. + Focal loss down-weights well classified examples and focusses on the hard + examples. See https://arxiv.org/pdf/1708.02002.pdf for the loss definition. + """ + + def __init__(self, gamma = 2.0, alpha = 0.25): + """Constructor. + Args: + gamma: exponent of the modulating factor (1 - p_t) ^ gamma. + alpha: optional alpha weighting factor to balance positives vs negatives. + all_zero_negative: bool. if True, will treat all zero as background. + else, will treat first label as background. only affect alpha. + """ + super().__init__() + self._alpha = alpha + self._gamma = gamma + + def forward(self, + prediction_tensor, + target_tensor, + weights): + """Compute loss function. + + Args: + prediction_tensor: A float tensor of shape [batch_size, num_anchors, + num_classes] representing the predicted logits for each class + target_tensor: A float tensor of shape [batch_size, num_anchors, + num_classes] representing one-hot encoded classification targets + weights: a float tensor of shape [batch_size, num_anchors] + class_indices: (Optional) A 1-D integer tensor of class indices. + If provided, computes loss only for the specified class indices. + + Returns: + loss: a float tensor of shape [batch_size, num_anchors, num_classes] + representing the value of the loss function. 
+ """ + per_entry_cross_ent = (_sigmoid_cross_entropy_with_logits( + labels = target_tensor, logits = prediction_tensor)) + prediction_probabilities = torch.sigmoid(prediction_tensor) + p_t = ((target_tensor * prediction_probabilities) + + ((1 - target_tensor) * (1 - prediction_probabilities))) + modulating_factor = 1.0 + if self._gamma: + modulating_factor = torch.pow(1.0 - p_t, self._gamma) + alpha_weight_factor = 1.0 + if self._alpha is not None: + alpha_weight_factor = (target_tensor * self._alpha + (1 - target_tensor) * (1 - self._alpha)) + + focal_cross_entropy_loss = (modulating_factor * alpha_weight_factor * per_entry_cross_ent) + return focal_cross_entropy_loss * weights + + +def _sigmoid_cross_entropy_with_logits(logits, labels): + # to be compatible with tensorflow, we don't use ignore_idx + loss = torch.clamp(logits, min = 0) - logits * labels.type_as(logits) + loss += torch.log1p(torch.exp(-torch.abs(logits))) + # transpose_param = [0] + [param[-1]] + param[1:-1] + # logits = logits.permute(*transpose_param) + # loss_ftor = nn.NLLLoss(reduce=False) + # loss = loss_ftor(F.logsigmoid(logits), labels) + return loss + + +def get_reg_loss(cls_score, mask_score, pred_reg, reg_label, loc_scope, loc_bin_size, num_head_bin, anchor_size, + get_xz_fine = True, get_y_by_bin = False, loc_y_scope = 0.5, loc_y_bin_size = 0.25, + get_ry_fine = False, + use_cls_score = False, use_mask_score = False, + gt_iou_weight = None, + use_iou_branch=False, + iou_branch_pred=None + ): + """ + Bin-based 3D bounding boxes regression loss. See https://arxiv.org/abs/1812.04244 for more details. + :param pred_reg: (N, C) + :param reg_label: (N, 7) [dx, dy, dz, h, w, l, ry] + :param loc_scope: constant + :param loc_bin_size: constant + :param num_head_bin: constant + :param anchor_size: (N, 3) or (3) + :param get_xz_fine: + :param get_y_by_bin: + :param loc_y_scope: + :param loc_y_bin_size: + :param get_ry_fine: + :return: + """ + per_loc_bin_num = int(loc_scope / loc_bin_size) * 2 + loc_y_bin_num = int(loc_y_scope / loc_y_bin_size) * 2 + + reg_loss_dict = { } + loc_loss = 0 + + # xz localization loss + x_offset_label, y_offset_label, z_offset_label = reg_label[:, 0], reg_label[:, 1], reg_label[:, 2] + x_shift = torch.clamp(x_offset_label + loc_scope, 0, loc_scope * 2 - 1e-3) + z_shift = torch.clamp(z_offset_label + loc_scope, 0, loc_scope * 2 - 1e-3) + x_bin_label = (x_shift / loc_bin_size).floor().long() + z_bin_label = (z_shift / loc_bin_size).floor().long() + + x_bin_l, x_bin_r = 0, per_loc_bin_num + z_bin_l, z_bin_r = per_loc_bin_num, per_loc_bin_num * 2 + start_offset = z_bin_r + + loss_x_bin = F.cross_entropy(pred_reg[:, x_bin_l: x_bin_r], x_bin_label) + loss_z_bin = F.cross_entropy(pred_reg[:, z_bin_l: z_bin_r], z_bin_label) + reg_loss_dict['loss_x_bin'] = loss_x_bin.item() + reg_loss_dict['loss_z_bin'] = loss_z_bin.item() + loc_loss += loss_x_bin + loss_z_bin + + if get_xz_fine: + x_res_l, x_res_r = per_loc_bin_num * 2, per_loc_bin_num * 3 + z_res_l, z_res_r = per_loc_bin_num * 3, per_loc_bin_num * 4 + start_offset = z_res_r + + x_res_label = x_shift - (x_bin_label.float() * loc_bin_size + loc_bin_size / 2) + z_res_label = z_shift - (z_bin_label.float() * loc_bin_size + loc_bin_size / 2) + x_res_norm_label = x_res_label / loc_bin_size + z_res_norm_label = z_res_label / loc_bin_size + + x_bin_onehot = torch.cuda.FloatTensor(x_bin_label.size(0), per_loc_bin_num).zero_() + x_bin_onehot.scatter_(1, x_bin_label.view(-1, 1).long(), 1) + z_bin_onehot = torch.cuda.FloatTensor(z_bin_label.size(0), 
per_loc_bin_num).zero_() + z_bin_onehot.scatter_(1, z_bin_label.view(-1, 1).long(), 1) + + loss_x_res = F.smooth_l1_loss((pred_reg[:, x_res_l: x_res_r] * x_bin_onehot).sum(dim = 1), x_res_norm_label) + loss_z_res = F.smooth_l1_loss((pred_reg[:, z_res_l: z_res_r] * z_bin_onehot).sum(dim = 1), z_res_norm_label) + reg_loss_dict['loss_x_res'] = loss_x_res.item() + reg_loss_dict['loss_z_res'] = loss_z_res.item() + loc_loss += loss_x_res + loss_z_res + + # y localization loss + if get_y_by_bin: + y_bin_l, y_bin_r = start_offset, start_offset + loc_y_bin_num + y_res_l, y_res_r = y_bin_r, y_bin_r + loc_y_bin_num + start_offset = y_res_r + + y_shift = torch.clamp(y_offset_label + loc_y_scope, 0, loc_y_scope * 2 - 1e-3) + y_bin_label = (y_shift / loc_y_bin_size).floor().long() + y_res_label = y_shift - (y_bin_label.float() * loc_y_bin_size + loc_y_bin_size / 2) + y_res_norm_label = y_res_label / loc_y_bin_size + + y_bin_onehot = torch.cuda.FloatTensor(y_bin_label.size(0), loc_y_bin_num).zero_() + y_bin_onehot.scatter_(1, y_bin_label.view(-1, 1).long(), 1) + + loss_y_bin = F.cross_entropy(pred_reg[:, y_bin_l: y_bin_r], y_bin_label) + loss_y_res = F.smooth_l1_loss((pred_reg[:, y_res_l: y_res_r] * y_bin_onehot).sum(dim = 1), y_res_norm_label) + + reg_loss_dict['loss_y_bin'] = loss_y_bin.item() + reg_loss_dict['loss_y_res'] = loss_y_res.item() + + loc_loss += loss_y_bin + loss_y_res + else: + y_offset_l, y_offset_r = start_offset, start_offset + 1 + start_offset = y_offset_r + + loss_y_offset = F.smooth_l1_loss(pred_reg[:, y_offset_l: y_offset_r].sum(dim = 1), y_offset_label) + reg_loss_dict['loss_y_offset'] = loss_y_offset.item() + loc_loss += loss_y_offset + + # angle loss + ry_bin_l, ry_bin_r = start_offset, start_offset + num_head_bin + ry_res_l, ry_res_r = ry_bin_r, ry_bin_r + num_head_bin + + ry_label = reg_label[:, 6] + + if get_ry_fine: + # divide pi/2 into several bins (For RCNN, num_head_bin = 9) + angle_per_class = (np.pi / 2) / num_head_bin + + ry_label = ry_label % (2 * np.pi) # 0 ~ 2pi + opposite_flag = (ry_label > np.pi * 0.5) & (ry_label < np.pi * 1.5) + ry_label[opposite_flag] = (ry_label[opposite_flag] + np.pi) % (2 * np.pi) # (0 ~ pi/2, 3pi/2 ~ 2pi) + shift_angle = (ry_label + np.pi * 0.5) % (2 * np.pi) # (0 ~ pi) + + shift_angle = torch.clamp(shift_angle - np.pi * 0.25, min = 1e-3, max = np.pi * 0.5 - 1e-3) # (0, pi/2) + + # bin center is (5, 10, 15, ..., 85) + ry_bin_label = (shift_angle / angle_per_class).floor().long() + ry_res_label = shift_angle - (ry_bin_label.float() * angle_per_class + angle_per_class / 2) + ry_res_norm_label = ry_res_label / (angle_per_class / 2) + + else: + # divide 2pi into several bins (For RPN, num_head_bin = 12) + angle_per_class = (2 * np.pi) / num_head_bin + heading_angle = ry_label % (2 * np.pi) # 0 ~ 2pi + + shift_angle = (heading_angle + angle_per_class / 2) % (2 * np.pi) + ry_bin_label = (shift_angle / angle_per_class).floor().long() + ry_res_label = shift_angle - (ry_bin_label.float() * angle_per_class + angle_per_class / 2) + ry_res_norm_label = ry_res_label / (angle_per_class / 2) + + ry_bin_onehot = torch.cuda.FloatTensor(ry_bin_label.size(0), num_head_bin).zero_() + ry_bin_onehot.scatter_(1, ry_bin_label.view(-1, 1).long(), 1) + loss_ry_bin = F.cross_entropy(pred_reg[:, ry_bin_l:ry_bin_r], ry_bin_label) + loss_ry_res = F.smooth_l1_loss((pred_reg[:, ry_res_l: ry_res_r] * ry_bin_onehot).sum(dim = 1), ry_res_norm_label) + + reg_loss_dict['loss_ry_bin'] = loss_ry_bin.item() + reg_loss_dict['loss_ry_res'] = loss_ry_res.item() + angle_loss = 
loss_ry_bin + loss_ry_res + + # size loss + size_res_l, size_res_r = ry_res_r, ry_res_r + 3 + assert pred_reg.shape[1] == size_res_r, '%d vs %d' % (pred_reg.shape[1], size_res_r) + + size_res_norm_label = (reg_label[:, 3:6] - anchor_size) / anchor_size + size_res_norm = pred_reg[:, size_res_l:size_res_r] + size_loss = F.smooth_l1_loss(size_res_norm, size_res_norm_label) + + pred_x = (pred_reg[:, x_res_l: x_res_r] * x_bin_onehot).sum(dim = 1) * loc_bin_size + pred_y = pred_reg[:, y_offset_l: y_offset_r].sum(dim = 1) + pred_z = (pred_reg[:, z_res_l: z_res_r] * z_bin_onehot).sum(dim = 1) * loc_bin_size + pred_size = size_res_norm * anchor_size + anchor_size # hwl(yzx) + + tar_x, tar_y, tar_z = x_res_label, y_offset_label, z_res_label + tar_size = reg_label[:, 3:6] + + insect_x = torch.max(torch.min((pred_x + pred_size[:, 2] / 2), (tar_x + tar_size[:, 2] / 2)) - torch.max( + (pred_x - pred_size[:, 2] / 2), (tar_x - tar_size[:, 2] / 2)), + pred_x.new().resize_(pred_x.shape).fill_(1e-3)) + insect_y = torch.max(torch.min((pred_y + pred_size[:, 0] / 2), (tar_y + tar_size[:, 0] / 2)) - torch.max( + (pred_y - pred_size[:, 0] / 2), (tar_y - tar_size[:, 0] / 2)), + pred_x.new().resize_(pred_x.shape).fill_(1e-3)) + insect_z = torch.max(torch.min((pred_z + pred_size[:, 1] / 2), (tar_z + tar_size[:, 1] / 2)) - torch.max( + (pred_z - pred_size[:, 1] / 2), (tar_z - tar_size[:, 1] / 2)), + pred_x.new().resize_(pred_x.shape).fill_(1e-3)) + + + if cfg.TRAIN.IOU_LOSS_TYPE == 'raw': + # print('USE RAW LOSS') + # + insect_area = insect_x * insect_y * insect_z + pred_area = torch.max(pred_size[:, 0] * pred_size[:, 1] * pred_size[:, 2], + pred_size.new().resize_(pred_size[:, 2].shape).fill_(1e-3)) + tar_area = tar_size[:, 0] * tar_size[:, 1] * tar_size[:, 2] + iou_tmp = insect_area / (pred_area + tar_area - insect_area) + + if use_iou_branch: + iou_branch_pred_flat = iou_branch_pred.view(-1) + iou_branch_pred_flat = torch.clamp(iou_branch_pred_flat, 0.0001, 0.9999) + iou_tmp_taget = torch.clamp(iou_tmp, 0.0001, 0.9999) + iou_branch_loss = -(iou_tmp_taget.detach() * torch.log(iou_branch_pred_flat) + ( + 1 - iou_tmp_taget.detach()) * torch.log(1 - iou_branch_pred_flat)) + reg_loss_dict['iou_branch_loss'] = iou_branch_loss.mean() + + if use_cls_score: + iou_tmp = cls_score * iou_tmp + + if use_mask_score: + # print('mask_score:', mask_score) + # iou_tmp = mask_score * iou_tmp + iou_tmp = iou_tmp + iou_tmp = torch.max(iou_tmp, iou_tmp.new().resize_(iou_tmp.shape).fill_(1e-4)) + iou_loss = -torch.log(iou_tmp) + iou_loss = iou_loss.mean() + + elif cfg.TRAIN.IOU_LOSS_TYPE == 'cls_mask_with_bin': + #print('cfg.TRAIN.IOU_LOSS_TYPE') + pred_x_bin = F.softmax(pred_reg[:, x_bin_l: x_bin_r], 1) # N x num_bin + pred_z_bin = F.softmax(pred_reg[:, z_bin_l: z_bin_r], 1) + + # + xz_bin_ind = torch.arange(per_loc_bin_num).float() + xz_bin_center = xz_bin_ind * loc_bin_size + loc_bin_size / 2 - loc_scope # num_bin + xz_bin_center = xz_bin_center.to(pred_x_bin.device) + + # + pred_x_reg = pred_reg[:, x_res_l: x_res_r] * loc_bin_size # N x num_bin + pred_z_reg = pred_reg[:, z_res_l: z_res_r] * loc_bin_size + + # + pred_x_abs = xz_bin_center + pred_x_reg + pred_z_abs = xz_bin_center + pred_z_reg + + pred_x = (pred_x_abs * pred_x_bin).sum(dim=1) + pred_z = (pred_z_abs * pred_z_bin).sum(dim=1) + pred_y = pred_reg[:, y_offset_l: y_offset_r].sum(dim=1) # N + + pred_size = size_res_norm * anchor_size + anchor_size # hwl(yzx) + + # + tar_x, tar_y, tar_z = x_res_label, y_offset_label, z_res_label + # + tar_x = xz_bin_center[x_bin_label] + 
tar_x + tar_z = xz_bin_center[z_bin_label] + tar_z + + tar_size = reg_label[:, 3:6] + + insect_x = torch.max(torch.min((pred_x + pred_size[:, 2]/2), (tar_x + tar_size[:, 2]/2)) - torch.max((pred_x - pred_size[:, 2]/2), (tar_x - tar_size[:, 2]/2)), pred_x.new().resize_(pred_x.shape).fill_(1e-3)) + insect_y = torch.max(torch.min((pred_y + pred_size[:, 0]/2), (tar_y + tar_size[:, 0]/2)) - torch.max((pred_y - pred_size[:, 0]/2), (tar_y - tar_size[:, 0]/2)), pred_x.new().resize_(pred_x.shape).fill_(1e-3)) + insect_z = torch.max(torch.min((pred_z + pred_size[:, 1]/2), (tar_z + tar_size[:, 1]/2)) - torch.max((pred_z - pred_size[:, 1]/2), (tar_z - tar_size[:, 1]/2)), pred_x.new().resize_(pred_x.shape).fill_(1e-3)) + + insect_area = insect_x * insect_y * insect_z + pred_area = torch.max(pred_size[:, 0] * pred_size[:, 1] * pred_size[:, 2], pred_size.new().resize_(pred_size[:, 2].shape).fill_(1e-3)) + tar_area = tar_size[:, 0] * tar_size[:, 1] * tar_size[:, 2] + iou_tmp = insect_area/(pred_area+tar_area-insect_area) + + if use_iou_branch: + iou_branch_pred_flat = iou_branch_pred.view(-1) + iou_branch_pred_flat = torch.clamp(iou_branch_pred_flat, 0.0001, 0.9999) + iou_tmp_taget = torch.clamp(iou_tmp, 0.0001, 0.9999) + iou_branch_loss = -(iou_tmp_taget.detach() * torch.log(iou_branch_pred_flat) + ( + 1 - iou_tmp_taget.detach()) * torch.log(1 - iou_branch_pred_flat)) + reg_loss_dict['iou_branch_loss'] = iou_branch_loss.mean() + + if use_cls_score: + iou_tmp = cls_score * iou_tmp + + if use_mask_score: + # print('mask_score:', mask_score) + # iou_tmp = mask_score * iou_tmp + iou_tmp = iou_tmp + iou_tmp = torch.max(iou_tmp, iou_tmp.new().resize_(iou_tmp.shape).fill_(1e-4)) + iou_loss = -torch.log(iou_tmp) + + iou_loss = iou_loss.mean() + + # Total regression loss + reg_loss_dict['loss_loc'] = loc_loss + reg_loss_dict['loss_angle'] = angle_loss + reg_loss_dict['loss_size'] = size_loss + reg_loss_dict['loss_iou'] = iou_loss + + + return loc_loss, angle_loss, size_loss, iou_loss, reg_loss_dict diff --git a/lib/utils/object3d.py b/lib/utils/object3d.py new file mode 100644 index 0000000..8c1e818 --- /dev/null +++ b/lib/utils/object3d.py @@ -0,0 +1,103 @@ +import numpy as np + + +def cls_type_to_id(cls_type): + type_to_id = { 'Car': 1, 'Pedestrian': 2, 'Cyclist': 3, 'Van': 4 } + if cls_type not in type_to_id.keys(): + return -1 + return type_to_id[cls_type] + + +class Object3d(object): + def __init__(self, line): + label = line.strip().split(' ') + self.src = line + self.cls_type = label[0] + self.cls_id = cls_type_to_id(self.cls_type) + self.trucation = float(label[1]) + self.occlusion = float(label[2]) # 0:fully visible 1:partly occluded 2:largely occluded 3:unknown + self.alpha = float(label[3]) + self.box2d = np.array((float(label[4]), float(label[5]), float(label[6]), float(label[7])), dtype = np.float32) + self.h = float(label[8]) + self.w = float(label[9]) + self.l = float(label[10]) + self.pos = np.array((float(label[11]), float(label[12]), float(label[13])), dtype = np.float32) + self.dis_to_cam = np.linalg.norm(self.pos) + self.ry = float(label[14]) + self.score = float(label[15]) if label.__len__() == 16 else -1.0 + self.level_str = None + self.level = self.get_obj_level() + + def get_obj_level(self): + height = float(self.box2d[3]) - float(self.box2d[1]) + 1 + + if height >= 40 and self.trucation <= 0.15 and self.occlusion <= 0: + self.level_str = 'Easy' + return 1 # Easy + elif height >= 25 and self.trucation <= 0.3 and self.occlusion <= 1: + self.level_str = 'Moderate' + return 2 # Moderate + 
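+        # These thresholds follow the official KITTI difficulty definitions:
+        # Easy = 2D box height >= 40 px, truncation <= 0.15, fully visible;
+        # Moderate = height >= 25 px, truncation <= 0.30, at most partly occluded;
+        # Hard = height >= 25 px, truncation <= 0.50, at most largely occluded.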
elif height >= 25 and self.trucation <= 0.5 and self.occlusion <= 2: + self.level_str = 'Hard' + return 3 # Hard + else: + self.level_str = 'UnKnown' + return 4 + + def generate_corners3d(self): + """ + generate corners3d representation for this object + :return corners_3d: (8, 3) corners of box3d in camera coord + """ + l, h, w = self.l, self.h, self.w + x_corners = [l / 2, l / 2, -l / 2, -l / 2, l / 2, l / 2, -l / 2, -l / 2] + y_corners = [0, 0, 0, 0, -h, -h, -h, -h] + z_corners = [w / 2, -w / 2, -w / 2, w / 2, w / 2, -w / 2, -w / 2, w / 2] + + R = np.array([[np.cos(self.ry), 0, np.sin(self.ry)], + [0, 1, 0], + [-np.sin(self.ry), 0, np.cos(self.ry)]]) + corners3d = np.vstack([x_corners, y_corners, z_corners]) # (3, 8) + corners3d = np.dot(R, corners3d).T + corners3d = corners3d + self.pos + return corners3d + + def to_bev_box2d(self, oblique = True, voxel_size = 0.1): + """ + :param bev_shape: (2) for bev shape (h, w), => (y_max, x_max) in image + :param voxel_size: float, 0.1m + :param oblique: + :return: box2d (4, 2)/ (4) in image coordinate + """ + if oblique: + corners3d = self.generate_corners3d() + xz_corners = corners3d[0:4, [0, 2]] + box2d = np.zeros((4, 2), dtype = np.int32) + box2d[:, 0] = ((xz_corners[:, 0] - Object3d.MIN_XZ[0]) / voxel_size).astype(np.int32) + box2d[:, 1] = Object3d.BEV_SHAPE[0] - 1 - ((xz_corners[:, 1] - Object3d.MIN_XZ[1]) / voxel_size).astype( + np.int32) + box2d[:, 0] = np.clip(box2d[:, 0], 0, Object3d.BEV_SHAPE[1]) + box2d[:, 1] = np.clip(box2d[:, 1], 0, Object3d.BEV_SHAPE[0]) + else: + box2d = np.zeros(4, dtype = np.int32) + # discrete_center = np.floor((self.pos / voxel_size)).astype(np.int32) + cu = np.floor((self.pos[0] - Object3d.MIN_XZ[0]) / voxel_size).astype(np.int32) + cv = Object3d.BEV_SHAPE[0] - 1 - ((self.pos[2] - Object3d.MIN_XZ[1]) / voxel_size).astype(np.int32) + half_l, half_w = int(self.l / voxel_size / 2), int(self.w / voxel_size / 2) + box2d[0], box2d[1] = cu - half_l, cv - half_w + box2d[2], box2d[3] = cu + half_l, cv + half_w + + return box2d + + def to_str(self): + print_str = '%s %.3f %.3f %.3f box2d: %s hwl: [%.3f %.3f %.3f] pos: %s ry: %.3f' \ + % (self.cls_type, self.trucation, self.occlusion, self.alpha, self.box2d, self.h, self.w, self.l, + self.pos, self.ry) + return print_str + + def to_kitti_format(self): + kitti_str = '%s %.2f %d %.2f %.2f %.2f %.2f %.2f %.2f %.2f %.2f %.2f %.2f %.2f %.2f' \ + % (self.cls_type, self.trucation, int(self.occlusion), self.alpha, self.box2d[0], self.box2d[1], + self.box2d[2], self.box2d[3], self.h, self.w, self.l, self.pos[0], self.pos[1], self.pos[2], + self.ry) + return kitti_str diff --git a/lib/utils/roipool3d/roipool3d_utils.py b/lib/utils/roipool3d/roipool3d_utils.py new file mode 100644 index 0000000..0362e96 --- /dev/null +++ b/lib/utils/roipool3d/roipool3d_utils.py @@ -0,0 +1,112 @@ +import torch +import roipool3d_cuda +import numpy as np +import lib.utils.kitti_utils as kitti_utils + + +def roipool3d_gpu(pts, pts_feature, boxes3d, pool_extra_width, sampled_pt_num = 512): + """ + :param pts: (B, N, 3) + :param pts_feature: (B, N, C) + :param boxes3d: (B, M, 7) + :param pool_extra_width: float + :param sampled_pt_num: int + :return: + pooled_features: (B, M, 512, 3 + C) + pooled_empty_flag: (B, M) + """ + batch_size, boxes_num, feature_len = pts.shape[0], boxes3d.shape[1], pts_feature.shape[2] + pooled_boxes3d = kitti_utils.enlarge_box3d(boxes3d.view(-1, 7), pool_extra_width).view(batch_size, -1, 7) + + pooled_features = torch.cuda.FloatTensor(torch.Size((batch_size, boxes_num, + 
sampled_pt_num, 3 + feature_len))).zero_() + pooled_empty_flag = torch.cuda.IntTensor(torch.Size((batch_size, boxes_num))).zero_() + + roipool3d_cuda.forward(pts.contiguous(), pooled_boxes3d.contiguous(), + pts_feature.contiguous(), pooled_features, pooled_empty_flag) + + return pooled_features, pooled_empty_flag + + +def pts_in_boxes3d_cpu(pts, boxes3d): + """ + :param pts: (N, 3) in rect-camera coords + :param boxes3d: (M, 7) + :return: boxes_pts_mask_list: (M), list with [(N), (N), ..] + """ + if not pts.is_cuda: + pts = pts.float().contiguous() + boxes3d = boxes3d.float().contiguous() + pts_flag = torch.LongTensor(torch.Size((boxes3d.size(0), pts.size(0)))) # (M, N) + roipool3d_cuda.pts_in_boxes3d_cpu(pts_flag, pts, boxes3d) + + boxes_pts_mask_list = [] + for k in range(0, boxes3d.shape[0]): + cur_mask = pts_flag[k] > 0 + boxes_pts_mask_list.append(cur_mask) + return boxes_pts_mask_list + else: + raise NotImplementedError + + +def roipool_pc_cpu(pts, pts_feature, boxes3d, sampled_pt_num): + """ + :param pts: (N, 3) + :param pts_feature: (N, C) + :param boxes3d: (M, 7) + :param sampled_pt_num: int + :return: + """ + pts = pts.cpu().float().contiguous() + pts_feature = pts_feature.cpu().float().contiguous() + boxes3d = boxes3d.cpu().float().contiguous() + assert pts.shape[0] == pts_feature.shape[0] and pts.shape[1] == 3, '%s %s' % (pts.shape, pts_feature.shape) + assert pts.is_cuda is False + pooled_pts = torch.FloatTensor(torch.Size((boxes3d.shape[0], sampled_pt_num, 3))).zero_() + pooled_features = torch.FloatTensor(torch.Size((boxes3d.shape[0], sampled_pt_num, pts_feature.shape[1]))).zero_() + pooled_empty_flag = torch.LongTensor(boxes3d.shape[0]).zero_() + roipool3d_cuda.roipool3d_cpu(pts, boxes3d, pts_feature, pooled_pts, pooled_features, pooled_empty_flag) + return pooled_pts, pooled_features, pooled_empty_flag + + +def roipool3d_cpu(boxes3d, pts, pts_feature, pts_extra_input, pool_extra_width, sampled_pt_num = 512, + canonical_transform = True): + """ + :param boxes3d: (N, 7) + :param pts: (N, 3) + :param pts_feature: (N, C) + :param pts_extra_input: (N, C2) + :param pool_extra_width: constant + :param sampled_pt_num: constant + :return: + """ + pooled_boxes3d = kitti_utils.enlarge_box3d(boxes3d, pool_extra_width) + + pts_feature_all = np.concatenate((pts_extra_input, pts_feature), axis = 1) + + # Note: if pooled_empty_flag[i] > 0, the pooled_pts[i], pooled_features[i] will be zero + pooled_pts, pooled_features, pooled_empty_flag = \ + roipool_pc_cpu(torch.from_numpy(pts), torch.from_numpy(pts_feature_all), + torch.from_numpy(pooled_boxes3d), sampled_pt_num) + + extra_input_len = pts_extra_input.shape[1] + sampled_pts_input = torch.cat((pooled_pts, pooled_features[:, :, 0:extra_input_len]), dim = 2).numpy() + sampled_pts_feature = pooled_features[:, :, extra_input_len:].numpy() + + if canonical_transform: + # Translate to the roi coordinates + roi_ry = boxes3d[:, 6] % (2 * np.pi) # 0~2pi + roi_center = boxes3d[:, 0:3] + + # shift to center + sampled_pts_input[:, :, 0:3] = sampled_pts_input[:, :, 0:3] - roi_center[:, np.newaxis, :] + for k in range(sampled_pts_input.shape[0]): + sampled_pts_input[k] = kitti_utils.rotate_pc_along_y(sampled_pts_input[k], roi_ry[k]) + + return sampled_pts_input, sampled_pts_feature + + return sampled_pts_input, sampled_pts_feature, pooled_empty_flag.numpy() + + +if __name__ == '__main__': + pass diff --git a/lib/utils/roipool3d/setup.py b/lib/utils/roipool3d/setup.py new file mode 100644 index 0000000..476895e --- /dev/null +++ 
b/lib/utils/roipool3d/setup.py @@ -0,0 +1,14 @@ +from setuptools import setup +from torch.utils.cpp_extension import BuildExtension, CUDAExtension + +setup( + name = 'roipool3d', + ext_modules = [ + CUDAExtension('roipool3d_cuda', [ + 'src/roipool3d.cpp', + 'src/roipool3d_kernel.cu', + ], + extra_compile_args = { 'cxx' : ['-g'], + 'nvcc': ['-O2'] }) + ], + cmdclass = { 'build_ext': BuildExtension }) diff --git a/lib/utils/roipool3d/src/roipool3d.cpp b/lib/utils/roipool3d/src/roipool3d.cpp new file mode 100644 index 0000000..ac002af --- /dev/null +++ b/lib/utils/roipool3d/src/roipool3d.cpp @@ -0,0 +1,204 @@ +#include +#include + + +#define CHECK_CUDA(x) AT_CHECK(x.type().is_cuda(), #x, " must be a CUDAtensor ") +#define CHECK_CONTIGUOUS(x) AT_CHECK(x.is_contiguous(), #x, " must be contiguous ") +#define CHECK_INPUT(x) CHECK_CUDA(x);CHECK_CONTIGUOUS(x) + +void roipool3dLauncher_slow(int batch_size, int pts_num, int boxes_num, int feature_in_len, int sampled_pts_num, + const float *xyz, const float *boxes3d, const float *pts_feature, float *pooled_features, int *pooled_empty_flag); + +void roipool3dLauncher(int batch_size, int pts_num, int boxes_num, int feature_in_len, int sampled_pts_num, + const float *xyz, const float *boxes3d, const float *pts_feature, float *pooled_features, int *pooled_empty_flag); + +int roipool3d_gpu_slow(at::Tensor xyz, at::Tensor boxes3d, at::Tensor pts_feature, at::Tensor pooled_features, at::Tensor pooled_empty_flag){ + // params xyz: (B, N, 3) + // params boxes3d: (B, M, 7) + // params pts_feature: (B, N, C) + // params pooled_features: (B, M, 512, 3+C) + // params pooled_empty_flag: (B, M) + CHECK_INPUT(xyz); + CHECK_INPUT(boxes3d); + CHECK_INPUT(pts_feature); + CHECK_INPUT(pooled_features); + CHECK_INPUT(pooled_empty_flag); + + int batch_size = xyz.size(0); + int pts_num = xyz.size(1); + int boxes_num = boxes3d.size(1); + int feature_in_len = pts_feature.size(2); + int sampled_pts_num = pooled_features.size(2); + + + const float * xyz_data = xyz.data(); + const float * boxes3d_data = boxes3d.data(); + const float * pts_feature_data = pts_feature.data(); + float * pooled_features_data = pooled_features.data(); + int * pooled_empty_flag_data = pooled_empty_flag.data(); + + roipool3dLauncher_slow(batch_size, pts_num, boxes_num, feature_in_len, sampled_pts_num, + xyz_data, boxes3d_data, pts_feature_data, pooled_features_data, pooled_empty_flag_data); + + return 1; +} + + + +int roipool3d_gpu(at::Tensor xyz, at::Tensor boxes3d, at::Tensor pts_feature, at::Tensor pooled_features, at::Tensor pooled_empty_flag){ + // params xyz: (B, N, 3) + // params boxes3d: (B, M, 7) + // params pts_feature: (B, N, C) + // params pooled_features: (B, M, 512, 3+C) + // params pooled_empty_flag: (B, M) + CHECK_INPUT(xyz); + CHECK_INPUT(boxes3d); + CHECK_INPUT(pts_feature); + CHECK_INPUT(pooled_features); + CHECK_INPUT(pooled_empty_flag); + + int batch_size = xyz.size(0); + int pts_num = xyz.size(1); + int boxes_num = boxes3d.size(1); + int feature_in_len = pts_feature.size(2); + int sampled_pts_num = pooled_features.size(2); + + + const float * xyz_data = xyz.data(); + const float * boxes3d_data = boxes3d.data(); + const float * pts_feature_data = pts_feature.data(); + float * pooled_features_data = pooled_features.data(); + int * pooled_empty_flag_data = pooled_empty_flag.data(); + + roipool3dLauncher(batch_size, pts_num, boxes_num, feature_in_len, sampled_pts_num, + xyz_data, boxes3d_data, pts_feature_data, pooled_features_data, pooled_empty_flag_data); + + + + return 1; +} + + +int 
pt_in_box3d_cpu(float x, float y, float z, float cx, float bottom_y, float cz, float h, float w, float l, float angle){ + float max_dis = 10.0, x_rot, z_rot, cosa, sina, cy; + int in_flag; + cy = bottom_y - h / 2.0; + if ((fabsf(x - cx) > max_dis) || (fabsf(y - cy) > h / 2.0) || (fabsf(z - cz) > max_dis)){ + return 0; + } + cosa = cos(angle); sina = sin(angle); + x_rot = (x - cx) * cosa + (z - cz) * (-sina); + z_rot = (x - cx) * sina + (z - cz) * cosa; + + in_flag = (x_rot >= -l / 2.0) & (x_rot <= l / 2.0) & (z_rot >= -w / 2.0) & (z_rot <= w / 2.0); + return in_flag; +} + +int pts_in_boxes3d_cpu(at::Tensor pts_flag, at::Tensor pts, at::Tensor boxes3d){ + // param in_flag: (M, N), 0 or 1 + // param pts: (N, 3) + // param boxes3d: (M, 7) [x, y, z, h, w, l, ry] + + CHECK_CONTIGUOUS(pts_flag); + CHECK_CONTIGUOUS(pts); + CHECK_CONTIGUOUS(boxes3d); + + long boxes_num = boxes3d.size(0); + long pts_num = pts.size(0); + + long * pts_flag_flat = pts_flag.data(); + float * pts_flat = pts.data(); + float * boxes3d_flat = boxes3d.data(); + + memset(pts_flag_flat, 0, boxes_num * pts_num * sizeof(long)); + + int i, j, cur_in_flag; + for (i = 0; i < boxes_num; i++){ + for (j = 0; j < pts_num; j++){ + cur_in_flag = pt_in_box3d_cpu(pts_flat[j * 3], pts_flat[j * 3 + 1], pts_flat[j * 3 + 2], boxes3d_flat[i * 7], + boxes3d_flat[i * 7 + 1], boxes3d_flat[i * 7 + 2], boxes3d_flat[i * 7 + 3], + boxes3d_flat[i * 7 + 4], boxes3d_flat[i * 7 + 5], boxes3d_flat[i * 7 + 6]); + pts_flag_flat[i * pts_num + j] = cur_in_flag; + } + } + return 1; +} + +int roipool3d_cpu(at::Tensor pts, at::Tensor boxes3d, at::Tensor pts_feature, at::Tensor pooled_pts, + at::Tensor pooled_features, at::Tensor pooled_empty_flag){ + // param pts: (N, 3) [x, y, z] + // param boxes3d: (M, 7) [x, y, z, h, w, l, ry] + // param pts_feature: (N, C) + // param pooled_pts: (M, 512, 3) + // param pooled_features: (M, 512, C) + CHECK_CONTIGUOUS(pts); + CHECK_CONTIGUOUS(boxes3d); + CHECK_CONTIGUOUS(pts_feature); + CHECK_CONTIGUOUS(pooled_pts); + CHECK_CONTIGUOUS(pooled_features); + CHECK_CONTIGUOUS(pooled_empty_flag); + + long boxes_num = boxes3d.size(0); + long pts_num = pts.size(0); + long feature_len = pts_feature.size(1); + long sampled_pts_num = pooled_pts.size(1); + + float * pts_flat = pts.data(); + float * boxes3d_flat = boxes3d.data(); + float * pts_feature_flat = pts_feature.data(); + float * pooled_pts_flat = pooled_pts.data(); + float * pooled_features_flat = pooled_features.data(); + long * pooled_empty_flag_flat = pooled_empty_flag.data(); + + memset(pooled_empty_flag_flat, 0, boxes_num * sizeof(long)); + + int i, j, k, cnt, temp_idx, duplicate_idx, cur_in_flag; + for (i = 0; i < boxes_num; i++){ + cnt = 0; + for (j = 0; j < pts_num; j++){ + cur_in_flag = pt_in_box3d_cpu(pts_flat[j * 3], pts_flat[j * 3 + 1], pts_flat[j * 3 + 2], boxes3d_flat[i * 7], + boxes3d_flat[i * 7 + 1], boxes3d_flat[i * 7 + 2], boxes3d_flat[i * 7 + 3], + boxes3d_flat[i * 7 + 4], boxes3d_flat[i * 7 + 5], boxes3d_flat[i * 7 + 6]); + + if (cur_in_flag){ + if (cnt < sampled_pts_num){ + temp_idx = i * sampled_pts_num * 3 + cnt * 3; + for (k = 0; k < 3; k++) pooled_pts_flat[temp_idx + k] = pts_flat[j * 3 + k]; + temp_idx = i * sampled_pts_num * feature_len + cnt * feature_len; + for (k = 0; k < feature_len; k++) pooled_features_flat[temp_idx + k] = pts_feature_flat[j * feature_len + k]; + cnt++; + } + else break; + } + } + + if (cnt == 0){ + // no points in this box + pooled_empty_flag_flat[i] = 1; + } + else if (cnt < sampled_pts_num){ + // duplicate same points + 
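+            // fewer than sampled_pts_num points fell inside this box: fill the
+            // remaining slots by cycling through the cnt collected points
+            // (index j % cnt), so every box yields a fixed-size point set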
duplicate_idx = 0; + for (j = cnt; j < sampled_pts_num; j++){ + temp_idx = i * sampled_pts_num * 3 + j * 3; + duplicate_idx = i * sampled_pts_num * 3 + (j % cnt) * 3; + for (k = 0; k < 3; k++) pooled_pts_flat[temp_idx + k] = pooled_pts_flat[duplicate_idx + k]; + temp_idx = i * sampled_pts_num * feature_len + j * feature_len; + duplicate_idx = i * sampled_pts_num * feature_len + (j % cnt) * feature_len; + for (k = 0; k < feature_len; k++){ + pooled_features_flat[temp_idx + k] = pooled_features_flat[duplicate_idx + k]; + } + } + } + } + return 1; +} + + +PYBIND11_MODULE(TORCH_EXTENSION_NAME, m) { + m.def("pts_in_boxes3d_cpu", &pts_in_boxes3d_cpu, "pts_in_boxes3d_cpu"); + m.def("roipool3d_cpu", &roipool3d_cpu, "roipool3d_cpu"); + m.def("forward", &roipool3d_gpu, "roipool3d forward (CUDA)"); + m.def("forward_slow", &roipool3d_gpu_slow, "roipool3d forward (CUDA)"); +} + diff --git a/lib/utils/roipool3d/src/roipool3d_kernel.cu b/lib/utils/roipool3d/src/roipool3d_kernel.cu new file mode 100644 index 0000000..a3dc0ff --- /dev/null +++ b/lib/utils/roipool3d/src/roipool3d_kernel.cu @@ -0,0 +1,237 @@ +/* +Point cloud feature pooling +Written by Shaoshuai Shi +All Rights Reserved 2018. +*/ + +#include +#include + +#define THREADS_PER_BLOCK 256 +#define DIVUP(m,n) ((m) / (n) + ((m) % (n) > 0)) +// #define DEBUG + +__device__ inline int pt_in_box3d(float x, float y, float z, float cx, float bottom_y, float cz, float h, float w, + float l, float angle, float max_dis){ + float x_rot, z_rot, cosa, sina, cy; + int in_flag; + cy = bottom_y - h / 2.0; + if ((fabsf(x - cx) > max_dis) || (fabsf(y - cy) > h / 2.0) || (fabsf(z - cz) > max_dis)){ + return 0; + } + cosa = cos(angle); sina = sin(angle); + x_rot = (x - cx) * cosa + (z - cz) * (-sina); + z_rot = (x - cx) * sina + (z - cz) * cosa; + + in_flag = (x_rot >= -l / 2.0) & (x_rot <= l / 2.0) & (z_rot >= -w / 2.0) & (z_rot <= w / 2.0); + return in_flag; +} + + +__global__ void roipool3d_forward(int batch_size, int pts_num, int boxes_num, int feature_in_len, int sampled_pts_num, + const float *xyz, const float *boxes3d, const float *pts_feature, + float *pooled_features, int *pooled_empty_flag){ + // params xyz: (B, N, 3) + // params boxes3d: (B, M, 7) + // params pts_feature: (B, N, C) + // params pooled_features: (B, M, 512, 3+C) + // params pooled_empty_flag: (B, M) + + int boxes_idx = blockIdx.x * blockDim.x + threadIdx.x; + if (boxes_idx >= boxes_num){ + return; + } + + for (int i = 0; i < batch_size; i++){ + int cnt = 0; + for (int k = 0; k < pts_num; k++){ + int pt_offset = i * pts_num * 3 + k * 3; + int box_offset = i * boxes_num * 7 + boxes_idx * 7; + + int cur_in_flag = pt_in_box3d(xyz[pt_offset], xyz[pt_offset + 1], xyz[pt_offset + 2], boxes3d[box_offset], + boxes3d[box_offset + 1], boxes3d[box_offset + 2], boxes3d[box_offset + 3], + boxes3d[box_offset + 4], boxes3d[box_offset + 5], boxes3d[box_offset + 6], 10.0); + if (cur_in_flag){ + if (cnt < sampled_pts_num){ + int feature_out_offset = i * boxes_num * sampled_pts_num * (3 + feature_in_len) + + boxes_idx * sampled_pts_num * (3 + feature_in_len) + + cnt * (3 + feature_in_len); + + int feature_in_offset = i * pts_num * feature_in_len + k * feature_in_len; + + // copy xyz + for (int j = 0; j < 3; j++) + pooled_features[feature_out_offset + j] = xyz[pt_offset + j]; + + // copy feature + for (int j = 0; j < feature_in_len; j++) + pooled_features[feature_out_offset + 3 + j] = pts_feature[feature_in_offset + j]; + + cnt++; + } + else break; + } + } + + if (cnt == 0){ + pooled_empty_flag[i * boxes_num + 
boxes_idx] = 1; + } + else if (cnt < sampled_pts_num){ + // duplicate same points for sampling + for (int k = cnt; k < sampled_pts_num; k++){ + int duplicate_idx = k % cnt; + int src_offset = i * boxes_num * sampled_pts_num * (3 + feature_in_len) + + boxes_idx * sampled_pts_num * (3 + feature_in_len) + + duplicate_idx * (3 + feature_in_len); + int dst_offset = i * boxes_num * sampled_pts_num * (3 + feature_in_len) + + boxes_idx * sampled_pts_num * (3 + feature_in_len) + + k * (3 + feature_in_len); + for (int j = 0; j < 3 + feature_in_len; j++) + pooled_features[dst_offset + j] = pooled_features[src_offset + j]; + } + } + } +} + + +__global__ void assign_pts_to_box3d(int batch_size, int pts_num, int boxes_num, const float *xyz, const float *boxes3d, int *pts_assign){ + // params xyz: (B, N, 3) + // params boxes3d: (B, M, 7) + // params pts_assign: (B, N, M): idx of the corresponding box3d, -1 means background points + int pt_idx = blockIdx.x * blockDim.x + threadIdx.x; + int box_idx = blockIdx.y; + int bs_idx = blockIdx.z; + + if (pt_idx >= pts_num || box_idx >= boxes_num || bs_idx >= batch_size){ + return; + } + int assign_idx = bs_idx * pts_num * boxes_num + pt_idx * boxes_num + box_idx; + pts_assign[assign_idx] = 0; + + int box_offset = bs_idx * boxes_num * 7 + box_idx * 7; + int pt_offset = bs_idx * pts_num * 3 + pt_idx * 3; + + int cur_in_flag = pt_in_box3d(xyz[pt_offset], xyz[pt_offset + 1], xyz[pt_offset + 2], boxes3d[box_offset], + boxes3d[box_offset + 1], boxes3d[box_offset + 2], boxes3d[box_offset + 3], + boxes3d[box_offset + 4], boxes3d[box_offset + 5], boxes3d[box_offset + 6], 10.0); + + pts_assign[assign_idx] = cur_in_flag; + // printf("bs=%d, pt=%d, in=%d\n", bs_idx, pt_idx, pts_assign[bs_idx * pts_num + pt_idx]); +} + + +__global__ void get_pooled_idx(int batch_size, int pts_num, int boxes_num, int sampled_pts_num, + const int *pts_assign, int *pts_idx, int *pooled_empty_flag){ + // params xyz: (B, N, 3) + // params pts_feature: (B, N, C) + // params pts_assign: (B, N) + // params pts_idx: (B, M, 512) + // params pooled_empty_flag: (B, M) + + int boxes_idx = blockIdx.x * blockDim.x + threadIdx.x; + if (boxes_idx >= boxes_num){ + return; + } + + int bs_idx = blockIdx.y; + + int cnt = 0; + for (int k = 0; k < pts_num; k++){ + if (pts_assign[bs_idx * pts_num * boxes_num + k * boxes_num + boxes_idx]){ + if (cnt < sampled_pts_num){ + pts_idx[bs_idx * boxes_num * sampled_pts_num + boxes_idx * sampled_pts_num + cnt] = k; + cnt++; + } + else break; + } + } + + if (cnt == 0){ + pooled_empty_flag[bs_idx * boxes_num + boxes_idx] = 1; + } + else if (cnt < sampled_pts_num){ + // duplicate same points for sampling + for (int k = cnt; k < sampled_pts_num; k++){ + int duplicate_idx = k % cnt; + int base_offset = bs_idx * boxes_num * sampled_pts_num + boxes_idx * sampled_pts_num; + pts_idx[base_offset + k] = pts_idx[base_offset + duplicate_idx]; + } + } +} + + +__global__ void roipool3d_forward(int batch_size, int pts_num, int boxes_num, int feature_in_len, int sampled_pts_num, + const float *xyz, const int *pts_idx, const float *pts_feature, + float *pooled_features, int *pooled_empty_flag){ + // params xyz: (B, N, 3) + // params pts_idx: (B, M, 512) + // params pts_feature: (B, N, C) + // params pooled_features: (B, M, 512, 3+C) + // params pooled_empty_flag: (B, M) + + int sample_pt_idx = blockIdx.x * blockDim.x + threadIdx.x; + int box_idx = blockIdx.y; + int bs_idx = blockIdx.z; + + if (sample_pt_idx >= sampled_pts_num || box_idx >= boxes_num || bs_idx >= batch_size){ + return; + } + + 
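+    // boxes flagged empty by get_pooled_idx have no points inside them; their
+    // (zero-initialized) slots in pooled_features are left untouched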
if (pooled_empty_flag[bs_idx * boxes_num + box_idx]){ + return; + } + + int temp_idx = bs_idx * boxes_num * sampled_pts_num + box_idx * sampled_pts_num + sample_pt_idx; + int src_pt_idx = pts_idx[temp_idx]; + int dst_feature_offset = temp_idx * (3 + feature_in_len); + + for (int j = 0; j < 3; j++) + pooled_features[dst_feature_offset + j] = xyz[bs_idx * pts_num * 3 + src_pt_idx * 3 + j]; + + int src_feature_offset = bs_idx * pts_num * feature_in_len + src_pt_idx * feature_in_len; + for (int j = 0; j < feature_in_len; j++) + pooled_features[dst_feature_offset + 3 + j] = pts_feature[src_feature_offset + j]; +} + + +void roipool3dLauncher_slow(int batch_size, int pts_num, int boxes_num, int feature_in_len, int sampled_pts_num, + const float *xyz, const float *boxes3d, const float *pts_feature, float *pooled_features, int *pooled_empty_flag){ + roipool3d_forward<<>>(batch_size, pts_num, boxes_num, feature_in_len, sampled_pts_num, + xyz, boxes3d, pts_feature, pooled_features, pooled_empty_flag); + +#ifdef DEBUG + cudaDeviceSynchronize(); // for using printf in kernel function +#endif +} + + +void roipool3dLauncher(int batch_size, int pts_num, int boxes_num, int feature_in_len, int sampled_pts_num, + const float *xyz, const float *boxes3d, const float *pts_feature, float *pooled_features, int *pooled_empty_flag){ + + // printf("batch_size=%d, pts_num=%d, boxes_num=%d\n", batch_size, pts_num, boxes_num); + int *pts_assign = NULL; + cudaMalloc(&pts_assign, batch_size * pts_num * boxes_num * sizeof(int)); // (batch_size, N, M) + // cudaMemset(&pts_assign, -1, batch_size * pts_num * boxes_num * sizeof(int)); + + dim3 blocks(DIVUP(pts_num, THREADS_PER_BLOCK), boxes_num, batch_size); // blockIdx.x(col), blockIdx.y(row) + dim3 threads(THREADS_PER_BLOCK); + assign_pts_to_box3d<<>>(batch_size, pts_num, boxes_num, xyz, boxes3d, pts_assign); + + int *pts_idx = NULL; + cudaMalloc(&pts_idx, batch_size * boxes_num * sampled_pts_num * sizeof(int)); // (batch_size, M, sampled_pts_num) + + dim3 blocks2(DIVUP(boxes_num, THREADS_PER_BLOCK), batch_size); // blockIdx.x(col), blockIdx.y(row) + get_pooled_idx<<>>(batch_size, pts_num, boxes_num, sampled_pts_num, pts_assign, pts_idx, pooled_empty_flag); + + dim3 blocks_pool(DIVUP(sampled_pts_num, THREADS_PER_BLOCK), boxes_num, batch_size); + roipool3d_forward<<>>(batch_size, pts_num, boxes_num, feature_in_len, sampled_pts_num, + xyz, pts_idx, pts_feature, pooled_features, pooled_empty_flag); + + cudaFree(pts_assign); + cudaFree(pts_idx); + +#ifdef DEBUG + cudaDeviceSynchronize(); // for using printf in kernel function +#endif +} \ No newline at end of file diff --git a/lib/utils/sample2grid/Bilinear_voxel_gpu.cu b/lib/utils/sample2grid/Bilinear_voxel_gpu.cu new file mode 100644 index 0000000..be9101f --- /dev/null +++ b/lib/utils/sample2grid/Bilinear_voxel_gpu.cu @@ -0,0 +1,228 @@ +#include + +#include +#include +#include +#include +#include + +//using namespace std; +namespace{ + +template +__global__ void bilinear_voxel_2d_kernel( + const torch::PackedTensorAccessor input, + const torch::PackedTensorAccessor grid, + torch::PackedTensorAccessor output, + torch::PackedTensorAccessor output_bilinear_count +) +{ + // input (N,C,H) + // grid (N,H,Coor) + // output (N,C, H, W) + // output_bilinear_count (N,H,W) + int C = input.size(1); + int input_H = input.size(2); + + int out_H = output.size(2); + int out_W = output.size(3); + + int grid_H=grid.size(1); + int grid_Coor=grid.size(2); + + //batch index + const int n = blockIdx.y; + // column index + const int h = blockIdx.x 
* blockDim.x + threadIdx.x; + if(h < input_H){ + // get the corresponding input x, y co-ordinates from grid + float ix = static_cast(grid[n][h][0]); + float iy = static_cast(grid[n][h][1]); + + ix = grid_sampler_compute_source_index(ix, out_W); + iy = grid_sampler_compute_source_index(iy, out_H); + int ix0 = static_cast(::floor(ix)); +// int ix1 = ::ceil(ix); + int iy0 = static_cast(::floor(iy)); +// int iy1 = ::ceil(iy); + +// float ix_rest = static_cast(ix_nearest - ix); +// float iy_rest = static_cast(iy_nearest - iy); + // hand-craft 4 points + float weight =0; + for (int i = ix0; i <= ix0+1; ++i){ + for (int j = iy0; j <= iy0+1; ++j){ + // assign nearest neighor pixel value to output pixel + if (within_bounds_2d(j, i, out_H, out_W)) { + // bilinear: exp(-(ix**2+iy**2)) + + weight = (1.-::fabs(ix-i))*(1. - ::fabs(iy-j)); +//# if __CUDA_ARCH__>=200 +// printf("weight, %f \n", weight); +//#endif +// float weight = 1.; + // atomicAdd((int* )&(output_count[n][iy_nearest][ix_nearest]), int(1)); + atomicAdd((float* )&(output_bilinear_count[n][j][i]), 1.); + // safe_add_2d(count_ptr, iy_nearest, ix_nearest, out_ct_sH, out_ct_sW, out_H, out_W, 1); + for (int c = 0; c < C; ++c) { + // calculate and set grad_input + atomicAdd((scalar_t* )&(output[n][c][j][i]),weight*input[n][c][h]); + } + } + } + } + + } +} + +template +__global__ void bilinear_voxel_2d_normal_kernel( + torch::PackedTensorAccessor output , + const torch::PackedTensorAccessor output_bilinear_count) +{ + // output (N,C, H, W) + // output_count (N,H,W) + int C = output.size(1); + int out_H = output.size(2); + int out_W = output.size(3); + + + //batch index + const int n = blockIdx.y; + // column index + const int hw = blockIdx.x * blockDim.x + threadIdx.x; + const int h=hw/out_W; + const int w=hw -h*out_W; + if(h < out_H &&w < out_W){ + // get the corresponding input x, y co-ordinates from grid + // assign nearest neighor pixel value to output pixel + float bilinear_ct=output_bilinear_count[n][h][w]; + if(bilinear_ct>0){ + for (int c=0;c +__global__ void bilinear_voxel_2d_backward_kernel( + const torch::PackedTensorAccessor grid, + const torch::PackedTensorAccessor output_bilinear_count, + const torch::PackedTensorAccessor grad_output, + torch::PackedTensorAccessor grad_input +) +{ + + // grid (N,H,Coor) + // output_count (N, H, W) + // grad_output (N,C,H,W) + // grad_input (N,C,H2) + + int C = grad_output.size(1); + int gInp_H = grad_input.size(2); + + int grid_H = grid.size(1); + + int out_H=output_bilinear_count.size(1); + int out_W=output_bilinear_count.size(2); + + //batch index + const int n = blockIdx.y; + // column index + const int h = blockIdx.x * blockDim.x + threadIdx.x; + if(h < gInp_H){ + // get the corresponding input x, y co-ordinates from grid + float ix = static_cast(grid[n][h][0]); + float iy = static_cast(grid[n][h][1]); + + ix = grid_sampler_compute_source_index(ix, out_W); + iy = grid_sampler_compute_source_index(iy, out_H); + + + int ix0 = static_cast(::floor(ix)); +// int ix1 = ::ceil(ix); + int iy0 = static_cast(::floor(iy)); + float weight =0.; + // assign nearest neighor pixel value to output pixel + for (int i = ix0; i <= ix0; ++i){ + for (int j = iy0; j <= iy0; ++j){ + auto ct= output_bilinear_count[n][j][i]; + if(ct<=0 || !within_bounds_2d(j, i, out_H, out_W)){ + //TODO check here + for (int c = 0; c < C; ++c) { + grad_input[n][c][h] = static_cast(0); + } + }else{ + for (int c = 0; c < C; ++c) { + weight = (1.-::fabs(ix-i))*(1. 
- ::fabs(iy-j)); +// float weight = 1.; + // printf('%f',static_cast(grad_output[n][c][iy_nearest][ix_nearest]/ct)); + grad_input[n][c][h] = grad_output[n][c][j][i]*weight/ct; + } + } + } + } + } +} + +}//namespace + +// No shape checking needed here. See # NOTE [ grid_sampler Native Functions ]. +std::tuple +bilinear_grid_voxel_2d_cuda_forward(const torch::Tensor& input, const torch::Tensor& grid, torch::Tensor& output, torch::Tensor& output_bilinear_count) { + const auto N = grid.size(0); + const auto H = grid.size(1); + + const int threads=1024; + const dim3 blocks((H+threads-1)/threads, N); + +// AT_DISPATCH_FLOATING_TYPES(input.scalar_type(), "grid_voxel_2d_cuda", ([&] { + bilinear_voxel_2d_kernel + <<>>( + input.packed_accessor(), + grid.packed_accessor(), + output.packed_accessor(), + output_bilinear_count.packed_accessor()); +// })); + const auto out_H=output.size(2); + const auto out_W=output.size(3); + dim3 blocks2((out_H*out_W+threads-1)/threads, N); + + bilinear_voxel_2d_normal_kernel + <<>>( + output.packed_accessor(), + output_bilinear_count.packed_accessor() + ); + + return std::make_tuple(output,output_bilinear_count); +}; + +// No shape checking needed here. See # NOTE [ grid_sampler Native Functions ]. +torch::Tensor bilinear_grid_voxel_2d_cuda_backward(const torch::Tensor& grid, const torch::Tensor& output_bilinear_count, + const torch::Tensor& grad_output,torch::Tensor& grad_input) { + const auto N = grid.size(0); + const auto H = grid.size(1); + + const int threads=1024; + const dim3 blocks((H+threads-1)/threads, N); + + +// AT_DISPATCH_FLOATING_TYPES(output_bilinear_count.scalar_type(), "grid_voxel_2d_backward_cuda", ([&] { + bilinear_voxel_2d_backward_kernel + <<>>( + grid.packed_accessor(), + output_bilinear_count.packed_accessor(), + grad_output.packed_accessor(), + grad_input.packed_accessor() + ); + +// } +// )); + return grad_input; +} + +PYBIND11_MODULE(TORCH_EXTENSION_NAME, m) { + m.def("bilinear_grid_voxel_2d_cuda_forward", &bilinear_grid_voxel_2d_cuda_forward, "bilinear_grid_voxel_2d_cuda_forward"); + m.def("bilinear_grid_voxel_2d_cuda_backward", &bilinear_grid_voxel_2d_cuda_backward, "bilinear_grid_voxel_2d_cuda_backward"); +} \ No newline at end of file diff --git a/lib/utils/sample2grid/Gaussian_voxel_gpu.cu b/lib/utils/sample2grid/Gaussian_voxel_gpu.cu new file mode 100644 index 0000000..59fb42d --- /dev/null +++ b/lib/utils/sample2grid/Gaussian_voxel_gpu.cu @@ -0,0 +1,215 @@ +#include + +#include +#include +#include +#include +#include + +//using namespace std; +namespace{ + +template +__global__ void gaussian_voxel_2d_kernel( + const torch::PackedTensorAccessor input , + const torch::PackedTensorAccessor grid , + torch::PackedTensorAccessor output , +// torch::PackedTensorAccessor output_count , + torch::PackedTensorAccessor output_gaussian_count + ) +{ + // input (N,C,H) + // grid (N,H,Coor) + // output (N,C, H, W) + // output_gaussian_count (N,H,W) + int C = input.size(1); + int input_H=input.size(2); + + int out_H = output.size(2); + int out_W = output.size(3); + + int grid_H=grid.size(1); + int grid_Coor=grid.size(2); + + //batch index + const int n = blockIdx.y; + // column index + const int h = blockIdx.x * blockDim.x + threadIdx.x; + if(h < input_H){ + // get the corresponding input x, y co-ordinates from grid + scalar_t ix = grid[n][h][0]; + scalar_t iy = grid[n][h][1]; + + ix = grid_sampler_compute_source_index(ix, out_W); + iy = grid_sampler_compute_source_index(iy, out_H); + + int ix_nearest = static_cast(::round(ix)); + int iy_nearest = 
static_cast(::round(iy)); + + float ix_rest = static_cast(ix_nearest - ix); + float iy_rest = static_cast(iy_nearest - iy); + + // assign nearest neighor pixel value to output pixel + if (within_bounds_2d(iy_nearest, ix_nearest, out_H, out_W)) { + // gaussian: exp(-(ix**2+iy**2)) + float gaussian = ::expf(-(ix_rest*ix_rest+iy_rest*iy_rest)); +// atomicAdd((int* )&(output_count[n][iy_nearest][ix_nearest]), int(1)); + atomicAdd((float* )&(output_gaussian_count[n][iy_nearest][ix_nearest]), gaussian); +// safe_add_2d(count_ptr, iy_nearest, ix_nearest, out_ct_sH, out_ct_sW, out_H, out_W, 1); + for (int c = 0; c < C; ++c) { + // calculate and set grad_input + atomicAdd((scalar_t* )&(output[n][c][iy_nearest][ix_nearest]),gaussian*input[n][c][h]); + } + } + } +} + +template +__global__ void gaussian_voxel_2d_normal_kernel( + torch::PackedTensorAccessor output , + const torch::PackedTensorAccessor output_gaussian_count) +{ + // output (N,C, H, W) + // output_count (N,H,W) + int C = output.size(1); + int out_H = output.size(2); + int out_W = output.size(3); + + + //batch index + const int n = blockIdx.y; + // column index + const int hw = blockIdx.x * blockDim.x + threadIdx.x; + const int h=hw/out_W; + const int w=hw -h*out_W; + if(h < out_H &&w < out_W){ + // get the corresponding input x, y co-ordinates from grid + // assign nearest neighor pixel value to output pixel + float gaussian_ct=output_gaussian_count[n][h][w]; + if(gaussian_ct>0){ + for (int c=0;c +__global__ void gaussian_voxel_2d_backward_kernel( + const torch::PackedTensorAccessor grid, + const torch::PackedTensorAccessor output_gaussian_count, + const torch::PackedTensorAccessor grad_output, + torch::PackedTensorAccessor grad_input +) +{ + + // grid (N,H,Coor) + // output_count (N, H, W) + // grad_output (N,C,H,W) + // grad_input (N,C,H2) + + int C = grad_output.size(1); + int gInp_H = grad_input.size(2); + + int grid_H = grid.size(1); + + int out_H=output_gaussian_count.size(1); + int out_W=output_gaussian_count.size(2); + + //batch index + const int n = blockIdx.y; + // column index + const int h = blockIdx.x * blockDim.x + threadIdx.x; + if(h < gInp_H){ + // get the corresponding input x, y co-ordinates from grid + scalar_t ix = grid[n][h][0]; + scalar_t iy = grid[n][h][1]; + + ix = grid_sampler_compute_source_index(ix, out_W); + iy = grid_sampler_compute_source_index(iy, out_H); + + + int ix_nearest = static_cast(::round(ix)); + int iy_nearest = static_cast(::round(iy)); + + float ix_rest = static_cast(ix_nearest - ix); + float iy_rest = static_cast(iy_nearest - iy); + + // assign nearest neighor pixel value to output pixel + auto ct= output_gaussian_count[n][iy_nearest][ix_nearest]; + if(ct<=0 || !within_bounds_2d(iy_nearest, ix_nearest, out_H, out_W)){ + //TODO check here + for (int c = 0; c < C; ++c) { + grad_input[n][c][h] = static_cast(0); + } + }else{ + for (int c = 0; c < C; ++c) { + float gaussian_normal = ::expf(-(ix_rest*ix_rest+iy_rest*iy_rest))/ct; +// printf('%f',static_cast(grad_output[n][c][iy_nearest][ix_nearest]/ct)); + grad_input[n][c][h] = grad_output[n][c][iy_nearest][ix_nearest]*gaussian_normal; + } + } + } + +} + +}//namespace + +// No shape checking needed here. See # NOTE [ grid_sampler Native Functions ]. 
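+// The forward pass runs two kernels: gaussian_voxel_2d_kernel scatters each
+// point's features into its nearest cell with weight exp(-(dx^2 + dy^2)) via
+// atomicAdd (accumulating the weights in output_gaussian_count), and
+// gaussian_voxel_2d_normal_kernel then divides every non-empty cell by its
+// accumulated weight.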
+std::tuple +gaussian_grid_voxel_2d_cuda_forward(const torch::Tensor& input, const torch::Tensor& grid, torch::Tensor& output, torch::Tensor& output_gaussian_count) { + const auto N = grid.size(0); + const auto H = grid.size(1); + + const int threads=1024; + const dim3 blocks((H+threads-1)/threads, N); + +// AT_DISPATCH_FLOATING_TYPES(input.scalar_type(), "grid_voxel_2d_cuda", ([&] { + gaussian_voxel_2d_kernel + <<>>( + input.packed_accessor(), + grid.packed_accessor(), + output.packed_accessor(), + output_gaussian_count.packed_accessor()); +// })); + const auto out_H=output.size(2); + const auto out_W=output.size(3); + dim3 blocks2((out_H*out_W+threads-1)/threads, N); + + gaussian_voxel_2d_normal_kernel + <<>>( + output.packed_accessor(), + output_gaussian_count.packed_accessor() + ); + + return std::make_tuple(output,output_gaussian_count); +}; + +// No shape checking needed here. See # NOTE [ grid_sampler Native Functions ]. +torch::Tensor gaussian_grid_voxel_2d_cuda_backward(const torch::Tensor& grid, const torch::Tensor& output_gaussian_count, + const torch::Tensor& grad_output,torch::Tensor& grad_input) { + const auto N = grid.size(0); + const auto H = grid.size(1); + + const int threads=1024; + const dim3 blocks((H+threads-1)/threads, N); + + +// AT_DISPATCH_FLOATING_TYPES(output_gaussian_count.scalar_type(), "grid_voxel_2d_backward_cuda", ([&] { + gaussian_voxel_2d_backward_kernel + <<>>( + grid.packed_accessor(), + output_gaussian_count.packed_accessor(), + grad_output.packed_accessor(), + grad_input.packed_accessor() + ); + +// } +// )); + return grad_input; +} + +PYBIND11_MODULE(TORCH_EXTENSION_NAME, m) { + m.def("gaussian_grid_voxel_2d_cuda_forward", &gaussian_grid_voxel_2d_cuda_forward, "gaussian_grid_voxel_2d_cuda_forward"); + m.def("gaussian_grid_voxel_2d_cuda_backward", &gaussian_grid_voxel_2d_cuda_backward, "gaussian_grid_voxel_2d_cuda_backward"); +} \ No newline at end of file diff --git a/lib/utils/sample2grid/Voxel_gpu.cu b/lib/utils/sample2grid/Voxel_gpu.cu new file mode 100644 index 0000000..3598f0b --- /dev/null +++ b/lib/utils/sample2grid/Voxel_gpu.cu @@ -0,0 +1,203 @@ +#include + +#include +#include +#include +#include +#include + +//using namespace std; +namespace{ + +template +__global__ void voxel_2d_kernel( + const torch::PackedTensorAccessor input , + const torch::PackedTensorAccessor grid , + torch::PackedTensorAccessor output , + torch::PackedTensorAccessor output_count) +{ + // input (N,C,H) + // grid (N,H,Coor) + // output (N,C, H, W) + // output_count (N,H,W) + int C = input.size(1); + int input_H=input.size(2); + + int out_H = output.size(2); + int out_W = output.size(3); + + int grid_H=grid.size(1); + int grid_Coor=grid.size(2); + + //batch index + const int n = blockIdx.y; + // column index + const int h = blockIdx.x * blockDim.x + threadIdx.x; + if(h < input_H){ + // get the corresponding input x, y co-ordinates from grid + scalar_t ix = grid[n][h][0]; + scalar_t iy = grid[n][h][1]; + + ix = grid_sampler_compute_source_index(ix, out_W); + iy = grid_sampler_compute_source_index(iy, out_H); + + int ix_nearest = static_cast(::round(ix)); + int iy_nearest = static_cast(::round(iy)); + + // assign nearest neighor pixel value to output pixel + if (within_bounds_2d(iy_nearest, ix_nearest, out_H, out_W)) { + atomicAdd((int* )&(output_count[n][iy_nearest][ix_nearest]), int(1)); +// safe_add_2d(count_ptr, iy_nearest, ix_nearest, out_ct_sH, out_ct_sW, out_H, out_W, 1); + for (int c = 0; c < C; ++c) { + // calculate and set grad_input + atomicAdd((scalar_t* 
)&(output[n][c][iy_nearest][ix_nearest]),input[n][c][h]); + } + } + } +} + +template +__global__ void voxel_2d_normal_kernel( + torch::PackedTensorAccessor output , + const torch::PackedTensorAccessor output_count) +{ + // output (N,C, H, W) + // output_count (N,H,W) + int C = output.size(1); + int out_H = output.size(2); + int out_W = output.size(3); + + + //batch index + const int n = blockIdx.y; + // column index + const int hw = blockIdx.x * blockDim.x + threadIdx.x; + const int h=hw/out_W; + const int w=hw -h*out_W; + if(h < out_H &&w < out_W){ + // get the corresponding input x, y coordinates from grid + // assign nearest neighbor pixel value to output pixel + int ct=output_count[n][h][w]; + if(ct>0){ + for (int c=0;c +__global__ void voxel_2d_backward_kernel( + const torch::PackedTensorAccessor grid, + const torch::PackedTensorAccessor output_count, + const torch::PackedTensorAccessor grad_output, + torch::PackedTensorAccessor grad_input +) +{ + + // grid (N,H,Coor) + // output_count (N, H, W) + // grad_output (N,C,H,W) + // grad_input (N,C,H2) + + int C = grad_output.size(1); + int gInp_H = grad_input.size(2); + + int grid_H = grid.size(1); + + int out_H=output_count.size(1); + int out_W=output_count.size(2); + + //batch index + const int n = blockIdx.y; + // column index + const int h = blockIdx.x * blockDim.x + threadIdx.x; + if(h < gInp_H){ + // get the corresponding input x, y co-ordinates from grid + scalar_t ix = grid[n][h][0]; + scalar_t iy = grid[n][h][1]; + + ix = grid_sampler_compute_source_index(ix, out_W); + iy = grid_sampler_compute_source_index(iy, out_H); + + + int ix_nearest = static_cast(::round(ix)); + int iy_nearest = static_cast(::round(iy)); + + // assign nearest neighor pixel value to output pixel + auto ct= output_count[n][iy_nearest][ix_nearest]; + if(ct<=0 || !within_bounds_2d(iy_nearest, ix_nearest, out_H, out_W)){ + //TODO check here + for (int c = 0; c < C; ++c) { + grad_input[n][c][h] = static_cast(0); + } + }else{ + for (int c = 0; c < C; ++c) { +// printf('%f',static_cast(grad_output[n][c][iy_nearest][ix_nearest]/ct)); + grad_input[n][c][h] = grad_output[n][c][iy_nearest][ix_nearest]/(float)ct; + } + } + } + +} + +}//namespace + +// No shape checking needed here. See # NOTE [ grid_sampler Native Functions ]. +std::tuple +grid_voxel_2d_cuda_forward(const torch::Tensor& input, const torch::Tensor& grid, torch::Tensor& output, torch::Tensor& output_count) { + const auto N = grid.size(0); + const auto H = grid.size(1); + + const int threads=1024; + const dim3 blocks((H+threads-1)/threads, N); + +// AT_DISPATCH_FLOATING_TYPES(input.scalar_type(), "grid_voxel_2d_cuda", ([&] { + voxel_2d_kernel + <<>>( + input.packed_accessor(), + grid.packed_accessor(), + output.packed_accessor(), + output_count.packed_accessor()); +// })); + const auto out_H=output.size(2); + const auto out_W=output.size(3); + dim3 blocks2((out_H*out_W+threads-1)/threads, N); + + voxel_2d_normal_kernel + <<>>( + output.packed_accessor(), + output_count.packed_accessor() + ); + + return std::make_tuple(output,output_count); +}; + +// No shape checking needed here. See # NOTE [ grid_sampler Native Functions ]. 
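+// The backward pass mirrors the forward averaging: each point receives the
+// gradient of the cell it was scattered into, divided by that cell's hit
+// count; points whose cell is empty or out of bounds get a zero gradient.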
+torch::Tensor grid_voxel_2d_cuda_backward(const torch::Tensor& grid, const torch::Tensor& output_count, + const torch::Tensor& grad_output,torch::Tensor& grad_input) { + const auto N = grid.size(0); + const auto H = grid.size(1); + + const int threads=1024; + const dim3 blocks((H+threads-1)/threads, N); + + +// AT_DISPATCH_FLOATING_TYPES(output_count.scalar_type(), "grid_voxel_2d_backward_cuda", ([&] { + voxel_2d_backward_kernel + <<>>( + grid.packed_accessor(), + output_count.packed_accessor(), + grad_output.packed_accessor(), + grad_input.packed_accessor() + ); + +// } +// )); + return grad_input; +} + +PYBIND11_MODULE(TORCH_EXTENSION_NAME, m) { + m.def("grid_voxel_2d_cuda_forward", &grid_voxel_2d_cuda_forward, "grid_voxel_2d_cuda"); + m.def("grid_voxel_2d_cuda_backward", &grid_voxel_2d_cuda_backward, "grid_voxel_2d_backward_cuda"); +} \ No newline at end of file diff --git a/lib/utils/sample2grid/__init__.py b/lib/utils/sample2grid/__init__.py new file mode 100644 index 0000000..c60cefb --- /dev/null +++ b/lib/utils/sample2grid/__init__.py @@ -0,0 +1,9 @@ +""" +Created by silver at 2019/10/16 11:26 +Email: xiwuchencn[at]gmail[dot]com +""" + + +#import voxel_cuda + +from lib.utils.sample2grid.voxel_cuda import sample2grid_F, sample2GaussianGrid_F, sample2BilinearGrid_F \ No newline at end of file diff --git a/lib/utils/sample2grid/myGridSampler.cuh b/lib/utils/sample2grid/myGridSampler.cuh new file mode 100644 index 0000000..df6f005 --- /dev/null +++ b/lib/utils/sample2grid/myGridSampler.cuh @@ -0,0 +1,57 @@ +//#include +//#include +//#include +#include +#include +#include +#include + + +// Unnormalizes a coordinate from the -1 to +1 scale to its pixel index value, +// where we view each pixel as an area between (idx - 0.5) and (idx + 0.5). +// if align_corners: -1 and +1 get sent to the centers of the corner pixels +// -1 --> 0 +// +1 --> (size - 1) +// scale_factor = (size - 1) / 2 +// if not align_corners: -1 and +1 get sent to the image edges +// -1 --> -0.5 +// +1 --> (size - 1) + 0.5 == size - 0.5 +// scale_factor = size / 2 +template +static __forceinline__ __device__ +scalar_t grid_sampler_unnormalize(scalar_t coord, int size) { + + // unnormalize coord from [-1, 1] to [0, size - 1] + return ((coord + 1.f) / 2) * (size - 1); + +} + + +// Clips coordinates to between 0 and clip_limit - 1 +template +static __forceinline__ __device__ +scalar_t clip_coordinates(scalar_t in, int clip_limit) { + return ::min(static_cast(clip_limit - 1), ::max(in, static_cast(0))); +} + + + +// Computes the pixel source index value for a grid coordinate +template +static __forceinline__ __device__ +scalar_t grid_sampler_compute_source_index( + scalar_t coord, + int size) { + coord = grid_sampler_unnormalize(coord,size); + + // clip coordinates to image borders + coord = clip_coordinates(coord, size); + return coord; +} + +static __forceinline__ __device__ +bool within_bounds_2d(int h, int w, int H, int W) { + return h >= 0 && h < H && w >= 0 && w < W; +} + + diff --git a/lib/utils/sample2grid/setup.py b/lib/utils/sample2grid/setup.py new file mode 100644 index 0000000..b951b96 --- /dev/null +++ b/lib/utils/sample2grid/setup.py @@ -0,0 +1,44 @@ +""" +Created by silver at 2019/10/11 21:23 +Email: xiwuchencn[at]gmail[dot]com +""" +from setuptools import setup +from torch.utils.cpp_extension import BuildExtension, CUDAExtension + +setup( + name = 'gridvoxel', + ext_modules = [ + CUDAExtension('gridvoxel_cuda', [ + 'Voxel_gpu.cu' + ], + extra_compile_args = { 'cxx': ['-g'], 'nvcc': ['-O2'] + }) + ], + 
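+    # note: include_dirs = ['./'] is presumably what lets nvcc find myGridSampler.cuh
+    # next to the .cu sources; this file calls setup() three times, so a single
+    # `python setup.py install` run builds and installs all three extensions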
cmdclass = { 'build_ext': BuildExtension }
+    , include_dirs = ['./'],
+)
+setup(
+    name = 'gaussiangridvoxel',
+    ext_modules = [
+        CUDAExtension('gaussian_gridvoxel_cuda', [
+            'Gaussian_voxel_gpu.cu'
+        ],
+            extra_compile_args = { 'cxx': ['-g'], 'nvcc': ['-O2']
+            })
+    ],
+    cmdclass = { 'build_ext': BuildExtension }
+    , include_dirs = ['./'],
+)
+
+setup(
+    name = 'bilineargridvoxel',
+    ext_modules = [
+        CUDAExtension('bilinear_gridvoxel_cuda', [
+            'Bilinear_voxel_gpu.cu'
+        ],
+            extra_compile_args = { 'cxx': ['-g'], 'nvcc': ['-O2']
+            })
+    ],
+    cmdclass = { 'build_ext': BuildExtension }
+    , include_dirs = ['./'],
+)
\ No newline at end of file
diff --git a/lib/utils/sample2grid/voxel_cuda.py b/lib/utils/sample2grid/voxel_cuda.py
new file mode 100644
index 0000000..17de674
--- /dev/null
+++ b/lib/utils/sample2grid/voxel_cuda.py
@@ -0,0 +1,190 @@
+"""
+Created by silver at 2019/10/15 22:29
+Email: xiwuchencn[at]gmail[dot]com
+"""
+
+import torch
+from gridvoxel_cuda import grid_voxel_2d_cuda_forward, grid_voxel_2d_cuda_backward
+from gaussian_gridvoxel_cuda import gaussian_grid_voxel_2d_cuda_forward, gaussian_grid_voxel_2d_cuda_backward
+from bilinear_gridvoxel_cuda import bilinear_grid_voxel_2d_cuda_forward, bilinear_grid_voxel_2d_cuda_backward
+
+from torch.autograd import Function
+
+
+class Sample2Grid(Function):
+    @staticmethod
+    def forward(ctx, input, grid, output_size):
+        """
+        This function is the inverse operation of grid_sample: while grid_sample samples values from a 4D (or
+        5D) tensor at the locations given by a grid, this function is specifically modified for point clouds,
+        so the grid and input are 3D tensors while the output is 4D.
+        :param ctx:
+        :param input: (B,C,N)
+        :param grid: (B,N,Coor)
+        :param output_size: [b,c,h,w]
+        :return: output: (B,C,H,W); output_ct: (B,H,W)
+        """
+        # assert grid.requires_grad == False
+        assert input.dim()==3
+        assert grid.dim()==3
+        assert output_size.__len__()==4
+        assert output_size[1]==input.size(1)
+        # print(input.requires_grad)
+        output = torch.zeros(output_size, device = input.device).float().requires_grad_(False)
+        output_ct = torch.zeros([output_size[i] for i in [0, 2, 3]], device = input.device, dtype = torch.int)
+        grid_voxel_2d_cuda_forward(input, grid, output, output_ct)
+        ctx.save_for_backward(grid, output_ct)
+        return output.requires_grad_(True)
+
+    @staticmethod
+    def backward(ctx, grad_output):
+        # print('grad_output',grad_output)
+        C = grad_output.size(1)
+        grid, output_ct = ctx.saved_tensors
+        B, N = grid.shape[0:2]
+        d_input = torch.zeros((B, C, N), device = grid.device, dtype = torch.float)
+        grid_voxel_2d_cuda_backward(grid, output_ct, grad_output, d_input)
+
+        return d_input, None, None
+
+sample2grid_F = Sample2Grid.apply
+
+class Sample2GaussianGrid(Function):
+    @staticmethod
+    def forward(ctx, input, grid, output_size):
+        """
+        This function is the inverse operation of grid_sample: while grid_sample samples values from a 4D (or
+        5D) tensor at the locations given by a grid, this function is specifically modified for point clouds,
+        so the grid and input are 3D tensors while the output is 4D.
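+        Each point is splatted onto its nearest output cell with weight exp(-(dx^2 + dy^2)), where (dx, dy) is
+        the offset to the cell centre, and every non-empty cell is then normalized by its accumulated weight.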
+        :param ctx:
+        :param input: (B,C,N)
+        :param grid: (B,N,Coor)
+        :param output_size: [b,c,h,w]
+        :return: output: (B,C,H,W); output_ct: (B,H,W)
+        """
+        # assert grid.requires_grad == False
+        assert input.dim()==3
+        assert grid.dim()==3
+        assert output_size.__len__()==4
+        assert output_size[1]==input.size(1)
+        # print(input.requires_grad)
+        output = torch.zeros(output_size, device = input.device).float().requires_grad_(False)
+        output_gaussian_ct = torch.zeros([output_size[i] for i in [0, 2, 3]], device = input.device, dtype = torch.float)
+        gaussian_grid_voxel_2d_cuda_forward(input, grid, output, output_gaussian_ct)
+        ctx.save_for_backward(grid, output_gaussian_ct)
+        return output.requires_grad_(True)
+
+    @staticmethod
+    def backward(ctx, grad_output):
+        # print('grad_output',grad_output)
+        C = grad_output.size(1)
+        grid, output_gaussian_ct = ctx.saved_tensors
+        B, N = grid.shape[0:2]
+        d_input = torch.zeros((B, C, N), device = grid.device, dtype = torch.float)
+        gaussian_grid_voxel_2d_cuda_backward(grid, output_gaussian_ct, grad_output, d_input)
+
+        return d_input, None, None
+
+sample2GaussianGrid_F = Sample2GaussianGrid.apply
+
+class Sample2BilinearGrid(Function):
+    @staticmethod
+    def forward(ctx, input, grid, output_size):
+        """
+        This function is the inverse operation of grid_sample: while grid_sample samples values from a 4D (or
+        5D) tensor at the locations given by a grid, this function is specifically modified for point clouds,
+        so the grid and input are 3D tensors while the output is 4D.
+        :param ctx:
+        :param input: (B,C,N)
+        :param grid: (B,N,Coor)
+        :param output_size: [b,c,h,w]
+        :return: output: (B,C,H,W); output_ct: (B,H,W)
+        """
+        # assert grid.requires_grad == False
+        assert input.dim()==3
+        assert grid.dim()==3
+        assert output_size.__len__()==4
+        assert output_size[1]==input.size(1)
+        # print(input.requires_grad)
+        output = torch.zeros(output_size, device = input.device).float().requires_grad_(False)
+        output_bilinear_ct = torch.zeros([output_size[i] for i in [0, 2, 3]], device = input.device, dtype = torch.float)
+        bilinear_grid_voxel_2d_cuda_forward(input, grid, output, output_bilinear_ct)
+        ctx.save_for_backward(grid, output_bilinear_ct)
+        return output.requires_grad_(True)
+
+    @staticmethod
+    def backward(ctx, grad_output):
+        # print('grad_output',grad_output)
+        C = grad_output.size(1)
+        grid, output_bilinear_ct = ctx.saved_tensors
+        B, N = grid.shape[0:2]
+        d_input = torch.zeros((B, C, N), device = grid.device, dtype = torch.float)
+        bilinear_grid_voxel_2d_cuda_backward(grid, output_bilinear_ct, grad_output, d_input)
+
+        return d_input, None, None
+
+sample2BilinearGrid_F = Sample2BilinearGrid.apply
+
+
+if __name__ == '__main__':
+    from torch.nn.functional import grid_sample
+    import torch
+    B = 1
+    C = 1
+    H, W = 2,2
+    N = 1
+    # img = torch.rand([1, 1, 5, 5]).cuda().requires_grad_(True)
+    # index=torch.randint(0,5,size = [1,C,2])
+    # grid=index.float()/(torch.tensor([5.,5.])-1.)*2-1.
+    seed = 0
+    torch.manual_seed(seed)  # set the random seed for the CPU
+    torch.cuda.manual_seed(seed)  # set the random seed for the current GPU
+    torch.cuda.manual_seed_all(seed)  # set the random seed for all GPUs
+    grid = torch.rand([B, N, 2]) * 2 - 1.
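+    # grid is in normalized coordinates: [-1, 1] maps to the pixel range [0, size - 1]
+    # (the align_corners=True convention implemented by
+    # grid_sampler_compute_source_index in myGridSampler.cuh), clipped to the borders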
+ # print(grid) + # print(index) + # index_ct=torch.zeros([1,5,5]) + # for i in range(C): + # index_ct[0,index[0,i,1],index[0,i,0]]+=1 + # print('index',index_ct) + grid = grid.cuda().requires_grad_(True) + # pc = grid_sample(img, grid.unsqueeze(-2),mode = 'nearest',padding_mode ='zeros') + # print(pc.shape) + # pc=pc.squeeze(-1) + pc=torch.rand([B, C, N]).cuda().requires_grad_(True) + # print(pc.requires_grad) + # pc=pc.requires_grad_(True) + img_new = sample2grid_F(pc, grid.clone(), [B, C, H, W]) + print(grid) + print("1111111111111111") + print(img_new) + print(pc) + loss=(img_new**2).sum() + loss.backward() + print(img_new.grad) + print('pc_grad',pc.grad) + + print("2222222222222222") + img_new2=sample2GaussianGrid_F(pc,grid.clone(),[B, C, H, W]) + + print(img_new2) + print(pc) + pc.grad.zero_() + loss=(img_new2**2).sum() + loss.backward() + print(img_new2.grad) + print('pc_grad',pc.grad) + + print("3333333333333333") + img_new3 = sample2BilinearGrid_F(pc, grid.clone(), [B, C, H, W]) + print(img_new3) + print(pc) + pc.grad.zero_() + loss=(img_new3**2).sum() + loss.backward() + print(img_new3.grad) + print('pc_grad',pc.grad) + + + diff --git a/pointnet2_lib/.gitignore b/pointnet2_lib/.gitignore new file mode 100644 index 0000000..cf42194 --- /dev/null +++ b/pointnet2_lib/.gitignore @@ -0,0 +1,4 @@ +pointnet2/build/ +pointnet2/dist/ +pointnet2/pointnet2.egg-info/ +__pycache__/ diff --git a/pointnet2_lib/LICENSE b/pointnet2_lib/LICENSE new file mode 100644 index 0000000..77c8ebe --- /dev/null +++ b/pointnet2_lib/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2019 Shaoshuai Shi + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/pointnet2_lib/README.md b/pointnet2_lib/README.md new file mode 100644 index 0000000..c5a43f0 --- /dev/null +++ b/pointnet2_lib/README.md @@ -0,0 +1,51 @@ +# Pointnet2.PyTorch + +* PyTorch implementation of [PointNet++](https://arxiv.org/abs/1706.02413) based on [erikwijmans/Pointnet2_PyTorch](https://github.com/erikwijmans/Pointnet2_PyTorch). +* Faster than the original codes by re-implementing the CUDA operations. 
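+
+## Usage sketch
+For orientation, the snippet below sketches how the multi-scale set-abstraction module in `pointnet2/pointnet2_modules.py` is typically called. This is a minimal sketch, assuming the extension has been built as in the Install section below and that the repo root is on `PYTHONPATH`; the toy tensor sizes are made up:
+
+```python
+import torch
+from pointnet2.pointnet2_modules import PointnetSAModuleMSG
+
+# toy inputs: a batch of 2 clouds with 1024 points and 16-dim per-point features
+xyz = torch.rand(2, 1024, 3).cuda()        # point coordinates (B, N, 3)
+features = torch.rand(2, 16, 1024).cuda()  # per-point features (B, C, N)
+
+# sample 256 centroids by furthest point sampling and group neighbours at two radii
+sa = PointnetSAModuleMSG(
+    npoint=256,
+    radii=[0.1, 0.5],
+    nsamples=[16, 32],
+    mlps=[[16, 32], [16, 32]],  # first width matches C; +3 for xyz is added internally
+).cuda()
+
+new_xyz, new_features, idx = sa(xyz, features)
+# new_xyz: (2, 256, 3); new_features: (2, 64, 256), the two scales concatenated
+```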
+
+## Installation
+### Requirements
+* Linux (tested on Ubuntu 14.04/16.04)
+* Python 3.6+
+* PyTorch 1.0
+
+### Install
+Install this library by running the following command:
+
+```shell
+cd pointnet2
+python setup.py install
+cd ../
+```
+
+## Examples
+Here is a simple example of using this library for the task of KITTI outdoor foreground point cloud segmentation; refer to the paper [PointRCNN](https://arxiv.org/abs/1812.04244) for the details of the task description and foreground label generation.
+
+1. Download the training data from the [KITTI 3D object detection](http://www.cvlibs.net/datasets/kitti/eval_object.php?obj_benchmark=3d) website and organize the downloaded files as follows:
+```
+Pointnet2.PyTorch
+├── pointnet2
+├── tools
+│   ├──data
+│   │   ├── KITTI
+│   │   │   ├── ImageSets
+│   │   │   ├── object
+│   │   │   │   ├──training
+│   │   │   │   ├──calib & velodyne & label_2 & image_2
+│   │   train_and_eval.py
+```
+
+2. Run the following command to train and evaluate:
+```shell
+cd tools
+python train_and_eval.py --batch_size 8 --epochs 100 --ckpt_save_interval 2
+```
+
+
+
+## Projects using this repo
+* [PointRCNN](https://github.com/sshaoshuai/PointRCNN): 3D object detector from raw point cloud.
+
+## Acknowledgement
+* [charlesq34/pointnet2](https://github.com/charlesq34/pointnet2): Paper author and official code repo.
+* [erikwijmans/Pointnet2_PyTorch](https://github.com/erikwijmans/Pointnet2_PyTorch): Initial work of PyTorch implementation of PointNet++.
diff --git a/pointnet2_lib/pointnet2/pointnet2_modules.py b/pointnet2_lib/pointnet2/pointnet2_modules.py
new file mode 100644
index 0000000..3fa26c7
--- /dev/null
+++ b/pointnet2_lib/pointnet2/pointnet2_modules.py
@@ -0,0 +1,177 @@
+import torch
+import torch.nn as nn
+import torch.nn.functional as F
+
+from . import pointnet2_utils
+from . import pytorch_utils as pt_utils
+from typing import List
+
+
+class _PointnetSAModuleBase(nn.Module):
+
+    def __init__(self):
+        super().__init__()
+        self.npoint = None
+        self.groupers = None
+        self.mlps = None
+        self.pool_method = 'max_pool'
+
+    def forward(self, xyz: torch.Tensor, features: torch.Tensor = None, new_xyz = None) -> (torch.Tensor, torch.Tensor, torch.Tensor):
+        """
+        :param xyz: (B, N, 3) tensor of the xyz coordinates of the features
+        :param features: (B, N, C) tensor of the descriptors of the features
+        :param new_xyz:
+        :return:
+            new_xyz: (B, npoint, 3) tensor of the new features' xyz
+            new_features: (B, npoint, \sum_k(mlps[k][-1])) tensor of the new_features descriptors
+            idx: (B, npoint) indices of the sampled points (None when new_xyz is given or npoint is None)
+        """
+        new_features_list = []
+
+        xyz_flipped = xyz.transpose(1, 2).contiguous()
+        # if new_xyz is None:
+        #     new_xyz = pointnet2_utils.gather_operation(
+        #         xyz_flipped,
+        #         pointnet2_utils.furthest_point_sample(xyz, self.npoint)
+        #     ).transpose(1, 2).contiguous() if self.npoint is not None else None
+
+        if new_xyz is None:
+            if self.npoint is not None:
+                idx = pointnet2_utils.furthest_point_sample(xyz, self.npoint)
+                new_xyz = pointnet2_utils.gather_operation(
+                    xyz_flipped,
+                    idx
+                ).transpose(1, 2).contiguous()
+            else:
+                new_xyz = None
+                idx = None
+        else:
+            idx = None
+
+        for i in range(len(self.groupers)):
+            new_features = self.groupers[i](xyz, new_xyz, features)  # (B, C, npoint, nsample)
+            # print(new_features.size())
+            # print(features.size(), new_features.size())
+
+            new_features = self.mlps[i](new_features)  # (B, mlp[-1], npoint, nsample)
+            if self.pool_method == 'max_pool':
+                new_features = F.max_pool2d(
+                    new_features, kernel_size = [1, new_features.size(3)]
+                )  # (B, mlp[-1], npoint, 1)
+            elif self.pool_method == 'avg_pool':
+                new_features = F.avg_pool2d(
+                    new_features, kernel_size = [1, new_features.size(3)]
+                )  # (B, mlp[-1], npoint, 1)
+            else:
+                raise NotImplementedError
+
+            new_features = new_features.squeeze(-1)  # (B, mlp[-1], npoint)
+            new_features_list.append(new_features)
+
+        return new_xyz, torch.cat(new_features_list, dim = 1), idx
+
+
+class PointnetSAModuleMSG(_PointnetSAModuleBase):
+    """Pointnet set abstraction layer with multiscale grouping"""
+
+    def __init__(self, *, npoint: int, radii: List[float], nsamples: List[int], mlps: List[List[int]], bn: bool = True,
+                 use_xyz: bool = True, pool_method = 'max_pool', instance_norm = False):
+        """
+        :param npoint: int
+        :param radii: list of float, list of radii to group with
+        :param nsamples: list of int, number of samples in each ball query
+        :param mlps: list of list of int, spec of the pointnet before the global pooling for each scale
+        :param bn: whether to use batchnorm
+        :param use_xyz:
+        :param pool_method: max_pool / avg_pool
+        :param instance_norm: whether to use instance_norm
+        """
+        super().__init__()
+
+        assert len(radii) == len(nsamples) == len(mlps)
+
+        self.npoint = npoint
+        self.groupers = nn.ModuleList()
+        self.mlps = nn.ModuleList()
+        for i in range(len(radii)):
+            radius = radii[i]
+            nsample = nsamples[i]
+            self.groupers.append(
+                pointnet2_utils.QueryAndGroup(radius, nsample, use_xyz = use_xyz)
+                if npoint is not None else pointnet2_utils.GroupAll(use_xyz)
+            )
+            mlp_spec = mlps[i]
+            if use_xyz:
+                mlp_spec[0] += 3
+
+            self.mlps.append(pt_utils.SharedMLP(mlp_spec, bn = bn, instance_norm = instance_norm))
+        self.pool_method = pool_method
+
+
+class PointnetSAModule(PointnetSAModuleMSG):
+    """Pointnet set abstraction layer"""
+
+    def __init__(self, *, mlp: List[int], npoint: int = None, radius: float = None, nsample: int = None,
+                 bn: bool = True, use_xyz: bool = True, pool_method = 'max_pool', instance_norm = False):
+        """
+        :param mlp: list of int, spec of the pointnet before the global max_pool
+        :param npoint: int, number of features
+        :param radius: float, radius of ball
+        :param nsample: int, number of samples in the ball query
+        :param bn: whether to use batchnorm
+        :param use_xyz:
+        :param pool_method: max_pool / avg_pool
+        :param instance_norm: whether to use instance_norm
+        """
+        super().__init__(
+            mlps = [mlp], npoint = npoint, radii = [radius], nsamples = [nsample], bn = bn, use_xyz = use_xyz,
+            pool_method = pool_method, instance_norm = instance_norm
+        )
+
+
+class PointnetFPModule(nn.Module):
+    r"""Propagates the features of one set to another"""
+
+    def __init__(self, *, mlp: List[int], bn: bool = True, activation = nn.ReLU(inplace = True)):
+        """
+        :param mlp: list of int
+        :param bn: whether to use batchnorm
+        """
+        super().__init__()
+        self.mlp = pt_utils.SharedMLP(mlp, bn = bn, activation = activation)
+
+    def forward(
+            self, unknown: torch.Tensor, known: torch.Tensor, unknow_feats: torch.Tensor, known_feats: torch.Tensor
+    ) -> torch.Tensor:
+        """
+        :param unknown: (B, n, 3) tensor of the xyz positions of the unknown features
+        :param known: (B, m, 3) tensor of the xyz positions of the known features
+        :param unknow_feats: (B, C1, n) tensor of the features to be propagated to
+        :param known_feats: (B, C2, m) tensor of features to be propagated
+        :return:
+            new_features: (B, mlp[-1], n) tensor of the features of the unknown features
+        """
+        if known is not None:
+            dist, idx = pointnet2_utils.three_nn(unknown, known)
+            dist_recip = 1.0 / (dist + 1e-8)
+            norm = torch.sum(dist_recip, dim = 2, keepdim = True)
+            weight = dist_recip / norm
+
+            interpolated_feats = pointnet2_utils.three_interpolate(known_feats, idx, weight)
+        else:
+            interpolated_feats = known_feats.expand(*known_feats.size()[0:2], unknown.size(1))
+
+        if unknow_feats is not None:
+            new_features = torch.cat([interpolated_feats, unknow_feats], dim = 1)  # (B, C2 + C1, n)
+        else:
+            new_features = interpolated_feats
+
+        new_features = new_features.unsqueeze(-1)
+        new_features = self.mlp(new_features)
+
+        return new_features.squeeze(-1)
+
+
+if __name__ == "__main__":
+    pass
diff --git a/pointnet2_lib/pointnet2/pointnet2_utils.py b/pointnet2_lib/pointnet2/pointnet2_utils.py
new file mode 100644
index 0000000..3dc819b
--- /dev/null
+++ b/pointnet2_lib/pointnet2/pointnet2_utils.py
@@ -0,0 +1,290 @@
+import torch
+from torch.autograd import Variable
+from torch.autograd import Function
+import torch.nn as nn
+from typing import Tuple
+
+import pointnet2_cuda as pointnet2
+
+
+class FurthestPointSampling(Function):
+    @staticmethod
+    def forward(ctx, xyz: torch.Tensor, npoint: int) -> torch.Tensor:
+        """
+        Uses iterative furthest point sampling to select a set of npoint features that have the largest
+        minimum distance
+        :param ctx:
+        :param xyz: (B, N, 3) where N > npoint
+        :param npoint: int, number of features in the sampled set
+        :return:
+            output: (B, npoint) tensor containing the set
+        """
+        assert xyz.is_contiguous()
+
+        B, N, _ = xyz.size()
+        output = torch.cuda.IntTensor(B, npoint)
+        temp = torch.cuda.FloatTensor(B, N).fill_(1e10)
+
+        pointnet2.furthest_point_sampling_wrapper(B, N, npoint, xyz, temp, output)
+        return output
+
+    @staticmethod
+    def backward(ctx, a = None):
+        return None, None
+
+
+furthest_point_sample = FurthestPointSampling.apply
+
+
+class GatherOperation(Function):
+
+    @staticmethod
+    def forward(ctx, features: torch.Tensor, idx: torch.Tensor) -> torch.Tensor:
+        """
+        :param ctx:
+        :param features: (B, C, N)
+        :param idx: (B, npoint) index tensor of the features to gather
+        :return:
+            output: (B, C, npoint)
+        """
+        assert features.is_contiguous()
+        assert idx.is_contiguous()
+
+        B, npoint = idx.size()
+        _, C, N = features.size()
+        output = torch.cuda.FloatTensor(B, C, npoint)
+
+        pointnet2.gather_points_wrapper(B, C, N, npoint, features, idx, output)
+
+        ctx.for_backwards = (idx, C, N)
+        return output
+
+    @staticmethod
+    def backward(ctx, grad_out):
+        idx, C, N = ctx.for_backwards
+        B, npoint = idx.size()
+
+        grad_features = Variable(torch.cuda.FloatTensor(B, C, N).zero_())
+        grad_out_data = grad_out.data.contiguous()
+        pointnet2.gather_points_grad_wrapper(B, C, N, npoint, grad_out_data, idx, grad_features.data)
+        return grad_features, None
+
+
+gather_operation = GatherOperation.apply
+
+
+class ThreeNN(Function):
+
+    @staticmethod
+    def forward(ctx, unknown: torch.Tensor, known: torch.Tensor) -> Tuple[torch.Tensor, torch.Tensor]:
+        """
+        Find the three nearest neighbors of unknown in known
+        :param ctx:
+        :param unknown: (B, N, 3)
+        :param known: (B, M, 3)
+        :return:
+            dist: (B, N, 3) l2 distance to the three nearest neighbors
+            idx: (B, N, 3) index of 3 nearest neighbors
+        """
+        assert unknown.is_contiguous()
+        assert known.is_contiguous()
+
+        B, N, _ = unknown.size()
+        m = known.size(1)
+        dist2 = torch.cuda.FloatTensor(B, N, 3)
+        idx = torch.cuda.IntTensor(B, N, 3)
+
+        pointnet2.three_nn_wrapper(B, N, m, unknown, known, dist2, idx)
+        return torch.sqrt(dist2), idx
+
+    @staticmethod
+    def backward(ctx, a = None, b = None):
+        return None, None
+
+
+three_nn = ThreeNN.apply
+
+
+class ThreeInterpolate(Function):
+
+    @staticmethod
+    def forward(ctx, features: torch.Tensor, idx: torch.Tensor, weight: torch.Tensor) -> torch.Tensor:
+        """
+        Performs weighted linear interpolation on 3 features
+        :param ctx:
+        :param features: (B, C, M) feature descriptors to be interpolated from
+        :param idx: (B, n, 3) three nearest neighbors of the target features in features
+        :param weight: (B, n, 3) weights
+        :return:
+            output: (B, C, N) tensor of the interpolated features
+        """
+        assert features.is_contiguous()
+        assert idx.is_contiguous()
+        assert weight.is_contiguous()
+
+        B, c, m = features.size()
+        n = idx.size(1)
+        ctx.three_interpolate_for_backward = (idx, weight, m)
+        output = torch.cuda.FloatTensor(B, c, n)
+
+        pointnet2.three_interpolate_wrapper(B, c, m, n, features, idx, weight, output)
+        return output
+
+    @staticmethod
+    def backward(ctx, grad_out: torch.Tensor) -> Tuple[torch.Tensor, torch.Tensor, torch.Tensor]:
+        """
+        :param ctx:
+        :param grad_out: (B, C, N) tensor with gradients of outputs
+        :return:
+            grad_features: (B, C, M) tensor with gradients of features
+            None:
+            None:
+        """
+        idx, weight, m = ctx.three_interpolate_for_backward
+        B, c, n = grad_out.size()
+
+        grad_features = Variable(torch.cuda.FloatTensor(B, c, m).zero_())
+        grad_out_data = grad_out.data.contiguous()
+
+        pointnet2.three_interpolate_grad_wrapper(B, c, n, m, grad_out_data, idx, weight, grad_features.data)
+        return grad_features, None, None
+
+
+three_interpolate = ThreeInterpolate.apply
+
+
+class GroupingOperation(Function):
+
+    @staticmethod
+    def forward(ctx, features: torch.Tensor, idx: torch.Tensor) -> torch.Tensor:
+        """
+        :param ctx:
+        :param features: (B, C, N) tensor of features to group
+        :param idx: (B, npoint, nsample) tensor containing the indices of features to group with
+        :return:
+            output: (B, C, npoint, nsample) tensor
+        """
+        assert features.is_contiguous()
+        assert idx.is_contiguous()
+
+        B, nfeatures, nsample = idx.size()
+        _, C, N = features.size()
+        output = torch.cuda.FloatTensor(B, C, nfeatures, nsample)
+
+        pointnet2.group_points_wrapper(B, C, N, nfeatures, nsample, features, idx, output)
+
+        ctx.for_backwards = (idx, N)
+        return output
+
+    @staticmethod
+    def backward(ctx, grad_out: torch.Tensor) -> Tuple[torch.Tensor, torch.Tensor]:
+        """
+        :param ctx:
+        :param grad_out: (B, C, npoint, nsample) tensor of the gradients of the output from forward
+        :return:
+            grad_features: (B, C, N) gradient of the features
+        """
+        idx, N = ctx.for_backwards
+
+        B, C, npoint, nsample = grad_out.size()
+        grad_features = Variable(torch.cuda.FloatTensor(B, C, N).zero_())
+
+        grad_out_data = grad_out.data.contiguous()
+        pointnet2.group_points_grad_wrapper(B, C, N, npoint, nsample, grad_out_data, idx, grad_features.data)
+        return grad_features, None
+
+
+grouping_operation = GroupingOperation.apply
+
+
+class BallQuery(Function):
+
+    @staticmethod
+    def forward(ctx, radius: float, nsample: int, xyz: torch.Tensor, new_xyz: torch.Tensor) -> torch.Tensor:
+        """
+        :param ctx:
+        :param radius: float, radius of the balls
+        :param nsample: int, maximum number of features in the balls
+        :param xyz: (B, N, 3) xyz coordinates of the features
+        :param new_xyz: (B, npoint, 3) centers of the ball query
+        :return:
+            idx: (B, npoint, nsample) tensor with the indices of the features that form the query balls
+        """
+        assert new_xyz.is_contiguous()
+        assert xyz.is_contiguous()
+
+        B, N, _ = xyz.size()
+        npoint = new_xyz.size(1)
+        idx = torch.cuda.IntTensor(B, npoint, nsample).zero_()
+
+        pointnet2.ball_query_wrapper(B, N, npoint, radius, nsample, new_xyz, xyz, idx)
+        return idx
+
+    @staticmethod
+    def backward(ctx, a = None):
+        return None, None, None, None
+
+
+ball_query = BallQuery.apply
+
+
+class QueryAndGroup(nn.Module):
+    def __init__(self, radius: float, nsample: int, use_xyz: bool = True):
+        """
+        :param radius: float, radius of ball
+        :param nsample: int, maximum number of features to gather in the ball
+        :param use_xyz:
+        """
+        super().__init__()
+        self.radius, self.nsample, self.use_xyz = radius, nsample, use_xyz
+
+    def forward(self, xyz: torch.Tensor, new_xyz: torch.Tensor, features: torch.Tensor = None) -> torch.Tensor:
+        """
+        :param xyz: (B, N, 3) xyz coordinates of the features
+        :param new_xyz: (B, npoint, 3) centroids
+        :param features: (B, C, N) descriptors of the features
+        :return:
+            new_features: (B, 3 + C, npoint, nsample)
+        """
+        idx = ball_query(self.radius, self.nsample, xyz, new_xyz)
+        xyz_trans = xyz.transpose(1, 2).contiguous()
+        grouped_xyz = grouping_operation(xyz_trans, idx)  # (B, 3, npoint, nsample)
+        grouped_xyz -= new_xyz.transpose(1, 2).unsqueeze(-1)
+
+        if features is not None:
+            grouped_features = grouping_operation(features, idx)
+            if self.use_xyz:
+                new_features = torch.cat([grouped_xyz, grouped_features], dim = 1)  # (B, C + 3, npoint, nsample)
+            else:
+                new_features = grouped_features
+        else:
+            assert self.use_xyz, "use_xyz must be True when no point-wise features are given!"
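+            # no point-wise features were given, so the grouped relative coordinates serve as the features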
+ new_features = grouped_xyz + + return new_features + + +class GroupAll(nn.Module): + def __init__(self, use_xyz: bool = True): + super().__init__() + self.use_xyz = use_xyz + + def forward(self, xyz: torch.Tensor, new_xyz: torch.Tensor, features: torch.Tensor = None): + """ + :param xyz: (B, N, 3) xyz coordinates of the features + :param new_xyz: ignored + :param features: (B, C, N) descriptors of the features + :return: + new_features: (B, C + 3, 1, N) + """ + grouped_xyz = xyz.transpose(1, 2).unsqueeze(2) + if features is not None: + grouped_features = features.unsqueeze(2) + if self.use_xyz: + new_features = torch.cat([grouped_xyz, grouped_features], dim = 1) # (B, 3 + C, 1, N) + else: + new_features = grouped_features + else: + new_features = grouped_xyz + + return new_features diff --git a/pointnet2_lib/pointnet2/pytorch_utils.py b/pointnet2_lib/pointnet2/pytorch_utils.py new file mode 100644 index 0000000..8f80bc1 --- /dev/null +++ b/pointnet2_lib/pointnet2/pytorch_utils.py @@ -0,0 +1,235 @@ +import torch.nn as nn +from typing import List, Tuple + + +class SharedMLP(nn.Sequential): + + def __init__( + self, + args: List[int], + *, + bn: bool = False, + activation = nn.ReLU(inplace = True), + preact: bool = False, + first: bool = False, + name: str = "", + instance_norm: bool = False, + ): + super().__init__() + + for i in range(len(args) - 1): + self.add_module( + name + 'layer{}'.format(i), + Conv2d( + args[i], + args[i + 1], + bn = (not first or not preact or (i != 0)) and bn, + activation = activation + if (not first or not preact or (i != 0)) else None, + preact = preact, + instance_norm = instance_norm + ) + ) + + +class _ConvBase(nn.Sequential): + + def __init__( + self, + in_size, + out_size, + kernel_size, + stride, + padding, + activation, + bn, + init, + conv = None, + batch_norm = None, + bias = True, + preact = False, + name = "", + instance_norm = False, + instance_norm_func = None + ): + super().__init__() + + bias = bias and (not bn) + conv_unit = conv( + in_size, + out_size, + kernel_size = kernel_size, + stride = stride, + padding = padding, + bias = bias + ) + init(conv_unit.weight) + if bias: + nn.init.constant_(conv_unit.bias, 0) + + if bn: + if not preact: + bn_unit = batch_norm(out_size) + else: + bn_unit = batch_norm(in_size) + if instance_norm: + if not preact: + in_unit = instance_norm_func(out_size, affine = False, track_running_stats = False) + else: + in_unit = instance_norm_func(in_size, affine = False, track_running_stats = False) + + if preact: + if bn: + self.add_module(name + 'bn', bn_unit) + + if activation is not None: + self.add_module(name + 'activation', activation) + + if not bn and instance_norm: + self.add_module(name + 'in', in_unit) + + self.add_module(name + 'conv', conv_unit) + + if not preact: + if bn: + self.add_module(name + 'bn', bn_unit) + + if activation is not None: + self.add_module(name + 'activation', activation) + + if not bn and instance_norm: + self.add_module(name + 'in', in_unit) + + +class _BNBase(nn.Sequential): + + def __init__(self, in_size, batch_norm = None, name = ""): + super().__init__() + self.add_module(name + "bn", batch_norm(in_size)) + + nn.init.constant_(self[0].weight, 1.0) + nn.init.constant_(self[0].bias, 0) + + +class BatchNorm1d(_BNBase): + + def __init__(self, in_size: int, *, name: str = ""): + super().__init__(in_size, batch_norm = nn.BatchNorm1d, name = name) + + +class BatchNorm2d(_BNBase): + + def __init__(self, in_size: int, name: str = ""): + super().__init__(in_size, batch_norm = 
nn.BatchNorm2d, name = name)
+
+
+class Conv1d(_ConvBase):
+
+    def __init__(
+            self,
+            in_size: int,
+            out_size: int,
+            *,
+            kernel_size: int = 1,
+            stride: int = 1,
+            padding: int = 0,
+            activation = nn.ReLU(inplace = True),
+            bn: bool = False,
+            init = nn.init.kaiming_normal_,
+            bias: bool = True,
+            preact: bool = False,
+            name: str = "",
+            instance_norm = False
+    ):
+        super().__init__(
+            in_size,
+            out_size,
+            kernel_size,
+            stride,
+            padding,
+            activation,
+            bn,
+            init,
+            conv = nn.Conv1d,
+            batch_norm = BatchNorm1d,
+            bias = bias,
+            preact = preact,
+            name = name,
+            instance_norm = instance_norm,
+            instance_norm_func = nn.InstanceNorm1d
+        )
+
+
+class Conv2d(_ConvBase):
+
+    def __init__(
+            self,
+            in_size: int,
+            out_size: int,
+            *,
+            kernel_size: Tuple[int, int] = (1, 1),
+            stride: Tuple[int, int] = (1, 1),
+            padding: Tuple[int, int] = (0, 0),
+            activation = nn.ReLU(inplace = True),
+            bn: bool = False,
+            init = nn.init.kaiming_normal_,
+            bias: bool = True,
+            preact: bool = False,
+            name: str = "",
+            instance_norm = False
+    ):
+        super().__init__(
+            in_size,
+            out_size,
+            kernel_size,
+            stride,
+            padding,
+            activation,
+            bn,
+            init,
+            conv = nn.Conv2d,
+            batch_norm = BatchNorm2d,
+            bias = bias,
+            preact = preact,
+            name = name,
+            instance_norm = instance_norm,
+            instance_norm_func = nn.InstanceNorm2d
+        )
+
+
+class FC(nn.Sequential):
+
+    def __init__(
+            self,
+            in_size: int,
+            out_size: int,
+            *,
+            activation = nn.ReLU(inplace = True),
+            bn: bool = False,
+            init = None,
+            preact: bool = False,
+            name: str = ""
+    ):
+        super().__init__()
+
+        fc = nn.Linear(in_size, out_size, bias = not bn)
+        if init is not None:
+            init(fc.weight)
+        if not bn:
+            nn.init.constant_(fc.bias, 0)
+
+        if preact:
+            if bn:
+                self.add_module(name + 'bn', BatchNorm1d(in_size))
+
+            if activation is not None:
+                self.add_module(name + 'activation', activation)
+
+        self.add_module(name + 'fc', fc)
+
+        if not preact:
+            if bn:
+                self.add_module(name + 'bn', BatchNorm1d(out_size))
+
+            if activation is not None:
+                self.add_module(name + 'activation', activation)
diff --git a/pointnet2_lib/pointnet2/setup.py b/pointnet2_lib/pointnet2/setup.py
new file mode 100644
index 0000000..0543ee7
--- /dev/null
+++ b/pointnet2_lib/pointnet2/setup.py
@@ -0,0 +1,23 @@
+from setuptools import setup
+from torch.utils.cpp_extension import BuildExtension, CUDAExtension
+
+setup(
+    name = 'pointnet2',
+    ext_modules = [
+        CUDAExtension('pointnet2_cuda', [
+            'src/pointnet2_api.cpp',
+
+            'src/ball_query.cpp',
+            'src/ball_query_gpu.cu',
+            'src/group_points.cpp',
+            'src/group_points_gpu.cu',
+            'src/interpolate.cpp',
+            'src/interpolate_gpu.cu',
+            'src/sampling.cpp',
+            'src/sampling_gpu.cu',
+        ],
+        extra_compile_args = {'cxx': ['-g'],
+                              'nvcc': ['-O2']})
+    ],
+    cmdclass = {'build_ext': BuildExtension}
+)
diff --git a/pointnet2_lib/pointnet2/src/ball_query.cpp b/pointnet2_lib/pointnet2/src/ball_query.cpp
new file mode 100644
index 0000000..c5b06e1
--- /dev/null
+++ b/pointnet2_lib/pointnet2/src/ball_query.cpp
@@ -0,0 +1,25 @@
+#include <torch/serialize/tensor.h>
+#include <vector>
+#include <THC/THC.h>
+#include <cuda.h>
+#include <cuda_runtime_api.h>
+#include "ball_query_gpu.h"
+
+extern THCState *state;
+
+#define CHECK_CUDA(x) AT_CHECK(x.type().is_cuda(), #x, " must be a CUDAtensor ")
+#define CHECK_CONTIGUOUS(x) AT_CHECK(x.is_contiguous(), #x, " must be contiguous ")
+#define CHECK_INPUT(x) CHECK_CUDA(x);CHECK_CONTIGUOUS(x)
+
+int ball_query_wrapper_fast(int b, int n, int m, float radius, int nsample,
+    at::Tensor new_xyz_tensor, at::Tensor xyz_tensor, at::Tensor idx_tensor) {
+    CHECK_INPUT(new_xyz_tensor);
+    CHECK_INPUT(xyz_tensor);
+    const float *new_xyz = new_xyz_tensor.data<float>();
+    const float *xyz = xyz_tensor.data<float>();
+    int *idx = idx_tensor.data<int>();
+
+    cudaStream_t stream = THCState_getCurrentStream(state);
+    ball_query_kernel_launcher_fast(b, n, m, radius, nsample, new_xyz, xyz, idx, stream);
+    return 1;
+}
\ No newline at end of file
diff --git a/pointnet2_lib/pointnet2/src/ball_query_gpu.cu b/pointnet2_lib/pointnet2/src/ball_query_gpu.cu
new file mode 100644
index 0000000..f8840aa
--- /dev/null
+++ b/pointnet2_lib/pointnet2/src/ball_query_gpu.cu
@@ -0,0 +1,67 @@
+#include <math.h>
+#include <stdio.h>
+#include <stdlib.h>
+
+#include "ball_query_gpu.h"
+#include "cuda_utils.h"
+
+
+__global__ void ball_query_kernel_fast(int b, int n, int m, float radius, int nsample,
+    const float *__restrict__ new_xyz, const float *__restrict__ xyz, int *__restrict__ idx) {
+    // new_xyz: (B, M, 3)
+    // xyz: (B, N, 3)
+    // output:
+    //      idx: (B, M, nsample)
+    int bs_idx = blockIdx.y;
+    int pt_idx = blockIdx.x * blockDim.x + threadIdx.x;
+    if (bs_idx >= b || pt_idx >= m) return;
+
+    new_xyz += bs_idx * m * 3 + pt_idx * 3;
+    xyz += bs_idx * n * 3;
+    idx += bs_idx * m * nsample + pt_idx * nsample;
+
+    float radius2 = radius * radius;
+    float new_x = new_xyz[0];
+    float new_y = new_xyz[1];
+    float new_z = new_xyz[2];
+
+    int cnt = 0;
+    for (int k = 0; k < n; ++k) {
+        float x = xyz[k * 3 + 0];
+        float y = xyz[k * 3 + 1];
+        float z = xyz[k * 3 + 2];
+        float d2 = (new_x - x) * (new_x - x) + (new_y - y) * (new_y - y) + (new_z - z) * (new_z - z);
+        if (d2 < radius2){
+            // pre-fill the whole slot with the first neighbor so unused entries stay valid
+            if (cnt == 0){
+                for (int l = 0; l < nsample; ++l) {
+                    idx[l] = k;
+                }
+            }
+            idx[cnt] = k;
+            ++cnt;
+            if (cnt >= nsample) break;
+        }
+    }
+}
+
+
+void ball_query_kernel_launcher_fast(int b, int n, int m, float radius, int nsample, \
+    const float *new_xyz, const float *xyz, int *idx, cudaStream_t stream) {
+    // new_xyz: (B, M, 3)
+    // xyz: (B, N, 3)
+    // output:
+    //      idx: (B, M, nsample)
+
+    cudaError_t err;
+
+    dim3 blocks(DIVUP(m, THREADS_PER_BLOCK), b);  // blockIdx.x(col), blockIdx.y(row)
+    dim3 threads(THREADS_PER_BLOCK);
+
+    ball_query_kernel_fast<<<blocks, threads, 0, stream>>>(b, n, m, radius, nsample, new_xyz, xyz, idx);
+    // cudaDeviceSynchronize();  // for using printf in kernel function
+    err = cudaGetLastError();
+    if (cudaSuccess != err) {
+        fprintf(stderr, "CUDA kernel failed : %s\n", cudaGetErrorString(err));
+        exit(-1);
+    }
+}
\ No newline at end of file
diff --git a/pointnet2_lib/pointnet2/src/ball_query_gpu.h b/pointnet2_lib/pointnet2/src/ball_query_gpu.h
new file mode 100644
index 0000000..ffc831a
--- /dev/null
+++ b/pointnet2_lib/pointnet2/src/ball_query_gpu.h
@@ -0,0 +1,15 @@
+#ifndef _BALL_QUERY_GPU_H
+#define _BALL_QUERY_GPU_H
+
+#include <torch/serialize/tensor.h>
+#include <vector>
+#include <cuda.h>
+#include <cuda_runtime_api.h>
+
+int ball_query_wrapper_fast(int b, int n, int m, float radius, int nsample,
+    at::Tensor new_xyz_tensor, at::Tensor xyz_tensor, at::Tensor idx_tensor);
+
+void ball_query_kernel_launcher_fast(int b, int n, int m, float radius, int nsample,
+    const float *xyz, const float *new_xyz, int *idx, cudaStream_t stream);
+
+#endif
diff --git a/pointnet2_lib/pointnet2/src/cuda_utils.h b/pointnet2_lib/pointnet2/src/cuda_utils.h
new file mode 100644
index 0000000..7fe2796
--- /dev/null
+++ b/pointnet2_lib/pointnet2/src/cuda_utils.h
@@ -0,0 +1,15 @@
+#ifndef _CUDA_UTILS_H
+#define _CUDA_UTILS_H
+
+#include <cmath>
+
+#define TOTAL_THREADS 1024
+#define THREADS_PER_BLOCK 256
+#define DIVUP(m,n) ((m) / (n) + ((m) % (n) > 0))
+
+inline int opt_n_threads(int work_size) {
+    const int pow_2 = std::log(static_cast<double>(work_size)) / std::log(2.0);
+
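+    // clamp the rounded-down power of two to [1, TOTAL_THREADS]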
+    return max(min(1 << pow_2, TOTAL_THREADS), 1);
+}
+#endif
diff --git a/pointnet2_lib/pointnet2/src/group_points.cpp b/pointnet2_lib/pointnet2/src/group_points.cpp
new file mode 100644
index 0000000..6bb577d
--- /dev/null
+++ b/pointnet2_lib/pointnet2/src/group_points.cpp
@@ -0,0 +1,36 @@
+#include <torch/serialize/tensor.h>
+#include <cuda.h>
+#include <cuda_runtime_api.h>
+#include <vector>
+#include <THC/THC.h>
+#include "group_points_gpu.h"
+
+extern THCState *state;
+
+
+int group_points_grad_wrapper_fast(int b, int c, int n, int npoints, int nsample,
+    at::Tensor grad_out_tensor, at::Tensor idx_tensor, at::Tensor grad_points_tensor) {
+
+    float *grad_points = grad_points_tensor.data<float>();
+    const int *idx = idx_tensor.data<int>();
+    const float *grad_out = grad_out_tensor.data<float>();
+
+    cudaStream_t stream = THCState_getCurrentStream(state);
+
+    group_points_grad_kernel_launcher_fast(b, c, n, npoints, nsample, grad_out, idx, grad_points, stream);
+    return 1;
+}
+
+
+int group_points_wrapper_fast(int b, int c, int n, int npoints, int nsample,
+    at::Tensor points_tensor, at::Tensor idx_tensor, at::Tensor out_tensor) {
+
+    const float *points = points_tensor.data<float>();
+    const int *idx = idx_tensor.data<int>();
+    float *out = out_tensor.data<float>();
+
+    cudaStream_t stream = THCState_getCurrentStream(state);
+
+    group_points_kernel_launcher_fast(b, c, n, npoints, nsample, points, idx, out, stream);
+    return 1;
+}
\ No newline at end of file
diff --git a/pointnet2_lib/pointnet2/src/group_points_gpu.cu b/pointnet2_lib/pointnet2/src/group_points_gpu.cu
new file mode 100644
index 0000000..c015a81
--- /dev/null
+++ b/pointnet2_lib/pointnet2/src/group_points_gpu.cu
@@ -0,0 +1,86 @@
+#include <stdio.h>
+#include <stdlib.h>
+
+#include "cuda_utils.h"
+#include "group_points_gpu.h"
+
+
+__global__ void group_points_grad_kernel_fast(int b, int c, int n, int npoints, int nsample,
+    const float *__restrict__ grad_out, const int *__restrict__ idx, float *__restrict__ grad_points) {
+    // grad_out: (B, C, npoints, nsample)
+    // idx: (B, npoints, nsample)
+    // output:
+    //      grad_points: (B, C, N)
+    int bs_idx = blockIdx.z;
+    int c_idx = blockIdx.y;
+    int index = blockIdx.x * blockDim.x + threadIdx.x;
+    int pt_idx = index / nsample;
+    if (bs_idx >= b || c_idx >= c || pt_idx >= npoints) return;
+
+    int sample_idx = index % nsample;
+    grad_out += bs_idx * c * npoints * nsample + c_idx * npoints * nsample + pt_idx * nsample + sample_idx;
+    idx += bs_idx * npoints * nsample + pt_idx * nsample + sample_idx;
+
+    atomicAdd(grad_points + bs_idx * c * n + c_idx * n + idx[0], grad_out[0]);
+}
+
+void group_points_grad_kernel_launcher_fast(int b, int c, int n, int npoints, int nsample,
+    const float *grad_out, const int *idx, float *grad_points, cudaStream_t stream) {
+    // grad_out: (B, C, npoints, nsample)
+    // idx: (B, npoints, nsample)
+    // output:
+    //      grad_points: (B, C, N)
+    cudaError_t err;
+    dim3 blocks(DIVUP(npoints * nsample, THREADS_PER_BLOCK), c, b);  // blockIdx.x(col), blockIdx.y(row)
+    dim3 threads(THREADS_PER_BLOCK);
+
+    group_points_grad_kernel_fast<<<blocks, threads, 0, stream>>>(b, c, n, npoints, nsample, grad_out, idx, grad_points);
+
+    err = cudaGetLastError();
+    if (cudaSuccess != err) {
+        fprintf(stderr, "CUDA kernel failed : %s\n", cudaGetErrorString(err));
+        exit(-1);
+    }
+}
+
+
+__global__ void group_points_kernel_fast(int b, int c, int n, int npoints, int nsample,
+    const float *__restrict__ points, const int *__restrict__ idx, float *__restrict__ out) {
+    // points: (B, C, N)
+    // idx: (B, npoints, nsample)
+    // output:
+    //      out: (B, C, npoints, nsample)
+    int bs_idx = blockIdx.z;
+    int c_idx = blockIdx.y;
+    int index = blockIdx.x * blockDim.x + threadIdx.x;
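+    // one thread handles one (point, sample) pair of the flattened npoints x nsample grid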
+    int pt_idx = index / nsample;
+    if (bs_idx >= b || c_idx >= c || pt_idx >= npoints) return;
+
+    int sample_idx = index % nsample;
+
+    idx += bs_idx * npoints * nsample + pt_idx * nsample + sample_idx;
+    int in_idx = bs_idx * c * n + c_idx * n + idx[0];
+    int out_idx = bs_idx * c * npoints * nsample + c_idx * npoints * nsample + pt_idx * nsample + sample_idx;
+
+    out[out_idx] = points[in_idx];
+}
+
+
+void group_points_kernel_launcher_fast(int b, int c, int n, int npoints, int nsample,
+    const float *points, const int *idx, float *out, cudaStream_t stream) {
+    // points: (B, C, N)
+    // idx: (B, npoints, nsample)
+    // output:
+    //      out: (B, C, npoints, nsample)
+    cudaError_t err;
+    dim3 blocks(DIVUP(npoints * nsample, THREADS_PER_BLOCK), c, b);  // blockIdx.x(col), blockIdx.y(row)
+    dim3 threads(THREADS_PER_BLOCK);
+
+    group_points_kernel_fast<<<blocks, threads, 0, stream>>>(b, c, n, npoints, nsample, points, idx, out);
+    // cudaDeviceSynchronize();  // for using printf in kernel function
+    err = cudaGetLastError();
+    if (cudaSuccess != err) {
+        fprintf(stderr, "CUDA kernel failed : %s\n", cudaGetErrorString(err));
+        exit(-1);
+    }
+}
diff --git a/pointnet2_lib/pointnet2/src/group_points_gpu.h b/pointnet2_lib/pointnet2/src/group_points_gpu.h
new file mode 100644
index 0000000..76c73ca
--- /dev/null
+++ b/pointnet2_lib/pointnet2/src/group_points_gpu.h
@@ -0,0 +1,22 @@
+#ifndef _GROUP_POINTS_GPU_H
+#define _GROUP_POINTS_GPU_H
+
+#include <torch/serialize/tensor.h>
+#include <cuda.h>
+#include <cuda_runtime_api.h>
+#include <vector>
+
+
+int group_points_wrapper_fast(int b, int c, int n, int npoints, int nsample,
+    at::Tensor points_tensor, at::Tensor idx_tensor, at::Tensor out_tensor);
+
+void group_points_kernel_launcher_fast(int b, int c, int n, int npoints, int nsample,
+    const float *points, const int *idx, float *out, cudaStream_t stream);
+
+int group_points_grad_wrapper_fast(int b, int c, int n, int npoints, int nsample,
+    at::Tensor grad_out_tensor, at::Tensor idx_tensor, at::Tensor grad_points_tensor);
+
+void group_points_grad_kernel_launcher_fast(int b, int c, int n, int npoints, int nsample,
+    const float *grad_out, const int *idx, float *grad_points, cudaStream_t stream);
+
+#endif
diff --git a/pointnet2_lib/pointnet2/src/interpolate.cpp b/pointnet2_lib/pointnet2/src/interpolate.cpp
new file mode 100644
index 0000000..6c1724c
--- /dev/null
+++ b/pointnet2_lib/pointnet2/src/interpolate.cpp
@@ -0,0 +1,54 @@
+#include <torch/serialize/tensor.h>
+#include <vector>
+#include <THC/THC.h>
+#include <math.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include <cuda.h>
+#include <cuda_runtime_api.h>
+#include "interpolate_gpu.h"
+
+extern THCState *state;
+
+
+void three_nn_wrapper_fast(int b, int n, int m, at::Tensor unknown_tensor,
+    at::Tensor known_tensor, at::Tensor dist2_tensor, at::Tensor idx_tensor) {
+    const float *unknown = unknown_tensor.data<float>();
+    const float *known = known_tensor.data<float>();
+    float *dist2 = dist2_tensor.data<float>();
+    int *idx = idx_tensor.data<int>();
+
+    cudaStream_t stream = THCState_getCurrentStream(state);
+    three_nn_kernel_launcher_fast(b, n, m, unknown, known, dist2, idx, stream);
+}
+
+
+void three_interpolate_wrapper_fast(int b, int c, int m, int n,
+    at::Tensor points_tensor,
+    at::Tensor idx_tensor,
+    at::Tensor weight_tensor,
+    at::Tensor out_tensor) {
+
+    const float *points = points_tensor.data<float>();
+    const float *weight = weight_tensor.data<float>();
+    float *out = out_tensor.data<float>();
+    const int *idx = idx_tensor.data<int>();
+
+    cudaStream_t stream = THCState_getCurrentStream(state);
+    three_interpolate_kernel_launcher_fast(b, c, m, n, points, idx, weight, out, stream);
+}
+
+void three_interpolate_grad_wrapper_fast(int b, int c, int n, int m,
+    at::Tensor grad_out_tensor,
+    at::Tensor idx_tensor,
+    at::Tensor weight_tensor,
+    at::Tensor grad_points_tensor) {
+
+    const float *grad_out = grad_out_tensor.data<float>();
+    const float *weight = weight_tensor.data<float>();
+    float *grad_points = grad_points_tensor.data<float>();
+    const int *idx = idx_tensor.data<int>();
+
+    cudaStream_t stream = THCState_getCurrentStream(state);
+    three_interpolate_grad_kernel_launcher_fast(b, c, n, m, grad_out, idx, weight, grad_points, stream);
+}
\ No newline at end of file
diff --git a/pointnet2_lib/pointnet2/src/interpolate_gpu.cu b/pointnet2_lib/pointnet2/src/interpolate_gpu.cu
new file mode 100644
index 0000000..a123dd8
--- /dev/null
+++ b/pointnet2_lib/pointnet2/src/interpolate_gpu.cu
@@ -0,0 +1,161 @@
+#include <math.h>
+#include <stdio.h>
+#include <stdlib.h>
+
+#include "cuda_utils.h"
+#include "interpolate_gpu.h"
+
+
+__global__ void three_nn_kernel_fast(int b, int n, int m, const float *__restrict__ unknown,
+    const float *__restrict__ known, float *__restrict__ dist2, int *__restrict__ idx) {
+    // unknown: (B, N, 3)
+    // known: (B, M, 3)
+    // output:
+    //      dist2: (B, N, 3)
+    //      idx: (B, N, 3)
+
+    int bs_idx = blockIdx.y;
+    int pt_idx = blockIdx.x * blockDim.x + threadIdx.x;
+    if (bs_idx >= b || pt_idx >= n) return;
+
+    unknown += bs_idx * n * 3 + pt_idx * 3;
+    known += bs_idx * m * 3;
+    dist2 += bs_idx * n * 3 + pt_idx * 3;
+    idx += bs_idx * n * 3 + pt_idx * 3;
+
+    float ux = unknown[0];
+    float uy = unknown[1];
+    float uz = unknown[2];
+
+    double best1 = 1e40, best2 = 1e40, best3 = 1e40;
+    int besti1 = 0, besti2 = 0, besti3 = 0;
+    for (int k = 0; k < m; ++k) {
+        float x = known[k * 3 + 0];
+        float y = known[k * 3 + 1];
+        float z = known[k * 3 + 2];
+        float d = (ux - x) * (ux - x) + (uy - y) * (uy - y) + (uz - z) * (uz - z);
+        if (d < best1) {
+            best3 = best2; besti3 = besti2;
+            best2 = best1; besti2 = besti1;
+            best1 = d; besti1 = k;
+        }
+        else if (d < best2) {
+            best3 = best2; besti3 = besti2;
+            best2 = d; besti2 = k;
+        }
+        else if (d < best3) {
+            best3 = d; besti3 = k;
+        }
+    }
+    dist2[0] = best1; dist2[1] = best2; dist2[2] = best3;
+    idx[0] = besti1; idx[1] = besti2; idx[2] = besti3;
+}
+
+
+void three_nn_kernel_launcher_fast(int b, int n, int m, const float *unknown,
+    const float *known, float *dist2, int *idx, cudaStream_t stream) {
+    // unknown: (B, N, 3)
+    // known: (B, M, 3)
+    // output:
+    //      dist2: (B, N, 3)
+    //      idx: (B, N, 3)
+
+    cudaError_t err;
+    dim3 blocks(DIVUP(n, THREADS_PER_BLOCK), b);  // blockIdx.x(col), blockIdx.y(row)
+    dim3 threads(THREADS_PER_BLOCK);
+
+    three_nn_kernel_fast<<<blocks, threads, 0, stream>>>(b, n, m, unknown, known, dist2, idx);
+
+    err = cudaGetLastError();
+    if (cudaSuccess != err) {
+        fprintf(stderr, "CUDA kernel failed : %s\n", cudaGetErrorString(err));
+        exit(-1);
+    }
+}
+
+
+__global__ void three_interpolate_kernel_fast(int b, int c, int m, int n, const float *__restrict__ points,
+    const int *__restrict__ idx, const float *__restrict__ weight, float *__restrict__ out) {
+    // points: (B, C, M)
+    // idx: (B, N, 3)
+    // weight: (B, N, 3)
+    // output:
+    //      out: (B, C, N)
+
+    int bs_idx = blockIdx.z;
+    int c_idx = blockIdx.y;
+    int pt_idx = blockIdx.x * blockDim.x + threadIdx.x;
+
+    if (bs_idx >= b || c_idx >= c || pt_idx >= n) return;
+
+    weight += bs_idx * n * 3 + pt_idx * 3;
+    points += bs_idx * c * m + c_idx * m;
+    idx += bs_idx * n * 3 + pt_idx * 3;
+    out += bs_idx * c * n + c_idx * n;
+
+    out[pt_idx] = weight[0] * points[idx[0]] + weight[1] * points[idx[1]] + weight[2] * points[idx[2]];
+}
+
+void three_interpolate_kernel_launcher_fast(int b, int c, int m, int n,
+    const float *points, const int *idx, const float *weight, float *out, cudaStream_t stream) {
+    // points: (B, C, M)
+    // idx: (B, N, 3)
+    // weight: (B, N, 3)
+    // output:
+    //      out: (B, C, N)
+
+    cudaError_t err;
+    dim3 blocks(DIVUP(n, THREADS_PER_BLOCK), c, b);  // blockIdx.x(col), blockIdx.y(row)
+    dim3 threads(THREADS_PER_BLOCK);
+    three_interpolate_kernel_fast<<<blocks, threads, 0, stream>>>(b, c, m, n, points, idx, weight, out);
+
+    err = cudaGetLastError();
+    if (cudaSuccess != err) {
+        fprintf(stderr, "CUDA kernel failed : %s\n", cudaGetErrorString(err));
+        exit(-1);
+    }
+}
+
+
+__global__ void three_interpolate_grad_kernel_fast(int b, int c, int n, int m, const float *__restrict__ grad_out,
+    const int *__restrict__ idx, const float *__restrict__ weight, float *__restrict__ grad_points) {
+    // grad_out: (B, C, N)
+    // weight: (B, N, 3)
+    // output:
+    //      grad_points: (B, C, M)
+
+    int bs_idx = blockIdx.z;
+    int c_idx = blockIdx.y;
+    int pt_idx = blockIdx.x * blockDim.x + threadIdx.x;
+
+    if (bs_idx >= b || c_idx >= c || pt_idx >= n) return;
+
+    grad_out += bs_idx * c * n + c_idx * n + pt_idx;
+    weight += bs_idx * n * 3 + pt_idx * 3;
+    grad_points += bs_idx * c * m + c_idx * m;
+    idx += bs_idx * n * 3 + pt_idx * 3;
+
+
+    atomicAdd(grad_points + idx[0], grad_out[0] * weight[0]);
+    atomicAdd(grad_points + idx[1], grad_out[0] * weight[1]);
+    atomicAdd(grad_points + idx[2], grad_out[0] * weight[2]);
+}
+
+void three_interpolate_grad_kernel_launcher_fast(int b, int c, int n, int m, const float *grad_out,
+    const int *idx, const float *weight, float *grad_points, cudaStream_t stream) {
+    // grad_out: (B, C, N)
+    // weight: (B, N, 3)
+    // output:
+    //      grad_points: (B, C, M)
+
+    cudaError_t err;
+    dim3 blocks(DIVUP(n, THREADS_PER_BLOCK), c, b);  // blockIdx.x(col), blockIdx.y(row)
+    dim3 threads(THREADS_PER_BLOCK);
+    three_interpolate_grad_kernel_fast<<<blocks, threads, 0, stream>>>(b, c, n, m, grad_out, idx, weight, grad_points);
+
+    err = cudaGetLastError();
+    if (cudaSuccess != err) {
+        fprintf(stderr, "CUDA kernel failed : %s\n", cudaGetErrorString(err));
+        exit(-1);
+    }
+}
\ No newline at end of file
diff --git a/pointnet2_lib/pointnet2/src/interpolate_gpu.h b/pointnet2_lib/pointnet2/src/interpolate_gpu.h
new file mode 100644
index 0000000..f177108
--- /dev/null
+++ b/pointnet2_lib/pointnet2/src/interpolate_gpu.h
@@ -0,0 +1,30 @@
+#ifndef _INTERPOLATE_GPU_H
+#define _INTERPOLATE_GPU_H
+
+#include <torch/serialize/tensor.h>
+#include <vector>
+#include <cuda.h>
+#include <cuda_runtime_api.h>
+
+
+void three_nn_wrapper_fast(int b, int n, int m, at::Tensor unknown_tensor,
+    at::Tensor known_tensor, at::Tensor dist2_tensor, at::Tensor idx_tensor);
+
+void three_nn_kernel_launcher_fast(int b, int n, int m, const float *unknown,
+    const float *known, float *dist2, int *idx, cudaStream_t stream);
+
+
+void three_interpolate_wrapper_fast(int b, int c, int m, int n, at::Tensor points_tensor,
+    at::Tensor idx_tensor, at::Tensor weight_tensor, at::Tensor out_tensor);
+
+void three_interpolate_kernel_launcher_fast(int b, int c, int m, int n,
+    const float *points, const int *idx, const float *weight, float *out, cudaStream_t stream);
+
+
+void three_interpolate_grad_wrapper_fast(int b, int c, int n, int m, at::Tensor grad_out_tensor,
+    at::Tensor idx_tensor, at::Tensor weight_tensor, at::Tensor grad_points_tensor);
+
+void three_interpolate_grad_kernel_launcher_fast(int b, int c, int n, int m, const float *grad_out,
+    const int *idx, const float *weight, float *grad_points, cudaStream_t stream);
+
+#endif
diff --git a/pointnet2_lib/pointnet2/src/pointnet2_api.cpp b/pointnet2_lib/pointnet2/src/pointnet2_api.cpp
new file mode 100644
index 0000000..d91f0f2
--- /dev/null
+++ b/pointnet2_lib/pointnet2/src/pointnet2_api.cpp
@@ -0,0 +1,24 @@
+#include <torch/serialize/tensor.h>
+#include <torch/extension.h>
+
+#include "ball_query_gpu.h"
+#include "group_points_gpu.h"
+#include "sampling_gpu.h"
+#include "interpolate_gpu.h"
+
+
+PYBIND11_MODULE(TORCH_EXTENSION_NAME, m) {
+    m.def("ball_query_wrapper", &ball_query_wrapper_fast, "ball_query_wrapper_fast");
+
+    m.def("group_points_wrapper", &group_points_wrapper_fast, "group_points_wrapper_fast");
+    m.def("group_points_grad_wrapper", &group_points_grad_wrapper_fast, "group_points_grad_wrapper_fast");
+
+    m.def("gather_points_wrapper", &gather_points_wrapper_fast, "gather_points_wrapper_fast");
+    m.def("gather_points_grad_wrapper", &gather_points_grad_wrapper_fast, "gather_points_grad_wrapper_fast");
+
+    m.def("furthest_point_sampling_wrapper", &furthest_point_sampling_wrapper, "furthest_point_sampling_wrapper");
+
+    m.def("three_nn_wrapper", &three_nn_wrapper_fast, "three_nn_wrapper_fast");
+    m.def("three_interpolate_wrapper", &three_interpolate_wrapper_fast, "three_interpolate_wrapper_fast");
+    m.def("three_interpolate_grad_wrapper", &three_interpolate_grad_wrapper_fast, "three_interpolate_grad_wrapper_fast");
+}
diff --git a/pointnet2_lib/pointnet2/src/sampling.cpp b/pointnet2_lib/pointnet2/src/sampling.cpp
new file mode 100644
index 0000000..e0c9df0
--- /dev/null
+++ b/pointnet2_lib/pointnet2/src/sampling.cpp
@@ -0,0 +1,46 @@
+#include <torch/serialize/tensor.h>
+#include <ATen/cuda/CUDAContext.h>
+#include <vector>
+#include <THC/THC.h>
+
+#include "sampling_gpu.h"
+
+extern THCState *state;
+
+
+int gather_points_wrapper_fast(int b, int c, int n, int npoints,
+    at::Tensor points_tensor, at::Tensor idx_tensor, at::Tensor out_tensor){
+    const float *points = points_tensor.data<float>();
+    const int *idx = idx_tensor.data<int>();
+    float *out = out_tensor.data<float>();
+
+    cudaStream_t stream = THCState_getCurrentStream(state);
+    gather_points_kernel_launcher_fast(b, c, n, npoints, points, idx, out, stream);
+    return 1;
+}
+
+
+int gather_points_grad_wrapper_fast(int b, int c, int n, int npoints,
+    at::Tensor grad_out_tensor, at::Tensor idx_tensor, at::Tensor grad_points_tensor) {
+
+    const float *grad_out = grad_out_tensor.data<float>();
+    const int *idx = idx_tensor.data<int>();
+    float *grad_points = grad_points_tensor.data<float>();
+
+    cudaStream_t stream = THCState_getCurrentStream(state);
+    gather_points_grad_kernel_launcher_fast(b, c, n, npoints, grad_out, idx, grad_points, stream);
+    return 1;
+}
+
+
+int furthest_point_sampling_wrapper(int b, int n, int m,
+    at::Tensor points_tensor, at::Tensor temp_tensor, at::Tensor idx_tensor) {
+
+    const float *points = points_tensor.data<float>();
+    float *temp = temp_tensor.data<float>();
+    int *idx = idx_tensor.data<int>();
+
+    cudaStream_t stream = THCState_getCurrentStream(state);
+    furthest_point_sampling_kernel_launcher(b, n, m, points, temp, idx, stream);
+    return 1;
+}
diff --git a/pointnet2_lib/pointnet2/src/sampling_gpu.cu b/pointnet2_lib/pointnet2/src/sampling_gpu.cu
new file mode 100644
index 0000000..9e49a60
--- /dev/null
+++ b/pointnet2_lib/pointnet2/src/sampling_gpu.cu
@@ -0,0 +1,253 @@
+#include <stdio.h>
+#include <stdlib.h>
+
+#include "cuda_utils.h"
+#include "sampling_gpu.h"
+
+
+__global__ void gather_points_kernel_fast(int b, int c, int n, int m,
+    const float *__restrict__ points, const int *__restrict__ idx, float *__restrict__ out) {
+    // points: (B, C, N)
+    // idx: (B, M)
+    // output:
+    //      out: (B, C, M)
+
+    int bs_idx = blockIdx.z;
+    int c_idx = blockIdx.y;
+    int pt_idx = blockIdx.x * blockDim.x + threadIdx.x;
+    if (bs_idx >= b || c_idx >= c || pt_idx >= m) return;
+
+    out += bs_idx * c * m + c_idx * m + pt_idx;
+    idx += bs_idx * m + pt_idx;
+    points += bs_idx * c * n + c_idx * n;
+    out[0] = points[idx[0]];
+}
+
+void gather_points_kernel_launcher_fast(int b, int c, int n, int npoints,
+    const float *points, const int *idx, float *out, cudaStream_t stream) {
+    // points: (B, C, N)
+    // idx: (B, npoints)
+    // output:
+    //      out: (B, C, npoints)
+
+    cudaError_t err;
+    dim3 blocks(DIVUP(npoints, THREADS_PER_BLOCK), c, b);  // blockIdx.x(col), blockIdx.y(row)
+    dim3 threads(THREADS_PER_BLOCK);
+
+    gather_points_kernel_fast<<<blocks, threads, 0, stream>>>(b, c, n, npoints, points, idx, out);
+
+    err = cudaGetLastError();
+    if (cudaSuccess != err) {
+        fprintf(stderr, "CUDA kernel failed : %s\n", cudaGetErrorString(err));
+        exit(-1);
+    }
+}
+
+__global__ void gather_points_grad_kernel_fast(int b, int c, int n, int m, const float *__restrict__ grad_out,
+    const int *__restrict__ idx, float *__restrict__ grad_points) {
+    // grad_out: (B, C, M)
+    // idx: (B, M)
+    // output:
+    //      grad_points: (B, C, N)
+
+    int bs_idx = blockIdx.z;
+    int c_idx = blockIdx.y;
+    int pt_idx = blockIdx.x * blockDim.x + threadIdx.x;
+    if (bs_idx >= b || c_idx >= c || pt_idx >= m) return;
+
+    grad_out += bs_idx * c * m + c_idx * m + pt_idx;
+    idx += bs_idx * m + pt_idx;
+    grad_points += bs_idx * c * n + c_idx * n;
+
+    atomicAdd(grad_points + idx[0], grad_out[0]);
+}
+
+void gather_points_grad_kernel_launcher_fast(int b, int c, int n, int npoints,
+    const float *grad_out, const int *idx, float *grad_points, cudaStream_t stream) {
+    // grad_out: (B, C, npoints)
+    // idx: (B, npoints)
+    // output:
+    //      grad_points: (B, C, N)
+
+    cudaError_t err;
+    dim3 blocks(DIVUP(npoints, THREADS_PER_BLOCK), c, b);  // blockIdx.x(col), blockIdx.y(row)
+    dim3 threads(THREADS_PER_BLOCK);
+
+    gather_points_grad_kernel_fast<<<blocks, threads, 0, stream>>>(b, c, n, npoints, grad_out, idx, grad_points);
+
+    err = cudaGetLastError();
+    if (cudaSuccess != err) {
+        fprintf(stderr, "CUDA kernel failed : %s\n", cudaGetErrorString(err));
+        exit(-1);
+    }
+}
+
+
+__device__ void __update(float *__restrict__ dists, int *__restrict__ dists_i, int idx1, int idx2){
+    const float v1 = dists[idx1], v2 = dists[idx2];
+    const int i1 = dists_i[idx1], i2 = dists_i[idx2];
+    dists[idx1] = max(v1, v2);
+    dists_i[idx1] = v2 > v1 ? i2 : i1;
+}
+
+template <unsigned int block_size>
+__global__ void furthest_point_sampling_kernel(int b, int n, int m,
+    const float *__restrict__ dataset, float *__restrict__ temp, int *__restrict__ idxs) {
+    // dataset: (B, N, 3)
+    // tmp: (B, N)
+    // output:
+    //      idx: (B, M)
+
+    if (m <= 0) return;
+    __shared__ float dists[block_size];
+    __shared__ int dists_i[block_size];
+
+    int batch_index = blockIdx.x;
+    dataset += batch_index * n * 3;
+    temp += batch_index * n;
+    idxs += batch_index * m;
+
+    int tid = threadIdx.x;
+    const int stride = block_size;
+
+    int old = 0;
+    if (threadIdx.x == 0)
+        idxs[0] = old;
+
+    __syncthreads();
+    for (int j = 1; j < m; j++) {
+        int besti = 0;
+        float best = -1;
+        float x1 = dataset[old * 3 + 0];
+        float y1 = dataset[old * 3 + 1];
+        float z1 = dataset[old * 3 + 2];
+        for (int k = tid; k < n; k += stride) {
+            float x2, y2, z2;
+            x2 = dataset[k * 3 + 0];
+            y2 = dataset[k * 3 + 1];
+            z2 = dataset[k * 3 + 2];
+            // float mag = (x2 * x2) + (y2 * y2) + (z2 * z2);
+            // if (mag <= 1e-3)
+            //     continue;
+
+            float d = (x2 - x1) * (x2 - x1) + (y2 - y1) * (y2 - y1) + (z2 - z1) * (z2 - z1);
+            float d2 = min(d, temp[k]);
+            temp[k] = d2;
+            besti = d2 > best ? k : besti;
+            best = d2 > best ? d2 : best;
+        }
+        dists[tid] = best;
+        dists_i[tid] = besti;
+        __syncthreads();
+
+        if (block_size >= 1024) {
+            if (tid < 512) {
+                __update(dists, dists_i, tid, tid + 512);
+            }
+            __syncthreads();
+        }
+
+        if (block_size >= 512) {
+            if (tid < 256) {
+                __update(dists, dists_i, tid, tid + 256);
+            }
+            __syncthreads();
+        }
+        if (block_size >= 256) {
+            if (tid < 128) {
+                __update(dists, dists_i, tid, tid + 128);
+            }
+            __syncthreads();
+        }
+        if (block_size >= 128) {
+            if (tid < 64) {
+                __update(dists, dists_i, tid, tid + 64);
+            }
+            __syncthreads();
+        }
+        if (block_size >= 64) {
+            if (tid < 32) {
+                __update(dists, dists_i, tid, tid + 32);
+            }
+            __syncthreads();
+        }
+        if (block_size >= 32) {
+            if (tid < 16) {
+                __update(dists, dists_i, tid, tid + 16);
+            }
+            __syncthreads();
+        }
+        if (block_size >= 16) {
+            if (tid < 8) {
+                __update(dists, dists_i, tid, tid + 8);
+            }
+            __syncthreads();
+        }
+        if (block_size >= 8) {
+            if (tid < 4) {
+                __update(dists, dists_i, tid, tid + 4);
+            }
+            __syncthreads();
+        }
+        if (block_size >= 4) {
+            if (tid < 2) {
+                __update(dists, dists_i, tid, tid + 2);
+            }
+            __syncthreads();
+        }
+        if (block_size >= 2) {
+            if (tid < 1) {
+                __update(dists, dists_i, tid, tid + 1);
+            }
+            __syncthreads();
+        }
+
+        old = dists_i[0];
+        if (tid == 0)
+            idxs[j] = old;
+    }
+}
+
+void furthest_point_sampling_kernel_launcher(int b, int n, int m,
+    const float *dataset, float *temp, int *idxs, cudaStream_t stream) {
+    // dataset: (B, N, 3)
+    // tmp: (B, N)
+    // output:
+    //      idx: (B, M)
+
+    cudaError_t err;
+    unsigned int n_threads = opt_n_threads(n);
+
+    switch (n_threads) {
+        case 1024:
+            furthest_point_sampling_kernel<1024><<<b, n_threads, 0, stream>>>(b, n, m, dataset, temp, idxs); break;
+        case 512:
+            furthest_point_sampling_kernel<512><<<b, n_threads, 0, stream>>>(b, n, m, dataset, temp, idxs); break;
+        case 256:
+            furthest_point_sampling_kernel<256><<<b, n_threads, 0, stream>>>(b, n, m, dataset, temp, idxs); break;
+        case 128:
+            furthest_point_sampling_kernel<128><<<b, n_threads, 0, stream>>>(b, n, m, dataset, temp, idxs); break;
+        case 64:
+            furthest_point_sampling_kernel<64><<<b, n_threads, 0, stream>>>(b, n, m, dataset, temp, idxs); break;
+        case 32:
+            furthest_point_sampling_kernel<32><<<b, n_threads, 0, stream>>>(b, n, m, dataset, temp, idxs); break;
+        case 16:
+            furthest_point_sampling_kernel<16><<<b, n_threads, 0, stream>>>(b, n, m, dataset, temp, idxs); break;
+        case 8:
+            furthest_point_sampling_kernel<8><<<b, n_threads, 0, stream>>>(b, n, m, dataset, temp, idxs); break;
+        case 4:
+            furthest_point_sampling_kernel<4><<<b, n_threads, 0, stream>>>(b, n, m, dataset, temp, idxs); break;
+        case 2:
+            furthest_point_sampling_kernel<2><<<b, n_threads, 0, stream>>>(b, n, m, dataset, temp, idxs); break;
+        case 1:
+            furthest_point_sampling_kernel<1><<<b, n_threads, 0, stream>>>(b, n, m, dataset, temp, idxs); break;
+        default:
+            furthest_point_sampling_kernel<512><<<b, n_threads, 0, stream>>>(b, n, m, dataset, temp, idxs);
+    }
+
+    err = cudaGetLastError();
+    if (cudaSuccess != err) {
+        fprintf(stderr, "CUDA kernel failed : %s\n", cudaGetErrorString(err));
+        exit(-1);
+    }
+}
diff --git a/pointnet2_lib/pointnet2/src/sampling_gpu.h b/pointnet2_lib/pointnet2/src/sampling_gpu.h
new file mode 100644
index 0000000..6200c59
--- /dev/null
+++ b/pointnet2_lib/pointnet2/src/sampling_gpu.h
@@ -0,0 +1,29 @@
+#ifndef _SAMPLING_GPU_H
+#define _SAMPLING_GPU_H
+
+#include <torch/serialize/tensor.h>
+#include <ATen/cuda/CUDAContext.h>
+#include <vector>
+
+
+int gather_points_wrapper_fast(int b, int c, int n, int npoints,
+    at::Tensor points_tensor, at::Tensor idx_tensor, at::Tensor out_tensor);
+
+void gather_points_kernel_launcher_fast(int b, int c, int n, int npoints,
+    const float *points, const int *idx, float *out, cudaStream_t stream);
+
+
+int gather_points_grad_wrapper_fast(int b, int c, int n, int npoints,
+    at::Tensor grad_out_tensor, at::Tensor idx_tensor,
at::Tensor grad_points_tensor); + +void gather_points_grad_kernel_launcher_fast(int b, int c, int n, int npoints, + const float *grad_out, const int *idx, float *grad_points, cudaStream_t stream); + + +int furthest_point_sampling_wrapper(int b, int n, int m, + at::Tensor points_tensor, at::Tensor temp_tensor, at::Tensor idx_tensor); + +void furthest_point_sampling_kernel_launcher(int b, int n, int m, + const float *dataset, float *temp, int *idxs, cudaStream_t stream); + +#endif diff --git a/pointnet2_lib/tools/_init_path.py b/pointnet2_lib/tools/_init_path.py new file mode 100644 index 0000000..4e22221 --- /dev/null +++ b/pointnet2_lib/tools/_init_path.py @@ -0,0 +1,3 @@ +import os, sys + +sys.path.insert(0, os.path.join(os.path.dirname(os.path.abspath(__file__)), '../')) diff --git a/pointnet2_lib/tools/data/KITTI/ImageSets/test.txt b/pointnet2_lib/tools/data/KITTI/ImageSets/test.txt new file mode 100644 index 0000000..5d39002 --- /dev/null +++ b/pointnet2_lib/tools/data/KITTI/ImageSets/test.txt @@ -0,0 +1,7518 @@ +000000 +000001 +000002 +000003 +000004 +000005 +000006 +000007 +000008 +000009 +000010 +000011 +000012 +000013 +000014 +000015 +000016 +000017 +000018 +000019 +000020 +000021 +000022 +000023 +000024 +000025 +000026 +000027 +000028 +000029 +000030 +000031 +000032 +000033 +000034 +000035 +000036 +000037 +000038 +000039 +000040 +000041 +000042 +000043 +000044 +000045 +000046 +000047 +000048 +000049 +000050 +000051 +000052 +000053 +000054 +000055 +000056 +000057 +000058 +000059 +000060 +000061 +000062 +000063 +000064 +000065 +000066 +000067 +000068 +000069 +000070 +000071 +000072 +000073 +000074 +000075 +000076 +000077 +000078 +000079 +000080 +000081 +000082 +000083 +000084 +000085 +000086 +000087 +000088 +000089 +000090 +000091 +000092 +000093 +000094 +000095 +000096 +000097 +000098 +000099 +000100 +000101 +000102 +000103 +000104 +000105 +000106 +000107 +000108 +000109 +000110 +000111 +000112 +000113 +000114 +000115 +000116 +000117 +000118 +000119 +000120 +000121 +000122 +000123 +000124 +000125 +000126 +000127 +000128 +000129 +000130 +000131 +000132 +000133 +000134 +000135 +000136 +000137 +000138 +000139 +000140 +000141 +000142 +000143 +000144 +000145 +000146 +000147 +000148 +000149 +000150 +000151 +000152 +000153 +000154 +000155 +000156 +000157 +000158 +000159 +000160 +000161 +000162 +000163 +000164 +000165 +000166 +000167 +000168 +000169 +000170 +000171 +000172 +000173 +000174 +000175 +000176 +000177 +000178 +000179 +000180 +000181 +000182 +000183 +000184 +000185 +000186 +000187 +000188 +000189 +000190 +000191 +000192 +000193 +000194 +000195 +000196 +000197 +000198 +000199 +000200 +000201 +000202 +000203 +000204 +000205 +000206 +000207 +000208 +000209 +000210 +000211 +000212 +000213 +000214 +000215 +000216 +000217 +000218 +000219 +000220 +000221 +000222 +000223 +000224 +000225 +000226 +000227 +000228 +000229 +000230 +000231 +000232 +000233 +000234 +000235 +000236 +000237 +000238 +000239 +000240 +000241 +000242 +000243 +000244 +000245 +000246 +000247 +000248 +000249 +000250 +000251 +000252 +000253 +000254 +000255 +000256 +000257 +000258 +000259 +000260 +000261 +000262 +000263 +000264 +000265 +000266 +000267 +000268 +000269 +000270 +000271 +000272 +000273 +000274 +000275 +000276 +000277 +000278 +000279 +000280 +000281 +000282 +000283 +000284 +000285 +000286 +000287 +000288 +000289 +000290 +000291 +000292 +000293 +000294 +000295 +000296 +000297 +000298 +000299 +000300 +000301 +000302 +000303 +000304 +000305 +000306 +000307 +000308 +000309 +000310 +000311 +000312 +000313 +000314 
+000315 +000316 +000317 +000318 +000319 +000320 +000321 +000322 +000323 +000324 +000325 +000326 +000327 +000328 +000329 +000330 +000331 +000332 +000333 +000334 +000335 +000336 +000337 +000338 +000339 +000340 +000341 +000342 +000343 +000344 +000345 +000346 +000347 +000348 +000349 +000350 +000351 +000352 +000353 +000354 +000355 +000356 +000357 +000358 +000359 +000360 +000361 +000362 +000363 +000364 +000365 +000366 +000367 +000368 +000369 +000370 +000371 +000372 +000373 +000374 +000375 +000376 +000377 +000378 +000379 +000380 +000381 +000382 +000383 +000384 +000385 +000386 +000387 +000388 +000389 +000390 +000391 +000392 +000393 +000394 +000395 +000396 +000397 +000398 +000399 +000400 +000401 +000402 +000403 +000404 +000405 +000406 +000407 +000408 +000409 +000410 +000411 +000412 +000413 +000414 +000415 +000416 +000417 +000418 +000419 +000420 +000421 +000422 +000423 +000424 +000425 +000426 +000427 +000428 +000429 +000430 +000431 +000432 +000433 +000434 +000435 +000436 +000437 +000438 +000439 +000440 +000441 +000442 +000443 +000444 +000445 +000446 +000447 +000448 +000449 +000450 +000451 +000452 +000453 +000454 +000455 +000456 +000457 +000458 +000459 +000460 +000461 +000462 +000463 +000464 +000465 +000466 +000467 +000468 +000469 +000470 +000471 +000472 +000473 +000474 +000475 +000476 +000477 +000478 +000479 +000480 +000481 +000482 +000483 +000484 +000485 +000486 +000487 +000488 +000489 +000490 +000491 +000492 +000493 +000494 +000495 +000496 +000497 +000498 +000499 +000500 +000501 +000502 +000503 +000504 +000505 +000506 +000507 +000508 +000509 +000510 +000511 +000512 +000513 +000514 +000515 +000516 +000517 +000518 +000519 +000520 +000521 +000522 +000523 +000524 +000525 +000526 +000527 +000528 +000529 +000530 +000531 +000532 +000533 +000534 +000535 +000536 +000537 +000538 +000539 +000540 +000541 +000542 +000543 +000544 +000545 +000546 +000547 +000548 +000549 +000550 +000551 +000552 +000553 +000554 +000555 +000556 +000557 +000558 +000559 +000560 +000561 +000562 +000563 +000564 +000565 +000566 +000567 +000568 +000569 +000570 +000571 +000572 +000573 +000574 +000575 +000576 +000577 +000578 +000579 +000580 +000581 +000582 +000583 +000584 +000585 +000586 +000587 +000588 +000589 +000590 +000591 +000592 +000593 +000594 +000595 +000596 +000597 +000598 +000599 +000600 +000601 +000602 +000603 +000604 +000605 +000606 +000607 +000608 +000609 +000610 +000611 +000612 +000613 +000614 +000615 +000616 +000617 +000618 +000619 +000620 +000621 +000622 +000623 +000624 +000625 +000626 +000627 +000628 +000629 +000630 +000631 +000632 +000633 +000634 +000635 +000636 +000637 +000638 +000639 +000640 +000641 +000642 +000643 +000644 +000645 +000646 +000647 +000648 +000649 +000650 +000651 +000652 +000653 +000654 +000655 +000656 +000657 +000658 +000659 +000660 +000661 +000662 +000663 +000664 +000665 +000666 +000667 +000668 +000669 +000670 +000671 +000672 +000673 +000674 +000675 +000676 +000677 +000678 +000679 +000680 +000681 +000682 +000683 +000684 +000685 +000686 +000687 +000688 +000689 +000690 +000691 +000692 +000693 +000694 +000695 +000696 +000697 +000698 +000699 +000700 +000701 +000702 +000703 +000704 +000705 +000706 +000707 +000708 +000709 +000710 +000711 +000712 +000713 +000714 +000715 +000716 +000717 +000718 +000719 +000720 +000721 +000722 +000723 +000724 +000725 +000726 +000727 +000728 +000729 +000730 +000731 +000732 +000733 +000734 +000735 +000736 +000737 +000738 +000739 +000740 +000741 +000742 +000743 +000744 +000745 +000746 +000747 +000748 +000749 +000750 +000751 +000752 +000753 +000754 +000755 +000756 +000757 +000758 
+000759 +000760 +000761 +000762 +000763 +000764 +000765 +000766 +000767 +000768 +000769 +000770 +000771 +000772 +000773 +000774 +000775 +000776 +000777 +000778 +000779 +000780 +000781 +000782 +000783 +000784 +000785 +000786 +000787 +000788 +000789 +000790 +000791 +000792 +000793 +000794 +000795 +000796 +000797 +000798 +000799 +000800 +000801 +000802 +000803 +000804 +000805 +000806 +000807 +000808 +000809 +000810 +000811 +000812 +000813 +000814 +000815 +000816 +000817 +000818 +000819 +000820 +000821 +000822 +000823 +000824 +000825 +000826 +000827 +000828 +000829 +000830 +000831 +000832 +000833 +000834 +000835 +000836 +000837 +000838 +000839 +000840 +000841 +000842 +000843 +000844 +000845 +000846 +000847 +000848 +000849 +000850 +000851 +000852 +000853 +000854 +000855 +000856 +000857 +000858 +000859 +000860 +000861 +000862 +000863 +000864 +000865 +000866 +000867 +000868 +000869 +000870 +000871 +000872 +000873 +000874 +000875 +000876 +000877 +000878 +000879 +000880 +000881 +000882 +000883 +000884 +000885 +000886 +000887 +000888 +000889 +000890 +000891 +000892 +000893 +000894 +000895 +000896 +000897 +000898 +000899 +000900 +000901 +000902 +000903 +000904 +000905 +000906 +000907 +000908 +000909 +000910 +000911 +000912 +000913 +000914 +000915 +000916 +000917 +000918 +000919 +000920 +000921 +000922 +000923 +000924 +000925 +000926 +000927 +000928 +000929 +000930 +000931 +000932 +000933 +000934 +000935 +000936 +000937 +000938 +000939 +000940 +000941 +000942 +000943 +000944 +000945 +000946 +000947 +000948 +000949 +000950 +000951 +000952 +000953 +000954 +000955 +000956 +000957 +000958 +000959 +000960 +000961 +000962 +000963 +000964 +000965 +000966 +000967 +000968 +000969 +000970 +000971 +000972 +000973 +000974 +000975 +000976 +000977 +000978 +000979 +000980 +000981 +000982 +000983 +000984 +000985 +000986 +000987 +000988 +000989 +000990 +000991 +000992 +000993 +000994 +000995 +000996 +000997 +000998 +000999 +001000 +001001 +001002 +001003 +001004 +001005 +001006 +001007 +001008 +001009 +001010 +001011 +001012 +001013 +001014 +001015 +001016 +001017 +001018 +001019 +001020 +001021 +001022 +001023 +001024 +001025 +001026 +001027 +001028 +001029 +001030 +001031 +001032 +001033 +001034 +001035 +001036 +001037 +001038 +001039 +001040 +001041 +001042 +001043 +001044 +001045 +001046 +001047 +001048 +001049 +001050 +001051 +001052 +001053 +001054 +001055 +001056 +001057 +001058 +001059 +001060 +001061 +001062 +001063 +001064 +001065 +001066 +001067 +001068 +001069 +001070 +001071 +001072 +001073 +001074 +001075 +001076 +001077 +001078 +001079 +001080 +001081 +001082 +001083 +001084 +001085 +001086 +001087 +001088 +001089 +001090 +001091 +001092 +001093 +001094 +001095 +001096 +001097 +001098 +001099 +001100 +001101 +001102 +001103 +001104 +001105 +001106 +001107 +001108 +001109 +001110 +001111 +001112 +001113 +001114 +001115 +001116 +001117 +001118 +001119 +001120 +001121 +001122 +001123 +001124 +001125 +001126 +001127 +001128 +001129 +001130 +001131 +001132 +001133 +001134 +001135 +001136 +001137 +001138 +001139 +001140 +001141 +001142 +001143 +001144 +001145 +001146 +001147 +001148 +001149 +001150 +001151 +001152 +001153 +001154 +001155 +001156 +001157 +001158 +001159 +001160 +001161 +001162 +001163 +001164 +001165 +001166 +001167 +001168 +001169 +001170 +001171 +001172 +001173 +001174 +001175 +001176 +001177 +001178 +001179 +001180 +001181 +001182 +001183 +001184 +001185 +001186 +001187 +001188 +001189 +001190 +001191 +001192 +001193 +001194 +001195 +001196 +001197 +001198 +001199 +001200 +001201 +001202 
+001203 +001204 +001205 +001206 +001207 +001208 +001209 +001210 +001211 +001212 +001213 +001214 +001215 +001216 +001217 +001218 +001219 +001220 +001221 +001222 +001223 +001224 +001225 +001226 +001227 +001228 +001229 +001230 +001231 +001232 +001233 +001234 +001235 +001236 +001237 +001238 +001239 +001240 +001241 +001242 +001243 +001244 +001245 +001246 +001247 +001248 +001249 +001250 +001251 +001252 +001253 +001254 +001255 +001256 +001257 +001258 +001259 +001260 +001261 +001262 +001263 +001264 +001265 +001266 +001267 +001268 +001269 +001270 +001271 +001272 +001273 +001274 +001275 +001276 +001277 +001278 +001279 +001280 +001281 +001282 +001283 +001284 +001285 +001286 +001287 +001288 +001289 +001290 +001291 +001292 +001293 +001294 +001295 +001296 +001297 +001298 +001299 +001300 +001301 +001302 +001303 +001304 +001305 +001306 +001307 +001308 +001309 +001310 +001311 +001312 +001313 +001314 +001315 +001316 +001317 +001318 +001319 +001320 +001321 +001322 +001323 +001324 +001325 +001326 +001327 +001328 +001329 +001330 +001331 +001332 +001333 +001334 +001335 +001336 +001337 +001338 +001339 +001340 +001341 +001342 +001343 +001344 +001345 +001346 +001347 +001348 +001349 +001350 +001351 +001352 +001353 +001354 +001355 +001356 +001357 +001358 +001359 +001360 +001361 +001362 +001363 +001364 +001365 +001366 +001367 +001368 +001369 +001370 +001371 +001372 +001373 +001374 +001375 +001376 +001377 +001378 +001379 +001380 +001381 +001382 +001383 +001384 +001385 +001386 +001387 +001388 +001389 +001390 +001391 +001392 +001393 +001394 +001395 +001396 +001397 +001398 +001399 +001400 +001401 +001402 +001403 +001404 +001405 +001406 +001407 +001408 +001409 +001410 +001411 +001412 +001413 +001414 +001415 +001416 +001417 +001418 +001419 +001420 +001421 +001422 +001423 +001424 +001425 +001426 +001427 +001428 +001429 +001430 +001431 +001432 +001433 +001434 +001435 +001436 +001437 +001438 +001439 +001440 +001441 +001442 +001443 +001444 +001445 +001446 +001447 +001448 +001449 +001450 +001451 +001452 +001453 +001454 +001455 +001456 +001457 +001458 +001459 +001460 +001461 +001462 +001463 +001464 +001465 +001466 +001467 +001468 +001469 +001470 +001471 +001472 +001473 +001474 +001475 +001476 +001477 +001478 +001479 +001480 +001481 +001482 +001483 +001484 +001485 +001486 +001487 +001488 +001489 +001490 +001491 +001492 +001493 +001494 +001495 +001496 +001497 +001498 +001499 +001500 +001501 +001502 +001503 +001504 +001505 +001506 +001507 +001508 +001509 +001510 +001511 +001512 +001513 +001514 +001515 +001516 +001517 +001518 +001519 +001520 +001521 +001522 +001523 +001524 +001525 +001526 +001527 +001528 +001529 +001530 +001531 +001532 +001533 +001534 +001535 +001536 +001537 +001538 +001539 +001540 +001541 +001542 +001543 +001544 +001545 +001546 +001547 +001548 +001549 +001550 +001551 +001552 +001553 +001554 +001555 +001556 +001557 +001558 +001559 +001560 +001561 +001562 +001563 +001564 +001565 +001566 +001567 +001568 +001569 +001570 +001571 +001572 +001573 +001574 +001575 +001576 +001577 +001578 +001579 +001580 +001581 +001582 +001583 +001584 +001585 +001586 +001587 +001588 +001589 +001590 +001591 +001592 +001593 +001594 +001595 +001596 +001597 +001598 +001599 +001600 +001601 +001602 +001603 +001604 +001605 +001606 +001607 +001608 +001609 +001610 +001611 +001612 +001613 +001614 +001615 +001616 +001617 +001618 +001619 +001620 +001621 +001622 +001623 +001624 +001625 +001626 +001627 +001628 +001629 +001630 +001631 +001632 +001633 +001634 +001635 +001636 +001637 +001638 +001639 +001640 +001641 +001642 +001643 +001644 +001645 +001646 
+001647 +001648 +001649 +001650 +001651 +001652 +001653 +001654 +001655 +001656 +001657 +001658 +001659 +001660 +001661 +001662 +001663 +001664 +001665 +001666 +001667 +001668 +001669 +001670 +001671 +001672 +001673 +001674 +001675 +001676 +001677 +001678 +001679 +001680 +001681 +001682 +001683 +001684 +001685 +001686 +001687 +001688 +001689 +001690 +001691 +001692 +001693 +001694 +001695 +001696 +001697 +001698 +001699 +001700 +001701 +001702 +001703 +001704 +001705 +001706 +001707 +001708 +001709 +001710 +001711 +001712 +001713 +001714 +001715 +001716 +001717 +001718 +001719 +001720 +001721 +001722 +001723 +001724 +001725 +001726 +001727 +001728 +001729 +001730 +001731 +001732 +001733 +001734 +001735 +001736 +001737 +001738 +001739 +001740 +001741 +001742 +001743 +001744 +001745 +001746 +001747 +001748 +001749 +001750 +001751 +001752 +001753 +001754 +001755 +001756 +001757 +001758 +001759 +001760 +001761 +001762 +001763 +001764 +001765 +001766 +001767 +001768 +001769 +001770 +001771 +001772 +001773 +001774 +001775 +001776 +001777 +001778 +001779 +001780 +001781 +001782 +001783 +001784 +001785 +001786 +001787 +001788 +001789 +001790 +001791 +001792 +001793 +001794 +001795 +001796 +001797 +001798 +001799 +001800 +001801 +001802 +001803 +001804 +001805 +001806 +001807 +001808 +001809 +001810 +001811 +001812 +001813 +001814 +001815 +001816 +001817 +001818 +001819 +001820 +001821 +001822 +001823 +001824 +001825 +001826 +001827 +001828 +001829 +001830 +001831 +001832 +001833 +001834 +001835 +001836 +001837 +001838 +001839 +001840 +001841 +001842 +001843 +001844 +001845 +001846 +001847 +001848 +001849 +001850 +001851 +001852 +001853 +001854 +001855 +001856 +001857 +001858 +001859 +001860 +001861 +001862 +001863 +001864 +001865 +001866 +001867 +001868 +001869 +001870 +001871 +001872 +001873 +001874 +001875 +001876 +001877 +001878 +001879 +001880 +001881 +001882 +001883 +001884 +001885 +001886 +001887 +001888 +001889 +001890 +001891 +001892 +001893 +001894 +001895 +001896 +001897 +001898 +001899 +001900 +001901 +001902 +001903 +001904 +001905 +001906 +001907 +001908 +001909 +001910 +001911 +001912 +001913 +001914 +001915 +001916 +001917 +001918 +001919 +001920 +001921 +001922 +001923 +001924 +001925 +001926 +001927 +001928 +001929 +001930 +001931 +001932 +001933 +001934 +001935 +001936 +001937 +001938 +001939 +001940 +001941 +001942 +001943 +001944 +001945 +001946 +001947 +001948 +001949 +001950 +001951 +001952 +001953 +001954 +001955 +001956 +001957 +001958 +001959 +001960 +001961 +001962 +001963 +001964 +001965 +001966 +001967 +001968 +001969 +001970 +001971 +001972 +001973 +001974 +001975 +001976 +001977 +001978 +001979 +001980 +001981 +001982 +001983 +001984 +001985 +001986 +001987 +001988 +001989 +001990 +001991 +001992 +001993 +001994 +001995 +001996 +001997 +001998 +001999 +002000 +002001 +002002 +002003 +002004 +002005 +002006 +002007 +002008 +002009 +002010 +002011 +002012 +002013 +002014 +002015 +002016 +002017 +002018 +002019 +002020 +002021 +002022 +002023 +002024 +002025 +002026 +002027 +002028 +002029 +002030 +002031 +002032 +002033 +002034 +002035 +002036 +002037 +002038 +002039 +002040 +002041 +002042 +002043 +002044 +002045 +002046 +002047 +002048 +002049 +002050 +002051 +002052 +002053 +002054 +002055 +002056 +002057 +002058 +002059 +002060 +002061 +002062 +002063 +002064 +002065 +002066 +002067 +002068 +002069 +002070 +002071 +002072 +002073 +002074 +002075 +002076 +002077 +002078 +002079 +002080 +002081 +002082 +002083 +002084 +002085 +002086 +002087 +002088 +002089 +002090 
+002091 +002092 +002093 +002094 +002095 +002096 +002097 +002098 +002099 +002100 +002101 +002102 +002103 +002104 +002105 +002106 +002107 +002108 +002109 +002110 +002111 +002112 +002113 +002114 +002115 +002116 +002117 +002118 +002119 +002120 +002121 +002122 +002123 +002124 +002125 +002126 +002127 +002128 +002129 +002130 +002131 +002132 +002133 +002134 +002135 +002136 +002137 +002138 +002139 +002140 +002141 +002142 +002143 +002144 +002145 +002146 +002147 +002148 +002149 +002150 +002151 +002152 +002153 +002154 +002155 +002156 +002157 +002158 +002159 +002160 +002161 +002162 +002163 +002164 +002165 +002166 +002167 +002168 +002169 +002170 +002171 +002172 +002173 +002174 +002175 +002176 +002177 +002178 +002179 +002180 +002181 +002182 +002183 +002184 +002185 +002186 +002187 +002188 +002189 +002190 +002191 +002192 +002193 +002194 +002195 +002196 +002197 +002198 +002199 +002200 +002201 +002202 +002203 +002204 +002205 +002206 +002207 +002208 +002209 +002210 +002211 +002212 +002213 +002214 +002215 +002216 +002217 +002218 +002219 +002220 +002221 +002222 +002223 +002224 +002225 +002226 +002227 +002228 +002229 +002230 +002231 +002232 +002233 +002234 +002235 +002236 +002237 +002238 +002239 +002240 +002241 +002242 +002243 +002244 +002245 +002246 +002247 +002248 +002249 +002250 +002251 +002252 +002253 +002254 +002255 +002256 +002257 +002258 +002259 +002260 +002261 +002262 +002263 +002264 +002265 +002266 +002267 +002268 +002269 +002270 +002271 +002272 +002273 +002274 +002275 +002276 +002277 +002278 +002279 +002280 +002281 +002282 +002283 +002284 +002285 +002286 +002287 +002288 +002289 +002290 +002291 +002292 +002293 +002294 +002295 +002296 +002297 +002298 +002299 +002300 +002301 +002302 +002303 +002304 +002305 +002306 +002307 +002308 +002309 +002310 +002311 +002312 +002313 +002314 +002315 +002316 +002317 +002318 +002319 +002320 +002321 +002322 +002323 +002324 +002325 +002326 +002327 +002328 +002329 +002330 +002331 +002332 +002333 +002334 +002335 +002336 +002337 +002338 +002339 +002340 +002341 +002342 +002343 +002344 +002345 +002346 +002347 +002348 +002349 +002350 +002351 +002352 +002353 +002354 +002355 +002356 +002357 +002358 +002359 +002360 +002361 +002362 +002363 +002364 +002365 +002366 +002367 +002368 +002369 +002370 +002371 +002372 +002373 +002374 +002375 +002376 +002377 +002378 +002379 +002380 +002381 +002382 +002383 +002384 +002385 +002386 +002387 +002388 +002389 +002390 +002391 +002392 +002393 +002394 +002395 +002396 +002397 +002398 +002399 +002400 +002401 +002402 +002403 +002404 +002405 +002406 +002407 +002408 +002409 +002410 +002411 +002412 +002413 +002414 +002415 +002416 +002417 +002418 +002419 +002420 +002421 +002422 +002423 +002424 +002425 +002426 +002427 +002428 +002429 +002430 +002431 +002432 +002433 +002434 +002435 +002436 +002437 +002438 +002439 +002440 +002441 +002442 +002443 +002444 +002445 +002446 +002447 +002448 +002449 +002450 +002451 +002452 +002453 +002454 +002455 +002456 +002457 +002458 +002459 +002460 +002461 +002462 +002463 +002464 +002465 +002466 +002467 +002468 +002469 +002470 +002471 +002472 +002473 +002474 +002475 +002476 +002477 +002478 +002479 +002480 +002481 +002482 +002483 +002484 +002485 +002486 +002487 +002488 +002489 +002490 +002491 +002492 +002493 +002494 +002495 +002496 +002497 +002498 +002499 +002500 +002501 +002502 +002503 +002504 +002505 +002506 +002507 +002508 +002509 +002510 +002511 +002512 +002513 +002514 +002515 +002516 +002517 +002518 +002519 +002520 +002521 +002522 +002523 +002524 +002525 +002526 +002527 +002528 +002529 +002530 +002531 +002532 +002533 +002534 
+002535 +002536 +002537 +002538 +002539 +002540 +002541 +002542 +002543 +002544 +002545 +002546 +002547 +002548 +002549 +002550 +002551 +002552 +002553 +002554 +002555 +002556 +002557 +002558 +002559 +002560 +002561 +002562 +002563 +002564 +002565 +002566 +002567 +002568 +002569 +002570 +002571 +002572 +002573 +002574 +002575 +002576 +002577 +002578 +002579 +002580 +002581 +002582 +002583 +002584 +002585 +002586 +002587 +002588 +002589 +002590 +002591 +002592 +002593 +002594 +002595 +002596 +002597 +002598 +002599 +002600 +002601 +002602 +002603 +002604 +002605 +002606 +002607 +002608 +002609 +002610 +002611 +002612 +002613 +002614 +002615 +002616 +002617 +002618 +002619 +002620 +002621 +002622 +002623 +002624 +002625 +002626 +002627 +002628 +002629 +002630 +002631 +002632 +002633 +002634 +002635 +002636 +002637 +002638 +002639 +002640 +002641 +002642 +002643 +002644 +002645 +002646 +002647 +002648 +002649 +002650 +002651 +002652 +002653 +002654 +002655 +002656 +002657 +002658 +002659 +002660 +002661 +002662 +002663 +002664 +002665 +002666 +002667 +002668 +002669 +002670 +002671 +002672 +002673 +002674 +002675 +002676 +002677 +002678 +002679 +002680 +002681 +002682 +002683 +002684 +002685 +002686 +002687 +002688 +002689 +002690 +002691 +002692 +002693 +002694 +002695 +002696 +002697 +002698 +002699 +002700 +002701 +002702 +002703 +002704 +002705 +002706 +002707 +002708 +002709 +002710 +002711 +002712 +002713 +002714 +002715 +002716 +002717 +002718 +002719 +002720 +002721 +002722 +002723 +002724 +002725 +002726 +002727 +002728 +002729 +002730 +002731 +002732 +002733 +002734 +002735 +002736 +002737 +002738 +002739 +002740 +002741 +002742 +002743 +002744 +002745 +002746 +002747 +002748 +002749 +002750 +002751 +002752 +002753 +002754 +002755 +002756 +002757 +002758 +002759 +002760 +002761 +002762 +002763 +002764 +002765 +002766 +002767 +002768 +002769 +002770 +002771 +002772 +002773 +002774 +002775 +002776 +002777 +002778 +002779 +002780 +002781 +002782 +002783 +002784 +002785 +002786 +002787 +002788 +002789 +002790 +002791 +002792 +002793 +002794 +002795 +002796 +002797 +002798 +002799 +002800 +002801 +002802 +002803 +002804 +002805 +002806 +002807 +002808 +002809 +002810 +002811 +002812 +002813 +002814 +002815 +002816 +002817 +002818 +002819 +002820 +002821 +002822 +002823 +002824 +002825 +002826 +002827 +002828 +002829 +002830 +002831 +002832 +002833 +002834 +002835 +002836 +002837 +002838 +002839 +002840 +002841 +002842 +002843 +002844 +002845 +002846 +002847 +002848 +002849 +002850 +002851 +002852 +002853 +002854 +002855 +002856 +002857 +002858 +002859 +002860 +002861 +002862 +002863 +002864 +002865 +002866 +002867 +002868 +002869 +002870 +002871 +002872 +002873 +002874 +002875 +002876 +002877 +002878 +002879 +002880 +002881 +002882 +002883 +002884 +002885 +002886 +002887 +002888 +002889 +002890 +002891 +002892 +002893 +002894 +002895 +002896 +002897 +002898 +002899 +002900 +002901 +002902 +002903 +002904 +002905 +002906 +002907 +002908 +002909 +002910 +002911 +002912 +002913 +002914 +002915 +002916 +002917 +002918 +002919 +002920 +002921 +002922 +002923 +002924 +002925 +002926 +002927 +002928 +002929 +002930 +002931 +002932 +002933 +002934 +002935 +002936 +002937 +002938 +002939 +002940 +002941 +002942 +002943 +002944 +002945 +002946 +002947 +002948 +002949 +002950 +002951 +002952 +002953 +002954 +002955 +002956 +002957 +002958 +002959 +002960 +002961 +002962 +002963 +002964 +002965 +002966 +002967 +002968 +002969 +002970 +002971 +002972 +002973 +002974 +002975 +002976 +002977 +002978 
+002979 +002980 +002981 +002982 +002983 +002984 +002985 +002986 +002987 +002988 +002989 +002990 +002991 +002992 +002993 +002994 +002995 +002996 +002997 +002998 +002999 +003000 +003001 +003002 +003003 +003004 +003005 +003006 +003007 +003008 +003009 +003010 +003011 +003012 +003013 +003014 +003015 +003016 +003017 +003018 +003019 +003020 +003021 +003022 +003023 +003024 +003025 +003026 +003027 +003028 +003029 +003030 +003031 +003032 +003033 +003034 +003035 +003036 +003037 +003038 +003039 +003040 +003041 +003042 +003043 +003044 +003045 +003046 +003047 +003048 +003049 +003050 +003051 +003052 +003053 +003054 +003055 +003056 +003057 +003058 +003059 +003060 +003061 +003062 +003063 +003064 +003065 +003066 +003067 +003068 +003069 +003070 +003071 +003072 +003073 +003074 +003075 +003076 +003077 +003078 +003079 +003080 +003081 +003082 +003083 +003084 +003085 +003086 +003087 +003088 +003089 +003090 +003091 +003092 +003093 +003094 +003095 +003096 +003097 +003098 +003099 +003100 +003101 +003102 +003103 +003104 +003105 +003106 +003107 +003108 +003109 +003110 +003111 +003112 +003113 +003114 +003115 +003116 +003117 +003118 +003119 +003120 +003121 +003122 +003123 +003124 +003125 +003126 +003127 +003128 +003129 +003130 +003131 +003132 +003133 +003134 +003135 +003136 +003137 +003138 +003139 +003140 +003141 +003142 +003143 +003144 +003145 +003146 +003147 +003148 +003149 +003150 +003151 +003152 +003153 +003154 +003155 +003156 +003157 +003158 +003159 +003160 +003161 +003162 +003163 +003164 +003165 +003166 +003167 +003168 +003169 +003170 +003171 +003172 +003173 +003174 +003175 +003176 +003177 +003178 +003179 +003180 +003181 +003182 +003183 +003184 +003185 +003186 +003187 +003188 +003189 +003190 +003191 +003192 +003193 +003194 +003195 +003196 +003197 +003198 +003199 +003200 +003201 +003202 +003203 +003204 +003205 +003206 +003207 +003208 +003209 +003210 +003211 +003212 +003213 +003214 +003215 +003216 +003217 +003218 +003219 +003220 +003221 +003222 +003223 +003224 +003225 +003226 +003227 +003228 +003229 +003230 +003231 +003232 +003233 +003234 +003235 +003236 +003237 +003238 +003239 +003240 +003241 +003242 +003243 +003244 +003245 +003246 +003247 +003248 +003249 +003250 +003251 +003252 +003253 +003254 +003255 +003256 +003257 +003258 +003259 +003260 +003261 +003262 +003263 +003264 +003265 +003266 +003267 +003268 +003269 +003270 +003271 +003272 +003273 +003274 +003275 +003276 +003277 +003278 +003279 +003280 +003281 +003282 +003283 +003284 +003285 +003286 +003287 +003288 +003289 +003290 +003291 +003292 +003293 +003294 +003295 +003296 +003297 +003298 +003299 +003300 +003301 +003302 +003303 +003304 +003305 +003306 +003307 +003308 +003309 +003310 +003311 +003312 +003313 +003314 +003315 +003316 +003317 +003318 +003319 +003320 +003321 +003322 +003323 +003324 +003325 +003326 +003327 +003328 +003329 +003330 +003331 +003332 +003333 +003334 +003335 +003336 +003337 +003338 +003339 +003340 +003341 +003342 +003343 +003344 +003345 +003346 +003347 +003348 +003349 +003350 +003351 +003352 +003353 +003354 +003355 +003356 +003357 +003358 +003359 +003360 +003361 +003362 +003363 +003364 +003365 +003366 +003367 +003368 +003369 +003370 +003371 +003372 +003373 +003374 +003375 +003376 +003377 +003378 +003379 +003380 +003381 +003382 +003383 +003384 +003385 +003386 +003387 +003388 +003389 +003390 +003391 +003392 +003393 +003394 +003395 +003396 +003397 +003398 +003399 +003400 +003401 +003402 +003403 +003404 +003405 +003406 +003407 +003408 +003409 +003410 +003411 +003412 +003413 +003414 +003415 +003416 +003417 +003418 +003419 +003420 +003421 +003422 
+003423 +003424 +003425 +003426 +003427 +003428 +003429 +003430 +003431 +003432 +003433 +003434 +003435 +003436 +003437 +003438 +003439 +003440 +003441 +003442 +003443 +003444 +003445 +003446 +003447 +003448 +003449 +003450 +003451 +003452 +003453 +003454 +003455 +003456 +003457 +003458 +003459 +003460 +003461 +003462 +003463 +003464 +003465 +003466 +003467 +003468 +003469 +003470 +003471 +003472 +003473 +003474 +003475 +003476 +003477 +003478 +003479 +003480 +003481 +003482 +003483 +003484 +003485 +003486 +003487 +003488 +003489 +003490 +003491 +003492 +003493 +003494 +003495 +003496 +003497 +003498 +003499 +003500 +003501 +003502 +003503 +003504 +003505 +003506 +003507 +003508 +003509 +003510 +003511 +003512 +003513 +003514 +003515 +003516 +003517 +003518 +003519 +003520 +003521 +003522 +003523 +003524 +003525 +003526 +003527 +003528 +003529 +003530 +003531 +003532 +003533 +003534 +003535 +003536 +003537 +003538 +003539 +003540 +003541 +003542 +003543 +003544 +003545 +003546 +003547 +003548 +003549 +003550 +003551 +003552 +003553 +003554 +003555 +003556 +003557 +003558 +003559 +003560 +003561 +003562 +003563 +003564 +003565 +003566 +003567 +003568 +003569 +003570 +003571 +003572 +003573 +003574 +003575 +003576 +003577 +003578 +003579 +003580 +003581 +003582 +003583 +003584 +003585 +003586 +003587 +003588 +003589 +003590 +003591 +003592 +003593 +003594 +003595 +003596 +003597 +003598 +003599 +003600 +003601 +003602 +003603 +003604 +003605 +003606 +003607 +003608 +003609 +003610 +003611 +003612 +003613 +003614 +003615 +003616 +003617 +003618 +003619 +003620 +003621 +003622 +003623 +003624 +003625 +003626 +003627 +003628 +003629 +003630 +003631 +003632 +003633 +003634 +003635 +003636 +003637 +003638 +003639 +003640 +003641 +003642 +003643 +003644 +003645 +003646 +003647 +003648 +003649 +003650 +003651 +003652 +003653 +003654 +003655 +003656 +003657 +003658 +003659 +003660 +003661 +003662 +003663 +003664 +003665 +003666 +003667 +003668 +003669 +003670 +003671 +003672 +003673 +003674 +003675 +003676 +003677 +003678 +003679 +003680 +003681 +003682 +003683 +003684 +003685 +003686 +003687 +003688 +003689 +003690 +003691 +003692 +003693 +003694 +003695 +003696 +003697 +003698 +003699 +003700 +003701 +003702 +003703 +003704 +003705 +003706 +003707 +003708 +003709 +003710 +003711 +003712 +003713 +003714 +003715 +003716 +003717 +003718 +003719 +003720 +003721 +003722 +003723 +003724 +003725 +003726 +003727 +003728 +003729 +003730 +003731 +003732 +003733 +003734 +003735 +003736 +003737 +003738 +003739 +003740 +003741 +003742 +003743 +003744 +003745 +003746 +003747 +003748 +003749 +003750 +003751 +003752 +003753 +003754 +003755 +003756 +003757 +003758 +003759 +003760 +003761 +003762 +003763 +003764 +003765 +003766 +003767 +003768 +003769 +003770 +003771 +003772 +003773 +003774 +003775 +003776 +003777 +003778 +003779 +003780 +003781 +003782 +003783 +003784 +003785 +003786 +003787 +003788 +003789 +003790 +003791 +003792 +003793 +003794 +003795 +003796 +003797 +003798 +003799 +003800 +003801 +003802 +003803 +003804 +003805 +003806 +003807 +003808 +003809 +003810 +003811 +003812 +003813 +003814 +003815 +003816 +003817 +003818 +003819 +003820 +003821 +003822 +003823 +003824 +003825 +003826 +003827 +003828 +003829 +003830 +003831 +003832 +003833 +003834 +003835 +003836 +003837 +003838 +003839 +003840 +003841 +003842 +003843 +003844 +003845 +003846 +003847 +003848 +003849 +003850 +003851 +003852 +003853 +003854 +003855 +003856 +003857 +003858 +003859 +003860 +003861 +003862 +003863 +003864 +003865 +003866 
+003867 +003868 +003869 +003870 +003871 +003872 +003873 +003874 +003875 +003876 +003877 +003878 +003879 +003880 +003881 +003882 +003883 +003884 +003885 +003886 +003887 +003888 +003889 +003890 +003891 +003892 +003893 +003894 +003895 +003896 +003897 +003898 +003899 +003900 +003901 +003902 +003903 +003904 +003905 +003906 +003907 +003908 +003909 +003910 +003911 +003912 +003913 +003914 +003915 +003916 +003917 +003918 +003919 +003920 +003921 +003922 +003923 +003924 +003925 +003926 +003927 +003928 +003929 +003930 +003931 +003932 +003933 +003934 +003935 +003936 +003937 +003938 +003939 +003940 +003941 +003942 +003943 +003944 +003945 +003946 +003947 +003948 +003949 +003950 +003951 +003952 +003953 +003954 +003955 +003956 +003957 +003958 +003959 +003960 +003961 +003962 +003963 +003964 +003965 +003966 +003967 +003968 +003969 +003970 +003971 +003972 +003973 +003974 +003975 +003976 +003977 +003978 +003979 +003980 +003981 +003982 +003983 +003984 +003985 +003986 +003987 +003988 +003989 +003990 +003991 +003992 +003993 +003994 +003995 +003996 +003997 +003998 +003999 +004000 +004001 +004002 +004003 +004004 +004005 +004006 +004007 +004008 +004009 +004010 +004011 +004012 +004013 +004014 +004015 +004016 +004017 +004018 +004019 +004020 +004021 +004022 +004023 +004024 +004025 +004026 +004027 +004028 +004029 +004030 +004031 +004032 +004033 +004034 +004035 +004036 +004037 +004038 +004039 +004040 +004041 +004042 +004043 +004044 +004045 +004046 +004047 +004048 +004049 +004050 +004051 +004052 +004053 +004054 +004055 +004056 +004057 +004058 +004059 +004060 +004061 +004062 +004063 +004064 +004065 +004066 +004067 +004068 +004069 +004070 +004071 +004072 +004073 +004074 +004075 +004076 +004077 +004078 +004079 +004080 +004081 +004082 +004083 +004084 +004085 +004086 +004087 +004088 +004089 +004090 +004091 +004092 +004093 +004094 +004095 +004096 +004097 +004098 +004099 +004100 +004101 +004102 +004103 +004104 +004105 +004106 +004107 +004108 +004109 +004110 +004111 +004112 +004113 +004114 +004115 +004116 +004117 +004118 +004119 +004120 +004121 +004122 +004123 +004124 +004125 +004126 +004127 +004128 +004129 +004130 +004131 +004132 +004133 +004134 +004135 +004136 +004137 +004138 +004139 +004140 +004141 +004142 +004143 +004144 +004145 +004146 +004147 +004148 +004149 +004150 +004151 +004152 +004153 +004154 +004155 +004156 +004157 +004158 +004159 +004160 +004161 +004162 +004163 +004164 +004165 +004166 +004167 +004168 +004169 +004170 +004171 +004172 +004173 +004174 +004175 +004176 +004177 +004178 +004179 +004180 +004181 +004182 +004183 +004184 +004185 +004186 +004187 +004188 +004189 +004190 +004191 +004192 +004193 +004194 +004195 +004196 +004197 +004198 +004199 +004200 +004201 +004202 +004203 +004204 +004205 +004206 +004207 +004208 +004209 +004210 +004211 +004212 +004213 +004214 +004215 +004216 +004217 +004218 +004219 +004220 +004221 +004222 +004223 +004224 +004225 +004226 +004227 +004228 +004229 +004230 +004231 +004232 +004233 +004234 +004235 +004236 +004237 +004238 +004239 +004240 +004241 +004242 +004243 +004244 +004245 +004246 +004247 +004248 +004249 +004250 +004251 +004252 +004253 +004254 +004255 +004256 +004257 +004258 +004259 +004260 +004261 +004262 +004263 +004264 +004265 +004266 +004267 +004268 +004269 +004270 +004271 +004272 +004273 +004274 +004275 +004276 +004277 +004278 +004279 +004280 +004281 +004282 +004283 +004284 +004285 +004286 +004287 +004288 +004289 +004290 +004291 +004292 +004293 +004294 +004295 +004296 +004297 +004298 +004299 +004300 +004301 +004302 +004303 +004304 +004305 +004306 +004307 +004308 +004309 +004310 
+004311 +004312 +004313 +004314 +004315 +004316 +004317 +004318 +004319 +004320 +004321 +004322 +004323 +004324 +004325 +004326 +004327 +004328 +004329 +004330 +004331 +004332 +004333 +004334 +004335 +004336 +004337 +004338 +004339 +004340 +004341 +004342 +004343 +004344 +004345 +004346 +004347 +004348 +004349 +004350 +004351 +004352 +004353 +004354 +004355 +004356 +004357 +004358 +004359 +004360 +004361 +004362 +004363 +004364 +004365 +004366 +004367 +004368 +004369 +004370 +004371 +004372 +004373 +004374 +004375 +004376 +004377 +004378 +004379 +004380 +004381 +004382 +004383 +004384 +004385 +004386 +004387 +004388 +004389 +004390 +004391 +004392 +004393 +004394 +004395 +004396 +004397 +004398 +004399 +004400 +004401 +004402 +004403 +004404 +004405 +004406 +004407 +004408 +004409 +004410 +004411 +004412 +004413 +004414 +004415 +004416 +004417 +004418 +004419 +004420 +004421 +004422 +004423 +004424 +004425 +004426 +004427 +004428 +004429 +004430 +004431 +004432 +004433 +004434 +004435 +004436 +004437 +004438 +004439 +004440 +004441 +004442 +004443 +004444 +004445 +004446 +004447 +004448 +004449 +004450 +004451 +004452 +004453 +004454 +004455 +004456 +004457 +004458 +004459 +004460 +004461 +004462 +004463 +004464 +004465 +004466 +004467 +004468 +004469 +004470 +004471 +004472 +004473 +004474 +004475 +004476 +004477 +004478 +004479 +004480 +004481 +004482 +004483 +004484 +004485 +004486 +004487 +004488 +004489 +004490 +004491 +004492 +004493 +004494 +004495 +004496 +004497 +004498 +004499 +004500 +004501 +004502 +004503 +004504 +004505 +004506 +004507 +004508 +004509 +004510 +004511 +004512 +004513 +004514 +004515 +004516 +004517 +004518 +004519 +004520 +004521 +004522 +004523 +004524 +004525 +004526 +004527 +004528 +004529 +004530 +004531 +004532 +004533 +004534 +004535 +004536 +004537 +004538 +004539 +004540 +004541 +004542 +004543 +004544 +004545 +004546 +004547 +004548 +004549 +004550 +004551 +004552 +004553 +004554 +004555 +004556 +004557 +004558 +004559 +004560 +004561 +004562 +004563 +004564 +004565 +004566 +004567 +004568 +004569 +004570 +004571 +004572 +004573 +004574 +004575 +004576 +004577 +004578 +004579 +004580 +004581 +004582 +004583 +004584 +004585 +004586 +004587 +004588 +004589 +004590 +004591 +004592 +004593 +004594 +004595 +004596 +004597 +004598 +004599 +004600 +004601 +004602 +004603 +004604 +004605 +004606 +004607 +004608 +004609 +004610 +004611 +004612 +004613 +004614 +004615 +004616 +004617 +004618 +004619 +004620 +004621 +004622 +004623 +004624 +004625 +004626 +004627 +004628 +004629 +004630 +004631 +004632 +004633 +004634 +004635 +004636 +004637 +004638 +004639 +004640 +004641 +004642 +004643 +004644 +004645 +004646 +004647 +004648 +004649 +004650 +004651 +004652 +004653 +004654 +004655 +004656 +004657 +004658 +004659 +004660 +004661 +004662 +004663 +004664 +004665 +004666 +004667 +004668 +004669 +004670 +004671 +004672 +004673 +004674 +004675 +004676 +004677 +004678 +004679 +004680 +004681 +004682 +004683 +004684 +004685 +004686 +004687 +004688 +004689 +004690 +004691 +004692 +004693 +004694 +004695 +004696 +004697 +004698 +004699 +004700 +004701 +004702 +004703 +004704 +004705 +004706 +004707 +004708 +004709 +004710 +004711 +004712 +004713 +004714 +004715 +004716 +004717 +004718 +004719 +004720 +004721 +004722 +004723 +004724 +004725 +004726 +004727 +004728 +004729 +004730 +004731 +004732 +004733 +004734 +004735 +004736 +004737 +004738 +004739 +004740 +004741 +004742 +004743 +004744 +004745 +004746 +004747 +004748 +004749 +004750 +004751 +004752 +004753 +004754 
+004755 +004756 +004757 +004758 +004759 +004760 +004761 +004762 +004763 +004764 +004765 +004766 +004767 +004768 +004769 +004770 +004771 +004772 +004773 +004774 +004775 +004776 +004777 +004778 +004779 +004780 +004781 +004782 +004783 +004784 +004785 +004786 +004787 +004788 +004789 +004790 +004791 +004792 +004793 +004794 +004795 +004796 +004797 +004798 +004799 +004800 +004801 +004802 +004803 +004804 +004805 +004806 +004807 +004808 +004809 +004810 +004811 +004812 +004813 +004814 +004815 +004816 +004817 +004818 +004819 +004820 +004821 +004822 +004823 +004824 +004825 +004826 +004827 +004828 +004829 +004830 +004831 +004832 +004833 +004834 +004835 +004836 +004837 +004838 +004839 +004840 +004841 +004842 +004843 +004844 +004845 +004846 +004847 +004848 +004849 +004850 +004851 +004852 +004853 +004854 +004855 +004856 +004857 +004858 +004859 +004860 +004861 +004862 +004863 +004864 +004865 +004866 +004867 +004868 +004869 +004870 +004871 +004872 +004873 +004874 +004875 +004876 +004877 +004878 +004879 +004880 +004881 +004882 +004883 +004884 +004885 +004886 +004887 +004888 +004889 +004890 +004891 +004892 +004893 +004894 +004895 +004896 +004897 +004898 +004899 +004900 +004901 +004902 +004903 +004904 +004905 +004906 +004907 +004908 +004909 +004910 +004911 +004912 +004913 +004914 +004915 +004916 +004917 +004918 +004919 +004920 +004921 +004922 +004923 +004924 +004925 +004926 +004927 +004928 +004929 +004930 +004931 +004932 +004933 +004934 +004935 +004936 +004937 +004938 +004939 +004940 +004941 +004942 +004943 +004944 +004945 +004946 +004947 +004948 +004949 +004950 +004951 +004952 +004953 +004954 +004955 +004956 +004957 +004958 +004959 +004960 +004961 +004962 +004963 +004964 +004965 +004966 +004967 +004968 +004969 +004970 +004971 +004972 +004973 +004974 +004975 +004976 +004977 +004978 +004979 +004980 +004981 +004982 +004983 +004984 +004985 +004986 +004987 +004988 +004989 +004990 +004991 +004992 +004993 +004994 +004995 +004996 +004997 +004998 +004999 +005000 +005001 +005002 +005003 +005004 +005005 +005006 +005007 +005008 +005009 +005010 +005011 +005012 +005013 +005014 +005015 +005016 +005017 +005018 +005019 +005020 +005021 +005022 +005023 +005024 +005025 +005026 +005027 +005028 +005029 +005030 +005031 +005032 +005033 +005034 +005035 +005036 +005037 +005038 +005039 +005040 +005041 +005042 +005043 +005044 +005045 +005046 +005047 +005048 +005049 +005050 +005051 +005052 +005053 +005054 +005055 +005056 +005057 +005058 +005059 +005060 +005061 +005062 +005063 +005064 +005065 +005066 +005067 +005068 +005069 +005070 +005071 +005072 +005073 +005074 +005075 +005076 +005077 +005078 +005079 +005080 +005081 +005082 +005083 +005084 +005085 +005086 +005087 +005088 +005089 +005090 +005091 +005092 +005093 +005094 +005095 +005096 +005097 +005098 +005099 +005100 +005101 +005102 +005103 +005104 +005105 +005106 +005107 +005108 +005109 +005110 +005111 +005112 +005113 +005114 +005115 +005116 +005117 +005118 +005119 +005120 +005121 +005122 +005123 +005124 +005125 +005126 +005127 +005128 +005129 +005130 +005131 +005132 +005133 +005134 +005135 +005136 +005137 +005138 +005139 +005140 +005141 +005142 +005143 +005144 +005145 +005146 +005147 +005148 +005149 +005150 +005151 +005152 +005153 +005154 +005155 +005156 +005157 +005158 +005159 +005160 +005161 +005162 +005163 +005164 +005165 +005166 +005167 +005168 +005169 +005170 +005171 +005172 +005173 +005174 +005175 +005176 +005177 +005178 +005179 +005180 +005181 +005182 +005183 +005184 +005185 +005186 +005187 +005188 +005189 +005190 +005191 +005192 +005193 +005194 +005195 +005196 +005197 +005198 
+005199 +005200 +005201 +005202 +005203 +005204 +005205 +005206 +005207 +005208 +005209 +005210 +005211 +005212 +005213 +005214 +005215 +005216 +005217 +005218 +005219 +005220 +005221 +005222 +005223 +005224 +005225 +005226 +005227 +005228 +005229 +005230 +005231 +005232 +005233 +005234 +005235 +005236 +005237 +005238 +005239 +005240 +005241 +005242 +005243 +005244 +005245 +005246 +005247 +005248 +005249 +005250 +005251 +005252 +005253 +005254 +005255 +005256 +005257 +005258 +005259 +005260 +005261 +005262 +005263 +005264 +005265 +005266 +005267 +005268 +005269 +005270 +005271 +005272 +005273 +005274 +005275 +005276 +005277 +005278 +005279 +005280 +005281 +005282 +005283 +005284 +005285 +005286 +005287 +005288 +005289 +005290 +005291 +005292 +005293 +005294 +005295 +005296 +005297 +005298 +005299 +005300 +005301 +005302 +005303 +005304 +005305 +005306 +005307 +005308 +005309 +005310 +005311 +005312 +005313 +005314 +005315 +005316 +005317 +005318 +005319 +005320 +005321 +005322 +005323 +005324 +005325 +005326 +005327 +005328 +005329 +005330 +005331 +005332 +005333 +005334 +005335 +005336 +005337 +005338 +005339 +005340 +005341 +005342 +005343 +005344 +005345 +005346 +005347 +005348 +005349 +005350 +005351 +005352 +005353 +005354 +005355 +005356 +005357 +005358 +005359 +005360 +005361 +005362 +005363 +005364 +005365 +005366 +005367 +005368 +005369 +005370 +005371 +005372 +005373 +005374 +005375 +005376 +005377 +005378 +005379 +005380 +005381 +005382 +005383 +005384 +005385 +005386 +005387 +005388 +005389 +005390 +005391 +005392 +005393 +005394 +005395 +005396 +005397 +005398 +005399 +005400 +005401 +005402 +005403 +005404 +005405 +005406 +005407 +005408 +005409 +005410 +005411 +005412 +005413 +005414 +005415 +005416 +005417 +005418 +005419 +005420 +005421 +005422 +005423 +005424 +005425 +005426 +005427 +005428 +005429 +005430 +005431 +005432 +005433 +005434 +005435 +005436 +005437 +005438 +005439 +005440 +005441 +005442 +005443 +005444 +005445 +005446 +005447 +005448 +005449 +005450 +005451 +005452 +005453 +005454 +005455 +005456 +005457 +005458 +005459 +005460 +005461 +005462 +005463 +005464 +005465 +005466 +005467 +005468 +005469 +005470 +005471 +005472 +005473 +005474 +005475 +005476 +005477 +005478 +005479 +005480 +005481 +005482 +005483 +005484 +005485 +005486 +005487 +005488 +005489 +005490 +005491 +005492 +005493 +005494 +005495 +005496 +005497 +005498 +005499 +005500 +005501 +005502 +005503 +005504 +005505 +005506 +005507 +005508 +005509 +005510 +005511 +005512 +005513 +005514 +005515 +005516 +005517 +005518 +005519 +005520 +005521 +005522 +005523 +005524 +005525 +005526 +005527 +005528 +005529 +005530 +005531 +005532 +005533 +005534 +005535 +005536 +005537 +005538 +005539 +005540 +005541 +005542 +005543 +005544 +005545 +005546 +005547 +005548 +005549 +005550 +005551 +005552 +005553 +005554 +005555 +005556 +005557 +005558 +005559 +005560 +005561 +005562 +005563 +005564 +005565 +005566 +005567 +005568 +005569 +005570 +005571 +005572 +005573 +005574 +005575 +005576 +005577 +005578 +005579 +005580 +005581 +005582 +005583 +005584 +005585 +005586 +005587 +005588 +005589 +005590 +005591 +005592 +005593 +005594 +005595 +005596 +005597 +005598 +005599 +005600 +005601 +005602 +005603 +005604 +005605 +005606 +005607 +005608 +005609 +005610 +005611 +005612 +005613 +005614 +005615 +005616 +005617 +005618 +005619 +005620 +005621 +005622 +005623 +005624 +005625 +005626 +005627 +005628 +005629 +005630 +005631 +005632 +005633 +005634 +005635 +005636 +005637 +005638 +005639 +005640 +005641 +005642 
+005643 +005644 +005645 +005646 +005647 +005648 +005649 +005650 +005651 +005652 +005653 +005654 +005655 +005656 +005657 +005658 +005659 +005660 +005661 +005662 +005663 +005664 +005665 +005666 +005667 +005668 +005669 +005670 +005671 +005672 +005673 +005674 +005675 +005676 +005677 +005678 +005679 +005680 +005681 +005682 +005683 +005684 +005685 +005686 +005687 +005688 +005689 +005690 +005691 +005692 +005693 +005694 +005695 +005696 +005697 +005698 +005699 +005700 +005701 +005702 +005703 +005704 +005705 +005706 +005707 +005708 +005709 +005710 +005711 +005712 +005713 +005714 +005715 +005716 +005717 +005718 +005719 +005720 +005721 +005722 +005723 +005724 +005725 +005726 +005727 +005728 +005729 +005730 +005731 +005732 +005733 +005734 +005735 +005736 +005737 +005738 +005739 +005740 +005741 +005742 +005743 +005744 +005745 +005746 +005747 +005748 +005749 +005750 +005751 +005752 +005753 +005754 +005755 +005756 +005757 +005758 +005759 +005760 +005761 +005762 +005763 +005764 +005765 +005766 +005767 +005768 +005769 +005770 +005771 +005772 +005773 +005774 +005775 +005776 +005777 +005778 +005779 +005780 +005781 +005782 +005783 +005784 +005785 +005786 +005787 +005788 +005789 +005790 +005791 +005792 +005793 +005794 +005795 +005796 +005797 +005798 +005799 +005800 +005801 +005802 +005803 +005804 +005805 +005806 +005807 +005808 +005809 +005810 +005811 +005812 +005813 +005814 +005815 +005816 +005817 +005818 +005819 +005820 +005821 +005822 +005823 +005824 +005825 +005826 +005827 +005828 +005829 +005830 +005831 +005832 +005833 +005834 +005835 +005836 +005837 +005838 +005839 +005840 +005841 +005842 +005843 +005844 +005845 +005846 +005847 +005848 +005849 +005850 +005851 +005852 +005853 +005854 +005855 +005856 +005857 +005858 +005859 +005860 +005861 +005862 +005863 +005864 +005865 +005866 +005867 +005868 +005869 +005870 +005871 +005872 +005873 +005874 +005875 +005876 +005877 +005878 +005879 +005880 +005881 +005882 +005883 +005884 +005885 +005886 +005887 +005888 +005889 +005890 +005891 +005892 +005893 +005894 +005895 +005896 +005897 +005898 +005899 +005900 +005901 +005902 +005903 +005904 +005905 +005906 +005907 +005908 +005909 +005910 +005911 +005912 +005913 +005914 +005915 +005916 +005917 +005918 +005919 +005920 +005921 +005922 +005923 +005924 +005925 +005926 +005927 +005928 +005929 +005930 +005931 +005932 +005933 +005934 +005935 +005936 +005937 +005938 +005939 +005940 +005941 +005942 +005943 +005944 +005945 +005946 +005947 +005948 +005949 +005950 +005951 +005952 +005953 +005954 +005955 +005956 +005957 +005958 +005959 +005960 +005961 +005962 +005963 +005964 +005965 +005966 +005967 +005968 +005969 +005970 +005971 +005972 +005973 +005974 +005975 +005976 +005977 +005978 +005979 +005980 +005981 +005982 +005983 +005984 +005985 +005986 +005987 +005988 +005989 +005990 +005991 +005992 +005993 +005994 +005995 +005996 +005997 +005998 +005999 +006000 +006001 +006002 +006003 +006004 +006005 +006006 +006007 +006008 +006009 +006010 +006011 +006012 +006013 +006014 +006015 +006016 +006017 +006018 +006019 +006020 +006021 +006022 +006023 +006024 +006025 +006026 +006027 +006028 +006029 +006030 +006031 +006032 +006033 +006034 +006035 +006036 +006037 +006038 +006039 +006040 +006041 +006042 +006043 +006044 +006045 +006046 +006047 +006048 +006049 +006050 +006051 +006052 +006053 +006054 +006055 +006056 +006057 +006058 +006059 +006060 +006061 +006062 +006063 +006064 +006065 +006066 +006067 +006068 +006069 +006070 +006071 +006072 +006073 +006074 +006075 +006076 +006077 +006078 +006079 +006080 +006081 +006082 +006083 +006084 +006085 +006086 
+006087 +006088 +006089 +006090 +006091 +006092 +006093 +006094 +006095 +006096 +006097 +006098 +006099 +006100 +006101 +006102 +006103 +006104 +006105 +006106 +006107 +006108 +006109 +006110 +006111 +006112 +006113 +006114 +006115 +006116 +006117 +006118 +006119 +006120 +006121 +006122 +006123 +006124 +006125 +006126 +006127 +006128 +006129 +006130 +006131 +006132 +006133 +006134 +006135 +006136 +006137 +006138 +006139 +006140 +006141 +006142 +006143 +006144 +006145 +006146 +006147 +006148 +006149 +006150 +006151 +006152 +006153 +006154 +006155 +006156 +006157 +006158 +006159 +006160 +006161 +006162 +006163 +006164 +006165 +006166 +006167 +006168 +006169 +006170 +006171 +006172 +006173 +006174 +006175 +006176 +006177 +006178 +006179 +006180 +006181 +006182 +006183 +006184 +006185 +006186 +006187 +006188 +006189 +006190 +006191 +006192 +006193 +006194 +006195 +006196 +006197 +006198 +006199 +006200 +006201 +006202 +006203 +006204 +006205 +006206 +006207 +006208 +006209 +006210 +006211 +006212 +006213 +006214 +006215 +006216 +006217 +006218 +006219 +006220 +006221 +006222 +006223 +006224 +006225 +006226 +006227 +006228 +006229 +006230 +006231 +006232 +006233 +006234 +006235 +006236 +006237 +006238 +006239 +006240 +006241 +006242 +006243 +006244 +006245 +006246 +006247 +006248 +006249 +006250 +006251 +006252 +006253 +006254 +006255 +006256 +006257 +006258 +006259 +006260 +006261 +006262 +006263 +006264 +006265 +006266 +006267 +006268 +006269 +006270 +006271 +006272 +006273 +006274 +006275 +006276 +006277 +006278 +006279 +006280 +006281 +006282 +006283 +006284 +006285 +006286 +006287 +006288 +006289 +006290 +006291 +006292 +006293 +006294 +006295 +006296 +006297 +006298 +006299 +006300 +006301 +006302 +006303 +006304 +006305 +006306 +006307 +006308 +006309 +006310 +006311 +006312 +006313 +006314 +006315 +006316 +006317 +006318 +006319 +006320 +006321 +006322 +006323 +006324 +006325 +006326 +006327 +006328 +006329 +006330 +006331 +006332 +006333 +006334 +006335 +006336 +006337 +006338 +006339 +006340 +006341 +006342 +006343 +006344 +006345 +006346 +006347 +006348 +006349 +006350 +006351 +006352 +006353 +006354 +006355 +006356 +006357 +006358 +006359 +006360 +006361 +006362 +006363 +006364 +006365 +006366 +006367 +006368 +006369 +006370 +006371 +006372 +006373 +006374 +006375 +006376 +006377 +006378 +006379 +006380 +006381 +006382 +006383 +006384 +006385 +006386 +006387 +006388 +006389 +006390 +006391 +006392 +006393 +006394 +006395 +006396 +006397 +006398 +006399 +006400 +006401 +006402 +006403 +006404 +006405 +006406 +006407 +006408 +006409 +006410 +006411 +006412 +006413 +006414 +006415 +006416 +006417 +006418 +006419 +006420 +006421 +006422 +006423 +006424 +006425 +006426 +006427 +006428 +006429 +006430 +006431 +006432 +006433 +006434 +006435 +006436 +006437 +006438 +006439 +006440 +006441 +006442 +006443 +006444 +006445 +006446 +006447 +006448 +006449 +006450 +006451 +006452 +006453 +006454 +006455 +006456 +006457 +006458 +006459 +006460 +006461 +006462 +006463 +006464 +006465 +006466 +006467 +006468 +006469 +006470 +006471 +006472 +006473 +006474 +006475 +006476 +006477 +006478 +006479 +006480 +006481 +006482 +006483 +006484 +006485 +006486 +006487 +006488 +006489 +006490 +006491 +006492 +006493 +006494 +006495 +006496 +006497 +006498 +006499 +006500 +006501 +006502 +006503 +006504 +006505 +006506 +006507 +006508 +006509 +006510 +006511 +006512 +006513 +006514 +006515 +006516 +006517 +006518 +006519 +006520 +006521 +006522 +006523 +006524 +006525 +006526 +006527 +006528 +006529 +006530 
+006531 +006532 +006533 +006534 +006535 +006536 +006537 +006538 +006539 +006540 +006541 +006542 +006543 +006544 +006545 +006546 +006547 +006548 +006549 +006550 +006551 +006552 +006553 +006554 +006555 +006556 +006557 +006558 +006559 +006560 +006561 +006562 +006563 +006564 +006565 +006566 +006567 +006568 +006569 +006570 +006571 +006572 +006573 +006574 +006575 +006576 +006577 +006578 +006579 +006580 +006581 +006582 +006583 +006584 +006585 +006586 +006587 +006588 +006589 +006590 +006591 +006592 +006593 +006594 +006595 +006596 +006597 +006598 +006599 +006600 +006601 +006602 +006603 +006604 +006605 +006606 +006607 +006608 +006609 +006610 +006611 +006612 +006613 +006614 +006615 +006616 +006617 +006618 +006619 +006620 +006621 +006622 +006623 +006624 +006625 +006626 +006627 +006628 +006629 +006630 +006631 +006632 +006633 +006634 +006635 +006636 +006637 +006638 +006639 +006640 +006641 +006642 +006643 +006644 +006645 +006646 +006647 +006648 +006649 +006650 +006651 +006652 +006653 +006654 +006655 +006656 +006657 +006658 +006659 +006660 +006661 +006662 +006663 +006664 +006665 +006666 +006667 +006668 +006669 +006670 +006671 +006672 +006673 +006674 +006675 +006676 +006677 +006678 +006679 +006680 +006681 +006682 +006683 +006684 +006685 +006686 +006687 +006688 +006689 +006690 +006691 +006692 +006693 +006694 +006695 +006696 +006697 +006698 +006699 +006700 +006701 +006702 +006703 +006704 +006705 +006706 +006707 +006708 +006709 +006710 +006711 +006712 +006713 +006714 +006715 +006716 +006717 +006718 +006719 +006720 +006721 +006722 +006723 +006724 +006725 +006726 +006727 +006728 +006729 +006730 +006731 +006732 +006733 +006734 +006735 +006736 +006737 +006738 +006739 +006740 +006741 +006742 +006743 +006744 +006745 +006746 +006747 +006748 +006749 +006750 +006751 +006752 +006753 +006754 +006755 +006756 +006757 +006758 +006759 +006760 +006761 +006762 +006763 +006764 +006765 +006766 +006767 +006768 +006769 +006770 +006771 +006772 +006773 +006774 +006775 +006776 +006777 +006778 +006779 +006780 +006781 +006782 +006783 +006784 +006785 +006786 +006787 +006788 +006789 +006790 +006791 +006792 +006793 +006794 +006795 +006796 +006797 +006798 +006799 +006800 +006801 +006802 +006803 +006804 +006805 +006806 +006807 +006808 +006809 +006810 +006811 +006812 +006813 +006814 +006815 +006816 +006817 +006818 +006819 +006820 +006821 +006822 +006823 +006824 +006825 +006826 +006827 +006828 +006829 +006830 +006831 +006832 +006833 +006834 +006835 +006836 +006837 +006838 +006839 +006840 +006841 +006842 +006843 +006844 +006845 +006846 +006847 +006848 +006849 +006850 +006851 +006852 +006853 +006854 +006855 +006856 +006857 +006858 +006859 +006860 +006861 +006862 +006863 +006864 +006865 +006866 +006867 +006868 +006869 +006870 +006871 +006872 +006873 +006874 +006875 +006876 +006877 +006878 +006879 +006880 +006881 +006882 +006883 +006884 +006885 +006886 +006887 +006888 +006889 +006890 +006891 +006892 +006893 +006894 +006895 +006896 +006897 +006898 +006899 +006900 +006901 +006902 +006903 +006904 +006905 +006906 +006907 +006908 +006909 +006910 +006911 +006912 +006913 +006914 +006915 +006916 +006917 +006918 +006919 +006920 +006921 +006922 +006923 +006924 +006925 +006926 +006927 +006928 +006929 +006930 +006931 +006932 +006933 +006934 +006935 +006936 +006937 +006938 +006939 +006940 +006941 +006942 +006943 +006944 +006945 +006946 +006947 +006948 +006949 +006950 +006951 +006952 +006953 +006954 +006955 +006956 +006957 +006958 +006959 +006960 +006961 +006962 +006963 +006964 +006965 +006966 +006967 +006968 +006969 +006970 +006971 +006972 +006973 +006974 
+006975 +006976 +006977 +006978 +006979 +006980 +006981 +006982 +006983 +006984 +006985 +006986 +006987 +006988 +006989 +006990 +006991 +006992 +006993 +006994 +006995 +006996 +006997 +006998 +006999 +007000 +007001 +007002 +007003 +007004 +007005 +007006 +007007 +007008 +007009 +007010 +007011 +007012 +007013 +007014 +007015 +007016 +007017 +007018 +007019 +007020 +007021 +007022 +007023 +007024 +007025 +007026 +007027 +007028 +007029 +007030 +007031 +007032 +007033 +007034 +007035 +007036 +007037 +007038 +007039 +007040 +007041 +007042 +007043 +007044 +007045 +007046 +007047 +007048 +007049 +007050 +007051 +007052 +007053 +007054 +007055 +007056 +007057 +007058 +007059 +007060 +007061 +007062 +007063 +007064 +007065 +007066 +007067 +007068 +007069 +007070 +007071 +007072 +007073 +007074 +007075 +007076 +007077 +007078 +007079 +007080 +007081 +007082 +007083 +007084 +007085 +007086 +007087 +007088 +007089 +007090 +007091 +007092 +007093 +007094 +007095 +007096 +007097 +007098 +007099 +007100 +007101 +007102 +007103 +007104 +007105 +007106 +007107 +007108 +007109 +007110 +007111 +007112 +007113 +007114 +007115 +007116 +007117 +007118 +007119 +007120 +007121 +007122 +007123 +007124 +007125 +007126 +007127 +007128 +007129 +007130 +007131 +007132 +007133 +007134 +007135 +007136 +007137 +007138 +007139 +007140 +007141 +007142 +007143 +007144 +007145 +007146 +007147 +007148 +007149 +007150 +007151 +007152 +007153 +007154 +007155 +007156 +007157 +007158 +007159 +007160 +007161 +007162 +007163 +007164 +007165 +007166 +007167 +007168 +007169 +007170 +007171 +007172 +007173 +007174 +007175 +007176 +007177 +007178 +007179 +007180 +007181 +007182 +007183 +007184 +007185 +007186 +007187 +007188 +007189 +007190 +007191 +007192 +007193 +007194 +007195 +007196 +007197 +007198 +007199 +007200 +007201 +007202 +007203 +007204 +007205 +007206 +007207 +007208 +007209 +007210 +007211 +007212 +007213 +007214 +007215 +007216 +007217 +007218 +007219 +007220 +007221 +007222 +007223 +007224 +007225 +007226 +007227 +007228 +007229 +007230 +007231 +007232 +007233 +007234 +007235 +007236 +007237 +007238 +007239 +007240 +007241 +007242 +007243 +007244 +007245 +007246 +007247 +007248 +007249 +007250 +007251 +007252 +007253 +007254 +007255 +007256 +007257 +007258 +007259 +007260 +007261 +007262 +007263 +007264 +007265 +007266 +007267 +007268 +007269 +007270 +007271 +007272 +007273 +007274 +007275 +007276 +007277 +007278 +007279 +007280 +007281 +007282 +007283 +007284 +007285 +007286 +007287 +007288 +007289 +007290 +007291 +007292 +007293 +007294 +007295 +007296 +007297 +007298 +007299 +007300 +007301 +007302 +007303 +007304 +007305 +007306 +007307 +007308 +007309 +007310 +007311 +007312 +007313 +007314 +007315 +007316 +007317 +007318 +007319 +007320 +007321 +007322 +007323 +007324 +007325 +007326 +007327 +007328 +007329 +007330 +007331 +007332 +007333 +007334 +007335 +007336 +007337 +007338 +007339 +007340 +007341 +007342 +007343 +007344 +007345 +007346 +007347 +007348 +007349 +007350 +007351 +007352 +007353 +007354 +007355 +007356 +007357 +007358 +007359 +007360 +007361 +007362 +007363 +007364 +007365 +007366 +007367 +007368 +007369 +007370 +007371 +007372 +007373 +007374 +007375 +007376 +007377 +007378 +007379 +007380 +007381 +007382 +007383 +007384 +007385 +007386 +007387 +007388 +007389 +007390 +007391 +007392 +007393 +007394 +007395 +007396 +007397 +007398 +007399 +007400 +007401 +007402 +007403 +007404 +007405 +007406 +007407 +007408 +007409 +007410 +007411 +007412 +007413 +007414 +007415 +007416 +007417 +007418 
+007419 +007420 +007421 +007422 +007423 +007424 +007425 +007426 +007427 +007428 +007429 +007430 +007431 +007432 +007433 +007434 +007435 +007436 +007437 +007438 +007439 +007440 +007441 +007442 +007443 +007444 +007445 +007446 +007447 +007448 +007449 +007450 +007451 +007452 +007453 +007454 +007455 +007456 +007457 +007458 +007459 +007460 +007461 +007462 +007463 +007464 +007465 +007466 +007467 +007468 +007469 +007470 +007471 +007472 +007473 +007474 +007475 +007476 +007477 +007478 +007479 +007480 +007481 +007482 +007483 +007484 +007485 +007486 +007487 +007488 +007489 +007490 +007491 +007492 +007493 +007494 +007495 +007496 +007497 +007498 +007499 +007500 +007501 +007502 +007503 +007504 +007505 +007506 +007507 +007508 +007509 +007510 +007511 +007512 +007513 +007514 +007515 +007516 +007517 \ No newline at end of file diff --git a/pointnet2_lib/tools/data/KITTI/ImageSets/train.txt b/pointnet2_lib/tools/data/KITTI/ImageSets/train.txt new file mode 100644 index 0000000..505b1e2 --- /dev/null +++ b/pointnet2_lib/tools/data/KITTI/ImageSets/train.txt @@ -0,0 +1,3712 @@ +000000 +000003 +000007 +000009 +000010 +000011 +000012 +000013 +000014 +000016 +000017 +000018 +000022 +000026 +000029 +000030 +000032 +000034 +000036 +000038 +000041 +000043 +000044 +000045 +000046 +000049 +000051 +000054 +000055 +000056 +000057 +000060 +000064 +000067 +000068 +000069 +000070 +000071 +000072 +000073 +000074 +000075 +000079 +000080 +000082 +000083 +000084 +000085 +000086 +000087 +000088 +000091 +000092 +000095 +000096 +000097 +000099 +000100 +000101 +000103 +000105 +000109 +000110 +000111 +000112 +000113 +000114 +000115 +000119 +000120 +000121 +000123 +000125 +000127 +000129 +000130 +000131 +000133 +000136 +000138 +000141 +000142 +000144 +000145 +000146 +000148 +000149 +000150 +000154 +000155 +000157 +000158 +000160 +000162 +000163 +000164 +000165 +000166 +000171 +000172 +000176 +000177 +000178 +000179 +000180 +000184 +000185 +000189 +000193 +000198 +000200 +000202 +000205 +000206 +000208 +000209 +000210 +000214 +000215 +000217 +000219 +000220 +000221 +000222 +000225 +000227 +000228 +000232 +000233 +000238 +000240 +000241 +000243 +000244 +000245 +000253 +000254 +000255 +000256 +000257 +000258 +000259 +000261 +000264 +000267 +000271 +000274 +000275 +000276 +000277 +000280 +000282 +000285 +000286 +000287 +000288 +000292 +000294 +000295 +000296 +000298 +000299 +000300 +000303 +000304 +000306 +000310 +000313 +000316 +000317 +000318 +000322 +000325 +000326 +000330 +000331 +000334 +000337 +000338 +000339 +000342 +000344 +000348 +000349 +000353 +000358 +000363 +000364 +000367 +000368 +000371 +000374 +000375 +000380 +000384 +000387 +000389 +000390 +000400 +000405 +000406 +000410 +000411 +000412 +000416 +000417 +000418 +000421 +000423 +000424 +000425 +000426 +000431 +000432 +000433 +000434 +000435 +000438 +000439 +000441 +000442 +000444 +000445 +000447 +000449 +000456 +000458 +000460 +000461 +000462 +000464 +000465 +000466 +000467 +000470 +000471 +000474 +000482 +000483 +000484 +000487 +000488 +000490 +000497 +000500 +000501 +000502 +000505 +000507 +000511 +000513 +000514 +000516 +000518 +000520 +000522 +000523 +000525 +000526 +000529 +000531 +000532 +000534 +000535 +000537 +000538 +000539 +000540 +000544 +000547 +000549 +000550 +000552 +000553 +000556 +000557 +000562 +000563 +000565 +000570 +000573 +000574 +000575 +000576 +000577 +000578 +000579 +000580 +000582 +000584 +000585 +000586 +000587 +000592 +000593 +000594 +000596 +000597 +000598 +000599 +000602 +000603 +000605 +000606 +000607 +000608 +000609 +000616 +000617 +000621 
+000622 +000623 +000627 +000629 +000631 +000632 +000633 +000637 +000638 +000640 +000641 +000643 +000646 +000649 +000651 +000652 +000653 +000654 +000656 +000661 +000662 +000663 +000664 +000665 +000666 +000668 +000671 +000672 +000673 +000675 +000676 +000678 +000680 +000681 +000685 +000686 +000687 +000688 +000689 +000690 +000693 +000695 +000697 +000701 +000703 +000705 +000707 +000709 +000710 +000711 +000712 +000713 +000714 +000715 +000719 +000720 +000723 +000724 +000726 +000730 +000732 +000733 +000735 +000738 +000739 +000742 +000743 +000744 +000747 +000749 +000753 +000755 +000757 +000758 +000759 +000760 +000762 +000763 +000764 +000770 +000775 +000776 +000777 +000780 +000781 +000783 +000784 +000785 +000786 +000787 +000788 +000789 +000791 +000793 +000794 +000796 +000797 +000799 +000808 +000813 +000814 +000815 +000817 +000818 +000820 +000821 +000822 +000824 +000825 +000827 +000828 +000829 +000830 +000832 +000833 +000834 +000835 +000836 +000839 +000842 +000845 +000846 +000851 +000853 +000855 +000856 +000857 +000858 +000860 +000861 +000864 +000865 +000866 +000867 +000868 +000870 +000871 +000872 +000880 +000882 +000883 +000886 +000887 +000888 +000890 +000891 +000892 +000895 +000896 +000898 +000900 +000901 +000902 +000903 +000905 +000906 +000908 +000910 +000913 +000914 +000918 +000919 +000921 +000924 +000925 +000927 +000929 +000933 +000934 +000935 +000936 +000937 +000941 +000945 +000946 +000947 +000950 +000951 +000954 +000955 +000957 +000959 +000960 +000962 +000965 +000968 +000972 +000975 +000977 +000978 +000980 +000982 +000987 +000989 +000990 +000992 +000993 +000994 +000995 +000996 +000997 +000998 +001000 +001001 +001003 +001004 +001005 +001009 +001016 +001017 +001020 +001023 +001024 +001028 +001029 +001030 +001031 +001032 +001033 +001034 +001036 +001038 +001040 +001041 +001044 +001045 +001047 +001048 +001049 +001052 +001056 +001057 +001059 +001060 +001061 +001062 +001064 +001072 +001073 +001074 +001079 +001080 +001081 +001082 +001085 +001087 +001090 +001091 +001092 +001093 +001098 +001100 +001103 +001105 +001109 +001110 +001112 +001117 +001119 +001121 +001122 +001124 +001126 +001128 +001130 +001137 +001142 +001146 +001151 +001156 +001157 +001159 +001160 +001161 +001164 +001165 +001166 +001168 +001169 +001170 +001171 +001174 +001175 +001181 +001184 +001185 +001186 +001190 +001196 +001197 +001200 +001201 +001202 +001204 +001205 +001208 +001209 +001210 +001211 +001212 +001215 +001219 +001220 +001223 +001227 +001229 +001231 +001233 +001238 +001240 +001247 +001248 +001250 +001256 +001258 +001262 +001264 +001276 +001277 +001278 +001279 +001280 +001282 +001283 +001285 +001288 +001290 +001293 +001297 +001298 +001299 +001300 +001301 +001302 +001309 +001310 +001311 +001312 +001313 +001315 +001316 +001319 +001320 +001321 +001322 +001323 +001324 +001325 +001326 +001327 +001328 +001335 +001338 +001340 +001341 +001343 +001348 +001349 +001351 +001354 +001357 +001358 +001360 +001361 +001362 +001364 +001366 +001367 +001368 +001369 +001370 +001371 +001373 +001378 +001379 +001383 +001385 +001390 +001392 +001393 +001394 +001396 +001399 +001400 +001401 +001402 +001403 +001404 +001405 +001406 +001408 +001409 +001413 +001414 +001417 +001418 +001420 +001422 +001423 +001425 +001426 +001428 +001429 +001430 +001433 +001434 +001436 +001440 +001444 +001447 +001449 +001452 +001453 +001454 +001455 +001456 +001457 +001459 +001460 +001462 +001464 +001465 +001467 +001468 +001470 +001472 +001473 +001474 +001475 +001476 +001479 +001482 +001483 +001484 +001486 +001490 +001491 +001492 +001493 +001494 +001496 +001498 +001499 +001500 
+001503 +001504 +001505 +001506 +001509 +001510 +001512 +001515 +001518 +001519 +001520 +001523 +001529 +001530 +001531 +001532 +001534 +001539 +001540 +001541 +001543 +001544 +001548 +001550 +001551 +001553 +001554 +001556 +001558 +001559 +001561 +001563 +001566 +001568 +001570 +001571 +001572 +001575 +001578 +001580 +001581 +001584 +001593 +001595 +001598 +001599 +001601 +001604 +001607 +001608 +001609 +001611 +001612 +001614 +001618 +001620 +001622 +001623 +001624 +001626 +001628 +001630 +001632 +001636 +001637 +001638 +001639 +001641 +001642 +001644 +001646 +001648 +001649 +001651 +001652 +001653 +001655 +001657 +001659 +001661 +001663 +001668 +001669 +001671 +001672 +001673 +001674 +001676 +001677 +001678 +001679 +001681 +001685 +001686 +001687 +001688 +001690 +001691 +001692 +001695 +001696 +001698 +001700 +001703 +001708 +001715 +001716 +001720 +001723 +001724 +001725 +001728 +001730 +001731 +001734 +001735 +001736 +001737 +001738 +001739 +001743 +001744 +001747 +001748 +001753 +001754 +001756 +001757 +001759 +001760 +001761 +001763 +001766 +001767 +001769 +001770 +001773 +001775 +001777 +001779 +001784 +001785 +001788 +001789 +001790 +001791 +001792 +001793 +001796 +001798 +001799 +001803 +001805 +001806 +001809 +001810 +001811 +001812 +001815 +001816 +001819 +001821 +001826 +001827 +001829 +001830 +001832 +001833 +001834 +001836 +001837 +001838 +001839 +001841 +001842 +001843 +001845 +001847 +001849 +001850 +001857 +001860 +001864 +001865 +001866 +001870 +001871 +001873 +001874 +001876 +001879 +001882 +001883 +001889 +001891 +001894 +001895 +001896 +001899 +001901 +001902 +001903 +001906 +001907 +001908 +001910 +001911 +001912 +001913 +001914 +001915 +001916 +001917 +001918 +001921 +001922 +001930 +001935 +001938 +001939 +001944 +001947 +001948 +001949 +001950 +001951 +001953 +001955 +001956 +001957 +001958 +001961 +001962 +001963 +001964 +001965 +001968 +001970 +001971 +001973 +001974 +001975 +001976 +001981 +001987 +001988 +001990 +001992 +001993 +001994 +001998 +002003 +002005 +002006 +002007 +002009 +002015 +002016 +002018 +002020 +002023 +002024 +002026 +002030 +002031 +002032 +002033 +002039 +002040 +002041 +002047 +002051 +002053 +002055 +002059 +002060 +002061 +002063 +002064 +002065 +002066 +002067 +002069 +002070 +002072 +002077 +002080 +002083 +002084 +002088 +002090 +002092 +002095 +002096 +002097 +002098 +002099 +002104 +002105 +002106 +002109 +002110 +002114 +002116 +002117 +002119 +002122 +002125 +002126 +002129 +002132 +002133 +002134 +002141 +002143 +002144 +002145 +002146 +002147 +002148 +002149 +002150 +002154 +002155 +002156 +002157 +002162 +002164 +002167 +002171 +002172 +002174 +002175 +002176 +002178 +002180 +002181 +002184 +002186 +002189 +002190 +002191 +002192 +002194 +002195 +002197 +002198 +002199 +002203 +002204 +002205 +002208 +002210 +002211 +002212 +002213 +002214 +002217 +002221 +002222 +002223 +002226 +002227 +002230 +002231 +002235 +002236 +002237 +002238 +002240 +002241 +002242 +002244 +002247 +002249 +002252 +002253 +002256 +002259 +002261 +002263 +002264 +002265 +002267 +002268 +002269 +002270 +002271 +002273 +002274 +002275 +002278 +002281 +002285 +002288 +002289 +002296 +002297 +002301 +002302 +002305 +002309 +002311 +002312 +002313 +002316 +002317 +002318 +002321 +002322 +002323 +002324 +002326 +002328 +002331 +002333 +002335 +002339 +002342 +002343 +002349 +002350 +002351 +002352 +002354 +002355 +002358 +002360 +002361 +002363 +002364 +002368 +002371 +002373 +002374 +002375 +002377 +002379 +002381 +002388 +002389 +002390 +002394 +002395 
+002396 +002400 +002401 +002402 +002403 +002406 +002407 +002408 +002409 +002410 +002412 +002413 +002416 +002417 +002421 +002426 +002427 +002430 +002431 +002435 +002436 +002437 +002438 +002441 +002443 +002444 +002445 +002447 +002448 +002449 +002451 +002452 +002453 +002456 +002459 +002464 +002465 +002466 +002467 +002468 +002469 +002470 +002471 +002472 +002475 +002480 +002481 +002482 +002484 +002485 +002487 +002489 +002491 +002493 +002494 +002496 +002498 +002501 +002507 +002508 +002510 +002512 +002513 +002514 +002515 +002517 +002518 +002522 +002523 +002524 +002527 +002533 +002535 +002536 +002537 +002542 +002544 +002545 +002547 +002549 +002550 +002551 +002553 +002554 +002555 +002559 +002560 +002561 +002566 +002567 +002571 +002573 +002576 +002578 +002579 +002582 +002587 +002588 +002589 +002591 +002592 +002593 +002595 +002596 +002597 +002605 +002607 +002608 +002609 +002610 +002611 +002614 +002616 +002617 +002618 +002620 +002622 +002623 +002624 +002627 +002629 +002632 +002634 +002637 +002639 +002642 +002643 +002647 +002648 +002649 +002650 +002652 +002654 +002655 +002658 +002659 +002660 +002662 +002664 +002665 +002667 +002668 +002670 +002671 +002672 +002676 +002678 +002679 +002682 +002683 +002684 +002687 +002688 +002689 +002691 +002697 +002698 +002700 +002701 +002703 +002704 +002705 +002708 +002714 +002716 +002718 +002719 +002723 +002731 +002732 +002733 +002734 +002736 +002738 +002739 +002741 +002743 +002750 +002751 +002754 +002756 +002759 +002762 +002766 +002768 +002769 +002770 +002771 +002774 +002776 +002777 +002778 +002779 +002780 +002781 +002782 +002784 +002785 +002788 +002790 +002791 +002792 +002795 +002798 +002799 +002802 +002803 +002807 +002808 +002813 +002816 +002817 +002819 +002821 +002822 +002823 +002824 +002825 +002829 +002832 +002834 +002835 +002837 +002838 +002842 +002843 +002849 +002850 +002851 +002852 +002854 +002855 +002857 +002859 +002860 +002862 +002864 +002865 +002868 +002869 +002870 +002871 +002872 +002873 +002874 +002882 +002884 +002886 +002887 +002888 +002897 +002898 +002899 +002904 +002906 +002907 +002909 +002910 +002912 +002913 +002915 +002918 +002920 +002921 +002922 +002923 +002926 +002927 +002929 +002931 +002932 +002933 +002936 +002938 +002939 +002940 +002941 +002943 +002946 +002949 +002950 +002952 +002954 +002956 +002965 +002967 +002968 +002969 +002970 +002972 +002973 +002975 +002980 +002981 +002983 +002986 +002987 +002989 +002990 +002992 +002996 +002998 +003002 +003008 +003009 +003012 +003013 +003014 +003015 +003016 +003017 +003018 +003020 +003021 +003023 +003026 +003028 +003036 +003037 +003039 +003040 +003041 +003044 +003045 +003049 +003051 +003057 +003059 +003060 +003063 +003064 +003068 +003069 +003070 +003072 +003075 +003077 +003078 +003079 +003081 +003083 +003084 +003085 +003086 +003089 +003091 +003092 +003093 +003095 +003097 +003098 +003100 +003104 +003105 +003108 +003111 +003113 +003115 +003117 +003119 +003120 +003121 +003122 +003123 +003125 +003128 +003130 +003132 +003138 +003139 +003140 +003143 +003147 +003149 +003151 +003152 +003154 +003155 +003157 +003158 +003160 +003163 +003164 +003166 +003168 +003169 +003171 +003173 +003176 +003178 +003184 +003185 +003186 +003188 +003189 +003191 +003193 +003195 +003196 +003198 +003200 +003201 +003205 +003206 +003208 +003209 +003212 +003213 +003215 +003218 +003220 +003223 +003227 +003230 +003234 +003235 +003237 +003238 +003241 +003243 +003244 +003245 +003246 +003248 +003249 +003253 +003256 +003258 +003260 +003261 +003262 +003263 +003264 +003267 +003268 +003270 +003271 +003273 +003274 +003277 +003278 +003279 +003282 +003284 
+003285 +003286 +003287 +003289 +003290 +003291 +003293 +003294 +003297 +003299 +003303 +003307 +003309 +003311 +003314 +003317 +003320 +003321 +003326 +003327 +003328 +003329 +003332 +003333 +003334 +003335 +003336 +003339 +003340 +003342 +003344 +003345 +003348 +003349 +003354 +003356 +003359 +003360 +003361 +003362 +003363 +003369 +003371 +003372 +003374 +003376 +003377 +003378 +003380 +003381 +003382 +003383 +003384 +003387 +003388 +003389 +003390 +003391 +003392 +003398 +003400 +003413 +003414 +003415 +003416 +003418 +003420 +003423 +003424 +003427 +003431 +003433 +003436 +003437 +003438 +003439 +003440 +003441 +003442 +003444 +003445 +003446 +003451 +003452 +003454 +003455 +003457 +003458 +003459 +003460 +003462 +003463 +003468 +003472 +003473 +003475 +003476 +003477 +003479 +003485 +003486 +003493 +003494 +003498 +003499 +003500 +003501 +003505 +003507 +003508 +003509 +003510 +003512 +003513 +003514 +003516 +003518 +003522 +003523 +003525 +003526 +003532 +003533 +003534 +003536 +003537 +003538 +003540 +003541 +003542 +003545 +003546 +003548 +003549 +003551 +003555 +003556 +003560 +003561 +003564 +003565 +003566 +003567 +003569 +003570 +003572 +003575 +003576 +003577 +003578 +003579 +003581 +003585 +003586 +003587 +003589 +003590 +003591 +003592 +003593 +003594 +003595 +003596 +003597 +003598 +003599 +003602 +003603 +003606 +003610 +003612 +003613 +003615 +003617 +003619 +003625 +003626 +003628 +003636 +003637 +003638 +003639 +003640 +003641 +003642 +003644 +003646 +003648 +003650 +003651 +003654 +003656 +003657 +003660 +003663 +003664 +003665 +003666 +003670 +003672 +003673 +003674 +003675 +003680 +003681 +003685 +003686 +003687 +003693 +003694 +003695 +003696 +003697 +003698 +003699 +003700 +003701 +003704 +003706 +003709 +003710 +003713 +003714 +003717 +003720 +003721 +003722 +003724 +003725 +003727 +003729 +003730 +003731 +003732 +003733 +003734 +003740 +003741 +003742 +003743 +003744 +003745 +003749 +003752 +003754 +003757 +003758 +003759 +003760 +003761 +003765 +003766 +003767 +003768 +003770 +003772 +003773 +003774 +003776 +003780 +003783 +003784 +003785 +003786 +003789 +003790 +003791 +003792 +003795 +003796 +003797 +003799 +003801 +003803 +003806 +003810 +003813 +003815 +003816 +003817 +003818 +003819 +003821 +003823 +003824 +003825 +003829 +003831 +003832 +003833 +003836 +003838 +003839 +003840 +003842 +003843 +003844 +003845 +003846 +003848 +003849 +003850 +003851 +003853 +003855 +003857 +003858 +003861 +003862 +003863 +003865 +003867 +003868 +003871 +003875 +003876 +003877 +003882 +003884 +003887 +003888 +003889 +003893 +003895 +003896 +003900 +003903 +003904 +003906 +003908 +003910 +003911 +003912 +003913 +003917 +003918 +003919 +003921 +003922 +003925 +003927 +003928 +003929 +003930 +003933 +003935 +003936 +003939 +003940 +003941 +003942 +003944 +003947 +003949 +003951 +003952 +003953 +003954 +003955 +003957 +003959 +003960 +003963 +003966 +003967 +003968 +003971 +003973 +003974 +003976 +003978 +003979 +003983 +003985 +003987 +003988 +003989 +003990 +003991 +003993 +003994 +003995 +003997 +003999 +004005 +004006 +004012 +004013 +004014 +004015 +004017 +004018 +004019 +004020 +004022 +004023 +004024 +004025 +004029 +004030 +004031 +004035 +004037 +004039 +004043 +004044 +004046 +004047 +004050 +004052 +004053 +004054 +004056 +004057 +004058 +004060 +004062 +004066 +004067 +004069 +004070 +004071 +004073 +004075 +004076 +004078 +004080 +004084 +004086 +004088 +004090 +004093 +004094 +004097 +004099 +004102 +004103 +004106 +004112 +004114 +004115 +004123 +004127 +004133 
+004134 +004135 +004139 +004141 +004144 +004145 +004146 +004147 +004151 +004159 +004165 +004166 +004167 +004169 +004170 +004176 +004177 +004178 +004179 +004180 +004181 +004182 +004183 +004184 +004186 +004192 +004193 +004194 +004197 +004198 +004199 +004200 +004201 +004203 +004204 +004208 +004211 +004212 +004216 +004217 +004218 +004219 +004225 +004227 +004229 +004230 +004231 +004233 +004234 +004235 +004236 +004238 +004240 +004244 +004245 +004247 +004252 +004253 +004257 +004258 +004261 +004262 +004264 +004265 +004266 +004267 +004268 +004269 +004272 +004273 +004274 +004276 +004279 +004283 +004286 +004287 +004292 +004296 +004297 +004302 +004304 +004308 +004310 +004313 +004315 +004316 +004317 +004320 +004322 +004325 +004328 +004331 +004332 +004333 +004334 +004339 +004341 +004344 +004346 +004347 +004351 +004354 +004355 +004356 +004357 +004358 +004359 +004361 +004365 +004366 +004371 +004372 +004375 +004376 +004378 +004379 +004380 +004381 +004382 +004386 +004387 +004389 +004390 +004394 +004395 +004399 +004400 +004405 +004408 +004409 +004410 +004411 +004412 +004413 +004416 +004417 +004427 +004428 +004431 +004432 +004436 +004441 +004442 +004445 +004446 +004448 +004449 +004451 +004453 +004455 +004457 +004459 +004461 +004463 +004464 +004466 +004467 +004468 +004471 +004473 +004476 +004477 +004478 +004479 +004484 +004488 +004492 +004495 +004497 +004498 +004499 +004500 +004503 +004504 +004505 +004506 +004507 +004509 +004510 +004512 +004514 +004515 +004518 +004522 +004523 +004524 +004525 +004533 +004535 +004536 +004537 +004538 +004539 +004543 +004544 +004545 +004546 +004550 +004552 +004554 +004555 +004558 +004559 +004560 +004561 +004563 +004564 +004565 +004571 +004572 +004575 +004577 +004579 +004580 +004583 +004584 +004586 +004590 +004592 +004593 +004594 +004595 +004597 +004600 +004601 +004602 +004604 +004605 +004606 +004607 +004613 +004614 +004616 +004617 +004619 +004621 +004623 +004625 +004627 +004628 +004631 +004635 +004637 +004639 +004641 +004642 +004643 +004645 +004646 +004653 +004654 +004656 +004659 +004661 +004662 +004663 +004664 +004670 +004671 +004674 +004675 +004676 +004677 +004678 +004681 +004684 +004690 +004696 +004701 +004702 +004703 +004704 +004707 +004712 +004719 +004723 +004727 +004728 +004729 +004731 +004733 +004736 +004741 +004747 +004749 +004750 +004751 +004754 +004755 +004757 +004758 +004760 +004761 +004765 +004767 +004771 +004772 +004774 +004775 +004778 +004779 +004780 +004781 +004784 +004785 +004786 +004789 +004793 +004794 +004795 +004796 +004798 +004801 +004802 +004803 +004805 +004808 +004809 +004812 +004818 +004819 +004820 +004823 +004824 +004826 +004827 +004828 +004833 +004834 +004836 +004837 +004838 +004840 +004841 +004842 +004844 +004845 +004847 +004853 +004854 +004855 +004856 +004857 +004865 +004866 +004869 +004870 +004872 +004876 +004877 +004878 +004879 +004880 +004882 +004883 +004884 +004886 +004889 +004890 +004894 +004897 +004899 +004900 +004901 +004906 +004908 +004910 +004911 +004912 +004913 +004915 +004916 +004919 +004922 +004923 +004925 +004930 +004933 +004936 +004937 +004939 +004940 +004945 +004950 +004951 +004952 +004955 +004957 +004961 +004964 +004965 +004967 +004968 +004969 +004970 +004971 +004972 +004973 +004975 +004977 +004978 +004980 +004982 +004984 +004987 +004991 +004992 +004997 +005000 +005003 +005005 +005006 +005007 +005009 +005011 +005012 +005016 +005018 +005020 +005022 +005023 +005025 +005027 +005029 +005030 +005031 +005033 +005035 +005039 +005042 +005043 +005044 +005046 +005047 +005048 +005051 +005059 +005060 +005061 +005066 +005069 +005071 +005076 +005083 
+005084 +005085 +005087 +005088 +005089 +005091 +005092 +005096 +005097 +005098 +005099 +005100 +005102 +005104 +005106 +005107 +005111 +005114 +005115 +005116 +005117 +005118 +005119 +005123 +005126 +005129 +005130 +005131 +005132 +005134 +005137 +005142 +005146 +005148 +005150 +005151 +005152 +005154 +005159 +005160 +005165 +005169 +005171 +005173 +005177 +005178 +005183 +005186 +005187 +005192 +005193 +005195 +005196 +005200 +005202 +005203 +005204 +005205 +005207 +005208 +005209 +005210 +005211 +005212 +005215 +005216 +005220 +005223 +005224 +005225 +005228 +005231 +005232 +005235 +005238 +005239 +005243 +005245 +005247 +005248 +005250 +005252 +005253 +005254 +005257 +005258 +005259 +005261 +005263 +005264 +005265 +005266 +005269 +005270 +005272 +005277 +005278 +005281 +005283 +005285 +005286 +005288 +005290 +005291 +005293 +005294 +005295 +005300 +005301 +005302 +005303 +005305 +005306 +005310 +005314 +005317 +005320 +005324 +005326 +005327 +005331 +005332 +005339 +005340 +005344 +005346 +005348 +005351 +005352 +005353 +005354 +005355 +005356 +005357 +005358 +005361 +005362 +005364 +005367 +005370 +005373 +005374 +005376 +005380 +005382 +005383 +005384 +005387 +005388 +005392 +005393 +005394 +005395 +005396 +005397 +005398 +005399 +005400 +005401 +005402 +005403 +005406 +005407 +005408 +005409 +005410 +005411 +005412 +005414 +005416 +005417 +005418 +005419 +005420 +005421 +005424 +005425 +005428 +005432 +005433 +005435 +005436 +005438 +005439 +005440 +005442 +005446 +005451 +005454 +005455 +005456 +005457 +005462 +005463 +005464 +005468 +005469 +005470 +005475 +005478 +005480 +005483 +005485 +005488 +005490 +005491 +005492 +005493 +005496 +005497 +005499 +005500 +005501 +005502 +005503 +005504 +005506 +005507 +005508 +005509 +005512 +005513 +005516 +005517 +005518 +005519 +005520 +005521 +005522 +005524 +005526 +005527 +005529 +005530 +005533 +005535 +005537 +005539 +005541 +005543 +005547 +005548 +005549 +005550 +005553 +005554 +005561 +005562 +005563 +005564 +005567 +005568 +005569 +005574 +005575 +005578 +005579 +005583 +005585 +005591 +005592 +005593 +005594 +005597 +005598 +005599 +005604 +005605 +005606 +005607 +005608 +005609 +005611 +005612 +005614 +005615 +005620 +005621 +005622 +005624 +005626 +005627 +005628 +005629 +005632 +005636 +005637 +005641 +005644 +005645 +005646 +005647 +005648 +005651 +005654 +005655 +005657 +005661 +005663 +005665 +005666 +005667 +005670 +005671 +005674 +005675 +005678 +005679 +005681 +005682 +005684 +005686 +005688 +005690 +005691 +005692 +005693 +005694 +005696 +005697 +005701 +005702 +005705 +005710 +005711 +005715 +005716 +005718 +005719 +005720 +005721 +005722 +005723 +005726 +005730 +005732 +005733 +005734 +005737 +005738 +005742 +005748 +005749 +005750 +005752 +005753 +005755 +005756 +005758 +005759 +005761 +005764 +005766 +005767 +005768 +005769 +005770 +005771 +005772 +005773 +005774 +005775 +005776 +005778 +005779 +005780 +005781 +005788 +005789 +005791 +005792 +005795 +005797 +005798 +005799 +005802 +005804 +005808 +005809 +005810 +005813 +005814 +005815 +005816 +005817 +005823 +005824 +005825 +005828 +005830 +005831 +005832 +005833 +005835 +005836 +005837 +005838 +005842 +005844 +005845 +005846 +005847 +005848 +005849 +005850 +005851 +005853 +005858 +005860 +005861 +005862 +005863 +005865 +005866 +005867 +005868 +005870 +005871 +005872 +005874 +005875 +005877 +005880 +005884 +005886 +005888 +005890 +005891 +005895 +005896 +005897 +005898 +005902 +005904 +005908 +005915 +005920 +005924 +005928 +005929 +005930 +005932 +005934 +005936 
+005937 +005940 +005941 +005942 +005943 +005945 +005946 +005950 +005951 +005953 +005954 +005956 +005957 +005959 +005960 +005964 +005966 +005967 +005968 +005971 +005973 +005974 +005976 +005977 +005979 +005980 +005983 +005987 +005989 +005990 +005991 +005992 +005993 +005995 +005998 +006000 +006004 +006006 +006007 +006011 +006015 +006017 +006018 +006019 +006020 +006021 +006022 +006025 +006032 +006035 +006037 +006040 +006049 +006051 +006053 +006055 +006056 +006059 +006064 +006065 +006069 +006072 +006073 +006076 +006079 +006080 +006081 +006082 +006084 +006089 +006090 +006091 +006092 +006094 +006099 +006101 +006104 +006105 +006108 +006109 +006111 +006112 +006113 +006119 +006120 +006124 +006128 +006129 +006131 +006132 +006134 +006135 +006137 +006138 +006140 +006141 +006142 +006143 +006145 +006147 +006149 +006150 +006153 +006155 +006157 +006158 +006159 +006160 +006162 +006164 +006166 +006170 +006171 +006172 +006174 +006175 +006178 +006179 +006180 +006181 +006183 +006184 +006188 +006189 +006191 +006192 +006193 +006197 +006199 +006200 +006201 +006203 +006205 +006206 +006207 +006209 +006211 +006212 +006214 +006216 +006217 +006218 +006220 +006221 +006223 +006224 +006225 +006226 +006230 +006231 +006234 +006235 +006236 +006237 +006239 +006241 +006242 +006243 +006245 +006248 +006251 +006252 +006253 +006254 +006255 +006256 +006257 +006259 +006260 +006261 +006262 +006264 +006268 +006271 +006277 +006279 +006281 +006283 +006284 +006285 +006289 +006290 +006291 +006292 +006293 +006294 +006295 +006296 +006298 +006299 +006303 +006304 +006307 +006308 +006309 +006310 +006311 +006313 +006318 +006319 +006320 +006323 +006325 +006326 +006327 +006328 +006329 +006330 +006335 +006336 +006337 +006341 +006346 +006347 +006350 +006352 +006358 +006359 +006361 +006362 +006363 +006365 +006367 +006373 +006374 +006375 +006376 +006378 +006382 +006383 +006384 +006387 +006389 +006390 +006392 +006397 +006398 +006399 +006400 +006401 +006402 +006404 +006408 +006412 +006413 +006414 +006418 +006419 +006421 +006422 +006428 +006429 +006430 +006431 +006432 +006438 +006443 +006447 +006448 +006449 +006450 +006455 +006456 +006457 +006458 +006459 +006460 +006461 +006463 +006466 +006467 +006471 +006476 +006479 +006480 +006485 +006487 +006489 +006490 +006492 +006494 +006495 +006499 +006500 +006501 +006502 +006504 +006509 +006510 +006511 +006513 +006518 +006522 +006523 +006526 +006527 +006528 +006536 +006538 +006539 +006541 +006543 +006544 +006545 +006546 +006547 +006550 +006552 +006554 +006557 +006559 +006562 +006564 +006566 +006567 +006571 +006572 +006573 +006575 +006579 +006580 +006584 +006585 +006587 +006589 +006591 +006594 +006598 +006599 +006600 +006601 +006605 +006606 +006607 +006608 +006609 +006610 +006615 +006616 +006617 +006619 +006620 +006621 +006622 +006627 +006630 +006631 +006635 +006639 +006640 +006642 +006644 +006645 +006646 +006648 +006652 +006653 +006654 +006657 +006661 +006662 +006663 +006665 +006668 +006671 +006672 +006673 +006675 +006680 +006681 +006683 +006684 +006687 +006688 +006689 +006690 +006691 +006697 +006699 +006700 +006702 +006704 +006705 +006706 +006707 +006708 +006716 +006717 +006718 +006721 +006722 +006724 +006727 +006728 +006730 +006735 +006736 +006739 +006740 +006742 +006743 +006746 +006748 +006749 +006750 +006757 +006763 +006766 +006769 +006774 +006775 +006776 +006779 +006784 +006787 +006788 +006790 +006793 +006795 +006799 +006801 +006802 +006805 +006809 +006810 +006814 +006817 +006820 +006821 +006823 +006824 +006825 +006826 +006827 +006830 +006831 +006834 +006835 +006838 +006839 +006840 +006842 +006845 +006846 
+006848 +006851 +006857 +006859 +006861 +006864 +006865 +006867 +006869 +006871 +006875 +006877 +006878 +006880 +006883 +006886 +006888 +006890 +006892 +006893 +006894 +006896 +006902 +006904 +006905 +006909 +006911 +006912 +006915 +006916 +006918 +006919 +006920 +006921 +006923 +006924 +006926 +006927 +006929 +006931 +006932 +006933 +006934 +006935 +006939 +006940 +006941 +006946 +006947 +006949 +006951 +006952 +006957 +006958 +006961 +006963 +006965 +006966 +006967 +006969 +006970 +006972 +006974 +006975 +006976 +006979 +006983 +006984 +006985 +006986 +006988 +006991 +006993 +006995 +006996 +006998 +007001 +007002 +007004 +007007 +007009 +007013 +007017 +007018 +007020 +007021 +007024 +007025 +007035 +007036 +007039 +007040 +007041 +007044 +007045 +007046 +007050 +007051 +007054 +007057 +007058 +007060 +007062 +007064 +007066 +007070 +007073 +007075 +007077 +007086 +007090 +007092 +007093 +007094 +007096 +007097 +007099 +007101 +007102 +007104 +007105 +007106 +007107 +007108 +007111 +007113 +007114 +007116 +007118 +007121 +007123 +007124 +007126 +007127 +007128 +007129 +007134 +007137 +007140 +007141 +007142 +007143 +007147 +007148 +007150 +007151 +007152 +007153 +007155 +007156 +007159 +007160 +007167 +007170 +007171 +007173 +007175 +007179 +007181 +007184 +007185 +007186 +007188 +007189 +007190 +007191 +007192 +007193 +007195 +007196 +007197 +007203 +007206 +007209 +007211 +007213 +007216 +007218 +007220 +007222 +007223 +007224 +007226 +007228 +007231 +007234 +007236 +007237 +007239 +007241 +007243 +007245 +007248 +007249 +007250 +007251 +007254 +007257 +007259 +007263 +007264 +007268 +007269 +007270 +007276 +007281 +007282 +007285 +007286 +007293 +007295 +007296 +007297 +007298 +007301 +007305 +007306 +007307 +007308 +007312 +007313 +007314 +007316 +007317 +007320 +007321 +007324 +007328 +007332 +007333 +007334 +007335 +007338 +007340 +007341 +007346 +007348 +007354 +007355 +007356 +007357 +007358 +007361 +007362 +007363 +007365 +007366 +007367 +007368 +007370 +007372 +007373 +007378 +007379 +007386 +007387 +007388 +007390 +007392 +007393 +007394 +007399 +007400 +007404 +007406 +007408 +007414 +007417 +007418 +007425 +007427 +007428 +007429 +007431 +007432 +007438 +007441 +007443 +007444 +007446 +007451 +007452 +007454 +007455 +007457 +007459 +007460 +007461 +007465 +007471 +007472 +007474 +007476 +007479 \ No newline at end of file diff --git a/pointnet2_lib/tools/data/KITTI/ImageSets/trainval.txt b/pointnet2_lib/tools/data/KITTI/ImageSets/trainval.txt new file mode 100644 index 0000000..43467b5 --- /dev/null +++ b/pointnet2_lib/tools/data/KITTI/ImageSets/trainval.txt @@ -0,0 +1,7481 @@ +000000 +000001 +000002 +000003 +000004 +000005 +000006 +000007 +000008 +000009 +000010 +000011 +000012 +000013 +000014 +000015 +000016 +000017 +000018 +000019 +000020 +000021 +000022 +000023 +000024 +000025 +000026 +000027 +000028 +000029 +000030 +000031 +000032 +000033 +000034 +000035 +000036 +000037 +000038 +000039 +000040 +000041 +000042 +000043 +000044 +000045 +000046 +000047 +000048 +000049 +000050 +000051 +000052 +000053 +000054 +000055 +000056 +000057 +000058 +000059 +000060 +000061 +000062 +000063 +000064 +000065 +000066 +000067 +000068 +000069 +000070 +000071 +000072 +000073 +000074 +000075 +000076 +000077 +000078 +000079 +000080 +000081 +000082 +000083 +000084 +000085 +000086 +000087 +000088 +000089 +000090 +000091 +000092 +000093 +000094 +000095 +000096 +000097 +000098 +000099 +000100 +000101 +000102 +000103 +000104 +000105 +000106 +000107 +000108 +000109 +000110 +000111 +000112 +000113 
+000114 +000115 +000116 +000117 +000118 +000119 +000120 +000121 +000122 +000123 +000124 +000125 +000126 +000127 +000128 +000129 +000130 +000131 +000132 +000133 +000134 +000135 +000136 +000137 +000138 +000139 +000140 +000141 +000142 +000143 +000144 +000145 +000146 +000147 +000148 +000149 +000150 +000151 +000152 +000153 +000154 +000155 +000156 +000157 +000158 +000159 +000160 +000161 +000162 +000163 +000164 +000165 +000166 +000167 +000168 +000169 +000170 +000171 +000172 +000173 +000174 +000175 +000176 +000177 +000178 +000179 +000180 +000181 +000182 +000183 +000184 +000185 +000186 +000187 +000188 +000189 +000190 +000191 +000192 +000193 +000194 +000195 +000196 +000197 +000198 +000199 +000200 +000201 +000202 +000203 +000204 +000205 +000206 +000207 +000208 +000209 +000210 +000211 +000212 +000213 +000214 +000215 +000216 +000217 +000218 +000219 +000220 +000221 +000222 +000223 +000224 +000225 +000226 +000227 +000228 +000229 +000230 +000231 +000232 +000233 +000234 +000235 +000236 +000237 +000238 +000239 +000240 +000241 +000242 +000243 +000244 +000245 +000246 +000247 +000248 +000249 +000250 +000251 +000252 +000253 +000254 +000255 +000256 +000257 +000258 +000259 +000260 +000261 +000262 +000263 +000264 +000265 +000266 +000267 +000268 +000269 +000270 +000271 +000272 +000273 +000274 +000275 +000276 +000277 +000278 +000279 +000280 +000281 +000282 +000283 +000284 +000285 +000286 +000287 +000288 +000289 +000290 +000291 +000292 +000293 +000294 +000295 +000296 +000297 +000298 +000299 +000300 +000301 +000302 +000303 +000304 +000305 +000306 +000307 +000308 +000309 +000310 +000311 +000312 +000313 +000314 +000315 +000316 +000317 +000318 +000319 +000320 +000321 +000322 +000323 +000324 +000325 +000326 +000327 +000328 +000329 +000330 +000331 +000332 +000333 +000334 +000335 +000336 +000337 +000338 +000339 +000340 +000341 +000342 +000343 +000344 +000345 +000346 +000347 +000348 +000349 +000350 +000351 +000352 +000353 +000354 +000355 +000356 +000357 +000358 +000359 +000360 +000361 +000362 +000363 +000364 +000365 +000366 +000367 +000368 +000369 +000370 +000371 +000372 +000373 +000374 +000375 +000376 +000377 +000378 +000379 +000380 +000381 +000382 +000383 +000384 +000385 +000386 +000387 +000388 +000389 +000390 +000391 +000392 +000393 +000394 +000395 +000396 +000397 +000398 +000399 +000400 +000401 +000402 +000403 +000404 +000405 +000406 +000407 +000408 +000409 +000410 +000411 +000412 +000413 +000414 +000415 +000416 +000417 +000418 +000419 +000420 +000421 +000422 +000423 +000424 +000425 +000426 +000427 +000428 +000429 +000430 +000431 +000432 +000433 +000434 +000435 +000436 +000437 +000438 +000439 +000440 +000441 +000442 +000443 +000444 +000445 +000446 +000447 +000448 +000449 +000450 +000451 +000452 +000453 +000454 +000455 +000456 +000457 +000458 +000459 +000460 +000461 +000462 +000463 +000464 +000465 +000466 +000467 +000468 +000469 +000470 +000471 +000472 +000473 +000474 +000475 +000476 +000477 +000478 +000479 +000480 +000481 +000482 +000483 +000484 +000485 +000486 +000487 +000488 +000489 +000490 +000491 +000492 +000493 +000494 +000495 +000496 +000497 +000498 +000499 +000500 +000501 +000502 +000503 +000504 +000505 +000506 +000507 +000508 +000509 +000510 +000511 +000512 +000513 +000514 +000515 +000516 +000517 +000518 +000519 +000520 +000521 +000522 +000523 +000524 +000525 +000526 +000527 +000528 +000529 +000530 +000531 +000532 +000533 +000534 +000535 +000536 +000537 +000538 +000539 +000540 +000541 +000542 +000543 +000544 +000545 +000546 +000547 +000548 +000549 +000550 +000551 +000552 +000553 +000554 +000555 +000556 +000557 
+000558 +000559 +000560 +000561 +000562 +000563 +000564 +000565 +000566 +000567 +000568 +000569 +000570 +000571 +000572 +000573 +000574 +000575 +000576 +000577 +000578 +000579 +000580 +000581 +000582 +000583 +000584 +000585 +000586 +000587 +000588 +000589 +000590 +000591 +000592 +000593 +000594 +000595 +000596 +000597 +000598 +000599 +000600 +000601 +000602 +000603 +000604 +000605 +000606 +000607 +000608 +000609 +000610 +000611 +000612 +000613 +000614 +000615 +000616 +000617 +000618 +000619 +000620 +000621 +000622 +000623 +000624 +000625 +000626 +000627 +000628 +000629 +000630 +000631 +000632 +000633 +000634 +000635 +000636 +000637 +000638 +000639 +000640 +000641 +000642 +000643 +000644 +000645 +000646 +000647 +000648 +000649 +000650 +000651 +000652 +000653 +000654 +000655 +000656 +000657 +000658 +000659 +000660 +000661 +000662 +000663 +000664 +000665 +000666 +000667 +000668 +000669 +000670 +000671 +000672 +000673 +000674 +000675 +000676 +000677 +000678 +000679 +000680 +000681 +000682 +000683 +000684 +000685 +000686 +000687 +000688 +000689 +000690 +000691 +000692 +000693 +000694 +000695 +000696 +000697 +000698 +000699 +000700 +000701 +000702 +000703 +000704 +000705 +000706 +000707 +000708 +000709 +000710 +000711 +000712 +000713 +000714 +000715 +000716 +000717 +000718 +000719 +000720 +000721 +000722 +000723 +000724 +000725 +000726 +000727 +000728 +000729 +000730 +000731 +000732 +000733 +000734 +000735 +000736 +000737 +000738 +000739 +000740 +000741 +000742 +000743 +000744 +000745 +000746 +000747 +000748 +000749 +000750 +000751 +000752 +000753 +000754 +000755 +000756 +000757 +000758 +000759 +000760 +000761 +000762 +000763 +000764 +000765 +000766 +000767 +000768 +000769 +000770 +000771 +000772 +000773 +000774 +000775 +000776 +000777 +000778 +000779 +000780 +000781 +000782 +000783 +000784 +000785 +000786 +000787 +000788 +000789 +000790 +000791 +000792 +000793 +000794 +000795 +000796 +000797 +000798 +000799 +000800 +000801 +000802 +000803 +000804 +000805 +000806 +000807 +000808 +000809 +000810 +000811 +000812 +000813 +000814 +000815 +000816 +000817 +000818 +000819 +000820 +000821 +000822 +000823 +000824 +000825 +000826 +000827 +000828 +000829 +000830 +000831 +000832 +000833 +000834 +000835 +000836 +000837 +000838 +000839 +000840 +000841 +000842 +000843 +000844 +000845 +000846 +000847 +000848 +000849 +000850 +000851 +000852 +000853 +000854 +000855 +000856 +000857 +000858 +000859 +000860 +000861 +000862 +000863 +000864 +000865 +000866 +000867 +000868 +000869 +000870 +000871 +000872 +000873 +000874 +000875 +000876 +000877 +000878 +000879 +000880 +000881 +000882 +000883 +000884 +000885 +000886 +000887 +000888 +000889 +000890 +000891 +000892 +000893 +000894 +000895 +000896 +000897 +000898 +000899 +000900 +000901 +000902 +000903 +000904 +000905 +000906 +000907 +000908 +000909 +000910 +000911 +000912 +000913 +000914 +000915 +000916 +000917 +000918 +000919 +000920 +000921 +000922 +000923 +000924 +000925 +000926 +000927 +000928 +000929 +000930 +000931 +000932 +000933 +000934 +000935 +000936 +000937 +000938 +000939 +000940 +000941 +000942 +000943 +000944 +000945 +000946 +000947 +000948 +000949 +000950 +000951 +000952 +000953 +000954 +000955 +000956 +000957 +000958 +000959 +000960 +000961 +000962 +000963 +000964 +000965 +000966 +000967 +000968 +000969 +000970 +000971 +000972 +000973 +000974 +000975 +000976 +000977 +000978 +000979 +000980 +000981 +000982 +000983 +000984 +000985 +000986 +000987 +000988 +000989 +000990 +000991 +000992 +000993 +000994 +000995 +000996 +000997 +000998 +000999 +001000 +001001 
+001002 +001003 +001004 +001005 +001006 +001007 +001008 +001009 +001010 +001011 +001012 +001013 +001014 +001015 +001016 +001017 +001018 +001019 +001020 +001021 +001022 +001023 +001024 +001025 +001026 +001027 +001028 +001029 +001030 +001031 +001032 +001033 +001034 +001035 +001036 +001037 +001038 +001039 +001040 +001041 +001042 +001043 +001044 +001045 +001046 +001047 +001048 +001049 +001050 +001051 +001052 +001053 +001054 +001055 +001056 +001057 +001058 +001059 +001060 +001061 +001062 +001063 +001064 +001065 +001066 +001067 +001068 +001069 +001070 +001071 +001072 +001073 +001074 +001075 +001076 +001077 +001078 +001079 +001080 +001081 +001082 +001083 +001084 +001085 +001086 +001087 +001088 +001089 +001090 +001091 +001092 +001093 +001094 +001095 +001096 +001097 +001098 +001099 +001100 +001101 +001102 +001103 +001104 +001105 +001106 +001107 +001108 +001109 +001110 +001111 +001112 +001113 +001114 +001115 +001116 +001117 +001118 +001119 +001120 +001121 +001122 +001123 +001124 +001125 +001126 +001127 +001128 +001129 +001130 +001131 +001132 +001133 +001134 +001135 +001136 +001137 +001138 +001139 +001140 +001141 +001142 +001143 +001144 +001145 +001146 +001147 +001148 +001149 +001150 +001151 +001152 +001153 +001154 +001155 +001156 +001157 +001158 +001159 +001160 +001161 +001162 +001163 +001164 +001165 +001166 +001167 +001168 +001169 +001170 +001171 +001172 +001173 +001174 +001175 +001176 +001177 +001178 +001179 +001180 +001181 +001182 +001183 +001184 +001185 +001186 +001187 +001188 +001189 +001190 +001191 +001192 +001193 +001194 +001195 +001196 +001197 +001198 +001199 +001200 +001201 +001202 +001203 +001204 +001205 +001206 +001207 +001208 +001209 +001210 +001211 +001212 +001213 +001214 +001215 +001216 +001217 +001218 +001219 +001220 +001221 +001222 +001223 +001224 +001225 +001226 +001227 +001228 +001229 +001230 +001231 +001232 +001233 +001234 +001235 +001236 +001237 +001238 +001239 +001240 +001241 +001242 +001243 +001244 +001245 +001246 +001247 +001248 +001249 +001250 +001251 +001252 +001253 +001254 +001255 +001256 +001257 +001258 +001259 +001260 +001261 +001262 +001263 +001264 +001265 +001266 +001267 +001268 +001269 +001270 +001271 +001272 +001273 +001274 +001275 +001276 +001277 +001278 +001279 +001280 +001281 +001282 +001283 +001284 +001285 +001286 +001287 +001288 +001289 +001290 +001291 +001292 +001293 +001294 +001295 +001296 +001297 +001298 +001299 +001300 +001301 +001302 +001303 +001304 +001305 +001306 +001307 +001308 +001309 +001310 +001311 +001312 +001313 +001314 +001315 +001316 +001317 +001318 +001319 +001320 +001321 +001322 +001323 +001324 +001325 +001326 +001327 +001328 +001329 +001330 +001331 +001332 +001333 +001334 +001335 +001336 +001337 +001338 +001339 +001340 +001341 +001342 +001343 +001344 +001345 +001346 +001347 +001348 +001349 +001350 +001351 +001352 +001353 +001354 +001355 +001356 +001357 +001358 +001359 +001360 +001361 +001362 +001363 +001364 +001365 +001366 +001367 +001368 +001369 +001370 +001371 +001372 +001373 +001374 +001375 +001376 +001377 +001378 +001379 +001380 +001381 +001382 +001383 +001384 +001385 +001386 +001387 +001388 +001389 +001390 +001391 +001392 +001393 +001394 +001395 +001396 +001397 +001398 +001399 +001400 +001401 +001402 +001403 +001404 +001405 +001406 +001407 +001408 +001409 +001410 +001411 +001412 +001413 +001414 +001415 +001416 +001417 +001418 +001419 +001420 +001421 +001422 +001423 +001424 +001425 +001426 +001427 +001428 +001429 +001430 +001431 +001432 +001433 +001434 +001435 +001436 +001437 +001438 +001439 +001440 +001441 +001442 +001443 +001444 +001445 
+001446 +001447 +001448 +001449 +001450 +001451 +001452 +001453 +001454 +001455 +001456 +001457 +001458 +001459 +001460 +001461 +001462 +001463 +001464 +001465 +001466 +001467 +001468 +001469 +001470 +001471 +001472 +001473 +001474 +001475 +001476 +001477 +001478 +001479 +001480 +001481 +001482 +001483 +001484 +001485 +001486 +001487 +001488 +001489 +001490 +001491 +001492 +001493 +001494 +001495 +001496 +001497 +001498 +001499 +001500 +001501 +001502 +001503 +001504 +001505 +001506 +001507 +001508 +001509 +001510 +001511 +001512 +001513 +001514 +001515 +001516 +001517 +001518 +001519 +001520 +001521 +001522 +001523 +001524 +001525 +001526 +001527 +001528 +001529 +001530 +001531 +001532 +001533 +001534 +001535 +001536 +001537 +001538 +001539 +001540 +001541 +001542 +001543 +001544 +001545 +001546 +001547 +001548 +001549 +001550 +001551 +001552 +001553 +001554 +001555 +001556 +001557 +001558 +001559 +001560 +001561 +001562 +001563 +001564 +001565 +001566 +001567 +001568 +001569 +001570 +001571 +001572 +001573 +001574 +001575 +001576 +001577 +001578 +001579 +001580 +001581 +001582 +001583 +001584 +001585 +001586 +001587 +001588 +001589 +001590 +001591 +001592 +001593 +001594 +001595 +001596 +001597 +001598 +001599 +001600 +001601 +001602 +001603 +001604 +001605 +001606 +001607 +001608 +001609 +001610 +001611 +001612 +001613 +001614 +001615 +001616 +001617 +001618 +001619 +001620 +001621 +001622 +001623 +001624 +001625 +001626 +001627 +001628 +001629 +001630 +001631 +001632 +001633 +001634 +001635 +001636 +001637 +001638 +001639 +001640 +001641 +001642 +001643 +001644 +001645 +001646 +001647 +001648 +001649 +001650 +001651 +001652 +001653 +001654 +001655 +001656 +001657 +001658 +001659 +001660 +001661 +001662 +001663 +001664 +001665 +001666 +001667 +001668 +001669 +001670 +001671 +001672 +001673 +001674 +001675 +001676 +001677 +001678 +001679 +001680 +001681 +001682 +001683 +001684 +001685 +001686 +001687 +001688 +001689 +001690 +001691 +001692 +001693 +001694 +001695 +001696 +001697 +001698 +001699 +001700 +001701 +001702 +001703 +001704 +001705 +001706 +001707 +001708 +001709 +001710 +001711 +001712 +001713 +001714 +001715 +001716 +001717 +001718 +001719 +001720 +001721 +001722 +001723 +001724 +001725 +001726 +001727 +001728 +001729 +001730 +001731 +001732 +001733 +001734 +001735 +001736 +001737 +001738 +001739 +001740 +001741 +001742 +001743 +001744 +001745 +001746 +001747 +001748 +001749 +001750 +001751 +001752 +001753 +001754 +001755 +001756 +001757 +001758 +001759 +001760 +001761 +001762 +001763 +001764 +001765 +001766 +001767 +001768 +001769 +001770 +001771 +001772 +001773 +001774 +001775 +001776 +001777 +001778 +001779 +001780 +001781 +001782 +001783 +001784 +001785 +001786 +001787 +001788 +001789 +001790 +001791 +001792 +001793 +001794 +001795 +001796 +001797 +001798 +001799 +001800 +001801 +001802 +001803 +001804 +001805 +001806 +001807 +001808 +001809 +001810 +001811 +001812 +001813 +001814 +001815 +001816 +001817 +001818 +001819 +001820 +001821 +001822 +001823 +001824 +001825 +001826 +001827 +001828 +001829 +001830 +001831 +001832 +001833 +001834 +001835 +001836 +001837 +001838 +001839 +001840 +001841 +001842 +001843 +001844 +001845 +001846 +001847 +001848 +001849 +001850 +001851 +001852 +001853 +001854 +001855 +001856 +001857 +001858 +001859 +001860 +001861 +001862 +001863 +001864 +001865 +001866 +001867 +001868 +001869 +001870 +001871 +001872 +001873 +001874 +001875 +001876 +001877 +001878 +001879 +001880 +001881 +001882 +001883 +001884 +001885 +001886 +001887 +001888 +001889 
+001890 +001891 +001892 +001893 +001894 +001895 +001896 +001897 +001898 +001899 +001900 +001901 +001902 +001903 +001904 +001905 +001906 +001907 +001908 +001909 +001910 +001911 +001912 +001913 +001914 +001915 +001916 +001917 +001918 +001919 +001920 +001921 +001922 +001923 +001924 +001925 +001926 +001927 +001928 +001929 +001930 +001931 +001932 +001933 +001934 +001935 +001936 +001937 +001938 +001939 +001940 +001941 +001942 +001943 +001944 +001945 +001946 +001947 +001948 +001949 +001950 +001951 +001952 +001953 +001954 +001955 +001956 +001957 +001958 +001959 +001960 +001961 +001962 +001963 +001964 +001965 +001966 +001967 +001968 +001969 +001970 +001971 +001972 +001973 +001974 +001975 +001976 +001977 +001978 +001979 +001980 +001981 +001982 +001983 +001984 +001985 +001986 +001987 +001988 +001989 +001990 +001991 +001992 +001993 +001994 +001995 +001996 +001997 +001998 +001999 +002000 +002001 +002002 +002003 +002004 +002005 +002006 +002007 +002008 +002009 +002010 +002011 +002012 +002013 +002014 +002015 +002016 +002017 +002018 +002019 +002020 +002021 +002022 +002023 +002024 +002025 +002026 +002027 +002028 +002029 +002030 +002031 +002032 +002033 +002034 +002035 +002036 +002037 +002038 +002039 +002040 +002041 +002042 +002043 +002044 +002045 +002046 +002047 +002048 +002049 +002050 +002051 +002052 +002053 +002054 +002055 +002056 +002057 +002058 +002059 +002060 +002061 +002062 +002063 +002064 +002065 +002066 +002067 +002068 +002069 +002070 +002071 +002072 +002073 +002074 +002075 +002076 +002077 +002078 +002079 +002080 +002081 +002082 +002083 +002084 +002085 +002086 +002087 +002088 +002089 +002090 +002091 +002092 +002093 +002094 +002095 +002096 +002097 +002098 +002099 +002100 +002101 +002102 +002103 +002104 +002105 +002106 +002107 +002108 +002109 +002110 +002111 +002112 +002113 +002114 +002115 +002116 +002117 +002118 +002119 +002120 +002121 +002122 +002123 +002124 +002125 +002126 +002127 +002128 +002129 +002130 +002131 +002132 +002133 +002134 +002135 +002136 +002137 +002138 +002139 +002140 +002141 +002142 +002143 +002144 +002145 +002146 +002147 +002148 +002149 +002150 +002151 +002152 +002153 +002154 +002155 +002156 +002157 +002158 +002159 +002160 +002161 +002162 +002163 +002164 +002165 +002166 +002167 +002168 +002169 +002170 +002171 +002172 +002173 +002174 +002175 +002176 +002177 +002178 +002179 +002180 +002181 +002182 +002183 +002184 +002185 +002186 +002187 +002188 +002189 +002190 +002191 +002192 +002193 +002194 +002195 +002196 +002197 +002198 +002199 +002200 +002201 +002202 +002203 +002204 +002205 +002206 +002207 +002208 +002209 +002210 +002211 +002212 +002213 +002214 +002215 +002216 +002217 +002218 +002219 +002220 +002221 +002222 +002223 +002224 +002225 +002226 +002227 +002228 +002229 +002230 +002231 +002232 +002233 +002234 +002235 +002236 +002237 +002238 +002239 +002240 +002241 +002242 +002243 +002244 +002245 +002246 +002247 +002248 +002249 +002250 +002251 +002252 +002253 +002254 +002255 +002256 +002257 +002258 +002259 +002260 +002261 +002262 +002263 +002264 +002265 +002266 +002267 +002268 +002269 +002270 +002271 +002272 +002273 +002274 +002275 +002276 +002277 +002278 +002279 +002280 +002281 +002282 +002283 +002284 +002285 +002286 +002287 +002288 +002289 +002290 +002291 +002292 +002293 +002294 +002295 +002296 +002297 +002298 +002299 +002300 +002301 +002302 +002303 +002304 +002305 +002306 +002307 +002308 +002309 +002310 +002311 +002312 +002313 +002314 +002315 +002316 +002317 +002318 +002319 +002320 +002321 +002322 +002323 +002324 +002325 +002326 +002327 +002328 +002329 +002330 +002331 +002332 +002333 
+002334 +002335 +002336 +002337 +002338 +002339 +002340 +002341 +002342 +002343 +002344 +002345 +002346 +002347 +002348 +002349 +002350 +002351 +002352 +002353 +002354 +002355 +002356 +002357 +002358 +002359 +002360 +002361 +002362 +002363 +002364 +002365 +002366 +002367 +002368 +002369 +002370 +002371 +002372 +002373 +002374 +002375 +002376 +002377 +002378 +002379 +002380 +002381 +002382 +002383 +002384 +002385 +002386 +002387 +002388 +002389 +002390 +002391 +002392 +002393 +002394 +002395 +002396 +002397 +002398 +002399 +002400 +002401 +002402 +002403 +002404 +002405 +002406 +002407 +002408 +002409 +002410 +002411 +002412 +002413 +002414 +002415 +002416 +002417 +002418 +002419 +002420 +002421 +002422 +002423 +002424 +002425 +002426 +002427 +002428 +002429 +002430 +002431 +002432 +002433 +002434 +002435 +002436 +002437 +002438 +002439 +002440 +002441 +002442 +002443 +002444 +002445 +002446 +002447 +002448 +002449 +002450 +002451 +002452 +002453 +002454 +002455 +002456 +002457 +002458 +002459 +002460 +002461 +002462 +002463 +002464 +002465 +002466 +002467 +002468 +002469 +002470 +002471 +002472 +002473 +002474 +002475 +002476 +002477 +002478 +002479 +002480 +002481 +002482 +002483 +002484 +002485 +002486 +002487 +002488 +002489 +002490 +002491 +002492 +002493 +002494 +002495 +002496 +002497 +002498 +002499 +002500 +002501 +002502 +002503 +002504 +002505 +002506 +002507 +002508 +002509 +002510 +002511 +002512 +002513 +002514 +002515 +002516 +002517 +002518 +002519 +002520 +002521 +002522 +002523 +002524 +002525 +002526 +002527 +002528 +002529 +002530 +002531 +002532 +002533 +002534 +002535 +002536 +002537 +002538 +002539 +002540 +002541 +002542 +002543 +002544 +002545 +002546 +002547 +002548 +002549 +002550 +002551 +002552 +002553 +002554 +002555 +002556 +002557 +002558 +002559 +002560 +002561 +002562 +002563 +002564 +002565 +002566 +002567 +002568 +002569 +002570 +002571 +002572 +002573 +002574 +002575 +002576 +002577 +002578 +002579 +002580 +002581 +002582 +002583 +002584 +002585 +002586 +002587 +002588 +002589 +002590 +002591 +002592 +002593 +002594 +002595 +002596 +002597 +002598 +002599 +002600 +002601 +002602 +002603 +002604 +002605 +002606 +002607 +002608 +002609 +002610 +002611 +002612 +002613 +002614 +002615 +002616 +002617 +002618 +002619 +002620 +002621 +002622 +002623 +002624 +002625 +002626 +002627 +002628 +002629 +002630 +002631 +002632 +002633 +002634 +002635 +002636 +002637 +002638 +002639 +002640 +002641 +002642 +002643 +002644 +002645 +002646 +002647 +002648 +002649 +002650 +002651 +002652 +002653 +002654 +002655 +002656 +002657 +002658 +002659 +002660 +002661 +002662 +002663 +002664 +002665 +002666 +002667 +002668 +002669 +002670 +002671 +002672 +002673 +002674 +002675 +002676 +002677 +002678 +002679 +002680 +002681 +002682 +002683 +002684 +002685 +002686 +002687 +002688 +002689 +002690 +002691 +002692 +002693 +002694 +002695 +002696 +002697 +002698 +002699 +002700 +002701 +002702 +002703 +002704 +002705 +002706 +002707 +002708 +002709 +002710 +002711 +002712 +002713 +002714 +002715 +002716 +002717 +002718 +002719 +002720 +002721 +002722 +002723 +002724 +002725 +002726 +002727 +002728 +002729 +002730 +002731 +002732 +002733 +002734 +002735 +002736 +002737 +002738 +002739 +002740 +002741 +002742 +002743 +002744 +002745 +002746 +002747 +002748 +002749 +002750 +002751 +002752 +002753 +002754 +002755 +002756 +002757 +002758 +002759 +002760 +002761 +002762 +002763 +002764 +002765 +002766 +002767 +002768 +002769 +002770 +002771 +002772 +002773 +002774 +002775 +002776 +002777 
+002778 +002779 +002780 +002781 +002782 +002783 +002784 +002785 +002786 +002787 +002788 +002789 +002790 +002791 +002792 +002793 +002794 +002795 +002796 +002797 +002798 +002799 +002800 +002801 +002802 +002803 +002804 +002805 +002806 +002807 +002808 +002809 +002810 +002811 +002812 +002813 +002814 +002815 +002816 +002817 +002818 +002819 +002820 +002821 +002822 +002823 +002824 +002825 +002826 +002827 +002828 +002829 +002830 +002831 +002832 +002833 +002834 +002835 +002836 +002837 +002838 +002839 +002840 +002841 +002842 +002843 +002844 +002845 +002846 +002847 +002848 +002849 +002850 +002851 +002852 +002853 +002854 +002855 +002856 +002857 +002858 +002859 +002860 +002861 +002862 +002863 +002864 +002865 +002866 +002867 +002868 +002869 +002870 +002871 +002872 +002873 +002874 +002875 +002876 +002877 +002878 +002879 +002880 +002881 +002882 +002883 +002884 +002885 +002886 +002887 +002888 +002889 +002890 +002891 +002892 +002893 +002894 +002895 +002896 +002897 +002898 +002899 +002900 +002901 +002902 +002903 +002904 +002905 +002906 +002907 +002908 +002909 +002910 +002911 +002912 +002913 +002914 +002915 +002916 +002917 +002918 +002919 +002920 +002921 +002922 +002923 +002924 +002925 +002926 +002927 +002928 +002929 +002930 +002931 +002932 +002933 +002934 +002935 +002936 +002937 +002938 +002939 +002940 +002941 +002942 +002943 +002944 +002945 +002946 +002947 +002948 +002949 +002950 +002951 +002952 +002953 +002954 +002955 +002956 +002957 +002958 +002959 +002960 +002961 +002962 +002963 +002964 +002965 +002966 +002967 +002968 +002969 +002970 +002971 +002972 +002973 +002974 +002975 +002976 +002977 +002978 +002979 +002980 +002981 +002982 +002983 +002984 +002985 +002986 +002987 +002988 +002989 +002990 +002991 +002992 +002993 +002994 +002995 +002996 +002997 +002998 +002999 +003000 +003001 +003002 +003003 +003004 +003005 +003006 +003007 +003008 +003009 +003010 +003011 +003012 +003013 +003014 +003015 +003016 +003017 +003018 +003019 +003020 +003021 +003022 +003023 +003024 +003025 +003026 +003027 +003028 +003029 +003030 +003031 +003032 +003033 +003034 +003035 +003036 +003037 +003038 +003039 +003040 +003041 +003042 +003043 +003044 +003045 +003046 +003047 +003048 +003049 +003050 +003051 +003052 +003053 +003054 +003055 +003056 +003057 +003058 +003059 +003060 +003061 +003062 +003063 +003064 +003065 +003066 +003067 +003068 +003069 +003070 +003071 +003072 +003073 +003074 +003075 +003076 +003077 +003078 +003079 +003080 +003081 +003082 +003083 +003084 +003085 +003086 +003087 +003088 +003089 +003090 +003091 +003092 +003093 +003094 +003095 +003096 +003097 +003098 +003099 +003100 +003101 +003102 +003103 +003104 +003105 +003106 +003107 +003108 +003109 +003110 +003111 +003112 +003113 +003114 +003115 +003116 +003117 +003118 +003119 +003120 +003121 +003122 +003123 +003124 +003125 +003126 +003127 +003128 +003129 +003130 +003131 +003132 +003133 +003134 +003135 +003136 +003137 +003138 +003139 +003140 +003141 +003142 +003143 +003144 +003145 +003146 +003147 +003148 +003149 +003150 +003151 +003152 +003153 +003154 +003155 +003156 +003157 +003158 +003159 +003160 +003161 +003162 +003163 +003164 +003165 +003166 +003167 +003168 +003169 +003170 +003171 +003172 +003173 +003174 +003175 +003176 +003177 +003178 +003179 +003180 +003181 +003182 +003183 +003184 +003185 +003186 +003187 +003188 +003189 +003190 +003191 +003192 +003193 +003194 +003195 +003196 +003197 +003198 +003199 +003200 +003201 +003202 +003203 +003204 +003205 +003206 +003207 +003208 +003209 +003210 +003211 +003212 +003213 +003214 +003215 +003216 +003217 +003218 +003219 +003220 +003221 
+003222 +003223 +003224 +003225 +003226 +003227 +003228 +003229 +003230 +003231 +003232 +003233 +003234 +003235 +003236 +003237 +003238 +003239 +003240 +003241 +003242 +003243 +003244 +003245 +003246 +003247 +003248 +003249 +003250 +003251 +003252 +003253 +003254 +003255 +003256 +003257 +003258 +003259 +003260 +003261 +003262 +003263 +003264 +003265 +003266 +003267 +003268 +003269 +003270 +003271 +003272 +003273 +003274 +003275 +003276 +003277 +003278 +003279 +003280 +003281 +003282 +003283 +003284 +003285 +003286 +003287 +003288 +003289 +003290 +003291 +003292 +003293 +003294 +003295 +003296 +003297 +003298 +003299 +003300 +003301 +003302 +003303 +003304 +003305 +003306 +003307 +003308 +003309 +003310 +003311 +003312 +003313 +003314 +003315 +003316 +003317 +003318 +003319 +003320 +003321 +003322 +003323 +003324 +003325 +003326 +003327 +003328 +003329 +003330 +003331 +003332 +003333 +003334 +003335 +003336 +003337 +003338 +003339 +003340 +003341 +003342 +003343 +003344 +003345 +003346 +003347 +003348 +003349 +003350 +003351 +003352 +003353 +003354 +003355 +003356 +003357 +003358 +003359 +003360 +003361 +003362 +003363 +003364 +003365 +003366 +003367 +003368 +003369 +003370 +003371 +003372 +003373 +003374 +003375 +003376 +003377 +003378 +003379 +003380 +003381 +003382 +003383 +003384 +003385 +003386 +003387 +003388 +003389 +003390 +003391 +003392 +003393 +003394 +003395 +003396 +003397 +003398 +003399 +003400 +003401 +003402 +003403 +003404 +003405 +003406 +003407 +003408 +003409 +003410 +003411 +003412 +003413 +003414 +003415 +003416 +003417 +003418 +003419 +003420 +003421 +003422 +003423 +003424 +003425 +003426 +003427 +003428 +003429 +003430 +003431 +003432 +003433 +003434 +003435 +003436 +003437 +003438 +003439 +003440 +003441 +003442 +003443 +003444 +003445 +003446 +003447 +003448 +003449 +003450 +003451 +003452 +003453 +003454 +003455 +003456 +003457 +003458 +003459 +003460 +003461 +003462 +003463 +003464 +003465 +003466 +003467 +003468 +003469 +003470 +003471 +003472 +003473 +003474 +003475 +003476 +003477 +003478 +003479 +003480 +003481 +003482 +003483 +003484 +003485 +003486 +003487 +003488 +003489 +003490 +003491 +003492 +003493 +003494 +003495 +003496 +003497 +003498 +003499 +003500 +003501 +003502 +003503 +003504 +003505 +003506 +003507 +003508 +003509 +003510 +003511 +003512 +003513 +003514 +003515 +003516 +003517 +003518 +003519 +003520 +003521 +003522 +003523 +003524 +003525 +003526 +003527 +003528 +003529 +003530 +003531 +003532 +003533 +003534 +003535 +003536 +003537 +003538 +003539 +003540 +003541 +003542 +003543 +003544 +003545 +003546 +003547 +003548 +003549 +003550 +003551 +003552 +003553 +003554 +003555 +003556 +003557 +003558 +003559 +003560 +003561 +003562 +003563 +003564 +003565 +003566 +003567 +003568 +003569 +003570 +003571 +003572 +003573 +003574 +003575 +003576 +003577 +003578 +003579 +003580 +003581 +003582 +003583 +003584 +003585 +003586 +003587 +003588 +003589 +003590 +003591 +003592 +003593 +003594 +003595 +003596 +003597 +003598 +003599 +003600 +003601 +003602 +003603 +003604 +003605 +003606 +003607 +003608 +003609 +003610 +003611 +003612 +003613 +003614 +003615 +003616 +003617 +003618 +003619 +003620 +003621 +003622 +003623 +003624 +003625 +003626 +003627 +003628 +003629 +003630 +003631 +003632 +003633 +003634 +003635 +003636 +003637 +003638 +003639 +003640 +003641 +003642 +003643 +003644 +003645 +003646 +003647 +003648 +003649 +003650 +003651 +003652 +003653 +003654 +003655 +003656 +003657 +003658 +003659 +003660 +003661 +003662 +003663 +003664 +003665 
+003666 +003667 +003668 +003669 +003670 +003671 +003672 +003673 +003674 +003675 +003676 +003677 +003678 +003679 +003680 +003681 +003682 +003683 +003684 +003685 +003686 +003687 +003688 +003689 +003690 +003691 +003692 +003693 +003694 +003695 +003696 +003697 +003698 +003699 +003700 +003701 +003702 +003703 +003704 +003705 +003706 +003707 +003708 +003709 +003710 +003711 +003712 +003713 +003714 +003715 +003716 +003717 +003718 +003719 +003720 +003721 +003722 +003723 +003724 +003725 +003726 +003727 +003728 +003729 +003730 +003731 +003732 +003733 +003734 +003735 +003736 +003737 +003738 +003739 +003740 +003741 +003742 +003743 +003744 +003745 +003746 +003747 +003748 +003749 +003750 +003751 +003752 +003753 +003754 +003755 +003756 +003757 +003758 +003759 +003760 +003761 +003762 +003763 +003764 +003765 +003766 +003767 +003768 +003769 +003770 +003771 +003772 +003773 +003774 +003775 +003776 +003777 +003778 +003779 +003780 +003781 +003782 +003783 +003784 +003785 +003786 +003787 +003788 +003789 +003790 +003791 +003792 +003793 +003794 +003795 +003796 +003797 +003798 +003799 +003800 +003801 +003802 +003803 +003804 +003805 +003806 +003807 +003808 +003809 +003810 +003811 +003812 +003813 +003814 +003815 +003816 +003817 +003818 +003819 +003820 +003821 +003822 +003823 +003824 +003825 +003826 +003827 +003828 +003829 +003830 +003831 +003832 +003833 +003834 +003835 +003836 +003837 +003838 +003839 +003840 +003841 +003842 +003843 +003844 +003845 +003846 +003847 +003848 +003849 +003850 +003851 +003852 +003853 +003854 +003855 +003856 +003857 +003858 +003859 +003860 +003861 +003862 +003863 +003864 +003865 +003866 +003867 +003868 +003869 +003870 +003871 +003872 +003873 +003874 +003875 +003876 +003877 +003878 +003879 +003880 +003881 +003882 +003883 +003884 +003885 +003886 +003887 +003888 +003889 +003890 +003891 +003892 +003893 +003894 +003895 +003896 +003897 +003898 +003899 +003900 +003901 +003902 +003903 +003904 +003905 +003906 +003907 +003908 +003909 +003910 +003911 +003912 +003913 +003914 +003915 +003916 +003917 +003918 +003919 +003920 +003921 +003922 +003923 +003924 +003925 +003926 +003927 +003928 +003929 +003930 +003931 +003932 +003933 +003934 +003935 +003936 +003937 +003938 +003939 +003940 +003941 +003942 +003943 +003944 +003945 +003946 +003947 +003948 +003949 +003950 +003951 +003952 +003953 +003954 +003955 +003956 +003957 +003958 +003959 +003960 +003961 +003962 +003963 +003964 +003965 +003966 +003967 +003968 +003969 +003970 +003971 +003972 +003973 +003974 +003975 +003976 +003977 +003978 +003979 +003980 +003981 +003982 +003983 +003984 +003985 +003986 +003987 +003988 +003989 +003990 +003991 +003992 +003993 +003994 +003995 +003996 +003997 +003998 +003999 +004000 +004001 +004002 +004003 +004004 +004005 +004006 +004007 +004008 +004009 +004010 +004011 +004012 +004013 +004014 +004015 +004016 +004017 +004018 +004019 +004020 +004021 +004022 +004023 +004024 +004025 +004026 +004027 +004028 +004029 +004030 +004031 +004032 +004033 +004034 +004035 +004036 +004037 +004038 +004039 +004040 +004041 +004042 +004043 +004044 +004045 +004046 +004047 +004048 +004049 +004050 +004051 +004052 +004053 +004054 +004055 +004056 +004057 +004058 +004059 +004060 +004061 +004062 +004063 +004064 +004065 +004066 +004067 +004068 +004069 +004070 +004071 +004072 +004073 +004074 +004075 +004076 +004077 +004078 +004079 +004080 +004081 +004082 +004083 +004084 +004085 +004086 +004087 +004088 +004089 +004090 +004091 +004092 +004093 +004094 +004095 +004096 +004097 +004098 +004099 +004100 +004101 +004102 +004103 +004104 +004105 +004106 +004107 +004108 +004109 
+004110 +004111 +004112 +004113 +004114 +004115 +004116 +004117 +004118 +004119 +004120 +004121 +004122 +004123 +004124 +004125 +004126 +004127 +004128 +004129 +004130 +004131 +004132 +004133 +004134 +004135 +004136 +004137 +004138 +004139 +004140 +004141 +004142 +004143 +004144 +004145 +004146 +004147 +004148 +004149 +004150 +004151 +004152 +004153 +004154 +004155 +004156 +004157 +004158 +004159 +004160 +004161 +004162 +004163 +004164 +004165 +004166 +004167 +004168 +004169 +004170 +004171 +004172 +004173 +004174 +004175 +004176 +004177 +004178 +004179 +004180 +004181 +004182 +004183 +004184 +004185 +004186 +004187 +004188 +004189 +004190 +004191 +004192 +004193 +004194 +004195 +004196 +004197 +004198 +004199 +004200 +004201 +004202 +004203 +004204 +004205 +004206 +004207 +004208 +004209 +004210 +004211 +004212 +004213 +004214 +004215 +004216 +004217 +004218 +004219 +004220 +004221 +004222 +004223 +004224 +004225 +004226 +004227 +004228 +004229 +004230 +004231 +004232 +004233 +004234 +004235 +004236 +004237 +004238 +004239 +004240 +004241 +004242 +004243 +004244 +004245 +004246 +004247 +004248 +004249 +004250 +004251 +004252 +004253 +004254 +004255 +004256 +004257 +004258 +004259 +004260 +004261 +004262 +004263 +004264 +004265 +004266 +004267 +004268 +004269 +004270 +004271 +004272 +004273 +004274 +004275 +004276 +004277 +004278 +004279 +004280 +004281 +004282 +004283 +004284 +004285 +004286 +004287 +004288 +004289 +004290 +004291 +004292 +004293 +004294 +004295 +004296 +004297 +004298 +004299 +004300 +004301 +004302 +004303 +004304 +004305 +004306 +004307 +004308 +004309 +004310 +004311 +004312 +004313 +004314 +004315 +004316 +004317 +004318 +004319 +004320 +004321 +004322 +004323 +004324 +004325 +004326 +004327 +004328 +004329 +004330 +004331 +004332 +004333 +004334 +004335 +004336 +004337 +004338 +004339 +004340 +004341 +004342 +004343 +004344 +004345 +004346 +004347 +004348 +004349 +004350 +004351 +004352 +004353 +004354 +004355 +004356 +004357 +004358 +004359 +004360 +004361 +004362 +004363 +004364 +004365 +004366 +004367 +004368 +004369 +004370 +004371 +004372 +004373 +004374 +004375 +004376 +004377 +004378 +004379 +004380 +004381 +004382 +004383 +004384 +004385 +004386 +004387 +004388 +004389 +004390 +004391 +004392 +004393 +004394 +004395 +004396 +004397 +004398 +004399 +004400 +004401 +004402 +004403 +004404 +004405 +004406 +004407 +004408 +004409 +004410 +004411 +004412 +004413 +004414 +004415 +004416 +004417 +004418 +004419 +004420 +004421 +004422 +004423 +004424 +004425 +004426 +004427 +004428 +004429 +004430 +004431 +004432 +004433 +004434 +004435 +004436 +004437 +004438 +004439 +004440 +004441 +004442 +004443 +004444 +004445 +004446 +004447 +004448 +004449 +004450 +004451 +004452 +004453 +004454 +004455 +004456 +004457 +004458 +004459 +004460 +004461 +004462 +004463 +004464 +004465 +004466 +004467 +004468 +004469 +004470 +004471 +004472 +004473 +004474 +004475 +004476 +004477 +004478 +004479 +004480 +004481 +004482 +004483 +004484 +004485 +004486 +004487 +004488 +004489 +004490 +004491 +004492 +004493 +004494 +004495 +004496 +004497 +004498 +004499 +004500 +004501 +004502 +004503 +004504 +004505 +004506 +004507 +004508 +004509 +004510 +004511 +004512 +004513 +004514 +004515 +004516 +004517 +004518 +004519 +004520 +004521 +004522 +004523 +004524 +004525 +004526 +004527 +004528 +004529 +004530 +004531 +004532 +004533 +004534 +004535 +004536 +004537 +004538 +004539 +004540 +004541 +004542 +004543 +004544 +004545 +004546 +004547 +004548 +004549 +004550 +004551 +004552 +004553 
+004554 +004555 +004556 +004557 +004558 +004559 +004560 +004561 +004562 +004563 +004564 +004565 +004566 +004567 +004568 +004569 +004570 +004571 +004572 +004573 +004574 +004575 +004576 +004577 +004578 +004579 +004580 +004581 +004582 +004583 +004584 +004585 +004586 +004587 +004588 +004589 +004590 +004591 +004592 +004593 +004594 +004595 +004596 +004597 +004598 +004599 +004600 +004601 +004602 +004603 +004604 +004605 +004606 +004607 +004608 +004609 +004610 +004611 +004612 +004613 +004614 +004615 +004616 +004617 +004618 +004619 +004620 +004621 +004622 +004623 +004624 +004625 +004626 +004627 +004628 +004629 +004630 +004631 +004632 +004633 +004634 +004635 +004636 +004637 +004638 +004639 +004640 +004641 +004642 +004643 +004644 +004645 +004646 +004647 +004648 +004649 +004650 +004651 +004652 +004653 +004654 +004655 +004656 +004657 +004658 +004659 +004660 +004661 +004662 +004663 +004664 +004665 +004666 +004667 +004668 +004669 +004670 +004671 +004672 +004673 +004674 +004675 +004676 +004677 +004678 +004679 +004680 +004681 +004682 +004683 +004684 +004685 +004686 +004687 +004688 +004689 +004690 +004691 +004692 +004693 +004694 +004695 +004696 +004697 +004698 +004699 +004700 +004701 +004702 +004703 +004704 +004705 +004706 +004707 +004708 +004709 +004710 +004711 +004712 +004713 +004714 +004715 +004716 +004717 +004718 +004719 +004720 +004721 +004722 +004723 +004724 +004725 +004726 +004727 +004728 +004729 +004730 +004731 +004732 +004733 +004734 +004735 +004736 +004737 +004738 +004739 +004740 +004741 +004742 +004743 +004744 +004745 +004746 +004747 +004748 +004749 +004750 +004751 +004752 +004753 +004754 +004755 +004756 +004757 +004758 +004759 +004760 +004761 +004762 +004763 +004764 +004765 +004766 +004767 +004768 +004769 +004770 +004771 +004772 +004773 +004774 +004775 +004776 +004777 +004778 +004779 +004780 +004781 +004782 +004783 +004784 +004785 +004786 +004787 +004788 +004789 +004790 +004791 +004792 +004793 +004794 +004795 +004796 +004797 +004798 +004799 +004800 +004801 +004802 +004803 +004804 +004805 +004806 +004807 +004808 +004809 +004810 +004811 +004812 +004813 +004814 +004815 +004816 +004817 +004818 +004819 +004820 +004821 +004822 +004823 +004824 +004825 +004826 +004827 +004828 +004829 +004830 +004831 +004832 +004833 +004834 +004835 +004836 +004837 +004838 +004839 +004840 +004841 +004842 +004843 +004844 +004845 +004846 +004847 +004848 +004849 +004850 +004851 +004852 +004853 +004854 +004855 +004856 +004857 +004858 +004859 +004860 +004861 +004862 +004863 +004864 +004865 +004866 +004867 +004868 +004869 +004870 +004871 +004872 +004873 +004874 +004875 +004876 +004877 +004878 +004879 +004880 +004881 +004882 +004883 +004884 +004885 +004886 +004887 +004888 +004889 +004890 +004891 +004892 +004893 +004894 +004895 +004896 +004897 +004898 +004899 +004900 +004901 +004902 +004903 +004904 +004905 +004906 +004907 +004908 +004909 +004910 +004911 +004912 +004913 +004914 +004915 +004916 +004917 +004918 +004919 +004920 +004921 +004922 +004923 +004924 +004925 +004926 +004927 +004928 +004929 +004930 +004931 +004932 +004933 +004934 +004935 +004936 +004937 +004938 +004939 +004940 +004941 +004942 +004943 +004944 +004945 +004946 +004947 +004948 +004949 +004950 +004951 +004952 +004953 +004954 +004955 +004956 +004957 +004958 +004959 +004960 +004961 +004962 +004963 +004964 +004965 +004966 +004967 +004968 +004969 +004970 +004971 +004972 +004973 +004974 +004975 +004976 +004977 +004978 +004979 +004980 +004981 +004982 +004983 +004984 +004985 +004986 +004987 +004988 +004989 +004990 +004991 +004992 +004993 +004994 +004995 +004996 +004997 
+004998 +004999 +005000 +005001 +005002 +005003 +005004 +005005 +005006 +005007 +005008 +005009 +005010 +005011 +005012 +005013 +005014 +005015 +005016 +005017 +005018 +005019 +005020 +005021 +005022 +005023 +005024 +005025 +005026 +005027 +005028 +005029 +005030 +005031 +005032 +005033 +005034 +005035 +005036 +005037 +005038 +005039 +005040 +005041 +005042 +005043 +005044 +005045 +005046 +005047 +005048 +005049 +005050 +005051 +005052 +005053 +005054 +005055 +005056 +005057 +005058 +005059 +005060 +005061 +005062 +005063 +005064 +005065 +005066 +005067 +005068 +005069 +005070 +005071 +005072 +005073 +005074 +005075 +005076 +005077 +005078 +005079 +005080 +005081 +005082 +005083 +005084 +005085 +005086 +005087 +005088 +005089 +005090 +005091 +005092 +005093 +005094 +005095 +005096 +005097 +005098 +005099 +005100 +005101 +005102 +005103 +005104 +005105 +005106 +005107 +005108 +005109 +005110 +005111 +005112 +005113 +005114 +005115 +005116 +005117 +005118 +005119 +005120 +005121 +005122 +005123 +005124 +005125 +005126 +005127 +005128 +005129 +005130 +005131 +005132 +005133 +005134 +005135 +005136 +005137 +005138 +005139 +005140 +005141 +005142 +005143 +005144 +005145 +005146 +005147 +005148 +005149 +005150 +005151 +005152 +005153 +005154 +005155 +005156 +005157 +005158 +005159 +005160 +005161 +005162 +005163 +005164 +005165 +005166 +005167 +005168 +005169 +005170 +005171 +005172 +005173 +005174 +005175 +005176 +005177 +005178 +005179 +005180 +005181 +005182 +005183 +005184 +005185 +005186 +005187 +005188 +005189 +005190 +005191 +005192 +005193 +005194 +005195 +005196 +005197 +005198 +005199 +005200 +005201 +005202 +005203 +005204 +005205 +005206 +005207 +005208 +005209 +005210 +005211 +005212 +005213 +005214 +005215 +005216 +005217 +005218 +005219 +005220 +005221 +005222 +005223 +005224 +005225 +005226 +005227 +005228 +005229 +005230 +005231 +005232 +005233 +005234 +005235 +005236 +005237 +005238 +005239 +005240 +005241 +005242 +005243 +005244 +005245 +005246 +005247 +005248 +005249 +005250 +005251 +005252 +005253 +005254 +005255 +005256 +005257 +005258 +005259 +005260 +005261 +005262 +005263 +005264 +005265 +005266 +005267 +005268 +005269 +005270 +005271 +005272 +005273 +005274 +005275 +005276 +005277 +005278 +005279 +005280 +005281 +005282 +005283 +005284 +005285 +005286 +005287 +005288 +005289 +005290 +005291 +005292 +005293 +005294 +005295 +005296 +005297 +005298 +005299 +005300 +005301 +005302 +005303 +005304 +005305 +005306 +005307 +005308 +005309 +005310 +005311 +005312 +005313 +005314 +005315 +005316 +005317 +005318 +005319 +005320 +005321 +005322 +005323 +005324 +005325 +005326 +005327 +005328 +005329 +005330 +005331 +005332 +005333 +005334 +005335 +005336 +005337 +005338 +005339 +005340 +005341 +005342 +005343 +005344 +005345 +005346 +005347 +005348 +005349 +005350 +005351 +005352 +005353 +005354 +005355 +005356 +005357 +005358 +005359 +005360 +005361 +005362 +005363 +005364 +005365 +005366 +005367 +005368 +005369 +005370 +005371 +005372 +005373 +005374 +005375 +005376 +005377 +005378 +005379 +005380 +005381 +005382 +005383 +005384 +005385 +005386 +005387 +005388 +005389 +005390 +005391 +005392 +005393 +005394 +005395 +005396 +005397 +005398 +005399 +005400 +005401 +005402 +005403 +005404 +005405 +005406 +005407 +005408 +005409 +005410 +005411 +005412 +005413 +005414 +005415 +005416 +005417 +005418 +005419 +005420 +005421 +005422 +005423 +005424 +005425 +005426 +005427 +005428 +005429 +005430 +005431 +005432 +005433 +005434 +005435 +005436 +005437 +005438 +005439 +005440 +005441 
+005442 +005443 +005444 +005445 +005446 +005447 +005448 +005449 +005450 +005451 +005452 +005453 +005454 +005455 +005456 +005457 +005458 +005459 +005460 +005461 +005462 +005463 +005464 +005465 +005466 +005467 +005468 +005469 +005470 +005471 +005472 +005473 +005474 +005475 +005476 +005477 +005478 +005479 +005480 +005481 +005482 +005483 +005484 +005485 +005486 +005487 +005488 +005489 +005490 +005491 +005492 +005493 +005494 +005495 +005496 +005497 +005498 +005499 +005500 +005501 +005502 +005503 +005504 +005505 +005506 +005507 +005508 +005509 +005510 +005511 +005512 +005513 +005514 +005515 +005516 +005517 +005518 +005519 +005520 +005521 +005522 +005523 +005524 +005525 +005526 +005527 +005528 +005529 +005530 +005531 +005532 +005533 +005534 +005535 +005536 +005537 +005538 +005539 +005540 +005541 +005542 +005543 +005544 +005545 +005546 +005547 +005548 +005549 +005550 +005551 +005552 +005553 +005554 +005555 +005556 +005557 +005558 +005559 +005560 +005561 +005562 +005563 +005564 +005565 +005566 +005567 +005568 +005569 +005570 +005571 +005572 +005573 +005574 +005575 +005576 +005577 +005578 +005579 +005580 +005581 +005582 +005583 +005584 +005585 +005586 +005587 +005588 +005589 +005590 +005591 +005592 +005593 +005594 +005595 +005596 +005597 +005598 +005599 +005600 +005601 +005602 +005603 +005604 +005605 +005606 +005607 +005608 +005609 +005610 +005611 +005612 +005613 +005614 +005615 +005616 +005617 +005618 +005619 +005620 +005621 +005622 +005623 +005624 +005625 +005626 +005627 +005628 +005629 +005630 +005631 +005632 +005633 +005634 +005635 +005636 +005637 +005638 +005639 +005640 +005641 +005642 +005643 +005644 +005645 +005646 +005647 +005648 +005649 +005650 +005651 +005652 +005653 +005654 +005655 +005656 +005657 +005658 +005659 +005660 +005661 +005662 +005663 +005664 +005665 +005666 +005667 +005668 +005669 +005670 +005671 +005672 +005673 +005674 +005675 +005676 +005677 +005678 +005679 +005680 +005681 +005682 +005683 +005684 +005685 +005686 +005687 +005688 +005689 +005690 +005691 +005692 +005693 +005694 +005695 +005696 +005697 +005698 +005699 +005700 +005701 +005702 +005703 +005704 +005705 +005706 +005707 +005708 +005709 +005710 +005711 +005712 +005713 +005714 +005715 +005716 +005717 +005718 +005719 +005720 +005721 +005722 +005723 +005724 +005725 +005726 +005727 +005728 +005729 +005730 +005731 +005732 +005733 +005734 +005735 +005736 +005737 +005738 +005739 +005740 +005741 +005742 +005743 +005744 +005745 +005746 +005747 +005748 +005749 +005750 +005751 +005752 +005753 +005754 +005755 +005756 +005757 +005758 +005759 +005760 +005761 +005762 +005763 +005764 +005765 +005766 +005767 +005768 +005769 +005770 +005771 +005772 +005773 +005774 +005775 +005776 +005777 +005778 +005779 +005780 +005781 +005782 +005783 +005784 +005785 +005786 +005787 +005788 +005789 +005790 +005791 +005792 +005793 +005794 +005795 +005796 +005797 +005798 +005799 +005800 +005801 +005802 +005803 +005804 +005805 +005806 +005807 +005808 +005809 +005810 +005811 +005812 +005813 +005814 +005815 +005816 +005817 +005818 +005819 +005820 +005821 +005822 +005823 +005824 +005825 +005826 +005827 +005828 +005829 +005830 +005831 +005832 +005833 +005834 +005835 +005836 +005837 +005838 +005839 +005840 +005841 +005842 +005843 +005844 +005845 +005846 +005847 +005848 +005849 +005850 +005851 +005852 +005853 +005854 +005855 +005856 +005857 +005858 +005859 +005860 +005861 +005862 +005863 +005864 +005865 +005866 +005867 +005868 +005869 +005870 +005871 +005872 +005873 +005874 +005875 +005876 +005877 +005878 +005879 +005880 +005881 +005882 +005883 +005884 +005885 
+005886 +005887 +005888 +005889 +005890 +005891 +005892 +005893 +005894 +005895 +005896 +005897 +005898 +005899 +005900 +005901 +005902 +005903 +005904 +005905 +005906 +005907 +005908 +005909 +005910 +005911 +005912 +005913 +005914 +005915 +005916 +005917 +005918 +005919 +005920 +005921 +005922 +005923 +005924 +005925 +005926 +005927 +005928 +005929 +005930 +005931 +005932 +005933 +005934 +005935 +005936 +005937 +005938 +005939 +005940 +005941 +005942 +005943 +005944 +005945 +005946 +005947 +005948 +005949 +005950 +005951 +005952 +005953 +005954 +005955 +005956 +005957 +005958 +005959 +005960 +005961 +005962 +005963 +005964 +005965 +005966 +005967 +005968 +005969 +005970 +005971 +005972 +005973 +005974 +005975 +005976 +005977 +005978 +005979 +005980 +005981 +005982 +005983 +005984 +005985 +005986 +005987 +005988 +005989 +005990 +005991 +005992 +005993 +005994 +005995 +005996 +005997 +005998 +005999 +006000 +006001 +006002 +006003 +006004 +006005 +006006 +006007 +006008 +006009 +006010 +006011 +006012 +006013 +006014 +006015 +006016 +006017 +006018 +006019 +006020 +006021 +006022 +006023 +006024 +006025 +006026 +006027 +006028 +006029 +006030 +006031 +006032 +006033 +006034 +006035 +006036 +006037 +006038 +006039 +006040 +006041 +006042 +006043 +006044 +006045 +006046 +006047 +006048 +006049 +006050 +006051 +006052 +006053 +006054 +006055 +006056 +006057 +006058 +006059 +006060 +006061 +006062 +006063 +006064 +006065 +006066 +006067 +006068 +006069 +006070 +006071 +006072 +006073 +006074 +006075 +006076 +006077 +006078 +006079 +006080 +006081 +006082 +006083 +006084 +006085 +006086 +006087 +006088 +006089 +006090 +006091 +006092 +006093 +006094 +006095 +006096 +006097 +006098 +006099 +006100 +006101 +006102 +006103 +006104 +006105 +006106 +006107 +006108 +006109 +006110 +006111 +006112 +006113 +006114 +006115 +006116 +006117 +006118 +006119 +006120 +006121 +006122 +006123 +006124 +006125 +006126 +006127 +006128 +006129 +006130 +006131 +006132 +006133 +006134 +006135 +006136 +006137 +006138 +006139 +006140 +006141 +006142 +006143 +006144 +006145 +006146 +006147 +006148 +006149 +006150 +006151 +006152 +006153 +006154 +006155 +006156 +006157 +006158 +006159 +006160 +006161 +006162 +006163 +006164 +006165 +006166 +006167 +006168 +006169 +006170 +006171 +006172 +006173 +006174 +006175 +006176 +006177 +006178 +006179 +006180 +006181 +006182 +006183 +006184 +006185 +006186 +006187 +006188 +006189 +006190 +006191 +006192 +006193 +006194 +006195 +006196 +006197 +006198 +006199 +006200 +006201 +006202 +006203 +006204 +006205 +006206 +006207 +006208 +006209 +006210 +006211 +006212 +006213 +006214 +006215 +006216 +006217 +006218 +006219 +006220 +006221 +006222 +006223 +006224 +006225 +006226 +006227 +006228 +006229 +006230 +006231 +006232 +006233 +006234 +006235 +006236 +006237 +006238 +006239 +006240 +006241 +006242 +006243 +006244 +006245 +006246 +006247 +006248 +006249 +006250 +006251 +006252 +006253 +006254 +006255 +006256 +006257 +006258 +006259 +006260 +006261 +006262 +006263 +006264 +006265 +006266 +006267 +006268 +006269 +006270 +006271 +006272 +006273 +006274 +006275 +006276 +006277 +006278 +006279 +006280 +006281 +006282 +006283 +006284 +006285 +006286 +006287 +006288 +006289 +006290 +006291 +006292 +006293 +006294 +006295 +006296 +006297 +006298 +006299 +006300 +006301 +006302 +006303 +006304 +006305 +006306 +006307 +006308 +006309 +006310 +006311 +006312 +006313 +006314 +006315 +006316 +006317 +006318 +006319 +006320 +006321 +006322 +006323 +006324 +006325 +006326 +006327 +006328 +006329 
+006330 +006331 +006332 +006333 +006334 +006335 +006336 +006337 +006338 +006339 +006340 +006341 +006342 +006343 +006344 +006345 +006346 +006347 +006348 +006349 +006350 +006351 +006352 +006353 +006354 +006355 +006356 +006357 +006358 +006359 +006360 +006361 +006362 +006363 +006364 +006365 +006366 +006367 +006368 +006369 +006370 +006371 +006372 +006373 +006374 +006375 +006376 +006377 +006378 +006379 +006380 +006381 +006382 +006383 +006384 +006385 +006386 +006387 +006388 +006389 +006390 +006391 +006392 +006393 +006394 +006395 +006396 +006397 +006398 +006399 +006400 +006401 +006402 +006403 +006404 +006405 +006406 +006407 +006408 +006409 +006410 +006411 +006412 +006413 +006414 +006415 +006416 +006417 +006418 +006419 +006420 +006421 +006422 +006423 +006424 +006425 +006426 +006427 +006428 +006429 +006430 +006431 +006432 +006433 +006434 +006435 +006436 +006437 +006438 +006439 +006440 +006441 +006442 +006443 +006444 +006445 +006446 +006447 +006448 +006449 +006450 +006451 +006452 +006453 +006454 +006455 +006456 +006457 +006458 +006459 +006460 +006461 +006462 +006463 +006464 +006465 +006466 +006467 +006468 +006469 +006470 +006471 +006472 +006473 +006474 +006475 +006476 +006477 +006478 +006479 +006480 +006481 +006482 +006483 +006484 +006485 +006486 +006487 +006488 +006489 +006490 +006491 +006492 +006493 +006494 +006495 +006496 +006497 +006498 +006499 +006500 +006501 +006502 +006503 +006504 +006505 +006506 +006507 +006508 +006509 +006510 +006511 +006512 +006513 +006514 +006515 +006516 +006517 +006518 +006519 +006520 +006521 +006522 +006523 +006524 +006525 +006526 +006527 +006528 +006529 +006530 +006531 +006532 +006533 +006534 +006535 +006536 +006537 +006538 +006539 +006540 +006541 +006542 +006543 +006544 +006545 +006546 +006547 +006548 +006549 +006550 +006551 +006552 +006553 +006554 +006555 +006556 +006557 +006558 +006559 +006560 +006561 +006562 +006563 +006564 +006565 +006566 +006567 +006568 +006569 +006570 +006571 +006572 +006573 +006574 +006575 +006576 +006577 +006578 +006579 +006580 +006581 +006582 +006583 +006584 +006585 +006586 +006587 +006588 +006589 +006590 +006591 +006592 +006593 +006594 +006595 +006596 +006597 +006598 +006599 +006600 +006601 +006602 +006603 +006604 +006605 +006606 +006607 +006608 +006609 +006610 +006611 +006612 +006613 +006614 +006615 +006616 +006617 +006618 +006619 +006620 +006621 +006622 +006623 +006624 +006625 +006626 +006627 +006628 +006629 +006630 +006631 +006632 +006633 +006634 +006635 +006636 +006637 +006638 +006639 +006640 +006641 +006642 +006643 +006644 +006645 +006646 +006647 +006648 +006649 +006650 +006651 +006652 +006653 +006654 +006655 +006656 +006657 +006658 +006659 +006660 +006661 +006662 +006663 +006664 +006665 +006666 +006667 +006668 +006669 +006670 +006671 +006672 +006673 +006674 +006675 +006676 +006677 +006678 +006679 +006680 +006681 +006682 +006683 +006684 +006685 +006686 +006687 +006688 +006689 +006690 +006691 +006692 +006693 +006694 +006695 +006696 +006697 +006698 +006699 +006700 +006701 +006702 +006703 +006704 +006705 +006706 +006707 +006708 +006709 +006710 +006711 +006712 +006713 +006714 +006715 +006716 +006717 +006718 +006719 +006720 +006721 +006722 +006723 +006724 +006725 +006726 +006727 +006728 +006729 +006730 +006731 +006732 +006733 +006734 +006735 +006736 +006737 +006738 +006739 +006740 +006741 +006742 +006743 +006744 +006745 +006746 +006747 +006748 +006749 +006750 +006751 +006752 +006753 +006754 +006755 +006756 +006757 +006758 +006759 +006760 +006761 +006762 +006763 +006764 +006765 +006766 +006767 +006768 +006769 +006770 +006771 +006772 +006773 
+006774 +006775 +006776 +006777 +006778 +006779 +006780 +006781 +006782 +006783 +006784 +006785 +006786 +006787 +006788 +006789 +006790 +006791 +006792 +006793 +006794 +006795 +006796 +006797 +006798 +006799 +006800 +006801 +006802 +006803 +006804 +006805 +006806 +006807 +006808 +006809 +006810 +006811 +006812 +006813 +006814 +006815 +006816 +006817 +006818 +006819 +006820 +006821 +006822 +006823 +006824 +006825 +006826 +006827 +006828 +006829 +006830 +006831 +006832 +006833 +006834 +006835 +006836 +006837 +006838 +006839 +006840 +006841 +006842 +006843 +006844 +006845 +006846 +006847 +006848 +006849 +006850 +006851 +006852 +006853 +006854 +006855 +006856 +006857 +006858 +006859 +006860 +006861 +006862 +006863 +006864 +006865 +006866 +006867 +006868 +006869 +006870 +006871 +006872 +006873 +006874 +006875 +006876 +006877 +006878 +006879 +006880 +006881 +006882 +006883 +006884 +006885 +006886 +006887 +006888 +006889 +006890 +006891 +006892 +006893 +006894 +006895 +006896 +006897 +006898 +006899 +006900 +006901 +006902 +006903 +006904 +006905 +006906 +006907 +006908 +006909 +006910 +006911 +006912 +006913 +006914 +006915 +006916 +006917 +006918 +006919 +006920 +006921 +006922 +006923 +006924 +006925 +006926 +006927 +006928 +006929 +006930 +006931 +006932 +006933 +006934 +006935 +006936 +006937 +006938 +006939 +006940 +006941 +006942 +006943 +006944 +006945 +006946 +006947 +006948 +006949 +006950 +006951 +006952 +006953 +006954 +006955 +006956 +006957 +006958 +006959 +006960 +006961 +006962 +006963 +006964 +006965 +006966 +006967 +006968 +006969 +006970 +006971 +006972 +006973 +006974 +006975 +006976 +006977 +006978 +006979 +006980 +006981 +006982 +006983 +006984 +006985 +006986 +006987 +006988 +006989 +006990 +006991 +006992 +006993 +006994 +006995 +006996 +006997 +006998 +006999 +007000 +007001 +007002 +007003 +007004 +007005 +007006 +007007 +007008 +007009 +007010 +007011 +007012 +007013 +007014 +007015 +007016 +007017 +007018 +007019 +007020 +007021 +007022 +007023 +007024 +007025 +007026 +007027 +007028 +007029 +007030 +007031 +007032 +007033 +007034 +007035 +007036 +007037 +007038 +007039 +007040 +007041 +007042 +007043 +007044 +007045 +007046 +007047 +007048 +007049 +007050 +007051 +007052 +007053 +007054 +007055 +007056 +007057 +007058 +007059 +007060 +007061 +007062 +007063 +007064 +007065 +007066 +007067 +007068 +007069 +007070 +007071 +007072 +007073 +007074 +007075 +007076 +007077 +007078 +007079 +007080 +007081 +007082 +007083 +007084 +007085 +007086 +007087 +007088 +007089 +007090 +007091 +007092 +007093 +007094 +007095 +007096 +007097 +007098 +007099 +007100 +007101 +007102 +007103 +007104 +007105 +007106 +007107 +007108 +007109 +007110 +007111 +007112 +007113 +007114 +007115 +007116 +007117 +007118 +007119 +007120 +007121 +007122 +007123 +007124 +007125 +007126 +007127 +007128 +007129 +007130 +007131 +007132 +007133 +007134 +007135 +007136 +007137 +007138 +007139 +007140 +007141 +007142 +007143 +007144 +007145 +007146 +007147 +007148 +007149 +007150 +007151 +007152 +007153 +007154 +007155 +007156 +007157 +007158 +007159 +007160 +007161 +007162 +007163 +007164 +007165 +007166 +007167 +007168 +007169 +007170 +007171 +007172 +007173 +007174 +007175 +007176 +007177 +007178 +007179 +007180 +007181 +007182 +007183 +007184 +007185 +007186 +007187 +007188 +007189 +007190 +007191 +007192 +007193 +007194 +007195 +007196 +007197 +007198 +007199 +007200 +007201 +007202 +007203 +007204 +007205 +007206 +007207 +007208 +007209 +007210 +007211 +007212 +007213 +007214 +007215 +007216 +007217 
+007218 +007219 +007220 +007221 +007222 +007223 +007224 +007225 +007226 +007227 +007228 +007229 +007230 +007231 +007232 +007233 +007234 +007235 +007236 +007237 +007238 +007239 +007240 +007241 +007242 +007243 +007244 +007245 +007246 +007247 +007248 +007249 +007250 +007251 +007252 +007253 +007254 +007255 +007256 +007257 +007258 +007259 +007260 +007261 +007262 +007263 +007264 +007265 +007266 +007267 +007268 +007269 +007270 +007271 +007272 +007273 +007274 +007275 +007276 +007277 +007278 +007279 +007280 +007281 +007282 +007283 +007284 +007285 +007286 +007287 +007288 +007289 +007290 +007291 +007292 +007293 +007294 +007295 +007296 +007297 +007298 +007299 +007300 +007301 +007302 +007303 +007304 +007305 +007306 +007307 +007308 +007309 +007310 +007311 +007312 +007313 +007314 +007315 +007316 +007317 +007318 +007319 +007320 +007321 +007322 +007323 +007324 +007325 +007326 +007327 +007328 +007329 +007330 +007331 +007332 +007333 +007334 +007335 +007336 +007337 +007338 +007339 +007340 +007341 +007342 +007343 +007344 +007345 +007346 +007347 +007348 +007349 +007350 +007351 +007352 +007353 +007354 +007355 +007356 +007357 +007358 +007359 +007360 +007361 +007362 +007363 +007364 +007365 +007366 +007367 +007368 +007369 +007370 +007371 +007372 +007373 +007374 +007375 +007376 +007377 +007378 +007379 +007380 +007381 +007382 +007383 +007384 +007385 +007386 +007387 +007388 +007389 +007390 +007391 +007392 +007393 +007394 +007395 +007396 +007397 +007398 +007399 +007400 +007401 +007402 +007403 +007404 +007405 +007406 +007407 +007408 +007409 +007410 +007411 +007412 +007413 +007414 +007415 +007416 +007417 +007418 +007419 +007420 +007421 +007422 +007423 +007424 +007425 +007426 +007427 +007428 +007429 +007430 +007431 +007432 +007433 +007434 +007435 +007436 +007437 +007438 +007439 +007440 +007441 +007442 +007443 +007444 +007445 +007446 +007447 +007448 +007449 +007450 +007451 +007452 +007453 +007454 +007455 +007456 +007457 +007458 +007459 +007460 +007461 +007462 +007463 +007464 +007465 +007466 +007467 +007468 +007469 +007470 +007471 +007472 +007473 +007474 +007475 +007476 +007477 +007478 +007479 +007480 \ No newline at end of file diff --git a/pointnet2_lib/tools/data/KITTI/ImageSets/val.txt b/pointnet2_lib/tools/data/KITTI/ImageSets/val.txt new file mode 100644 index 0000000..258ca11 --- /dev/null +++ b/pointnet2_lib/tools/data/KITTI/ImageSets/val.txt @@ -0,0 +1,3769 @@ +000001 +000002 +000004 +000005 +000006 +000008 +000015 +000019 +000020 +000021 +000023 +000024 +000025 +000027 +000028 +000031 +000033 +000035 +000037 +000039 +000040 +000042 +000047 +000048 +000050 +000052 +000053 +000058 +000059 +000061 +000062 +000063 +000065 +000066 +000076 +000077 +000078 +000081 +000089 +000090 +000093 +000094 +000098 +000102 +000104 +000106 +000107 +000108 +000116 +000117 +000118 +000122 +000124 +000126 +000128 +000132 +000134 +000135 +000137 +000139 +000140 +000143 +000147 +000151 +000152 +000153 +000156 +000159 +000161 +000167 +000168 +000169 +000170 +000173 +000174 +000175 +000181 +000182 +000183 +000186 +000187 +000188 +000190 +000191 +000192 +000194 +000195 +000196 +000197 +000199 +000201 +000203 +000204 +000207 +000211 +000212 +000213 +000216 +000218 +000223 +000224 +000226 +000229 +000230 +000231 +000234 +000235 +000236 +000237 +000239 +000242 +000246 +000247 +000248 +000249 +000250 +000251 +000252 +000260 +000262 +000263 +000265 +000266 +000268 +000269 +000270 +000272 +000273 +000278 +000279 +000281 +000283 +000284 +000289 +000290 +000291 +000293 +000297 +000301 +000302 +000305 +000307 +000308 +000309 +000311 +000312 +000314 
+000315 +000319 +000320 +000321 +000323 +000324 +000327 +000328 +000329 +000332 +000333 +000335 +000336 +000340 +000341 +000343 +000345 +000346 +000347 +000350 +000351 +000352 +000354 +000355 +000356 +000357 +000359 +000360 +000361 +000362 +000365 +000366 +000369 +000370 +000372 +000373 +000376 +000377 +000378 +000379 +000381 +000382 +000383 +000385 +000386 +000388 +000391 +000392 +000393 +000394 +000395 +000396 +000397 +000398 +000399 +000401 +000402 +000403 +000404 +000407 +000408 +000409 +000413 +000414 +000415 +000419 +000420 +000422 +000427 +000428 +000429 +000430 +000436 +000437 +000440 +000443 +000446 +000448 +000450 +000451 +000452 +000453 +000454 +000455 +000457 +000459 +000463 +000468 +000469 +000472 +000473 +000475 +000476 +000477 +000478 +000479 +000480 +000481 +000485 +000486 +000489 +000491 +000492 +000493 +000494 +000495 +000496 +000498 +000499 +000503 +000504 +000506 +000508 +000509 +000510 +000512 +000515 +000517 +000519 +000521 +000524 +000527 +000528 +000530 +000533 +000536 +000541 +000542 +000543 +000545 +000546 +000548 +000551 +000554 +000555 +000558 +000559 +000560 +000561 +000564 +000566 +000567 +000568 +000569 +000571 +000572 +000581 +000583 +000588 +000589 +000590 +000591 +000595 +000600 +000601 +000604 +000610 +000611 +000612 +000613 +000614 +000615 +000618 +000619 +000620 +000624 +000625 +000626 +000628 +000630 +000634 +000635 +000636 +000639 +000642 +000644 +000645 +000647 +000648 +000650 +000655 +000657 +000658 +000659 +000660 +000667 +000669 +000670 +000674 +000677 +000679 +000682 +000683 +000684 +000691 +000692 +000694 +000696 +000698 +000699 +000700 +000702 +000704 +000706 +000708 +000716 +000717 +000718 +000721 +000722 +000725 +000727 +000728 +000729 +000731 +000734 +000736 +000737 +000740 +000741 +000745 +000746 +000748 +000750 +000751 +000752 +000754 +000756 +000761 +000765 +000766 +000767 +000768 +000769 +000771 +000772 +000773 +000774 +000778 +000779 +000782 +000790 +000792 +000795 +000798 +000800 +000801 +000802 +000803 +000804 +000805 +000806 +000807 +000809 +000810 +000811 +000812 +000816 +000819 +000823 +000826 +000831 +000837 +000838 +000840 +000841 +000843 +000844 +000847 +000848 +000849 +000850 +000852 +000854 +000859 +000862 +000863 +000869 +000873 +000874 +000875 +000876 +000877 +000878 +000879 +000881 +000884 +000885 +000889 +000893 +000894 +000897 +000899 +000904 +000907 +000909 +000911 +000912 +000915 +000916 +000917 +000920 +000922 +000923 +000926 +000928 +000930 +000931 +000932 +000938 +000939 +000940 +000942 +000943 +000944 +000948 +000949 +000952 +000953 +000956 +000958 +000961 +000963 +000964 +000966 +000967 +000969 +000970 +000971 +000973 +000974 +000976 +000979 +000981 +000983 +000984 +000985 +000986 +000988 +000991 +000999 +001002 +001006 +001007 +001008 +001010 +001011 +001012 +001013 +001014 +001015 +001018 +001019 +001021 +001022 +001025 +001026 +001027 +001035 +001037 +001039 +001042 +001043 +001046 +001050 +001051 +001053 +001054 +001055 +001058 +001063 +001065 +001066 +001067 +001068 +001069 +001070 +001071 +001075 +001076 +001077 +001078 +001083 +001084 +001086 +001088 +001089 +001094 +001095 +001096 +001097 +001099 +001101 +001102 +001104 +001106 +001107 +001108 +001111 +001113 +001114 +001115 +001116 +001118 +001120 +001123 +001125 +001127 +001129 +001131 +001132 +001133 +001134 +001135 +001136 +001138 +001139 +001140 +001141 +001143 +001144 +001145 +001147 +001148 +001149 +001150 +001152 +001153 +001154 +001155 +001158 +001162 +001163 +001167 +001172 +001173 +001176 +001177 +001178 +001179 +001180 +001182 +001183 +001187 
+001188 +001189 +001191 +001192 +001193 +001194 +001195 +001198 +001199 +001203 +001206 +001207 +001213 +001214 +001216 +001217 +001218 +001221 +001222 +001224 +001225 +001226 +001228 +001230 +001232 +001234 +001235 +001236 +001237 +001239 +001241 +001242 +001243 +001244 +001245 +001246 +001249 +001251 +001252 +001253 +001254 +001255 +001257 +001259 +001260 +001261 +001263 +001265 +001266 +001267 +001268 +001269 +001270 +001271 +001272 +001273 +001274 +001275 +001281 +001284 +001286 +001287 +001289 +001291 +001292 +001294 +001295 +001296 +001303 +001304 +001305 +001306 +001307 +001308 +001314 +001317 +001318 +001329 +001330 +001331 +001332 +001333 +001334 +001336 +001337 +001339 +001342 +001344 +001345 +001346 +001347 +001350 +001352 +001353 +001355 +001356 +001359 +001363 +001365 +001372 +001374 +001375 +001376 +001377 +001380 +001381 +001382 +001384 +001386 +001387 +001388 +001389 +001391 +001395 +001397 +001398 +001407 +001410 +001411 +001412 +001415 +001416 +001419 +001421 +001424 +001427 +001431 +001432 +001435 +001437 +001438 +001439 +001441 +001442 +001443 +001445 +001446 +001448 +001450 +001451 +001458 +001461 +001463 +001466 +001469 +001471 +001477 +001478 +001480 +001481 +001485 +001487 +001488 +001489 +001495 +001497 +001501 +001502 +001507 +001508 +001511 +001513 +001514 +001516 +001517 +001521 +001522 +001524 +001525 +001526 +001527 +001528 +001533 +001535 +001536 +001537 +001538 +001542 +001545 +001546 +001547 +001549 +001552 +001555 +001557 +001560 +001562 +001564 +001565 +001567 +001569 +001573 +001574 +001576 +001577 +001579 +001582 +001583 +001585 +001586 +001587 +001588 +001589 +001590 +001591 +001592 +001594 +001596 +001597 +001600 +001602 +001603 +001605 +001606 +001610 +001613 +001615 +001616 +001617 +001619 +001621 +001625 +001627 +001629 +001631 +001633 +001634 +001635 +001640 +001643 +001645 +001647 +001650 +001654 +001656 +001658 +001660 +001662 +001664 +001665 +001666 +001667 +001670 +001675 +001680 +001682 +001683 +001684 +001689 +001693 +001694 +001697 +001699 +001701 +001702 +001704 +001705 +001706 +001707 +001709 +001710 +001711 +001712 +001713 +001714 +001717 +001718 +001719 +001721 +001722 +001726 +001727 +001729 +001732 +001733 +001740 +001741 +001742 +001745 +001746 +001749 +001750 +001751 +001752 +001755 +001758 +001762 +001764 +001765 +001768 +001771 +001772 +001774 +001776 +001778 +001780 +001781 +001782 +001783 +001786 +001787 +001794 +001795 +001797 +001800 +001801 +001802 +001804 +001807 +001808 +001813 +001814 +001817 +001818 +001820 +001822 +001823 +001824 +001825 +001828 +001831 +001835 +001840 +001844 +001846 +001848 +001851 +001852 +001853 +001854 +001855 +001856 +001858 +001859 +001861 +001862 +001863 +001867 +001868 +001869 +001872 +001875 +001877 +001878 +001880 +001881 +001884 +001885 +001886 +001887 +001888 +001890 +001892 +001893 +001897 +001898 +001900 +001904 +001905 +001909 +001919 +001920 +001923 +001924 +001925 +001926 +001927 +001928 +001929 +001931 +001932 +001933 +001934 +001936 +001937 +001940 +001941 +001942 +001943 +001945 +001946 +001952 +001954 +001959 +001960 +001966 +001967 +001969 +001972 +001977 +001978 +001979 +001980 +001982 +001983 +001984 +001985 +001986 +001989 +001991 +001995 +001996 +001997 +001999 +002000 +002001 +002002 +002004 +002008 +002010 +002011 +002012 +002013 +002014 +002017 +002019 +002021 +002022 +002025 +002027 +002028 +002029 +002034 +002035 +002036 +002037 +002038 +002042 +002043 +002044 +002045 +002046 +002048 +002049 +002050 +002052 +002054 +002056 +002057 +002058 +002062 +002068 +002071 +002073 
+002074 +002075 +002076 +002078 +002079 +002081 +002082 +002085 +002086 +002087 +002089 +002091 +002093 +002094 +002100 +002101 +002102 +002103 +002107 +002108 +002111 +002112 +002113 +002115 +002118 +002120 +002121 +002123 +002124 +002127 +002128 +002130 +002131 +002135 +002136 +002137 +002138 +002139 +002140 +002142 +002151 +002152 +002153 +002158 +002159 +002160 +002161 +002163 +002165 +002166 +002168 +002169 +002170 +002173 +002177 +002179 +002182 +002183 +002185 +002187 +002188 +002193 +002196 +002200 +002201 +002202 +002206 +002207 +002209 +002215 +002216 +002218 +002219 +002220 +002224 +002225 +002228 +002229 +002232 +002233 +002234 +002239 +002243 +002245 +002246 +002248 +002250 +002251 +002254 +002255 +002257 +002258 +002260 +002262 +002266 +002272 +002276 +002277 +002279 +002280 +002282 +002283 +002284 +002286 +002287 +002290 +002291 +002292 +002293 +002294 +002295 +002298 +002299 +002300 +002303 +002304 +002306 +002307 +002308 +002310 +002314 +002315 +002319 +002320 +002325 +002327 +002329 +002330 +002332 +002334 +002336 +002337 +002338 +002340 +002341 +002344 +002345 +002346 +002347 +002348 +002353 +002356 +002357 +002359 +002362 +002365 +002366 +002367 +002369 +002370 +002372 +002376 +002378 +002380 +002382 +002383 +002384 +002385 +002386 +002387 +002391 +002392 +002393 +002397 +002398 +002399 +002404 +002405 +002411 +002414 +002415 +002418 +002419 +002420 +002422 +002423 +002424 +002425 +002428 +002429 +002432 +002433 +002434 +002439 +002440 +002442 +002446 +002450 +002454 +002455 +002457 +002458 +002460 +002461 +002462 +002463 +002473 +002474 +002476 +002477 +002478 +002479 +002483 +002486 +002488 +002490 +002492 +002495 +002497 +002499 +002500 +002502 +002503 +002504 +002505 +002506 +002509 +002511 +002516 +002519 +002520 +002521 +002525 +002526 +002528 +002529 +002530 +002531 +002532 +002534 +002538 +002539 +002540 +002541 +002543 +002546 +002548 +002552 +002556 +002557 +002558 +002562 +002563 +002564 +002565 +002568 +002569 +002570 +002572 +002574 +002575 +002577 +002580 +002581 +002583 +002584 +002585 +002586 +002590 +002594 +002598 +002599 +002600 +002601 +002602 +002603 +002604 +002606 +002612 +002613 +002615 +002619 +002621 +002625 +002626 +002628 +002630 +002631 +002633 +002635 +002636 +002638 +002640 +002641 +002644 +002645 +002646 +002651 +002653 +002656 +002657 +002661 +002663 +002666 +002669 +002673 +002674 +002675 +002677 +002680 +002681 +002685 +002686 +002690 +002692 +002693 +002694 +002695 +002696 +002699 +002702 +002706 +002707 +002709 +002710 +002711 +002712 +002713 +002715 +002717 +002720 +002721 +002722 +002724 +002725 +002726 +002727 +002728 +002729 +002730 +002735 +002737 +002740 +002742 +002744 +002745 +002746 +002747 +002748 +002749 +002752 +002753 +002755 +002757 +002758 +002760 +002761 +002763 +002764 +002765 +002767 +002772 +002773 +002775 +002783 +002786 +002787 +002789 +002793 +002794 +002796 +002797 +002800 +002801 +002804 +002805 +002806 +002809 +002810 +002811 +002812 +002814 +002815 +002818 +002820 +002826 +002827 +002828 +002830 +002831 +002833 +002836 +002839 +002840 +002841 +002844 +002845 +002846 +002847 +002848 +002853 +002856 +002858 +002861 +002863 +002866 +002867 +002875 +002876 +002877 +002878 +002879 +002880 +002881 +002883 +002885 +002889 +002890 +002891 +002892 +002893 +002894 +002895 +002896 +002900 +002901 +002902 +002903 +002905 +002908 +002911 +002914 +002916 +002917 +002919 +002924 +002925 +002928 +002930 +002934 +002935 +002937 +002942 +002944 +002945 +002947 +002948 +002951 +002953 +002955 +002957 +002958 +002959 +002960 
+002961 +002962 +002963 +002964 +002966 +002971 +002974 +002976 +002977 +002978 +002979 +002982 +002984 +002985 +002988 +002991 +002993 +002994 +002995 +002997 +002999 +003000 +003001 +003003 +003004 +003005 +003006 +003007 +003010 +003011 +003019 +003022 +003024 +003025 +003027 +003029 +003030 +003031 +003032 +003033 +003034 +003035 +003038 +003042 +003043 +003046 +003047 +003048 +003050 +003052 +003053 +003054 +003055 +003056 +003058 +003061 +003062 +003065 +003066 +003067 +003071 +003073 +003074 +003076 +003080 +003082 +003087 +003088 +003090 +003094 +003096 +003099 +003101 +003102 +003103 +003106 +003107 +003109 +003110 +003112 +003114 +003116 +003118 +003124 +003126 +003127 +003129 +003131 +003133 +003134 +003135 +003136 +003137 +003141 +003142 +003144 +003145 +003146 +003148 +003150 +003153 +003156 +003159 +003161 +003162 +003165 +003167 +003170 +003172 +003174 +003175 +003177 +003179 +003180 +003181 +003182 +003183 +003187 +003190 +003192 +003194 +003197 +003199 +003202 +003203 +003204 +003207 +003210 +003211 +003214 +003216 +003217 +003219 +003221 +003222 +003224 +003225 +003226 +003228 +003229 +003231 +003232 +003233 +003236 +003239 +003240 +003242 +003247 +003250 +003251 +003252 +003254 +003255 +003257 +003259 +003265 +003266 +003269 +003272 +003275 +003276 +003280 +003281 +003283 +003288 +003292 +003295 +003296 +003298 +003300 +003301 +003302 +003304 +003305 +003306 +003308 +003310 +003312 +003313 +003315 +003316 +003318 +003319 +003322 +003323 +003324 +003325 +003330 +003331 +003337 +003338 +003341 +003343 +003346 +003347 +003350 +003351 +003352 +003353 +003355 +003357 +003358 +003364 +003365 +003366 +003367 +003368 +003370 +003373 +003375 +003379 +003385 +003386 +003393 +003394 +003395 +003396 +003397 +003399 +003401 +003402 +003403 +003404 +003405 +003406 +003407 +003408 +003409 +003410 +003411 +003412 +003417 +003419 +003421 +003422 +003425 +003426 +003428 +003429 +003430 +003432 +003434 +003435 +003443 +003447 +003448 +003449 +003450 +003453 +003456 +003461 +003464 +003465 +003466 +003467 +003469 +003470 +003471 +003474 +003478 +003480 +003481 +003482 +003483 +003484 +003487 +003488 +003489 +003490 +003491 +003492 +003495 +003496 +003497 +003502 +003503 +003504 +003506 +003511 +003515 +003517 +003519 +003520 +003521 +003524 +003527 +003528 +003529 +003530 +003531 +003535 +003539 +003543 +003544 +003547 +003550 +003552 +003553 +003554 +003557 +003558 +003559 +003562 +003563 +003568 +003571 +003573 +003574 +003580 +003582 +003583 +003584 +003588 +003600 +003601 +003604 +003605 +003607 +003608 +003609 +003611 +003614 +003616 +003618 +003620 +003621 +003622 +003623 +003624 +003627 +003629 +003630 +003631 +003632 +003633 +003634 +003635 +003643 +003645 +003647 +003649 +003652 +003653 +003655 +003658 +003659 +003661 +003662 +003667 +003668 +003669 +003671 +003676 +003677 +003678 +003679 +003682 +003683 +003684 +003688 +003689 +003690 +003691 +003692 +003702 +003703 +003705 +003707 +003708 +003711 +003712 +003715 +003716 +003718 +003719 +003723 +003726 +003728 +003735 +003736 +003737 +003738 +003739 +003746 +003747 +003748 +003750 +003751 +003753 +003755 +003756 +003762 +003763 +003764 +003769 +003771 +003775 +003777 +003778 +003779 +003781 +003782 +003787 +003788 +003793 +003794 +003798 +003800 +003802 +003804 +003805 +003807 +003808 +003809 +003811 +003812 +003814 +003820 +003822 +003826 +003827 +003828 +003830 +003834 +003835 +003837 +003841 +003847 +003852 +003854 +003856 +003859 +003860 +003864 +003866 +003869 +003870 +003872 +003873 +003874 +003878 +003879 +003880 +003881 
+003883 +003885 +003886 +003890 +003891 +003892 +003894 +003897 +003898 +003899 +003901 +003902 +003905 +003907 +003909 +003914 +003915 +003916 +003920 +003923 +003924 +003926 +003931 +003932 +003934 +003937 +003938 +003943 +003945 +003946 +003948 +003950 +003956 +003958 +003961 +003962 +003964 +003965 +003969 +003970 +003972 +003975 +003977 +003980 +003981 +003982 +003984 +003986 +003992 +003996 +003998 +004000 +004001 +004002 +004003 +004004 +004007 +004008 +004009 +004010 +004011 +004016 +004021 +004026 +004027 +004028 +004032 +004033 +004034 +004036 +004038 +004040 +004041 +004042 +004045 +004048 +004049 +004051 +004055 +004059 +004061 +004063 +004064 +004065 +004068 +004072 +004074 +004077 +004079 +004081 +004082 +004083 +004085 +004087 +004089 +004091 +004092 +004095 +004096 +004098 +004100 +004101 +004104 +004105 +004107 +004108 +004109 +004110 +004111 +004113 +004116 +004117 +004118 +004119 +004120 +004121 +004122 +004124 +004125 +004126 +004128 +004129 +004130 +004131 +004132 +004136 +004137 +004138 +004140 +004142 +004143 +004148 +004149 +004150 +004152 +004153 +004154 +004155 +004156 +004157 +004158 +004160 +004161 +004162 +004163 +004164 +004168 +004171 +004172 +004173 +004174 +004175 +004185 +004187 +004188 +004189 +004190 +004191 +004195 +004196 +004202 +004205 +004206 +004207 +004209 +004210 +004213 +004214 +004215 +004220 +004221 +004222 +004223 +004224 +004226 +004228 +004232 +004237 +004239 +004241 +004242 +004243 +004246 +004248 +004249 +004250 +004251 +004254 +004255 +004256 +004259 +004260 +004263 +004270 +004271 +004275 +004277 +004278 +004280 +004281 +004282 +004284 +004285 +004288 +004289 +004290 +004291 +004293 +004294 +004295 +004298 +004299 +004300 +004301 +004303 +004305 +004306 +004307 +004309 +004311 +004312 +004314 +004318 +004319 +004321 +004323 +004324 +004326 +004327 +004329 +004330 +004335 +004336 +004337 +004338 +004340 +004342 +004343 +004345 +004348 +004349 +004350 +004352 +004353 +004360 +004362 +004363 +004364 +004367 +004368 +004369 +004370 +004373 +004374 +004377 +004383 +004384 +004385 +004388 +004391 +004392 +004393 +004396 +004397 +004398 +004401 +004402 +004403 +004404 +004406 +004407 +004414 +004415 +004418 +004419 +004420 +004421 +004422 +004423 +004424 +004425 +004426 +004429 +004430 +004433 +004434 +004435 +004437 +004438 +004439 +004440 +004443 +004444 +004447 +004450 +004452 +004454 +004456 +004458 +004460 +004462 +004465 +004469 +004470 +004472 +004474 +004475 +004480 +004481 +004482 +004483 +004485 +004486 +004487 +004489 +004490 +004491 +004493 +004494 +004496 +004501 +004502 +004508 +004511 +004513 +004516 +004517 +004519 +004520 +004521 +004526 +004527 +004528 +004529 +004530 +004531 +004532 +004534 +004540 +004541 +004542 +004547 +004548 +004549 +004551 +004553 +004556 +004557 +004562 +004566 +004567 +004568 +004569 +004570 +004573 +004574 +004576 +004578 +004581 +004582 +004585 +004587 +004588 +004589 +004591 +004596 +004598 +004599 +004603 +004608 +004609 +004610 +004611 +004612 +004615 +004618 +004620 +004622 +004624 +004626 +004629 +004630 +004632 +004633 +004634 +004636 +004638 +004640 +004644 +004647 +004648 +004649 +004650 +004651 +004652 +004655 +004657 +004658 +004660 +004665 +004666 +004667 +004668 +004669 +004672 +004673 +004679 +004680 +004682 +004683 +004685 +004686 +004687 +004688 +004689 +004691 +004692 +004693 +004694 +004695 +004697 +004698 +004699 +004700 +004705 +004706 +004708 +004709 +004710 +004711 +004713 +004714 +004715 +004716 +004717 +004718 +004720 +004721 +004722 +004724 +004725 +004726 +004730 +004732 
+004734 +004735 +004737 +004738 +004739 +004740 +004742 +004743 +004744 +004745 +004746 +004748 +004752 +004753 +004756 +004759 +004762 +004763 +004764 +004766 +004768 +004769 +004770 +004773 +004776 +004777 +004782 +004783 +004787 +004788 +004790 +004791 +004792 +004797 +004799 +004800 +004804 +004806 +004807 +004810 +004811 +004813 +004814 +004815 +004816 +004817 +004821 +004822 +004825 +004829 +004830 +004831 +004832 +004835 +004839 +004843 +004846 +004848 +004849 +004850 +004851 +004852 +004858 +004859 +004860 +004861 +004862 +004863 +004864 +004867 +004868 +004871 +004873 +004874 +004875 +004881 +004885 +004887 +004888 +004891 +004892 +004893 +004895 +004896 +004898 +004902 +004903 +004904 +004905 +004907 +004909 +004914 +004917 +004918 +004920 +004921 +004924 +004926 +004927 +004928 +004929 +004931 +004932 +004934 +004935 +004938 +004941 +004942 +004943 +004944 +004946 +004947 +004948 +004949 +004953 +004954 +004956 +004958 +004959 +004960 +004962 +004963 +004966 +004974 +004976 +004979 +004981 +004983 +004985 +004986 +004988 +004989 +004990 +004993 +004994 +004995 +004996 +004998 +004999 +005001 +005002 +005004 +005008 +005010 +005013 +005014 +005015 +005017 +005019 +005021 +005024 +005026 +005028 +005032 +005034 +005036 +005037 +005038 +005040 +005041 +005045 +005049 +005050 +005052 +005053 +005054 +005055 +005056 +005057 +005058 +005062 +005063 +005064 +005065 +005067 +005068 +005070 +005072 +005073 +005074 +005075 +005077 +005078 +005079 +005080 +005081 +005082 +005086 +005090 +005093 +005094 +005095 +005101 +005103 +005105 +005108 +005109 +005110 +005112 +005113 +005120 +005121 +005122 +005124 +005125 +005127 +005128 +005133 +005135 +005136 +005138 +005139 +005140 +005141 +005143 +005144 +005145 +005147 +005149 +005153 +005155 +005156 +005157 +005158 +005161 +005162 +005163 +005164 +005166 +005167 +005168 +005170 +005172 +005174 +005175 +005176 +005179 +005180 +005181 +005182 +005184 +005185 +005188 +005189 +005190 +005191 +005194 +005197 +005198 +005199 +005201 +005206 +005213 +005214 +005217 +005218 +005219 +005221 +005222 +005226 +005227 +005229 +005230 +005233 +005234 +005236 +005237 +005240 +005241 +005242 +005244 +005246 +005249 +005251 +005255 +005256 +005260 +005262 +005267 +005268 +005271 +005273 +005274 +005275 +005276 +005279 +005280 +005282 +005284 +005287 +005289 +005292 +005296 +005297 +005298 +005299 +005304 +005307 +005308 +005309 +005311 +005312 +005313 +005315 +005316 +005318 +005319 +005321 +005322 +005323 +005325 +005328 +005329 +005330 +005333 +005334 +005335 +005336 +005337 +005338 +005341 +005342 +005343 +005345 +005347 +005349 +005350 +005359 +005360 +005363 +005365 +005366 +005368 +005369 +005371 +005372 +005375 +005377 +005378 +005379 +005381 +005385 +005386 +005389 +005390 +005391 +005404 +005405 +005413 +005415 +005422 +005423 +005426 +005427 +005429 +005430 +005431 +005434 +005437 +005441 +005443 +005444 +005445 +005447 +005448 +005449 +005450 +005452 +005453 +005458 +005459 +005460 +005461 +005465 +005466 +005467 +005471 +005472 +005473 +005474 +005476 +005477 +005479 +005481 +005482 +005484 +005486 +005487 +005489 +005494 +005495 +005498 +005505 +005510 +005511 +005514 +005515 +005523 +005525 +005528 +005531 +005532 +005534 +005536 +005538 +005540 +005542 +005544 +005545 +005546 +005551 +005552 +005555 +005556 +005557 +005558 +005559 +005560 +005565 +005566 +005570 +005571 +005572 +005573 +005576 +005577 +005580 +005581 +005582 +005584 +005586 +005587 +005588 +005589 +005590 +005595 +005596 +005600 +005601 +005602 +005603 +005610 +005613 +005616 
+005617 +005618 +005619 +005623 +005625 +005630 +005631 +005633 +005634 +005635 +005638 +005639 +005640 +005642 +005643 +005649 +005650 +005652 +005653 +005656 +005658 +005659 +005660 +005662 +005664 +005668 +005669 +005672 +005673 +005676 +005677 +005680 +005683 +005685 +005687 +005689 +005695 +005698 +005699 +005700 +005703 +005704 +005706 +005707 +005708 +005709 +005712 +005713 +005714 +005717 +005724 +005725 +005727 +005728 +005729 +005731 +005735 +005736 +005739 +005740 +005741 +005743 +005744 +005745 +005746 +005747 +005751 +005754 +005757 +005760 +005762 +005763 +005765 +005777 +005782 +005783 +005784 +005785 +005786 +005787 +005790 +005793 +005794 +005796 +005800 +005801 +005803 +005805 +005806 +005807 +005811 +005812 +005818 +005819 +005820 +005821 +005822 +005826 +005827 +005829 +005834 +005839 +005840 +005841 +005843 +005852 +005854 +005855 +005856 +005857 +005859 +005864 +005869 +005873 +005876 +005878 +005879 +005881 +005882 +005883 +005885 +005887 +005889 +005892 +005893 +005894 +005899 +005900 +005901 +005903 +005905 +005906 +005907 +005909 +005910 +005911 +005912 +005913 +005914 +005916 +005917 +005918 +005919 +005921 +005922 +005923 +005925 +005926 +005927 +005931 +005933 +005935 +005938 +005939 +005944 +005947 +005948 +005949 +005952 +005955 +005958 +005961 +005962 +005963 +005965 +005969 +005970 +005972 +005975 +005978 +005981 +005982 +005984 +005985 +005986 +005988 +005994 +005996 +005997 +005999 +006001 +006002 +006003 +006005 +006008 +006009 +006010 +006012 +006013 +006014 +006016 +006023 +006024 +006026 +006027 +006028 +006029 +006030 +006031 +006033 +006034 +006036 +006038 +006039 +006041 +006042 +006043 +006044 +006045 +006046 +006047 +006048 +006050 +006052 +006054 +006057 +006058 +006060 +006061 +006062 +006063 +006066 +006067 +006068 +006070 +006071 +006074 +006075 +006077 +006078 +006083 +006085 +006086 +006087 +006088 +006093 +006095 +006096 +006097 +006098 +006100 +006102 +006103 +006106 +006107 +006110 +006114 +006115 +006116 +006117 +006118 +006121 +006122 +006123 +006125 +006126 +006127 +006130 +006133 +006136 +006139 +006144 +006146 +006148 +006151 +006152 +006154 +006156 +006161 +006163 +006165 +006167 +006168 +006169 +006173 +006176 +006177 +006182 +006185 +006186 +006187 +006190 +006194 +006195 +006196 +006198 +006202 +006204 +006208 +006210 +006213 +006215 +006219 +006222 +006227 +006228 +006229 +006232 +006233 +006238 +006240 +006244 +006246 +006247 +006249 +006250 +006258 +006263 +006265 +006266 +006267 +006269 +006270 +006272 +006273 +006274 +006275 +006276 +006278 +006280 +006282 +006286 +006287 +006288 +006297 +006300 +006301 +006302 +006305 +006306 +006312 +006314 +006315 +006316 +006317 +006321 +006322 +006324 +006331 +006332 +006333 +006334 +006338 +006339 +006340 +006342 +006343 +006344 +006345 +006348 +006349 +006351 +006353 +006354 +006355 +006356 +006357 +006360 +006364 +006366 +006368 +006369 +006370 +006371 +006372 +006377 +006379 +006380 +006381 +006385 +006386 +006388 +006391 +006393 +006394 +006395 +006396 +006403 +006405 +006406 +006407 +006409 +006410 +006411 +006415 +006416 +006417 +006420 +006423 +006424 +006425 +006426 +006427 +006433 +006434 +006435 +006436 +006437 +006439 +006440 +006441 +006442 +006444 +006445 +006446 +006451 +006452 +006453 +006454 +006462 +006464 +006465 +006468 +006469 +006470 +006472 +006473 +006474 +006475 +006477 +006478 +006481 +006482 +006483 +006484 +006486 +006488 +006491 +006493 +006496 +006497 +006498 +006503 +006505 +006506 +006507 +006508 +006512 +006514 +006515 +006516 +006517 +006519 +006520 
+006521 +006524 +006525 +006529 +006530 +006531 +006532 +006533 +006534 +006535 +006537 +006540 +006542 +006548 +006549 +006551 +006553 +006555 +006556 +006558 +006560 +006561 +006563 +006565 +006568 +006569 +006570 +006574 +006576 +006577 +006578 +006581 +006582 +006583 +006586 +006588 +006590 +006592 +006593 +006595 +006596 +006597 +006602 +006603 +006604 +006611 +006612 +006613 +006614 +006618 +006623 +006624 +006625 +006626 +006628 +006629 +006632 +006633 +006634 +006636 +006637 +006638 +006641 +006643 +006647 +006649 +006650 +006651 +006655 +006656 +006658 +006659 +006660 +006664 +006666 +006667 +006669 +006670 +006674 +006676 +006677 +006678 +006679 +006682 +006685 +006686 +006692 +006693 +006694 +006695 +006696 +006698 +006701 +006703 +006709 +006710 +006711 +006712 +006713 +006714 +006715 +006719 +006720 +006723 +006725 +006726 +006729 +006731 +006732 +006733 +006734 +006737 +006738 +006741 +006744 +006745 +006747 +006751 +006752 +006753 +006754 +006755 +006756 +006758 +006759 +006760 +006761 +006762 +006764 +006765 +006767 +006768 +006770 +006771 +006772 +006773 +006777 +006778 +006780 +006781 +006782 +006783 +006785 +006786 +006789 +006791 +006792 +006794 +006796 +006797 +006798 +006800 +006803 +006804 +006806 +006807 +006808 +006811 +006812 +006813 +006815 +006816 +006818 +006819 +006822 +006828 +006829 +006832 +006833 +006836 +006837 +006841 +006843 +006844 +006847 +006849 +006850 +006852 +006853 +006854 +006855 +006856 +006858 +006860 +006862 +006863 +006866 +006868 +006870 +006872 +006873 +006874 +006876 +006879 +006881 +006882 +006884 +006885 +006887 +006889 +006891 +006895 +006897 +006898 +006899 +006900 +006901 +006903 +006906 +006907 +006908 +006910 +006913 +006914 +006917 +006922 +006925 +006928 +006930 +006936 +006937 +006938 +006942 +006943 +006944 +006945 +006948 +006950 +006953 +006954 +006955 +006956 +006959 +006960 +006962 +006964 +006968 +006971 +006973 +006977 +006978 +006980 +006981 +006982 +006987 +006989 +006990 +006992 +006994 +006997 +006999 +007000 +007003 +007005 +007006 +007008 +007010 +007011 +007012 +007014 +007015 +007016 +007019 +007022 +007023 +007026 +007027 +007028 +007029 +007030 +007031 +007032 +007033 +007034 +007037 +007038 +007042 +007043 +007047 +007048 +007049 +007052 +007053 +007055 +007056 +007059 +007061 +007063 +007065 +007067 +007068 +007069 +007071 +007072 +007074 +007076 +007078 +007079 +007080 +007081 +007082 +007083 +007084 +007085 +007087 +007088 +007089 +007091 +007095 +007098 +007100 +007103 +007109 +007110 +007112 +007115 +007117 +007119 +007120 +007122 +007125 +007130 +007131 +007132 +007133 +007135 +007136 +007138 +007139 +007144 +007145 +007146 +007149 +007154 +007157 +007158 +007161 +007162 +007163 +007164 +007165 +007166 +007168 +007169 +007172 +007174 +007176 +007177 +007178 +007180 +007182 +007183 +007187 +007194 +007198 +007199 +007200 +007201 +007202 +007204 +007205 +007207 +007208 +007210 +007212 +007214 +007215 +007217 +007219 +007221 +007225 +007227 +007229 +007230 +007232 +007233 +007235 +007238 +007240 +007242 +007244 +007246 +007247 +007252 +007253 +007255 +007256 +007258 +007260 +007261 +007262 +007265 +007266 +007267 +007271 +007272 +007273 +007274 +007275 +007277 +007278 +007279 +007280 +007283 +007284 +007287 +007288 +007289 +007290 +007291 +007292 +007294 +007299 +007300 +007302 +007303 +007304 +007309 +007310 +007311 +007315 +007318 +007319 +007322 +007323 +007325 +007326 +007327 +007329 +007330 +007331 +007336 +007337 +007339 +007342 +007343 +007344 +007345 +007347 +007349 +007350 +007351 +007352 +007353 
+007359 +007360 +007364 +007369 +007371 +007374 +007375 +007376 +007377 +007380 +007381 +007382 +007383 +007384 +007385 +007389 +007391 +007395 +007396 +007397 +007398 +007401 +007402 +007403 +007405 +007407 +007409 +007410 +007411 +007412 +007413 +007415 +007416 +007419 +007420 +007421 +007422 +007423 +007424 +007426 +007430 +007433 +007434 +007435 +007436 +007437 +007439 +007440 +007442 +007445 +007447 +007448 +007449 +007450 +007453 +007456 +007458 +007462 +007463 +007464 +007466 +007467 +007468 +007469 +007470 +007473 +007475 +007477 +007478 +007480 \ No newline at end of file diff --git a/pointnet2_lib/tools/dataset.py b/pointnet2_lib/tools/dataset.py new file mode 100644 index 0000000..bf7874e --- /dev/null +++ b/pointnet2_lib/tools/dataset.py @@ -0,0 +1,187 @@ +import os +import numpy as np +import torch.utils.data as torch_data +import kitti_utils +import cv2 +from PIL import Image + +USE_INTENSITY = False + + +class KittiDataset(torch_data.Dataset): + def __init__(self, root_dir, split = 'train', mode = 'TRAIN'): + self.split = split + self.mode = mode + self.classes = ['Car'] + is_test = self.split == 'test' + self.imageset_dir = os.path.join(root_dir, 'KITTI', 'object', 'testing' if is_test else 'training') + + split_dir = os.path.join(root_dir, 'KITTI', 'ImageSets', split + '.txt') + self.image_idx_list = [x.strip() for x in open(split_dir).readlines()] + self.sample_id_list = [int(sample_id) for sample_id in self.image_idx_list] + self.num_sample = self.image_idx_list.__len__() + + self.npoints = 16384 + + self.image_dir = os.path.join(self.imageset_dir, 'image_2') + self.lidar_dir = os.path.join(self.imageset_dir, 'velodyne') + self.calib_dir = os.path.join(self.imageset_dir, 'calib') + self.label_dir = os.path.join(self.imageset_dir, 'label_2') + self.plane_dir = os.path.join(self.imageset_dir, 'planes') + + def get_image(self, idx): + img_file = os.path.join(self.image_dir, '%06d.png' % idx) + assert os.path.exists(img_file) + return cv2.imread(img_file) # (H, W, 3) BGR mode + + def get_image_shape(self, idx): + img_file = os.path.join(self.image_dir, '%06d.png' % idx) + assert os.path.exists(img_file) + im = Image.open(img_file) + width, height = im.size + return height, width, 3 + + def get_lidar(self, idx): + lidar_file = os.path.join(self.lidar_dir, '%06d.bin' % idx) + assert os.path.exists(lidar_file) + return np.fromfile(lidar_file, dtype = np.float32).reshape(-1, 4) + + def get_calib(self, idx): + calib_file = os.path.join(self.calib_dir, '%06d.txt' % idx) + assert os.path.exists(calib_file) + return kitti_utils.Calibration(calib_file) + + def get_label(self, idx): + label_file = os.path.join(self.label_dir, '%06d.txt' % idx) + assert os.path.exists(label_file) + return kitti_utils.get_objects_from_label(label_file) + + @staticmethod + def get_valid_flag(pts_rect, pts_img, pts_rect_depth, img_shape): + val_flag_1 = np.logical_and(pts_img[:, 0] >= 0, pts_img[:, 0] < img_shape[1]) + val_flag_2 = np.logical_and(pts_img[:, 1] >= 0, pts_img[:, 1] < img_shape[0]) + val_flag_merge = np.logical_and(val_flag_1, val_flag_2) + pts_valid_flag = np.logical_and(val_flag_merge, pts_rect_depth >= 0) + return pts_valid_flag + + def filtrate_objects(self, obj_list): + type_whitelist = self.classes + if self.mode == 'TRAIN': + type_whitelist = list(self.classes) + if 'Car' in self.classes: + type_whitelist.append('Van') + + valid_obj_list = [] + for obj in obj_list: + if obj.cls_type not in type_whitelist: + continue + + valid_obj_list.append(obj) + return valid_obj_list + + def 
__len__(self): + return len(self.sample_id_list) + + def __getitem__(self, index): + sample_id = int(self.sample_id_list[index]) + calib = self.get_calib(sample_id) + img_shape = self.get_image_shape(sample_id) + pts_lidar = self.get_lidar(sample_id) + + # get valid point (projected points should be in image) + pts_rect = calib.lidar_to_rect(pts_lidar[:, 0:3]) + pts_intensity = pts_lidar[:, 3] + + pts_img, pts_rect_depth = calib.rect_to_img(pts_rect) + pts_valid_flag = self.get_valid_flag(pts_rect, pts_img, pts_rect_depth, img_shape) + + pts_rect = pts_rect[pts_valid_flag][:, 0:3] + pts_intensity = pts_intensity[pts_valid_flag] + + if self.npoints < len(pts_rect): + pts_depth = pts_rect[:, 2] + pts_near_flag = pts_depth < 40.0 + far_idxs_choice = np.where(pts_near_flag == 0)[0] + near_idxs = np.where(pts_near_flag == 1)[0] + near_idxs_choice = np.random.choice(near_idxs, self.npoints - len(far_idxs_choice), replace = False) + + choice = np.concatenate((near_idxs_choice, far_idxs_choice), axis = 0) \ + if len(far_idxs_choice) > 0 else near_idxs_choice + np.random.shuffle(choice) + else: + choice = np.arange(0, len(pts_rect), dtype = np.int32) + if self.npoints > len(pts_rect): + extra_choice = np.random.choice(choice, self.npoints - len(pts_rect), replace = False) + choice = np.concatenate((choice, extra_choice), axis = 0) + np.random.shuffle(choice) + + ret_pts_rect = pts_rect[choice, :] + ret_pts_intensity = pts_intensity[choice] - 0.5 # translate intensity to [-0.5, 0.5] + + pts_features = [ret_pts_intensity.reshape(-1, 1)] + ret_pts_features = np.concatenate(pts_features, axis = 1) if pts_features.__len__() > 1 else pts_features[0] + + sample_info = { 'sample_id': sample_id } + + if self.mode == 'TEST': + if USE_INTENSITY: + pts_input = np.concatenate((ret_pts_rect, ret_pts_features), axis = 1) # (N, C) + else: + pts_input = ret_pts_rect + sample_info['pts_input'] = pts_input + sample_info['pts_rect'] = ret_pts_rect + sample_info['pts_features'] = ret_pts_features + return sample_info + + gt_obj_list = self.filtrate_objects(self.get_label(sample_id)) + + gt_boxes3d = kitti_utils.objs_to_boxes3d(gt_obj_list) + + # prepare input + if USE_INTENSITY: + pts_input = np.concatenate((ret_pts_rect, ret_pts_features), axis = 1) # (N, C) + else: + pts_input = ret_pts_rect + + # generate training labels + cls_labels = self.generate_training_labels(ret_pts_rect, gt_boxes3d) + sample_info['pts_input'] = pts_input + sample_info['pts_rect'] = ret_pts_rect + sample_info['cls_labels'] = cls_labels + return sample_info + + @staticmethod + def generate_training_labels(pts_rect, gt_boxes3d): + cls_label = np.zeros((pts_rect.shape[0]), dtype = np.int32) + gt_corners = kitti_utils.boxes3d_to_corners3d(gt_boxes3d, rotate = True) + extend_gt_boxes3d = kitti_utils.enlarge_box3d(gt_boxes3d, extra_width = 0.2) + extend_gt_corners = kitti_utils.boxes3d_to_corners3d(extend_gt_boxes3d, rotate = True) + for k in range(gt_boxes3d.shape[0]): + box_corners = gt_corners[k] + fg_pt_flag = kitti_utils.in_hull(pts_rect, box_corners) + cls_label[fg_pt_flag] = 1 + + # enlarge the bbox3d, ignore nearby points + extend_box_corners = extend_gt_corners[k] + fg_enlarge_flag = kitti_utils.in_hull(pts_rect, extend_box_corners) + ignore_flag = np.logical_xor(fg_pt_flag, fg_enlarge_flag) + cls_label[ignore_flag] = -1 + + return cls_label + + def collate_batch(self, batch): + batch_size = batch.__len__() + ans_dict = { } + + for key in batch[0].keys(): + if isinstance(batch[0][key], np.ndarray): + ans_dict[key] = 
np.concatenate([batch[k][key][np.newaxis, ...] for k in range(batch_size)], axis = 0) + + else: + ans_dict[key] = [batch[k][key] for k in range(batch_size)] + if isinstance(batch[0][key], int): + ans_dict[key] = np.array(ans_dict[key], dtype = np.int32) + elif isinstance(batch[0][key], float): + ans_dict[key] = np.array(ans_dict[key], dtype = np.float32) + + return ans_dict diff --git a/pointnet2_lib/tools/kitti_utils.py b/pointnet2_lib/tools/kitti_utils.py new file mode 100644 index 0000000..b1f05f6 --- /dev/null +++ b/pointnet2_lib/tools/kitti_utils.py @@ -0,0 +1,231 @@ +import numpy as np +from scipy.spatial import Delaunay +import scipy + + +def cls_type_to_id(cls_type): + type_to_id = { 'Car': 1, 'Pedestrian': 2, 'Cyclist': 3, 'Van': 4 } + if cls_type not in type_to_id.keys(): + return -1 + return type_to_id[cls_type] + + +class Object3d(object): + def __init__(self, line): + label = line.strip().split(' ') + self.src = line + self.cls_type = label[0] + self.cls_id = cls_type_to_id(self.cls_type) + self.trucation = float(label[1]) + self.occlusion = float(label[2]) # 0:fully visible 1:partly occluded 2:largely occluded 3:unknown + self.alpha = float(label[3]) + self.box2d = np.array((float(label[4]), float(label[5]), float(label[6]), float(label[7])), dtype = np.float32) + self.h = float(label[8]) + self.w = float(label[9]) + self.l = float(label[10]) + self.pos = np.array((float(label[11]), float(label[12]), float(label[13])), dtype = np.float32) + self.dis_to_cam = np.linalg.norm(self.pos) + self.ry = float(label[14]) + self.score = float(label[15]) if label.__len__() == 16 else -1.0 + self.level_str = None + self.level = self.get_obj_level() + + def get_obj_level(self): + height = float(self.box2d[3]) - float(self.box2d[1]) + 1 + + if height >= 40 and self.trucation <= 0.15 and self.occlusion <= 0: + self.level_str = 'Easy' + return 1 # Easy + elif height >= 25 and self.trucation <= 0.3 and self.occlusion <= 1: + self.level_str = 'Moderate' + return 2 # Moderate + elif height >= 25 and self.trucation <= 0.5 and self.occlusion <= 2: + self.level_str = 'Hard' + return 3 # Hard + else: + self.level_str = 'UnKnown' + return 4 + + def generate_corners3d(self): + """ + generate corners3d representation for this object + :return corners_3d: (8, 3) corners of box3d in camera coord + """ + l, h, w = self.l, self.h, self.w + x_corners = [l / 2, l / 2, -l / 2, -l / 2, l / 2, l / 2, -l / 2, -l / 2] + y_corners = [0, 0, 0, 0, -h, -h, -h, -h] + z_corners = [w / 2, -w / 2, -w / 2, w / 2, w / 2, -w / 2, -w / 2, w / 2] + + R = np.array([[np.cos(self.ry), 0, np.sin(self.ry)], + [0, 1, 0], + [-np.sin(self.ry), 0, np.cos(self.ry)]]) + corners3d = np.vstack([x_corners, y_corners, z_corners]) # (3, 8) + corners3d = np.dot(R, corners3d).T + corners3d = corners3d + self.pos + return corners3d + + def to_str(self): + print_str = '%s %.3f %.3f %.3f box2d: %s hwl: [%.3f %.3f %.3f] pos: %s ry: %.3f' \ + % (self.cls_type, self.trucation, self.occlusion, self.alpha, self.box2d, self.h, self.w, self.l, + self.pos, self.ry) + return print_str + + def to_kitti_format(self): + kitti_str = '%s %.2f %d %.2f %.2f %.2f %.2f %.2f %.2f %.2f %.2f %.2f %.2f %.2f %.2f' \ + % (self.cls_type, self.trucation, int(self.occlusion), self.alpha, self.box2d[0], self.box2d[1], + self.box2d[2], self.box2d[3], self.h, self.w, self.l, self.pos[0], self.pos[1], self.pos[2], + self.ry) + return kitti_str + + +def get_calib_from_file(calib_file): + with open(calib_file) as f: + lines = f.readlines() + + obj = lines[2].strip().split(' 
')[1:] + P2 = np.array(obj, dtype = np.float32) + obj = lines[3].strip().split(' ')[1:] + P3 = np.array(obj, dtype = np.float32) + obj = lines[4].strip().split(' ')[1:] + R0 = np.array(obj, dtype = np.float32) + obj = lines[5].strip().split(' ')[1:] + Tr_velo_to_cam = np.array(obj, dtype = np.float32) + + return { 'P2' : P2.reshape(3, 4), + 'P3' : P3.reshape(3, 4), + 'R0' : R0.reshape(3, 3), + 'Tr_velo2cam': Tr_velo_to_cam.reshape(3, 4) } + + +class Calibration(object): + def __init__(self, calib_file): + if isinstance(calib_file, str): + calib = get_calib_from_file(calib_file) + else: + calib = calib_file + + self.P2 = calib['P2'] # 3 x 4 + self.R0 = calib['R0'] # 3 x 3 + self.V2C = calib['Tr_velo2cam'] # 3 x 4 + + def cart_to_hom(self, pts): + """ + :param pts: (N, 3 or 2) + :return pts_hom: (N, 4 or 3) + """ + pts_hom = np.hstack((pts, np.ones((pts.shape[0], 1), dtype = np.float32))) + return pts_hom + + def lidar_to_rect(self, pts_lidar): + """ + :param pts_lidar: (N, 3) + :return pts_rect: (N, 3) + """ + pts_lidar_hom = self.cart_to_hom(pts_lidar) + pts_rect = np.dot(pts_lidar_hom, np.dot(self.V2C.T, self.R0.T)) + return pts_rect + + def rect_to_img(self, pts_rect): + """ + :param pts_rect: (N, 3) + :return pts_img: (N, 2) + """ + pts_rect_hom = self.cart_to_hom(pts_rect) + pts_2d_hom = np.dot(pts_rect_hom, self.P2.T) + pts_img = (pts_2d_hom[:, 0:2].T / pts_rect_hom[:, 2]).T # (N, 2) + pts_rect_depth = pts_2d_hom[:, 2] - self.P2.T[3, 2] # depth in rect camera coord + return pts_img, pts_rect_depth + + def lidar_to_img(self, pts_lidar): + """ + :param pts_lidar: (N, 3) + :return pts_img: (N, 2) + """ + pts_rect = self.lidar_to_rect(pts_lidar) + pts_img, pts_depth = self.rect_to_img(pts_rect) + return pts_img, pts_depth + + +def get_objects_from_label(label_file): + with open(label_file, 'r') as f: + lines = f.readlines() + objects = [Object3d(line) for line in lines] + return objects + + +def objs_to_boxes3d(obj_list): + boxes3d = np.zeros((obj_list.__len__(), 7), dtype = np.float32) + for k, obj in enumerate(obj_list): + boxes3d[k, 0:3], boxes3d[k, 3], boxes3d[k, 4], boxes3d[k, 5], boxes3d[k, 6] \ + = obj.pos, obj.h, obj.w, obj.l, obj.ry + return boxes3d + + +def boxes3d_to_corners3d(boxes3d, rotate = True): + """ + :param boxes3d: (N, 7) [x, y, z, h, w, l, ry] + :param rotate: + :return: corners3d: (N, 8, 3) + """ + boxes_num = boxes3d.shape[0] + h, w, l = boxes3d[:, 3], boxes3d[:, 4], boxes3d[:, 5] + x_corners = np.array([l / 2., l / 2., -l / 2., -l / 2., l / 2., l / 2., -l / 2., -l / 2.], + dtype = np.float32).T # (N, 8) + z_corners = np.array([w / 2., -w / 2., -w / 2., w / 2., w / 2., -w / 2., -w / 2., w / 2.], + dtype = np.float32).T # (N, 8) + + y_corners = np.zeros((boxes_num, 8), dtype = np.float32) + y_corners[:, 4:8] = -h.reshape(boxes_num, 1).repeat(4, axis = 1) # (N, 8) + + if rotate: + ry = boxes3d[:, 6] + zeros, ones = np.zeros(ry.size, dtype = np.float32), np.ones(ry.size, dtype = np.float32) + rot_list = np.array([[np.cos(ry), zeros, -np.sin(ry)], + [zeros, ones, zeros], + [np.sin(ry), zeros, np.cos(ry)]]) # (3, 3, N) + R_list = np.transpose(rot_list, (2, 0, 1)) # (N, 3, 3) + + temp_corners = np.concatenate((x_corners.reshape(-1, 8, 1), y_corners.reshape(-1, 8, 1), + z_corners.reshape(-1, 8, 1)), axis = 2) # (N, 8, 3) + rotated_corners = np.matmul(temp_corners, R_list) # (N, 8, 3) + x_corners, y_corners, z_corners = rotated_corners[:, :, 0], rotated_corners[:, :, 1], rotated_corners[:, :, 2] + + x_loc, y_loc, z_loc = boxes3d[:, 0], boxes3d[:, 1], boxes3d[:, 2] + + x = 
x_loc.reshape(-1, 1) + x_corners.reshape(-1, 8) + y = y_loc.reshape(-1, 1) + y_corners.reshape(-1, 8) + z = z_loc.reshape(-1, 1) + z_corners.reshape(-1, 8) + + corners = np.concatenate((x.reshape(-1, 8, 1), y.reshape(-1, 8, 1), z.reshape(-1, 8, 1)), axis = 2) + + return corners.astype(np.float32) + + +def enlarge_box3d(boxes3d, extra_width): + """ + :param boxes3d: (N, 7) [x, y, z, h, w, l, ry] + """ + if isinstance(boxes3d, np.ndarray): + large_boxes3d = boxes3d.copy() + else: + large_boxes3d = boxes3d.clone() + large_boxes3d[:, 3:6] += extra_width * 2 + large_boxes3d[:, 1] += extra_width + return large_boxes3d + + +def in_hull(p, hull): + """ + :param p: (N, K) test points + :param hull: (M, K) M corners of a box + :return (N) bool + """ + try: + if not isinstance(hull, Delaunay): + hull = Delaunay(hull) + flag = hull.find_simplex(p) >= 0 + except scipy.spatial.qhull.QhullError: + print('Warning: not a hull %s' % str(hull)) + flag = np.zeros(p.shape[0], dtype = np.bool) + + return flag diff --git a/pointnet2_lib/tools/pointnet2_msg.py b/pointnet2_lib/tools/pointnet2_msg.py new file mode 100644 index 0000000..56a075c --- /dev/null +++ b/pointnet2_lib/tools/pointnet2_msg.py @@ -0,0 +1,90 @@ +import torch +import torch.nn as nn +from pointnet2.pointnet2_modules import PointnetFPModule, PointnetSAModuleMSG +import pointnet2.pytorch_utils as pt_utils + + +def get_model(input_channels = 0): + return Pointnet2MSG(input_channels = input_channels) + + +NPOINTS = [4096, 1024, 256, 64] +RADIUS = [[0.1, 0.5], [0.5, 1.0], [1.0, 2.0], [2.0, 4.0]] +NSAMPLE = [[16, 32], [16, 32], [16, 32], [16, 32]] +MLPS = [[[16, 16, 32], [32, 32, 64]], [[64, 64, 128], [64, 96, 128]], + [[128, 196, 256], [128, 196, 256]], [[256, 256, 512], [256, 384, 512]]] +FP_MLPS = [[128, 128], [256, 256], [512, 512], [512, 512]] +CLS_FC = [128] +DP_RATIO = 0.5 + + +class Pointnet2MSG(nn.Module): + def __init__(self, input_channels = 6): + super().__init__() + + self.SA_modules = nn.ModuleList() + channel_in = input_channels + + skip_channel_list = [input_channels] + for k in range(NPOINTS.__len__()): + mlps = MLPS[k].copy() + channel_out = 0 + for idx in range(mlps.__len__()): + mlps[idx] = [channel_in] + mlps[idx] + channel_out += mlps[idx][-1] + + self.SA_modules.append( + PointnetSAModuleMSG( + npoint = NPOINTS[k], + radii = RADIUS[k], + nsamples = NSAMPLE[k], + mlps = mlps, + use_xyz = True, + bn = True + ) + ) + skip_channel_list.append(channel_out) + channel_in = channel_out + + self.FP_modules = nn.ModuleList() + + for k in range(FP_MLPS.__len__()): + pre_channel = FP_MLPS[k + 1][-1] if k + 1 < len(FP_MLPS) else channel_out + self.FP_modules.append( + PointnetFPModule(mlp = [pre_channel + skip_channel_list[k]] + FP_MLPS[k]) + ) + + cls_layers = [] + pre_channel = FP_MLPS[0][-1] + for k in range(0, CLS_FC.__len__()): + cls_layers.append(pt_utils.Conv1d(pre_channel, CLS_FC[k], bn = True)) + pre_channel = CLS_FC[k] + cls_layers.append(pt_utils.Conv1d(pre_channel, 1, activation = None)) + cls_layers.insert(1, nn.Dropout(0.5)) + self.cls_layer = nn.Sequential(*cls_layers) + + def _break_up_pc(self, pc): + xyz = pc[..., 0:3].contiguous() + features = ( + pc[..., 3:].transpose(1, 2).contiguous() + if pc.size(-1) > 3 else None + ) + + return xyz, features + + def forward(self, pointcloud: torch.cuda.FloatTensor): + xyz, features = self._break_up_pc(pointcloud) + + l_xyz, l_features = [xyz], [features] + for i in range(len(self.SA_modules)): + li_xyz, li_features = self.SA_modules[i](l_xyz[i], l_features[i]) + l_xyz.append(li_xyz) + 
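# keep each SA level's coords / features so the FP (feature propagation) modules below can upsample them back to the full point set +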
l_features.append(li_features) + + for i in range(-1, -(len(self.FP_modules) + 1), -1): + l_features[i - 1] = self.FP_modules[i]( + l_xyz[i - 1], l_xyz[i], l_features[i - 1], l_features[i] + ) + + pred_cls = self.cls_layer(l_features[0]).transpose(1, 2).contiguous() # (B, N, 1) + return pred_cls diff --git a/pointnet2_lib/tools/train_and_eval.py b/pointnet2_lib/tools/train_and_eval.py new file mode 100644 index 0000000..61d57b5 --- /dev/null +++ b/pointnet2_lib/tools/train_and_eval.py @@ -0,0 +1,216 @@ +import _init_path +import numpy as np +import os +import torch +import torch.nn as nn +import torch.optim as optim +import torch.optim.lr_scheduler as lr_sched +from torch.nn.utils import clip_grad_norm_ +from torch.utils.data import DataLoader +import tensorboard_logger as tb_log +from dataset import KittiDataset +import argparse +import importlib + +parser = argparse.ArgumentParser(description = "Arg parser") +parser.add_argument("--batch_size", type = int, default = 8) +parser.add_argument("--epochs", type = int, default = 100) +parser.add_argument("--ckpt_save_interval", type = int, default = 5) +parser.add_argument('--workers', type = int, default = 4) +parser.add_argument("--mode", type = str, default = 'train') +parser.add_argument("--ckpt", type = str, default = 'None') + +parser.add_argument("--net", type = str, default = 'pointnet2_msg') + +parser.add_argument('--lr', type = float, default = 0.002) +parser.add_argument('--lr_decay', type = float, default = 0.2) +parser.add_argument('--lr_clip', type = float, default = 0.000001) +parser.add_argument('--decay_step_list', type = list, default = [50, 70, 80, 90]) +parser.add_argument('--weight_decay', type = float, default = 0.001) + +parser.add_argument("--output_dir", type = str, default = 'output') +parser.add_argument("--extra_tag", type = str, default = 'default') + +args = parser.parse_args() + +FG_THRESH = 0.3 + + +def log_print(info, log_f = None): + print(info) + if log_f is not None: + print(info, file = log_f) + + +class DiceLoss(nn.Module): + def __init__(self, ignore_target = -1): + super().__init__() + self.ignore_target = ignore_target + + def forward(self, input, target): + """ + :param input: (N), logit + :param target: (N), {0, 1} + :return: + """ + input = torch.sigmoid(input.view(-1)) + target = target.float().view(-1) + mask = (target != self.ignore_target).float() + return 1.0 - (torch.min(input, target) * mask).sum() / torch.clamp((torch.max(input, target) * mask).sum(), + min = 1.0) + + +def train_one_epoch(model, train_loader, optimizer, epoch, lr_scheduler, total_it, tb_log, log_f): + model.train() + log_print('===============TRAIN EPOCH %d================' % epoch, log_f = log_f) + loss_func = DiceLoss(ignore_target = -1) + + for it, batch in enumerate(train_loader): + optimizer.zero_grad() + + pts_input, cls_labels = batch['pts_input'], batch['cls_labels'] + pts_input = torch.from_numpy(pts_input).cuda(non_blocking = True).float() + cls_labels = torch.from_numpy(cls_labels).cuda(non_blocking = True).long().view(-1) + + pred_cls = model(pts_input) + pred_cls = pred_cls.view(-1) + + loss = loss_func(pred_cls, cls_labels) + loss.backward() + clip_grad_norm_(model.parameters(), 1.0) + optimizer.step() + + total_it += 1 + + pred_class = (torch.sigmoid(pred_cls) > FG_THRESH) + fg_mask = cls_labels > 0 + correct = ((pred_class.long() == cls_labels) & fg_mask).float().sum() + union = fg_mask.sum().float() + (pred_class > 0).sum().float() - correct + iou = correct / torch.clamp(union, min = 1.0) + + cur_lr = 
lr_scheduler.get_lr()[0] + if tb_log is not None: + tb_log.log_value('learning_rate', cur_lr, epoch) + tb_log.log_value('train_loss', loss, total_it) + tb_log.log_value('train_fg_iou', iou, total_it) + + log_print('training epoch %d: it=%d/%d, total_it=%d, loss=%.5f, fg_iou=%.3f, lr=%f' % + (epoch, it, len(train_loader), total_it, loss.item(), iou.item(), cur_lr), log_f = log_f) + + return total_it + + +def eval_one_epoch(model, eval_loader, epoch, tb_log = None, log_f = None): + model.eval() + log_print('===============EVAL EPOCH %d================' % epoch, log_f = log_f) + + iou_list = [] + for it, batch in enumerate(eval_loader): + pts_input, cls_labels = batch['pts_input'], batch['cls_labels'] + pts_input = torch.from_numpy(pts_input).cuda(non_blocking = True).float() + cls_labels = torch.from_numpy(cls_labels).cuda(non_blocking = True).long().view(-1) + + pred_cls = model(pts_input) + pred_cls = pred_cls.view(-1) + + pred_class = (torch.sigmoid(pred_cls) > FG_THRESH) + fg_mask = cls_labels > 0 + correct = ((pred_class.long() == cls_labels) & fg_mask).float().sum() + union = fg_mask.sum().float() + (pred_class > 0).sum().float() - correct + iou = correct / torch.clamp(union, min = 1.0) + + iou_list.append(iou.item()) + log_print('EVAL: it=%d/%d, iou=%.3f' % (it, len(eval_loader), iou), log_f = log_f) + + iou_list = np.array(iou_list) + avg_iou = iou_list.mean() + if tb_log is not None: + tb_log.log_value('eval_fg_iou', avg_iou, epoch) + + log_print('\nEpoch %d: Average IoU (samples=%d): %.6f' % (epoch, iou_list.__len__(), avg_iou), log_f = log_f) + return avg_iou + + +def save_checkpoint(model, epoch, ckpt_name): + if isinstance(model, torch.nn.DataParallel): + model_state = model.module.state_dict() + else: + model_state = model.state_dict() + + state = { 'epoch': epoch, 'model_state': model_state } + ckpt_name = '{}.pth'.format(ckpt_name) + torch.save(state, ckpt_name) + + +def load_checkpoint(model, filename): + if os.path.isfile(filename): + log_print("==> Loading from checkpoint %s" % filename) + checkpoint = torch.load(filename) + epoch = checkpoint['epoch'] + model.load_state_dict(checkpoint['model_state']) + log_print("==> Done") + else: + raise FileNotFoundError + + return epoch + + +def train_and_eval(model, train_loader, eval_loader, tb_log, ckpt_dir, log_f): + model.cuda() + optimizer = optim.Adam(model.parameters(), lr = args.lr, weight_decay = args.weight_decay) + + def lr_lbmd(cur_epoch): + cur_decay = 1 + for decay_step in args.decay_step_list: + if cur_epoch >= decay_step: + cur_decay = cur_decay * args.lr_decay + return max(cur_decay, args.lr_clip / args.lr) + + lr_scheduler = lr_sched.LambdaLR(optimizer, lr_lbmd) + + total_it = 0 + for epoch in range(1, args.epochs + 1): + lr_scheduler.step(epoch) + total_it = train_one_epoch(model, train_loader, optimizer, epoch, lr_scheduler, total_it, tb_log, log_f) + + if epoch % args.ckpt_save_interval == 0: + with torch.no_grad(): + avg_iou = eval_one_epoch(model, eval_loader, epoch, tb_log, log_f) + ckpt_name = os.path.join(ckpt_dir, 'checkpoint_epoch_%d' % epoch) + save_checkpoint(model, epoch, ckpt_name) + + +if __name__ == '__main__': + MODEL = importlib.import_module(args.net) # import network module + model = MODEL.get_model(input_channels = 0) + + eval_set = KittiDataset(root_dir = './data', mode = 'EVAL') + eval_loader = DataLoader(eval_set, batch_size = args.batch_size, shuffle = False, pin_memory = True, + num_workers = args.workers, collate_fn = eval_set.collate_batch) + + if args.mode == 'train': + train_set = KittiDataset(root_dir = './data', mode = 'TRAIN') + train_loader = DataLoader(train_set, batch_size = args.batch_size, shuffle = True, pin_memory = True, + num_workers = args.workers, collate_fn = train_set.collate_batch) + # output dir config + output_dir = os.path.join(args.output_dir, args.extra_tag) + os.makedirs(output_dir, exist_ok = True) + tb_log.configure(os.path.join(output_dir, 'tensorboard')) + ckpt_dir = os.path.join(output_dir, 'ckpt') + os.makedirs(ckpt_dir, exist_ok = True) + + log_file = os.path.join(output_dir, 'log.txt') + log_f = open(log_file, 'w') + + for key, val in vars(args).items(): + log_print("{:16} {}".format(key, val), log_f = log_f) + + # train and eval + train_and_eval(model, train_loader, eval_loader, tb_log, ckpt_dir, log_f) + log_f.close() + elif args.mode == 'eval': + epoch = load_checkpoint(model, args.ckpt) + model.cuda() + with torch.no_grad(): + avg_iou = eval_one_epoch(model, eval_loader, epoch) + else: + raise NotImplementedError
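+ +# Usage sketch (flags as defined by the argparse above; assumes KITTI data is arranged under ./data/KITTI as expected by dataset.py): +#   python train_and_eval.py --mode train --batch_size 8 --epochs 100 +#   python train_and_eval.py --mode eval --ckpt output/default/ckpt/checkpoint_epoch_100.pth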
diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 0000000..fca9b23 --- /dev/null +++ b/requirements.txt @@ -0,0 +1,17 @@ +#torch===1.2.0 (has a big impact on PED and CYC!!!) +#torchvision===0.4.0 +opencv-python +shapely +Cython +scipy +pandas +PyYAML==5.4.1 +json_tricks +scikit-image +yacs>=0.1.5 +tensorboardX>=1.6 +tqdm +ninja +fire +numba==0.53.1 +easydict diff --git a/tools/_init_path.py b/tools/_init_path.py new file mode 100644 index 0000000..5bf4292 --- /dev/null +++ b/tools/_init_path.py @@ -0,0 +1,5 @@ +import os, sys + +sys.path.insert(0, os.path.join(os.path.dirname(os.path.abspath(__file__)), '../')) +sys.path.insert(0, os.path.join(os.path.dirname(os.path.abspath(__file__)), '../lib/datasets')) +sys.path.insert(0, os.path.join(os.path.dirname(os.path.abspath(__file__)), '../lib/net')) diff --git a/tools/cfgs/CAR_EPNet_plus_plus.yaml b/tools/cfgs/CAR_EPNet_plus_plus.yaml new file mode 100644 index 0000000..4e0186a --- /dev/null +++ b/tools/cfgs/CAR_EPNet_plus_plus.yaml @@ -0,0 +1,221 @@ +CLASSES: Car + +INCLUDE_SIMILAR_TYPE: True + +# config of augmentation +AUG_DATA: True +AUG_METHOD_LIST: ['rotation', 'scaling', 'flip'] +AUG_METHOD_PROB: [1.0, 1.0, 0.5] +AUG_ROT_RANGE: 18 + +GT_AUG_ENABLED: False +GT_EXTRA_NUM: 15 +GT_AUG_RAND_NUM: True +GT_AUG_APPLY_PROB: 1.0 +GT_AUG_HARD_RATIO: 0.6 + +PC_REDUCE_BY_RANGE: True +PC_AREA_SCOPE: [[-40, 40], [-1, 3], [0, 70.4]] # x, y, z scope in rect camera coords +CLS_MEAN_SIZE: [[1.52563191462, 1.62856739989, 3.88311640418]] + +USE_IOU_BRANCH: True +USE_IM_DEPTH: False +CROSS_FUSION: False +INPUT_CROSS_FUSION: False + +USE_SELF_ATTENTION: False + +DEEP_RCNN_FUSION: False + +USE_IMAGE_LOSS: False +USE_IMAGE_SCORE: False +IMAGE_WEIGHT: 1.0 + +USE_IMG_DENSE_LOSS: False +USE_MC_LOSS: False + +SAVE_MODEL_PREP: 0.8 + +USE_P2I_GATE: False + +STACK_CROSS_FUSION: False + +USE_IMAGE_RES: False + +USE_PURE_IMG_BACKBONE: False + +USE_PAINTING_SCORE: False +USE_PAINTING_FEAT: False + +LI_FUSION: + ENABLED: True + + ADD_Image_Attention: True + IMG_FEATURES_CHANNEL: 128 + + IMG_CHANNELS: [3, 64, 128, 256, 512] + POINT_CHANNELS: [96, 256, 512, 1024] + + DeConv_Reduce: [16, 16, 16, 16] + DeConv_Kernels: [2, 4, 8, 16] + DeConv_Strides: [2, 4, 8, 16] + +# 1. 
config of rpn network +RPN: + ENABLED: True + FIXED: False + + # config of input + USE_INTENSITY: False + + # config of bin-based loss + LOC_XZ_FINE: True + LOC_SCOPE: 3.0 + LOC_BIN_SIZE: 0.5 + NUM_HEAD_BIN: 12 + + # config of network structure + BACKBONE: pointnet2_msg + USE_BN: True + NUM_POINTS: 16384 + + SA_CONFIG: + ATTN_DIM: 128 + ATTN: [0, 0, 128, 128] + NPOINTS: [4096, 1024, 256, 64] + RADIUS: [[0.1, 0.5], [0.5, 1.0], [1.0, 2.0], [2.0, 4.0]] + NSAMPLE: [[16, 32], [16, 32], [16, 32], [16, 32]] + MLPS: [[[16, 16, 32], [32, 32, 64]], + [[64, 64, 128], [64, 96, 128]], + [[128, 196, 256], [128, 196, 256]], + [[256, 256, 512], [256, 384, 512]]] + FP_MLPS: [[128, 128], [256, 256], [512, 512], [512, 512]] + CLS_FC: [128] + REG_FC: [128] + DP_RATIO: 0.5 + + # config of training + LOSS_CLS: SigmoidFocalLoss + FG_WEIGHT: 15 + FOCAL_ALPHA: [0.25, 0.75] + FOCAL_GAMMA: 2.0 + REG_LOSS_WEIGHT: [1.0, 1.0, 1.0, 1.0] + LOSS_WEIGHT: [1.0, 1.0] + NMS_TYPE: normal + + # config of testing + SCORE_THRESH: 0.2 + +# 2. config of rcnn network +RCNN: + ENABLED: True + + # config of input + ROI_SAMPLE_JIT: True + REG_AUG_METHOD: multiple # multiple, single, normal + ROI_FG_AUG_TIMES: 10 + + USE_RPN_FEATURES: True + USE_MASK: True + MASK_TYPE: seg + USE_INTENSITY: False + USE_DEPTH: True + USE_SEG_SCORE: False + + POOL_EXTRA_WIDTH: 0.2 + + # config of bin-based loss + LOC_SCOPE: 1.5 + LOC_BIN_SIZE: 0.5 + NUM_HEAD_BIN: 9 + LOC_Y_BY_BIN: False + LOC_Y_SCOPE: 0.5 + LOC_Y_BIN_SIZE: 0.25 + SIZE_RES_ON_ROI: False + + # config of network structure + USE_BN: False + DP_RATIO: 0.0 + + BACKBONE: pointnet # pointnet + XYZ_UP_LAYER: [128, 128] + + NUM_POINTS: 512 + SA_CONFIG: + NPOINTS: [128, 32, -1] + RADIUS: [0.2, 0.4, 100] + NSAMPLE: [64, 64, 64] + MLPS: [[128, 128, 128], + [128, 128, 256], + [256, 256, 512]] + CLS_FC: [512, 512] #[256, 256] + REG_FC: [512, 512] #[256, 256] + + # config of training + LOSS_CLS: BinaryCrossEntropy + FOCAL_ALPHA: [0.25, 0.75] + FOCAL_GAMMA: 2.0 + CLS_WEIGHT: [1.0, 1.0, 1.0] + CLS_FG_THRESH: 0.6 + CLS_BG_THRESH: 0.45 + CLS_BG_THRESH_LO: 0.05 + REG_FG_THRESH: 0.55 + FG_RATIO: 0.5 + ROI_PER_IMAGE: 64 + HARD_BG_RATIO: 0.8 + + # config of testing + SCORE_THRESH: 0.2 + NMS_THRESH: 0.1 + +# general training config +TRAIN: + SPLIT: train #aug_train + VAL_SPLIT: smallval + + LR: 0.002 + LR_CLIP: 0.00001 + LR_DECAY: 0.5 + DECAY_STEP_LIST: [100, 150, 180, 200] + LR_WARMUP: True + WARMUP_MIN: 0.0002 + WARMUP_EPOCH: 1 + + BN_MOMENTUM: 0.1 + BN_DECAY: 0.5 + BNM_CLIP: 0.01 + BN_DECAY_STEP_LIST: [1000] + + OPTIMIZER: adam_onecycle # adam, adam_onecycle + WEIGHT_DECAY: 0.001 # L2 regularization + MOMENTUM: 0.9 + + MOMS: [0.95, 0.85] + DIV_FACTOR: 10.0 + PCT_START: 0.4 + + GRAD_NORM_CLIP: 1.0 + + RPN_PRE_NMS_TOP_N: 9000 + RPN_POST_NMS_TOP_N: 512 + RPN_NMS_THRESH: 0.85 + RPN_DISTANCE_BASED_PROPOSE: True + ## + RPN_TRAIN_WEIGHT: 1.0 + RCNN_TRAIN_WEIGHT: 1.0 + CE_WEIGHT: 5.0 + RPN_CE_WEIGHT: 5.0 + IOU_LOSS_TYPE: cls_mask_with_bin #raw + BBOX_AVG_BY_BIN: True + RY_WITH_BIN: False + +TEST: + SPLIT: val #test + RPN_PRE_NMS_TOP_N: 9000 + RPN_POST_NMS_TOP_N: 100 + RPN_NMS_THRESH: 0.8 + RPN_DISTANCE_BASED_PROPOSE: True + + ## + BBOX_AVG_BY_BIN: True + RY_WITH_BIN: False diff --git a/tools/cfgs/CYC_EPNet_plus_plus.yaml b/tools/cfgs/CYC_EPNet_plus_plus.yaml new file mode 100644 index 0000000..f8dc467 --- /dev/null +++ b/tools/cfgs/CYC_EPNet_plus_plus.yaml @@ -0,0 +1,215 @@ +CLASSES: Cyclist + +INCLUDE_SIMILAR_TYPE: True + +# config of augmentation +AUG_DATA: True +AUG_METHOD_LIST: ['rotation', 'scaling', 'flip'] 
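+# AUG_METHOD_PROB below gives one apply probability per entry in AUG_METHOD_LIST above (rotation and scaling always applied, flip with p = 0.5)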
+AUG_METHOD_PROB: [1.0, 1.0, 0.5] +AUG_ROT_RANGE: 18 + +GT_AUG_ENABLED: False +GT_EXTRA_NUM: 15 +GT_AUG_RAND_NUM: True +GT_AUG_APPLY_PROB: 1.0 +GT_AUG_HARD_RATIO: 0.6 + +PC_REDUCE_BY_RANGE: True +PC_AREA_SCOPE: [[-40, 40], [-0.5, 2.5], [0, 70.4]] +CLS_MEAN_SIZE: [[1.73698127,0.59706367,1.76282397]] + +USE_IOU_BRANCH: True +USE_IM_DEPTH: False +CROSS_FUSION: False +INPUT_CROSS_FUSION: False + +USE_SELF_ATTENTION: False + +DEEP_RCNN_FUSION: False + +USE_IMAGE_LOSS: False +USE_IMAGE_SCORE: False +IMAGE_WEIGHT: 1.0 + +USE_IMG_DENSE_LOSS: False +USE_MC_LOSS: False + +SAVE_MODEL_PREP: 0.8 +USE_P2I_GATE: False + +USE_PURE_IMG_BACKBONE: False + +USE_PAINTING_SCORE: False +USE_PAINTING_FEAT: False + +LI_FUSION: + ENABLED: True + + ADD_Image_Attention: True + IMG_FEATURES_CHANNEL: 128 + + IMG_CHANNELS: [3, 64, 128, 256, 512] + POINT_CHANNELS: [96, 256, 512, 1024] + + DeConv_Reduce: [16, 16, 16, 16] + DeConv_Kernels: [2, 4, 8, 16] + DeConv_Strides: [2, 4, 8, 16] + +# 1. config of rpn network +RPN: + ENABLED: True + FIXED: False + + # config of input + USE_INTENSITY: False + + # config of bin-based loss + LOC_XZ_FINE: True + LOC_SCOPE: 3.0 + LOC_BIN_SIZE: 0.5 + NUM_HEAD_BIN: 12 + + # config of network structure + BACKBONE: pointnet2_msg + USE_BN: True + NUM_POINTS: 16384 + + SA_CONFIG: + ATTN_DIM: 128 + ATTN: [0, 0, 128, 128] + NPOINTS: [4096, 1024, 256, 64] + RADIUS: [[0.1, 0.5], [0.5, 1.0], [1.0, 2.0], [2.0, 4.0]] + NSAMPLE: [[16, 32], [16, 32], [16, 32], [16, 32]] + MLPS: [[[16, 16, 32], [32, 32, 64]], + [[64, 64, 128], [64, 96, 128]], + [[128, 196, 256], [128, 196, 256]], + [[256, 256, 512], [256, 384, 512]]] + FP_MLPS: [[128, 128], [256, 256], [512, 512], [512, 512]] + CLS_FC: [128] + REG_FC: [128] + DP_RATIO: 0.5 + + # config of training + LOSS_CLS: SigmoidFocalLoss + FG_WEIGHT: 15 + FOCAL_ALPHA: [0.25, 0.75] + FOCAL_GAMMA: 2.0 + REG_LOSS_WEIGHT: [1.0, 1.0, 1.0, 1.0] + LOSS_WEIGHT: [1.0, 1.0] + NMS_TYPE: normal + + # config of testing + SCORE_THRESH: 0.2 + +# 2. 
config of rcnn network +RCNN: + ENABLED: True + + # config of input + ROI_SAMPLE_JIT: True + REG_AUG_METHOD: multiple # multiple, single, normal + ROI_FG_AUG_TIMES: 10 + + USE_RPN_FEATURES: True + USE_MASK: True + MASK_TYPE: seg + USE_INTENSITY: False + USE_DEPTH: True + USE_SEG_SCORE: False + + POOL_EXTRA_WIDTH: 0.2 + + # config of bin-based loss + LOC_SCOPE: 1.5 + LOC_BIN_SIZE: 0.5 + NUM_HEAD_BIN: 9 + LOC_Y_BY_BIN: False + LOC_Y_SCOPE: 0.5 + LOC_Y_BIN_SIZE: 0.25 + SIZE_RES_ON_ROI: False + + # config of network structure + USE_BN: False + DP_RATIO: 0.0 + + BACKBONE: pointnet # pointnet + XYZ_UP_LAYER: [128, 128] + + NUM_POINTS: 512 + SA_CONFIG: + NPOINTS: [128, 32, -1] + RADIUS: [0.2, 0.4, 100] + NSAMPLE: [64, 64, 64] + MLPS: [[128, 128, 128], + [128, 128, 256], + [256, 256, 512]] + CLS_FC: [512, 512] #[256, 256] + REG_FC: [512, 512] #[256, 256] + + # config of training + LOSS_CLS: BinaryCrossEntropy + FOCAL_ALPHA: [0.25, 0.75] + FOCAL_GAMMA: 2.0 + CLS_WEIGHT: [1.0, 1.0, 1.0] + CLS_FG_THRESH: 0.6 + CLS_BG_THRESH: 0.45 + CLS_BG_THRESH_LO: 0.05 + REG_FG_THRESH: 0.55 + FG_RATIO: 0.5 + ROI_PER_IMAGE: 64 + HARD_BG_RATIO: 0.8 + + # config of testing + SCORE_THRESH: 0.2 + NMS_THRESH: 0.1 + +# general training config +TRAIN: + SPLIT: train #aug_train + VAL_SPLIT: smallval + + LR: 0.002 + LR_CLIP: 0.00001 + LR_DECAY: 0.5 + DECAY_STEP_LIST: [100, 150, 180, 200] + LR_WARMUP: True + WARMUP_MIN: 0.0002 + WARMUP_EPOCH: 1 + + BN_MOMENTUM: 0.1 + BN_DECAY: 0.5 + BNM_CLIP: 0.01 + BN_DECAY_STEP_LIST: [1000] + + OPTIMIZER: adam_onecycle # adam, adam_onecycle + WEIGHT_DECAY: 0.001 # L2 regularization + MOMENTUM: 0.9 + + MOMS: [0.95, 0.85] + DIV_FACTOR: 10.0 + PCT_START: 0.4 + + GRAD_NORM_CLIP: 1.0 + + RPN_PRE_NMS_TOP_N: 9000 + RPN_POST_NMS_TOP_N: 512 + RPN_NMS_THRESH: 0.85 + RPN_DISTANCE_BASED_PROPOSE: True + ## + RPN_TRAIN_WEIGHT: 1.0 + RCNN_TRAIN_WEIGHT: 1.0 + CE_WEIGHT: 5.0 + IOU_LOSS_TYPE: cls_mask_with_bin #raw + BBOX_AVG_BY_BIN: True + RY_WITH_BIN: False + +TEST: + SPLIT: val #test + RPN_PRE_NMS_TOP_N: 9000 + RPN_POST_NMS_TOP_N: 100 + RPN_NMS_THRESH: 0.8 + RPN_DISTANCE_BASED_PROPOSE: True + + ## + BBOX_AVG_BY_BIN: True + RY_WITH_BIN: False diff --git a/tools/cfgs/PED_EPNet_plus_plus.yaml b/tools/cfgs/PED_EPNet_plus_plus.yaml new file mode 100644 index 0000000..5a0a79c --- /dev/null +++ b/tools/cfgs/PED_EPNet_plus_plus.yaml @@ -0,0 +1,216 @@ +CLASSES: Pedestrian + +INCLUDE_SIMILAR_TYPE: True + +# config of augmentation +AUG_DATA: True +AUG_METHOD_LIST: ['rotation', 'scaling', 'flip'] +AUG_METHOD_PROB: [1.0, 1.0, 0.5] +AUG_ROT_RANGE: 18 + +GT_AUG_ENABLED: False +GT_EXTRA_NUM: 15 +GT_AUG_RAND_NUM: True +GT_AUG_APPLY_PROB: 1.0 +GT_AUG_HARD_RATIO: 0.6 + +PC_REDUCE_BY_RANGE: True +PC_AREA_SCOPE: [[-19.84, 19.84], [-0.5, 2.5], [0, 47.36]] +CLS_MEAN_SIZE: [[1.76255119,0.66068622,0.84422524]] # for Ped + +USE_IOU_BRANCH: True +USE_IM_DEPTH: False +CROSS_FUSION: False +INPUT_CROSS_FUSION: False + +USE_SELF_ATTENTION: False + +DEEP_RCNN_FUSION: False + +USE_IMAGE_LOSS: False +USE_IMAGE_SCORE: False +IMAGE_WEIGHT: 1.0 + +USE_IMG_DENSE_LOSS: False +USE_MC_LOSS: False + +SAVE_MODEL_PREP: 0.8 + +USE_P2I_GATE: False + +USE_PURE_IMG_BACKBONE: False + +USE_PAINTING_SCORE: False +USE_PAINTING_FEAT: False + +LI_FUSION: + ENABLED: True + + ADD_Image_Attention: True + IMG_FEATURES_CHANNEL: 128 + + IMG_CHANNELS: [3, 64, 128, 256, 512] + POINT_CHANNELS: [96, 256, 512, 1024] + + DeConv_Reduce: [16, 16, 16, 16] + DeConv_Kernels: [2, 4, 8, 16] + DeConv_Strides: [2, 4, 8, 16] + +# 1. 
config of rpn network +RPN: + ENABLED: True + FIXED: False + + # config of input + USE_INTENSITY: False + + # config of bin-based loss + LOC_XZ_FINE: True + LOC_SCOPE: 3.0 + LOC_BIN_SIZE: 0.5 + NUM_HEAD_BIN: 12 + + # config of network structure + BACKBONE: pointnet2_msg + USE_BN: True + NUM_POINTS: 16384 + + SA_CONFIG: + ATTN_DIM: 128 + ATTN: [0, 0, 128, 128] + NPOINTS: [4096, 1024, 256, 64] + RADIUS: [[0.1, 0.5], [0.5, 1.0], [1.0, 2.0], [2.0, 4.0]] + NSAMPLE: [[16, 32], [16, 32], [16, 32], [16, 32]] + MLPS: [[[16, 16, 32], [32, 32, 64]], + [[64, 64, 128], [64, 96, 128]], + [[128, 196, 256], [128, 196, 256]], + [[256, 256, 512], [256, 384, 512]]] + FP_MLPS: [[128, 128], [256, 256], [512, 512], [512, 512]] + CLS_FC: [128] + REG_FC: [128] + DP_RATIO: 0.5 + + # config of training + LOSS_CLS: SigmoidFocalLoss + FG_WEIGHT: 15 + FOCAL_ALPHA: [0.25, 0.75] + FOCAL_GAMMA: 2.0 + REG_LOSS_WEIGHT: [1.0, 1.0, 1.0, 1.0] + LOSS_WEIGHT: [1.0, 1.0] + NMS_TYPE: normal + + # config of testing + SCORE_THRESH: 0.2 + +# 2. config of rcnn network +RCNN: + ENABLED: True + + # config of input + ROI_SAMPLE_JIT: True + REG_AUG_METHOD: multiple # multiple, single, normal + ROI_FG_AUG_TIMES: 10 + + USE_RPN_FEATURES: True + USE_MASK: True + MASK_TYPE: seg + USE_INTENSITY: False + USE_DEPTH: True + USE_SEG_SCORE: False + + POOL_EXTRA_WIDTH: 0.2 + + # config of bin-based loss + LOC_SCOPE: 1.5 + LOC_BIN_SIZE: 0.5 + NUM_HEAD_BIN: 9 + LOC_Y_BY_BIN: False + LOC_Y_SCOPE: 0.5 + LOC_Y_BIN_SIZE: 0.25 + SIZE_RES_ON_ROI: False + + # config of network structure + USE_BN: False + DP_RATIO: 0.0 + + BACKBONE: pointnet # pointnet + XYZ_UP_LAYER: [128, 128] + + NUM_POINTS: 512 + SA_CONFIG: + NPOINTS: [128, 32, -1] + RADIUS: [0.2, 0.4, 100] + NSAMPLE: [64, 64, 64] + MLPS: [[128, 128, 128], + [128, 128, 256], + [256, 256, 512]] + CLS_FC: [512, 512] #[256, 256] + REG_FC: [512, 512] #[256, 256] + + # config of training + LOSS_CLS: BinaryCrossEntropy + FOCAL_ALPHA: [0.25, 0.75] + FOCAL_GAMMA: 2.0 + CLS_WEIGHT: [1.0, 1.0, 1.0] + CLS_FG_THRESH: 0.6 + CLS_BG_THRESH: 0.45 + CLS_BG_THRESH_LO: 0.05 + REG_FG_THRESH: 0.55 + FG_RATIO: 0.5 + ROI_PER_IMAGE: 64 + HARD_BG_RATIO: 0.8 + + # config of testing + SCORE_THRESH: 0.2 + NMS_THRESH: 0.1 + +# general training config +TRAIN: + SPLIT: train #aug_train + VAL_SPLIT: smallval + + LR: 0.002 + LR_CLIP: 0.00001 + LR_DECAY: 0.5 + DECAY_STEP_LIST: [100, 150, 180, 200] + LR_WARMUP: True + WARMUP_MIN: 0.0002 + WARMUP_EPOCH: 1 + + BN_MOMENTUM: 0.1 + BN_DECAY: 0.5 + BNM_CLIP: 0.01 + BN_DECAY_STEP_LIST: [1000] + + OPTIMIZER: adam_onecycle # adam, adam_onecycle + WEIGHT_DECAY: 0.001 # L2 regularization + MOMENTUM: 0.9 + + MOMS: [0.95, 0.85] + DIV_FACTOR: 10.0 + PCT_START: 0.4 + + GRAD_NORM_CLIP: 1.0 + + RPN_PRE_NMS_TOP_N: 9000 + RPN_POST_NMS_TOP_N: 512 + RPN_NMS_THRESH: 0.85 + RPN_DISTANCE_BASED_PROPOSE: True + ## + RPN_TRAIN_WEIGHT: 1.0 + RCNN_TRAIN_WEIGHT: 1.0 + CE_WEIGHT: 5.0 + IOU_LOSS_TYPE: cls_mask_with_bin #raw + BBOX_AVG_BY_BIN: True + RY_WITH_BIN: False + +TEST: + SPLIT: val #test + RPN_PRE_NMS_TOP_N: 9000 + RPN_POST_NMS_TOP_N: 200 + RPN_NMS_THRESH: 0.8 + RPN_DISTANCE_BASED_PROPOSE: True + + ## + BBOX_AVG_BY_BIN: True + RY_WITH_BIN: False diff --git a/tools/eval_rcnn.py b/tools/eval_rcnn.py new file mode 100644 index 0000000..71de621 --- /dev/null +++ b/tools/eval_rcnn.py @@ -0,0 +1,1026 @@ +import _init_path +import os +import numpy as np +import torch +from torch.utils.data import DataLoader +import torch.nn.functional as F +from lib.net.point_rcnn import PointRCNN +from lib.datasets.kitti_rcnn_dataset 
import KittiRCNNDataset +import tools.train_utils.train_utils as train_utils +from lib.utils.bbox_transform import decode_bbox_target +from tools.kitti_object_eval_python.evaluate import evaluate as kitti_evaluate + +from lib.config import cfg, cfg_from_file, save_config_to_file, cfg_from_list +import argparse +import lib.utils.kitti_utils as kitti_utils +import lib.utils.iou3d.iou3d_utils as iou3d_utils +from datetime import datetime +import logging +import re +import glob +import time +from tensorboardX import SummaryWriter +import tqdm + +np.random.seed(1024) # set the same seed + +parser = argparse.ArgumentParser(description = "arg parser") +parser.add_argument('--cfg_file', type = str, default = 'cfgs/default.yml', help = 'specify the config for evaluation') +parser.add_argument("--eval_mode", type = str, default = 'rpn', required = True, help = "specify the evaluation mode") + +parser.add_argument('--eval_all', action = 'store_true', default = False, help = 'whether to evaluate all checkpoints') +parser.add_argument('--test', action = 'store_true', default = False, help = 'evaluate without ground truth') +parser.add_argument("--ckpt", type = str, default = None, help = "specify a checkpoint to be evaluated") +parser.add_argument("--rpn_ckpt", type = str, default = None, + help = "specify the checkpoint of rpn if trained separated") +parser.add_argument("--rcnn_ckpt", type = str, default = None, + help = "specify the checkpoint of rcnn if trained separated") + +parser.add_argument('--batch_size', type = int, default = 1, help = 'batch size for evaluation') +parser.add_argument('--workers', type = int, default = 4, help = 'number of workers for dataloader') +parser.add_argument("--extra_tag", type = str, default = 'default', help = "extra tag for multiple evaluation") +parser.add_argument('--output_dir', type = str, default = None, help = 'specify an output directory if needed') +parser.add_argument("--ckpt_dir", type = str, default = None, + help = "specify a ckpt directory to be evaluated if needed") +parser.add_argument('--data_path', type = str, default =None, help = 'specify an output directory if needed') + +parser.add_argument('--save_result', action = 'store_true', default = False, help = 'save evaluation results to files') +parser.add_argument('--save_rpn_feature', action = 'store_true', default = False, + help = 'save features for separately rcnn training and evaluation') + +parser.add_argument('--random_select', action = 'store_true', default = True, + help = 'sample to the same number of points') +parser.add_argument('--start_epoch', default = 0, type = int, help = 'ignore the checkpoint smaller than this epoch') +parser.add_argument('--max_waiting_mins', type=int, default=30, help='max waiting minutes') +parser.add_argument("--rcnn_eval_roi_dir", type = str, default = None, + help = 'specify the saved rois for rcnn evaluation when using rcnn_offline mode') +parser.add_argument("--rcnn_eval_feature_dir", type = str, default = None, + help = 'specify the saved features for rcnn evaluation when using rcnn_offline mode') +parser.add_argument('--set', dest = 'set_cfgs', default = None, nargs = argparse.REMAINDER, + help = 'set extra config keys if needed') + +parser.add_argument('--model_type', type = str, default = 'base', help = 'model type') + +args = parser.parse_args() + + +def create_logger(log_file): + log_format = '%(asctime)s %(levelname)5s %(message)s' + logging.basicConfig(level = logging.INFO, format = log_format, filename = log_file) + console = 
logging.StreamHandler() + console.setLevel(logging.INFO) + console.setFormatter(logging.Formatter(log_format)) + logging.getLogger(__name__).addHandler(console) + return logging.getLogger(__name__) + + +def save_kitti_format(sample_id, calib, bbox3d, kitti_output_dir, scores, img_shape): + corners3d = kitti_utils.boxes3d_to_corners3d(bbox3d) + img_boxes, _ = calib.corners3d_to_img_boxes(corners3d) + + img_boxes[:, 0] = np.clip(img_boxes[:, 0], 0, img_shape[1] - 1) + img_boxes[:, 1] = np.clip(img_boxes[:, 1], 0, img_shape[0] - 1) + img_boxes[:, 2] = np.clip(img_boxes[:, 2], 0, img_shape[1] - 1) + img_boxes[:, 3] = np.clip(img_boxes[:, 3], 0, img_shape[0] - 1) + + img_boxes_w = img_boxes[:, 2] - img_boxes[:, 0] + img_boxes_h = img_boxes[:, 3] - img_boxes[:, 1] + box_valid_mask = np.logical_and(img_boxes_w < img_shape[1] * 0.8, img_boxes_h < img_shape[0] * 0.8) + + kitti_output_file = os.path.join(kitti_output_dir, '%06d.txt' % sample_id) + with open(kitti_output_file, 'w') as f: + for k in range(bbox3d.shape[0]): + if box_valid_mask[k] == 0: + continue + x, z, ry = bbox3d[k, 0], bbox3d[k, 2], bbox3d[k, 6] + beta = np.arctan2(z, x) + alpha = -np.sign(beta) * np.pi / 2 + beta + ry + + print('%s -1 -1 %.4f %.4f %.4f %.4f %.4f %.4f %.4f %.4f %.4f %.4f %.4f %.4f %.4f' % + (cfg.CLASSES, alpha, img_boxes[k, 0], img_boxes[k, 1], img_boxes[k, 2], img_boxes[k, 3], + bbox3d[k, 3], bbox3d[k, 4], bbox3d[k, 5], bbox3d[k, 0], bbox3d[k, 1], bbox3d[k, 2], + bbox3d[k, 6], scores[k]), file = f) + + +def save_rpn_features(seg_result, rpn_scores_raw, pts_features, backbone_xyz, backbone_features, kitti_features_dir, + sample_id): + pts_intensity = pts_features[:, 0] + + output_file = os.path.join(kitti_features_dir, '%06d.npy' % sample_id) + xyz_file = os.path.join(kitti_features_dir, '%06d_xyz.npy' % sample_id) + seg_file = os.path.join(kitti_features_dir, '%06d_seg.npy' % sample_id) + intensity_file = os.path.join(kitti_features_dir, '%06d_intensity.npy' % sample_id) + np.save(output_file, backbone_features) + np.save(xyz_file, backbone_xyz) + np.save(seg_file, seg_result) + np.save(intensity_file, pts_intensity) + rpn_scores_raw_file = os.path.join(kitti_features_dir, '%06d_rawscore.npy' % sample_id) + np.save(rpn_scores_raw_file, rpn_scores_raw) + + +def eval_one_epoch_rpn(model, dataloader, epoch_id, result_dir, logger): + np.random.seed(1024) + mode = 'TEST' if args.test else 'EVAL' + + if args.save_rpn_feature: + kitti_features_dir = os.path.join(result_dir, 'features') + os.makedirs(kitti_features_dir, exist_ok = True) + + if args.save_result or args.save_rpn_feature: + kitti_output_dir = os.path.join(result_dir, 'detections', 'data') + seg_output_dir = os.path.join(result_dir, 'seg_result') + os.makedirs(kitti_output_dir, exist_ok = True) + os.makedirs(seg_output_dir, exist_ok = True) + + logger.info('---- EPOCH %s RPN EVALUATION ----' % epoch_id) + model.eval() + + thresh_list = [0.1, 0.3, 0.5, 0.7, 0.9] + total_recalled_bbox_list, total_gt_bbox = [0] * 5, 0 + dataset = dataloader.dataset + cnt = max_num = rpn_iou_avg = 0 + + progress_bar = tqdm.tqdm(total = len(dataloader), leave = True, desc = 'eval') + + for data in dataloader: + sample_id_list, pts_rect, pts_features, pts_input = \ + data['sample_id'], data['pts_rect'], data['pts_features'], data['pts_input'] + sample_id = sample_id_list[0] + cnt += len(sample_id_list) + + if not args.test: + rpn_cls_label, rpn_reg_label = data['rpn_cls_label'], data['rpn_reg_label'] + gt_boxes3d = data['gt_boxes3d'] + + rpn_cls_label = 
torch.from_numpy(rpn_cls_label).cuda(non_blocking = True).long() + if gt_boxes3d.shape[1] == 0: # (B, M, 7) + pass + # logger.info('%06d: No gt box' % sample_id) + else: + gt_boxes3d = torch.from_numpy(gt_boxes3d).cuda(non_blocking = True).float() + + inputs = torch.from_numpy(pts_input).cuda(non_blocking = True).float() + input_data = { 'pts_input': inputs } + + # img feature + if cfg.LI_FUSION.ENABLED: + pts_origin_xy, img = data['pts_origin_xy'], data['img'] + pts_origin_xy = torch.from_numpy(pts_origin_xy).cuda(non_blocking = True).float() + img = torch.from_numpy(img).cuda(non_blocking = True).float().permute((0,3,1,2)) + input_data['pts_origin_xy'] = pts_origin_xy + input_data['img'] = img + + if cfg.RPN.USE_RGB or cfg.RCNN.USE_RGB: + pts_rgb=data['rgb'] + pts_rgb=torch.from_numpy(pts_rgb).cuda(non_blocking = True).float() + input_data['pts_rgb']=pts_rgb + + + # model inference + ret_dict = model(input_data) + rpn_cls, rpn_reg = ret_dict['rpn_cls'], ret_dict['rpn_reg'] + backbone_xyz, backbone_features = ret_dict['backbone_xyz'], ret_dict['backbone_features'] + + rpn_scores_raw = rpn_cls[:, :, 0] + rpn_scores = torch.sigmoid(rpn_scores_raw) + seg_result = (rpn_scores > cfg.RPN.SCORE_THRESH).long() + + # proposal layer + rois, roi_scores_raw = model.rpn.proposal_layer(rpn_scores_raw, rpn_reg, backbone_xyz) # (B, M, 7) + batch_size = rois.shape[0] + + # calculate recall and save results to file + for bs_idx in range(batch_size): + cur_sample_id = sample_id_list[bs_idx] + cur_scores_raw = roi_scores_raw[bs_idx] # (N) + cur_boxes3d = rois[bs_idx] # (N, 7) + cur_seg_result = seg_result[bs_idx] + cur_pts_rect = pts_rect[bs_idx] + + # calculate recall + if not args.test: + cur_rpn_cls_label = rpn_cls_label[bs_idx] + cur_gt_boxes3d = gt_boxes3d[bs_idx] + + k = cur_gt_boxes3d.__len__() - 1 + while k > 0 and cur_gt_boxes3d[k].sum() == 0: + k -= 1 + cur_gt_boxes3d = cur_gt_boxes3d[:k + 1] + + recalled_num = 0 + if cur_gt_boxes3d.shape[0] > 0: + iou3d = iou3d_utils.boxes_iou3d_gpu(cur_boxes3d, cur_gt_boxes3d[:, 0:7]) + gt_max_iou, _ = iou3d.max(dim = 0) + + for idx, thresh in enumerate(thresh_list): + total_recalled_bbox_list[idx] += (gt_max_iou > thresh).sum().item() + recalled_num = (gt_max_iou > 0.7).sum().item() + total_gt_bbox += cur_gt_boxes3d.__len__() + + fg_mask = cur_rpn_cls_label > 0 + correct = ((cur_seg_result == cur_rpn_cls_label) & fg_mask).sum().float() + union = fg_mask.sum().float() + (cur_seg_result > 0).sum().float() - correct + rpn_iou = correct / torch.clamp(union, min = 1.0) + rpn_iou_avg += rpn_iou.item() + + # save result + if args.save_rpn_feature: + # save features to file + save_rpn_features(seg_result[bs_idx].float().cpu().numpy(), + rpn_scores_raw[bs_idx].float().cpu().numpy(), + pts_features[bs_idx], + backbone_xyz[bs_idx].cpu().numpy(), + backbone_features[bs_idx].cpu().numpy().transpose(1, 0), + kitti_features_dir, cur_sample_id) + + if args.save_result or args.save_rpn_feature: + cur_pred_cls = cur_seg_result.cpu().numpy() + output_file = os.path.join(seg_output_dir, '%06d.npy' % cur_sample_id) + if not args.test: + cur_gt_cls = cur_rpn_cls_label.cpu().numpy() + output_data = np.concatenate( + (cur_pts_rect.reshape(-1, 3), cur_gt_cls.reshape(-1, 1), cur_pred_cls.reshape(-1, 1)), + axis = 1) + else: + output_data = np.concatenate((cur_pts_rect.reshape(-1, 3), cur_pred_cls.reshape(-1, 1)), axis = 1) + + np.save(output_file, output_data.astype(np.float16)) + + # save as kitti format + calib = dataset.get_calib(cur_sample_id) + cur_boxes3d = 
cur_boxes3d.cpu().numpy() + image_shape = dataset.get_image_shape(cur_sample_id) + save_kitti_format(cur_sample_id, calib, cur_boxes3d, kitti_output_dir, cur_scores_raw, image_shape) + + disp_dict = { 'mode' : mode, 'recall': '%d/%d' % (total_recalled_bbox_list[3], total_gt_bbox), + 'rpn_iou': rpn_iou_avg / max(cnt, 1.0) } + progress_bar.set_postfix(disp_dict) + progress_bar.update() + + progress_bar.close() + + logger.info(str(datetime.now())) + logger.info('-------------------performance of epoch %s---------------------' % epoch_id) + logger.info('max number of objects: %d' % max_num) + logger.info('rpn iou avg: %f' % (rpn_iou_avg / max(cnt, 1.0))) + + ret_dict = { 'max_obj_num': max_num, 'rpn_iou': rpn_iou_avg / cnt } + + for idx, thresh in enumerate(thresh_list): + cur_recall = total_recalled_bbox_list[idx] / max(total_gt_bbox, 1.0) + logger.info('total bbox recall(thresh=%.3f): %d / %d = %f' % (thresh, total_recalled_bbox_list[idx], + total_gt_bbox, cur_recall)) + ret_dict['rpn_recall(thresh=%.2f)' % thresh] = cur_recall + logger.info('result is saved to: %s' % result_dir) + + return ret_dict + + +def eval_one_epoch_rcnn(model, dataloader, epoch_id, result_dir, logger): + np.random.seed(1024) + MEAN_SIZE = torch.from_numpy(cfg.CLS_MEAN_SIZE[0]).cuda() + mode = 'TEST' if args.test else 'EVAL' + + final_output_dir = os.path.join(result_dir, 'final_result', 'data') + os.makedirs(final_output_dir, exist_ok = True) + + if args.save_result: + roi_output_dir = os.path.join(result_dir, 'roi_result', 'data') + refine_output_dir = os.path.join(result_dir, 'refine_result', 'data') + os.makedirs(roi_output_dir, exist_ok = True) + os.makedirs(refine_output_dir, exist_ok = True) + + logger.info('---- EPOCH %s RCNN EVALUATION ----' % epoch_id) + model.eval() + + thresh_list = [0.1, 0.3, 0.5, 0.7, 0.9] + total_recalled_bbox_list, total_gt_bbox = [0] * 5, 0 + total_roi_recalled_bbox_list = [0] * 5 + dataset = dataloader.dataset + cnt = final_total = total_cls_acc = total_cls_acc_refined = 0 + + progress_bar = tqdm.tqdm(total = len(dataloader), leave = True, desc = 'eval') + for data in dataloader: + sample_id = data['sample_id'] + cnt += 1 + assert args.batch_size == 1, 'Only support bs=1 here' + input_data = { } + for key, val in data.items(): + if key != 'sample_id': + input_data[key] = torch.from_numpy(val).contiguous().cuda(non_blocking = True).float() + + roi_boxes3d = input_data['roi_boxes3d'] + roi_scores = input_data['roi_scores'] + if cfg.RCNN.ROI_SAMPLE_JIT: + for key, val in input_data.items(): + if key in ['gt_iou', 'gt_boxes3d']: + continue + input_data[key] = input_data[key].unsqueeze(dim = 0) + else: + pts_input = torch.cat((input_data['pts_input'], input_data['pts_features']), dim = -1) + input_data['pts_input'] = pts_input + + # img feature + if cfg.LI_FUSION.ENABLED: + pts_origin_xy, img = data['pts_origin_xy'], data['img'] + pts_origin_xy = torch.from_numpy(pts_origin_xy).cuda(non_blocking = True).float() + img = torch.from_numpy(img).cuda(non_blocking = True).float().permute((0,3,1,2)) + input_data['pts_origin_xy'] = pts_origin_xy + input_data['img'] = img + + if cfg.RPN.USE_RGB or cfg.RCNN.USE_RGB: + pts_rgb=data['rgb'] + pts_rgb=torch.from_numpy(pts_rgb).cuda(non_blocking = True).float() + input_data['pts_rgb']=pts_rgb + + + + ret_dict = model(input_data) + rcnn_cls = ret_dict['rcnn_cls'] + rcnn_reg = ret_dict['rcnn_reg'] + + # bounding box regression + anchor_size = MEAN_SIZE + if cfg.RCNN.SIZE_RES_ON_ROI: + roi_size = input_data['roi_size'] + anchor_size = roi_size + + 
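# decode the bin-based box regression (scopes / bin sizes taken from cfg.RCNN) back into refined 3D boxes (x, y, z, h, w, l, ry) +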
pred_boxes3d = decode_bbox_target(roi_boxes3d, rcnn_reg, + anchor_size = anchor_size, + loc_scope = cfg.RCNN.LOC_SCOPE, + loc_bin_size = cfg.RCNN.LOC_BIN_SIZE, + num_head_bin = cfg.RCNN.NUM_HEAD_BIN, + get_xz_fine = True, get_y_by_bin = cfg.RCNN.LOC_Y_BY_BIN, + loc_y_scope = cfg.RCNN.LOC_Y_SCOPE, loc_y_bin_size = cfg.RCNN.LOC_Y_BIN_SIZE, + get_ry_fine = True) + + # scoring + if rcnn_cls.shape[1] == 1: + raw_scores = rcnn_cls.view(-1) + norm_scores = torch.sigmoid(raw_scores) + pred_classes = (norm_scores > cfg.RCNN.SCORE_THRESH).long() + else: + pred_classes = torch.argmax(rcnn_cls, dim = 1).view(-1) + cls_norm_scores = F.softmax(rcnn_cls, dim = 1) + raw_scores = rcnn_cls[:, pred_classes] + norm_scores = cls_norm_scores[:, pred_classes] + + # evaluation + disp_dict = { 'mode': mode } + if not args.test: + gt_boxes3d = input_data['gt_boxes3d'] + gt_iou = input_data['gt_iou'] + + # calculate recall + gt_num = gt_boxes3d.shape[0] + if gt_num > 0: + iou3d = iou3d_utils.boxes_iou3d_gpu(pred_boxes3d, gt_boxes3d) + gt_max_iou, _ = iou3d.max(dim = 0) + refined_iou, _ = iou3d.max(dim = 1) + + for idx, thresh in enumerate(thresh_list): + total_recalled_bbox_list[idx] += (gt_max_iou > thresh).sum().item() + recalled_num = (gt_max_iou > 0.7).sum().item() + total_gt_bbox += gt_num + + iou3d_in = iou3d_utils.boxes_iou3d_gpu(roi_boxes3d, gt_boxes3d) + gt_max_iou_in, _ = iou3d_in.max(dim = 0) + + for idx, thresh in enumerate(thresh_list): + total_roi_recalled_bbox_list[idx] += (gt_max_iou_in > thresh).sum().item() + + # classification accuracy + cls_label = (gt_iou > cfg.RCNN.CLS_FG_THRESH).float() + cls_valid_mask = ((gt_iou >= cfg.RCNN.CLS_FG_THRESH) | (gt_iou <= cfg.RCNN.CLS_BG_THRESH)).float() + cls_acc = ((pred_classes == cls_label.long()).float() * cls_valid_mask).sum() / max(cls_valid_mask.sum(), + 1.0) + + iou_thresh = 0.7 if cfg.CLASSES == 'Car' else 0.5 + cls_label_refined = (gt_iou >= iou_thresh).float() + cls_acc_refined = (pred_classes == cls_label_refined.long()).float().sum() / max(cls_label_refined.shape[0], + 1.0) + + total_cls_acc += cls_acc.item() + total_cls_acc_refined += cls_acc_refined.item() + + disp_dict['recall'] = '%d/%d' % (total_recalled_bbox_list[3], total_gt_bbox) + disp_dict['cls_acc_refined'] = '%.2f' % cls_acc_refined.item() + + progress_bar.set_postfix(disp_dict) + progress_bar.update() + + image_shape = dataset.get_image_shape(sample_id) + if args.save_result: + # save roi and refine results + roi_boxes3d_np = roi_boxes3d.cpu().numpy() + pred_boxes3d_np = pred_boxes3d.cpu().numpy() + calib = dataset.get_calib(sample_id) + + save_kitti_format(sample_id, calib, roi_boxes3d_np, roi_output_dir, roi_scores, image_shape) + save_kitti_format(sample_id, calib, pred_boxes3d_np, refine_output_dir, raw_scores.cpu().numpy(), + image_shape) + + # NMS and scoring + # scores thresh + inds = norm_scores > cfg.RCNN.SCORE_THRESH + if inds.sum() == 0: + continue + + pred_boxes3d_selected = pred_boxes3d[inds] + raw_scores_selected = raw_scores[inds] + + # NMS thresh + boxes_bev_selected = kitti_utils.boxes3d_to_bev_torch(pred_boxes3d_selected) + keep_idx = iou3d_utils.nms_gpu(boxes_bev_selected, raw_scores_selected, cfg.RCNN.NMS_THRESH) + pred_boxes3d_selected = pred_boxes3d_selected[keep_idx] + + scores_selected = raw_scores_selected[keep_idx] + pred_boxes3d_selected, scores_selected = pred_boxes3d_selected.cpu().numpy(), scores_selected.cpu().numpy() + + calib = dataset.get_calib(sample_id) + final_total += pred_boxes3d_selected.shape[0] + save_kitti_format(sample_id, calib, 
pred_boxes3d_selected, final_output_dir, scores_selected, image_shape) + + progress_bar.close() + + # dump empty files + split_file = os.path.join(dataset.imageset_dir, '..', '..', 'ImageSets', dataset.split + '.txt') + split_file = os.path.abspath(split_file) + image_idx_list = [x.strip() for x in open(split_file).readlines()] + empty_cnt = 0 + for k in range(image_idx_list.__len__()): + cur_file = os.path.join(final_output_dir, '%s.txt' % image_idx_list[k]) + if not os.path.exists(cur_file): + with open(cur_file, 'w') as temp_f: + pass + empty_cnt += 1 + logger.info('empty_cnt=%d: dump empty file %s' % (empty_cnt, cur_file)) + + ret_dict = { 'empty_cnt': empty_cnt } + + logger.info('-------------------performance of epoch %s---------------------' % epoch_id) + logger.info(str(datetime.now())) + + avg_cls_acc = (total_cls_acc / max(cnt, 1.0)) + avg_cls_acc_refined = (total_cls_acc_refined / max(cnt, 1.0)) + avg_det_num = (final_total / max(cnt, 1.0)) + logger.info('final average detections: %.3f' % avg_det_num) + logger.info('final average cls acc: %.3f' % avg_cls_acc) + logger.info('final average cls acc refined: %.3f' % avg_cls_acc_refined) + ret_dict['rcnn_cls_acc'] = avg_cls_acc + ret_dict['rcnn_cls_acc_refined'] = avg_cls_acc_refined + ret_dict['rcnn_avg_num'] = avg_det_num + + for idx, thresh in enumerate(thresh_list): + cur_roi_recall = total_roi_recalled_bbox_list[idx] / max(total_gt_bbox, 1.0) + logger.info('total roi bbox recall(thresh=%.3f): %d / %d = %f' % (thresh, total_roi_recalled_bbox_list[idx], + total_gt_bbox, cur_roi_recall)) + ret_dict['rpn_recall(thresh=%.2f)' % thresh] = cur_roi_recall + + for idx, thresh in enumerate(thresh_list): + cur_recall = total_recalled_bbox_list[idx] / max(total_gt_bbox, 1.0) + logger.info('total bbox recall(thresh=%.3f): %d / %d = %f' % (thresh, total_recalled_bbox_list[idx], + total_gt_bbox, cur_recall)) + ret_dict['rcnn_recall(thresh=%.2f)' % thresh] = cur_recall + + if cfg.TEST.SPLIT != 'test': + logger.info('Average Precision:') + name_to_class = { 'Car': 0, 'Pedestrian': 1, 'Cyclist': 2 } + ap_result_str, ap_dict = kitti_evaluate(dataset.label_dir, final_output_dir, label_split_file = split_file, + current_class = name_to_class[cfg.CLASSES]) + logger.info(ap_result_str) + ret_dict.update(ap_dict) + + logger.info('result is saved to: %s' % result_dir) + + return ret_dict + + +def eval_one_epoch_joint(model, dataloader, epoch_id, result_dir, logger): + np.random.seed(666) + MEAN_SIZE = torch.from_numpy(cfg.CLS_MEAN_SIZE[0]).cuda() + mode = 'TEST' if args.test else 'EVAL' + + final_output_dir = os.path.join(result_dir, 'final_result', 'data') + os.makedirs(final_output_dir, exist_ok = True) + + if args.save_result: + roi_output_dir = os.path.join(result_dir, 'roi_result', 'data') + refine_output_dir = os.path.join(result_dir, 'refine_result', 'data') + rpn_output_dir = os.path.join(result_dir, 'rpn_result', 'data') + os.makedirs(rpn_output_dir, exist_ok = True) + os.makedirs(roi_output_dir, exist_ok = True) + os.makedirs(refine_output_dir, exist_ok = True) + + logger.info('---- EPOCH %s JOINT EVALUATION ----' % epoch_id) + logger.info('==> Output file: %s' % result_dir) + model.eval() + + thresh_list = [0.1, 0.3, 0.5, 0.7, 0.9] + total_recalled_bbox_list, total_gt_bbox = [0] * 5, 0 + total_roi_recalled_bbox_list = [0] * 5 + dataset = dataloader.dataset + cnt = final_total = total_cls_acc = total_cls_acc_refined = total_rpn_iou = 0 + + progress_bar = tqdm.tqdm(total = len(dataloader), leave = True, desc = 'eval') + for data in dataloader: 
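+ # run joint RPN + RCNN inference on each batch and accumulate recall / segmentation IoU statistics below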
+ cnt += 1 + sample_id, pts_rect, pts_features, pts_input = \ + data['sample_id'], data['pts_rect'], data['pts_features'], data['pts_input'] + batch_size = len(sample_id) + inputs = torch.from_numpy(pts_input).cuda(non_blocking = True).float() + ###print('sample_id:', sample_id) + input_data = { 'pts_input': inputs } + # img feature + if cfg.LI_FUSION.ENABLED: + pts_origin_xy, img = data['pts_origin_xy'], data['img'] + pts_origin_xy = torch.from_numpy(pts_origin_xy).cuda(non_blocking = True).float() + img = torch.from_numpy(img).cuda(non_blocking = True).float().permute((0,3,1,2)) + input_data['pts_origin_xy'] = pts_origin_xy + input_data['img'] = img + + if cfg.RPN.USE_RGB or cfg.RCNN.USE_RGB: + pts_rgb=data['rgb'] + pts_rgb=torch.from_numpy(pts_rgb).cuda(non_blocking = True).float() + input_data['pts_rgb']=pts_rgb + + if cfg.USE_PAINTING_SCORE: + pts_paint_scores = data['pts_paint_scores'] + input_data['pts_paint_scores'] = torch.from_numpy(pts_paint_scores).cuda(non_blocking = True).float() + if cfg.USE_PAINTING_FEAT: + pts_paint_feats = data['pts_paint_feats'] + input_data['pts_paint_feats'] = torch.from_numpy(pts_paint_feats).cuda(non_blocking = True).float() + + # model inference + ret_dict = model(input_data) + + roi_scores_raw = ret_dict['roi_scores_raw'] # (B, M) + + ######## 2022.9.2 + # rpn_img_score_map = ret_dict['rpn_image_seg'].permute(0,2,3,1).squeeze(0).detach().cpu().numpy() + # rpn_img_feat_map = ret_dict['img_feature'].permute(0,2,3,1).squeeze(0).detach().cpu().numpy() + # str_cls_name = cfg.CLASSES + # #print('####sample_id:', cnt-1) + # print('####sample_id:', sample_id) + # result_dir = '/data2/zheliu/TPAMI_rebuttal_2022' + # final_output_pretrained_img_scores_dir = os.path.join(result_dir, 'img_output', 'pretrained_img_scores', str_cls_name) + # os.makedirs(final_output_pretrained_img_scores_dir, exist_ok = True) + # final_output_pretrained_img_feats_dir = os.path.join(result_dir, 'img_output', 'pretrained_img_feats', str_cls_name) + # os.makedirs(final_output_pretrained_img_feats_dir, exist_ok = True) + # np.save(final_output_pretrained_img_scores_dir+'/'+'%04d'%sample_id+'.npy', rpn_img_score_map) ## have saved!!! 
+        if cfg.USE_IMAGE_SCORE:
+            final_output_point_score_dir = os.path.join(result_dir, 'final_result', 'data_point_scores')
+            os.makedirs(final_output_point_score_dir, exist_ok = True)
+            final_output_image_score_dir = os.path.join(result_dir, 'final_result', 'data_image_scores')
+            os.makedirs(final_output_image_score_dir, exist_ok = True)
+
+            rpn_point_scores = ret_dict['rpn_point_scores']
+            rpn_image_scores = ret_dict['rpn_image_scores']
+            point_scores_np = rpn_point_scores.data.cpu().numpy()
+            image_scores_np = rpn_image_scores.data.cpu().numpy()
+            # np.save(final_output_point_score_dir + '/point_scores%d.npy' % (cnt - 1), point_scores_np)
+            # np.save(final_output_image_score_dir + '/img_scores%d.npy' % (cnt - 1), image_scores_np)
+
+        roi_boxes3d = ret_dict['rois']  # (B, M, 7)
+        seg_result = ret_dict['seg_result'].long()  # (B, N)
+
+        rcnn_cls = ret_dict['rcnn_cls'].view(batch_size, -1, ret_dict['rcnn_cls'].shape[1])
+        rcnn_reg = ret_dict['rcnn_reg'].view(batch_size, -1, ret_dict['rcnn_reg'].shape[1])  # (B, M, C)
+
+        if cfg.USE_IOU_BRANCH:
+            # fuse the predicted IoU with the classification score (clamped to avoid zeros)
+            rcnn_iou_branch = ret_dict['rcnn_iou_branch'].view(batch_size, -1, ret_dict['rcnn_iou_branch'].shape[1])
+            rcnn_iou_branch = torch.max(rcnn_iou_branch,
+                                        rcnn_iou_branch.new().resize_(rcnn_iou_branch.shape).fill_(1e-4))
+            rcnn_cls = rcnn_iou_branch * rcnn_cls
+
+        # bounding box regression
+        anchor_size = MEAN_SIZE
+        if cfg.RCNN.SIZE_RES_ON_ROI:
+            assert False
+
+        pred_boxes3d = decode_bbox_target(roi_boxes3d.view(-1, 7), rcnn_reg.view(-1, rcnn_reg.shape[-1]),
+                                          anchor_size = anchor_size,
+                                          loc_scope = cfg.RCNN.LOC_SCOPE,
+                                          loc_bin_size = cfg.RCNN.LOC_BIN_SIZE,
+                                          num_head_bin = cfg.RCNN.NUM_HEAD_BIN,
+                                          get_xz_fine = True, get_y_by_bin = cfg.RCNN.LOC_Y_BY_BIN,
+                                          loc_y_scope = cfg.RCNN.LOC_Y_SCOPE, loc_y_bin_size = cfg.RCNN.LOC_Y_BIN_SIZE,
+                                          get_ry_fine = True).view(batch_size, -1, 7)
+
+        # scoring
+        if rcnn_cls.shape[2] == 1:
+            raw_scores = rcnn_cls  # (B, M, 1)
+
+            norm_scores = torch.sigmoid(raw_scores)
+            pred_classes = (norm_scores > cfg.RCNN.SCORE_THRESH).long()
+        else:
+            pred_classes = torch.argmax(rcnn_cls, dim = 1).view(-1)
+            cls_norm_scores = F.softmax(rcnn_cls, dim = 1)
+            raw_scores = rcnn_cls[:, pred_classes]
+            norm_scores = cls_norm_scores[:, pred_classes]
+
+        # evaluation
+        recalled_num = gt_num = rpn_iou = 0
+        if not args.test:
+            if not cfg.RPN.FIXED:
+                rpn_cls_label, rpn_reg_label = data['rpn_cls_label'], data['rpn_reg_label']
+                rpn_cls_label = torch.from_numpy(rpn_cls_label).cuda(non_blocking = True).long()
+
+            gt_boxes3d = data['gt_boxes3d']
+
+            for k in range(batch_size):
+                # calculate recall
+                cur_gt_boxes3d = gt_boxes3d[k]
+                tmp_idx = cur_gt_boxes3d.__len__() - 1
+
+                while tmp_idx >= 0 and cur_gt_boxes3d[tmp_idx].sum() == 0:
+                    tmp_idx -= 1
+
+                if tmp_idx >= 0:
+                    cur_gt_boxes3d = cur_gt_boxes3d[:tmp_idx + 1]
+
+                    cur_gt_boxes3d = torch.from_numpy(cur_gt_boxes3d).cuda(non_blocking = True).float()
+                    iou3d = iou3d_utils.boxes_iou3d_gpu(pred_boxes3d[k], cur_gt_boxes3d)
+                    gt_max_iou, _ = iou3d.max(dim = 0)
+                    refined_iou, _ = iou3d.max(dim = 1)
+
+                    for idx, thresh in enumerate(thresh_list):
+                        total_recalled_bbox_list[idx] += (gt_max_iou > thresh).sum().item()
+                    recalled_num += (gt_max_iou > 0.7).sum().item()
+                    gt_num += cur_gt_boxes3d.shape[0]
+                    total_gt_bbox += cur_gt_boxes3d.shape[0]
+
+                    # original recall
+                    iou3d_in = iou3d_utils.boxes_iou3d_gpu(roi_boxes3d[k], cur_gt_boxes3d)
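+                    # iou3d_in measures stage-1 ROI recall, for comparison against
+                    # the refined stage-2 boxes evaluated above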
+                    gt_max_iou_in, _ = iou3d_in.max(dim = 0)
+
+                    for idx, thresh in enumerate(thresh_list):
+                        total_roi_recalled_bbox_list[idx] += (gt_max_iou_in > thresh).sum().item()
+
+            if not cfg.RPN.FIXED:
+                fg_mask = rpn_cls_label > 0
+                correct = ((seg_result == rpn_cls_label) & fg_mask).sum().float()
+                union = fg_mask.sum().float() + (seg_result > 0).sum().float() - correct
+                rpn_iou = correct / torch.clamp(union, min = 1.0)
+                total_rpn_iou += rpn_iou.item()
+
+        disp_dict = { 'mode': mode, 'recall': '%d/%d' % (total_recalled_bbox_list[3], total_gt_bbox) }
+        progress_bar.set_postfix(disp_dict)
+        progress_bar.update()
+
+        if args.save_result:
+            # save roi and refine results
+            roi_boxes3d_np = roi_boxes3d.cpu().numpy()
+            pred_boxes3d_np = pred_boxes3d.cpu().numpy()
+            roi_scores_raw_np = roi_scores_raw.cpu().numpy()
+            raw_scores_np = raw_scores.cpu().numpy()
+
+            rpn_cls_np = ret_dict['rpn_cls'].cpu().numpy()
+            rpn_xyz_np = ret_dict['backbone_xyz'].cpu().numpy()
+            seg_result_np = seg_result.cpu().numpy()
+            output_data = np.concatenate((rpn_xyz_np, rpn_cls_np.reshape(batch_size, -1, 1),
+                                          seg_result_np.reshape(batch_size, -1, 1)), axis = 2)
+
+            for k in range(batch_size):
+                cur_sample_id = sample_id[k]
+                calib = dataset.get_calib(cur_sample_id)
+                image_shape = dataset.get_image_shape(cur_sample_id)
+                save_kitti_format(cur_sample_id, calib, roi_boxes3d_np[k], roi_output_dir,
+                                  roi_scores_raw_np[k], image_shape)
+                save_kitti_format(cur_sample_id, calib, pred_boxes3d_np[k], refine_output_dir,
+                                  raw_scores_np[k], image_shape)
+
+                output_file = os.path.join(rpn_output_dir, '%06d.npy' % cur_sample_id)
+                np.save(output_file, output_data.astype(np.float32))
+
+        # scores thresh
+        inds = norm_scores > cfg.RCNN.SCORE_THRESH
+
+        for k in range(batch_size):
+            cur_inds = inds[k].view(-1)
+            if cur_inds.sum() == 0:
+                continue
+
+            pred_boxes3d_selected = pred_boxes3d[k, cur_inds]
+            raw_scores_selected = raw_scores[k, cur_inds]
+            norm_scores_selected = norm_scores[k, cur_inds]
+
+            # rotated NMS in the bird's-eye view
+            boxes_bev_selected = kitti_utils.boxes3d_to_bev_torch(pred_boxes3d_selected)
+            keep_idx = iou3d_utils.nms_gpu(boxes_bev_selected, raw_scores_selected, cfg.RCNN.NMS_THRESH).view(-1)
+            pred_boxes3d_selected = pred_boxes3d_selected[keep_idx]
+            scores_selected = raw_scores_selected[keep_idx]
+            pred_boxes3d_selected, scores_selected = pred_boxes3d_selected.cpu().numpy(), scores_selected.cpu().numpy()
+
+            cur_sample_id = sample_id[k]
+            calib = dataset.get_calib(cur_sample_id)
+            final_total += pred_boxes3d_selected.shape[0]
+            image_shape = dataset.get_image_shape(cur_sample_id)
+            save_kitti_format(cur_sample_id, calib, pred_boxes3d_selected, final_output_dir, scores_selected,
+                              image_shape)
+
+    progress_bar.close()
+    # dump empty files
+    split_file = os.path.join(dataset.imageset_dir, '..', '..', 'ImageSets', dataset.split + '.txt')
+    split_file = os.path.abspath(split_file)
+    image_idx_list = [x.strip() for x in open(split_file).readlines()]
+    empty_cnt = 0
+    for k in range(image_idx_list.__len__()):
+        cur_file = os.path.join(final_output_dir, '%s.txt' % image_idx_list[k])
+        if not os.path.exists(cur_file):
+            with open(cur_file, 'w') as temp_f:
+                pass
+            empty_cnt += 1
+            logger.info('empty_cnt=%d: dump empty file %s' % (empty_cnt, cur_file))
+
+    ret_dict = { 'empty_cnt': empty_cnt }
+
+    logger.info('-------------------performance of epoch %s---------------------' % epoch_id)
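+    # Aggregate the running counters into per-epoch averages: `cnt` counts
+    # processed batches, `total_gt_bbox` counts ground-truth boxes over the split.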
+    logger.info(str(datetime.now()))
+
+    avg_rpn_iou = (total_rpn_iou / max(cnt, 1.0))
+    avg_cls_acc = (total_cls_acc / max(cnt, 1.0))
+    avg_cls_acc_refined = (total_cls_acc_refined / max(cnt, 1.0))
+    avg_det_num = (final_total / max(len(dataset), 1.0))
+    logger.info('final average detections: %.3f' % avg_det_num)
+    logger.info('final average rpn_iou refined: %.3f' % avg_rpn_iou)
+    logger.info('final average cls acc: %.3f' % avg_cls_acc)
+    logger.info('final average cls acc refined: %.3f' % avg_cls_acc_refined)
+    ret_dict['rpn_iou'] = avg_rpn_iou
+    ret_dict['rcnn_cls_acc'] = avg_cls_acc
+    ret_dict['rcnn_cls_acc_refined'] = avg_cls_acc_refined
+    ret_dict['rcnn_avg_num'] = avg_det_num
+
+    for idx, thresh in enumerate(thresh_list):
+        cur_roi_recall = total_roi_recalled_bbox_list[idx] / max(total_gt_bbox, 1.0)
+        logger.info('total roi bbox recall(thresh=%.3f): %d / %d = %f' % (thresh, total_roi_recalled_bbox_list[idx],
+                                                                          total_gt_bbox, cur_roi_recall))
+        ret_dict['rpn_recall(thresh=%.2f)' % thresh] = cur_roi_recall
+
+    for idx, thresh in enumerate(thresh_list):
+        cur_recall = total_recalled_bbox_list[idx] / max(total_gt_bbox, 1.0)
+        logger.info('total bbox recall(thresh=%.3f): %d / %d = %f' % (thresh, total_recalled_bbox_list[idx],
+                                                                      total_gt_bbox, cur_recall))
+        ret_dict['rcnn_recall(thresh=%.2f)' % thresh] = cur_recall
+
+    if cfg.TEST.SPLIT != 'test':
+        logger.info('Average Precision:')
+        name_to_class = { 'Car': 0, 'Pedestrian': 1, 'Cyclist': 2 }
+        ap_result_str, ap_dict = kitti_evaluate(dataset.label_dir, final_output_dir, label_split_file = split_file,
+                                                current_class = name_to_class[cfg.CLASSES])
+        logger.info(ap_result_str)
+        ret_dict.update(ap_dict)
+
+    logger.info('result is saved to: %s' % result_dir)
+    return ret_dict
+
+
+def eval_one_epoch(model, dataloader, epoch_id, result_dir, logger):
+    if cfg.RPN.ENABLED and not cfg.RCNN.ENABLED:
+        ret_dict = eval_one_epoch_rpn(model, dataloader, epoch_id, result_dir, logger)
+    elif not cfg.RPN.ENABLED and cfg.RCNN.ENABLED:
+        ret_dict = eval_one_epoch_rcnn(model, dataloader, epoch_id, result_dir, logger)
+    elif cfg.RPN.ENABLED and cfg.RCNN.ENABLED:
+        ret_dict = eval_one_epoch_joint(model, dataloader, epoch_id, result_dir, logger)
+    else:
+        raise NotImplementedError
+    return ret_dict
+
+
+def load_part_ckpt(model, filename, logger, total_keys = -1):
+    if os.path.isfile(filename):
+        logger.info("==> Loading part model from checkpoint '{}'".format(filename))
+        checkpoint = torch.load(filename)
+        model_state = checkpoint['model_state']
+
+        update_model_state = { key: val for key, val in model_state.items() if key in model.state_dict() }
+        state_dict = model.state_dict()
+        state_dict.update(update_model_state)
+        model.load_state_dict(state_dict)
+
+        update_keys = update_model_state.keys().__len__()
+        if update_keys == 0:
+            raise RuntimeError
+        logger.info("==> Done (loaded %d/%d)" % (update_keys, total_keys))
+    else:
+        raise FileNotFoundError
+
+
+def load_ckpt_based_on_args(model, logger):
+    if args.ckpt is not None:
+        train_utils.load_checkpoint(model, filename = args.ckpt, logger = logger)
+
+    total_keys = model.state_dict().keys().__len__()
+    if cfg.RPN.ENABLED and args.rpn_ckpt is not None:
+        load_part_ckpt(model, filename = args.rpn_ckpt, logger = logger, total_keys = total_keys)
+
+    if cfg.RCNN.ENABLED and args.rcnn_ckpt is not None:
+        load_part_ckpt(model, filename = args.rcnn_ckpt, logger = logger, total_keys = total_keys)
+
+
+def eval_single_ckpt(root_result_dir, data_path):
+    root_result_dir = os.path.join(root_result_dir, 'eval')
+    # set
epoch_id and output dir + num_list = re.findall(r'\d+', args.ckpt) if args.ckpt is not None else [] + epoch_id = num_list[-1] if num_list.__len__() > 0 else 'no_number' + root_result_dir = os.path.join(root_result_dir, 'epoch_%s' % epoch_id, cfg.TEST.SPLIT) + if args.test: + root_result_dir = os.path.join(root_result_dir, 'test_mode') + + if args.extra_tag != 'default': + root_result_dir = os.path.join(root_result_dir, args.extra_tag) + os.makedirs(root_result_dir, exist_ok = True) + + log_file = os.path.join(root_result_dir, 'log_eval_one.txt') + logger = create_logger(log_file) + logger.info('**********************Start logging**********************') + for key, val in vars(args).items(): + logger.info("{:16} {}".format(key, val)) + save_config_to_file(cfg, logger = logger) + + # create dataloader & network + test_loader = create_dataloader(logger, data_path) + # model = PointRCNN(num_classes=test_loader.dataset.num_class, use_xyz=True, mode='TEST') + if args.model_type == 'base': + model = PointRCNN(num_classes = test_loader.dataset.num_class, use_xyz = True, mode = 'TEST') + # elif args.model_type == 'rpn_mscale': + # model = PointRCNN_mScale(num_classes = test_loader.dataset.num_class, use_xyz = True, mode = 'TEST') + + model.cuda() + + # copy important files to backup + backup_dir = os.path.join(root_result_dir, 'backup_files') + os.makedirs(backup_dir, exist_ok = True) + os.system('cp *.py %s/' % backup_dir) + os.system('cp ../lib/net/*.py %s/' % backup_dir) + os.system('cp ../lib/datasets/kitti_rcnn_dataset.py %s/' % backup_dir) + + # load checkpoint + load_ckpt_based_on_args(model, logger) + + # start evaluation + eval_one_epoch(model, test_loader, epoch_id, root_result_dir, logger) + + +def get_no_evaluated_ckpt(ckpt_dir, ckpt_record_file): + ckpt_list = glob.glob(os.path.join(ckpt_dir, '*checkpoint_epoch_*.pth')) + ckpt_list.sort(key = os.path.getmtime) + evaluated_ckpt_list = [float(x.strip()) for x in open(ckpt_record_file, 'r').readlines()] + + for cur_ckpt in ckpt_list: + num_list = re.findall('checkpoint_epoch_(.*).pth', cur_ckpt) + if num_list.__len__() == 0: + continue + + epoch_id = num_list[-1] + if float(epoch_id) not in evaluated_ckpt_list and int(float(epoch_id)) >= args.start_epoch: + return epoch_id, cur_ckpt + return -1, None + + +def repeat_eval_ckpt(root_result_dir, ckpt_dir, data_path): + root_result_dir = os.path.join(root_result_dir, 'eval', 'eval_all_' + args.extra_tag) + os.makedirs(root_result_dir, exist_ok = True) + + log_file = os.path.join(root_result_dir, 'log_eval_all_%s.txt' % cfg.TEST.SPLIT) + logger = create_logger(log_file) + logger.info('**********************Start logging**********************') + + # save config + for key, val in vars(args).items(): + logger.info("{:16} {}".format(key, val)) + save_config_to_file(cfg, logger = logger) + + # create dataloader & network + test_loader = create_dataloader(logger, data_path) + # model = PointRCNN(num_classes=test_loader.dataset.num_class, use_xyz=True, mode='TEST') + if args.model_type == 'base': + model = PointRCNN(num_classes = test_loader.dataset.num_class, use_xyz = True, mode = 'TEST') + # print(model) + + # elif args.model_type == 'rpn_mscale': + # model = PointRCNN_mScale(num_classes = test_loader.dataset.num_class, use_xyz = True, mode = 'TEST') + model.cuda() + + # copy important files to backup + backup_dir = os.path.join(root_result_dir, 'backup_files') + os.makedirs(backup_dir, exist_ok = True) + os.system('cp *.py %s/' % backup_dir) + os.system('cp ../lib/net/*.py %s/' % backup_dir) + 
os.system('cp ../lib/datasets/kitti_rcnn_dataset.py %s/' % backup_dir) + + # evaluated ckpt record + ckpt_record_file = os.path.join(root_result_dir, 'eval_list_%s.txt' % cfg.TEST.SPLIT) + with open(ckpt_record_file, 'a'): + pass + + # tensorboard log + tb_log = SummaryWriter(logdir = os.path.join(root_result_dir, 'tensorboard_%s' % cfg.TEST.SPLIT)) + + total_time = 0 + first_eval = True + while True: + # check whether there is checkpoint which is not evaluated + cur_epoch_id, cur_ckpt = get_no_evaluated_ckpt(ckpt_dir, ckpt_record_file) + if cur_epoch_id == -1 or int(float(cur_epoch_id)) < args.start_epoch: + wait_second = 30 + print('Wait %s second for next check: %s' % (wait_second, ckpt_dir)) + time.sleep(wait_second) + total_time += 30 + if total_time > args.max_waiting_mins * 60 and (first_eval is False): + break + continue + + total_time = 0 + first_eval = False + + # load checkpoint + print('##############cur_ckpt:', cur_ckpt) + train_utils.load_checkpoint(model, filename = cur_ckpt) + + # start evaluation + cur_result_dir = os.path.join(root_result_dir, 'epoch_%s' % cur_epoch_id, cfg.TEST.SPLIT) + tb_dict = eval_one_epoch(model, test_loader, cur_epoch_id, cur_result_dir, logger) + + step = int(float(cur_epoch_id)) + if step == float(cur_epoch_id): + for key, val in tb_dict.items(): + tb_log.add_scalar(key, val, step) + + # record this epoch which has been evaluated + with open(ckpt_record_file, 'a') as f: + print('%s' % cur_epoch_id, file = f) + logger.info('Epoch %s has been evaluated' % cur_epoch_id) + + +def create_dataloader(logger, data_path): + mode = 'TEST' if args.test else 'EVAL' + DATA_PATH = os.path.join(data_path) #os.path.join('../', 'data') + + # create dataloader + test_set = KittiRCNNDataset(root_dir = DATA_PATH, npoints = cfg.RPN.NUM_POINTS, split = cfg.TEST.SPLIT, mode = mode, + random_select = args.random_select, + rcnn_eval_roi_dir = args.rcnn_eval_roi_dir, + rcnn_eval_feature_dir = args.rcnn_eval_feature_dir, + classes = cfg.CLASSES, + logger = logger) + + test_loader = DataLoader(test_set, batch_size = args.batch_size, shuffle = False, pin_memory = True, + num_workers = args.workers, collate_fn = test_set.collate_batch) + + return test_loader + + +if __name__ == "__main__": + # merge config and log to file + if args.cfg_file is not None: + cfg_from_file(args.cfg_file) + if args.set_cfgs is not None: + cfg_from_list(args.set_cfgs) + cfg.TAG = os.path.splitext(os.path.basename(args.cfg_file))[0] + + if args.eval_mode == 'rpn': + cfg.RPN.ENABLED = True + cfg.RCNN.ENABLED = False + root_result_dir = os.path.join('../', 'output', 'rpn', cfg.TAG) + ckpt_dir = os.path.join('../', 'output', 'rpn', cfg.TAG, 'ckpt') + elif args.eval_mode == 'rcnn': + cfg.RCNN.ENABLED = True + cfg.RPN.ENABLED = cfg.RPN.FIXED = True + root_result_dir = os.path.join('../', 'output', 'rcnn', cfg.TAG) + ckpt_dir = os.path.join('../', 'output', 'rcnn', cfg.TAG, 'ckpt') + elif args.eval_mode == 'rcnn_online': + cfg.RCNN.ENABLED = True + cfg.RPN.ENABLED = True + cfg.RPN.FIXED = False + root_result_dir = os.path.join('../', 'output', 'rcnn', cfg.TAG) + ckpt_dir = os.path.join('../', 'output', 'rcnn', cfg.TAG, 'ckpt') + elif args.eval_mode == 'rcnn_offline': + cfg.RCNN.ENABLED = True + cfg.RPN.ENABLED = False + root_result_dir = os.path.join('../', 'output', 'rcnn', cfg.TAG) + ckpt_dir = os.path.join('../', 'output', 'rcnn', cfg.TAG, 'ckpt') + assert args.rcnn_eval_roi_dir is not None and args.rcnn_eval_feature_dir is not None + else: + raise NotImplementedError + + if args.ckpt_dir is not None: 
+ ckpt_dir = args.ckpt_dir + + if args.output_dir is not None: + root_result_dir = args.output_dir + + os.makedirs(root_result_dir, exist_ok = True) + + with torch.no_grad(): + if args.eval_all: + assert os.path.exists(ckpt_dir), '%s' % ckpt_dir + repeat_eval_ckpt(root_result_dir, ckpt_dir, data_path=args.data_path) + else: + eval_single_ckpt(root_result_dir, data_path=args.data_path) diff --git a/tools/generate_aug_scene.py b/tools/generate_aug_scene.py new file mode 100644 index 0000000..56b6e09 --- /dev/null +++ b/tools/generate_aug_scene.py @@ -0,0 +1,325 @@ +import _init_path +import os +import numpy as np +import pickle +import torch + +import lib.utils.kitti_utils as kitti_utils +import lib.utils.roipool3d.roipool3d_utils as roipool3d_utils +import lib.utils.iou3d.iou3d_utils as iou3d_utils +from lib.datasets.kitti_dataset import KittiDataset +import argparse + +np.random.seed(1024) + +parser = argparse.ArgumentParser() +parser.add_argument('--mode', type = str, default = 'generator') +parser.add_argument('--class_name', type = str, default = 'Car') +parser.add_argument('--save_dir', type = str, default = '../../data/KITTI/aug_scene/training') +parser.add_argument('--split', type = str, default = 'train') +parser.add_argument('--gt_database_dir', type = str, default = 'gt_database/train_gt_database_3level_Car.pkl') +parser.add_argument('--include_similar', action = 'store_true', default = False) +parser.add_argument('--aug_times', type = int, default = 4) +args = parser.parse_args() + +PC_REDUCE_BY_RANGE = True +if args.class_name == 'Car': + PC_AREA_SCOPE = np.array([[-40, 40], [-1, 3], [0, 70.4]]) # x, y, z scope in rect camera coords +else: + PC_AREA_SCOPE = np.array([[-30, 30], [-1, 3], [0, 50]]) + + +def log_print(info, fp = None): + print(info) + if fp is not None: + print(info, file = fp) + + +def save_kitti_format(calib, bbox3d, obj_list, img_shape, save_fp): + corners3d = kitti_utils.boxes3d_to_corners3d(bbox3d) + img_boxes, _ = calib.corners3d_to_img_boxes(corners3d) + + img_boxes[:, 0] = np.clip(img_boxes[:, 0], 0, img_shape[1] - 1) + img_boxes[:, 1] = np.clip(img_boxes[:, 1], 0, img_shape[0] - 1) + img_boxes[:, 2] = np.clip(img_boxes[:, 2], 0, img_shape[1] - 1) + img_boxes[:, 3] = np.clip(img_boxes[:, 3], 0, img_shape[0] - 1) + + # Discard boxes that are larger than 80% of the image width OR height + img_boxes_w = img_boxes[:, 2] - img_boxes[:, 0] + img_boxes_h = img_boxes[:, 3] - img_boxes[:, 1] + box_valid_mask = np.logical_and(img_boxes_w < img_shape[1] * 0.8, img_boxes_h < img_shape[0] * 0.8) + + for k in range(bbox3d.shape[0]): + if box_valid_mask[k] == 0: + continue + x, z, ry = bbox3d[k, 0], bbox3d[k, 2], bbox3d[k, 6] + beta = np.arctan2(z, x) + alpha = -np.sign(beta) * np.pi / 2 + beta + ry + + print('%s %.2f %d %.4f %.4f %.4f %.4f %.4f %.4f %.4f %.4f %.4f %.4f %.4f %.4f' % + (args.class_name, obj_list[k].trucation, int(obj_list[k].occlusion), alpha, img_boxes[k, 0], + img_boxes[k, 1], + img_boxes[k, 2], img_boxes[k, 3], + bbox3d[k, 3], bbox3d[k, 4], bbox3d[k, 5], bbox3d[k, 0], bbox3d[k, 1], bbox3d[k, 2], + bbox3d[k, 6]), file = save_fp) + + +class AugSceneGenerator(KittiDataset): + def __init__(self, root_dir, gt_database = None, split = 'train', classes = args.class_name): + super().__init__(root_dir, split = split) + self.gt_database = None + if classes == 'Car': + self.classes = ('Background', 'Car') + elif classes == 'People': + self.classes = ('Background', 'Pedestrian', 'Cyclist') + elif classes == 'Pedestrian': + self.classes = ('Background', 
'Pedestrian') + elif classes == 'Cyclist': + self.classes = ('Background', 'Cyclist') + else: + assert False, "Invalid classes: %s" % classes + + self.gt_database = gt_database + + def __len__(self): + raise NotImplementedError + + def __getitem__(self, item): + raise NotImplementedError + + def filtrate_dc_objects(self, obj_list): + valid_obj_list = [] + for obj in obj_list: + if obj.cls_type in ['DontCare']: + continue + valid_obj_list.append(obj) + + return valid_obj_list + + def filtrate_objects(self, obj_list): + valid_obj_list = [] + type_whitelist = self.classes + if args.include_similar: + type_whitelist = list(self.classes) + if 'Car' in self.classes: + type_whitelist.append('Van') + if 'Pedestrian' in self.classes or 'Cyclist' in self.classes: + type_whitelist.append('Person_sitting') + + for obj in obj_list: + if obj.cls_type in type_whitelist: + valid_obj_list.append(obj) + return valid_obj_list + + @staticmethod + def get_valid_flag(pts_rect, pts_img, pts_rect_depth, img_shape): + """ + Valid point should be in the image (and in the PC_AREA_SCOPE) + :param pts_rect: + :param pts_img: + :param pts_rect_depth: + :param img_shape: + :return: + """ + val_flag_1 = np.logical_and(pts_img[:, 0] >= 0, pts_img[:, 0] < img_shape[1]) + val_flag_2 = np.logical_and(pts_img[:, 1] >= 0, pts_img[:, 1] < img_shape[0]) + val_flag_merge = np.logical_and(val_flag_1, val_flag_2) + pts_valid_flag = np.logical_and(val_flag_merge, pts_rect_depth >= 0) + + if PC_REDUCE_BY_RANGE: + x_range, y_range, z_range = PC_AREA_SCOPE + pts_x, pts_y, pts_z = pts_rect[:, 0], pts_rect[:, 1], pts_rect[:, 2] + range_flag = (pts_x >= x_range[0]) & (pts_x <= x_range[1]) \ + & (pts_y >= y_range[0]) & (pts_y <= y_range[1]) \ + & (pts_z >= z_range[0]) & (pts_z <= z_range[1]) + pts_valid_flag = pts_valid_flag & range_flag + return pts_valid_flag + + @staticmethod + def check_pc_range(xyz): + """ + :param xyz: [x, y, z] + :return: + """ + x_range, y_range, z_range = PC_AREA_SCOPE + if (x_range[0] <= xyz[0] <= x_range[1]) and (y_range[0] <= xyz[1] <= y_range[1]) and \ + (z_range[0] <= xyz[2] <= z_range[1]): + return True + return False + + def aug_one_scene(self, sample_id, pts_rect, pts_intensity, all_gt_boxes3d): + """ + :param pts_rect: (N, 3) + :param gt_boxes3d: (M1, 7) + :param all_gt_boxex3d: (M2, 7) + :return: + """ + assert self.gt_database is not None + extra_gt_num = np.random.randint(10, 15) + try_times = 50 + cnt = 0 + cur_gt_boxes3d = all_gt_boxes3d.copy() + cur_gt_boxes3d[:, 4] += 0.5 + cur_gt_boxes3d[:, 5] += 0.5 # enlarge new added box to avoid too nearby boxes + + extra_gt_obj_list = [] + extra_gt_boxes3d_list = [] + new_pts_list, new_pts_intensity_list = [], [] + src_pts_flag = np.ones(pts_rect.shape[0], dtype = np.int32) + + road_plane = self.get_road_plane(sample_id) + a, b, c, d = road_plane + + while try_times > 0: + try_times -= 1 + + rand_idx = np.random.randint(0, self.gt_database.__len__() - 1) + + new_gt_dict = self.gt_database[rand_idx] + new_gt_box3d = new_gt_dict['gt_box3d'].copy() + new_gt_points = new_gt_dict['points'].copy() + new_gt_intensity = new_gt_dict['intensity'].copy() + new_gt_obj = new_gt_dict['obj'] + center = new_gt_box3d[0:3] + if PC_REDUCE_BY_RANGE and (self.check_pc_range(center) is False): + continue + if cnt > extra_gt_num: + break + if new_gt_points.__len__() < 5: # too few points + continue + + # put it on the road plane + cur_height = (-d - a * center[0] - c * center[2]) / b + move_height = new_gt_box3d[1] - cur_height + new_gt_box3d[1] -= move_height + new_gt_points[:, 1] 
-= move_height + + cnt += 1 + + iou3d = iou3d_utils.boxes_iou3d_gpu(torch.from_numpy(new_gt_box3d.reshape(1, 7)).cuda(), + torch.from_numpy(cur_gt_boxes3d).cuda()).cpu().numpy() + + valid_flag = iou3d.max() < 1e-8 + if not valid_flag: + continue + + enlarged_box3d = new_gt_box3d.copy() + enlarged_box3d[3] += 2 # remove the points above and below the object + boxes_pts_mask_list = roipool3d_utils.pts_in_boxes3d_cpu(torch.from_numpy(pts_rect), + torch.from_numpy(enlarged_box3d.reshape(1, 7))) + pt_mask_flag = (boxes_pts_mask_list[0].numpy() == 1) + src_pts_flag[pt_mask_flag] = 0 # remove the original points which are inside the new box + + new_pts_list.append(new_gt_points) + new_pts_intensity_list.append(new_gt_intensity) + enlarged_box3d = new_gt_box3d.copy() + enlarged_box3d[4] += 0.5 + enlarged_box3d[5] += 0.5 # enlarge new added box to avoid too nearby boxes + cur_gt_boxes3d = np.concatenate((cur_gt_boxes3d, enlarged_box3d.reshape(1, 7)), axis = 0) + extra_gt_boxes3d_list.append(new_gt_box3d.reshape(1, 7)) + extra_gt_obj_list.append(new_gt_obj) + + if new_pts_list.__len__() == 0: + return False, pts_rect, pts_intensity, None, None + + extra_gt_boxes3d = np.concatenate(extra_gt_boxes3d_list, axis = 0) + # remove original points and add new points + pts_rect = pts_rect[src_pts_flag == 1] + pts_intensity = pts_intensity[src_pts_flag == 1] + new_pts_rect = np.concatenate(new_pts_list, axis = 0) + new_pts_intensity = np.concatenate(new_pts_intensity_list, axis = 0) + pts_rect = np.concatenate((pts_rect, new_pts_rect), axis = 0) + pts_intensity = np.concatenate((pts_intensity, new_pts_intensity), axis = 0) + + return True, pts_rect, pts_intensity, extra_gt_boxes3d, extra_gt_obj_list + + def aug_one_epoch_scene(self, base_id, data_save_dir, label_save_dir, split_list, log_fp = None): + for idx, sample_id in enumerate(self.image_idx_list): + sample_id = int(sample_id) + print('process gt sample (%s, id=%06d)' % (args.split, sample_id)) + + pts_lidar = self.get_lidar(sample_id) + calib = self.get_calib(sample_id) + pts_rect = calib.lidar_to_rect(pts_lidar[:, 0:3]) + pts_img, pts_rect_depth = calib.rect_to_img(pts_rect) + img_shape = self.get_image_shape(sample_id) + + pts_valid_flag = self.get_valid_flag(pts_rect, pts_img, pts_rect_depth, img_shape) + pts_rect = pts_rect[pts_valid_flag][:, 0:3] + pts_intensity = pts_lidar[pts_valid_flag][:, 3] + + # all labels for checking overlapping + all_obj_list = self.filtrate_dc_objects(self.get_label(sample_id)) + all_gt_boxes3d = np.zeros((all_obj_list.__len__(), 7), dtype = np.float32) + for k, obj in enumerate(all_obj_list): + all_gt_boxes3d[k, 0:3], all_gt_boxes3d[k, 3], all_gt_boxes3d[k, 4], all_gt_boxes3d[k, 5], \ + all_gt_boxes3d[k, 6] = obj.pos, obj.h, obj.w, obj.l, obj.ry + + # gt_boxes3d of current label + obj_list = self.filtrate_objects(self.get_label(sample_id)) + if args.class_name != 'Car' and obj_list.__len__() == 0: + continue + + # augment one scene + aug_flag, pts_rect, pts_intensity, extra_gt_boxes3d, extra_gt_obj_list = \ + self.aug_one_scene(sample_id, pts_rect, pts_intensity, all_gt_boxes3d) + + # save augment result to file + pts_info = np.concatenate((pts_rect, pts_intensity.reshape(-1, 1)), axis = 1) + bin_file = os.path.join(data_save_dir, '%06d.bin' % (base_id + sample_id)) + pts_info.astype(np.float32).tofile(bin_file) + + # save filtered original gt_boxes3d + label_save_file = os.path.join(label_save_dir, '%06d.txt' % (base_id + sample_id)) + with open(label_save_file, 'w') as f: + for obj in obj_list: + 
print(obj.to_kitti_format(), file = f) + + if aug_flag: + # augment successfully + save_kitti_format(calib, extra_gt_boxes3d, extra_gt_obj_list, img_shape = img_shape, save_fp = f) + else: + extra_gt_boxes3d = np.zeros((0, 7), dtype = np.float32) + log_print('Save to file (new_obj: %s): %s' % (extra_gt_boxes3d.__len__(), label_save_file), fp = log_fp) + split_list.append('%06d' % (base_id + sample_id)) + + def generate_aug_scene(self, aug_times, log_fp = None): + data_save_dir = os.path.join(args.save_dir, 'rectified_data') + label_save_dir = os.path.join(args.save_dir, 'aug_label') + os.makedirs(data_save_dir, exist_ok = True) + os.makedirs(label_save_dir, exist_ok = True) + + split_file = os.path.join(args.save_dir, '%s_aug.txt' % args.split) + split_list = self.image_idx_list.copy() + for epoch in range(aug_times): + base_id = (epoch + 1) * 10000 + self.aug_one_epoch_scene(base_id, data_save_dir, label_save_dir, split_list, log_fp = log_fp) + + with open(split_file, 'w') as f: + for idx, sample_id in enumerate(split_list): + print(sample_id, file = f, end = '') + if idx != len(split_list) - 1: + print('', file = f) + log_print('Save split file to %s' % split_file, fp = log_fp) + target_dir = '../../data/KITTI/ImageSets/' + os.system('cp %s %s' % (split_file, target_dir)) + log_print('Copy split file from %s to %s' % (split_file, target_dir), fp = log_fp) + + +if __name__ == '__main__': + os.makedirs(args.save_dir, exist_ok = True) + info_file = os.path.join(args.save_dir, 'log_info.txt') + + if args.mode == 'generator': + log_fp = open(info_file, 'w') + + gt_database = pickle.load(open(args.gt_database_dir, 'rb')) + log_print('Loading gt_database(%d) from %s' % (gt_database.__len__(), args.gt_database_dir), fp = log_fp) + + dataset = AugSceneGenerator(root_dir = '../../data', gt_database = gt_database, split = args.split) + dataset.generate_aug_scene(aug_times = args.aug_times, log_fp = log_fp) + + log_fp.close() + + else: + pass diff --git a/tools/generate_gt_database.py b/tools/generate_gt_database.py new file mode 100644 index 0000000..ebe6034 --- /dev/null +++ b/tools/generate_gt_database.py @@ -0,0 +1,121 @@ +import _init_path +import os +import numpy as np +import pickle +import torch +from torch.nn.functional import grid_sample + +import lib.utils.roipool3d.roipool3d_utils as roipool3d_utils +from lib.datasets.kitti_dataset import KittiDataset +import argparse + +from lib.datasets.kitti_rcnn_dataset import interpolate_img_by_xy + +parser = argparse.ArgumentParser() +parser.add_argument('--save_dir', type = str, default = './gt_database') +parser.add_argument('--class_name', type = str, default = 'Car') +parser.add_argument('--split', type = str, default = 'train') +args = parser.parse_args() + + +class GTDatabaseGenerator(KittiDataset): + def __init__(self, root_dir, split = 'train', classes = args.class_name): + super().__init__(root_dir, split = split) + self.gt_database = None + if classes == 'Car': + self.classes = ('Background', 'Car') + elif classes == 'People': + self.classes = ('Background', 'Pedestrian', 'Cyclist') + elif classes == 'Pedestrian': + self.classes = ('Background', 'Pedestrian') + elif classes == 'Cyclist': + self.classes = ('Background', 'Cyclist') + else: + assert False, "Invalid classes: %s" % classes + + def __len__(self): + raise NotImplementedError + + def __getitem__(self, item): + raise NotImplementedError + + def filtrate_objects(self, obj_list): + valid_obj_list = [] + for obj in obj_list: + if obj.cls_type not in self.classes: + continue + if 
obj.level_str not in ['Easy', 'Moderate', 'Hard']: + continue + valid_obj_list.append(obj) + + return valid_obj_list + + def generate_gt_database(self): + gt_database = [] + for idx, sample_id in enumerate(self.image_idx_list): + sample_id = int(sample_id) + print('process gt sample (id=%06d)' % sample_id) + + pts_lidar = self.get_lidar(sample_id) + calib = self.get_calib(sample_id) + pts_rect = calib.lidar_to_rect(pts_lidar[:, 0:3]) + pts_intensity = pts_lidar[:, 3] + + # (H,W,3) + img = self.get_image_rgb_with_normal(sample_id) + + pts_img, pts_depth = calib.rect_to_img(pts_rect) + + obj_list = self.filtrate_objects(self.get_label(sample_id)) + + gt_boxes3d = np.zeros((obj_list.__len__(), 7), dtype = np.float32) + for k, obj in enumerate(obj_list): + gt_boxes3d[k, 0:3], gt_boxes3d[k, 3], gt_boxes3d[k, 4], gt_boxes3d[k, 5], gt_boxes3d[k, 6] \ + = obj.pos, obj.h, obj.w, obj.l, obj.ry + + if gt_boxes3d.__len__() == 0: + print('No gt object') + continue + + boxes_pts_mask_list = roipool3d_utils.pts_in_boxes3d_cpu(torch.from_numpy(pts_rect), + torch.from_numpy(gt_boxes3d)) + + shape = self.image_hw_with_padding_np.reshape((1, 1, 2)) + for k in range(boxes_pts_mask_list.__len__()): + pt_mask_flag = (boxes_pts_mask_list[k].numpy() == 1) + cur_pts = pts_rect[pt_mask_flag].astype(np.float32) + cur_pts_intensity = pts_intensity[pt_mask_flag].astype(np.float32) + # add img rbg into point cloud + # (N,2) + cur_pts_img_xy = pts_img[pt_mask_flag].astype(np.float32) + + cur_pts_rgb=interpolate_img_by_xy(img, cur_pts_img_xy, shape) + + + sample_dict = { 'sample_id': sample_id, + 'cls_type' : obj_list[k].cls_type, + 'gt_box3d' : gt_boxes3d[k], + 'points' : cur_pts, + 'rgb' : cur_pts_rgb, + 'intensity': cur_pts_intensity, + 'obj' : obj_list[k] } + gt_database.append(sample_dict) + + save_file_name = os.path.join(args.save_dir, '%s_gt_database_3level_%s.pkl' % (args.split, self.classes[-1])) + with open(save_file_name, 'wb') as f: + pickle.dump(gt_database, f) + + self.gt_database = gt_database + print('Save refine training sample info file to %s' % save_file_name) + + +if __name__ == '__main__': + dataset = GTDatabaseGenerator(root_dir = '../../data/', split = args.split) + os.makedirs(args.save_dir, exist_ok = True) + + dataset.generate_gt_database() + + # gt_database = pickle.load(open('gt_database/train_gt_database.pkl', 'rb')) + # print(gt_database.__len__()) + # import pdb + # pdb.set_trace() diff --git a/tools/kitti_object_eval_python/LICENSE b/tools/kitti_object_eval_python/LICENSE new file mode 100644 index 0000000..ab60297 --- /dev/null +++ b/tools/kitti_object_eval_python/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2018 + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/tools/kitti_object_eval_python/README.md b/tools/kitti_object_eval_python/README.md new file mode 100644 index 0000000..913183e --- /dev/null +++ b/tools/kitti_object_eval_python/README.md @@ -0,0 +1,32 @@ +# kitti-object-eval-python +**Note**: This is borrowed from [traveller59/kitti-object-eval-python](https://github.com/traveller59/kitti-object-eval-python) + +Fast kitti object detection eval in python(finish eval in less than 10 second), support 2d/bev/3d/aos. , support coco-style AP. If you use command line interface, numba need some time to compile jit functions. +## Dependencies +Only support python 3.6+, need `numpy`, `skimage`, `numba`, `fire`. If you have Anaconda, just install `cudatoolkit` in anaconda. Otherwise, please reference to this [page](https://github.com/numba/numba#custom-python-environments) to set up llvm and cuda for numba. +* Install by conda: +``` +conda install -c numba cudatoolkit=x.x (8.0, 9.0, 9.1, depend on your environment) +``` +## Usage +* commandline interface: +``` +python evaluate.py evaluate --label_path=/path/to/your_gt_label_folder --result_path=/path/to/your_result_folder --label_split_file=/path/to/val.txt --current_class=0 --coco=False +``` +* python interface: +```Python +import kitti_common as kitti +from eval import get_official_eval_result, get_coco_eval_result +def _read_imageset_file(path): + with open(path, 'r') as f: + lines = f.readlines() + return [int(line) for line in lines] +det_path = "/path/to/your_result_folder" +dt_annos = kitti.get_label_annos(det_path) +gt_path = "/path/to/your_gt_label_folder" +gt_split_file = "/path/to/val.txt" # from https://xiaozhichen.github.io/files/mv3d/imagesets.tar.gz +val_image_ids = _read_imageset_file(gt_split_file) +gt_annos = kitti.get_label_annos(gt_path, val_image_ids) +print(get_official_eval_result(gt_annos, dt_annos, 0)) # 6s in my computer +print(get_coco_eval_result(gt_annos, dt_annos, 0)) # 18s in my computer +``` diff --git a/tools/kitti_object_eval_python/eval.py b/tools/kitti_object_eval_python/eval.py new file mode 100644 index 0000000..812603b --- /dev/null +++ b/tools/kitti_object_eval_python/eval.py @@ -0,0 +1,744 @@ +import numpy as np +import numba +import io as sysio +from tools.kitti_object_eval_python.rotate_iou import rotate_iou_gpu_eval + + +@numba.jit +def get_thresholds(scores: np.ndarray, num_gt, num_sample_pts = 41): + scores.sort() + scores = scores[::-1] + current_recall = 0 + thresholds = [] + for i, score in enumerate(scores): + l_recall = (i + 1) / num_gt + if i < (len(scores) - 1): + r_recall = (i + 2) / num_gt + else: + r_recall = l_recall + if (((r_recall - current_recall) < (current_recall - l_recall)) + and (i < (len(scores) - 1))): + continue + # recall = l_recall + thresholds.append(score) + current_recall += 1 / (num_sample_pts - 1.0) + return thresholds + + +def clean_data(gt_anno, dt_anno, current_class, difficulty): + CLASS_NAMES = ['car', 'pedestrian', 'cyclist'] + MIN_HEIGHT = [40, 25, 25] + MAX_OCCLUSION = [0, 1, 2] + MAX_TRUNCATION = [0.15, 0.3, 0.5] + dc_bboxes, ignored_gt, ignored_dt = [], [], [] + current_cls_name = CLASS_NAMES[current_class].lower() + num_gt = len(gt_anno["name"]) + num_dt = len(dt_anno["name"]) + num_valid_gt = 0 + for i in range(num_gt): + bbox = 
gt_anno["bbox"][i]
+        gt_name = gt_anno["name"][i].lower()
+        height = bbox[3] - bbox[1]
+        valid_class = -1
+        if (gt_name == current_cls_name):
+            valid_class = 1
+        elif (current_cls_name == "Pedestrian".lower()
+              and "Person_sitting".lower() == gt_name):
+            valid_class = 0
+        elif (current_cls_name == "Car".lower() and "Van".lower() == gt_name):
+            valid_class = 0
+        else:
+            valid_class = -1
+        ignore = False
+        if ((gt_anno["occluded"][i] > MAX_OCCLUSION[difficulty])
+                or (gt_anno["truncated"][i] > MAX_TRUNCATION[difficulty])
+                or (height <= MIN_HEIGHT[difficulty])):
+            ignore = True
+        if valid_class == 1 and not ignore:
+            ignored_gt.append(0)
+            num_valid_gt += 1
+        elif (valid_class == 0 or (ignore and (valid_class == 1))):
+            ignored_gt.append(1)
+        else:
+            ignored_gt.append(-1)
+        if gt_anno["name"][i] == "DontCare":
+            dc_bboxes.append(gt_anno["bbox"][i])
+    for i in range(num_dt):
+        if (dt_anno["name"][i].lower() == current_cls_name):
+            valid_class = 1
+        else:
+            valid_class = -1
+        height = abs(dt_anno["bbox"][i, 3] - dt_anno["bbox"][i, 1])
+        if height < MIN_HEIGHT[difficulty]:
+            ignored_dt.append(1)
+        elif valid_class == 1:
+            ignored_dt.append(0)
+        else:
+            ignored_dt.append(-1)
+
+    return num_valid_gt, ignored_gt, ignored_dt, dc_bboxes
+
+
+@numba.jit(nopython = True)
+def image_box_overlap(boxes, query_boxes, criterion = -1):
+    N = boxes.shape[0]
+    K = query_boxes.shape[0]
+    overlaps = np.zeros((N, K), dtype = boxes.dtype)
+    for k in range(K):
+        qbox_area = ((query_boxes[k, 2] - query_boxes[k, 0]) *
+                     (query_boxes[k, 3] - query_boxes[k, 1]))
+        for n in range(N):
+            iw = (min(boxes[n, 2], query_boxes[k, 2]) -
+                  max(boxes[n, 0], query_boxes[k, 0]))
+            if iw > 0:
+                ih = (min(boxes[n, 3], query_boxes[k, 3]) -
+                      max(boxes[n, 1], query_boxes[k, 1]))
+                if ih > 0:
+                    if criterion == -1:
+                        ua = (
+                            (boxes[n, 2] - boxes[n, 0]) *
+                            (boxes[n, 3] - boxes[n, 1]) + qbox_area - iw * ih)
+                    elif criterion == 0:
+                        ua = ((boxes[n, 2] - boxes[n, 0]) *
+                              (boxes[n, 3] - boxes[n, 1]))
+                    elif criterion == 1:
+                        ua = qbox_area
+                    else:
+                        ua = 1.0
+                    overlaps[n, k] = iw * ih / ua
+    return overlaps
+
+
+def bev_box_overlap(boxes, qboxes, criterion = -1):
+    riou = rotate_iou_gpu_eval(boxes, qboxes, criterion)
+    return riou
+
+
+@numba.jit(nopython = True, parallel = True)
+def d3_box_overlap_kernel(boxes, qboxes, rinc, criterion = -1):
+    # ONLY supports overlap in CAMERA coordinates, not lidar.
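+    # `rinc` arrives holding the rotated BEV intersection areas; this kernel
+    # multiplies in the vertical overlap along the camera y-axis (boxes[:, 1] is
+    # the box bottom, boxes[:, 4] the height) and normalises in place to 3D IoU.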
+ N, K = boxes.shape[0], qboxes.shape[0] + for i in range(N): + for j in range(K): + if rinc[i, j] > 0: + # iw = (min(boxes[i, 1] + boxes[i, 4], qboxes[j, 1] + + # qboxes[j, 4]) - max(boxes[i, 1], qboxes[j, 1])) + iw = (min(boxes[i, 1], qboxes[j, 1]) - max( + boxes[i, 1] - boxes[i, 4], qboxes[j, 1] - qboxes[j, 4])) + + if iw > 0: + area1 = boxes[i, 3] * boxes[i, 4] * boxes[i, 5] + area2 = qboxes[j, 3] * qboxes[j, 4] * qboxes[j, 5] + inc = iw * rinc[i, j] + if criterion == -1: + ua = (area1 + area2 - inc) + elif criterion == 0: + ua = area1 + elif criterion == 1: + ua = area2 + else: + ua = inc + rinc[i, j] = inc / ua + else: + rinc[i, j] = 0.0 + + +def d3_box_overlap(boxes, qboxes, criterion = -1): + rinc = rotate_iou_gpu_eval(boxes[:, [0, 2, 3, 5, 6]], + qboxes[:, [0, 2, 3, 5, 6]], 2) + d3_box_overlap_kernel(boxes, qboxes, rinc, criterion) + return rinc + + +@numba.jit(nopython = True) +def compute_statistics_jit(overlaps, + gt_datas, + dt_datas, + ignored_gt, + ignored_det, + dc_bboxes, + metric, + min_overlap, + thresh = 0, + compute_fp = False, + compute_aos = False): + det_size = dt_datas.shape[0] + gt_size = gt_datas.shape[0] + dt_scores = dt_datas[:, -1] + dt_alphas = dt_datas[:, 4] + gt_alphas = gt_datas[:, 4] + dt_bboxes = dt_datas[:, :4] + gt_bboxes = gt_datas[:, :4] + + assigned_detection = [False] * det_size + ignored_threshold = [False] * det_size + if compute_fp: + for i in range(det_size): + if (dt_scores[i] < thresh): + ignored_threshold[i] = True + NO_DETECTION = -10000000 + tp, fp, fn, similarity = 0, 0, 0, 0 + # thresholds = [0.0] + # delta = [0.0] + thresholds = np.zeros((gt_size,)) + thresh_idx = 0 + delta = np.zeros((gt_size,)) + delta_idx = 0 + for i in range(gt_size): + if ignored_gt[i] == -1: + continue + det_idx = -1 + valid_detection = NO_DETECTION + max_overlap = 0 + assigned_ignored_det = False + + for j in range(det_size): + if (ignored_det[j] == -1): + continue + if (assigned_detection[j]): + continue + if (ignored_threshold[j]): + continue + overlap = overlaps[j, i] + dt_score = dt_scores[j] + if (not compute_fp and (overlap > min_overlap) + and dt_score > valid_detection): + det_idx = j + valid_detection = dt_score + elif (compute_fp and (overlap > min_overlap) + and (overlap > max_overlap or assigned_ignored_det) + and ignored_det[j] == 0): + max_overlap = overlap + det_idx = j + valid_detection = 1 + assigned_ignored_det = False + elif (compute_fp and (overlap > min_overlap) + and (valid_detection == NO_DETECTION) + and ignored_det[j] == 1): + det_idx = j + valid_detection = 1 + assigned_ignored_det = True + + if (valid_detection == NO_DETECTION) and ignored_gt[i] == 0: + fn += 1 + elif ((valid_detection != NO_DETECTION) + and (ignored_gt[i] == 1 or ignored_det[det_idx] == 1)): + assigned_detection[det_idx] = True + elif valid_detection != NO_DETECTION: + tp += 1 + # thresholds.append(dt_scores[det_idx]) + thresholds[thresh_idx] = dt_scores[det_idx] + thresh_idx += 1 + if compute_aos: + # delta.append(gt_alphas[i] - dt_alphas[det_idx]) + delta[delta_idx] = gt_alphas[i] - dt_alphas[det_idx] + delta_idx += 1 + + assigned_detection[det_idx] = True + if compute_fp: + for i in range(det_size): + if (not (assigned_detection[i] or ignored_det[i] == -1 + or ignored_det[i] == 1 or ignored_threshold[i])): + fp += 1 + nstuff = 0 + if metric == 0: + overlaps_dt_dc = image_box_overlap(dt_bboxes, dc_bboxes, 0) + for i in range(dc_bboxes.shape[0]): + for j in range(det_size): + if (assigned_detection[j]): + continue + if (ignored_det[j] == -1 or ignored_det[j] == 1): + 
continue + if (ignored_threshold[j]): + continue + if overlaps_dt_dc[j, i] > min_overlap: + assigned_detection[j] = True + nstuff += 1 + fp -= nstuff + if compute_aos: + tmp = np.zeros((fp + delta_idx,)) + # tmp = [0] * fp + for i in range(delta_idx): + tmp[i + fp] = (1.0 + np.cos(delta[i])) / 2.0 + # tmp.append((1.0 + np.cos(delta[i])) / 2.0) + # assert len(tmp) == fp + tp + # assert len(delta) == tp + if tp > 0 or fp > 0: + similarity = np.sum(tmp) + else: + similarity = -1 + return tp, fp, fn, similarity, thresholds[:thresh_idx] + + +def get_split_parts(num, num_part): + same_part = num // num_part + remain_num = num % num_part + if remain_num == 0: + return [same_part] * num_part + else: + return [same_part] * num_part + [remain_num] + + +@numba.jit(nopython = True) +def fused_compute_statistics(overlaps, + pr, + gt_nums, + dt_nums, + dc_nums, + gt_datas, + dt_datas, + dontcares, + ignored_gts, + ignored_dets, + metric, + min_overlap, + thresholds, + compute_aos = False): + gt_num = 0 + dt_num = 0 + dc_num = 0 + for i in range(gt_nums.shape[0]): + for t, thresh in enumerate(thresholds): + overlap = overlaps[dt_num:dt_num + dt_nums[i], gt_num: + gt_num + gt_nums[i]] + + gt_data = gt_datas[gt_num:gt_num + gt_nums[i]] + dt_data = dt_datas[dt_num:dt_num + dt_nums[i]] + ignored_gt = ignored_gts[gt_num:gt_num + gt_nums[i]] + ignored_det = ignored_dets[dt_num:dt_num + dt_nums[i]] + dontcare = dontcares[dc_num:dc_num + dc_nums[i]] + tp, fp, fn, similarity, _ = compute_statistics_jit( + overlap, + gt_data, + dt_data, + ignored_gt, + ignored_det, + dontcare, + metric, + min_overlap = min_overlap, + thresh = thresh, + compute_fp = True, + compute_aos = compute_aos) + pr[t, 0] += tp + pr[t, 1] += fp + pr[t, 2] += fn + if similarity != -1: + pr[t, 3] += similarity + gt_num += gt_nums[i] + dt_num += dt_nums[i] + dc_num += dc_nums[i] + + +def calculate_iou_partly(gt_annos, dt_annos, metric, num_parts = 50): + """fast iou algorithm. this function can be used independently to + do result analysis. Must be used in CAMERA coordinate system. + Args: + gt_annos: dict, must from get_label_annos() in kitti_common.py + dt_annos: dict, must from get_label_annos() in kitti_common.py + metric: eval type. 0: bbox, 1: bev, 2: 3d + num_parts: int. 
a parameter for fast calculate algorithm + """ + assert len(gt_annos) == len(dt_annos) + total_dt_num = np.stack([len(a["name"]) for a in dt_annos], 0) + total_gt_num = np.stack([len(a["name"]) for a in gt_annos], 0) + num_examples = len(gt_annos) + split_parts = get_split_parts(num_examples, num_parts) + parted_overlaps = [] + example_idx = 0 + + for num_part in split_parts: + gt_annos_part = gt_annos[example_idx:example_idx + num_part] + dt_annos_part = dt_annos[example_idx:example_idx + num_part] + if metric == 0: + gt_boxes = np.concatenate([a["bbox"] for a in gt_annos_part], 0) + dt_boxes = np.concatenate([a["bbox"] for a in dt_annos_part], 0) + overlap_part = image_box_overlap(gt_boxes, dt_boxes) + elif metric == 1: + loc = np.concatenate( + [a["location"][:, [0, 2]] for a in gt_annos_part], 0) + dims = np.concatenate( + [a["dimensions"][:, [0, 2]] for a in gt_annos_part], 0) + rots = np.concatenate([a["rotation_y"] for a in gt_annos_part], 0) + gt_boxes = np.concatenate( + [loc, dims, rots[..., np.newaxis]], axis = 1) + loc = np.concatenate( + [a["location"][:, [0, 2]] for a in dt_annos_part], 0) + dims = np.concatenate( + [a["dimensions"][:, [0, 2]] for a in dt_annos_part], 0) + rots = np.concatenate([a["rotation_y"] for a in dt_annos_part], 0) + dt_boxes = np.concatenate( + [loc, dims, rots[..., np.newaxis]], axis = 1) + overlap_part = bev_box_overlap(gt_boxes, dt_boxes).astype( + np.float64) + elif metric == 2: + loc = np.concatenate([a["location"] for a in gt_annos_part], 0) + dims = np.concatenate([a["dimensions"] for a in gt_annos_part], 0) + rots = np.concatenate([a["rotation_y"] for a in gt_annos_part], 0) + gt_boxes = np.concatenate( + [loc, dims, rots[..., np.newaxis]], axis = 1) + loc = np.concatenate([a["location"] for a in dt_annos_part], 0) + dims = np.concatenate([a["dimensions"] for a in dt_annos_part], 0) + rots = np.concatenate([a["rotation_y"] for a in dt_annos_part], 0) + dt_boxes = np.concatenate( + [loc, dims, rots[..., np.newaxis]], axis = 1) + overlap_part = d3_box_overlap(gt_boxes, dt_boxes).astype( + np.float64) + else: + raise ValueError("unknown metric") + parted_overlaps.append(overlap_part) + example_idx += num_part + overlaps = [] + example_idx = 0 + for j, num_part in enumerate(split_parts): + gt_annos_part = gt_annos[example_idx:example_idx + num_part] + dt_annos_part = dt_annos[example_idx:example_idx + num_part] + gt_num_idx, dt_num_idx = 0, 0 + for i in range(num_part): + gt_box_num = total_gt_num[example_idx + i] + dt_box_num = total_dt_num[example_idx + i] + overlaps.append( + parted_overlaps[j][gt_num_idx:gt_num_idx + gt_box_num, + dt_num_idx:dt_num_idx + dt_box_num]) + gt_num_idx += gt_box_num + dt_num_idx += dt_box_num + example_idx += num_part + + return overlaps, parted_overlaps, total_gt_num, total_dt_num + + +def _prepare_data(gt_annos, dt_annos, current_class, difficulty): + gt_datas_list = [] + dt_datas_list = [] + total_dc_num = [] + ignored_gts, ignored_dets, dontcares = [], [], [] + total_num_valid_gt = 0 + for i in range(len(gt_annos)): + rets = clean_data(gt_annos[i], dt_annos[i], current_class, difficulty) + num_valid_gt, ignored_gt, ignored_det, dc_bboxes = rets + ignored_gts.append(np.array(ignored_gt, dtype = np.int64)) + ignored_dets.append(np.array(ignored_det, dtype = np.int64)) + if len(dc_bboxes) == 0: + dc_bboxes = np.zeros((0, 4)).astype(np.float64) + else: + dc_bboxes = np.stack(dc_bboxes, 0).astype(np.float64) + total_dc_num.append(dc_bboxes.shape[0]) + dontcares.append(dc_bboxes) + total_num_valid_gt += num_valid_gt + 
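+        # gt_datas rows are [bbox(4), alpha]; dt_datas rows additionally carry
+        # the detection score as the final column.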
gt_datas = np.concatenate( + [gt_annos[i]["bbox"], gt_annos[i]["alpha"][..., np.newaxis]], 1) + dt_datas = np.concatenate([ + dt_annos[i]["bbox"], dt_annos[i]["alpha"][..., np.newaxis], + dt_annos[i]["score"][..., np.newaxis] + ], 1) + gt_datas_list.append(gt_datas) + dt_datas_list.append(dt_datas) + total_dc_num = np.stack(total_dc_num, axis = 0) + return (gt_datas_list, dt_datas_list, ignored_gts, ignored_dets, dontcares, + total_dc_num, total_num_valid_gt) + + +def eval_class(gt_annos, + dt_annos, + current_classes, + difficultys, + metric, + min_overlaps, + compute_aos = False, + num_parts = 50): + """Kitti eval. support 2d/bev/3d/aos eval. support 0.5:0.05:0.95 coco AP. + Args: + gt_annos: dict, must from get_label_annos() in kitti_common.py + dt_annos: dict, must from get_label_annos() in kitti_common.py + current_classes: list of int, 0: car, 1: pedestrian, 2: cyclist + difficultys: list of int. eval difficulty, 0: easy, 1: normal, 2: hard + metric: eval type. 0: bbox, 1: bev, 2: 3d + min_overlaps: float, min overlap. format: [num_overlap, metric, class]. + num_parts: int. a parameter for fast calculate algorithm + + Returns: + dict of recall, precision and aos + """ + assert len(gt_annos) == len(dt_annos) + num_examples = len(gt_annos) + split_parts = get_split_parts(num_examples, num_parts) + + rets = calculate_iou_partly(dt_annos, gt_annos, metric, num_parts) + overlaps, parted_overlaps, total_dt_num, total_gt_num = rets + N_SAMPLE_PTS = 41 + num_minoverlap = len(min_overlaps) + num_class = len(current_classes) + num_difficulty = len(difficultys) + precision = np.zeros( + [num_class, num_difficulty, num_minoverlap, N_SAMPLE_PTS]) + recall = np.zeros( + [num_class, num_difficulty, num_minoverlap, N_SAMPLE_PTS]) + aos = np.zeros([num_class, num_difficulty, num_minoverlap, N_SAMPLE_PTS]) + for m, current_class in enumerate(current_classes): + for l, difficulty in enumerate(difficultys): + rets = _prepare_data(gt_annos, dt_annos, current_class, difficulty) + (gt_datas_list, dt_datas_list, ignored_gts, ignored_dets, + dontcares, total_dc_num, total_num_valid_gt) = rets + for k, min_overlap in enumerate(min_overlaps[:, metric, m]): + thresholdss = [] + for i in range(len(gt_annos)): + rets = compute_statistics_jit( + overlaps[i], + gt_datas_list[i], + dt_datas_list[i], + ignored_gts[i], + ignored_dets[i], + dontcares[i], + metric, + min_overlap = min_overlap, + thresh = 0.0, + compute_fp = False) + tp, fp, fn, similarity, thresholds = rets + thresholdss += thresholds.tolist() + thresholdss = np.array(thresholdss) + thresholds = get_thresholds(thresholdss, total_num_valid_gt) + thresholds = np.array(thresholds) + pr = np.zeros([len(thresholds), 4]) + idx = 0 + for j, num_part in enumerate(split_parts): + gt_datas_part = np.concatenate( + gt_datas_list[idx:idx + num_part], 0) + dt_datas_part = np.concatenate( + dt_datas_list[idx:idx + num_part], 0) + dc_datas_part = np.concatenate( + dontcares[idx:idx + num_part], 0) + ignored_dets_part = np.concatenate( + ignored_dets[idx:idx + num_part], 0) + ignored_gts_part = np.concatenate( + ignored_gts[idx:idx + num_part], 0) + fused_compute_statistics( + parted_overlaps[j], + pr, + total_gt_num[idx:idx + num_part], + total_dt_num[idx:idx + num_part], + total_dc_num[idx:idx + num_part], + gt_datas_part, + dt_datas_part, + dc_datas_part, + ignored_gts_part, + ignored_dets_part, + metric, + min_overlap = min_overlap, + thresholds = thresholds, + compute_aos = compute_aos) + idx += num_part + for i in range(len(thresholds)): + recall[m, l, k, i] = 
pr[i, 0] / (pr[i, 0] + pr[i, 2]) + precision[m, l, k, i] = pr[i, 0] / (pr[i, 0] + pr[i, 1]) + if compute_aos: + aos[m, l, k, i] = pr[i, 3] / (pr[i, 0] + pr[i, 1]) + for i in range(len(thresholds)): + precision[m, l, k, i] = np.max( + precision[m, l, k, i:], axis = -1) + recall[m, l, k, i] = np.max(recall[m, l, k, i:], axis = -1) + if compute_aos: + aos[m, l, k, i] = np.max(aos[m, l, k, i:], axis = -1) + ret_dict = { + "recall" : recall, + "precision" : precision, + "orientation": aos, + } + return ret_dict + + +# def get_mAP(prec): +# sums = 0 +# for i in range(0, prec.shape[-1], 4): +# sums = sums + prec[..., i] +# return sums / 11 * 100 + +def get_mAP(prec): + sums = 0 + for i in range(1, prec.shape[-1], 1): + sums = sums + prec[..., i] + return sums / 40 * 100 + + +def print_str(value, *arg, sstream = None): + if sstream is None: + sstream = sysio.StringIO() + sstream.truncate(0) + sstream.seek(0) + print(value, *arg, file = sstream) + return sstream.getvalue() + + +def do_eval(gt_annos, + dt_annos, + current_classes, + min_overlaps, + compute_aos = False): + # min_overlaps: [num_minoverlap, metric, num_class] + difficultys = [0, 1, 2] + ret = eval_class(gt_annos, dt_annos, current_classes, difficultys, 0, + min_overlaps, compute_aos) + # ret: [num_class, num_diff, num_minoverlap, num_sample_points] + mAP_bbox = get_mAP(ret["precision"]) + mAP_aos = None + if compute_aos: + mAP_aos = get_mAP(ret["orientation"]) + ret = eval_class(gt_annos, dt_annos, current_classes, difficultys, 1, + min_overlaps) + mAP_bev = get_mAP(ret["precision"]) + ret = eval_class(gt_annos, dt_annos, current_classes, difficultys, 2, + min_overlaps) + mAP_3d = get_mAP(ret["precision"]) + return mAP_bbox, mAP_bev, mAP_3d, mAP_aos + + +def do_coco_style_eval(gt_annos, dt_annos, current_classes, overlap_ranges, + compute_aos): + # overlap_ranges: [range, metric, num_class] + min_overlaps = np.zeros([10, *overlap_ranges.shape[1:]]) + for i in range(overlap_ranges.shape[1]): + for j in range(overlap_ranges.shape[2]): + min_overlaps[:, i, j] = np.linspace(*overlap_ranges[:, i, j]) + mAP_bbox, mAP_bev, mAP_3d, mAP_aos = do_eval( + gt_annos, dt_annos, current_classes, min_overlaps, compute_aos) + # ret: [num_class, num_diff, num_minoverlap] + mAP_bbox = mAP_bbox.mean(-1) + mAP_bev = mAP_bev.mean(-1) + mAP_3d = mAP_3d.mean(-1) + if mAP_aos is not None: + mAP_aos = mAP_aos.mean(-1) + return mAP_bbox, mAP_bev, mAP_3d, mAP_aos + + +def get_official_eval_result(gt_annos, dt_annos, current_classes): + overlap_0_7 = np.array([[0.7, 0.5, 0.5, 0.7, + 0.5], [0.7, 0.5, 0.5, 0.7, 0.5], + [0.7, 0.5, 0.5, 0.7, 0.5]]) + overlap_0_5 = np.array([[0.7, 0.5, 0.5, 0.7, + 0.5], [0.5, 0.25, 0.25, 0.5, 0.25], + [0.5, 0.25, 0.25, 0.5, 0.25]]) + min_overlaps = np.stack([overlap_0_7, overlap_0_5], axis = 0) # [2, 3, 5] + class_to_name = { + 0: 'Car', + 1: 'Pedestrian', + 2: 'Cyclist', + 3: 'Van', + 4: 'Person_sitting', + } + name_to_class = { v: n for n, v in class_to_name.items() } + if not isinstance(current_classes, (list, tuple)): + current_classes = [current_classes] + current_classes_int = [] + for curcls in current_classes: + if isinstance(curcls, str): + current_classes_int.append(name_to_class[curcls]) + else: + current_classes_int.append(curcls) + current_classes = current_classes_int + min_overlaps = min_overlaps[:, :, current_classes] + result = '' + # check whether alpha is valid + compute_aos = False + for anno in dt_annos: + if anno['alpha'].shape[0] != 0: + if anno['alpha'][0] != -10: + compute_aos = True + break + mAPbbox, mAPbev, 
mAP3d, mAPaos = do_eval( + gt_annos, dt_annos, current_classes, min_overlaps, compute_aos) + + ret_dict = { } + for j, curcls in enumerate(current_classes): + # mAP threshold array: [num_minoverlap, metric, class] + # mAP result: [num_class, num_diff, num_minoverlap] + for i in range(min_overlaps.shape[0]): + result += print_str( + (f"{class_to_name[curcls]} " + "AP@{:.2f}, {:.2f}, {:.2f}:".format(*min_overlaps[i, :, j]))) + result += print_str((f"bbox AP:{mAPbbox[j, 0, i]:.4f}, " + f"{mAPbbox[j, 1, i]:.4f}, " + f"{mAPbbox[j, 2, i]:.4f}")) + result += print_str((f"bev AP:{mAPbev[j, 0, i]:.4f}, " + f"{mAPbev[j, 1, i]:.4f}, " + f"{mAPbev[j, 2, i]:.4f}")) + result += print_str((f"3d AP:{mAP3d[j, 0, i]:.4f}, " + f"{mAP3d[j, 1, i]:.4f}, " + f"{mAP3d[j, 2, i]:.4f}")) + + if compute_aos: + result += print_str((f"aos AP:{mAPaos[j, 0, i]:.2f}, " + f"{mAPaos[j, 1, i]:.2f}, " + f"{mAPaos[j, 2, i]:.2f}")) + ret_dict['Car_3d_easy'] = mAP3d[0, 0, 0] + ret_dict['Car_3d_moderate'] = mAP3d[0, 1, 0] + ret_dict['Car_3d_hard'] = mAP3d[0, 2, 0] + ret_dict['Car_bev_easy'] = mAPbev[0, 0, 0] + ret_dict['Car_bev_moderate'] = mAPbev[0, 1, 0] + ret_dict['Car_bev_hard'] = mAPbev[0, 2, 0] + ret_dict['Car_image_easy'] = mAPbbox[0, 0, 0] + ret_dict['Car_image_moderate'] = mAPbbox[0, 1, 0] + ret_dict['Car_image_hard'] = mAPbbox[0, 2, 0] + + return result, ret_dict + + +def get_coco_eval_result(gt_annos, dt_annos, current_classes): + class_to_name = { + 0: 'Car', + 1: 'Pedestrian', + 2: 'Cyclist', + 3: 'Van', + 4: 'Person_sitting', + } + class_to_range = { + 0: [0.5, 0.95, 10], + 1: [0.25, 0.7, 10], + 2: [0.25, 0.7, 10], + 3: [0.5, 0.95, 10], + 4: [0.25, 0.7, 10], + } + name_to_class = { v: n for n, v in class_to_name.items() } + if not isinstance(current_classes, (list, tuple)): + current_classes = [current_classes] + current_classes_int = [] + for curcls in current_classes: + if isinstance(curcls, str): + current_classes_int.append(name_to_class[curcls]) + else: + current_classes_int.append(curcls) + current_classes = current_classes_int + overlap_ranges = np.zeros([3, 3, len(current_classes)]) + for i, curcls in enumerate(current_classes): + overlap_ranges[:, :, i] = np.array( + class_to_range[curcls])[:, np.newaxis] + result = '' + # check whether alpha is valid + compute_aos = False + for anno in dt_annos: + if anno['alpha'].shape[0] != 0: + if anno['alpha'][0] != -10: + compute_aos = True + break + mAPbbox, mAPbev, mAP3d, mAPaos = do_coco_style_eval( + gt_annos, dt_annos, current_classes, overlap_ranges, compute_aos) + for j, curcls in enumerate(current_classes): + # mAP threshold array: [num_minoverlap, metric, class] + # mAP result: [num_class, num_diff, num_minoverlap] + o_range = np.array(class_to_range[curcls])[[0, 2, 1]] + o_range[1] = (o_range[2] - o_range[0]) / (o_range[1] - 1) + result += print_str((f"{class_to_name[curcls]} " + "coco AP@{:.2f}:{:.2f}:{:.2f}:".format(*o_range))) + result += print_str((f"bbox AP:{mAPbbox[j, 0]:.2f}, " + f"{mAPbbox[j, 1]:.2f}, " + f"{mAPbbox[j, 2]:.2f}")) + result += print_str((f"bev AP:{mAPbev[j, 0]:.2f}, " + f"{mAPbev[j, 1]:.2f}, " + f"{mAPbev[j, 2]:.2f}")) + result += print_str((f"3d AP:{mAP3d[j, 0]:.2f}, " + f"{mAP3d[j, 1]:.2f}, " + f"{mAP3d[j, 2]:.2f}")) + if compute_aos: + result += print_str((f"aos AP:{mAPaos[j, 0]:.2f}, " + f"{mAPaos[j, 1]:.2f}, " + f"{mAPaos[j, 2]:.2f}")) + return result diff --git a/tools/kitti_object_eval_python/evaluate.py b/tools/kitti_object_eval_python/evaluate.py new file mode 100644 index 0000000..e56bfe5 --- /dev/null +++ 
b/tools/kitti_object_eval_python/evaluate.py @@ -0,0 +1,32 @@ +import time +import fire + +import tools.kitti_object_eval_python.kitti_common as kitti +from tools.kitti_object_eval_python.eval import get_official_eval_result, get_coco_eval_result + + +def _read_imageset_file(path): + with open(path, 'r') as f: + lines = f.readlines() + return [int(line) for line in lines] + + +def evaluate(label_path, + result_path, + label_split_file, + current_class = 0, + coco = False, + score_thresh = -1): + dt_annos = kitti.get_label_annos(result_path) + if score_thresh > 0: + dt_annos = kitti.filter_annos_low_score(dt_annos, score_thresh) + val_image_ids = _read_imageset_file(label_split_file) + gt_annos = kitti.get_label_annos(label_path, val_image_ids) + if coco: + return get_coco_eval_result(gt_annos, dt_annos, current_class) + else: + return get_official_eval_result(gt_annos, dt_annos, current_class) + + +if __name__ == '__main__': + fire.Fire() diff --git a/tools/kitti_object_eval_python/kitti_common.py b/tools/kitti_object_eval_python/kitti_common.py new file mode 100644 index 0000000..a31092c --- /dev/null +++ b/tools/kitti_object_eval_python/kitti_common.py @@ -0,0 +1,416 @@ +import concurrent.futures as futures +import os +import pathlib +import re +from collections import OrderedDict + +import numpy as np +from skimage import io + + +def get_image_index_str(img_idx): + return "{:06d}".format(img_idx) + + +def get_kitti_info_path(idx, + prefix, + info_type = 'image_2', + file_tail = '.png', + training = True, + relative_path = True): + img_idx_str = get_image_index_str(idx) + img_idx_str += file_tail + prefix = pathlib.Path(prefix) + if training: + file_path = pathlib.Path('training') / info_type / img_idx_str + else: + file_path = pathlib.Path('testing') / info_type / img_idx_str + if not (prefix / file_path).exists(): + raise ValueError("file not exist: {}".format(file_path)) + if relative_path: + return str(file_path) + else: + return str(prefix / file_path) + + +def get_image_path(idx, prefix, training = True, relative_path = True): + return get_kitti_info_path(idx, prefix, 'image_2', '.png', training, + relative_path) + + +def get_label_path(idx, prefix, training = True, relative_path = True): + return get_kitti_info_path(idx, prefix, 'label_2', '.txt', training, + relative_path) + + +def get_velodyne_path(idx, prefix, training = True, relative_path = True): + return get_kitti_info_path(idx, prefix, 'velodyne', '.bin', training, + relative_path) + + +def get_calib_path(idx, prefix, training = True, relative_path = True): + return get_kitti_info_path(idx, prefix, 'calib', '.txt', training, + relative_path) + + +def _extend_matrix(mat): + mat = np.concatenate([mat, np.array([[0., 0., 0., 1.]])], axis = 0) + return mat + + +def get_kitti_image_info(path, + training = True, + label_info = True, + velodyne = False, + calib = False, + image_ids = 7481, + extend_matrix = True, + num_worker = 8, + relative_path = True, + with_imageshape = True): + # image_infos = [] + root_path = pathlib.Path(path) + if not isinstance(image_ids, list): + image_ids = list(range(image_ids)) + + def map_func(idx): + image_info = { 'image_idx': idx } + annotations = None + if velodyne: + image_info['velodyne_path'] = get_velodyne_path( + idx, path, training, relative_path) + image_info['img_path'] = get_image_path(idx, path, training, + relative_path) + if with_imageshape: + img_path = image_info['img_path'] + if relative_path: + img_path = str(root_path / img_path) + image_info['img_shape'] = np.array( + 
io.imread(img_path).shape[:2], dtype = np.int32) + if label_info: + label_path = get_label_path(idx, path, training, relative_path) + if relative_path: + label_path = str(root_path / label_path) + annotations = get_label_anno(label_path) + if calib: + calib_path = get_calib_path( + idx, path, training, relative_path = False) + with open(calib_path, 'r') as f: + lines = f.readlines() + P0 = np.array( + [float(info) for info in lines[0].split(' ')[1:13]]).reshape( + [3, 4]) + P1 = np.array( + [float(info) for info in lines[1].split(' ')[1:13]]).reshape( + [3, 4]) + P2 = np.array( + [float(info) for info in lines[2].split(' ')[1:13]]).reshape( + [3, 4]) + P3 = np.array( + [float(info) for info in lines[3].split(' ')[1:13]]).reshape( + [3, 4]) + if extend_matrix: + P0 = _extend_matrix(P0) + P1 = _extend_matrix(P1) + P2 = _extend_matrix(P2) + P3 = _extend_matrix(P3) + image_info['calib/P0'] = P0 + image_info['calib/P1'] = P1 + image_info['calib/P2'] = P2 + image_info['calib/P3'] = P3 + R0_rect = np.array([ + float(info) for info in lines[4].split(' ')[1:10] + ]).reshape([3, 3]) + if extend_matrix: + rect_4x4 = np.zeros([4, 4], dtype = R0_rect.dtype) + rect_4x4[3, 3] = 1. + rect_4x4[:3, :3] = R0_rect + else: + rect_4x4 = R0_rect + image_info['calib/R0_rect'] = rect_4x4 + Tr_velo_to_cam = np.array([ + float(info) for info in lines[5].split(' ')[1:13] + ]).reshape([3, 4]) + Tr_imu_to_velo = np.array([ + float(info) for info in lines[6].split(' ')[1:13] + ]).reshape([3, 4]) + if extend_matrix: + Tr_velo_to_cam = _extend_matrix(Tr_velo_to_cam) + Tr_imu_to_velo = _extend_matrix(Tr_imu_to_velo) + image_info['calib/Tr_velo_to_cam'] = Tr_velo_to_cam + image_info['calib/Tr_imu_to_velo'] = Tr_imu_to_velo + if annotations is not None: + image_info['annos'] = annotations + add_difficulty_to_annos(image_info) + return image_info + + with futures.ThreadPoolExecutor(num_worker) as executor: + image_infos = executor.map(map_func, image_ids) + return list(image_infos) + + +def filter_kitti_anno(image_anno, + used_classes, + used_difficulty = None, + dontcare_iou = None): + if not isinstance(used_classes, (list, tuple)): + used_classes = [used_classes] + img_filtered_annotations = { } + relevant_annotation_indices = [ + i for i, x in enumerate(image_anno['name']) if x in used_classes + ] + for key in image_anno.keys(): + img_filtered_annotations[key] = ( + image_anno[key][relevant_annotation_indices]) + if used_difficulty is not None: + relevant_annotation_indices = [ + i for i, x in enumerate(img_filtered_annotations['difficulty']) + if x in used_difficulty + ] + for key in image_anno.keys(): + img_filtered_annotations[key] = ( + img_filtered_annotations[key][relevant_annotation_indices]) + + if 'DontCare' in used_classes and dontcare_iou is not None: + dont_care_indices = [ + i for i, x in enumerate(img_filtered_annotations['name']) + if x == 'DontCare' + ] + # bounding box format [y_min, x_min, y_max, x_max] + all_boxes = img_filtered_annotations['bbox'] + ious = iou(all_boxes, all_boxes[dont_care_indices]) + + # Remove all bounding boxes that overlap with a dontcare region. 
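+        # np.amax(ious, axis = 1) gives, for each remaining box, its highest IoU
+        # against any DontCare region; boxes exceeding dontcare_iou are masked out.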
+ if ious.size > 0: + boxes_to_remove = np.amax(ious, axis = 1) > dontcare_iou + for key in image_anno.keys(): + img_filtered_annotations[key] = (img_filtered_annotations[key][ + np.logical_not(boxes_to_remove)]) + return img_filtered_annotations + + +def filter_annos_low_score(image_annos, thresh): + new_image_annos = [] + for anno in image_annos: + img_filtered_annotations = { } + relevant_annotation_indices = [ + i for i, s in enumerate(anno['score']) if s >= thresh + ] + for key in anno.keys(): + img_filtered_annotations[key] = ( + anno[key][relevant_annotation_indices]) + new_image_annos.append(img_filtered_annotations) + return new_image_annos + + +def kitti_result_line(result_dict, precision = 4): + prec_float = "{" + ":.{}f".format(precision) + "}" + res_line = [] + all_field_default = OrderedDict([ + ('name', None), + ('truncated', -1), + ('occluded', -1), + ('alpha', -10), + ('bbox', None), + ('dimensions', [-1, -1, -1]), + ('location', [-1000, -1000, -1000]), + ('rotation_y', -10), + ('score', None), + ]) + res_dict = [(key, None) for key, val in all_field_default.items()] + res_dict = OrderedDict(res_dict) + for key, val in result_dict.items(): + if all_field_default[key] is None and val is None: + raise ValueError("you must specify a value for {}".format(key)) + res_dict[key] = val + + for key, val in res_dict.items(): + if key == 'name': + res_line.append(val) + elif key in ['truncated', 'alpha', 'rotation_y', 'score']: + if val is None: + res_line.append(str(all_field_default[key])) + else: + res_line.append(prec_float.format(val)) + elif key == 'occluded': + if val is None: + res_line.append(str(all_field_default[key])) + else: + res_line.append('{}'.format(val)) + elif key in ['bbox', 'dimensions', 'location']: + if val is None: + res_line += [str(v) for v in all_field_default[key]] + else: + res_line += [prec_float.format(v) for v in val] + else: + raise ValueError("unknown key. 
supported key:{}".format( + res_dict.keys())) + return ' '.join(res_line) + + +def add_difficulty_to_annos(info): + min_height = [40, 25, + 25] # minimum height for evaluated groundtruth/detections + max_occlusion = [ + 0, 1, 2 + ] # maximum occlusion level of the groundtruth used for evaluation + max_trunc = [ + 0.15, 0.3, 0.5 + ] # maximum truncation level of the groundtruth used for evaluation + annos = info['annos'] + dims = annos['dimensions'] # lhw format + bbox = annos['bbox'] + height = bbox[:, 3] - bbox[:, 1] + occlusion = annos['occluded'] + truncation = annos['truncated'] + diff = [] + easy_mask = np.ones((len(dims),), dtype = np.bool) + moderate_mask = np.ones((len(dims),), dtype = np.bool) + hard_mask = np.ones((len(dims),), dtype = np.bool) + i = 0 + for h, o, t in zip(height, occlusion, truncation): + if o > max_occlusion[0] or h <= min_height[0] or t > max_trunc[0]: + easy_mask[i] = False + if o > max_occlusion[1] or h <= min_height[1] or t > max_trunc[1]: + moderate_mask[i] = False + if o > max_occlusion[2] or h <= min_height[2] or t > max_trunc[2]: + hard_mask[i] = False + i += 1 + is_easy = easy_mask + is_moderate = np.logical_xor(easy_mask, moderate_mask) + is_hard = np.logical_xor(hard_mask, moderate_mask) + + for i in range(len(dims)): + if is_easy[i]: + diff.append(0) + elif is_moderate[i]: + diff.append(1) + elif is_hard[i]: + diff.append(2) + else: + diff.append(-1) + annos["difficulty"] = np.array(diff, np.int32) + return diff + + +def get_label_anno(label_path): + annotations = { } + annotations.update({ + 'name' : [], + 'truncated' : [], + 'occluded' : [], + 'alpha' : [], + 'bbox' : [], + 'dimensions': [], + 'location' : [], + 'rotation_y': [] + }) + with open(label_path, 'r') as f: + lines = f.readlines() + # if len(lines) == 0 or len(lines[0]) < 15: + # content = [] + # else: + content = [line.strip().split(' ') for line in lines] + annotations['name'] = np.array([x[0] for x in content]) + annotations['truncated'] = np.array([float(x[1]) for x in content]) + annotations['occluded'] = np.array([int(x[2]) for x in content]) + annotations['alpha'] = np.array([float(x[3]) for x in content]) + annotations['bbox'] = np.array( + [[float(info) for info in x[4:8]] for x in content]).reshape(-1, 4) + # dimensions will convert hwl format to standard lhw(camera) format. 
+    annotations['dimensions'] = np.array(
+        [[float(info) for info in x[8:11]] for x in content]).reshape(
+            -1, 3)[:, [2, 0, 1]]
+    annotations['location'] = np.array(
+        [[float(info) for info in x[11:14]] for x in content]).reshape(-1, 3)
+    annotations['rotation_y'] = np.array(
+        [float(x[14]) for x in content]).reshape(-1)
+    if len(content) != 0 and len(content[0]) == 16:  # have score
+        annotations['score'] = np.array([float(x[15]) for x in content])
+    else:
+        annotations['score'] = np.zeros([len(annotations['bbox'])])
+    return annotations
+
+
+def get_label_annos(label_folder, image_ids = None):
+    if image_ids is None:
+        filepaths = pathlib.Path(label_folder).glob('*.txt')
+        # escape the dot so the pattern matches exactly '000123.txt'-style names
+        prog = re.compile(r'^\d{6}\.txt$')
+        filepaths = filter(lambda f: prog.match(f.name), filepaths)
+        image_ids = [int(p.stem) for p in filepaths]
+        image_ids = sorted(image_ids)
+    if not isinstance(image_ids, list):
+        image_ids = list(range(image_ids))
+    annos = []
+    label_folder = pathlib.Path(label_folder)
+    for idx in image_ids:
+        image_idx = get_image_index_str(idx)
+        label_filename = label_folder / (image_idx + '.txt')
+        annos.append(get_label_anno(label_filename))
+    return annos
+
+
+def area(boxes, add1 = False):
+    """Computes area of boxes.
+
+    Args:
+        boxes: Numpy array with shape [N, 4] holding N boxes
+
+    Returns:
+        a numpy array with shape [N] representing box areas
+    """
+    if add1:
+        return (boxes[:, 2] - boxes[:, 0] + 1.0) * (
+            boxes[:, 3] - boxes[:, 1] + 1.0)
+    else:
+        return (boxes[:, 2] - boxes[:, 0]) * (boxes[:, 3] - boxes[:, 1])
+
+
+def intersection(boxes1, boxes2, add1 = False):
+    """Compute pairwise intersection areas between boxes.
+
+    Args:
+        boxes1: a numpy array with shape [N, 4] holding N boxes
+        boxes2: a numpy array with shape [M, 4] holding M boxes
+
+    Returns:
+        a numpy array with shape [N, M] representing pairwise intersection areas
+    """
+    [y_min1, x_min1, y_max1, x_max1] = np.split(boxes1, 4, axis = 1)
+    [y_min2, x_min2, y_max2, x_max2] = np.split(boxes2, 4, axis = 1)
+
+    all_pairs_min_ymax = np.minimum(y_max1, np.transpose(y_max2))
+    all_pairs_max_ymin = np.maximum(y_min1, np.transpose(y_min2))
+    if add1:
+        all_pairs_min_ymax += 1.0
+    intersect_heights = np.maximum(
+        np.zeros(all_pairs_max_ymin.shape),
+        all_pairs_min_ymax - all_pairs_max_ymin)
+
+    all_pairs_min_xmax = np.minimum(x_max1, np.transpose(x_max2))
+    all_pairs_max_xmin = np.maximum(x_min1, np.transpose(x_min2))
+    if add1:
+        all_pairs_min_xmax += 1.0
+    intersect_widths = np.maximum(
+        np.zeros(all_pairs_max_xmin.shape),
+        all_pairs_min_xmax - all_pairs_max_xmin)
+    return intersect_heights * intersect_widths
+
+
+def iou(boxes1, boxes2, add1 = False):
+    """Computes pairwise intersection-over-union between box collections.
+
+    Args:
+        boxes1: a numpy array with shape [N, 4] holding N boxes.
+        boxes2: a numpy array with shape [M, 4] holding M boxes.
+
+    Returns:
+        a numpy array with shape [N, M] representing pairwise iou scores.
+ """ + intersect = intersection(boxes1, boxes2, add1) + area1 = area(boxes1, add1) + area2 = area(boxes2, add1) + union = np.expand_dims( + area1, axis = 1) + np.expand_dims( + area2, axis = 0) - intersect + return intersect / union diff --git a/tools/kitti_object_eval_python/rotate_iou.py b/tools/kitti_object_eval_python/rotate_iou.py new file mode 100644 index 0000000..ca3e788 --- /dev/null +++ b/tools/kitti_object_eval_python/rotate_iou.py @@ -0,0 +1,332 @@ +##################### +# Based on https://github.com/hongzhenwang/RRPN-revise +# Licensed under The MIT License +# Author: yanyan, scrin@foxmail.com +##################### +import math + +import numba +import numpy as np +from numba import cuda + + +@numba.jit(nopython = True) +def div_up(m, n): + return m // n + (m % n > 0) + + +@cuda.jit('(float32[:], float32[:], float32[:])', device = True, inline = True) +def trangle_area(a, b, c): + return ((a[0] - c[0]) * (b[1] - c[1]) - (a[1] - c[1]) * + (b[0] - c[0])) / 2.0 + + +@cuda.jit('(float32[:], int32)', device = True, inline = True) +def area(int_pts, num_of_inter): + area_val = 0.0 + for i in range(num_of_inter - 2): + area_val += abs( + trangle_area(int_pts[:2], int_pts[2 * i + 2:2 * i + 4], + int_pts[2 * i + 4:2 * i + 6])) + return area_val + + +@cuda.jit('(float32[:], int32)', device = True, inline = True) +def sort_vertex_in_convex_polygon(int_pts, num_of_inter): + if num_of_inter > 0: + center = cuda.local.array((2,), dtype = numba.float32) + center[:] = 0.0 + for i in range(num_of_inter): + center[0] += int_pts[2 * i] + center[1] += int_pts[2 * i + 1] + center[0] /= num_of_inter + center[1] /= num_of_inter + v = cuda.local.array((2,), dtype = numba.float32) + vs = cuda.local.array((16,), dtype = numba.float32) + for i in range(num_of_inter): + v[0] = int_pts[2 * i] - center[0] + v[1] = int_pts[2 * i + 1] - center[1] + d = math.sqrt(v[0] * v[0] + v[1] * v[1]) + v[0] = v[0] / d + v[1] = v[1] / d + if v[1] < 0: + v[0] = -2 - v[0] + vs[i] = v[0] + j = 0 + temp = 0 + for i in range(1, num_of_inter): + if vs[i - 1] > vs[i]: + temp = vs[i] + tx = int_pts[2 * i] + ty = int_pts[2 * i + 1] + j = i + while j > 0 and vs[j - 1] > temp: + vs[j] = vs[j - 1] + int_pts[j * 2] = int_pts[j * 2 - 2] + int_pts[j * 2 + 1] = int_pts[j * 2 - 1] + j -= 1 + + vs[j] = temp + int_pts[j * 2] = tx + int_pts[j * 2 + 1] = ty + + +@cuda.jit( + '(float32[:], float32[:], int32, int32, float32[:])', + device = True, + inline = True) +def line_segment_intersection(pts1, pts2, i, j, temp_pts): + A = cuda.local.array((2,), dtype = numba.float32) + B = cuda.local.array((2,), dtype = numba.float32) + C = cuda.local.array((2,), dtype = numba.float32) + D = cuda.local.array((2,), dtype = numba.float32) + + A[0] = pts1[2 * i] + A[1] = pts1[2 * i + 1] + + B[0] = pts1[2 * ((i + 1) % 4)] + B[1] = pts1[2 * ((i + 1) % 4) + 1] + + C[0] = pts2[2 * j] + C[1] = pts2[2 * j + 1] + + D[0] = pts2[2 * ((j + 1) % 4)] + D[1] = pts2[2 * ((j + 1) % 4) + 1] + BA0 = B[0] - A[0] + BA1 = B[1] - A[1] + DA0 = D[0] - A[0] + CA0 = C[0] - A[0] + DA1 = D[1] - A[1] + CA1 = C[1] - A[1] + acd = DA1 * CA0 > CA1 * DA0 + bcd = (D[1] - B[1]) * (C[0] - B[0]) > (C[1] - B[1]) * (D[0] - B[0]) + if acd != bcd: + abc = CA1 * BA0 > BA1 * CA0 + abd = DA1 * BA0 > BA1 * DA0 + if abc != abd: + DC0 = D[0] - C[0] + DC1 = D[1] - C[1] + ABBA = A[0] * B[1] - B[0] * A[1] + CDDC = C[0] * D[1] - D[0] * C[1] + DH = BA1 * DC0 - BA0 * DC1 + Dx = ABBA * DC0 - BA0 * CDDC + Dy = ABBA * DC1 - BA1 * CDDC + temp_pts[0] = Dx / DH + temp_pts[1] = Dy / DH + return True + return False 
+ + +@cuda.jit( + '(float32[:], float32[:], int32, int32, float32[:])', + device = True, + inline = True) +def line_segment_intersection_v1(pts1, pts2, i, j, temp_pts): + a = cuda.local.array((2,), dtype = numba.float32) + b = cuda.local.array((2,), dtype = numba.float32) + c = cuda.local.array((2,), dtype = numba.float32) + d = cuda.local.array((2,), dtype = numba.float32) + + a[0] = pts1[2 * i] + a[1] = pts1[2 * i + 1] + + b[0] = pts1[2 * ((i + 1) % 4)] + b[1] = pts1[2 * ((i + 1) % 4) + 1] + + c[0] = pts2[2 * j] + c[1] = pts2[2 * j + 1] + + d[0] = pts2[2 * ((j + 1) % 4)] + d[1] = pts2[2 * ((j + 1) % 4) + 1] + + area_abc = trangle_area(a, b, c) + area_abd = trangle_area(a, b, d) + + if area_abc * area_abd >= 0: + return False + + area_cda = trangle_area(c, d, a) + area_cdb = area_cda + area_abc - area_abd + + if area_cda * area_cdb >= 0: + return False + t = area_cda / (area_abd - area_abc) + + dx = t * (b[0] - a[0]) + dy = t * (b[1] - a[1]) + temp_pts[0] = a[0] + dx + temp_pts[1] = a[1] + dy + return True + + +@cuda.jit('(float32, float32, float32[:])', device = True, inline = True) +def point_in_quadrilateral(pt_x, pt_y, corners): + ab0 = corners[2] - corners[0] + ab1 = corners[3] - corners[1] + + ad0 = corners[6] - corners[0] + ad1 = corners[7] - corners[1] + + ap0 = pt_x - corners[0] + ap1 = pt_y - corners[1] + + abab = ab0 * ab0 + ab1 * ab1 + abap = ab0 * ap0 + ab1 * ap1 + adad = ad0 * ad0 + ad1 * ad1 + adap = ad0 * ap0 + ad1 * ap1 + + return abab >= abap and abap >= 0 and adad >= adap and adap >= 0 + + +@cuda.jit('(float32[:], float32[:], float32[:])', device = True, inline = True) +def quadrilateral_intersection(pts1, pts2, int_pts): + num_of_inter = 0 + for i in range(4): + if point_in_quadrilateral(pts1[2 * i], pts1[2 * i + 1], pts2): + int_pts[num_of_inter * 2] = pts1[2 * i] + int_pts[num_of_inter * 2 + 1] = pts1[2 * i + 1] + num_of_inter += 1 + if point_in_quadrilateral(pts2[2 * i], pts2[2 * i + 1], pts1): + int_pts[num_of_inter * 2] = pts2[2 * i] + int_pts[num_of_inter * 2 + 1] = pts2[2 * i + 1] + num_of_inter += 1 + temp_pts = cuda.local.array((2,), dtype = numba.float32) + for i in range(4): + for j in range(4): + has_pts = line_segment_intersection(pts1, pts2, i, j, temp_pts) + if has_pts: + int_pts[num_of_inter * 2] = temp_pts[0] + int_pts[num_of_inter * 2 + 1] = temp_pts[1] + num_of_inter += 1 + + return num_of_inter + + +@cuda.jit('(float32[:], float32[:])', device = True, inline = True) +def rbbox_to_corners(corners, rbbox): + # generate clockwise corners and rotate it clockwise + angle = rbbox[4] + a_cos = math.cos(angle) + a_sin = math.sin(angle) + center_x = rbbox[0] + center_y = rbbox[1] + x_d = rbbox[2] + y_d = rbbox[3] + corners_x = cuda.local.array((4,), dtype = numba.float32) + corners_y = cuda.local.array((4,), dtype = numba.float32) + corners_x[0] = -x_d / 2 + corners_x[1] = -x_d / 2 + corners_x[2] = x_d / 2 + corners_x[3] = x_d / 2 + corners_y[0] = -y_d / 2 + corners_y[1] = y_d / 2 + corners_y[2] = y_d / 2 + corners_y[3] = -y_d / 2 + for i in range(4): + corners[2 * + i] = a_cos * corners_x[i] + a_sin * corners_y[i] + center_x + corners[2 * i + + 1] = -a_sin * corners_x[i] + a_cos * corners_y[i] + center_y + + +@cuda.jit('(float32[:], float32[:])', device = True, inline = True) +def inter(rbbox1, rbbox2): + corners1 = cuda.local.array((8,), dtype = numba.float32) + corners2 = cuda.local.array((8,), dtype = numba.float32) + intersection_corners = cuda.local.array((16,), dtype = numba.float32) + + rbbox_to_corners(corners1, rbbox1) + rbbox_to_corners(corners2, 
rbbox2) + + num_intersection = quadrilateral_intersection(corners1, corners2, + intersection_corners) + sort_vertex_in_convex_polygon(intersection_corners, num_intersection) + # print(intersection_corners.reshape([-1, 2])[:num_intersection]) + + return area(intersection_corners, num_intersection) + + +@cuda.jit('(float32[:], float32[:], int32)', device = True, inline = True) +def devRotateIoUEval(rbox1, rbox2, criterion = -1): + area1 = rbox1[2] * rbox1[3] + area2 = rbox2[2] * rbox2[3] + area_inter = inter(rbox1, rbox2) + if criterion == -1: + return area_inter / (area1 + area2 - area_inter) + elif criterion == 0: + return area_inter / area1 + elif criterion == 1: + return area_inter / area2 + else: + return area_inter + + +@cuda.jit('(int64, int64, float32[:], float32[:], float32[:], int32)', fastmath = False) +def rotate_iou_kernel_eval(N, K, dev_boxes, dev_query_boxes, dev_iou, criterion = -1): + threadsPerBlock = 8 * 8 + row_start = cuda.blockIdx.x + col_start = cuda.blockIdx.y + tx = cuda.threadIdx.x + row_size = min(N - row_start * threadsPerBlock, threadsPerBlock) + col_size = min(K - col_start * threadsPerBlock, threadsPerBlock) + block_boxes = cuda.shared.array(shape = (64 * 5,), dtype = numba.float32) + block_qboxes = cuda.shared.array(shape = (64 * 5,), dtype = numba.float32) + + dev_query_box_idx = threadsPerBlock * col_start + tx + dev_box_idx = threadsPerBlock * row_start + tx + if (tx < col_size): + block_qboxes[tx * 5 + 0] = dev_query_boxes[dev_query_box_idx * 5 + 0] + block_qboxes[tx * 5 + 1] = dev_query_boxes[dev_query_box_idx * 5 + 1] + block_qboxes[tx * 5 + 2] = dev_query_boxes[dev_query_box_idx * 5 + 2] + block_qboxes[tx * 5 + 3] = dev_query_boxes[dev_query_box_idx * 5 + 3] + block_qboxes[tx * 5 + 4] = dev_query_boxes[dev_query_box_idx * 5 + 4] + if (tx < row_size): + block_boxes[tx * 5 + 0] = dev_boxes[dev_box_idx * 5 + 0] + block_boxes[tx * 5 + 1] = dev_boxes[dev_box_idx * 5 + 1] + block_boxes[tx * 5 + 2] = dev_boxes[dev_box_idx * 5 + 2] + block_boxes[tx * 5 + 3] = dev_boxes[dev_box_idx * 5 + 3] + block_boxes[tx * 5 + 4] = dev_boxes[dev_box_idx * 5 + 4] + cuda.syncthreads() + if tx < row_size: + for i in range(col_size): + offset = row_start * threadsPerBlock * K + col_start * threadsPerBlock + tx * K + i + dev_iou[offset] = devRotateIoUEval(block_qboxes[i * 5:i * 5 + 5], + block_boxes[tx * 5:tx * 5 + 5], criterion) + + +def rotate_iou_gpu_eval(boxes, query_boxes, criterion = -1, device_id = 0): + """rotated box iou running in gpu. 500x faster than cpu version + (take 5ms in one example with numba.cuda code). + convert from [this project]( + https://github.com/hongzhenwang/RRPN-revise/tree/master/lib/rotation). + + Args: + boxes (float tensor: [N, 5]): rbboxes. format: centers, dims, + angles(clockwise when positive) + query_boxes (float tensor: [K, 5]): [description] + device_id (int, optional): Defaults to 0. 
[description] + + Returns: + [type]: [description] + """ + box_dtype = boxes.dtype + boxes = boxes.astype(np.float32) + query_boxes = query_boxes.astype(np.float32) + N = boxes.shape[0] + K = query_boxes.shape[0] + iou = np.zeros((N, K), dtype = np.float32) + if N == 0 or K == 0: + return iou + threadsPerBlock = 8 * 8 + cuda.select_device(device_id) + blockspergrid = (div_up(N, threadsPerBlock), div_up(K, threadsPerBlock)) + + stream = cuda.stream() + with stream.auto_synchronize(): + boxes_dev = cuda.to_device(boxes.reshape([-1]), stream) + query_boxes_dev = cuda.to_device(query_boxes.reshape([-1]), stream) + iou_dev = cuda.to_device(iou.reshape([-1]), stream) + rotate_iou_kernel_eval[blockspergrid, threadsPerBlock, stream]( + N, K, boxes_dev, query_boxes_dev, iou_dev, criterion) + iou_dev.copy_to_host(iou.reshape([-1]), stream = stream) + return iou.astype(boxes.dtype) diff --git a/tools/run_all_eval_epnet_plus_plus_models.sh b/tools/run_all_eval_epnet_plus_plus_models.sh new file mode 100644 index 0000000..48be684 --- /dev/null +++ b/tools/run_all_eval_epnet_plus_plus_models.sh @@ -0,0 +1,23 @@ +#! /bin/bash + +################################## eval CAR +CUDA_VISIBLE_DEVICES=0 python eval_rcnn.py --cfg_file cfgs/CAR_EPNet_plus_plus.yaml --eval_mode rcnn_online \ +--output_dir ./epnet_plus_plus_released_trained_models/CAR/eval_results/ \ +--data_path ../data/ --ckpt ./epnet_plus_plus_released_trained_models/CAR/checkpoint_epoch_43.pth \ +--set LI_FUSION.ENABLED True LI_FUSION.ADD_Image_Attention True CROSS_FUSION True USE_P2I_GATE True \ +DEEP_RCNN_FUSION False USE_IMAGE_LOSS True IMAGE_WEIGHT 1.0 USE_IMAGE_SCORE True + + +################################## eval PED +CUDA_VISIBLE_DEVICES=1 python eval_rcnn.py --cfg_file cfgs/PED_EPNet_plus_plus.yaml --eval_mode rcnn_online \ +--output_dir ./epnet_plus_plus_released_trained_models/PED/eval_results/ \ +--data_path ../data/ --ckpt ./epnet_plus_plus_released_trained_models/PED/checkpoint_epoch_44.pth \ +--set LI_FUSION.ENABLED True LI_FUSION.ADD_Image_Attention True CROSS_FUSION True USE_P2I_GATE True \ +DEEP_RCNN_FUSION False USE_IMAGE_LOSS True IMAGE_WEIGHT 1.0 USE_IMAGE_SCORE True + +################################## eval CYC +CUDA_VISIBLE_DEVICES=2 python eval_rcnn.py --cfg_file cfgs/CYC_EPNet_plus_plus.yaml --eval_mode rcnn_online \ +--output_dir ./epnet_plus_plus_released_trained_models/CYC/eval_results/ \ +--data_path ../data/ --ckpt ./epnet_plus_plus_released_trained_models/CYC/checkpoint_epoch_50.pth \ +--set LI_FUSION.ENABLED True LI_FUSION.ADD_Image_Attention True CROSS_FUSION True USE_P2I_GATE True \ +DEEP_RCNN_FUSION False USE_IMAGE_LOSS True IMAGE_WEIGHT 1.0 USE_IMAGE_SCORE True diff --git a/tools/run_train_and_eval_epnet_plus_plus_car.sh b/tools/run_train_and_eval_epnet_plus_plus_car.sh new file mode 100644 index 0000000..ad51ba1 --- /dev/null +++ b/tools/run_train_and_eval_epnet_plus_plus_car.sh @@ -0,0 +1,22 @@ +#! 
/bin/bash + + +################################## train CAR +CUDA_VISIBLE_DEVICES=0,1 python train_rcnn.py --cfg_file cfgs/CAR_EPNet_plus_plus.yaml \ +--batch_size 4 --train_mode rcnn_online --epochs 50 --mgpus --ckpt_save_interval 1 \ +--output_dir ./log/CAR_EPNet_plus_plus/ \ +--data_path ../data/ \ +--set LI_FUSION.ENABLED True LI_FUSION.ADD_Image_Attention True CROSS_FUSION True USE_P2I_GATE True \ +DEEP_RCNN_FUSION False USE_IMAGE_LOSS True IMAGE_WEIGHT 1.0 USE_IMAGE_SCORE True USE_IMG_DENSE_LOSS True USE_MC_LOSS True \ +MC_LOSS_WEIGHT 1.0 I2P_Weight 0.5 P2I_Weight 0.5 ADD_MC_MASK True MC_MASK_THRES 0.2 SAVE_MODEL_PREP 0.8 + + +################################## eval CAR +CUDA_VISIBLE_DEVICES=0 python eval_rcnn.py --cfg_file cfgs/CAR_EPNet_plus_plus.yaml --eval_mode rcnn_online \ +--eval_all --output_dir ./log/CAR_EPNet_plus_plus/eval_results/ \ +--data_path ../data/ \ +--ckpt_dir ./log/CAR_EPNet_plus_plus/ckpt \ +--set LI_FUSION.ENABLED True LI_FUSION.ADD_Image_Attention True CROSS_FUSION True USE_P2I_GATE True \ +DEEP_RCNN_FUSION False USE_IMAGE_LOSS True IMAGE_WEIGHT 1.0 USE_IMAGE_SCORE True USE_IMG_DENSE_LOSS True USE_MC_LOSS True \ +MC_LOSS_WEIGHT 1.0 I2P_Weight 0.5 P2I_Weight 0.5 ADD_MC_MASK True MC_MASK_THRES 0.2 SAVE_MODEL_PREP 0.8 + diff --git a/tools/run_train_and_eval_epnet_plus_plus_cyc.sh b/tools/run_train_and_eval_epnet_plus_plus_cyc.sh new file mode 100644 index 0000000..9b101ac --- /dev/null +++ b/tools/run_train_and_eval_epnet_plus_plus_cyc.sh @@ -0,0 +1,22 @@ +#! /bin/bash + + +################################## train CYC +CUDA_VISIBLE_DEVICES=0,1 python train_rcnn.py --cfg_file cfgs/CYC_EPNet_plus_plus.yaml \ +--batch_size 4 --train_mode rcnn_online --epochs 50 --mgpus --ckpt_save_interval 1 \ +--output_dir ./log/CYC_EPNet_plus_plus/ \ +--data_path ../data/ \ +--set LI_FUSION.ENABLED True LI_FUSION.ADD_Image_Attention True CROSS_FUSION True USE_P2I_GATE True \ +DEEP_RCNN_FUSION False USE_IMAGE_LOSS True IMAGE_WEIGHT 1.0 USE_IMAGE_SCORE True USE_IMG_DENSE_LOSS True USE_MC_LOSS True \ +MC_LOSS_WEIGHT 1.0 I2P_Weight 0.5 P2I_Weight 0.5 ADD_MC_MASK True MC_MASK_THRES 0.2 SAVE_MODEL_PREP 0.8 + + +################################## eval CYC +CUDA_VISIBLE_DEVICES=0 python eval_rcnn.py --cfg_file cfgs/CYC_EPNet_plus_plus.yaml --eval_mode rcnn_online \ +--eval_all --output_dir ./log/CYC_EPNet_plus_plus/eval_results/ \ +--data_path ../data/ \ +--ckpt_dir ./log/CYC_EPNet_plus_plus/ckpt \ +--set LI_FUSION.ENABLED True LI_FUSION.ADD_Image_Attention True CROSS_FUSION True USE_P2I_GATE True \ +DEEP_RCNN_FUSION False USE_IMAGE_LOSS True IMAGE_WEIGHT 1.0 USE_IMAGE_SCORE True USE_IMG_DENSE_LOSS True USE_MC_LOSS True \ +MC_LOSS_WEIGHT 1.0 I2P_Weight 0.5 P2I_Weight 0.5 ADD_MC_MASK True MC_MASK_THRES 0.2 SAVE_MODEL_PREP 0.8 + diff --git a/tools/run_train_and_eval_epnet_plus_plus_ped.sh b/tools/run_train_and_eval_epnet_plus_plus_ped.sh new file mode 100644 index 0000000..e40a638 --- /dev/null +++ b/tools/run_train_and_eval_epnet_plus_plus_ped.sh @@ -0,0 +1,22 @@ +#! 
/bin/bash + + +################################## train PED +CUDA_VISIBLE_DEVICES=0,1 python train_rcnn.py --cfg_file cfgs/PED_EPNet_plus_plus.yaml \ +--batch_size 4 --train_mode rcnn_online --epochs 50 --mgpus --ckpt_save_interval 1 \ +--output_dir ./log/PED_EPNet_plus_plus/ \ +--data_path ../data/ \ +--set LI_FUSION.ENABLED True LI_FUSION.ADD_Image_Attention True CROSS_FUSION True USE_P2I_GATE True \ +DEEP_RCNN_FUSION False USE_IMAGE_LOSS True IMAGE_WEIGHT 1.0 USE_IMAGE_SCORE True USE_IMG_DENSE_LOSS True USE_MC_LOSS True \ +MC_LOSS_WEIGHT 1.0 I2P_Weight 0.5 P2I_Weight 0.5 ADD_MC_MASK True MC_MASK_THRES 0.2 SAVE_MODEL_PREP 0.8 + + +################################## eval PED +CUDA_VISIBLE_DEVICES=0 python eval_rcnn.py --cfg_file cfgs/PED_EPNet_plus_plus.yaml --eval_mode rcnn_online \ +--eval_all --output_dir ./log/PED_EPNet_plus_plus/eval_results/ \ +--data_path ../data/ \ +--ckpt_dir ./log/PED_EPNet_plus_plus/ckpt \ +--set LI_FUSION.ENABLED True LI_FUSION.ADD_Image_Attention True CROSS_FUSION True USE_P2I_GATE True \ +DEEP_RCNN_FUSION False USE_IMAGE_LOSS True IMAGE_WEIGHT 1.0 USE_IMAGE_SCORE True USE_IMG_DENSE_LOSS True USE_MC_LOSS True \ +MC_LOSS_WEIGHT 1.0 I2P_Weight 0.5 P2I_Weight 0.5 ADD_MC_MASK True MC_MASK_THRES 0.2 SAVE_MODEL_PREP 0.8 + diff --git a/tools/run_train_epnet_plus_plus_car.sh b/tools/run_train_epnet_plus_plus_car.sh new file mode 100644 index 0000000..c30f5b9 --- /dev/null +++ b/tools/run_train_epnet_plus_plus_car.sh @@ -0,0 +1,23 @@ +#! /bin/bash + + +################################## train CAR +CUDA_VISIBLE_DEVICES=0,1 python train_rcnn.py --cfg_file cfgs/CAR_EPNet_plus_plus.yaml \ +--batch_size 4 --train_mode rcnn_online --epochs 50 --mgpus --ckpt_save_interval 1 \ +--output_dir ./log/CAR_EPNet_plus_plus_js_02/ \ +--data_path ../data/ \ +--set LI_FUSION.ENABLED True LI_FUSION.ADD_Image_Attention True RCNN.POOL_EXTRA_WIDTH 0.2 RPN.SCORE_THRESH 0.2 RCNN.SCORE_THRESH 0.2 USE_IOU_BRANCH True TRAIN.CE_WEIGHT 5.0 CROSS_FUSION True \ +USE_SELF_ATTENTION False DEEP_RCNN_FUSION False USE_IMAGE_LOSS True IMAGE_WEIGHT 1.0 USE_IMAGE_SCORE True USE_IMG_DENSE_LOSS True USE_KL_LOSS True USE_KL_LOSS_TYPE 'JS' \ +KL_LOSS_WEIGHT 1.0 USE_P2I_GATE True TRAIN.RPN_CE_WEIGHT 5.0 SAVE_MODEL_PREP 0.8 I2P_Weight 0.5 P2I_Weight 0.5 ADD_JS_MASK True JS_MASK_THRES 0.2 + + +################################## eval CAR +CUDA_VISIBLE_DEVICES=0 python eval_rcnn.py --cfg_file cfgs/CAR_EPNet_plus_plus.yaml --eval_mode rcnn_online \ +--eval_all --output_dir ./log/CAR_EPNet_plus_plus_js_02/eval_results/ \ +--data_path ../data/ \ +--ckpt_dir ./log/CAR_EPNet_plus_plus_js_02/ckpt \ +--set LI_FUSION.ENABLED True LI_FUSION.ADD_Image_Attention True RCNN.POOL_EXTRA_WIDTH 0.2 RPN.SCORE_THRESH 0.2 RCNN.SCORE_THRESH 0.2 USE_IOU_BRANCH True TRAIN.CE_WEIGHT 5.0 CROSS_FUSION True \ +USE_SELF_ATTENTION False DEEP_RCNN_FUSION False USE_IMAGE_LOSS True IMAGE_WEIGHT 1.0 USE_IMAGE_SCORE True USE_IMG_DENSE_LOSS True USE_KL_LOSS True USE_KL_LOSS_TYPE 'JS' \ +KL_LOSS_WEIGHT 1.0 USE_P2I_GATE True TRAIN.RPN_CE_WEIGHT 5.0 SAVE_MODEL_PREP 0.8 I2P_Weight 0.5 P2I_Weight 0.5 ADD_JS_MASK True JS_MASK_THRES 0.2 + + diff --git a/tools/run_train_epnet_plus_plus_car_2.sh b/tools/run_train_epnet_plus_plus_car_2.sh new file mode 100644 index 0000000..4be82c4 --- /dev/null +++ b/tools/run_train_epnet_plus_plus_car_2.sh @@ -0,0 +1,23 @@ +#! 
/bin/bash + + +################################## train CAR +CUDA_VISIBLE_DEVICES=2,3 python train_rcnn.py --cfg_file cfgs/CAR_EPNet_plus_plus.yaml \ +--batch_size 4 --train_mode rcnn_online --epochs 50 --mgpus --ckpt_save_interval 1 \ +--output_dir ./log/CAR_EPNet_plus_plus_js_02_run2/ \ +--data_path ../data/ \ +--set LI_FUSION.ENABLED True LI_FUSION.ADD_Image_Attention True RCNN.POOL_EXTRA_WIDTH 0.2 RPN.SCORE_THRESH 0.2 RCNN.SCORE_THRESH 0.2 USE_IOU_BRANCH True TRAIN.CE_WEIGHT 5.0 CROSS_FUSION True \ +USE_SELF_ATTENTION False DEEP_RCNN_FUSION False USE_IMAGE_LOSS True IMAGE_WEIGHT 1.0 USE_IMAGE_SCORE True USE_IMG_DENSE_LOSS True USE_KL_LOSS True USE_KL_LOSS_TYPE 'JS' \ +KL_LOSS_WEIGHT 1.0 USE_P2I_GATE True TRAIN.RPN_CE_WEIGHT 5.0 SAVE_MODEL_PREP 0.8 I2P_Weight 0.5 P2I_Weight 0.5 ADD_JS_MASK True JS_MASK_THRES 0.2 + + +################################## eval CAR +CUDA_VISIBLE_DEVICES=2 python eval_rcnn.py --cfg_file cfgs/CAR_EPNet_plus_plus.yaml --eval_mode rcnn_online \ +--eval_all --output_dir ./log/CAR_EPNet_plus_plus_js_02_run2/eval_results/ \ +--data_path ../data/ \ +--ckpt_dir ./log/CAR_EPNet_plus_plus_js_02_run2/ckpt \ +--set LI_FUSION.ENABLED True LI_FUSION.ADD_Image_Attention True RCNN.POOL_EXTRA_WIDTH 0.2 RPN.SCORE_THRESH 0.2 RCNN.SCORE_THRESH 0.2 USE_IOU_BRANCH True TRAIN.CE_WEIGHT 5.0 CROSS_FUSION True \ +USE_SELF_ATTENTION False DEEP_RCNN_FUSION False USE_IMAGE_LOSS True IMAGE_WEIGHT 1.0 USE_IMAGE_SCORE True USE_IMG_DENSE_LOSS True USE_KL_LOSS True USE_KL_LOSS_TYPE 'JS' \ +KL_LOSS_WEIGHT 1.0 USE_P2I_GATE True TRAIN.RPN_CE_WEIGHT 5.0 SAVE_MODEL_PREP 0.8 I2P_Weight 0.5 P2I_Weight 0.5 ADD_JS_MASK True JS_MASK_THRES 0.2 + + diff --git a/tools/run_train_epnet_plus_plus_car_3.sh b/tools/run_train_epnet_plus_plus_car_3.sh new file mode 100644 index 0000000..cdef161 --- /dev/null +++ b/tools/run_train_epnet_plus_plus_car_3.sh @@ -0,0 +1,23 @@ +#! 
/bin/bash + + +################################## train CAR +CUDA_VISIBLE_DEVICES=4,5 python train_rcnn.py --cfg_file cfgs/CAR_EPNet_plus_plus.yaml \ +--batch_size 4 --train_mode rcnn_online --epochs 50 --mgpus --ckpt_save_interval 1 \ +--output_dir ./log/CAR_EPNet_plus_plus_js_01_run1/ \ +--data_path ../data/ \ +--set LI_FUSION.ENABLED True LI_FUSION.ADD_Image_Attention True RCNN.POOL_EXTRA_WIDTH 0.2 RPN.SCORE_THRESH 0.2 RCNN.SCORE_THRESH 0.2 USE_IOU_BRANCH True TRAIN.CE_WEIGHT 5.0 CROSS_FUSION True \ +USE_SELF_ATTENTION False DEEP_RCNN_FUSION False USE_IMAGE_LOSS True IMAGE_WEIGHT 1.0 USE_IMAGE_SCORE True USE_IMG_DENSE_LOSS True USE_KL_LOSS True USE_KL_LOSS_TYPE 'JS' \ +KL_LOSS_WEIGHT 1.0 USE_P2I_GATE True TRAIN.RPN_CE_WEIGHT 5.0 SAVE_MODEL_PREP 0.8 I2P_Weight 0.5 P2I_Weight 0.5 ADD_JS_MASK True JS_MASK_THRES 0.1 + + +################################## eval CAR +CUDA_VISIBLE_DEVICES=4 python eval_rcnn.py --cfg_file cfgs/CAR_EPNet_plus_plus.yaml --eval_mode rcnn_online \ +--eval_all --output_dir ./log/CAR_EPNet_plus_plus_js_01_run1/eval_results/ \ +--data_path ../data/ \ +--ckpt_dir ./log/CAR_EPNet_plus_plus_js_01_run1/ckpt \ +--set LI_FUSION.ENABLED True LI_FUSION.ADD_Image_Attention True RCNN.POOL_EXTRA_WIDTH 0.2 RPN.SCORE_THRESH 0.2 RCNN.SCORE_THRESH 0.2 USE_IOU_BRANCH True TRAIN.CE_WEIGHT 5.0 CROSS_FUSION True \ +USE_SELF_ATTENTION False DEEP_RCNN_FUSION False USE_IMAGE_LOSS True IMAGE_WEIGHT 1.0 USE_IMAGE_SCORE True USE_IMG_DENSE_LOSS True USE_KL_LOSS True USE_KL_LOSS_TYPE 'JS' \ +KL_LOSS_WEIGHT 1.0 USE_P2I_GATE True TRAIN.RPN_CE_WEIGHT 5.0 SAVE_MODEL_PREP 0.8 I2P_Weight 0.5 P2I_Weight 0.5 ADD_JS_MASK True JS_MASK_THRES 0.1 + + diff --git a/tools/run_train_epnet_plus_plus_car_4.sh b/tools/run_train_epnet_plus_plus_car_4.sh new file mode 100644 index 0000000..12398c8 --- /dev/null +++ b/tools/run_train_epnet_plus_plus_car_4.sh @@ -0,0 +1,23 @@ +#! 
/bin/bash + + +################################## train CAR +CUDA_VISIBLE_DEVICES=6,7 python train_rcnn.py --cfg_file cfgs/CAR_EPNet_plus_plus.yaml \ +--batch_size 4 --train_mode rcnn_online --epochs 50 --mgpus --ckpt_save_interval 1 \ +--output_dir ./log/CAR_EPNet_plus_plus_js_01_run2/ \ +--data_path ../data/ \ +--set LI_FUSION.ENABLED True LI_FUSION.ADD_Image_Attention True RCNN.POOL_EXTRA_WIDTH 0.2 RPN.SCORE_THRESH 0.2 RCNN.SCORE_THRESH 0.2 USE_IOU_BRANCH True TRAIN.CE_WEIGHT 5.0 CROSS_FUSION True \ +USE_SELF_ATTENTION False DEEP_RCNN_FUSION False USE_IMAGE_LOSS True IMAGE_WEIGHT 1.0 USE_IMAGE_SCORE True USE_IMG_DENSE_LOSS True USE_KL_LOSS True USE_KL_LOSS_TYPE 'JS' \ +KL_LOSS_WEIGHT 1.0 USE_P2I_GATE True TRAIN.RPN_CE_WEIGHT 5.0 SAVE_MODEL_PREP 0.8 I2P_Weight 0.5 P2I_Weight 0.5 ADD_JS_MASK True JS_MASK_THRES 0.1 + + +################################## eval CAR +CUDA_VISIBLE_DEVICES=7 python eval_rcnn.py --cfg_file cfgs/CAR_EPNet_plus_plus.yaml --eval_mode rcnn_online \ +--eval_all --output_dir ./log/CAR_EPNet_plus_plus_js_01_run2/eval_results/ \ +--data_path ../data/ \ +--ckpt_dir ./log/CAR_EPNet_plus_plus_js_01_run2/ckpt \ +--set LI_FUSION.ENABLED True LI_FUSION.ADD_Image_Attention True RCNN.POOL_EXTRA_WIDTH 0.2 RPN.SCORE_THRESH 0.2 RCNN.SCORE_THRESH 0.2 USE_IOU_BRANCH True TRAIN.CE_WEIGHT 5.0 CROSS_FUSION True \ +USE_SELF_ATTENTION False DEEP_RCNN_FUSION False USE_IMAGE_LOSS True IMAGE_WEIGHT 1.0 USE_IMAGE_SCORE True USE_IMG_DENSE_LOSS True USE_KL_LOSS True USE_KL_LOSS_TYPE 'JS' \ +KL_LOSS_WEIGHT 1.0 USE_P2I_GATE True TRAIN.RPN_CE_WEIGHT 5.0 SAVE_MODEL_PREP 0.8 I2P_Weight 0.5 P2I_Weight 0.5 ADD_JS_MASK True JS_MASK_THRES 0.1 + + diff --git a/tools/run_train_epnet_plus_plus_car_del_mc_loss.sh b/tools/run_train_epnet_plus_plus_car_del_mc_loss.sh new file mode 100644 index 0000000..005ac89 --- /dev/null +++ b/tools/run_train_epnet_plus_plus_car_del_mc_loss.sh @@ -0,0 +1,23 @@ +#! 
/bin/bash + + +################################## train CAR +CUDA_VISIBLE_DEVICES=6,7 python train_rcnn.py --cfg_file cfgs/CAR_EPNet_plus_plus.yaml \ +--batch_size 4 --train_mode rcnn_online --epochs 50 --mgpus --ckpt_save_interval 1 \ +--output_dir ./log/CAR_EPNet_plus_plus_del_mc_loss/ \ +--data_path ../data/ \ +--set LI_FUSION.ENABLED True LI_FUSION.ADD_Image_Attention True RCNN.POOL_EXTRA_WIDTH 0.2 RPN.SCORE_THRESH 0.2 RCNN.SCORE_THRESH 0.2 USE_IOU_BRANCH True TRAIN.CE_WEIGHT 5.0 CROSS_FUSION True \ +USE_SELF_ATTENTION False DEEP_RCNN_FUSION False USE_IMAGE_LOSS True IMAGE_WEIGHT 1.0 USE_IMAGE_SCORE True USE_IMG_DENSE_LOSS True USE_KL_LOSS True USE_KL_LOSS_TYPE 'JS' \ +KL_LOSS_WEIGHT 1.0 USE_P2I_GATE True TRAIN.RPN_CE_WEIGHT 5.0 SAVE_MODEL_PREP 0.8 I2P_Weight 0.0 P2I_Weight 0.0 ADD_JS_MASK True JS_MASK_THRES 0.05 + + +################################## eval CAR +CUDA_VISIBLE_DEVICES=6 python eval_rcnn.py --cfg_file cfgs/CAR_EPNet_plus_plus.yaml --eval_mode rcnn_online \ +--eval_all --output_dir ./log/CAR_EPNet_plus_plus_del_mc_loss/eval_results/ \ +--data_path ../data/ \ +--ckpt_dir ./log/CAR_EPNet_plus_plus_del_mc_loss/ckpt \ +--set LI_FUSION.ENABLED True LI_FUSION.ADD_Image_Attention True RCNN.POOL_EXTRA_WIDTH 0.2 RPN.SCORE_THRESH 0.2 RCNN.SCORE_THRESH 0.2 USE_IOU_BRANCH True TRAIN.CE_WEIGHT 5.0 CROSS_FUSION True \ +USE_SELF_ATTENTION False DEEP_RCNN_FUSION False USE_IMAGE_LOSS True IMAGE_WEIGHT 1.0 USE_IMAGE_SCORE True USE_IMG_DENSE_LOSS True USE_KL_LOSS True USE_KL_LOSS_TYPE 'JS' \ +KL_LOSS_WEIGHT 1.0 USE_P2I_GATE True TRAIN.RPN_CE_WEIGHT 5.0 SAVE_MODEL_PREP 0.8 I2P_Weight 0.0 P2I_Weight 0.0 ADD_JS_MASK True JS_MASK_THRES 0.05 + + diff --git a/tools/run_train_epnet_plus_plus_car_only_cb_fusion.sh b/tools/run_train_epnet_plus_plus_car_only_cb_fusion.sh new file mode 100644 index 0000000..b937dff --- /dev/null +++ b/tools/run_train_epnet_plus_plus_car_only_cb_fusion.sh @@ -0,0 +1,18 @@ +#! /bin/bash + + +################################## train CAR +CUDA_VISIBLE_DEVICES=0,1 python train_rcnn.py --cfg_file cfgs/CAR_EPNet_plus_plus.yaml \ +--batch_size 4 --train_mode rcnn_online --epochs 50 --mgpus --ckpt_save_interval 1 \ +--output_dir ./log/CAR_EPNet_plus_plus_only_cb_fusion/ \ +--data_path ../data/ \ +--set LI_FUSION.ENABLED True LI_FUSION.ADD_Image_Attention True RCNN.POOL_EXTRA_WIDTH 0.2 \ + USE_IOU_BRANCH True TRAIN.CE_WEIGHT 5.0 CROSS_FUSION True USE_P2I_GATE True + +################################## eval CAR +#CUDA_VISIBLE_DEVICES=0 python eval_rcnn.py --cfg_file cfgs/CAR_EPNet_plus_plus.yaml --eval_mode rcnn_online \ +#--eval_all --output_dir ./log/CAR_EPNet_plus_plus_only_cb_fusion/eval_results/ \ +#--data_path ../data/ --ckpt_dir ./log/CAR_EPNet_plus_plus_only_cb_fusion/ckpt \ +#--set LI_FUSION.ENABLED True LI_FUSION.ADD_Image_Attention True RCNN.POOL_EXTRA_WIDTH 0.2 \ +# USE_IOU_BRANCH True TRAIN.CE_WEIGHT 5.0 CROSS_FUSION True USE_P2I_GATE True + diff --git a/tools/run_train_epnet_plus_plus_car_only_cb_fusion_run2.sh b/tools/run_train_epnet_plus_plus_car_only_cb_fusion_run2.sh new file mode 100644 index 0000000..a723ede --- /dev/null +++ b/tools/run_train_epnet_plus_plus_car_only_cb_fusion_run2.sh @@ -0,0 +1,18 @@ +#! 
/bin/bash + + +################################## train CAR +CUDA_VISIBLE_DEVICES=2,3 python train_rcnn.py --cfg_file cfgs/CAR_EPNet_plus_plus.yaml \ +--batch_size 4 --train_mode rcnn_online --epochs 50 --mgpus --ckpt_save_interval 1 \ +--output_dir ./log/CAR_EPNet_plus_plus_only_cb_fusion_run2/ \ +--data_path ../data/ \ +--set LI_FUSION.ENABLED True LI_FUSION.ADD_Image_Attention True RCNN.POOL_EXTRA_WIDTH 0.2 \ + USE_IOU_BRANCH True TRAIN.CE_WEIGHT 5.0 CROSS_FUSION True USE_P2I_GATE True + +################################## eval CAR +#CUDA_VISIBLE_DEVICES=2 python eval_rcnn.py --cfg_file cfgs/CAR_EPNet_plus_plus.yaml --eval_mode rcnn_online \ +#--eval_all --output_dir ./log/CAR_EPNet_plus_plus_only_cb_fusion_run2/eval_results/ \ +#--data_path ../data/ --ckpt_dir ./log/CAR_EPNet_plus_plus_only_cb_fusion_run2/ckpt \ +#--set LI_FUSION.ENABLED True LI_FUSION.ADD_Image_Attention True RCNN.POOL_EXTRA_WIDTH 0.2 \ +# USE_IOU_BRANCH True TRAIN.CE_WEIGHT 5.0 CROSS_FUSION True USE_P2I_GATE True + diff --git a/tools/run_train_epnet_plus_plus_car_run_80ep.sh b/tools/run_train_epnet_plus_plus_car_run_80ep.sh new file mode 100644 index 0000000..4a9ad1a --- /dev/null +++ b/tools/run_train_epnet_plus_plus_car_run_80ep.sh @@ -0,0 +1,24 @@ +#! /bin/bash + + +################################## train CAR +CUDA_VISIBLE_DEVICES=6,7 python train_rcnn.py --cfg_file cfgs/CAR_EPNet_plus_plus.yaml \ +--batch_size 4 --train_mode rcnn_online --epochs 80 --mgpus --ckpt_save_interval 1 \ +--output_dir ./log/CAR_EPNet_plus_plus_run_80ep/ \ +--data_path ../data/ \ +--set LI_FUSION.ENABLED True LI_FUSION.ADD_Image_Attention True RCNN.POOL_EXTRA_WIDTH 0.2 RPN.SCORE_THRESH 0.2 RCNN.SCORE_THRESH 0.2 USE_IOU_BRANCH True TRAIN.CE_WEIGHT 5.0 CROSS_FUSION True \ +USE_SELF_ATTENTION False DEEP_RCNN_FUSION False USE_IMAGE_LOSS True IMAGE_WEIGHT 1.0 USE_IMAGE_SCORE True USE_IMG_DENSE_LOSS True USE_KL_LOSS True USE_KL_LOSS_TYPE 'JS' \ +KL_LOSS_WEIGHT 1.0 USE_P2I_GATE True TRAIN.RPN_CE_WEIGHT 5.0 SAVE_MODEL_PREP 0.8 I2P_Weight 0.5 P2I_Weight 0.5 ADD_JS_MASK True JS_MASK_THRES 0.05 + + +################################## eval CAR +CUDA_VISIBLE_DEVICES=1 python eval_rcnn.py --cfg_file cfgs/CAR_EPNet_plus_plus.yaml --eval_mode rcnn_online \ +--eval_all --output_dir ./log/CAR_EPNet_plus_plus_run_80ep/eval_results/ \ +--data_path ../data/ \ +--ckpt_dir ./log/CAR_EPNet_plus_plus_run_80ep/ckpt \ +--set LI_FUSION.ENABLED True LI_FUSION.ADD_Image_Attention True RCNN.POOL_EXTRA_WIDTH 0.2 RPN.SCORE_THRESH 0.2 RCNN.SCORE_THRESH 0.2 USE_IOU_BRANCH True TRAIN.CE_WEIGHT 5.0 CROSS_FUSION True \ +USE_SELF_ATTENTION False DEEP_RCNN_FUSION False USE_IMAGE_LOSS True IMAGE_WEIGHT 1.0 USE_IMAGE_SCORE True USE_IMG_DENSE_LOSS True USE_KL_LOSS True USE_KL_LOSS_TYPE 'JS' \ +KL_LOSS_WEIGHT 1.0 USE_P2I_GATE True TRAIN.RPN_CE_WEIGHT 5.0 SAVE_MODEL_PREP 0.8 I2P_Weight 0.5 P2I_Weight 0.5 ADD_JS_MASK True JS_MASK_THRES 0.05 + + + diff --git a/tools/run_train_epnet_plus_plus_cyc.sh b/tools/run_train_epnet_plus_plus_cyc.sh new file mode 100644 index 0000000..5e31e6d --- /dev/null +++ b/tools/run_train_epnet_plus_plus_cyc.sh @@ -0,0 +1,44 @@ +#! 
/bin/bash + + +################################## train CYC +CUDA_VISIBLE_DEVICES=2,3 python train_rcnn.py --cfg_file cfgs/CYC_EPNet_plus_plus.yaml \ +--batch_size 4 --train_mode rcnn_online --epochs 50 --mgpus --ckpt_save_interval 1 \ +--output_dir ./log/CYC_EPNet_plus_plus_js_mask_thr_02/ \ +--data_path ../data/ \ +--set LI_FUSION.ENABLED True LI_FUSION.ADD_Image_Attention True RCNN.POOL_EXTRA_WIDTH 0.2 RPN.SCORE_THRESH 0.2 RCNN.SCORE_THRESH 0.2 USE_IOU_BRANCH True TRAIN.CE_WEIGHT 5.0 CROSS_FUSION True \ +USE_SELF_ATTENTION False DEEP_RCNN_FUSION False USE_IMAGE_LOSS True IMAGE_WEIGHT 1.0 USE_IMAGE_SCORE True USE_IMG_DENSE_LOSS True USE_KL_LOSS True USE_KL_LOSS_TYPE 'JS' \ +KL_LOSS_WEIGHT 1.0 USE_P2I_GATE True TRAIN.RPN_CE_WEIGHT 5.0 SAVE_MODEL_PREP 0.8 I2P_Weight 0.5 P2I_Weight 0.5 ADD_JS_MASK True JS_MASK_THRES 0.2 + + + +################################## eval CYC +CUDA_VISIBLE_DEVICES=2 python eval_rcnn.py --cfg_file cfgs/CYC_EPNet_plus_plus.yaml --eval_mode rcnn_online \ +--eval_all --output_dir ./log/CYC_EPNet_plus_plus_js_mask_thr_02/eval_results/ \ +--data_path ../data/ \ +--ckpt_dir ./log/CYC_EPNet_plus_plus_js_mask_thr_02/ckpt \ +--set LI_FUSION.ENABLED True LI_FUSION.ADD_Image_Attention True RCNN.POOL_EXTRA_WIDTH 0.2 RPN.SCORE_THRESH 0.2 RCNN.SCORE_THRESH 0.2 USE_IOU_BRANCH True TRAIN.CE_WEIGHT 5.0 CROSS_FUSION True \ +USE_SELF_ATTENTION False DEEP_RCNN_FUSION False USE_IMAGE_LOSS True IMAGE_WEIGHT 1.0 USE_IMAGE_SCORE True USE_IMG_DENSE_LOSS True USE_KL_LOSS True USE_KL_LOSS_TYPE 'JS' \ +KL_LOSS_WEIGHT 1.0 USE_P2I_GATE True TRAIN.RPN_CE_WEIGHT 5.0 SAVE_MODEL_PREP 0.8 I2P_Weight 0.5 P2I_Weight 0.5 ADD_JS_MASK True JS_MASK_THRES 0.2 + + + +#CUDA_VISIBLE_DEVICES=2 python eval_rcnn.py --cfg_file cfgs/CYC_EPNet_plus_plus.yaml --eval_mode rcnn_online \ +#--eval_all --output_dir ./log/CYC_EPNet_plus_plus_js_mask_thr_01/eval_results_200_08/ \ +#--data_path ../data/ \ +#--ckpt_dir ./log/CYC_EPNet_plus_plus_js_mask_thr_01/ckpt \ +#--set LI_FUSION.ENABLED True LI_FUSION.ADD_Image_Attention True RCNN.POOL_EXTRA_WIDTH 0.2 RPN.SCORE_THRESH 0.2 RCNN.SCORE_THRESH 0.2 USE_IOU_BRANCH True TRAIN.CE_WEIGHT 5.0 CROSS_FUSION True \ +#USE_SELF_ATTENTION False DEEP_RCNN_FUSION False USE_IMAGE_LOSS True IMAGE_WEIGHT 1.0 USE_IMAGE_SCORE True USE_IMG_DENSE_LOSS True USE_KL_LOSS True USE_KL_LOSS_TYPE 'JS' \ +#KL_LOSS_WEIGHT 1.0 USE_P2I_GATE True TRAIN.RPN_CE_WEIGHT 5.0 SAVE_MODEL_PREP 0.8 I2P_Weight 0.5 P2I_Weight 0.5 ADD_JS_MASK True JS_MASK_THRES 0.1 TEST.RPN_POST_NMS_TOP_N 200 +# +# +# +#CUDA_VISIBLE_DEVICES=2 python eval_rcnn.py --cfg_file cfgs/CYC_EPNet_plus_plus.yaml --eval_mode rcnn_online \ +#--eval_all --output_dir ./log/CYC_EPNet_plus_plus_js_mask_thr_01/eval_results_200_08/ \ +#--data_path ../data/ \ +#--ckpt_dir ./log/CYC_EPNet_plus_plus_js_mask_thr_01/ckpt \ +#--set LI_FUSION.ENABLED True LI_FUSION.ADD_Image_Attention True RCNN.POOL_EXTRA_WIDTH 0.2 RPN.SCORE_THRESH 0.2 RCNN.SCORE_THRESH 0.2 USE_IOU_BRANCH True TRAIN.CE_WEIGHT 5.0 CROSS_FUSION True \ +#USE_SELF_ATTENTION False DEEP_RCNN_FUSION False USE_IMAGE_LOSS True IMAGE_WEIGHT 1.0 USE_IMAGE_SCORE True USE_IMG_DENSE_LOSS True USE_KL_LOSS True USE_KL_LOSS_TYPE 'JS' \ +#KL_LOSS_WEIGHT 1.0 USE_P2I_GATE True TRAIN.RPN_CE_WEIGHT 5.0 SAVE_MODEL_PREP 0.8 I2P_Weight 0.5 P2I_Weight 0.5 ADD_JS_MASK True JS_MASK_THRES 0.1 TEST.RPN_POST_NMS_TOP_N 200 +# +# diff --git a/tools/run_train_epnet_plus_plus_cyc_run2.sh b/tools/run_train_epnet_plus_plus_cyc_run2.sh new file mode 100644 index 0000000..1fd8488 --- /dev/null +++ 
b/tools/run_train_epnet_plus_plus_cyc_run2.sh @@ -0,0 +1,26 @@ +#! /bin/bash + + +################################## train CYC +CUDA_VISIBLE_DEVICES=0,1 python train_rcnn.py --cfg_file cfgs/CYC_EPNet_plus_plus.yaml \ +--batch_size 4 --train_mode rcnn_online --epochs 50 --mgpus --ckpt_save_interval 1 \ +--output_dir ./log/CYC_EPNet_plus_plus_js_mask_thr_02_run2/ \ +--data_path ../data/ \ +--set LI_FUSION.ENABLED True LI_FUSION.ADD_Image_Attention True RCNN.POOL_EXTRA_WIDTH 0.2 RPN.SCORE_THRESH 0.2 RCNN.SCORE_THRESH 0.2 USE_IOU_BRANCH True TRAIN.CE_WEIGHT 5.0 CROSS_FUSION True \ +USE_SELF_ATTENTION False DEEP_RCNN_FUSION False USE_IMAGE_LOSS True IMAGE_WEIGHT 1.0 USE_IMAGE_SCORE True USE_IMG_DENSE_LOSS True USE_KL_LOSS True USE_KL_LOSS_TYPE 'JS' \ +KL_LOSS_WEIGHT 1.0 USE_P2I_GATE True TRAIN.RPN_CE_WEIGHT 5.0 SAVE_MODEL_PREP 0.8 I2P_Weight 0.5 P2I_Weight 0.5 ADD_JS_MASK True JS_MASK_THRES 0.2 + + + +################################## eval CYC +CUDA_VISIBLE_DEVICES=1 python eval_rcnn.py --cfg_file cfgs/CYC_EPNet_plus_plus.yaml --eval_mode rcnn_online \ +--eval_all --output_dir ./log/CYC_EPNet_plus_plus_js_mask_thr_02_run2/eval_results/ \ +--data_path ../data/ \ +--ckpt_dir ./log/CYC_EPNet_plus_plus_js_mask_thr_02_run2/ckpt \ +--set LI_FUSION.ENABLED True LI_FUSION.ADD_Image_Attention True RCNN.POOL_EXTRA_WIDTH 0.2 RPN.SCORE_THRESH 0.2 RCNN.SCORE_THRESH 0.2 USE_IOU_BRANCH True TRAIN.CE_WEIGHT 5.0 CROSS_FUSION True \ +USE_SELF_ATTENTION False DEEP_RCNN_FUSION False USE_IMAGE_LOSS True IMAGE_WEIGHT 1.0 USE_IMAGE_SCORE True USE_IMG_DENSE_LOSS True USE_KL_LOSS True USE_KL_LOSS_TYPE 'JS' \ +KL_LOSS_WEIGHT 1.0 USE_P2I_GATE True TRAIN.RPN_CE_WEIGHT 5.0 SAVE_MODEL_PREP 0.8 I2P_Weight 0.5 P2I_Weight 0.5 ADD_JS_MASK True JS_MASK_THRES 0.2 + + + + diff --git a/tools/run_train_epnet_plus_plus_ped.sh b/tools/run_train_epnet_plus_plus_ped.sh new file mode 100644 index 0000000..d0d11ce --- /dev/null +++ b/tools/run_train_epnet_plus_plus_ped.sh @@ -0,0 +1,21 @@ +#! 
/bin/bash + +################################## train PED +CUDA_VISIBLE_DEVICES=4,5 python train_rcnn.py --cfg_file cfgs/PED_EPNet_plus_plus.yaml \ +--batch_size 4 --train_mode rcnn_online --epochs 50 --mgpus --ckpt_save_interval 1 \ +--output_dir ./log/PED_EPNet_plus_plus_js_mask_00/ \ +--data_path ../data/ \ +--set LI_FUSION.ENABLED True LI_FUSION.ADD_Image_Attention True RCNN.POOL_EXTRA_WIDTH 0.2 RPN.SCORE_THRESH 0.2 RCNN.SCORE_THRESH 0.2 USE_IOU_BRANCH True TRAIN.CE_WEIGHT 5.0 CROSS_FUSION True \ +USE_SELF_ATTENTION False DEEP_RCNN_FUSION False USE_IMAGE_LOSS True IMAGE_WEIGHT 1.0 USE_IMAGE_SCORE True USE_IMG_DENSE_LOSS True USE_KL_LOSS True USE_KL_LOSS_TYPE 'JS' \ +KL_LOSS_WEIGHT 1.0 USE_P2I_GATE True TRAIN.RPN_CE_WEIGHT 5.0 SAVE_MODEL_PREP 0.8 I2P_Weight 0.5 P2I_Weight 0.5 ADD_JS_MASK True JS_MASK_THRES 0.0 + + +################################## eval PED +CUDA_VISIBLE_DEVICES=4 python eval_rcnn.py --cfg_file cfgs/PED_EPNet_plus_plus.yaml --eval_mode rcnn_online \ +--eval_all --output_dir ./log/PED_EPNet_plus_plus_js_mask_00/eval_results/ \ +--data_path ../data/ \ +--ckpt_dir ./log/PED_EPNet_plus_plus_js_mask_00/ckpt \ +--set LI_FUSION.ENABLED True LI_FUSION.ADD_Image_Attention True RCNN.POOL_EXTRA_WIDTH 0.2 RPN.SCORE_THRESH 0.2 RCNN.SCORE_THRESH 0.2 USE_IOU_BRANCH True TRAIN.CE_WEIGHT 5.0 CROSS_FUSION True \ +USE_SELF_ATTENTION False DEEP_RCNN_FUSION False USE_IMAGE_LOSS True IMAGE_WEIGHT 1.0 USE_IMAGE_SCORE True USE_IMG_DENSE_LOSS True USE_KL_LOSS True USE_KL_LOSS_TYPE 'JS' \ +KL_LOSS_WEIGHT 1.0 USE_P2I_GATE True TRAIN.RPN_CE_WEIGHT 5.0 SAVE_MODEL_PREP 0.8 I2P_Weight 0.5 P2I_Weight 0.5 ADD_JS_MASK True JS_MASK_THRES 0.0 + diff --git a/tools/run_train_epnet_plus_plus_ped_2.sh b/tools/run_train_epnet_plus_plus_ped_2.sh new file mode 100644 index 0000000..8633167 --- /dev/null +++ b/tools/run_train_epnet_plus_plus_ped_2.sh @@ -0,0 +1,21 @@ +#! 
/bin/bash + +################################## train PED +CUDA_VISIBLE_DEVICES=4,5 python train_rcnn.py --cfg_file cfgs/PED_EPNet_plus_plus.yaml \ +--batch_size 4 --train_mode rcnn_online --epochs 50 --mgpus --ckpt_save_interval 1 \ +--output_dir ./log/PED_EPNet_plus_plus_js_mask_02_run1/ \ +--data_path ../data/ \ +--set LI_FUSION.ENABLED True LI_FUSION.ADD_Image_Attention True RCNN.POOL_EXTRA_WIDTH 0.2 RPN.SCORE_THRESH 0.2 RCNN.SCORE_THRESH 0.2 USE_IOU_BRANCH True TRAIN.CE_WEIGHT 5.0 CROSS_FUSION True \ +USE_SELF_ATTENTION False DEEP_RCNN_FUSION False USE_IMAGE_LOSS True IMAGE_WEIGHT 1.0 USE_IMAGE_SCORE True USE_IMG_DENSE_LOSS True USE_KL_LOSS True USE_KL_LOSS_TYPE 'JS' \ +KL_LOSS_WEIGHT 1.0 USE_P2I_GATE True TRAIN.RPN_CE_WEIGHT 5.0 SAVE_MODEL_PREP 0.8 I2P_Weight 0.5 P2I_Weight 0.5 ADD_JS_MASK True JS_MASK_THRES 0.2 + + +################################## eval PED +CUDA_VISIBLE_DEVICES=4 python eval_rcnn.py --cfg_file cfgs/PED_EPNet_plus_plus.yaml --eval_mode rcnn_online \ +--eval_all --output_dir ./log/PED_EPNet_plus_plus_js_mask_02_run1/eval_results/ \ +--data_path ../data/ \ +--ckpt_dir ./log/PED_EPNet_plus_plus_js_mask_02_run1/ckpt \ +--set LI_FUSION.ENABLED True LI_FUSION.ADD_Image_Attention True RCNN.POOL_EXTRA_WIDTH 0.2 RPN.SCORE_THRESH 0.2 RCNN.SCORE_THRESH 0.2 USE_IOU_BRANCH True TRAIN.CE_WEIGHT 5.0 CROSS_FUSION True \ +USE_SELF_ATTENTION False DEEP_RCNN_FUSION False USE_IMAGE_LOSS True IMAGE_WEIGHT 1.0 USE_IMAGE_SCORE True USE_IMG_DENSE_LOSS True USE_KL_LOSS True USE_KL_LOSS_TYPE 'JS' \ +KL_LOSS_WEIGHT 1.0 USE_P2I_GATE True TRAIN.RPN_CE_WEIGHT 5.0 SAVE_MODEL_PREP 0.8 I2P_Weight 0.5 P2I_Weight 0.5 ADD_JS_MASK True JS_MASK_THRES 0.2 + diff --git a/tools/run_train_epnet_plus_plus_ped_3.sh b/tools/run_train_epnet_plus_plus_ped_3.sh new file mode 100644 index 0000000..cbaa6a4 --- /dev/null +++ b/tools/run_train_epnet_plus_plus_ped_3.sh @@ -0,0 +1,21 @@ +#! 
+
+################################## train PED
+CUDA_VISIBLE_DEVICES=6,7 python train_rcnn.py --cfg_file cfgs/PED_EPNet_plus_plus.yaml \
+--batch_size 4 --train_mode rcnn_online --epochs 50 --mgpus --ckpt_save_interval 1 \
+--output_dir ./log/PED_EPNet_plus_plus_js_mask_02_run2/ \
+--data_path ../data/ \
+--set LI_FUSION.ENABLED True LI_FUSION.ADD_Image_Attention True RCNN.POOL_EXTRA_WIDTH 0.2 RPN.SCORE_THRESH 0.2 RCNN.SCORE_THRESH 0.2 USE_IOU_BRANCH True TRAIN.CE_WEIGHT 5.0 CROSS_FUSION True \
+USE_SELF_ATTENTION False DEEP_RCNN_FUSION False USE_IMAGE_LOSS True IMAGE_WEIGHT 1.0 USE_IMAGE_SCORE True USE_IMG_DENSE_LOSS True USE_KL_LOSS True USE_KL_LOSS_TYPE 'JS' \
+KL_LOSS_WEIGHT 1.0 USE_P2I_GATE True TRAIN.RPN_CE_WEIGHT 5.0 SAVE_MODEL_PREP 0.8 I2P_Weight 0.5 P2I_Weight 0.5 ADD_JS_MASK True JS_MASK_THRES 0.2
+
+
+################################## eval PED
+CUDA_VISIBLE_DEVICES=7 python eval_rcnn.py --cfg_file cfgs/PED_EPNet_plus_plus.yaml --eval_mode rcnn_online \
+--eval_all --output_dir ./log/PED_EPNet_plus_plus_js_mask_02_run2/eval_results/ \
+--data_path ../data/ \
+--ckpt_dir ./log/PED_EPNet_plus_plus_js_mask_02_run2/ckpt \
+--set LI_FUSION.ENABLED True LI_FUSION.ADD_Image_Attention True RCNN.POOL_EXTRA_WIDTH 0.2 RPN.SCORE_THRESH 0.2 RCNN.SCORE_THRESH 0.2 USE_IOU_BRANCH True TRAIN.CE_WEIGHT 5.0 CROSS_FUSION True \
+USE_SELF_ATTENTION False DEEP_RCNN_FUSION False USE_IMAGE_LOSS True IMAGE_WEIGHT 1.0 USE_IMAGE_SCORE True USE_IMG_DENSE_LOSS True USE_KL_LOSS True USE_KL_LOSS_TYPE 'JS' \
+KL_LOSS_WEIGHT 1.0 USE_P2I_GATE True TRAIN.RPN_CE_WEIGHT 5.0 SAVE_MODEL_PREP 0.8 I2P_Weight 0.5 P2I_Weight 0.5 ADD_JS_MASK True JS_MASK_THRES 0.2
+
diff --git a/tools/run_train_epnet_plus_plus_ped_only_cb_fusion.sh b/tools/run_train_epnet_plus_plus_ped_only_cb_fusion.sh
new file mode 100644
index 0000000..493c46f
--- /dev/null
+++ b/tools/run_train_epnet_plus_plus_ped_only_cb_fusion.sh
@@ -0,0 +1,79 @@
+#!/bin/bash
+
+
+################################## train PED
+CUDA_VISIBLE_DEVICES=5,6 python train_rcnn.py --cfg_file cfgs/PED_EPNet_plus_plus.yaml \
+--batch_size 4 --train_mode rcnn_online --epochs 50 --mgpus --ckpt_save_interval 1 \
+--output_dir ./log/PED_EPNet_plus_plus_only_cb_fusion/ \
+--data_path ../data/ \
+--set LI_FUSION.ENABLED True LI_FUSION.ADD_Image_Attention True RCNN.POOL_EXTRA_WIDTH 0.2 \
+     USE_IOU_BRANCH True TRAIN.CE_WEIGHT 5.0 CROSS_FUSION True USE_P2I_GATE True
+
+
+################################## train PED
+CUDA_VISIBLE_DEVICES=5,6 python train_rcnn.py --cfg_file cfgs/PED_EPNet_plus_plus.yaml \
+--batch_size 4 --train_mode rcnn_online --epochs 50 --mgpus --ckpt_save_interval 1 \
+--output_dir ./log/PED_EPNet_plus_plus_only_cb_fusion_run2/ \
+--data_path ../data/ \
+--set LI_FUSION.ENABLED True LI_FUSION.ADD_Image_Attention True RCNN.POOL_EXTRA_WIDTH 0.2 \
+     USE_IOU_BRANCH True TRAIN.CE_WEIGHT 5.0 CROSS_FUSION True USE_P2I_GATE True
+
+
+################################## eval PED
+#CUDA_VISIBLE_DEVICES=4 python eval_rcnn.py --cfg_file cfgs/PED_EPNet_plus_plus.yaml --eval_mode rcnn_online \
+#--eval_all --output_dir ./log/PED_EPNet_plus_plus_only_cb_fusion/eval_results/ \
+#--data_path ../data/ --ckpt_dir ./log/PED_EPNet_plus_plus_only_cb_fusion/ckpt \
+#--set LI_FUSION.ENABLED True LI_FUSION.ADD_Image_Attention True RCNN.POOL_EXTRA_WIDTH 0.2 \
+#     USE_IOU_BRANCH True TRAIN.CE_WEIGHT 5.0 CROSS_FUSION True USE_P2I_GATE True
+#
+#
+#CUDA_VISIBLE_DEVICES=7 python eval_rcnn.py --cfg_file cfgs/PED_EPNet_plus_plus.yaml --eval_mode rcnn_online \
+#--eval_all --output_dir ./log/PED_EPNet_plus_plus_only_cb_fusion_run2/eval_results/ \
+#--data_path ../data/ --ckpt_dir ./log/PED_EPNet_plus_plus_only_cb_fusion_run2/ckpt \
+#--set LI_FUSION.ENABLED True LI_FUSION.ADD_Image_Attention True RCNN.POOL_EXTRA_WIDTH 0.2 \
+#     USE_IOU_BRANCH True TRAIN.CE_WEIGHT 5.0 CROSS_FUSION True USE_P2I_GATE True
+
+
+
+
+################################## train CYC
+CUDA_VISIBLE_DEVICES=5,6 python train_rcnn.py --cfg_file cfgs/CYC_EPNet_plus_plus.yaml \
+--batch_size 4 --train_mode rcnn_online --epochs 50 --mgpus --ckpt_save_interval 1 \
+--output_dir ./log/CYC_EPNet_plus_plus_only_cb_fusion/ \
+--data_path ../data/ \
+--set LI_FUSION.ENABLED True LI_FUSION.ADD_Image_Attention True RCNN.POOL_EXTRA_WIDTH 0.2 \
+     USE_IOU_BRANCH True TRAIN.CE_WEIGHT 5.0 CROSS_FUSION True USE_P2I_GATE True
+
+
+################################## train CYC
+CUDA_VISIBLE_DEVICES=5,6 python train_rcnn.py --cfg_file cfgs/CYC_EPNet_plus_plus.yaml \
+--batch_size 4 --train_mode rcnn_online --epochs 50 --mgpus --ckpt_save_interval 1 \
+--output_dir ./log/CYC_EPNet_plus_plus_only_cb_fusion_run2/ \
+--data_path ../data/ \
+--set LI_FUSION.ENABLED True LI_FUSION.ADD_Image_Attention True RCNN.POOL_EXTRA_WIDTH 0.2 \
+     USE_IOU_BRANCH True TRAIN.CE_WEIGHT 5.0 CROSS_FUSION True USE_P2I_GATE True
+
+
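+# The eval blocks below are kept commented out; each one scores every checkpoint
+# saved under the run's ckpt/ directory via eval_rcnn.py --eval_all.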
+################################## eval CYC
+#CUDA_VISIBLE_DEVICES=4 python eval_rcnn.py --cfg_file cfgs/CYC_EPNet_plus_plus.yaml --eval_mode rcnn_online \
+#--eval_all --output_dir ./log/CYC_EPNet_plus_plus_only_cb_fusion/eval_results/ \
+#--data_path ../data/ --ckpt_dir ./log/CYC_EPNet_plus_plus_only_cb_fusion/ckpt \
+#--set LI_FUSION.ENABLED True LI_FUSION.ADD_Image_Attention True RCNN.POOL_EXTRA_WIDTH 0.2 \
+#USE_IOU_BRANCH True TRAIN.CE_WEIGHT 5.0 CROSS_FUSION True USE_P2I_GATE True
+
+
+#CUDA_VISIBLE_DEVICES=7 python eval_rcnn.py --cfg_file cfgs/CYC_EPNet_plus_plus.yaml --eval_mode rcnn_online \
+#--eval_all --output_dir ./log/CYC_EPNet_plus_plus_only_cb_fusion_run2/eval_results/ \
+#--data_path ../data/ --ckpt_dir ./log/CYC_EPNet_plus_plus_only_cb_fusion_run2/ckpt \
+#--set LI_FUSION.ENABLED True LI_FUSION.ADD_Image_Attention True RCNN.POOL_EXTRA_WIDTH 0.2 \
+#USE_IOU_BRANCH True TRAIN.CE_WEIGHT 5.0 CROSS_FUSION True USE_P2I_GATE True
+
+
+
+
+CUDA_VISIBLE_DEVICES=5,6 python train_rcnn.py --cfg_file cfgs/CYC_EPNet_plus_plus.yaml \
+--batch_size 4 --train_mode rcnn_online --epochs 50 --mgpus --ckpt_save_interval 1 \
+--output_dir ./log/debug/ \
+--data_path ../data/ \
+--set LI_FUSION.ENABLED True LI_FUSION.ADD_Image_Attention True RCNN.POOL_EXTRA_WIDTH 0.2 \
+     USE_IOU_BRANCH True TRAIN.CE_WEIGHT 5.0 CROSS_FUSION True USE_P2I_GATE True
\ No newline at end of file
diff --git a/tools/train_rcnn.py b/tools/train_rcnn.py
new file mode 100644
index 0000000..0b577c9
--- /dev/null
+++ b/tools/train_rcnn.py
@@ -0,0 +1,279 @@
+import _init_path
+import torch
+import torch.optim as optim
+import torch.optim.lr_scheduler as lr_sched
+import torch.nn as nn
+from torch.utils.data import DataLoader
+from tensorboardX import SummaryWriter
+import os
+import argparse
+import logging
+from functools import partial
+
+from lib.net.point_rcnn import PointRCNN
+
+import lib.net.train_functions as train_functions
+
+from lib.datasets.kitti_rcnn_dataset import KittiRCNNDataset
+from lib.config import cfg, cfg_from_file, save_config_to_file, cfg_from_list
+import tools.train_utils.train_utils as train_utils
+from tools.train_utils.fastai_optim import OptimWrapper
+from tools.train_utils import learning_schedules_fastai as lsf
+
+parser = argparse.ArgumentParser(description = "arg parser")
+parser.add_argument('--cfg_file', type = str, default = 'cfgs/LI_Fusion_with_attention_use_ce_loss.yaml', help = 'specify the config for training')
+parser.add_argument("--train_mode", type = str, default = 'rpn', required = True, help = "specify the training mode")
+parser.add_argument("--batch_size", type = int, default = 16, required = True, help = "batch size for training")
+parser.add_argument("--epochs", type = int, default = 200, required = True, help = "number of epochs to train for")
+
+parser.add_argument('--workers', type = int, default = 8, help = 'number of workers for dataloader')
+parser.add_argument("--ckpt_save_interval", type = int, default = 5, help = "number of epochs between checkpoint saves")
+parser.add_argument('--output_dir', type = str, default = None, help = 'specify an output directory if needed')
+parser.add_argument('--mgpus', action = 'store_true', default = False, help = 'whether to use multiple gpus')
+parser.add_argument('--data_path', type = str, default = None, help = 'specify the dataset root directory if needed')
+
+parser.add_argument("--ckpt", type = str, default = None, help = "continue training from this checkpoint")
+parser.add_argument("--rpn_ckpt", type = str, default = None, help = "specify the well-trained rpn checkpoint")
+
+parser.add_argument("--gt_database", type = str, default = None,
+                    help = 'generated gt database for augmentation')
+parser.add_argument("--rcnn_training_roi_dir", type = str, default = None,
+                    help = 'specify the saved rois for rcnn training when using rcnn_offline mode')
+parser.add_argument("--rcnn_training_feature_dir", type = str, default = None,
+                    help = 'specify the saved features for rcnn training when using rcnn_offline mode')
+
+parser.add_argument('--train_with_eval', action = 'store_true', default = False,
+                    help = 'whether to train with evaluation')
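+# Example invocation (a sketch mirroring the tools/run_train_epnet_plus_plus_*.sh
+# scripts; device ids and the output path are placeholders):
+#   CUDA_VISIBLE_DEVICES=0,1 python train_rcnn.py --cfg_file cfgs/PED_EPNet_plus_plus.yaml \
+#       --batch_size 4 --train_mode rcnn_online --epochs 50 --mgpus --data_path ../data/ \
+#       --output_dir ./log/my_run/ --set LI_FUSION.ENABLED True CROSS_FUSION True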
+parser.add_argument("--rcnn_eval_roi_dir", type = str, default = None, + help = 'specify the saved rois for rcnn evaluation when using rcnn_offline mode') +parser.add_argument("--rcnn_eval_feature_dir", type = str, default = None, + help = 'specify the saved features for rcnn evaluation when using rcnn_offline mode') +parser.add_argument('--set', dest = 'set_cfgs', default = None, nargs = argparse.REMAINDER, + help = 'set extra config keys if needed') +parser.add_argument('--model_type', type = str, default = 'base', help = 'model type') +args = parser.parse_args() + + +def create_logger(log_file): + log_format = '%(asctime)s %(levelname)5s %(message)s' + logging.basicConfig(level = logging.DEBUG, format = log_format, filename = log_file) + console = logging.StreamHandler() + console.setLevel(logging.DEBUG) + console.setFormatter(logging.Formatter(log_format)) + logging.getLogger(__name__).addHandler(console) + return logging.getLogger(__name__) + + +def create_dataloader(logger, data_path): + DATA_PATH = os.path.join(data_path) #os.path.join('../', 'data') + + # create dataloader + train_set = KittiRCNNDataset(root_dir = DATA_PATH, npoints = cfg.RPN.NUM_POINTS, split = cfg.TRAIN.SPLIT, + mode='TRAIN', + logger = logger, + classes = cfg.CLASSES, + rcnn_training_roi_dir = args.rcnn_training_roi_dir, + rcnn_training_feature_dir = args.rcnn_training_feature_dir, + gt_database_dir = args.gt_database) + train_loader = DataLoader(train_set, batch_size = args.batch_size, pin_memory = True, + num_workers = args.workers, shuffle = True, collate_fn = train_set.collate_batch, + drop_last = True) + + if args.train_with_eval: + test_set = KittiRCNNDataset(root_dir = DATA_PATH, npoints = cfg.RPN.NUM_POINTS, split = cfg.TRAIN.VAL_SPLIT, + mode = 'EVAL', + logger = logger, + classes = cfg.CLASSES, + rcnn_eval_roi_dir = args.rcnn_eval_roi_dir, + rcnn_eval_feature_dir = args.rcnn_eval_feature_dir) + test_loader = DataLoader(test_set, batch_size = 1, shuffle = True, pin_memory = True, + num_workers = args.workers, collate_fn = test_set.collate_batch) + else: + test_loader = None + return train_loader, test_loader + + +def create_optimizer(model): + if cfg.TRAIN.OPTIMIZER == 'adam': + optimizer = optim.Adam(model.parameters(), lr = cfg.TRAIN.LR, weight_decay = cfg.TRAIN.WEIGHT_DECAY) + elif cfg.TRAIN.OPTIMIZER == 'sgd': + optimizer = optim.SGD(model.parameters(), lr = cfg.TRAIN.LR, weight_decay = cfg.TRAIN.WEIGHT_DECAY, + momentum = cfg.TRAIN.MOMENTUM) + elif cfg.TRAIN.OPTIMIZER == 'adam_onecycle': + def children(m: nn.Module): + return list(m.children()) + + def num_children(m: nn.Module) -> int: + return len(children(m)) + + flatten_model = lambda m: sum(map(flatten_model, m.children()), []) if num_children(m) else [m] + get_layer_groups = lambda m: [nn.Sequential(*flatten_model(m))] + + optimizer_func = partial(optim.Adam, betas = (0.9, 0.99)) + optimizer = OptimWrapper.create( + optimizer_func, 3e-3, get_layer_groups(model), wd = cfg.TRAIN.WEIGHT_DECAY, true_wd = True, bn_wd = True + ) + + # fix rpn: do this since we use costomized optimizer.step + if cfg.RPN.ENABLED and cfg.RPN.FIXED: + for param in model.rpn.parameters(): + param.requires_grad = False + else: + raise NotImplementedError + + return optimizer + + +def create_scheduler(optimizer, total_steps, last_epoch): + def lr_lbmd(cur_epoch): + cur_decay = 1 + for decay_step in cfg.TRAIN.DECAY_STEP_LIST: + if cur_epoch >= decay_step: + cur_decay = cur_decay * cfg.TRAIN.LR_DECAY + return max(cur_decay, cfg.TRAIN.LR_CLIP / cfg.TRAIN.LR) + + def 
+    def bnm_lmbd(cur_epoch):
+        cur_decay = 1
+        for decay_step in cfg.TRAIN.BN_DECAY_STEP_LIST:
+            if cur_epoch >= decay_step:
+                cur_decay = cur_decay * cfg.TRAIN.BN_DECAY
+        return max(cfg.TRAIN.BN_MOMENTUM * cur_decay, cfg.TRAIN.BNM_CLIP)
+
+    if cfg.TRAIN.OPTIMIZER == 'adam_onecycle':
+        lr_scheduler = lsf.OneCycle(
+                optimizer, total_steps, cfg.TRAIN.LR, list(cfg.TRAIN.MOMS), cfg.TRAIN.DIV_FACTOR, cfg.TRAIN.PCT_START
+        )
+    else:
+        lr_scheduler = lr_sched.LambdaLR(optimizer, lr_lbmd, last_epoch = last_epoch)
+
+    bnm_scheduler = train_utils.BNMomentumScheduler(model, bnm_lmbd, last_epoch = last_epoch)
+    return lr_scheduler, bnm_scheduler
+
+
+if __name__ == "__main__":
+    if args.cfg_file is not None:
+        cfg_from_file(args.cfg_file)
+
+    if args.set_cfgs is not None:
+        cfg_from_list(args.set_cfgs)
+    print(cfg.TRAIN.RPN_TRAIN_WEIGHT, cfg.TRAIN.RCNN_TRAIN_WEIGHT)
+    # input()
+
+    cfg.TAG = os.path.splitext(os.path.basename(args.cfg_file))[0]
+
+    if args.train_mode == 'rpn':
+        cfg.RPN.ENABLED = True
+        cfg.RCNN.ENABLED = False
+        root_result_dir = os.path.join('../', 'output', 'rpn', cfg.TAG)
+    elif args.train_mode == 'rcnn':
+        cfg.RCNN.ENABLED = True
+        cfg.RPN.ENABLED = cfg.RPN.FIXED = True
+        root_result_dir = os.path.join('../', 'output', 'rcnn', cfg.TAG)
+    elif args.train_mode == 'rcnn_online':
+        cfg.RCNN.ENABLED = True
+        cfg.RPN.ENABLED = True
+        cfg.RPN.FIXED = False
+        root_result_dir = os.path.join('../', 'output', 'rcnn', cfg.TAG)
+    elif args.train_mode == 'rcnn_offline':
+        cfg.RCNN.ENABLED = True
+        cfg.RPN.ENABLED = False
+        root_result_dir = os.path.join('../', 'output', 'rcnn', cfg.TAG)
+    else:
+        raise NotImplementedError
+
+    if args.output_dir is not None:
+        root_result_dir = args.output_dir
+    os.makedirs(root_result_dir, exist_ok = True)
+
+    log_file = os.path.join(root_result_dir, 'log_train.txt')
+    logger = create_logger(log_file)
+    logger.info('**********************Start logging**********************')
+
+    # log to file
+    gpu_list = os.environ['CUDA_VISIBLE_DEVICES'] if 'CUDA_VISIBLE_DEVICES' in os.environ.keys() else 'ALL'
+    logger.info('CUDA_VISIBLE_DEVICES=%s' % gpu_list)
+
+    for key, val in vars(args).items():
+        logger.info("{:16} {}".format(key, val))
+
+    save_config_to_file(cfg, logger = logger)
+
+    # copy important files to backup
+    backup_dir = os.path.join(root_result_dir, 'backup_files')
+    os.makedirs(backup_dir, exist_ok = True)
+    os.system('cp *.py %s/' % backup_dir)
+    os.system('cp -r ../lib/ %s/' % backup_dir)
+    os.system('cp -r ../tools %s/' % backup_dir)
+    os.system('cp ../*.py %s/' % backup_dir)
+
+    # tensorboard log
+    print(root_result_dir)
+    tb_log = SummaryWriter(logdir = os.path.join(root_result_dir, 'tensorboard'))
+
+    # create dataloader & network & optimizer
+    train_loader, test_loader = create_dataloader(logger, data_path = args.data_path)
+    # model = PointRCNN(num_classes=train_loader.dataset.num_class, use_xyz=True, mode='TRAIN')
+    fn_decorator = train_functions.model_joint_fn_decorator()
+
+    model = PointRCNN(num_classes = train_loader.dataset.num_class, use_xyz = True, mode = 'TRAIN')
+
+    optimizer = create_optimizer(model)
+
+    if args.mgpus:
+        model = nn.DataParallel(model)
+    model.cuda()
+
+    # load checkpoint if possible
+    start_epoch = it = 0
+    last_epoch = -1
+    if args.ckpt is not None:
+        pure_model = model.module if isinstance(model, torch.nn.DataParallel) else model
+        it, start_epoch = train_utils.load_checkpoint(pure_model, optimizer, filename = args.ckpt, logger = logger)
+        last_epoch = start_epoch + 1
+
+    lr_scheduler, bnm_scheduler = create_scheduler(optimizer, total_steps = len(train_loader) * args.epochs,
+                                                   last_epoch = last_epoch)
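+    # total_steps is measured in iterations because the adam_onecycle schedule steps
+    # once per batch; the LambdaLR and BN-momentum schedules step once per epoch.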
+    if args.rpn_ckpt is not None:
+        pure_model = model.module if isinstance(model, torch.nn.DataParallel) else model
+        total_keys = len(pure_model.state_dict().keys())
+        train_utils.load_part_ckpt(pure_model, filename = args.rpn_ckpt, logger = logger, total_keys = total_keys)
+
+    if cfg.TRAIN.LR_WARMUP and cfg.TRAIN.OPTIMIZER != 'adam_onecycle':
+        lr_warmup_scheduler = train_utils.CosineWarmupLR(optimizer, T_max = cfg.TRAIN.WARMUP_EPOCH * len(train_loader),
+                                                         eta_min = cfg.TRAIN.WARMUP_MIN)
+    else:
+        lr_warmup_scheduler = None
+
+    # start training
+    logger.info('**********************Start training**********************')
+    ckpt_dir = os.path.join(root_result_dir, 'ckpt')
+    os.makedirs(ckpt_dir, exist_ok = True)
+    trainer = train_utils.Trainer(
+            model,
+            # train_functions.model_joint_fn_decorator(),
+            fn_decorator,
+            optimizer,
+            ckpt_dir = ckpt_dir,
+            lr_scheduler = lr_scheduler,
+            bnm_scheduler = bnm_scheduler,
+            # model_fn_eval=train_functions.model_joint_fn_decorator(),
+            model_fn_eval = fn_decorator,
+            tb_log = tb_log,
+            eval_frequency = 1,
+            lr_warmup_scheduler = lr_warmup_scheduler,
+            warmup_epoch = cfg.TRAIN.WARMUP_EPOCH,
+            grad_norm_clip = cfg.TRAIN.GRAD_NORM_CLIP
+    )
+
+    trainer.train(
+            it,
+            start_epoch,
+            args.epochs,
+            train_loader,
+            test_loader,
+            ckpt_save_interval = args.ckpt_save_interval,
+            lr_scheduler_each_iter = (cfg.TRAIN.OPTIMIZER == 'adam_onecycle')
+    )
+
+    logger.info('**********************End training**********************')
diff --git a/tools/train_utils/fastai_optim.py b/tools/train_utils/fastai_optim.py
new file mode 100644
index 0000000..f85c1b6
--- /dev/null
+++ b/tools/train_utils/fastai_optim.py
@@ -0,0 +1,261 @@
+# This file is borrowed from https://github.com/traveller59/second.pytorch
+
+from collections.abc import Iterable
+
+import torch
+from torch import nn
+from torch.nn.utils import parameters_to_vector
+from torch._utils import _unflatten_dense_tensors
+
+bn_types = (nn.BatchNorm1d, nn.BatchNorm2d, nn.BatchNorm3d)
+
+
+def split_bn_bias(layer_groups):
+    "Split the layers in `layer_groups` into batchnorm (`bn_types`) and non-batchnorm groups."
+    split_groups = []
+    for l in layer_groups:
+        l1, l2 = [], []
+        for c in l.children():
+            if isinstance(c, bn_types):
+                l2.append(c)
+            else:
+                l1.append(c)
+        split_groups += [nn.Sequential(*l1), nn.Sequential(*l2)]
+    return split_groups
+
+
+def get_master(layer_groups, flat_master: bool = False):
+    "Return two lists, one for the model parameters in FP16 and one for the master parameters in FP32."
+    split_groups = split_bn_bias(layer_groups)
+    model_params = [[param for param in lg.parameters() if param.requires_grad] for lg in split_groups]
+    if flat_master:
+        master_params = []
+        for lg in model_params:
+            if len(lg) != 0:
+                mp = parameters_to_vector([param.data.float() for param in lg])
+                mp = torch.nn.Parameter(mp, requires_grad = True)
+                if mp.grad is None: mp.grad = mp.new(*mp.size())
+                master_params.append([mp])
+            else:
+                master_params.append([])
+        return model_params, master_params
+    else:
+        master_params = [[param.clone().float().detach() for param in lg] for lg in model_params]
+        for mp in master_params:
+            for param in mp: param.requires_grad = True
+        return model_params, master_params
+
+
+def model_g2master_g(model_params, master_params, flat_master: bool = False) -> None:
+    "Copy the `model_params` gradients to `master_params` for the optimizer step."
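+    # flat_master = True keeps a single flattened FP32 tensor per layer group;
+    # otherwise the master copy mirrors the model parameters one-to-one.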
+    if flat_master:
+        for model_group, master_group in zip(model_params, master_params):
+            if len(master_group) != 0:
+                master_group[0].grad.data.copy_(parameters_to_vector([p.grad.data.float() for p in model_group]))
+    else:
+        for model_group, master_group in zip(model_params, master_params):
+            for model, master in zip(model_group, master_group):
+                if model.grad is not None:
+                    if master.grad is None: master.grad = master.data.new(*master.data.size())
+                    master.grad.data.copy_(model.grad.data)
+                else:
+                    master.grad = None
+
+
+def master2model(model_params, master_params, flat_master: bool = False) -> None:
+    "Copy `master_params` to `model_params`."
+    if flat_master:
+        for model_group, master_group in zip(model_params, master_params):
+            if len(model_group) != 0:
+                for model, master in zip(model_group, _unflatten_dense_tensors(master_group[0].data, model_group)):
+                    model.data.copy_(master)
+    else:
+        for model_group, master_group in zip(model_params, master_params):
+            for model, master in zip(model_group, master_group): model.data.copy_(master.data)
+
+
+def listify(p = None, q = None):
+    "Make `p` listy and the same length as `q`."
+    if p is None:
+        p = []
+    elif isinstance(p, str):
+        p = [p]
+    elif not isinstance(p, Iterable):
+        p = [p]
+    n = q if type(q) == int else len(p) if q is None else len(q)
+    if len(p) == 1: p = p * n
+    assert len(p) == n, f'List len mismatch ({len(p)} vs {n})'
+    return list(p)
+
+
+def trainable_params(m: nn.Module):
+    "Return list of trainable params in `m`."
+    res = filter(lambda p: p.requires_grad, m.parameters())
+    return res
+
+
+def is_tuple(x) -> bool: return isinstance(x, tuple)
+
+
+# copy from fastai.
+class OptimWrapper():
+    "Basic wrapper around `opt` to simplify hyper-parameters changes."
+
+    def __init__(self, opt, wd, true_wd: bool = False, bn_wd: bool = True):
+        self.opt, self.true_wd, self.bn_wd = opt, true_wd, bn_wd
+        self.opt_keys = list(self.opt.param_groups[0].keys())
+        self.opt_keys.remove('params')
+        self.read_defaults()
+        self.wd = wd
+
+    @classmethod
+    def create(cls, opt_func, lr,
+               layer_groups, **kwargs):
+        "Create an `optim.Optimizer` from `opt_func` with `lr`. Set lr on `layer_groups`."
+        split_groups = split_bn_bias(layer_groups)
+        opt = opt_func([{ 'params': trainable_params(l), 'lr': 0 } for l in split_groups])
+        opt = cls(opt, **kwargs)
+        opt.lr, opt.opt_func = listify(lr, layer_groups), opt_func
+        return opt
+
+    def new(self, layer_groups):
+        "Create a new `OptimWrapper` from `self` with another `layer_groups` but the same hyper-parameters."
+        opt_func = getattr(self, 'opt_func', self.opt.__class__)
+        split_groups = split_bn_bias(layer_groups)
+        opt = opt_func([{ 'params': trainable_params(l), 'lr': 0 } for l in split_groups])
+        return self.create(opt_func, self.lr, layer_groups, wd = self.wd, true_wd = self.true_wd, bn_wd = self.bn_wd)
+
+    def __repr__(self) -> str:
+        return f'OptimWrapper over {repr(self.opt)}.\nTrue weight decay: {self.true_wd}'
+
+    # Pytorch optimizer methods
+    def step(self) -> None:
+        "Set weight decay and step optimizer."
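+        # true_wd applies decoupled (AdamW-style) weight decay: parameters are scaled
+        # by (1 - lr * wd) here, and the inner optimizer then steps with weight_decay
+        # zeroed out so the decay is not applied twice.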
+        # weight decay outside of optimizer step (AdamW)
+        if self.true_wd:
+            for lr, wd, pg1, pg2 in zip(self._lr, self._wd, self.opt.param_groups[::2], self.opt.param_groups[1::2]):
+                for p in pg1['params']:
+                    # When some parameters are fixed: Shaoshuai Shi
+                    if p.requires_grad is False:
+                        continue
+                    p.data.mul_(1 - wd * lr)
+                if self.bn_wd:
+                    for p in pg2['params']:
+                        # When some parameters are fixed: Shaoshuai Shi
+                        if p.requires_grad is False:
+                            continue
+                        p.data.mul_(1 - wd * lr)
+            self.set_val('weight_decay', listify(0, self._wd))
+        self.opt.step()
+
+    def zero_grad(self) -> None:
+        "Clear optimizer gradients."
+        self.opt.zero_grad()
+
+    # Passthrough to the inner opt.
+    def __getattr__(self, k: str):
+        return getattr(self.opt, k, None)
+
+    def clear(self):
+        "Reset the state of the inner optimizer."
+        sd = self.state_dict()
+        sd['state'] = { }
+        self.load_state_dict(sd)
+
+    # Hyperparameters as properties
+    @property
+    def lr(self) -> float:
+        return self._lr[-1]
+
+    @lr.setter
+    def lr(self, val: float) -> None:
+        self._lr = self.set_val('lr', listify(val, self._lr))
+
+    @property
+    def mom(self) -> float:
+        return self._mom[-1]
+
+    @mom.setter
+    def mom(self, val: float) -> None:
+        if 'momentum' in self.opt_keys:
+            self.set_val('momentum', listify(val, self._mom))
+        elif 'betas' in self.opt_keys:
+            self.set_val('betas', (listify(val, self._mom), self._beta))
+        self._mom = listify(val, self._mom)
+
+    @property
+    def beta(self) -> float:
+        return None if self._beta is None else self._beta[-1]
+
+    @beta.setter
+    def beta(self, val: float) -> None:
+        "Set beta (or alpha as makes sense for given optimizer)."
+        if val is None: return
+        if 'betas' in self.opt_keys:
+            self.set_val('betas', (self._mom, listify(val, self._beta)))
+        elif 'alpha' in self.opt_keys:
+            self.set_val('alpha', listify(val, self._beta))
+        self._beta = listify(val, self._beta)
+
+    @property
+    def wd(self) -> float:
+        return self._wd[-1]
+
+    @wd.setter
+    def wd(self, val: float) -> None:
+        "Set weight decay."
+        if not self.true_wd: self.set_val('weight_decay', listify(val, self._wd), bn_groups = self.bn_wd)
+        self._wd = listify(val, self._wd)
+
+    # Helper functions
+    def read_defaults(self) -> None:
+        "Read the values inside the optimizer for the hyper-parameters."
+        self._beta = None
+        if 'lr' in self.opt_keys: self._lr = self.read_val('lr')
+        if 'momentum' in self.opt_keys: self._mom = self.read_val('momentum')
+        if 'alpha' in self.opt_keys: self._beta = self.read_val('alpha')
+        if 'betas' in self.opt_keys: self._mom, self._beta = self.read_val('betas')
+        if 'weight_decay' in self.opt_keys: self._wd = self.read_val('weight_decay')
+
+    def set_val(self, key: str, val, bn_groups: bool = True):
+        "Set `val` inside the optimizer dictionary at `key`."
+        if is_tuple(val): val = [(v1, v2) for v1, v2 in zip(*val)]
+        for v, pg1, pg2 in zip(val, self.opt.param_groups[::2], self.opt.param_groups[1::2]):
+            pg1[key] = v
+            if bn_groups: pg2[key] = v
+        return val
+
+    def read_val(self, key: str):
+        "Read a hyperparameter `key` in the optimizer dictionary."
+        val = [pg[key] for pg in self.opt.param_groups[::2]]
+        if is_tuple(val[0]): val = [o[0] for o in val], [o[1] for o in val]
+        return val
+
+
+class FastAIMixedOptim(OptimWrapper):
+    @classmethod
+    def create(cls, opt_func, lr,
+               layer_groups, model, flat_master = False, loss_scale = 512.0, **kwargs):
+        "Create an `optim.Optimizer` from `opt_func` with `lr`. Set lr on `layer_groups`."
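+        # Mixed-precision setup: the model keeps its (possibly FP16) parameters while
+        # the optimizer steps on FP32 master copies; gradients are divided by
+        # loss_scale in step() before the update.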
+        opt = OptimWrapper.create(opt_func, lr, layer_groups, **kwargs)
+        opt.model_params, opt.master_params = get_master(layer_groups, flat_master)
+        opt.flat_master = flat_master
+        opt.loss_scale = loss_scale
+        opt.model = model
+        # Changes the optimizer so that the optimization step is done in FP32.
+        # opt = self.learn.opt
+        mom, wd, beta = opt.mom, opt.wd, opt.beta
+        lrs = [lr for lr in opt._lr for _ in range(2)]
+        opt_params = [{ 'params': mp, 'lr': lr } for mp, lr in zip(opt.master_params, lrs)]
+        opt.opt = opt_func(opt_params)
+        opt.mom, opt.wd, opt.beta = mom, wd, beta
+        return opt
+
+    def step(self):
+        model_g2master_g(self.model_params, self.master_params, self.flat_master)
+        for group in self.master_params:
+            for param in group: param.grad.div_(self.loss_scale)
+        super(FastAIMixedOptim, self).step()
+        self.model.zero_grad()
+        # Update the params from master to model.
+        master2model(self.model_params, self.master_params, self.flat_master)
diff --git a/tools/train_utils/learning_schedules_fastai.py b/tools/train_utils/learning_schedules_fastai.py
new file mode 100644
index 0000000..78722d9
--- /dev/null
+++ b/tools/train_utils/learning_schedules_fastai.py
@@ -0,0 +1,98 @@
+# This file is borrowed from https://github.com/traveller59/second.pytorch
+
+import numpy as np
+from functools import partial
+from .fastai_optim import OptimWrapper
+
+
+class LRSchedulerStep(object):
+    def __init__(self, fai_optimizer: OptimWrapper, total_step, lr_phases,
+                 mom_phases):
+        # if not isinstance(fai_optimizer, OptimWrapper):
+        #     raise TypeError('{} is not a fastai OptimWrapper'.format(
+        #         type(fai_optimizer).__name__))
+        self.optimizer = fai_optimizer
+        self.total_step = total_step
+        self.lr_phases = []
+
+        for i, (start, lambda_func) in enumerate(lr_phases):
+            if len(self.lr_phases) != 0:
+                assert self.lr_phases[-1][0] < start
+            if isinstance(lambda_func, str):
+                lambda_func = eval(lambda_func)
+            if i < len(lr_phases) - 1:
+                self.lr_phases.append((int(start * total_step), int(lr_phases[i + 1][0] * total_step), lambda_func))
+            else:
+                self.lr_phases.append((int(start * total_step), total_step, lambda_func))
+        assert self.lr_phases[0][0] == 0
+        self.mom_phases = []
+        for i, (start, lambda_func) in enumerate(mom_phases):
+            if len(self.mom_phases) != 0:
+                assert self.mom_phases[-1][0] < start
+            if isinstance(lambda_func, str):
+                lambda_func = eval(lambda_func)
+            if i < len(mom_phases) - 1:
+                self.mom_phases.append((int(start * total_step), int(mom_phases[i + 1][0] * total_step), lambda_func))
+            else:
+                self.mom_phases.append((int(start * total_step), total_step, lambda_func))
+        assert self.mom_phases[0][0] == 0
+
+    def step(self, step):
+        for start, end, func in self.lr_phases:
+            if step >= start:
+                self.optimizer.lr = func((step - start) / (end - start))
+        for start, end, func in self.mom_phases:
+            if step >= start:
+                self.optimizer.mom = func((step - start) / (end - start))
+
+
+def annealing_cos(start, end, pct):
+    "Cosine anneal from `start` to `end` as pct goes from 0.0 to 1.0."
+    # print(pct, start, end)
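+    # Half-cosine interpolation: returns `start` at pct = 0 and `end` at pct = 1,
+    # e.g. annealing_cos(3e-4, 3e-3, 0.5) == 0.00165, the midpoint of the ramp.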
+    cos_out = np.cos(np.pi * pct) + 1
+    return end + (start - end) / 2 * cos_out
+
+
+class OneCycle(LRSchedulerStep):
+    def __init__(self, fai_optimizer, total_step, lr_max, moms, div_factor,
+                 pct_start):
+        self.lr_max = lr_max
+        self.moms = moms
+        self.div_factor = div_factor
+        self.pct_start = pct_start
+        a1 = int(total_step * self.pct_start)
+        a2 = total_step - a1
+        low_lr = self.lr_max / self.div_factor
+        lr_phases = ((0, partial(annealing_cos, low_lr, self.lr_max)),
+                     (self.pct_start,
+                      partial(annealing_cos, self.lr_max, low_lr / 1e4)))
+        mom_phases = ((0, partial(annealing_cos, *self.moms)),
+                      (self.pct_start, partial(annealing_cos,
+                                               *self.moms[::-1])))
+        fai_optimizer.lr, fai_optimizer.mom = low_lr, self.moms[0]
+        super().__init__(fai_optimizer, total_step, lr_phases, mom_phases)
+
+
+class FakeOptim:
+    def __init__(self):
+        self.lr = 0
+        self.mom = 0
+
+
+if __name__ == "__main__":
+    import matplotlib.pyplot as plt
+
+    opt = FakeOptim()  # 3e-3, wd=0.4, div_factor=10
+    schd = OneCycle(opt, 100, 3e-3, (0.95, 0.85), 10.0, 0.1)
+
+    lrs = []
+    moms = []
+    for i in range(100):
+        schd.step(i)
+        lrs.append(opt.lr)
+        moms.append(opt.mom)
+    plt.plot(lrs)
+    # plt.plot(moms)
+    plt.show()
+    plt.plot(moms)
+    plt.show()
diff --git a/tools/train_utils/train_utils.py b/tools/train_utils/train_utils.py
new file mode 100644
index 0000000..6c43e45
--- /dev/null
+++ b/tools/train_utils/train_utils.py
@@ -0,0 +1,242 @@
+import logging
+import os
+import torch
+import torch.nn as nn
+from torch.nn.utils import clip_grad_norm_
+import tqdm
+import torch.optim.lr_scheduler as lr_sched
+import math
+from lib.config import cfg
+
+
+logging.getLogger(__name__).addHandler(logging.StreamHandler())
+cur_logger = logging.getLogger(__name__)
+
+
+def set_bn_momentum_default(bn_momentum):
+    def fn(m):
+        if isinstance(m, (nn.BatchNorm1d, nn.BatchNorm2d, nn.BatchNorm3d)):
+            m.momentum = bn_momentum
+
+    return fn
+
+
+class BNMomentumScheduler(object):
+
+    def __init__(
+            self, model, bn_lambda, last_epoch = -1,
+            setter = set_bn_momentum_default
+    ):
+        if not isinstance(model, nn.Module):
+            raise RuntimeError("Class '{}' is not a PyTorch nn Module".format(type(model).__name__))
+
+        self.model = model
+        self.setter = setter
+        self.lmbd = bn_lambda
+
+        self.step(last_epoch + 1)
+        self.last_epoch = last_epoch
+
+    def step(self, epoch = None):
+        if epoch is None:
+            epoch = self.last_epoch + 1
+
+        self.last_epoch = epoch
+        self.model.apply(self.setter(self.lmbd(epoch)))
+
+
+class CosineWarmupLR(lr_sched._LRScheduler):
+    def __init__(self, optimizer, T_max, eta_min = 0, last_epoch = -1):
+        self.T_max = T_max
+        self.eta_min = eta_min
+        super(CosineWarmupLR, self).__init__(optimizer, last_epoch)
+
+    def get_lr(self):
+        return [self.eta_min + (base_lr - self.eta_min) *
+                (1 - math.cos(math.pi * self.last_epoch / self.T_max)) / 2
+                for base_lr in self.base_lrs]
+
+
+def checkpoint_state(model = None, optimizer = None, epoch = None, it = None):
+    optim_state = optimizer.state_dict() if optimizer is not None else None
+    if model is not None:
+        if isinstance(model, torch.nn.DataParallel):
+            model_state = model.module.state_dict()
+        else:
+            model_state = model.state_dict()
+    else:
+        model_state = None
+
+    return { 'epoch': epoch, 'it': it, 'model_state': model_state, 'optimizer_state': optim_state }
+
+
+def save_checkpoint(state, filename = 'checkpoint'):
+    filename = '{}.pth'.format(filename)
+    torch.save(state, filename)
+
+
+def load_checkpoint(model = None, optimizer = None, filename = 'checkpoint', logger = cur_logger):
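+    # Restores model/optimizer state in place and returns (it, epoch) recorded in
+    # the checkpoint so training can resume where it stopped.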
+    if os.path.isfile(filename):
+        logger.info("==> Loading from checkpoint '{}'".format(filename))
+        checkpoint = torch.load(filename)
+        epoch = checkpoint['epoch'] if 'epoch' in checkpoint.keys() else -1
+        it = checkpoint.get('it', 0.0)
+        if model is not None and checkpoint['model_state'] is not None:
+            model.load_state_dict(checkpoint['model_state'])
+        if optimizer is not None and checkpoint['optimizer_state'] is not None:
+            optimizer.load_state_dict(checkpoint['optimizer_state'])
+        logger.info("==> Done")
+    else:
+        raise FileNotFoundError
+
+    return it, epoch
+
+
+def load_part_ckpt(model, filename, logger = cur_logger, total_keys = -1):
+    if os.path.isfile(filename):
+        logger.info("==> Loading part model from checkpoint '{}'".format(filename))
+        checkpoint = torch.load(filename)
+        model_state = checkpoint['model_state']
+
+        update_model_state = { key: val for key, val in model_state.items() if key in model.state_dict() }
+        state_dict = model.state_dict()
+        state_dict.update(update_model_state)
+        model.load_state_dict(state_dict)
+
+        update_keys = len(update_model_state.keys())
+        if update_keys == 0:
+            raise RuntimeError
+        logger.info("==> Done (loaded %d/%d)" % (update_keys, total_keys))
+    else:
+        raise FileNotFoundError
+
+
+class Trainer(object):
+    def __init__(self, model, model_fn, optimizer, ckpt_dir, lr_scheduler, bnm_scheduler,
+                 model_fn_eval, tb_log, eval_frequency = 1, lr_warmup_scheduler = None, warmup_epoch = -1,
+                 grad_norm_clip = 1.0):
+        self.model, self.model_fn, self.optimizer, self.lr_scheduler, self.bnm_scheduler, self.model_fn_eval = \
+            model, model_fn, optimizer, lr_scheduler, bnm_scheduler, model_fn_eval
+
+        self.ckpt_dir = ckpt_dir
+        self.eval_frequency = eval_frequency
+        self.tb_log = tb_log
+        self.lr_warmup_scheduler = lr_warmup_scheduler
+        self.warmup_epoch = warmup_epoch
+        self.grad_norm_clip = grad_norm_clip
+
+    def _train_it(self, batch):
+        self.model.train()
+
+        self.optimizer.zero_grad()
+        loss, tb_dict, disp_dict = self.model_fn(self.model, batch)
+
+        loss.backward()
+        clip_grad_norm_(self.model.parameters(), self.grad_norm_clip)
+        self.optimizer.step()
+
+        return loss.item(), tb_dict, disp_dict
+
+    def eval_epoch(self, d_loader):
+        self.model.eval()
+
+        eval_dict = { }
+        total_loss = count = 0.0
+
+        # eval one epoch
+        for i, data in tqdm.tqdm(enumerate(d_loader, 0), total = len(d_loader), leave = False, desc = 'val'):
+            self.optimizer.zero_grad()
+
+            loss, tb_dict, disp_dict = self.model_fn_eval(self.model, data)
+
+            total_loss += loss.item()
+            count += 1
+            for k, v in tb_dict.items():
+                eval_dict[k] = eval_dict.get(k, 0) + v
+
+        # statistics for this epoch
+        for k, v in eval_dict.items():
+            eval_dict[k] = eval_dict[k] / max(count, 1)
+
+        cur_performance = 0
+        if 'recalled_cnt' in eval_dict:
+            eval_dict['recall'] = eval_dict['recalled_cnt'] / max(eval_dict['gt_cnt'], 1)
+            cur_performance = eval_dict['recall']
+        elif 'iou' in eval_dict:
+            cur_performance = eval_dict['iou']
+
+        return total_loss / count, eval_dict, cur_performance
+
+    def train(self, start_it, start_epoch, n_epochs, train_loader, test_loader = None, ckpt_save_interval = 5,
+              lr_scheduler_each_iter = False):
+        eval_frequency = self.eval_frequency if self.eval_frequency > 0 else 1
+
+        it = start_it
+        with tqdm.trange(start_epoch, n_epochs, desc = 'epochs') as tbar, \
+                tqdm.tqdm(total = len(train_loader), leave = False, desc = 'train') as pbar:
+
+            for epoch in tbar:
+                if self.lr_scheduler is not None and self.warmup_epoch <= epoch and (not lr_scheduler_each_iter):
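+                    # Epoch-level schedulers (LambdaLR) step once per epoch here; with
+                    # adam_onecycle, lr_scheduler_each_iter is True and OneCycle steps
+                    # per iteration inside the batch loop instead.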
+                    self.lr_scheduler.step(epoch)
+
+                if self.bnm_scheduler is not None:
+                    self.bnm_scheduler.step(it)
+                    self.tb_log.add_scalar('bn_momentum', self.bnm_scheduler.lmbd(epoch), it)
+
+                # train one epoch
+                for cur_it, batch in enumerate(train_loader):
+                    if lr_scheduler_each_iter:
+                        self.lr_scheduler.step(it)
+                        cur_lr = float(self.optimizer.lr)
+                        self.tb_log.add_scalar('learning_rate', cur_lr, it)
+                    else:
+                        if self.lr_warmup_scheduler is not None and epoch < self.warmup_epoch:
+                            self.lr_warmup_scheduler.step(it)
+                            cur_lr = self.lr_warmup_scheduler.get_lr()[0]
+                        else:
+                            cur_lr = self.lr_scheduler.get_lr()[0]
+
+                    loss, tb_dict, disp_dict = self._train_it(batch)
+                    it += 1
+
+                    disp_dict.update({ 'loss': loss, 'lr': cur_lr })
+                    # print('#################trained_epoch:', epoch)
+                    # print('##################n_epochs * cfg.SAVE_MODEL_PREP:', n_epochs * cfg.SAVE_MODEL_PREP)
+
+                    # log to console and tensorboard
+                    pbar.update()
+                    pbar.set_postfix(dict(total_it = it))
+                    tbar.set_postfix(disp_dict)
+                    tbar.refresh()
+
+                    if self.tb_log is not None:
+                        self.tb_log.add_scalar('train_loss', loss, it)
+                        self.tb_log.add_scalar('learning_rate', cur_lr, it)
+                        for key, val in tb_dict.items():
+                            self.tb_log.add_scalar('train_' + key, val, it)
+
+                # save trained model
+                trained_epoch = epoch + 1
+                if (trained_epoch % ckpt_save_interval == 0) and (trained_epoch >= n_epochs * cfg.SAVE_MODEL_PREP):
+                    ckpt_name = os.path.join(self.ckpt_dir, 'checkpoint_epoch_%d' % trained_epoch)
+                    save_checkpoint(
+                            checkpoint_state(self.model, self.optimizer, trained_epoch, it), filename = ckpt_name,
+                    )
+
+                # eval one epoch
+                if (epoch % eval_frequency) == 0:
+                    pbar.close()
+                    if test_loader is not None:
+                        with torch.set_grad_enabled(False):
+                            val_loss, eval_dict, cur_performance = self.eval_epoch(test_loader)
+
+                        if self.tb_log is not None:
+                            self.tb_log.add_scalar('val_loss', val_loss, it)
+                            for key, val in eval_dict.items():
+                                self.tb_log.add_scalar('val_' + key, val, it)
+
+                    pbar.close()
+                    pbar = tqdm.tqdm(total = len(train_loader), leave = False, desc = 'train')
+                    pbar.set_postfix(dict(total_it = it))
+
+        return None