From 1a4726e1e3a15151ae194578183afde86b8154d4 Mon Sep 17 00:00:00 2001
From: jtwalsh0
Date: Mon, 5 Nov 2018 20:17:32 +0700
Subject: [PATCH] imported post-summer changes to the public repo
---
README.md | 96 +-
config_examples/config.yml | 13 +-
resources/Object Detection_Mask RCNN/LICENSE | 23 +
.../Object Detection_Mask RCNN/README.md | 225 ++
.../images/frame2.jpg | Bin 0 -> 772486 bytes
.../mrcnn/__init__.py | 1 +
.../mrcnn/config.py | 227 ++
.../Object Detection_Mask RCNN/mrcnn/model.py | 2875 +++++++++++++++++
.../mrcnn/parallel_model.py | 175 +
.../Object Detection_Mask RCNN/mrcnn/utils.py | 893 +++++
.../mrcnn/visualize.py | 502 +++
.../samples/coco/coco.py | 534 +++
.../samples/coco/inspect_data.ipynb | 1048 ++++++
.../samples/coco/inspect_model.ipynb | 1388 ++++++++
.../samples/coco/inspect_weights.ipynb | 279 ++
.../samples/demo-Copy1.ipynb | 228 ++
.../samples/demo-Copy2.ipynb | 151 +
.../samples/demo.ipynb | 238 ++
.../samples/image.png | Bin 0 -> 2535234 bytes
.../Object Detection_Mask RCNN/setup.cfg | 4 +
resources/Object Detection_Mask RCNN/setup.py | 68 +
resources/find_events.ipynb | 587 ++++
resources/semantic_seg/LICENSE | 29 +
resources/semantic_seg/README.md | 169 +
resources/semantic_seg/dataset/dataset.py | 45 +
resources/semantic_seg/dataset/transform.py | 39 +
.../experiments/densenet264_ipabn_lr_256.json | 26 +
.../resnext101_ipabn-sync_lr_256.json | 26 +
.../experiments/resnext101_ipabn_lr_512.json | 26 +
.../experiments/resnext101_stdbn_lr_256.json | 26 +
.../experiments/resnext152_ipabn_lr_256.json | 26 +
.../wider_resnet38_ipabn_lr_256.json | 28 +
resources/semantic_seg/inplace_abn.png | Bin 0 -> 86754 bytes
resources/semantic_seg/licenses.csv | 11 +
resources/semantic_seg/median_cameras.ipynb | 119 +
resources/semantic_seg/models/__init__.py | 3 +
resources/semantic_seg/models/_util.py | 5 +
resources/semantic_seg/models/densenet.py | 120 +
resources/semantic_seg/models/resnext.py | 132 +
resources/semantic_seg/models/wider_resnet.py | 194 ++
resources/semantic_seg/modules/__init__.py | 5 +
resources/semantic_seg/modules/bn.py | 174 +
resources/semantic_seg/modules/deeplab.py | 84 +
resources/semantic_seg/modules/dense.py | 42 +
resources/semantic_seg/modules/functions.py | 256 ++
resources/semantic_seg/modules/misc.py | 11 +
resources/semantic_seg/modules/residual.py | 88 +
resources/semantic_seg/modules/src/common.h | 108 +
.../semantic_seg/modules/src/inplace_abn.cpp | 75 +
.../semantic_seg/modules/src/inplace_abn.h | 29 +
.../modules/src/inplace_abn_cpu.cpp | 120 +
.../modules/src/inplace_abn_cuda.cu | 346 ++
resources/semantic_seg/test_vistas.py | 326 ++
resources/validation_image_comparison.ipynb | 145 +
.../docker-compose.override.yml | 7 +
.../video_annotation_guide.md | 2 +-
.../video_quality_tests/average_fps.ipynb | 300 ++
.../video_quality_tests/frame_spacings.ipynb | 260 ++
scratch/EDA.ipynb | 16 +-
...ta_Traffic_Exploratory_Data_Analysis.ipynb | 2 +-
scratch/Preprocessing.ipynb | 2 +-
scratch/Script.sql | 210 ++
scratch/Video_download.ipynb | 8 +-
scratch/YOLO/Yolo on vid.ipynb | 2 +-
scratch/__init__.py | 0
scratch/checkcomplete.py | 33 +
scratch/checkcomplete_ffmpeg.py | 28 +
scratch/extract_subtitles.py | 49 +
scratch/extract_subtitles.sh | 14 +
scratch/extract_video_segments.sh | 33 +
scratch/extract_video_segments_new.sh | 34 +
scratch/ffprobe_framelist.sh | 7 +
scratch/hashing.py | 35 +
scratch/metadata_contiguous.py | 111 +
scratch/metadata_new.py | 57 +
scratch/process_subtitles.py | 30 +
scratch/process_videos.py | 56 +
scratch/remote_play.sh | 5 +
scratch/sample_videos.py | 59 +
scratch/sample_videos_new.py | 71 +
scratch/scripts/download.py | 52 +
scratch/scripts/hashcheck.py | 116 +
scratch/scripts/hashcheck_final.py | 126 +
scratch/scripts/s3etag.sh | 47 +
scratch/scripts/semantic logic.py | 68 +
scratch/scripts/vid_downloader.py | 53 +
scratch/scripts/vid_downloader_our_cams.py | 53 +
src/main/precision_recall.py | 487 ++-
src/modules/data/database_io.py | 34 +-
src/modules/pipeline/workers/workers_list.py | 2 +
90 files changed, 14662 insertions(+), 195 deletions(-)
create mode 100644 resources/Object Detection_Mask RCNN/LICENSE
create mode 100644 resources/Object Detection_Mask RCNN/README.md
create mode 100644 resources/Object Detection_Mask RCNN/images/frame2.jpg
create mode 100644 resources/Object Detection_Mask RCNN/mrcnn/__init__.py
create mode 100644 resources/Object Detection_Mask RCNN/mrcnn/config.py
create mode 100644 resources/Object Detection_Mask RCNN/mrcnn/model.py
create mode 100644 resources/Object Detection_Mask RCNN/mrcnn/parallel_model.py
create mode 100644 resources/Object Detection_Mask RCNN/mrcnn/utils.py
create mode 100644 resources/Object Detection_Mask RCNN/mrcnn/visualize.py
create mode 100644 resources/Object Detection_Mask RCNN/samples/coco/coco.py
create mode 100644 resources/Object Detection_Mask RCNN/samples/coco/inspect_data.ipynb
create mode 100644 resources/Object Detection_Mask RCNN/samples/coco/inspect_model.ipynb
create mode 100644 resources/Object Detection_Mask RCNN/samples/coco/inspect_weights.ipynb
create mode 100644 resources/Object Detection_Mask RCNN/samples/demo-Copy1.ipynb
create mode 100644 resources/Object Detection_Mask RCNN/samples/demo-Copy2.ipynb
create mode 100644 resources/Object Detection_Mask RCNN/samples/demo.ipynb
create mode 100644 resources/Object Detection_Mask RCNN/samples/image.png
create mode 100644 resources/Object Detection_Mask RCNN/setup.cfg
create mode 100644 resources/Object Detection_Mask RCNN/setup.py
create mode 100644 resources/find_events.ipynb
create mode 100644 resources/semantic_seg/LICENSE
create mode 100644 resources/semantic_seg/README.md
create mode 100644 resources/semantic_seg/dataset/dataset.py
create mode 100644 resources/semantic_seg/dataset/transform.py
create mode 100644 resources/semantic_seg/experiments/densenet264_ipabn_lr_256.json
create mode 100644 resources/semantic_seg/experiments/resnext101_ipabn-sync_lr_256.json
create mode 100644 resources/semantic_seg/experiments/resnext101_ipabn_lr_512.json
create mode 100644 resources/semantic_seg/experiments/resnext101_stdbn_lr_256.json
create mode 100644 resources/semantic_seg/experiments/resnext152_ipabn_lr_256.json
create mode 100644 resources/semantic_seg/experiments/wider_resnet38_ipabn_lr_256.json
create mode 100644 resources/semantic_seg/inplace_abn.png
create mode 100644 resources/semantic_seg/licenses.csv
create mode 100644 resources/semantic_seg/median_cameras.ipynb
create mode 100644 resources/semantic_seg/models/__init__.py
create mode 100644 resources/semantic_seg/models/_util.py
create mode 100644 resources/semantic_seg/models/densenet.py
create mode 100644 resources/semantic_seg/models/resnext.py
create mode 100644 resources/semantic_seg/models/wider_resnet.py
create mode 100644 resources/semantic_seg/modules/__init__.py
create mode 100644 resources/semantic_seg/modules/bn.py
create mode 100644 resources/semantic_seg/modules/deeplab.py
create mode 100644 resources/semantic_seg/modules/dense.py
create mode 100644 resources/semantic_seg/modules/functions.py
create mode 100644 resources/semantic_seg/modules/misc.py
create mode 100644 resources/semantic_seg/modules/residual.py
create mode 100644 resources/semantic_seg/modules/src/common.h
create mode 100644 resources/semantic_seg/modules/src/inplace_abn.cpp
create mode 100644 resources/semantic_seg/modules/src/inplace_abn.h
create mode 100644 resources/semantic_seg/modules/src/inplace_abn_cpu.cpp
create mode 100644 resources/semantic_seg/modules/src/inplace_abn_cuda.cu
create mode 100644 resources/semantic_seg/test_vistas.py
create mode 100644 resources/validation_image_comparison.ipynb
create mode 100644 resources/video annotation/docker-compose.override.yml
create mode 100644 resources/video_quality_tests/average_fps.ipynb
create mode 100644 resources/video_quality_tests/frame_spacings.ipynb
create mode 100644 scratch/Script.sql
create mode 100644 scratch/__init__.py
create mode 100644 scratch/checkcomplete.py
create mode 100644 scratch/checkcomplete_ffmpeg.py
create mode 100644 scratch/extract_subtitles.py
create mode 100755 scratch/extract_subtitles.sh
create mode 100755 scratch/extract_video_segments.sh
create mode 100755 scratch/extract_video_segments_new.sh
create mode 100755 scratch/ffprobe_framelist.sh
create mode 100644 scratch/hashing.py
create mode 100644 scratch/metadata_contiguous.py
create mode 100644 scratch/metadata_new.py
create mode 100644 scratch/process_subtitles.py
create mode 100644 scratch/process_videos.py
create mode 100644 scratch/remote_play.sh
create mode 100644 scratch/sample_videos.py
create mode 100644 scratch/sample_videos_new.py
create mode 100644 scratch/scripts/download.py
create mode 100644 scratch/scripts/hashcheck.py
create mode 100644 scratch/scripts/hashcheck_final.py
create mode 100755 scratch/scripts/s3etag.sh
create mode 100644 scratch/scripts/semantic logic.py
create mode 100644 scratch/scripts/vid_downloader.py
create mode 100644 scratch/scripts/vid_downloader_our_cams.py
diff --git a/README.md b/README.md
index 57079ca..98eb0f0 100644
--- a/README.md
+++ b/README.md
@@ -1,16 +1,16 @@
# Improving Traffic Safety through (Traffic) Video Analysis
-This repository that contains the code necessary to replicate the work done by fellows at the University of Chicago's Data Science for Social Good (DSSG) 2018 summer fellowship. In partnership with Jakarta Smart City (JSC) and United Nations Global Pulse (UNGP), DSSG developed a pipeline that can be used to analyze traffic videos taken from Jakarta's vast network of CCTV cameras. The pipeline is able to ingest a video, perform a range of computer vision techniques (object detection, object classification, optical flow), and output the results to a PostgreSQL database. This repository allows users to download all of the files necessary to build and launch the pipeline, and customize it as necessary. We also include some other tools related to extracting video metadata, randomly sampling segments from clean videos, and evaluationg the performance of various model components.
+This repository contains the code necessary to replicate the work done by fellows at the University of Chicago's Data Science for Social Good (DSSG) 2018 summer fellowship. In partnership with Jakarta Smart City (JSC) and United Nations Global Pulse (UNGP), DSSG developed a pipeline that can be used to analyze traffic videos taken from Jakarta's vast network of CCTV cameras. The pipeline is able to ingest a video, perform a range of computer vision techniques (object detection, object classification, optical flow), and output the results to a PostgreSQL database. This repository allows users to download all of the files necessary to build and launch the pipeline, and customize it as necessary. We also include some other tools related to extracting video metadata, randomly sampling segments from clean videos, and evaluating the performance of various model components.
## Table of Contents
-1. [Introduction](https://github.com/dssg/jakarta_smart_city_traffic_safety/tree/dev#introduction)
-2. [Setup](https://github.com/dssg/jakarta_smart_city_traffic_safety/tree/dev#setup)
-3. [Modules Outside the Pipeline](https://github.com/dssg/jakarta_smart_city_traffic_safety/tree/dev#modules-outside-the-pipeline)
-4. [Modules Inside the Pipeline](https://github.com/dssg/jakarta_smart_city_traffic_safety/tree/dev#modules-inside-the-pipeline)
-5. [Testing the Pipeline](https://github.com/dssg/jakarta_smart_city_traffic_safety/tree/dev#testing-the-pipeline)
-6. [Suggested Workflow](https://github.com/dssg/jakarta_smart_city_traffic_safety/tree/dev#suggested-workflow)
-7. [Contributors](https://github.com/dssg/jakarta_smart_city_traffic_safety/tree/dev#contributors)
+1. [Introduction](https://github.com/dssg/jakarta_smart_city_traffic_safety#introduction)
+2. [Setup](https://github.com/dssg/jakarta_smart_city_traffic_safety#setup)
+3. [Non-Pipeline Functionality](https://github.com/dssg/jakarta_smart_city_traffic_safety#non-pipeline-functionality)
+4. [Pipeline Functionality](https://github.com/dssg/jakarta_smart_city_traffic_safety#pipeline-functionality)
+5. [Testing the Pipeline](https://github.com/dssg/jakarta_smart_city_traffic_safety#testing-the-pipeline)
+6. [Workflows](https://github.com/dssg/jakarta_smart_city_traffic_safety#workflows)
+7. [Contributors](https://github.com/dssg/jakarta_smart_city_traffic_safety#contributors)
## Introduction
@@ -28,9 +28,10 @@ For three months they learn, hone, and apply their data science, analytical, and
-Jakarta Smart City (JSC) is a government initiative to develop a multi-use, crowdsourced, big data platform to close the digital divide and facilitate data transparency and citizen communication with government officials in Jakarta. The initiative includes a website, [https://smartcity.jakarta.go.id](smartcity.jakarta.go.id), as well as eight (8) citizen complaint channels, such as Qlue, LAPOR!, Balai Warga, popular social media, SMS platform, and e-mail, and CROP Jakarta for civil servants and officials. smartcity.jakarta.go.id uses the Google Maps engine and data from the traffic application Waze.
+Jakarta Smart City (JSC) is a government initiative to develop a multi-use, crowdsourced, big data platform to close the digital divide and facilitate data transparency and citizen communication with government officials in Jakarta. The initiative includes a website, [https://smartcity.jakarta.go.id](smartcity.jakarta.go.id), as well as eight (8) citizen complaint channels, such as Qlue, LAPOR!, Balai Warga, popular social media, SMS platform, and e-mail, and CROP Jakarta for civil servants and officials.
+smartcity.jakarta.go.id uses the Google Maps engine and data from the traffic application Waze.
-United Nations Global Pulse is an organization in the United Nations system that focuses on harnessing big data, artificial intelligence, and other emerging technologies for sustainable development and humanitarian action. It is headquartered in New York City, with additional Pulse Labs in Jakarta, Indonesia and Kampala, Uganda. Global Pulse aims to bring together a wide range of stakeholders (academia, agencies, local governments, etc.) to use Big Data in support of the UN’s broader development goals.
+United Nations Global Pulse is an organization in the United Nations system that focuses on harnessing big data, artificial intelligence, and other emerging technologies for sustainable development and humanitarian action. It is headquartered in New York City, with additional Pulse Labs in Jakarta, Indonesia and Kampala, Uganda. Global Pulse aims to bring together a wide range of stakeholders (academia, agencies, local governments, etc.) to use Big Data in support of the UN’s broader development goals.
#### Code Base
@@ -74,6 +75,7 @@ This project makes use of General Purpose GPU computing, so a GPU is highly reco
* Some code in this repository interacts with [Amazon AWS S3](https://aws.amazon.com/s3/) buckets, though this functionality is not central to any of the main functions of this repository.
+
#### Environment Variables
The following environment variables should be set:
@@ -85,7 +87,7 @@ The following environment variables should be set:
Many system specifications such as model parameters and file paths are contained in `YAML` configuration files found in `project_root/config`. During setup, our system reads all the files with `.yml` or `.yaml` extensions in that directory and combines them into a single configuration object. This allows flexibility in organizing config files.
We recommend using four separate config files:
-* `config.yml` contains general configuration options.
+* `config.yml` contains general configuration options, including validation parameters.
* `creds.yml` contains credentials necessary for accessing the PostgreSQL database and Amazon AWS services.
* `paths.yml` contains relevant file paths for input and output files.
* `pipeline.yml` defines that pipeline that should be run.
@@ -126,17 +128,23 @@ For long-term storage, raw videos can be uploaded to an S3 bucket by running:
#### Downloading New Videos
-New videos can be downloaded from Jakarta's Open Data Portal, and we provide a Python script that automates this process. There are hundreds of CCTV cameras posted around the city, and users can watch both live streams or streams going back approximately 48 hours. We provide a script that allows a user to specify which cameras they would like to download video from, as well as the amount of video they would like to download. The script currently retrieves the current timestamp, and searches for videos in the previous 48 hours.
+New videos can be downloaded from Jakarta's Open Data Portal. We provide a Python script that automates this process. There are hundreds of CCTV cameras posted around the city, and users can watch both live streams or streams going back approximately 48 hours. The script allows a user to specify which cameras they would like to download video from, as well as the amount of video they would like to download. The script retrieves the current timestamp, and searches for videos in the previous 48 hours.
To run this script:
`python src/main/download_videos_from_web_portal.py`
-The resulting files are in `.mp4` format and placed in the directory specified by`conf>dirs>downloaded_videos`.
+The resulting files are in `.mp4` format and placed in the directory specified by `conf>dirs>downloaded_videos`. Note these videos will not have subtitles.
+
+#### Assessing video quality
+
+When starting to work with the videos, it might be important to assess their quality. A simple first check is to plot how consistent the frame rate of the videos is. We found that the CCTV video often has dropped frames, which lead to a drop in the average frame rate. An example of how to perform this check can be found in [`resources/video_quality_tests/average_fps.ipynb`](https://github.com/dssg/jakarta_smart_city_traffic_safety/tree/master/resources/video_quality_tests/average_fps.ipynb), using the frame statistics extracted above.
+
+In videos with framerates below 25 fps, we noted that whenever frames were dropped, they were glued together with a time between frames that was either the time with no frames (if that was on the order of a few seconds), or 0.04s as if the video framerate was 25 fps. Therefore we provide functions that will detect segments with no dropped frames, by detecting if the spacing between frames is smaller or larger than expected. This provided completely clean segments for at least two of the cameras used in this work. The function can be found in [`resources/video_quality_tests/frame_spacings.ipynb`](https://github.com/dssg/jakarta_smart_city_traffic_safety/tree/master/resources/video_quality_tests/frame_spacings.ipynb).
#### Sampling and Chunking Videos
-You may want to randomly sample videos from different cameras, times, and locations in order to evaluate the pipeline's performance under a variety of circumstances. There are multiple scripts which can be used for this:
+You may want to randomly sample videos from different cameras, times, and locations in order to evaluate the pipeline's performance under a variety of circumstances. There are multiple scripts which can be used for this.
We opted to run validation only on clean segments of video. The following script identifies clean segments using the subtitle files found in `conf>dirs>subtitles` and places the resulting list of clean segments in `conf>dirs>video_samples`:
@@ -146,7 +154,7 @@ Once clean segments have been identified, we identify a random sample from those
`python src/main/sample_from_contiguous_segments.py`
-The output file is placed in `conf>dirs>video_samples`. To extract the video segments described in this files, you can run:
+The output file is placed in `conf>dirs>video_samples`. This file is also used to read information about the segments when validating. To extract the video segments described in this file, you can run:
`python src/main/extract_video_samples_from_videos.py`
@@ -155,23 +163,23 @@ which will produce sample files contained in `conf>dirs>video_samples`.
#### Video Annotation
-Labeling is an important part of any machine learning application. Because this pipeline is centered around object detection, classification, and motion determination, there are several outputs that benefit from validation tools. We use the Computer Vision Annotation Tool (CVAT) which allows a user to validate all of these outputs. The tool allows users to label video segments by providing bounding boxes, class labels, and trajectories to each object in a video. The results of this labeling are placed to a table, and can be compared against the results of the pipeline.
+Labeling is an important part of any machine learning application. Because this pipeline is centered around object detection, classification, and motion determination, there are several outputs that benefit from validation tools. We use the Computer Vision Annotation Tool (CVAT), which enables a user to validate all of these outputs. With the tool, users can add bounding boxes, class labels, and trajectories to each object in a video. The results of this labeling are placed in a table, and provide a ground truth to select models and assess their performance.
To download and setup CVAT, see the original documentation here.
This also has information on how to set up Annotation Jobs. In our work, we created a separate job for each video segment. Also, here is the string we used for creating video labels:
-```car ~checkbox=stopped_on_road:false ~checkbox=going_the_wrong_way:false pedestrian ~checkbox=on_road:false bicycle ~checkbox=going_the_wrong_way:false motorbike ~checkbox=stopped_on_road:false ~checkbox=going_the_wrong_way:false ~checkbox=on_sidewalk:false @checkbox=more_than_two_people:false bus ~checkbox=stopped_on_road:false ~checkbox=going_the_wrong_way:false truck ~checkbox=stopped_on_road:false ~checkbox=going_the_wrong_way:false @checkbox=heavy_truck:false minibus_van_angkot ~checkbox=stopped_on_road:false ~checkbox=going_the_wrong_way:false train tuktuk_motortrike_bajaj ~checkbox=stopped_on_road:false ~checkbox=going_the_wrong_way:false food_cart_or_street_vendor ~checkbox=stopped_on_road:false ~checkbox=going_the_wrong_way:false other_interesting_event @text=please_describe:```
+```car ~checkbox=stopped_on_road:false ~checkbox=going_the_wrong_way:false pedestrian ~checkbox=on_road:false bicycle ~checkbox=going_the_wrong_way:false motorbike ~checkbox=stopped_on_road:false ~checkbox=going_the_wrong_way:false ~checkbox=on_sidewalk:false @checkbox=more_than_two_people:false bus ~checkbox=stopped_on_road:false ~checkbox=going_the_wrong_way:false truck ~checkbox=stopped_on_road:false ~checkbox=going_the_wrong_way:false @checkbox=heavy_truck:false minibus_van_angkot ~checkbox=stopped_on_road:false ~checkbox=going_the_wrong_way:false train tuktuk_motortrike_bajaj ~checkbox=stopped_on_road:false ~checkbox=going_the_wrong_way:false food_cart_or_street_vendor ~checkbox=stopped_on_road:false ~checkbox=going_the_wrong_way:false other_interesting_event @text=please_describe:""```
-We created a cvat docker container called `cvat` that houses the contents.
+Running the `docker-compose` command as documented in the CVAT repo creates a cvat docker container called `cvat` which houses the contents of the annotation tool, as well as two auxiliary containers: `cvat_db` and `cvat_redis`.
-We additionally provide a quickstart guide for how to label videos in CVAT here.
+We additionally provide a quickstart guide for how to label videos in CVAT here.
#### Moving CVAT Annotations from the CVAT Docker Container into the PostgreSQL Database
-CVAT Annotations are contained in a separate database within the CVAT Docker container. Here is how to extract them and pass
+CVAT Annotations are contained in a separate database within the CVAT Docker container. Here is how to extract them and store them in your database.
-While inside the docker container, run the following command to generate a database dump:
+While inside the `cvat_db` docker container, run the following command to generate a database dump:
`pg_dump -h localhost -U postgres -d cvat -n public --no-owner -f cvat.dump`
@@ -189,6 +197,23 @@ To perform validation, we create derived tables from the CVAT output, by running
The labeled information is now ready for validation!
+#### Validating the model
+
+After the pipeline has been run with some parameters, many plots useful for model validation can be generated by running
+
+`python src/main/precision_recall.py`
+
+The plots will be placed in the folder `conf>dirs>pr_curves` in the config files. The script will generate plots for each separate video chunk, aggregate videos from the same camera (all, all day videos, and all night videos), then all day and all night videos, and finally all videos together.
+
+You can also change which models will be tested by changing the `conf>validation>model_numbers` list in the config files. Other parameters that should be changed as the models are further examined are:
+* The IOU threshold above which two boxes will be matched (`conf>validation>iou_threshold`),
+* How much of the top of the image that will be disregarded when validating (`conf>validation>disregard_region`),
+* What is the minimum motion detected for angle comparison (`conf>validation>minimum_motion`).
+
+#### Finding events of interest
+
+After running the pipeline on some videos, you might want to find when specific types of events happen in the footage, such as when cars are found coming the wrong way down a specific stretch of road. This can be done either using the results of the semantic segmentation or by defining the regions of interest by hand. An example of how to find such events can be found in [`resources/find_events.ipynb`](https://github.com/dssg/jakarta_smart_city_traffic_safety/blob/master/resources/find_events.ipynb).
+
## Pipeline Functionality
The pipeline contains several worker processes that perform the various tasks needed to analyze raw videos. In this section, we list the workers that are built into the pipeline, and describe their functionality. This list of modules is not exhaustive, and users can easily plug in new workers as necessary. These workers are listed in workers_list.py. Currently, the workers include the following:
@@ -205,41 +230,41 @@ The pipeline contains several worker processes that perform the various tasks ne
#### Write Frames to Video Files
-This module is contained in write_frames_to_vid_files.py. The worker takes in a series of frames, and outputs a video. The user may specify how many frames they would like to concatenate. This worker will generally be called at a point following one of the other tasks. For instance, this may be called after running the object detector, so that the user can see the results of the bounding boxes and classifications.
+This module is contained in write_frames_to_vid_files.py. The worker takes in a series of frames, and outputs a video. The user may specify how many frames they would like to concatenate. This worker will generally be called at a point following one of the other tasks. For instance, this may be called after running the object detector, so that the user can see the results of the bounding boxes and classifications.
#### Read Frames From Video Files
-This module is contained in read_frames_from_vid_files_in_dir.py. This module breaks up a video into frames, that can then be passed to the rest of the workers. Generally, this worker should come at the beginning of the pipeline, as the output of this worker is necessary as the inputs for the rest of the workers.
+This module is contained in read_frames_from_vid_files_in_dir.py. This module breaks up a video into frames, that can then be passed to the rest of the workers. Generally, this worker should come at the beginning of the pipeline, as the output of this worker is necessary as the inputs for the rest of the workers.
#### YOLO3 Object Detection
-This module is contained in yolo3_detect.py. It provides the core method we deploy for object detection and classification, and is derived from YOLOv3: An Incremental Approach by Joseph Redmon and Ali Farhadi. The main advantage of YOLO is that it runs quickly, and was trained on a fairly extensive dataset. One drawback to its application in Jakarta is that there are objects that are specific to Jakarta and do not appear in the YOLO training set. We provide tools to help overcome this issue by allowing the user to collect labeled data for these more rare events, and therefore retrain YOLO to improve its performance in specific contexts. This worker outputs a dictionary containing frame number, bounding box dimensions, and an object's predicted classification.
+This module is contained in yolo3_detect.py. It provides the core method we deploy for object detection and classification, and is derived from YOLOv3: An Incremental Approach by Joseph Redmon and Ali Farhadi. The main advantage of YOLO is that it runs quickly, and was trained on a fairly extensive dataset. One drawback to its application in Jakarta is that there are objects that are specific to Jakarta and do not appear in the YOLO training set. We provide tools to help overcome this issue by allowing the user to collect labeled data for these more rare events, and therefore retrain YOLO to improve its performance in specific contexts. This worker outputs a dictionary containing frame number, bounding box dimensions, and an object's predicted classification.
To run this worker, you need the YOLO weights found [here](https://pjreddie.com/media/files/yolov3.weights).
#### Lucas-Kanade Sparse Optical Flow
-This module is contained in lk_sparse_optical_flow.py. This module implements the Lucas-Kanade algorithm to calculate the optical flow for detected corners in objects. The Lucas-Kanade algorithm solves a linear system in the neighborhood of a point to calculate the "flow" from one frame to the next. The output from this method returns a list of arrays containing the vectors for optical flows of the various detected points.
+This module is contained in lk_sparse_optical_flow.py. This module implements the Lucas-Kanade algorithm to calculate the optical flow for detected corners in objects. The Lucas-Kanade algorithm solves a linear system in the neighborhood of a point to calculate the "flow" from one frame to the next. The output from this method returns a list of arrays containing the vectors for optical flows of the various detected points.
#### Compute Frame Statistics
-This module is contained in compute_frame_stats.py. It takes the boxes and predicted classes output by the YOLO3 module. It allows the user to return values that count the number of each type of object in a frame, as well as the associated confidence scores.
+This module is contained in compute_frame_stats.py. It takes the boxes and predicted classes output by the YOLO3 module. It allows the user to return values that count the number of each type of object in a frame, as well as the associated confidence scores.
#### Write Keys to Flat Files
-This module is contained in write_keys_to_files.py It takes as its input the outputs from previous steps, and returns a csv with the relevant outputs.
+This module is contained in write_keys_to_files.py. It takes as its input the outputs from previous steps, and returns a csv with the relevant outputs.
#### Write Keys to Database
-This module is contained in write_keys_to_database_table.py. This module works similarly to the flat files one, except it outputs its results to a postgres database instead of a csv.
+This module is contained in write_keys_to_database_table.py. This module works similarly to the flat files one, except it outputs its results to a postgres database instead of a csv.
#### Mean Motion Direction
-This module is contained in "mean_motion_direction.py." It takes the output from the LK Sparse Optical Flow and the boxes from YOLO3 as its inputs. It matches the optical flow points to their corresponding boxes, and returns an average displacement vector for that box. It also returns the magnitude of the displacement, and its angle. These two measures can be used for validation purposes.
+This module is contained in mean_motion_direction.py. It takes the output from the LK Sparse Optical Flow and the boxes from YOLO3 as its inputs. It matches the optical flow points to their corresponding boxes, and returns an average displacement vector for that box. It also returns the magnitude of the displacement, and its angle. These two measures can be used for validation purposes.
#### Semantic Segmenter
-We used the "WideResNet38 + DeepLab3 pre-trained algorithm" to classify each pixel into particular classes(road, sidewalk, e.t.c). This will help us to identify different image regions so we can then say things like " the motorcycle is on the sidewalk". Semantic segmentation is an expensive process to run, it takes some time to classify each pixel and then turn it into a mask. In our case, we have static cameras and regions such as road and sidewalks (which we are interested in) do not change as often so we will performthis process seldomly and store the masks into the database.This module is contained in "WideResNet38 + DeepLab3 pre-trained algorithm"
+We used the WideResNet38 + DeepLab3 pre-trained algorithm to classify each pixel into particular classes (road, sidewalk, etc.). This helps us identify different image regions so we can then say things like "the motorcycle is on the sidewalk". Semantic segmentation is an expensive process to run; it takes some time to classify each pixel and then turn it into a mask. In our case, we have static cameras and regions such as road and sidewalks (which we are interested in) do not change as often, so we will perform this process infrequently and store the masks in the database.
## Testing the pipeline
@@ -266,11 +291,14 @@ Note: Running the pipeline requires that the database contain metadata for all v
4. Move annotations from the CVAT docker container to the PostgreSQL database
#### Generate Semantic Segmentation
-1.
-2.
+1. Get the median image for foreground subtraction. An example of how to do so can be found in [resources/semantic_seg/median_cameras.ipynb](https://github.com/dssg/jakarta_smart_city_traffic_safety/blob/master/resources/semantic_seg/median_cameras.ipynb). We have found good results by sampling one frame per second for 100-300 seconds from relatively calm times such as weekend mornings.
+2. In order to perform Semantic Segmentation on the images, you first need to install all dependencies (including PyTorch) with pip using the [requirements.txt](https://github.com/dssg/jakarta_smart_city_traffic_safety/blob/master/resources/semantic_seg/requirements.txt) file. Place the images into the /resources/semantic_seg/input/ folder, [download weights](https://github.com/dssg/jakarta_smart_city_traffic_safety/tree/master/resources/semantic_seg#mapillary-vistas-pre-trained-model) and then run `python test_vistas.py`.
+
#### Validate the Pipeline
-1.
+1. Run the scripts to assess the performance of the models used.
+2. Look at images containing predicted and annotated boxes in order to better understand where models are working and failing. An example notebook to do this is contained in [`resources/validation_image_comparison.ipynb`](https://github.com/dssg/jakarta_smart_city_traffic_safety/blob/master/resources/validation_image_comparison.ipynb).
+3. Change parameters and test some more!
The particular methods included in each of these steps are detailed above.
@@ -280,7 +308,7 @@ Most main executable scripts in this repository produce log files for auditing p
## Fine Tuning
-In our work, due to the paucity of labeled traffic footage from Jakarta roads, we used a pretrained model to perform object detection. Naturally, we might desire to fine tune such models for classes that are specific to jakarta. Currently, fine tuning of object classification models exists in THIS repository, but there exist many resources which explain the process, such as [here](http://wiki.fast.ai/index.php/Fine_tuning) or [here](http://blog.revolutionanalytics.com/2016/08/deep-learning-part-2.html)
+In our work, due to the paucity of labeled traffic footage from Jakarta roads, we used a pretrained model to perform object detection. Naturally, we might desire to fine tune such models for classes that are specific to Jakarta. Currently, fine tuning of object classification models is not implemented in this repository, but many resources exist that explain the process, such as [here](http://wiki.fast.ai/index.php/Fine_tuning) or [here](http://blog.revolutionanalytics.com/2016/08/deep-learning-part-2.html).
## Contributors
diff --git a/config_examples/config.yml b/config_examples/config.yml
index 155a85b..d3bf0a5 100644
--- a/config_examples/config.yml
+++ b/config_examples/config.yml
@@ -1,9 +1,18 @@
visualization:
path_track_length: 10 # length of paths drawn on frames to ensure motion
- annotation_font_scale: 1.0 # controls the size of fonts drawn on frames to ensure motion
+ annotation_font_scale: 1.0 # controls the size of fonts drawn on frames
vid_containers: [".mkv"] # which file type to use for visualization
output_extension: ".pdf" # output format for figures created during visualization
validation:
iou_threshold: 0.25 # intersection over union threshold. If a true box and label have this value or greater, then they are matched for validation purposes
- labels_we_test: ['pedestrian','bicycle','car','motorbike','bus','train','truck'] # the labels output by the model that we want to validate
+ labels_we_test: ['pedestrian','bicycle','car','motorbike','bus','train','truck'] # the labels output by the model that we want to validate
+ other_labels: ['minibus_van_angkot','tuktuk_motortrike_bajaj','food_cart_or_street_vendor'] # labels annotated but not output by the model
+ short_names: # shorter versions of labels, used in plotting
+ minibus_van_angkot: minibus
+ tuktuk_motortrike_bajaj: tuktuk
+ food_cart_or_street_vendor: food cart
+ confidence_threshold: 0.1 # what is the lowest confidence threshold for which boxes should be validated?
+ disregard_region: 0.5 # validation scripts will drop boxes whose bottom is in top fraction of frame. this sets that fraction
+ model_numbers: [40] # which models should be validated?
+ minimum_motion: [1,3,10] # values of minimum motion (in pixels) for which to draw histograms of angles
diff --git a/resources/Object Detection_Mask RCNN/LICENSE b/resources/Object Detection_Mask RCNN/LICENSE
new file mode 100644
index 0000000..aa14198
--- /dev/null
+++ b/resources/Object Detection_Mask RCNN/LICENSE
@@ -0,0 +1,23 @@
+Mask R-CNN
+
+The MIT License (MIT)
+
+Copyright (c) 2017 Matterport, Inc.
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
diff --git a/resources/Object Detection_Mask RCNN/README.md b/resources/Object Detection_Mask RCNN/README.md
new file mode 100644
index 0000000..66a38a7
--- /dev/null
+++ b/resources/Object Detection_Mask RCNN/README.md
@@ -0,0 +1,225 @@
+# Mask R-CNN for Object Detection and Segmentation
+
+This is an implementation of [Mask R-CNN](https://arxiv.org/abs/1703.06870) on Python 3, Keras, and TensorFlow. The model generates bounding boxes and segmentation masks for each instance of an object in the image. It's based on Feature Pyramid Network (FPN) and a ResNet101 backbone.
+
+![Instance Segmentation Sample](assets/street.png)
+
+The repository includes:
+* Source code of Mask R-CNN built on FPN and ResNet101.
+* Training code for MS COCO
+* Pre-trained weights for MS COCO
+* Jupyter notebooks to visualize the detection pipeline at every step
+* ParallelModel class for multi-GPU training
+* Evaluation on MS COCO metrics (AP)
+* Example of training on your own dataset
+
+[The original repository](https://github.com/matterport/Mask_RCNN)
+
+The code is documented and designed to be easy to extend. If you use it in your research, please consider citing this repository (bibtex below). If you work on 3D vision, you might find our recently released [Matterport3D](https://matterport.com/blog/2017/09/20/announcing-matterport3d-research-dataset/) dataset useful as well.
+This dataset was created from 3D-reconstructed spaces captured by our customers who agreed to make them publicly available for academic use. You can see more examples [here](https://matterport.com/gallery/).
+
+# Getting Started
+* [demo.ipynb](samples/demo.ipynb) is the easiest way to start. It shows an example of using a model pre-trained on [MS COCO weights](https://github.com/matterport/Mask_RCNN/releases/download/v2.0/mask_rcnn_coco.h5) to segment objects in your own images.
+It includes code to run object detection and instance segmentation on arbitrary images.
+
+* [train_shapes.ipynb](samples/shapes/train_shapes.ipynb) shows how to train Mask R-CNN on your own dataset. This notebook introduces a toy dataset (Shapes) to demonstrate training on a new dataset.
+
+* ([model.py](mrcnn/model.py), [utils.py](mrcnn/utils.py), [config.py](mrcnn/config.py)): These files contain the main Mask RCNN implementation.
+
+
+* [inspect_data.ipynb](samples/coco/inspect_data.ipynb). This notebook visualizes the different pre-processing steps
+to prepare the training data.
+
+* [inspect_model.ipynb](samples/coco/inspect_model.ipynb) This notebook goes in depth into the steps performed to detect and segment objects. It provides visualizations of every step of the pipeline.
+
+* [inspect_weights.ipynb](samples/coco/inspect_weights.ipynb)
+This notebook inspects the weights of a trained model and looks for anomalies and odd patterns.
+
+
+# Step by Step Detection
+To help with debugging and understanding the model, there are 3 notebooks
+([inspect_data.ipynb](samples/coco/inspect_data.ipynb), [inspect_model.ipynb](samples/coco/inspect_model.ipynb),
+[inspect_weights.ipynb](samples/coco/inspect_weights.ipynb)) that provide a lot of visualizations and allow running the model step by step to inspect the output at each point. Here are a few examples:
+
+
+
+## 1. Anchor sorting and filtering
+Visualizes every step of the first stage Region Proposal Network and displays positive and negative anchors along with anchor box refinement.
+![](assets/detection_anchors.png)
+
+## 2. Bounding Box Refinement
+This is an example of final detection boxes (dotted lines) and the refinement applied to them (solid lines) in the second stage.
+![](assets/detection_refinement.png)
+
+## 3. Mask Generation
+Examples of generated masks. These then get scaled and placed on the image in the right location.
+
+![](assets/detection_masks.png)
+
+## 4. Layer activations
+Often it's useful to inspect the activations at different layers to look for signs of trouble (all zeros or random noise).
+
+![](assets/detection_activations.png)
+
+## 5. Weight Histograms
+Another useful debugging tool is to inspect the weight histograms. These are included in the inspect_weights.ipynb notebook.
+
+![](assets/detection_histograms.png)
+
+## 6. Logging to TensorBoard
+TensorBoard is another great debugging and visualization tool. The model is configured to log losses and save weights at the end of every epoch.
+
+![](assets/detection_tensorboard.png)
+
+## 7. Composing the different pieces into a final result
+
+![](assets/detection_final.png)
+
+
+# Training on MS COCO
+We're providing pre-trained weights for MS COCO to make it easier to start. You can
+use those weights as a starting point to train your own variation on the network.
+Training and evaluation code is in `samples/coco/coco.py`. You can import this
+module in Jupyter notebook (see the provided notebooks for examples) or you
+can run it directly from the command line as such:
+
+```
+# Train a new model starting from pre-trained COCO weights
+python3 samples/coco/coco.py train --dataset=/path/to/coco/ --model=coco
+
+# Train a new model starting from ImageNet weights
+python3 samples/coco/coco.py train --dataset=/path/to/coco/ --model=imagenet
+
+# Continue training a model that you had trained earlier
+python3 samples/coco/coco.py train --dataset=/path/to/coco/ --model=/path/to/weights.h5
+
+# Continue training the last model you trained. This will find
+# the last trained weights in the model directory.
+python3 samples/coco/coco.py train --dataset=/path/to/coco/ --model=last
+```
+
+You can also run the COCO evaluation code with:
+```
+# Run COCO evaluation on the last trained model
+python3 samples/coco/coco.py evaluate --dataset=/path/to/coco/ --model=last
+```
+
+The training schedule, learning rate, and other parameters should be set in `samples/coco/coco.py`.
+
+
+# Training on Your Own Dataset
+
+Start by reading this [blog post about the balloon color splash sample](https://engineering.matterport.com/splash-of-color-instance-segmentation-with-mask-r-cnn-and-tensorflow-7c761e238b46). It covers the process starting from annotating images to training to using the results in a sample application.
+
+In summary, to train the model on your own dataset you'll need to extend two classes:
+
+```Config```
+This class contains the default configuration. Subclass it and modify the attributes you need to change.
+
+```Dataset```
+This class provides a consistent way to work with any dataset.
+It allows you to use new datasets for training without having to change
+the code of the model. It also supports loading multiple datasets at the
+same time, which is useful if the objects you want to detect are not
+all available in one dataset.
+
+See examples in `samples/shapes/train_shapes.ipynb`, `samples/coco/coco.py`, `samples/balloon/balloon.py`, and `samples/nucleus/nucleus.py`.
+
+## Differences from the Official Paper
+This implementation follows the Mask RCNN paper for the most part, but there are a few cases where we deviated in favor of code simplicity and generalization. These are some of the differences we're aware of. If you encounter other differences, please do let us know.
+
+* **Image Resizing:** To support training multiple images per batch we resize all images to the same size. For example, 1024x1024px on MS COCO. We preserve the aspect ratio, so if an image is not square we pad it with zeros. In the paper the resizing is done such that the smallest side is 800px and the largest is trimmed at 1000px.
+* **Bounding Boxes**: Some datasets provide bounding boxes and some provide masks only. To support training on multiple datasets we opted to ignore the bounding boxes that come with the dataset and generate them on the fly instead. We pick the smallest box that encapsulates all the pixels of the mask as the bounding box. This simplifies the implementation and also makes it easy to apply image augmentations that would otherwise be harder to apply to bounding boxes, such as image rotation.
+
+ To validate this approach, we compared our computed bounding boxes to those provided by the COCO dataset.
+We found that ~2% of bounding boxes differed by 1px or more, ~0.05% differed by 5px or more,
+and only 0.01% differed by 10px or more.
+
+* **Learning Rate:** The paper uses a learning rate of 0.02, but we found that to be
+too high, and often causes the weights to explode, especially when using a small batch
+size. It might be related to differences between how Caffe and TensorFlow compute
+gradients (sum vs mean across batches and GPUs). Or, maybe the official model uses gradient
+clipping to avoid this issue. We do use gradient clipping, but don't set it too aggressively.
+We found that smaller learning rates converge faster anyway so we go with that.
+
+## Citation
+Use this bibtex to cite this repository:
+```
+@misc{matterport_maskrcnn_2017,
+ title={Mask R-CNN for object detection and instance segmentation on Keras and TensorFlow},
+ author={Abdulla, Waleed},
+ year={2017},
+ publisher={Github},
+ journal={GitHub repository},
+ howpublished={\url{https://github.com/matterport/Mask_RCNN}},
+}
+```
+
+## Contributing
+Contributions to this repository are welcome. Examples of things you can contribute:
+* Speed Improvements. Like re-writing some Python code in TensorFlow or Cython.
+* Training on other datasets.
+* Accuracy Improvements.
+* Visualizations and examples.
+
+You can also [join our team](https://matterport.com/careers/) and help us build even more projects like this one.
+
+## Requirements
+Python 3.4, TensorFlow 1.3, Keras 2.0.8 and other common packages listed in `requirements.txt`.
+
+### MS COCO Requirements:
+To train or test on MS COCO, you'll also need:
+* pycocotools (installation instructions below)
+* [MS COCO Dataset](http://cocodataset.org/#home)
+* Download the 5K [minival](https://dl.dropboxusercontent.com/s/o43o90bna78omob/instances_minival2014.json.zip?dl=0)
+ and the 35K [validation-minus-minival](https://dl.dropboxusercontent.com/s/s3tw5zcg7395368/instances_valminusminival2014.json.zip?dl=0)
+ subsets. More details in the original [Faster R-CNN implementation](https://github.com/rbgirshick/py-faster-rcnn/blob/master/data/README.md).
+
+If you use Docker, the code has been verified to work on
+[this Docker container](https://hub.docker.com/r/waleedka/modern-deep-learning/).
+
+
+## Installation
+1. Install dependencies
+ ```bash
+ pip3 install -r requirements.txt
+ ```
+2. Clone this repository
+3. Run setup from the repository root directory
+ ```bash
+ python3 setup.py install
+ ```
+4. Download pre-trained COCO weights (mask_rcnn_coco.h5) from the [releases page](https://github.com/matterport/Mask_RCNN/releases).
+5. (Optional) To train or test on MS COCO install `pycocotools` from one of these repos. They are forks of the original pycocotools with fixes for Python3 and Windows (the official repo doesn't seem to be active anymore).
+
+ * Linux: https://github.com/waleedka/coco
+ * Windows: https://github.com/philferriere/cocoapi.
+ You must have the Visual C++ 2015 build tools on your path (see the repo for additional details)
+
+# Projects Using this Model
+If you extend this model to other datasets or build projects that use it, we'd love to hear from you.
+
+### [4K Video Demo](https://www.youtube.com/watch?v=OOT3UIXZztE) by Karol Majek.
+[![Mask RCNN on 4K Video](assets/4k_video.gif)](https://www.youtube.com/watch?v=OOT3UIXZztE)
+
+### [Images to OSM](https://github.com/jremillard/images-to-osm): Improve OpenStreetMap by adding baseball, soccer, tennis, football, and basketball fields.
+
+![Identify sport fields in satellite images](assets/images_to_osm.png)
+
+### [Splash of Color](https://engineering.matterport.com/splash-of-color-instance-segmentation-with-mask-r-cnn-and-tensorflow-7c761e238b46). A blog post explaining how to train this model from scratch and use it to implement a color splash effect.
+![Balloon Color Splash](assets/balloon_color_splash.gif)
+
+
+### [Segmenting Nuclei in Microscopy Images](samples/nucleus). Built for the [2018 Data Science Bowl](https://www.kaggle.com/c/data-science-bowl-2018)
+Code is in the `samples/nucleus` directory.
+
+![Nucleus Segmentation](assets/nucleus_segmentation.png)
+
+### [Detection and Segmentation for Surgery Robots](https://github.com/SUYEgit/Surgery-Robot-Detection-Segmentation) by the NUS Control & Mechatronics Lab.
+![Surgery Robot Detection and Segmentation](https://github.com/SUYEgit/Surgery-Robot-Detection-Segmentation/raw/master/assets/video.gif)
+
+### [Mapping Challenge](https://github.com/crowdAI/crowdai-mapping-challenge-mask-rcnn): Convert satellite imagery to maps for use by humanitarian organisations.
+![Mapping Challenge](assets/mapping_challenge.png)
+
+### [GRASS GIS Addon](https://github.com/ctu-geoforall-lab/i.ann.maskrcnn) to generate vector masks from geospatial imagery. Based on a [Master's thesis](https://github.com/ctu-geoforall-lab-projects/dp-pesek-2018) by Ondřej Pešek.
+![GRASS GIS Image](https://github.com/ctu-geoforall-lab/i.ann.maskrcnn/raw/master/samples/out3.png)
diff --git a/resources/Object Detection_Mask RCNN/images/frame2.jpg b/resources/Object Detection_Mask RCNN/images/frame2.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..8c797940289d08496464ff8eec1dad418b381d22
GIT binary patch
literal 772486
zcmbTdcT`hv@HZHxOYfbaG^I-K#D+8xL8VAj5fBj~JxB>fdJ_;(5TX>3-g^lh>C!u-
zP^1JB!4OCYoA2*E=iRe=_K)4YIrpCA$r58ikgO&jzR__<%R|}3i(u26ryP;6jSaFq5Kb^W~bq}s(720)BFY9H7_ou
z@YL`0Vs~n~xh*EK;>s_*BN(pm@bd8sNJvUuzag!ns-~`?sde|B{(S>OBjd+Uo?2R2
z+dOl6xub+PaB=CLYhp6b7*tqz#^iLU?S)adT=NA+f{V4ufQd(PA-_Y39
z+|t_9+t)uZ_-kl*YIvIUm=rT63&
z9&r_<1nxi9{)e;wcZ@~+|KjX_GxmS-wFF?Ixm;!?vomn+w&(vI
z0XV!~>4eeLQCHB>Dp2(4H$g6|7ji*5$`|3OgwDGTi$o`>ym`jp!A$DkvGqPFC8r
z(i1+<^2I1^KWuDwXEqybjg%ONQTN8_x4S9nj@;Ayb;^kJz9g%y5Cy$N;>EVuT1066
zlk?n_ee)h)>?eD)YG|!oeI)$h_Gen@&-_pK>YYq^5MFaIdh&Hr&=)`Gl1^{YpPEA4
z0zuiZHT%Z&sN;>TmbRvrrTxh-PV7IPh?kpiY(Abl7N)w7?M!kZW~(h9|3zZ+MNEZdRS-u0)w1mIhw3^JU%e)ZlH`5d^Tk{EZ{Us_7mp&R*yp2h3+b6!gH^K7fq)w*Vg=K%Q{wu4wK7HGr
zFkL`-6$d1T%sv}=&NzP5L=j%|fNn~K_ZmO;H;xyUb0{^mOi_Xp&+(AKA{RQS{yPUj
z99NzIXV&y+r~g{5$AI#_&sy@m1XBPmH_`OZ_p&ID8JkKtt$U0;jXXT2DwpZ7A~^O^o~h3Dp)`~SM>
zG5aCLjp-$@9he;|N%&4QC$FsY=k%>}>p1;{j5v0-Ce@oU5R8>sGQJOOI4t*bw|m(u
zPt>mA@gUs+yh%<(f7Vsd+iywE=FC0Omy&x$SGt2P2f^O2CUiYA&PgbxXtSvM{P2+w
zt-ZSC6V2_(7MtMN+zSBJ7v#Z-9`kt$6p>(QR%{Dd##hd4$6K}7@OC!}e832K{d~*+
z%FWijMilcvsmhXNA>~-GTaN*2){CKKWgzKdE5GFL_!qFbEt!7u#~K_Pc9~C4{gjH}
zV$FR1%~z1d9TIR?z_@w6&0pJTp5)Fnp)t7i?+H%1qtRC4PvmZW0aix9?L=RW%PM`;
zH)FrSL;X>nyR8htIdio}$4yiYD9)6;QpQ>eyK5=K*?-1-K3g(JQWZ_GtczQk26=B_
zG@{K(<~4)Dx36?XNYBUUJk5vH8IhNNEAT+5+CAT^co%R(7R{uGyCPUcCr9ayZd?Eu
zoHNK}s1Vw{UX!Z>Tv2(egqc=xpuc%?ddHGf&8|o*``Ajy{~Yr
zU$15*arTn=t_R%0l%0SXanL)<~wl=GN$bFFzjci}d$ZG__6u}r15uUpVmLOn
zBD4Z6!Ovsk3rmCA_dq_o;&ib4O-s+dioVA3g33)MJq*ClA3ulB#4SC$b1j~Wf$R0n
z(r?cJ+JGC!=C^3L{_m$15SM_DAOMNFe`HY5WJhii2q~v`TPi&OCHzeA-lzNn=euIe
z`@Fej>6nLO)!J8Nj+W7bB;IMHUkR+2qR6g29aaNPrF;feZ^WOumHPZOjAh946VbP3lHU;Onq+dBPC!ap>3upl(!Yh4hZFhcvrVZq6Ve2eyk~urV{OsRIY&
zuQ2GAZGvJQv5toe+eXbDQ=!ZRg8@gj!*8OtY6(R}MdgE1nrRrcFMuiY?=SvA7nv>4
zWhi2u8L;cx2d`)CgKW11#i*ycJrewKaQ~(_(Y{+sN|?!@*wL(s%n;h^Si8MXWMaT>
zBjt0rSKGvX6bJDx$={2m=a3nK$yGxmU;j}I16UMiOs*CDya2Q{tzQ5DuTMb+=M5JC
zMD=-!<#ZSdA|KamnPowOw@(s?D>46VON)!UD$h|;j--YsZJOq0f=b0as+5i=Bo7`>kQNEPr(#-Td
zB_zIsRx$psBjGxj#LkHKIxiy&E-Q4wm?2PPKw>@LmIsvu_8>m9F+Htljse-U@Lk|}
z;uMVwfU_Za<)wA#`a1UU#NtD3I4?zIZo{2SIiFxvJ6eKLxP!&lrrnm!#>DY|?d?ip
zqp5<1JlPUAXX*^_Qe7!aQix)VIs&36Mq(ml6cMrvTM{+0dtQo%Wxl9|*vU5jy-Lk|asK*Wala2$`GEf$4ER-=+I#`HMf>H=y7tvABi$Q}K?aJS;Wr>v}
ziX}~&2;S%w8!+z{;!fpzrIemU6EP~Fk?c%&1^%#sSV39A_7vIN2-Z5Uk^N_u45tz~
zyhrF}^6=xv%?Lc1x4HE+!m-|{QPTQI=)skN(x<{U9Ew2X{j{^_B$UiKYv22jffN)g
z>n^7aB>na*GRVD*e{zwp-t+r>>My@Lg=}5#%u351_gh$)ITY(Q+<5c;>F_Uw!iSuP
zAGKfR*VQ)8+EjcPW)QZXzkg7-RMEx9WX4|!eOfc4660ZTH{H|hkd39_w^iPW_-Kn(
z&9Zw)sVdr*RNxcRe*xIbdy}z`>wBcQnRTsv#Ba;kIaK*cB}T3g=$81`?Ak|mv8c~R
z3zj5@tJC0@)5iggE~l_@*{|0&f)`1kXI-rc!*P62-Zwe-^Q<>BO|1Hd5(~vd;6cY^
z-%ADl*g^5kN28T1+=|tr$%FB~{3rm_g2zWpsfo27%KARj5L|wZQ8h<8ar2D*xV)gm
zq6rar0MglOliP
zb_Qf7R!*3B039;&G3
z6~J?XYYr8}^Nfjw#@}B+_xvP=B!x8L7tg!^^quXmdDPy`VwZgtO$7?_vSW&z|K45c
zKRziLYnrFmunVsX=r?6WZev2QQmFNK-^G8-Qr%>!czgdP`rCfVnh04g#V0FWY*e4@
zO4htu%9gw4A#X}6s$QDy#Nh1+tjP667y=j-N)K^Z)4;Ur=6Ao@;&ricq1PF@^0k0VBjhWOiR$E`(Cd-Nri80F)_5H&
z-SPlaqyo)r3^athZn(N|imCKoh1j)-g97<4UCvTR%H%61=!w|J!C=CFFwQuo`%IzN
zMpl(#Dlh*r{{v%F84Iw8KpilCn-6ewHLA!r?4d8TUb`P}1fV`x32aM=&kE%uB%p1f
zjPCEvV%yD1A%T(?fT%B(;_q_^O@`_ztxH>LTY)d?n*CqJZz(fe6)rLSOJ;_S;-
z-zO54gEh{R$ui{OJT9zMFUCSnrXR>19E5aB>MUZPrS7!vYT!xm0S7zRS+8bDUa4kH
zyE#dJ+R@lpGrG6ul9C5?a*SS6BM@;eThpliKBSJia|nvNapv*<*50V?{Oi6%$hz)v
zn(G$%fW=z_Ot}75k9`X6DcC{R_3Xhd8M#opb_AVjElQ#B7{#%e7VD4-Cm-F&^p5{e<0|Jpt)k)3BsTYl
zs0tP&P?=+O#~MpSTjqV8X!d!m0#1|$>9b6yCtZ|BJg!@msVHzPrhIX`P=e3qwSiv26B=H~^SoDkbv*rKL*l4?-V4t!A{m?zzB-fF}C%_VQ{
z3bYJywM9FgCuPe%&1ifr0SVogK}{qsS@pUdx5&+tdBD+NzX|4*H&HLcQt6+bz_;MwdPy?{
z??FG$-mM$5o^Z9U!9#^8$J*k-DlYPj1I=WoK=5-jYT>)cvEqbwDl`alt7dt5-9}#p
zt4|A>c2;7JkPL~E+e$46`EIohB*?%qAS@KWRgWEL!0zGi6JS`fpwVIev>z;1>!ppv
zSVF9|^|OI*>C_yj$?+3JSbeR+!KfSWpY>j_0AU@qPGS=P^RK*CTPo^@cdh!Er1(Z?
z%3Q%Yv%M4e=$$vq^aEKkPyv1#ff*5eE90Du24~Ew>_DgJiX5E3rc|T
zbqeg|QR5mj5H>oR-=4jW_gfWvq#&C%Bs;h45!Jc63s^yHY^H<9kg!W|L=6yY4qKeY
z2eq8T$Q&du8}CdmdvJ|a!
z+&mBH(YrPitf&xEsJOh#V_75U8dv=yPYEcFuDm6B^wk&RSO;r@?te$Z7q;;LtYa@M
zc1=1PEMN_J=~KSGBhb*91+K`LVc%$M3BL-h4^&g1FWLTPm0?cWG5#3y_Xz)m07Zc!
zVfB8P%Ce9kAHScn?+dL?vLTJ??;do%^_HgdiWD5U&;Kcc-YHLdV5N8o$`&e4&?a9A
zBm$O#LvU98A+CKOZav}P>r*6EoVNaCy^-v}vrFZ*y0e>oX_TlbFCcHZs=d<5lbIJd-e{nSEkpT!#Y^|h8O$|TWqY>~ey9>u?s6v6qsthB
z>)tR>Z$X@nLWO&cCV1c8yneZz;zws*1;%P_3x3)@FYUI!wPx!alsx+nw$F|*V7ZP~&IXua+=QBs^2it0*9}8B{m5Kv~_jR6EW`y=zVR6(xp$dE`M>#euTMRul4_
zG+?3xc}l>)7`+ZyC#hyDa0Y8uN9;LFxwKxov$85tQ8{EB)?;IDbv4xP?+u$Pzi@|a
z)=K2Nag29z{CfOgnQ>K=z$T+Zo?}7rlqyWz&BBHSB*9DHGi9pVVfZ)T@C{AzUrq
zH}NA{FB8%S-UZv|^h`tO2IjJwc;300zB`M}zHuvdxUTPoI?^V^^}gZr!vp#?DMG7m(Qo|i0h9oz2uc`*4U*x
zIz_H}AN8pT#3cG9Sq7$UqMOMUiH;|sZ
z;#i+toWJ98H0U5djcI7a)m!p*@}&^@N(b=634yA;x81xr^xu%5tABiOEj5$>#i`hn
z^GqEm)^z~L7%Yfmcv){27s@!PqmLv;in|!e{-afX)mLe_aNEI9_6fE<{s}=}_Ex7W{>&4Ef3j
zYVu`5FI_J`nT7Cj@ZRM3mjb)p$pHmVKkYAR+|4sSG|fuQ$)bJY-ht0QhRtd$W{mit
zmM56ctGd^ua6n4XP2?I?9oT-fw=$$ddbYB`4-H+6>CL-2rLsjHYsGn%F!8oXjkZQF
zn?|GLK2yK_i}qL*92*q$h8Dk^>wMOiicq-iI4TdvNufecdOHM>x|t0{J7n75@*?((
zJ!hWrqD0-9IF2vbn?et~c)y05Q&xwakzDzBv}FW@Xd<2!y~m?vNY}^{-IByR<1Iv=OuYYMDxEu
z!7iv4-s{Lp?hn<%9ehD+gMs}aXfObh@WciYF{Kzf79eKZmFcG90gbg-2cPNo{je6z
zL%ndADBFS6b=Dxwm`U!G=R6F4H|e2wbEPYfj}VESKo2GzGv=&%_Z-}Gn)@E{Hy~`a
zVbqdfYa}H6i$_A!l8}WO+#kZZ;)4hUWx@P}?fknxAjS7yncnYVHfAhLyeVNk#FWg(
z##(%Lu8=WLeVU|SNJw^f&73p2?E-hz^2|@ic-}4ljO1g}vrD(@xve}tKkdOH&mfKI
zMK3j|3M&}=_I;f;Z7@&Hkknq4Bw#a6M&Q;myxfjopaJM+&O
zUB_49{r3;sB0sTq9a07B6Ed(l*0s2lo(Z749&onYGiogAAjwC_Q(^&!`Di3qp30qi
za{ty)H#Hmcd*0rZB@oIXrP%Q`loS3M&flLunD1ai@X=2(h_px+3*bs!VfgVW&QeNJ
zX^*Pwu<5cPmN}GSle>-Kh%Y4+ZrEnpFNH`Aq!8|9bxrL)oCLB!Ux&0|pw4KJTg>nx
z9*l1SQ&w~%^hF?u=EF3OoNI1mNd4am)X`<;5;pOw)s6I_?fB&moK#%B<06sFmSoVIFT*!%;1RHW!*%q>
z;|w}6VdZIH@$oWS=B!Sk9_C-wm;xV#)c&SW&dFnp5y%N%KN2GQ2Y&&;R{ICumUwj+
zRxP|7aqvg+tIr(lIxtxPoIru-%zBkN3YE*t%t5_o1-mcM$@d{qs|_30%)iU=)y-nC
ztV)J5#XWUHo~13*QO>KMz1Ozj-{1tvEE3ne4Q2I4u0i%5%)wnx+iCm|Df<&@{Otzk
z-|~yd*BF=CMrM)?C(GyU4F!TGc?Uh)d3kkoF|
zd(?9-H@HDKA5`O{5>+r%j<{z#Jl>~OKEB)FFSSY|d`@3^L|LM%K1H=07w_Mt%;B~3
zB#;y9f>^&}bqt1^c$0J&=U;+eeQe%2i_uYbXL|I(Ia*llvGTtgUHkG`&nV&3GbaBP_(-(ViGGd(q*$r!Fi
z$B{SX;pd^tgtyjE*O0_jQd?Btnwys|sIBSwc!1YmV`%pjdyR8Co@e0+A`%C>0Carv
z4nGw}>57?89~&i?IjL4H9hXQp@+xvE^_i#U-hG~7WBHmQ*}RX2ngGr-Mv9lY0FW7O
zqYnvUA_cf61XmVCWjQ<9A>P9v!6nl`FPgq*6|);^h%r}frCW7hlm)*n>~XT#@G0y}
zkT~y>uYkZ5IdWp>W83fOF$3u?0M7-XD;Izs{qyhJr%md=z1Ll1TuFg}3Eiv0eWn9m
zj%;MgE3rnbiz9etf)7$qAqof#=ErXL=UvY{hHMY&$>E-mMZml^i?Ocu@(greGauXz
zSoRcidms%QePsEQb%c_igrxMXNnqy+aCH4zL^D){lFI9U1$5FaE=*1n8fn*oRixTM
zABP4a1IhQiv{RIDI7k*GfWMP&pMUdP7YG)e38q77V+{J6INr3%NB%UEnEQUs&Ym%L
z!Bk<~YUn8Rfp|@B|2Z$2fnvONY}RhY5&-M>TeUyTix
zp8xF$`G^Q!KFPBJkDXE2qj^9X5x)n*J^)e?Kz(H4^DlJC$E$AO^ry{cJo0I+hD}82
zp3?j&sEW~cli%_tln24n5=IVc2Ud=#_gi8c%kr(==Fhm>wftt2I+`+cvr>VJ=!RVH
zTWK0deGMs%J?o_yQWO}pw^?>3NO{>8%Cb><_1|0FfvkS7$r_X?;HTgZ@$4LmB|r+0
zCy9`Dq7*;zfu7|A#l|+YLErk2a4R2AeC*g^N*1dj?qJV0KqC+D`>3hquCR@x^rw~E
zMBSA}bfI_c_QK2h>BkXEGY0PLMh2vM=Ao)T24n_jig0Sc1j(0M
zw1iVjiXFf>!U+_IY_;*AmLuO;T(~!T>&?LytL1x8zdg?qYC^v)(wO%wr254V_xLf!
zDCXuL1CIDEXKF;HvYkVTv)3DBp3aS(ynp8`&~7C-BAa}TA`(c7pngT@l$9gfH5?cZ
zUMy(A9xj0DI!Evo3m~_g(IdSnibf3!`wWgKLc2%j${ApI#lN@l|KdZk
z&MTIQPOzZ~f;1&x#tuc~6m*>uC@BdB7)cVk6cVg_o{I<7hNP48T#nSw%g%G#v=0!Q
zR2wpc&D6}-Pk0oP1{+_0STps_vKrY|axLk%b#;v#S)hAo@cz~7?j~0q%isZLqIQMy
z1Q;V@rua}zBcy^~xL0T!v{|~+FFIJv1TbYExGkHfc-XD&$ZaU6=;#FQ+F88uG
z3%jYT4BT@ZK1tx|?HA!_`2cyj#>!SFmHN}B%z)F|`^xq-utKQYB}GeJU9H`2u->(*
z!lZb8$*8o?;$a~9=^2H2+78)Rm>}kWR!Sc`k=?Qb!`MAdA!s%k|88J@jIiA+^GQ1f3sQ93dKI8)0Oqw+f%
zKI5eEr2A&uJ0C;4Da+V~qT(jLAcCZP653vG9dQ-sJdyBYdP!gJ8u7}afjhnuKvd~I
z!o$pyI*z)>>Bf2IGFw%X<|AF3*4d(OAWv&*T~t027H;+IX6`3X3mGA;@qJPD^3!p(QQ-Az@?LLUF_9HfP9
zW&{LD4Ydqu4x=kCCvFvCx8lAp-&Tn8E2H14kLICW%wg8s2T%|>!^t3(j)=Zcm(SX@
zDR~c-1jRSQS2L(9e|qzMWV!vvK-Yjf1s~S+Jt+&g#{E5ixogrM^3X&9
z>?Y*Mp6|5kGK15{wKqIoy+2ANZ-3T8h(HiR&xP_7*YYP3GvJEg5!GMf5i(~IlnHrC
znUL%AWQGFuWKT~$bpy03E`O(#0Sm`N7ea^dNLCO@7#~!WR|ZfkCV3%qLbcrVuZX`>HG^j=jrumoa!m8WY0k%=h+llo}`5{
z*OPDhV1%@O9-I^R#YU>$pS<%TlIn7;kasg}ddA1OO%Er$EuliB9`DHVs%{>ARLExA
z@5kMNOfp!9NW9~YW8qN^@Rz>H0@)D51q>?0X!T|Cv*g81eMd%lc_uwoJABz8MXl
z;n2pNOOtyq0M-6kM1cuZt_3jG1jPvFU3N6h_@`cH>)Vv`uN%sLCcu
zQM}{??%)UZ&6@mqnCov6NInOTtlR~0Aem*QBwi)c*4jMNkx4rc18`|*C$B+fDWF?n
z{rgOI4I!>5*m&|lK8MT!Vy>rtIAv>A$eVlBC;5&d|A4N+j=gJSyAjgf>?y_1eNaYm
z+SIKUFI8YxonW1-lMkE4UnM$4!UoSI&ST&n6fE=>$_5#{-$_t)M}BTsbSt!9ShXeO
z`T;TY;Hk||wSnv^OmNo;Tv>VSaGw?0jKqCJW>Cl4k{Q;&LAcHOLVWtQ=98*aT~Zze
z@kFefMbGaC70@3&=z=d9NSrQWo@|?vWaU#Bt%C!LOeEk*?W~2nYBj$eO?Y$;Z*NM;
zdJ$Wc3fZh)uXN~nDne^RIl57TI}HkggoIur9Zi9NxO$NM7=sucreH(bo9}rd)4zTf
z0=Rqa26jDsU*|F6FtIhNzhQOQcEwEqlRxsdRVNyVx(qpcCOvl3~x;s9Vab2&G?Q)f9fdN2)V)@mOH6@2N~0o@
z@8vYu;y@+}h>=>=#lC0aOq>tEN%iQl5tLCOO1&tlIFGke2V;^Z9jcQa7XDGr3@xj-
z2lhSb+`Wg5Nwer@PX|8~&Zshj4ROwgAFAZ1jA%N;1LewXq@VoERAUY+GI%SU2dO+U
zXe4}5kD0c>1!p>_)ilK${WJRahv$A0Cu6N(j#ty@%buktiiw5+JUVH>j$RZQ+n
zQ6DJLgLZaQ^S_|$W7SitcjTQv4Q&Cc+4x*Hb#YxK@|*}zT)RPq9m%P>S}l~NK5if5
zIU2#L_9XRQt?b($_w4AHTg;84hn6FWPzsXSq^R^uh!dG%F$f)EaE}0^*dbn$6x9Q#
zH@;qPGC{A@A%MWCgH`-j#78S|$O2FUVn$}U(THP>_DkV1BdWM54gT2a*_xWaO(+)r
z*MC&q)0fJudqsGWx)CtQFMeg~ETMBVi24jhXM$!GBCHSSi4eR>AQQK1aasgZPhV4!
zt^Vin6Z*AH?7cd}<4mq2`i6)XTcFGhS7}yF6o`Rzd+Og_7441yrVj%JEgkqlS=~l|
zPlzG3*!t8qM3X;jBJ$R2VOWz1!j16?ih>C>;wr`quq@C^6mWLUcU@kSbPv3?Z+~7^
znq%U_ZdT`eS_dZ>r#gOtVl7|`251UO=^_X|s5>tz!qtmhub&kGigmHMVV=){oV#D9
zJ&;_-*9J1y7l;IWWu&S^{sSv-E5v3OC%~OyY_GMVtI*mBaQ32^XFnQ{Zt~ot<^zHw
z$$`RGq`s!5Zoc^Pv8Jo*4N)2Id0wk?180i|W8vY!bl=1sByh4*cD?U?TWC|kRNN*o
zErh&x72DGUNdvhdSv~Ml5_n|l#mqlzG{AP*Oe4wAo=BbcyoRpo0sdI^)E%o%W(g%&
z;ds%F3D^Ww)&7JJ0wZRhi>YXTqVqPZnlX%?{@2?VmrS1XtbeR%bK9|7Onw~I_t>~>
zL9b#fIES+F)y2UnrGXw*ByH^W{^ScZPBHy1_D2IaeT{FS
zYRywBbk%vI9vykHbvu&Gw@ZQ3`q3%hiuMCydL4KsJ$@T8?<)?yn+Un_oc+~pvFoyZ
z+K9_`;V1AxGlHDKejHII`2D0Feeh%K0W81aBofxA?4DF&+bf)-W6{vj{3Pa!>y7ZN
z`!ZLZZElV@@==l**hjhn^LHAZpJHm`
z)p2A?7ULCM%_r9p$Xgo0Cm+`GOdSs$Kg#OnZ4XldC?GI29?nTJ^9TvSo;;jxR|Xi=
z|2{5R8S>)2`q6OY?<>whQ8ul}r|rTxM}%2?yXg-?&vI9%_+-1ybMdvB5l-9F+P54-
zGK~Vab_;1%!|2650eLWd9N{h!Os^jPo$%tj(F-4xiA;yq)bztf;*+Ll->pf_=!C1C
zesY-`@&;5D-q3p9{VX&zERPXeoJ>|(S#-;jzOZ2
z!o0>`PePgo^VcZ@N{US?A<$m@!BTV^it$JClACxibA+nX;1yoE7OOOR?jvI83{Z%4
z31KSOlYIr^WHU8&_;u_r6!&XWXROl@FULDu$%8_{F^QSP1~5j%H8)g@R$k`
zRiS>dZNc`fX$AdZZT-z(&jNqAC~>cFDSf117!hR$pDvW`dm(&)?#W#OP1PE`FDw?`
z(3pg*;&{X3GIz#ztm!9c!YE~#AuG1_yU~t=(R(VmpKY7_h_@d~Ya0wAEV?5woc&9V
zaVmQr+){P(zHuhEef!w2Jx{(;i!S-~9764SQim5ik5m9TnZUT+DLY-{Pr{xxuE5#^
z`g6C@o$HTW=WLz-qL2?hvnT2MoiJJrprpGa)+;%u==HonJ*d_y?>TJ5Iwqy@@MK~^
zqv|nTl=Yi{(*1?=h!C?;1;$`A+(cyiowH`FW6X9K!hokhP`QWpozwGoEdIHp<)L;W
z40I-BUel7vt=zLwc%hbA
z6CETP9eIq3B}V<<>yDwUyM=uAE!4}w)6d_s-BlNNk`77e2R5jOh05=!;X%TJj|tAY
z&1r&+P#S_)m5K&Zyxx37HeE2sC6R{tnru$m{pa!i@#kBka|a)7)p(y*jkduE8k+^J
z*~8O#qt4h4n04tqS!P<5sb22`&gGx9$^il
z>kq?E^n^@l6f#k3BYmJHF$>#naMyXCb}+mSXm^@hGa>7pDEme_bD`qP5i~sXD#>YD
zht~%^5fiFAgU&KoRoz{<_v7y6O}oqYFG-KKi19i7s)EeHDbb&l#yr0C{&o8H+pdA`
z!ddC%OKXO>H8bUq*}sBYpF$9i*p%pLlrpE7Og)k+rO}z)IPp05QW^P;x+M&u14d-*K32E!a=kYa7%gV{oD+qsmYvZ6<
zpk&m7C;e5%4Q$Vxe+gBEM3Na!i1*(Rx(kqc6s594(UkZI_z~2Ja6z*
zAl!%`Y*|;fbVvqikKuT#HTfF_s+zxgo_vF^w!T6p!vbmGNc0Vz
zfm2hG$3SNC4MH)hX@3X!fn*d|w4+2&u)~hGc*)IGl#6@T*sOJ{yrgx!PQP|f`|{wY
zG>ejyTuNlig;H$5@>)y1kClt$gpE!r-hEV#y0O*FD=nVblD5mw?SK8s`fm3!$=fS}x=f;n|6t{PH$e3%L5e-!P^
zJ~XuG*yB6}b6BY%Fk>vaiNnS%Gwq>+E!{gSjE|Jf)d{A^0~sFv?s7OgDVPw27->e#
z`PP))Yt6uXoBH9jc{8Rk;zbzaJFAkxhdyTPI*;Q|61DOcvGXX`0q7-2iUZE77n5o^0GY6AVBa!VkSFydD&23yh*93&Qo}fm_Sw?ahPJ@uFT~qP`p@Uu<*=0s%+KyK
z1!TJHcp{U*sAT>yM@8Por-sx_Ug@?yd9sAaazdR0A@_T*(cE#u>cg@9%x%+7#m)h~
z;7{GsR?-I4(V{h|b~Ajb_u8QL%H}`T#VjQG$2_kdJ8tVc@|*kvt&qug_=qO~RxE7F
zQFp$*J!mpK%SRFeE(CZNWM-w(iKyKa=c7{Kf~QGa4TGg
zq+4PAi;yKck7+vhepOTr^ON>0_PB<7#8(H59uLg>dN?lDb+28TKVH?^w!o^T<2cTi
zt+RJEf{k=)O_^7w+ZqP51i!l~rAz8Px0AnB;SOUXanFzhvcdfzDjTEVdF2Tp-r-Z@
z7Eh}2#!988U9^4Q&m}|i`p7L2sq@e6RxjL(-Iqek+$-$`hhD}H-i=IMTmKDOeav)j0@Xm1HLkt26Pz1T{8qqopL$ib>qNvlr5)w<1Os_Ojwl*B
z-a3$uyq??=P-aIcGl@6x@FctE#7RQNmJBafWxi~ZX_BL6|FJ2}2N79V0a28e8E#wrC}9(uWqY#;G+zfIbC1E#0lydmyA4D3w~|
zV!f5&}Cj{)q~1L|}*OR|^m5O5I?TR9_#E^R*Nzr5m;wi9G3r(T$Wd%3X8hH7If1
z8weRrbF{lBAJxnxp!QeDw8p2s@m)wkPbQS^uwmJZ6LJASDSRMn?OA5)XLQv6{-Xs?
zT=l%>Hu53GBj6X4Ak3rrxBfAJr$t$IRcFqsu?WZ=@gg@75H<*jXU{k
z2@w>?%R{spxqkzC)6PIZU*Ox*N2N-v+_}Zegf_E0IRi&ZZK#jqEkVrtjMa%K9d*|m
z3pV~eatX&&Aa%i*ieO9;A;%VH6Tp9f5{(F&(Eq@VlN=W0Ik-KnH
zbSR|+0m!%6)fGeo58HA@zeQXUnHQvoeD_zpby=VH4To2`Fx
zYhmC$he5dd{s{5*o7cYydt^ov;+;2-xJDcCJV&+#=x=@5pTdAY%T2BDq6Dk5!EMs5
z^9GO`kYkmFDusW^=5i##roF{3Dh5;@~?!H#mt3XC#V5AB(v1got#|V_-vP}*h6PJxxx{*MwKRQ59uMq
z7NOe2uU{dW6iGTkyVVOHtE|(LY|D(-Ri^hqYzOv>#3R~?Pq>fEaZ8%0R#OGM{COk^
zba{u&eV(YJzEMJKI>XmMYk)I=A@ybwA8L0*uC}eJ3*MJ#yPpn!
z)O>>Okg+w{&Z0kAZY_2d*aO&-M6L$(Q@*WVkuU+Mm`R%@*Q0PR>uDS%Pc3(i#sKF@P
zQF!;zj!mOu6MRtk#RCvaCp&rZ{Yu3z9ylOavA{N;rHYeJUI`vH4w6TKW645Tq)ps}$kNqP`GiaCBt53jn5=JYvv$cpCh_dQ5?A(9H=-U(S9*E^?ja0suZ;-^sH|
zZz9_qjQbqu%1mf817ksdw=V!5ApEm{-m3pS5L*e~RlESCT>#EAeEZS{^R&GljxzjT
zI$A4Zp3M4=u|CYPngb&m8k+2#X}@
z!3NZm9Yy~rq&spkm7By?hrb*67_!*FS`JFSPuTsiP8d
zsZXyKN{UHR!7!S|7)|LYGFKFp%P?0|)RlYn3M5KjD+QF&0;s}h{ARb*src#UI<)d{
z`6vY&SYH6P7+}P}3&6g6$wFtw=EMEIjrp=C;lkb&gHsFLrIfv;gU*xuxJO8oT}hS#
z(T&o4_M5gi`oEtHthwZ$It9mTn<5wVgJ5M{bFEv+
z%CE2PVH}T*&7F-JXHS5obN95KQ1DyAqO286h@`I`c}#HeVT}srFCF4)^5Q5~xAJO{
zrC-YtTMj?ET;3&BN&xq~rwgxt^_z4xf7yDmuizEieXe)kZ%dU4T^Gy|M_0Vrr^~h>
zPmpgC3S?WeFcN5=(nUkAC5Xu#lO(P2v1*?!B|Bxin`q_?K$eE+PczHO`XuGsQ;5lW
zd!@DUsU=0>j*7y|*YnLMpK`r0s4l;u6QJVFHn3jGX%vtgm|vC4Hud_O)C1w7K0j
zNs7`^`J3hfa7f8GqudR8&xm_@{zNLej3)lzga10zP5i0Ct4yhrquWQqmV@SP(f1!q
zq`M}Kje}Opd;V0R`;s0=u(1e!J9|Re&K0JTum@(VyP*w~NT>io-5Mu6A5oveDd8x!;{3nx^#*qVo{MDZuq~ql?%!(5Zq=
z={;n0AWRQB$V~g1iam@OgU>c3{J~UsIE||h$$q#dZ@BSMvuXmgG)-pgMFxQ}tN?QF
znovr=)+l2=1pbs>WuZ_MJ=CaVpgYN@U3*PmOfXrv;Is@8v(NiqFP`DyK3P0;%8`#^
ziU0f`Y`texQ(yEg3Zj701f-Wp5tJ&@q$Maw5fMeC7ZC*!Lj9P(n1tdw%!6``-V<`@moTBP8dXz1Et0tvTmjc6Wpp`6cg<_gs)AO;=5%
zJ!Wi=Lrq<0-s15LI|rG~$BC!YDcNow-yiA@LqhI
z_vs!>;>Bw!Xo5-TiN=i@<>?m_vP1jL#>_mG+9l!&8qx;Qi)IS(x3pJ2&s<$GQ$LKj
zI?8+pq;Whl0m9m`g?YcKx$-W2R<9x?ko70UbrvK#lw6{q-DZc?4URq|I3%FD0I^F$fVB)a~a7HG1{?SMvNc@oh
z`Ge0J*4*2_XNFL?M_D@E5MY4tav5kB@KOb=QF8~a0&XS0<}kWHl9tN?{~!9Y=kJiLxv++z#zCd?jE^SvT3rc
ztW^-><}f=wQa>xZH}N3pkK=I9!kuAGjY>MX>D#6+R1QwT5rJ~)92nQ$#2zjuQ-)zD
z+mEIJVYb(KX=vNZ#)?OX@}JC24K@kJhT^vH0c=)NRl;8n`emZ(TnwUP
zK?X>HQS^|bT!$hETR1nI73{FC(&(e!G;?yM0_pk3U6si}TN*{j1MB
zL7lCf;V%S*u#XzD47U?4NVjQ+**<&Trf<4*To#^@+JhqLl8suSihU6Vb)zS@3m+d1
zI@M(mk!KNv$n)>o?0=6kd>^}ah34`9{_=ucX;mU$C5n7w;oRpLMwAEb6|VB6KO_pQjBo<)X?os*
z-#DjYqHkm{&cmzb18qVR=ee$Pdc0-XelS8TkZanhiDhJJ$+d=m
zG;>874w)18&Q9sDq_S`b&b(~iMJE~q^!m6@C#+3X;Oe^d#bM*Bzn_|Q8
z-uKwF?U2z+2eoWZOcUAYgK1VQwC<-$4PE~XLxWAX)Ea-PsUux
z>Mov*5JEvd;7>ru*qHG7=}k|`9%{O-V)Tvm27{UGmS*+2+qv%?i-+{axF7R>a{~1^
z-~jYTl<@5^!H5yylPDN$?qdgvqgnP_U!d$inuywcJos~@ZiR6yod@%S!T7e?-X(h5
zTc1%TZtEkM1fbpAH4-k&h)47w=WJH9>f*cwhC~ghB4jZ;yOY&5(!~%%m(W*l35^+D
zr4(&9BcQ^HKgymoh5h+uk^x+|gWNF>Z4
z&TII}3+AaZWo_|NV_)0$LeKc`mbNPGJs0$zhLtBK_3gc$0@h1@9V}%7)Wu?65q-m%
zedLT0ZGY0Gqa+8-SoO!u|IGA1+(_sb6-Y(2<`z8k=L&i|s!ceY@?@3LLAXkxJ>|Yf
z_B8pVwnO4*o`SF8!;=&XzHzmX<+_qQzj!3->7yW&*S(XXu*w(5Vr0xcsI7@mSGU3_
ztHge_`;~scfVr5WHs6IWeFYHJ3@(TH%M}}a(R2nc_dmw6MS?W1t7m9B
zd_4}g4S(iQ1t<42?xyIOt7UQ3ztc=5oD|e?&TcFDx|(SVB+ZxESu1ZuAo2yid`~LL
z%UIBt7=3B5%=eEbxQaPm?|{XqaLWh5NfjX~wjpVo6#~7A{g%6=b6u_79wh#;aL0EG
zLd8E`b;)@3*!R|lX5jN!Y)K>l47Y`#Ih9s=VF*05Wn)4mE*vH0#;>w~SD~bFOdoYp)nRNp45)_IQl+`o}I!ou7
zQ9s%`<2HZ4etZipYBh9A%1+4xeTiK_LFD2iE-SIWo4Uj-++lZPNl(ZsNa)qytkYkj
zHce27S-49G;Ewh2`4jPARVPjC?c0Cm`ak{bfApTmGopLD*+DoxuX|-XTkYk{{`i6A
z+LGx)#IZmjui)>Ks}|T9@)fV1zh~``c>%OT)yT(Y-yd$R!T=Q~&kuRW&caMlC+{J_
zv(xvDPN*TLt?j1)!I!~Ldp0O8lwU!&EXn6789?&=U{M$FgABB6(c6)gq&FKZXJKnnO<{^^!
z<4MSvf5Y0++D6m{s1g#bseqU4p0CZqC-kHNTzfDpNiw_{;?bx&UC6094RtqF3DHcC
z{o%v+Y}Rg2KnSvLgii--qd;r>4r>lwT5{4D+QLQR^M5qDs!YmZmHrkN-G6efyUL2!
zXB+%xHv0k+xi9h5CDM#*6bFF>EZvYRalE^}O>i2p83+jN{BQDMRg$`?wTKMN_HN776HJk&V
z+C00(!(MbZFRLkjC*Eu+Ld#MvVx0j1XD@+CV8%m0^nV%5J8Mfsw|j-kltRttXRI24
ztwBN>zwH@qbo=^}fBs8oXvccyYrSE$GB!){9il4mzJDty)y>jH`V3u&%Vdi&bZ1OK
z>_w5<39(6L)5?^dKI;vkJeJvI4PLwBKETw>RQ{lWZk_8-ST5Y3
zn2M$7-@!xTR6ZuTnHR6Elrj&nX#c=yOk>scfknWZ(8v?0@lP@`B7$5raK=z-0A4
z588UBQ|+XT=h{;61M3IbH!Dc8i!GrK#%ylXzT=qdRS==`&~OM}nMUu?VUZsZ^bl7E
zR=*~!>c^8fSbOc?K?|zHXpO-2JfqL!JkhF&y{ubf{r4Y1mk6T=&!fkh_<-_!MUKk7
zg`fwkMDR_dN
zc_qP9zzwM=C@Uj65SoZAsv+K}y+vz~ps_3TN96eewtXhGlMd2FzP6TyT!?0jpp}Eh
zpl(sfmM}BUp<`bWjp7Fj5+U7$X2tdzl!hy9A4+~wZNiZgst#~^zwob^UmWee+
zKTg@eFvS=};=qA}Ik6j!EfSDe5Y7lOU)GT=N0i08b)j%&a6^|>6lmTrzyvSb9fGG%
z5FhfO&njXRfPH4aX&E6U&UgNW@T@i+0f4Zf2>1g^1=%n#2d+MKQp{EE9a1jSEYa%;i?#ST6XG?1fI%YIQ8HG?sN6wgK--2a@3wl#H}DI7ede%w
zdQ!GT$tLnt0p{XUR9^QvV{pPyVI*@S=OSlY^2|=cJ_g@Oa3)&e655w*j0=_A>JMg_*w4SgcCg6?&c<>C7J#|70SRK@>lIby4Y^Ix`+?#J9&n1+t)|0eV
zStkL=DH(UzZVV%QO`%=_67q#B_`z)cOndxODxlM2eEr_6Zp@Ycx#i~)Z-Kcr|U42XM|<>IGtwjR5~{g63?g?_3?!r5Jt#K9^xY05HEKykLlz;
z-TTWBY4g}}ep)(Uz;&g&H~r#z#b;>+h^T*D?-O8`iZca;A<8AmoThB-K@-btHGu)E
zpqS5;bX_pR#J=*YLy1PpLgc&58YaR6gQR}Z3|jtb+WSy#)W`YZ)HQ(-2E#&w@5@#-
zFPYYbs`S0t2jHEbG+J$o`Y`QWe<64*^^(^L4AZoFkc3a^233Y9NruBES9aZC^NL+{
z=_M{jI}c>#a@Bp3w+w@OCmamUBoyjxx{9Tmuj6-4Ep)a#9dHJRG0W8ndo?5zxCsuu
zceW}^hYk{+Y~>j!@|)2!vf2^>P<1
zJ&6v?6Dmfmj_h~ibMH3d$~q4jEnAVfL{lS;Kt&_hm)Wk?_o9~$hq6a8HA7WkcW|d?
z9IdBORq|0bQ|og!Ngz}c{X9oH0OFVm>gBWUu$kay`GB{Z#UrwrUj(!sTDnMzpQ)Ac
z#T2gE1O2}XnhvUj{f4ih;2B(1$2g7tz!7Fa!t9K}+3V1rgU400iMtJ9eevubLfT#e
zZ(g*PS!|PKiX9@cW?jgtgI3&YdQRn2rkEBka{W;RaA>=tDp{TdO${rf^iRvb1vnq=
zWlz4?VAEJt07j@{??0OQnm}*hebs>v`W`MvXeZ`z7))
z6g;z`?{y`lR_1b2tdqyErvm>@5{%e^LkKx%2BZ8rHAr&N=AlIa|u+&
zbUY6sm$-?aAvjI7s`cSo&sK#S3g@en{ocBH4GDiEG?=O++Sy2zhcfxAUY8npoB#Hv
z^l1e35<74myrA+!VE#Z9Ga@Rg#(6JCHHIx^t+vRqc~RBB@j;Tcc|A{u|0Nky&BTuk-d4n&pX2TAaJN}YJ$DbWR(=SVFKlACHOq0jx#
z?KD1+pvhq|#X9e89t1N9I(?BHhjJoG&LOrp
zV2X%luo^+0WD1)(i|&THGD-yuA&8qd{dbto1+E%GJ9)GT(B_jH?Raq8-jjKaC?
zNna+eRs*r{(d{3NXEkjT5I(~|kVpt%Uj*^iwV3Oy)-(mAYA_}2+h6|bj#j%ZbkFo3
z&4;h2P?+8n_`Kbg#Vz!8VabuShX72CGI=~o1>gr!MdNA2lLq3Ji19aZ+#Gs_WI>if
zlLZF~GNY*UJGdo7n1>@IGOsq9&q`PYGKaX8dNovGQuw+}$RE4hZ4ig1)3;p&N3(Qc
zU-5RMaKSX^qSj1m=BKq+y;oNx%ntY%w2Hq^lZ`!obqgKjUL}!psq&GsHEMl`%Yl&{)2Nq%NKv~Dy@toxD*Ibr=;L)f>b^Q#g2dSV;T)D7eX~~V
z977VV6h!1tz%=Zwnz)|x&54Z~*D=StI^q3EvopJ?31k7Qx#j01&>UoY
z0|Bf()Es90Y_aWi9oWF;%jLtk*_&J1eYqyd8VM|OZv2fGX5>D^nR03BSI2!83ty?5
z5I*HQ?_IVPL%j&Bz4HJ)BjPnk2t>3;;xdr@08h__dLvcV9|Y?ASqH`{{OuKln(eku@PnTyGDrDEjpp9t32UB
zoUTDkqfh1H>l}-g;$ZtT%+FNPZ&rk3_Nr_?Z_F=)532=3>b|lOr6kE*7f~zu3~dqA
zD-L841TG8#4E_d(D7l1@T0q2@mGgf!kI>CcBrW-_L%4#S-`YsWd{9e~WtffZrN1(P
zdEk{JeR;5MUr*nqT2<96w3MgPT-y$PBb~S9VDF&P9oh1eu^AFc;;5IkM?H@Efy1;T
z8j+w>gk{YiU}#7W+?}C!|7aS3ZoB6s?Hr=LNmzNj`DroSb%HEkzyi59;fR}j?~}m@
z6NAiW#*@#@we8R=9NHqtLO@zpQl)OdC|whfl^MR~Wum(iXB*|zW^#%~cYTQ}hBgvC&nujs|RG*SF
zcaemWAfQFuUgAhg+^(bM$Nr;Pw+;qi!-qWmFEAW<83HG!pHTbD2BphBs?H#p8DcfT
zEPJ)4b(ox*Cn5^WER-Nyj@$yUy8#+fh{C;lD-%Qy1;#uVRjF%kM*$Bq{`TylPc7P6#1THevMn*{#dKiV)tV3`T1MjC+0fVUnZ)gPwT}P!
zm&{rZS_hV0NSJpyW{UZRU7x?~y@(zLU52b+r-%W?7g{K>`K{wF*WOW_i~#_Xky79D
zsEpt@yAXfN@9J3&Gf)xCTtkACv$Ueb{a}M5lx_veYe4?rFQAsW5`npO1<_%2%nZEp
z>d(J`=L2vrE=a^sFNpwG5J&ied*|o!5ZA2kWWAKY4!A2EdJ{_UsU%JNC-(?1M`W9Z
zs6x$DbKw5zAy!hY5oFFabi2w;xEq4k)~_S*o~b_WY?h_ImZYLcO}U;3*b)5+I)vkB
zk1kLOk~1G>{rD1LVIJW=;@CGblaO!9-0J#t+j^#vg%DSG?R2qV_Mp$kSts%LGtdU(
z=>?Pbea%+Y1Iv|m9r*Gdol>@|?0*Q#A6vy#>+P#u+0yo7Pn5ROhS!#sW*$Bm{&}x$
z1pnk~4`75-zf=}%unlC8r|JV;8ThPsy_#IklJ8`^X+_N06LQdMha-Y68GY_dUVq!a
z(!xRjdF@ahS3$Eu$R5;?EFWpIm
ztcn4YNv0qC2EJ7`+aE4~&QHYdf(CIgvcLt&aAi!OIf;PfDZeM-Tn(5zxUByv9uYuQ$3rgb|4{y0DCo279A-W&
z5@{hWDf9k7ZO6Z?-_3-ma!aQm@PjR-;Uy|r4l0?x+A|@5sEO0xcuc;Lc;!f12W(~w
z6zvbsO^+_#Bw1&??IHPX?b%`eOWC(*E9lFg=z=DpA`$(Ap)O+oiH(#UJZc*QO9A
zK(LynvLLVkUP=&p0a6`=Yaej=dcwQ8<3N;{A#>V~P%mP=ux^=<7@#yFI$T&kRVEPi
zsU6Uzk^KN9)P4=^HyP|0+szsI!S*Y
z=|+;IXz2W(lv77Q-^wkLQb&${i2FcSgSesY8&S%*%;_$6wK1?0@NzsQOaHAwG`jtf
zL#cR-u#X?)$I2;&?@mPTsCK=HI(77h{g64MlZ&a9Vf8pU2;6~P_KWFy`^v{>#L+`J
z?(6q}81pBfuLhdZ>_=bE(RXdWy-Dap)KbMxl{AwQs
z9jwlrMI}v$h3Ze_yIlW@E+lr|A#$yaMBLip7gVUN34Hc9Elvgbr<%D(B4X53YZ9qBg#TsI$qKLfYa
z9>^&sK$kZZy5P4Y01Lqr`_bLog-qhx)Gfwam`;C=%
zWz%Iu<&Np`S-4kyIJ2=lSxngLfHX^;ZX>IaceH_q*vcxBZkDVE*aGp~Yn?2p7Iui5
zDv07LkDtGrct>zOn(d)KLu`?iNCYKhnll77;O!}O9oJzWR|hc
z+Yl3;OuxHcBW(RkA#FVt2Wtot*`|EuKZEAETOYPQL5FL(vX6Te{Dgx&a;%GO%$&p(}#{Z={g
z=GRE7OYZmDM?JZTo#)df1F94ig=wfC5P%dC_i(_3DESAkT$+lP!*nJ2rFGtqcoQT*
zqODUwydK~rL^TfqRDwHT*KAcdS?1|GcEe^StYH2HqtS(w%Ym#$`Z
z#T42M?iUCctVH*@Sf*Yck|&cPYJwMpsvN!|YJKbGhqNj{b4rVV54Sgkm9saq;Q6iO#dfwu^0X-bKc5
z*3+zB2=?t~Ytsc^p<(a`a}Xt0F+@)XesU18f-UUb+2=UOLuTQ%w`{`)IUGp&tDbM$
z5^qW8FiVd_3CmR<12|pSwbr-d!IeI()bhL9l4v3anU&*A=iN|zsPNV2
zCWMmOESY6zv8KrIbq7;*=IklV7C@z{-i1W@1PS3NdlI{kaF``OBR%|0cHmRZP8(+03tvQzy!x8}z-y;hQAYnvPtT+!5WvUh>w&eK
z-wxy<8R6TP;oJ!w4DAkcna!xn%D5(y%REWBq#*`2=Sh3MJW=p^J8XcBc*9~(?7s?_
z8?#E;jBE}nY2BHZ>
z@dWjwQZxoYSFGn`z*noICx+e}gwWguC`Bk6B^Xg1u?ma27Gr{&*{gXAyZpzO+qx`v
zY22Wf<^zEF2+XV|Iz3vu-^NiZSR}_1(Z}S<4yb}FtK|D)-#G|bmLWZ8@Wr}PGQ;B@
zPmK`6;*z^t-{cagb$+^0>p*)b*3ZZG!q#c)ov$t?pU!VSP<9U*W6a>GREd
zA1?j8Cc>rt2%!hi!2o=d3An4D2rD&L5^upJ89v5mts4XIlpV!`dQCFdW2u%9MLPc$
znmg8TLJlkwuOE^*5mvxc2)egoC)SOs@r?wF+W567Un8mH0_kJFwHF%socWl~u66R(
zo-gvcyYn9(BC)yQDe>L;sljR@7V8FL##q9YiHPrpfrtXtIk@54knoO`(&BoWj&t$(
zr0_R-{Q8*c3+@KiB)}|CHkVjsjCVn^=%5|=VVYjQc07rm0de+Ju`2~4lpmLGN&6dx
zzHnIl?lQ1luE&Fg&7rB63~@TEE@36D!qf#wWl2ITG+W2T5&Lv3XcjdGKfW0TC%VMY
zUE?C}Fo1o`p-SMnrvxSeCvQ^UCD_-;|o2jn?|>}BH%NReELd+iKR0oppw`5w%|Icx#47^F!-#{qbymqCd7p
zvPTuk;O&2+fk~ano8s0IvkBjp{bs?wpPKU9!bRk%@5{&)VUHPO5-+iM?Q|$?`U8Hs
zLtQdr?NOad8d#ta)ogeibv`yvV*UA*0y68Zptt&Qez&kj#3!qrk{yT(x*fxo*=}a3
zj>nk-q4cw!ogPf>z1W(dR6k!Tt0zvcFz5~3ktD;$dcY`>eTKf(#{^y}{e|BybYATD
z7Yz02rTJ4`URfkrgyz*)+AIU^8V}?Jiauxn4>zpGEq9R?gU%eFysv$H6LolIgND-`
z@8`W{TzZ+J5n(y35SrVAT3L7Ca-(t}SF-b0hqT)PAIM~bonF3c_oxH?x3$cEpVJ7v
z7kCE<_deh;=4>NzrXtu^dsH?g1PZ$bvJU4@ZMmjqhkfb@y`+{Rt_Lv0ZQg8;6i
zD4fyqT-kqBF|Qj7C|n+v;p=+9iW!>^aW3K22)f}444UhgM^_T2)35)f;kv%!{hcsz
zk>I1znPQ&P_pU}y7j_{p`-VD_CZ18Bk~{riUG|q}WXqJbq>W-8SbouMgr83hi`N_V
z7`ab@;z4BY1VRzf+x-l}0a;12UdrOvv~j5=CJ9ZN2*`vjs9RnTxC=^5L>FDUX>zRq
z(cYWJW6qySEh35AxNf_>a;^wKMj3G6(d4R;S^A!nmCd9XnoZMRQkYL{gLW!`>N@~K
z&totO<_7=1{u0jfxJQ5U?~IpZcfut=Pop$vSU+=+TWcGLd~`Hao_@gLqw=1;N8K6q
z0=4oPg?sWFxPJ+30-veO=+*y#cU914R8KyjVg`<2$oh6fpk)5%{si4T`bR?mm^GnL
zBH$S^fv;Lo?JVPkL%~v`R5{`_R)4B=rgNZhs8Bb2jy5iCGXKl(##ndIPR9%J2qvpW
ze!D7$hTnf-`;vGX;^`@qmassaFiF3mdKwP-wa8uDXntNgK3`k_Ba`F4)fO+!^JMfS
z3t=<<90!i`a8HtnDyHN>!v}*4g8~Z0FRPuuOy^zZ3`~Jn(k=(}1@S|wg-h_hc6jrs
zmnCTCTlSxyH%u%&h@5_9Dlf6YN6UCb&TIa0_t~KXFaAU|NDRM``4bhjQa@S)z$C??
z31!CL*K=ra{bEivoiCo9oQ(*Y1xCZ>7W4>NxPT6O(f9xeA6yqAomOtFGFrZXWjQYA
zrK7w0Sfk#s(j4zF(i4moN=_tA>7wARQ#ISV)Y#NW
zS*wW?f!)RXS^{nJUEHQl>jCK+gSI@mB6k$P}egW<3zMab)eibPOK3fhuJIj>
zGS_z5CbqP7`@`C|y$_?auS}EgmF=eS07K}2=iC^bagOT+!3I{QCh2Z0CBnWb)rx77
zGIZ}vKAwzRCjMkZRKGK7>isrGoqDId=WNi#!Pv{da6aB+yfN@Y>sphV5hR`}m3{p;
z;0Y)6vGHk?O3DCDS6O{B#$<`%>V;ue#Tm8r68C0yx7BuN30c>uK~9;ccgoyf^UI4*
zRSz&vTTO=_GTdeC2$ps~AlXdm&eSe5?7xHFQoX@Ez5Y_6h}0jiaCCEC`>G(!
zW5KpAS7j2X20v_r9Us
z-dvTvZa`?NcoNrH=5cb?r~JBO`2m0GuO%*&V*p)oPu(cg@X}Z8`NX+xK)IWd&p6_F
zjk6N}k>~Bb+-+->vLcgweGkGmAkG^7QyOBV?j)n#i#h!Io6d6ivbV#PpEig$?XRbg
z4elD9^9nQ7eoSU4-<|#e2v7WyPTZCw(}HB6n%6KkwHdW-QS1*H#gQq=`gFX*CSn3h
zH^wv-zT3@iXWyKvf$G`08Gd+V>69cE@aly?Qe9C$w~$7nC%sy!6*`r(-xVlIME})g
z_U&}=Ey`!*gvj}4Rdw~XA&1yd0sq*LG^?v=l2-Ra66302QV-2kRLMGP5PrWx)5#L(
zNr{HZ*!)vF4I{UjsGK;j_W4D|Haf}kh#GW8U2LnEE8)l%@S}jtI#C?6LgqEtw-$qf
zk@nY3)|^{O=2>f6>c)32R9rV(O>HqmD@W%X*8h1tMr_<|o2P!Ahc2c^#1azGf`}LB
zFN^|p3RlU^@n3)EfE!jER-Y0IizQ-kO^@x&|C(dCv#wk6-__GjC_TW|U5!TeEJ@*(
z$^xOjt8C%#-Q$Z`2Y&2}Uaz_-sJ38o8@omCX(*}3s|4e+E+jZ~7SD-T-rrFu>Bz;>
zNejgCPGZCPt}
z;l3Z$73eaDzy^mz1IzsW){EN`>yONyF&_-!Re@GUID*65XN=6b`U4JmlO$%VoWkf7
zM}7X1U17d6jn?ATBYnvd*6M4GtY-GOpe%rrsPm!Alz2n)&NsUu!Gr)e6;nllm!ELj
z9*HXDBU0~gD(3@mPsv^qZ;Xmk%512PduRixYgEv=y2x`i)jc!hv=98W44_We
z;M~km39E-uE=XgSC)bMm_zQAk25Hspk7s
zKe363tvLAg{@r#w#qEaUtiw)v`*B>gR
z<+DV)!PwGj?{!7ZX4`9Z`?_D{?7{Ylr(|@HBm?5^El`AKAa4ND>i7A%Qxjf9`yKFQ
zYs|giBeuv|-#3bs*%E6*0qVJe=1qXl`}jX{MfisjBUiTlpls22W=iQlnoeQ1$Og~IsG?Yv*wW>``{xBV47ww&zZ;T6V0$Xxp7cdLj21kOqKN1JZDtr~Sv
zdXVO;*CHsVmgykelY}_>{ln_R*|WBbH+b*-AYvXRSy0c#HjsA*7=odh8^DwxW8aG`
z@;<0K4tHkgNKGwUGTNW`JE_;CwC?iF0nu&(B(FcAa_KmRm2hMo*d=>@z0Lh|4`!0C
z_5G$+#easNS4;?7EoSj-Lhzg8Q>^ef)wc0+9qK@zpCwhf7ZRcgN)|d~(*)ofyQz(z
zYP7n6H@--oDp`;L$Mj{aGky`;UjqY&M)bz%Vh+5C&b{^*D0n?xfECqdDCI
z{`rX7^l1W*Ulia~jfO<2=f$06J|?FA96)xCaD858gY$XPw2mFXCcl=qqUsy^u%rX8
zQSXuar*m>jU3@XdwSWB~l=pF`t`;9>X|}%kT?u%*?((S{hi)6q`s>iKDCv6^pT>4f
zUn4`7^y0ly=w~cqa-jd_Cq(vp(--B;C;^W*fh;$7IX}d09_M1dQ@&7c05S|a$bZtp
zheTlSD~}|M0NsjT`v|La+auJP{g}TED#phv!ExJYwZ-pMxZQN?v(IH3_&Pp<+OAdJ
zfntT{dbG%<;H6cn9CegRLsTgErQTZ1n!Co-kcQF66(9rH2;sS&z>w-X)`u6HydU=T
ze9G?T{K&pE9)z`sm!k?&dmA5S1vtJGvvz~9D+{G;*H)&!U7K^t684Qs=cD-sm(w1I
z$E^77&8?JuQ1$dna%r8`G(oT`C$R4{#-4H7HmLbLP1C;|?tZ_3w&O!$Bu`M-J2fG(
z6;wAWq4B$4CgH~wV}fU#*&U&cusPwa>-U?{r&?YZ`rb1%oDJ
z-UYJVudAx63D$9iK`-mZUwdk%&7e(lu1Pmss~)pdFMrwZliQkBZMH!IM2@oPu}n8!
z9+3PR4!-B462L{Rne3$v=>v5jQr~9t&7dmsULMu?fAsB9$T%3~dnV<#z?kpD@Wmc4
zmOw~2E_+q5u;ON(yNg~1_&}(@4*|VcstbEqoNcl+)i_|(5&^iyKQ-FEGoZF95&aNd<
z>hcj}l#G=0C08$g#m>HeM33wo1{S|3YPyL0hBasTH~;
zY9}S9Rm)y;o>~kEY3B3X0%?;p?v*UIS0%gemm}g5o;|?Pb
z<(h}U8M?!m|9yroA8>~5LE8ikZSeIx8QOLwo=3Ngc^uzKCNYrlCZykHUXf{h9Lh9%
zYBT{BpQsbaT0pNjMH2C|@*YWWxbkja_-M83z0b#VCW&u>H}t#>p7lDP7lZ$naXMdh
z{rt=(Y@Y>4#z~YSpu0L!O;)DFf%!*P?C96Cm#_{~u}^mExYsS6dA}v1UFtbh>3ttH
z=c%4xe*?doW$8&(OUc%5RvA0K`Gd4;y`5RC8FwbtwlT~!*2)o}0W4vk{`69H~
ze(SqqexKj3U51P+klrBu;Vv=HuUAZO>baHP?08v^T)y|T&ZTg
z!CDu*J?3O|yFoO72l6c0W!girCMA%o<3=>Tc7>dLx!UM#P{n)c7!QQEem
zrx=*^D%oVUJ*SV8QZ=NQ2H7F(_h=2jpH3&sIQ#_~bHCC5Xq;@VE6v_$tlbgY^V5Ql
zP|^~@|3Kal{@c#J9>l9gqUy*AClJa1aj5KsAFQ)@^-zPNF?2`~&36S|mwUV-@-T9J-uc*=+p3
zPi+>Xu>FtIguR7%a67~RAZI^3Zr(>74Y(^Hy>_)h0L!G^2lcweh5!tOLxXlxh(7Ck
zrx4*Crd4TKDtc7n})ySCmTkQ;D?9oZQ_0PLw61E++T5duq`YLY!Fa~7NTpoT}GOF^$R}-Y95!D|{*iITf9WtF%rszKkm%dR7@w&9i1%9N3
zyUhi&V&5@jPWx^s$E9-rn>zN}u>Yae1LO;_eFHO~d;ML_##ioMptxa+$^H52ak*0#
zTJIRd(u+iFy>Jal18Dy21GgtypA?Aba9k&gfPs0H-i6C=agH
zXEjnztvNE;qa)EEXJ7c$QRPO+*C4o0fK>7s#E~RF*E;r(#&~_XX&EH4QQ?q^hploKC%_m2QnIfxgY7hv(+A$N{LaZot;a2b|7)4GweC3hSSu*{
zdZ~?K4)}v|7g;o-A3xJI$Hegx-^Kb+;UCRSts5SHt^+XSw|PPYS*O$luVaTxT>$>#
zGWTj8y8i3CXsL6y%~Y!4RvV+})sU%7HRZmbAtXDQ6EE2X5c6wH&ikbeJgJL7@3Lq2
zdx)wGi9Jm4Ogj72?0Mbss6ubvl6jP&=&`*89!(gbe1^F27QkWv;eWNP=*F_JbH-WB
zwPoohux>yeonV@Du$s7YtNRpcr37?^dfNdKhH!+;Np{&wHd{PRx5W!JcDuUsO+C%H
zzRqk0G~g5sLhi3d{;53_)Uw)1oCjlFr2MxXl!C@VICI$SGQl(HBxYF{#I-9D9mm;z
zD74TPPk**lkt3Vh=YFXUB<<9%44G9W;3;ME&?kg;$`3iXa)CqexSw=;JbqsH!$G$w
z`X;LxxqU#x{mnuD4+lr05b&%EC^6s*b_GS>$ucokEs;Sxhh3fmReXn$!=gew#9Jz{
zf;0>|x#4;LXac1F=#zWk31vXw0UV4xP%Uer?<#B#w$eTG@!gl`Uo(WnYwO+3GORlG
zhLyakP{b0{^z-W2hg*dqh?U4?P>>oiiYi8(LGdF=*4vr`Nk5D|itYbo(YW#>ZruFF
zqfo{2Bj>^;8p@psr5F~Kc?~1!>p9UEE}nnX!}StJ^E6A6Zp4cSr`|mY5dJ&*izP&ykFK)}j47jO;xtSmwyW=(F1;B2nk`LbSIo
z7zGD-=g{2dYIop_>NeXfQI*^{&`27)#}yeGeLd&g|NM$guy(6|Ezol+&kPBPimdb
z}Vj-raHfwcUChXE6S&tVG!*nq&}f{w`22z~Iq6!J~?2
zwb+0=wW8y_KS{0r0Q(*e`TiO#O$i1wksa{N@2Beu+fZDGa?00b_YbN8Q-zZ0-zu1Z
z&_Pq@dDU)>o{00GIx)L$m$rx~0v13xK?&4L7WnZ2g280Fa1Qw;SqY9>V_(_E
zn&Y`_u9ACOjqxf8H$O$`ig!hte73t^(d+auwNP_tZz3V#<24$`mARX5P
zh^0YOuA6hl{^w&Wby@ey6*<(vUNmg!Bf)OIoDw-R7muR&`f0q5BBa=`b;|RZ3IWka
zoGQF)-?DdhrXN`Ka@DtsMmT3&`;?F=De+A{t&^O7DE;k$9Lh^W@pKM5gG%&_J$J^oFvV
z>pRkPxK5(KIb8|OMTD0-e8B&H`PMxnr(X;DLQ^$;$IqKI^7XYGG*yqWOGQnWTW%!%
zu0TK07OF+!Y$W-ZW<))k&(x|5l
z_pM@vUT^Nl2yd~ozj*teCWmd^vhMA!Y7oCLov>K5Fu$D}$qughtN6`KUb6FBnp=X3
z`^@*h`AX4h26Bvx#zP}f4W)>u?)qL
zSsDd;?fj!(&1O~JU~Y;sXUI5VaU+aTnv^p&v}~{p`22qIEtQnj?97QiU@CZ@SCqK&U+Oq%V|f9z+#KqISuv(leW`L
zxT+O2#8dnq)R%hEID|VV&Tcl+V-)L^~>jpH6!#)96W@Nf*lxdyJK9
z`9gKwJ*JD-WWW3`uHHJV$v$oy$0DRbN=jPkl-^XNL_kpKP`XQzA%h7@HwY*QQyMAB
z(E~;!pmej5(i`X)u?@B8_^?{obA5I8tw*R@^WPk+wyXvC*zc0@atSc!aVcq8ETwSI^qw4m5O;S)t)$LnHg>ALrd&12;u
za6>}++Son<2VeDpmqFc|uz^iKEU!-(5JJ`vwKdI@7*Vjhex(m_+G5rDF{@4qMa_o`
za9&a@*i9w@b`{`jQ-{B^vTMeKjDUOaOoZ?W+n)=jH4|HikAhJl)b~@o?RU^%B!-}j)Qy^XE6RIV897bbteW>@#n
z%0%6iq=ME;wjCtG5SFi0^GOBd(5MZuP5l$=9?4ko5T5Ilo&6D
zP3Ys&I~|4?V>vpMCW-f^Pw?mmh|)AQd&!ckRZo|$$-Z<^k<)MRKhrD#qSs@PrYDR>
zD)|i{R(o&Qw5_GNCcRhIgM-$acQ2ULcf25Rqw$kkhQE-a$?%)+1}WfMf%FP}XUR!{
z{G4teluc_dFuNA%a?LyAG_95w^7re3625Lpc$)1&qONTjTl$#b#
za;LKT{u`%h@h`P_Oj(;GDDDCZstaasTbzfy`RkED0Q2%TI_~1*#PRp;CNs7dBj`p-
zA#b%O4Xm~kuYWO-y>)aNIq*33;saBsS{z`E{ApEu8yI5YYF@j3u`O;LO402B3E2cPpZ4xozd#j9Xn}c(|9Fx><`Ra
zzv#uWm#>VwP0HyQ=Sej)phw6<`qn{XMgGoz9co_WnQHue9b&x3@vgzzb9zErz=pPb
z>2|Pp_O^xmY1y^!6Vy6Fw#}dbOl&%P)3Y*~PXsF~^{fD9x
z&TO|rpw$J|nf9Ufg*y`qOEy*`!RqSl-=gL%QaAnd^eAa1%8!tj!sMYgq%^W7#2%kU
zV1GHh|DrqH$a_!8;&Nuz4@2fSi(;E7agz_wii#3CYG>2>hU6;%eSV(66j1;iB^@Zi
z%Wc>CZR$p2vxOZ5lWqNKew`_DO5bdq9yR_M+dI?4cm?WBz8>*64C9`CU~ie(OEv`OzdH|M
z!yVF-2TIA|9jrg4HMB7Ca2o5mB|>K3X_5Kl7iKmF@5|a%ThJ0|8e`$$+1HByP;iCu
z5Y{ld#B1D^1od-uj~b$~iA;=!I@;D)hyAQLNk7T{+LBg*fo3-676&cc`dv0*L@coY
z;T@JibRE}l2Ws<+r0D7nNhk%jw;GkqPnbY)n1x+!#nI2qDRJ5CH8rM<;-zX)AJ>wc
zGQzNT-=iq4Tg11}pB<30Yit@Rzd*uCV_LY)g!#TihHBrCz#=6~xa$!D6NKK|XhD-C
zej#T^k2}Fvi6J=OUL>M_Z-jR`W#1><`;IM@Mf
zwVG{d+7s*P1O;v0mJdb8*A*W+a}fuH-R;*`qVg07+ko6+4dfgjOsEdU&)9z!=;)vEuoZJ)=|Tp}
z_=NPOLy+)w4yuIFr{6?kHYSXe^s@CqpE9vw@7QNY~b
z6#b%Q=XW4xTe@vHyG+rB=GO6f&-)hpVoerKb;0*zeozm|LMCfRgcA0}H5PzEjke7k
zi^5w>gvkKHk4=PDpk;C>8Eg%X^T}xUa*Hs{CWYBcD!?zBu(#EOqCvkaMy#q+Ew!9*mKR
zKbeQ`?M;7!vJrAH)&Q@*+w~_u-PX*o%ien$3pXWMCNuN}f}LE0IRXR}WZ5FKl=ckA
zMgdq5jE*cvl*O$g`xv9omfw~W^1GClG#h>uS}Iv=rQP#o%VpB5!u&%4`Iz|aY8`{;
zG8hfFrwAMaTse@~Xy&`w2}~3xW0qq*SCg3_s#4u{yZAH<55aZoZ+=e;^j0a3@*fIZ
zZb|*Mg7s70=fVQY?fboGdTz99QU%e#6C-ocdV4;qvOs@pfyOy)XH`QFq@$z%WkHbL
zd~p^m12P7xza_#ra5dO|gR5sXQ53!^6BTwF@|esvlb~^>q+5HRl=vIvX{En)KNg(#
z?2gmRJIOa?xaCJU@;jI-V>H}xaC|nQ6_ZTA2z^Z$`wIC4xvzgacU4_ezHeDVJu6(w
zbanqrxHGcZz^x#%s5#iCl#Y6xF9?&kLX_D4!ylI?fJ@5t{yJwjduruW#|nEb+C-hq
z10^3GvI>aD_pK^lJ?|&eY@hRljdZ92gg5}h4K?}JFOWPzteRk8KCVQPcse0i*HZsx
zNouL6VX6L`u2jQbP=_Ozl|Jv4yoZ^`wfDn}NUb12kGva%%2jU#i>bD2(~lr6Tit5f
zn~8u}vDRnN+}h`!z7p2Kb-wWZqOPFS!~&J0llMGR)GE~kai#pZum!8YW-|XU5+!OyZppWwJBDft4H3-H~{zDND6l>J3L;?6b>{t>=%}90o
zFsFYgN8x0T#!>%q_3Fa$}q#xaBky||#=V-=G)uY_T`snxlacPjjY
zGB-K4-lAG5RH4Vbn6@lXF>P7NRFGTQiJdyMsH=(Ph|OapBLvkm@AG-Y)3tEf`ZQr;emPxS^>CEx
zA^%VZ0(b+!(6UQm9rz(#e@)-J<5n-47ED?EL7?s}YGq!|Xa?r7^er@>Us7f+`lPPy
z<-(v-_a5mOphF~nBf!w4lRo2^Y=F}=y74-D_39Y=uK#ui$M_%3SIa|&_kt<=!s&-~
zc>?wvaD~;JnR?SoFBDUC#r}ZqQc)t|j06cWBFkYH3~M~Xio#z%N#^L6C+Z`nE@#KN
zjTMg-SQU
zGj8)SVl}JdynS9`Uj&6}E-jj6$rIB=vWt6V5r>w8URSC!o;CaBdAD#LsctlrJQv6u
z0Lu)kG#H(W1h720aGpb74bS3XcW)TB+yWD(zs3+++RnOZqU&hj^V>#!+Qn
zv`o5{yBcp}{#$QD;~#@eO8-f8
zu?@0q35oxhTbQjQbS0iOg^J?Y`HAXg(i4ReQcfEf%ijk;X?!Evq|R*!Ni*66Z%{25
zBRyHQUzEx+;^^Ww5Np~0VMvhfbB6q-9nZ7ORF>HC69Ytbv$sN4l+izXq(&&2I8>TdU*bvm{c|J(HWb+~G
z0+lA!=jzWex8%7KJpUxvp;Bz^u*-gd+W-gaAsO`O%ft{hl_pT7?)u^nvzP~6H7@R{
z8L+hsY7SRCd{yC9xzVp22bFDW!IH~`wgVI+LUa*ALSlX@_=<&x-Ftg&hJOXl$;_mr
zHl=eTg<>c`MX=HovN22n>m4Pw|XVr--m^y7`dxfEfoGe?R`jN=`^+$jQ>{d7GM27YjXbwi9h9oKvbhR9mL
z*4+SMQjY(^XB{BT2ZYZRnT`XnEfQV!H|8=Df};Vr4lWdl0s0y5B+efLDB69QMpE4#
z{P!QK|3s>f)x9_7!f`9a>r*EkY5=Oa1^}aa4SV?{bW!?oCnP!thBep#K85HxNS{7PzB+uO+tofJ^z{2LCz=xE&i>VYw}RkGt&MO+B*0iS94Y
zGhk{Ikp)~u2{<0;FWcyW{SH;&II?Sqkc!%1-Abea{7OS;7XQ(|df+Sn>4857{@@NE
zxvgdop#H&E2`E#-G!!UOqP~zm!2n&`QFe~ogBNrGgx*KuNVg05;Kw`S5|z@@_YKyQ
zaQPi!$3Zq?;V+PsJ>72F!tQn6%7a{grX*{9hf~3I;()>t!YH)-j2gJJzZWu^AYYY-
z`qB_2^B8@vBq|7K7?OaE-o{Ws&lSKnlm69n_1;=PM-px`(3VGkA}_r0P5Jg1JE1Z-
z3SJkn)i3ADl_7GcV?xDH%3#jX%zcWE2JFa>30hcxy)UWMp8FO70~iRP84I3k13odO
z|5Rhk|5J@E`Kubcl{^F|h*J@-0fRksek{7w5zwUN(GVaN>H#M?>s)Ie?r|>j9~e;i
z+!_D_3VC!wD@#1gTI^DN3vKSyMura5(j~l2fu&X&a}oMY3CU$P1Y~da)AtCIa_!60
zjl;|@#|E1pT|kyO67R3iXbK*P?@^i;xmiN`QP-VIW*gLLSL)>!uGM_~aP-=V@Aa8E
ztbI&(|X+DKdCIEMzM4YVCu`SSLBy~CW;ez@G_3p7NJ9zO2c84czN$d^E7e6HN1
z;b%fSoGF|<=P!F5-9a}Z<~yz06HM>n-~wE?+k{U*1-w+8pr-OG%_N6`&-bZ$ch>ie<_5KDo&1_4YOUCA^VK)BZU
zn+7J&L5AGIXyH_FYM85!7BFlENOQ{f?SJnxj39x(@l*#Cd^3Shbwj3R4c%&IO+x{^
zdh+E@j^w+6CTgj_ONgwO$TUzu+}Cq05X^(bDDugZNBW76?dMhiK8ktd|Kg*7oMw?3
zEHWG#35o{{;MIJ<-*!2N6%EcK;ehYEbwXsLlp)*<`-q{JC0+y3cVG)VDDG+K0+sltQ
zZsA)PzcT1gZs{-2Yd$`c(dmTPUHczdL{?=oO+zqnqz`?$8&>*(nuaj}R_6~4S;z1}
zv*iEnVopO({7=n+XY|*N=RsiPtEL(pq*kmGfuGDYwl57Jpnys$U{yfsgk8dgg-j
z1q6O!U`~p^;}5aSW7<61d01rRN2b7u$Pqpblv{pA2fUIy5Z7kNS4H>}HsjIjVXVe9
zyy~91tG@gEz}EiXXU>>~*!$P>_<}_MbtwAG9Q(*jly-Ic3Scp$v2P_#1H!wh1qjH!V8g?)MNW7br>~cb|8g#)ugR1`wJDn
z52Iw(_}L+Oee_0DA@NHr2Ie0N5-sn!XrDffmsL?w(fR~!-jdm})!8Il(?Tj%n++TB
zX6vW7(J!wS!tIP7Tij;R3N7vZs!M0Gyr_d4^cyws(x|1|;u8Dh__>mDywdyI#Gqi+
z>vD660ADjoSoV*H&)#DuiQPyWBtW9aQqnJ%2M0cZ1FH^w;F)>Ax+H)9wT{S>B;Wlo
z75HW4w-W}8z^UoSXDt}c%%1|zzf_|g{?}N8J}&rzD^{=%#t0&vfB!g{_X2K)%xX_C
zgaP$4z)SQ$^|SF`ZQlP$0B)&=9~%>SK4J_VSEVblK^ULtLVNw&b@`Bn!bwRRs@g>1
zpF*RH5ev&NjX0mlY2fVNcPQXQ8m6=O5SZ3rxIVVD={7q{T4iwNqGcHBLWaYh;6+5QuRFMtLfG$x2g
z6IpMA2_l*4=np<&U_|0)T)|G*LthMhZ_PmpG$04BAK5%Ow?9{Z6*R^$z`nKHPh^!L
z(?G!JPbVE3xgb?wrGqgMT&GbA#&D6x&Z*`=GkIYX$~nk=t=jz~pf?c24sHCJpe2S9`X7zUNZcV~<{W?%N)HUM;Xdh@e?u3xYSMm-VL!*6W}2_$JzSX!t}
zB@fE*LS?2kl*$b-?ye1=Zt73%yP97Reza2=;n&Hhi)eSyFE05oANuM32c!P^kT`NI
zU>3PxtdX}po~i{`bkJ~qXAyE_&!Jt^m*|h_nIHptG@j}@a~sFX4HI%=mzne>EwWE#
z$PBjG+}70*9HuK3B2=-jBt-Z7$s+c{XVWgwQexT9dV5;kFBKCzbXi|teSjvOf7p;c;va{hKZW;Wfe`nQ#+mlJuzO7$^LMs2z7_zJ}gD9>l=+1#Y%++)n{bP>#Tz
z?NpbLjjE@JF0x|Xgqh~Wm9AH>+oz^5g4REm+Nr!8ap;!=U?7L<0>RKlGf02~oEfsY
z^@O15y`~Sk0j!@4av5_JrCTedFH?#+*Pj!+0xCxruTbT#NIP*Nu_j1@$W7p8T$Tvx
zPwEKUK>V-hkZqW}5owR&e|COxj+Did^O3l;;-Q)4GkUtVaj5WQvRYs&a7-Nk=Mr~(
zd23)$RvU!k$r*M7
zGJ`iK7^27Kg1Y1vJjkMPow_|}^TZ8^b?xdj;~dgrOISJa#vF*k0KEP%IW
z1?w^acJvBI){qUvi}sh2uK+8!UJ|RwePcu@(6IWwOO;W#hurr8QtquSBv7$;HCrL$
ziPkcSGGLz&Wc`@?l{#JJH_(BCBw_0K>p+2W@iT*}q#kXWo@8)&*Wj1Rv3w-@J@Q&1
z0dcPQi!3+gqj-Nh5KttTQc6@t3CDYKv@}|>i0W)|&<<-)(cD-k_HQy0RL#5v$8-o<
zr5@BGd9169b=GO--^cJihdGUaz~7DxK@>oE8Up7tPioCNzB);2%^;a8S7qcDb_)-i
z8!4IZZ~#r*1}x1(6)b(5p?U2X*qX>VceW%i^KlXaG
z9m6S3dV)tE&1RdfSihD7OE2=C?qA{fvC~p>#270Ly@Jy+e2Y@cimdfEH=td9U!ax6XFqQ-OP%$8lWz(E4zyoZu
zVT)0}kB{I{t#bcRWC+>bDo!$68iHfJ_+#4is~Hn_S9x`~(Cwg#`T5n#Yq9p6g;
z6a1CCm>e+O-E!RvlMTng_-cTU=!MvshMl!8{?Xmj=4X(Fh~8Jw;V+Q%&aL^+|ra57BJl3=O9CY`IO!$=?c
zZAtf~&x173PmZkQI-Y>j#n+DTWTGZkxeLq$Wq^hNX8Pnz$kuMrdK;gen6JSSWAFHh
zgdvJWYI*{OD6+LbvMNIm*+3*o31NK_aqRqDFc(gt^u9EAiDFXvzGdI;E#jEMzSemw
zwCcP`GSz1EJPV92fdQTn@bW8_P*CR9nil{9;%qEwy3t{sSb96L7
z3_wAvo-jATdIlW9N~m4a#~#M%w9QY}NbILr=c)*@KXu?{9q7`Ps*k*kHV1Oke-X4csQ(Vk
zyR|{WJk$Z|PReP5(*y1ucq(uYpymFtZDa-b?)ukB9|2~V3tUPPPQDt=81C%@x~hw|
z_N8nrbY>wG0E{i0d-F`-wP{8f*E!=8L~w57amBaA$xpt`_ric)CHAolC!u;xCZ2c+
ztGsH(c)`+!)|T&V`I>dvSfLKggFDp^F@c1y#i$y&SKTby;lTuf&OOX%A;Yf5Az;`79CfwqNY%Z^$;|S_xrpWPQ
zL=SRa@DQMq=>z}#kKU`|5Xe+=I(T7Sh-FcN2~HOU=W4rf9%5WXNbYXIOuh^?TEOIU
z#AtZa{a_aTJoIyzE=^~rGlIY5V)OPy6yH?|(~0I^V60a4?7Ym}W$2S}7{=Qg(&w#n
z{IH>QQqkS3a6%w^)K=%#jmXLak-(3J%QmaP9I~}9l28iIhwyF5`D>UQAaz}8+bH;8;%G7?@^K^GwqYDN)nA@_Z7?C
z(5>UW86BT8?LAc@u6$fe(3|nut9fUOQrSu#1i~di%kj7eiKwbUvty&XPunh(LGCu(
zMx@EG(Ch9;Yt;hFgic-kTF^YWV<9J;XUJ>Ux6q>;U{!mN
z623wAr(kEY09$@x3UY3NyAsvl@BVdu&U;cYZleH^v8-A3WlC#HD&ddnSXnpM0Bj2viv!)Vr)6e+QrN1g{c)d>gRYKHxNz{**Kqw&U
zgrleazYv&pJ
zs8(HCK1ZKdGjunXj0yi^xV0xxy=_maA%hiwxmz6mPUP#Gri4ErcxC&}??0#Lt6ij7
zcX2&+3Vtu4u2C5oDj%UnK_5LQ*`us}R&umdd&}X6(A=Dp38(b;YP}7ob
zVxX?6vwB68J>pl`=Kj2_-!~9`?t~Mb)eTaGy8dVjDfk5(29ci!^*Lj-#=^>3?((pr
zn-@h!nG|%cE58s@fR4*{daoab&97h|A1*NB*WjqvjD5DOnBtu0*p^LO!#}wh@*)c1x|?X6@{`uL3n+mh!BW5WQ1Hj?6Zna+7oOdltNGAPEh_=|xTLLzb6TIXe{`)XUT<`v$P#9{%f4vAT@dT>oGosN@142rAuUDIRmwcBse6f53Xa7!ULS%|4QCQhdL{Q2o
zJjywVq*}&sBQIs|lW#&za9h39io1*n%d$9d@^69+akmghStlH!Q1e4C+^?qAX9)dV
z4}BrrM!Fgc>qg+us){gUao6q6xnDIQepU-PwdRRzd}w@d@s0!K9yB+&D)JjA9ueRv
zTWhE7`~2eFk9?en;0m?80=A#=`z)QyvqXuX4YmQ+5}bz(p;ofHSKdp{`hUZGt9ojQ
zL%euHbIE@re_ZL8qCu#&Nn9vs$h*Tb;1Y4JZ+)k^t1q}~RH7ihciNr-O7RkT^;Zp0
zt3%#CfG8O=bsy36NX}+Z^7#1y%Y$ZP&n~R3@NUa?QD$5VFxJra3c4eFqa}rci~rVd
zd}ZtDADT8*u_&UEbzne&Vy)#3|
z>mMsH(JXAw;5d~Mg9{-AT8N}jYrVvR_qkcDhRtx^kQaIkD;7UA9+!UuU2km!J3+@b
zcYYQ$&2=UkOXWtFKMu0n<0vNhEHDzY-S;`Amwxg%5-EM8LzeSnMlEMV)3
zdTqFw%ED$Su`eZ-Nh9`ZW|VqtJAKqpIi@31jn4Nfc_#1HglG~VtHfY1d!~tx^Xp&i
z7y5@nXufrfju?RDPcCvJ@~+-2bmmGM^<7%jF;;OK6T*(&NSf-lhYDe{dT#QY0H6vh
zU=!vV%VOER(6>x?)*3y>+Ptwsy)E=V-KLjMzo3G%ZI?waHBxg8s1EECW^AmKtl|2L
zgbW8^08S_R1*l8==5#=d$PDQAXXsNxJlZt-RXgu;kyjd}+{%X7JGzah&1*=_qmY?E
z3_K=}9an#kSN)NNbSG&)V{m@A-}jKw-*Pf>wDi~O_sa_Pcas}{*L;&*EtcT@ffWyI
zRdg8oJthRRpv(I{Xb`K@jpFZWb7wLWvNyK;f6?3Qxlo}`R8{?3qGo_@L!Lfr+3(5UC=jFQf3_701YXi(d
z+Lvqj4L^JxTRXKb&XL)A!fluhqOc!L-&1%kCF3To12YDC%kGD7!50-x0$0Jz^GiEH
zAk%F%`ggeY-imfC{L!q$id5OiHZ%)?%Q_Xc8ZciI8PcHz6sN6*#iLw@YUtQ?AZLaW<1l$ibH(
zaME7_(E#c*OACQpZg!(-&Fg>kX4aA-_}Jz6iH|1R-P0Hqu+Tm%66Px9+dIDj?*S*1
zrP`*II9&|NeHkoP3>acd1_~8DG8bY#eBnCgA@;0SJ@tE2B-1|@3+Is*s3T<%<*uCx
zE!8XYGyPHZUY|&JQzw?*?{*)`Q?c>kw)XvQ4v<{!jTAxDAI10nL%|HS2AJ%7e&YBV
zs%$jkJ-(pcAtFB%3i;7l(4R*yQ;1g=YBSE^c-%_kyYuNT{AN=Th@>!U>}Y6DgWQm`
zTDF&dgv(g}is0f&kX`rxl+;&D!9Y>^tTAdi*d>n5JQp=|=e*~p4ZKU+(CGWcI4!|j
zU5a@xof{9=e_yNU)QACZ#lcntGdi}b#Ma>ZB3MSeGqw!ZzkdZ6kLq9;j~j4knfar)
zPdtim1|SgE%`WAZaalK?9#
ztyi@}vRa#_?HMV&vu$iVIsoBZ5kCY#YnIuwc8CLs7@Ck{*rXnAnjQM!Aehp2E2B_?%Yp5|8>3DR>Zh+lao7@k5J3)WY-A{dNq!^wvPXN~
z<*B=9iX)*Yr4F&$*D4a}sKF325q@LcJR|
zAqL~#j_H`gewk}_g!t~Gg*U0&Cpmc`IlS+n;k2cCQy;u;4u)^)GXB10pA8ga(I{|C
zm;fmiKr%&lN_<^mj7Bm`{*f}*YD^(zuFhLX-*7K_%KbgY=6(VVh3$ii5{UTfyh8~8
z`yh@KdQ)!CTlYD(fmRN<
zIHUZWUqI65+>(v$g+G-tY;zU!?G=rUX()yYgIQhq{m>F&N-i||lF<*w_x{(ZFuBWHy>|)w$K;4_
z{OpcDY5(3fvJ$3VpkLa9ql+iCL){u_JGD9joIGn+c7ORd-CC{P@wjakTcyAm%$a#mM*4jCZU#t#0WtF{5O)Ur*ILEnekF6DLPH5`fc!U-~mds-MN
zI~i&shcOG$bVj-=ZG5P|`?`t6MzibHn&7M}j5n}D70owTWH&TuF)Ku-k~L(Xtzj5|
zcLj5m*qZIrr7!HNCHA!jZ+@b$mP{AC=m&C^`oR$XBvcCv>^mu+i-ZkdC&3dnxUf4X
z_S3l{u`(L^l?E?(L<(L~ERUX9h$Y_<1N~M0#9udhN8~B$aAnp
z-+eYs_&X3Q@j~>y@%q+~m<9t{L#r9<%LXjfpX_L3^3CJv1YSzY@5v7lTk)oV+wh0J
zB$YxXDj>7uaWy0=m+?`%f#=dCl(C%r;8jZ3DiwIc2;)SZKjag29J#bikc
zzhOgN6M!{d`XhC3ZEv5uOY!V>hIlU%eVx2U(Z>%*j;U|>p3ljaKgd|RS>={Nb*&oTiXHY-adB4RftDe5G+9yE#?gwi
zFTEL!qsB+)E?QeH+^^}s$`BX#`3m0@Wn{_ntdD+!XMDY5Mo50q`@912g5-@K{%@4t
zr^$OJGspBibvH66T@BK@nIG3Be=qK48AX2F&Hvv&ugDCVr-S=<7z$x6{`S&Dpidc5
zOCDLFsyx_lWA(0p-kq$^DV+dF{)=-ZinC}lpXOQ4UADHIalpj53{btbzaBOP!e$6s1rS=edSaC2iH32a3@LxyZrk09qm`j@3hh&I@&ePTL!
z?0V^B{UQC_EK&SNwG!tufSpm{1wQM;=}=CQG7shalR|Nw4W%s*7w&c|rgcR5(Ig9g
z*KAx7zPF^ZWTP)JZ^d*sf_nDUUuW)0rqZJxCt^?b58Nu$sI4aUK+Ao?-bP$2IK=PA
z>7_|FAGP0`ZPD>T4M1CaEF0!UQoz5%QgwOjJ{q?>G{V-uCRHV_+PWIGCaEW?y2cjG
z%Lm1*3_H80Fcpe@{}Rs#Z<8sWC-9LcdifZdn(ABMHDx+|%4z}BBx7%ZLoGQY7%M~j
z5MdVx$)&x|aojzRcR*~7?Y{fA^iO)(tEKyN-m45Irl=mRqa45cNUR#q^P_!Zz>fCu
z!`K&=Gdgu&JlVU?G5=5`JhM-?o>&iZW@RzXc)_(;tNqD!>!DC7uZ5+UU#ohkF)9n(
zAQrx$Lr}+rxIrxothv0}Ze8xMY?-?jrT5H*TR)I1(MY+LZD7}4qbsO*=SOIXag)4w
zE0FNW{_3?1{{2iBW7lwgYo~>b#DzQ}D8gB)+Spr8!l7EBUkEMzHc4H3f$>t(t054^
z57bT8nct{;8rW=Dt5_{mSI5v(BbEI6cf9o1
zNY4IiHo+!?JTY+p+R&YQ`HJ(#8YiVyTzb@cDDTRw`x)H^V8c3s^?#H
zt-NQCI`0+AoukIzC`l?^Zhu1UpG5sN`)$teG}Xsmsi4Q7HbP-%lOqCboYYh!hs!x-
zrl$nD*A!)ns$YA6(1ht**a&aNz9nIzFhKnH_Z5(KD43dp#?TH=3
zn7x;77#<&CIHBdCZjzC#fLKfh`uL~#C#0UyVy*G7Nl8e@5zs={>qAsEGtn5AQBV67
z61_@i^=_odddH-^R@dE4BSAPBS0S@F8=f2op
z|IpEFIgcM7#`OR%?_X2ne{Mx&!+|?FaNsDggYRXCxZ&&Z)p5_IYeOdhDm*IDCyJmq
z)W~r7(a%U?i~9m*mzPrN`h+1e^FX~g#Uto@q7bE+pLbz^`6W%2QB4;@B6|0biU6!|
z)AP?e)l5)yoP)L&C&<5H~wCWr`@H2v>re&_=n;c)8aCbdP2j*zodfh3^yZLv(B_|vCjsLYYODq
z@3)C-F%E|YukP-8g6SZE(pw#|n~l0nvzFH~hAB$i2UL01mfOK7K9;FAMvOTQ-s8*L
zrj~uDcy!#v_v?a|5Y?&clh9sTy4)dIPPxYkPc>ikcL?E%_Qs*&Smn2Y`hSFzsFK*+
z)B2-y=IC?;Pt690sw^qxMxI|Mwpf(;1b}3)uS&!s8tmVIq?*6QdbeqncpyyOmA`Dp
z9jr(S9#e3qa|em>HE$((8o(c6N6znl*%h4oKs_mupZS{yK3<3g_dWN=&X{ALfY`=;
z*4yRhbd~llbd0iiMZ8T?(Qy5?*?LGp9?uH9@UtP56}3VSRj1wY5ol^^s$1l8NO|ui
zr?Q?Vx4-?AYNRQx*}8vz5kx~!#zprAods6R_a|0m8UFD0{rDzD0Tpm0{X@rmFyN9m
zRbIP9k0QXpPgEsTml}wE#0ide(%xEc#Uf%A`;s=L;v>3F6kiBOD&KmlLU+k~*A^Jw
zUs<)K?UN~ix%MiIuvJ`2q4U#Yihbu=CG1`C0;m8caRb#-6Tfq;HBEs%6eCD}E*@2S
zYIMPO&Ey6z;WF!}M3s^xLQyl>mY|9o##`WwJ{In*G)DK7doJldXhL95qKE=Goj>*8
z>Iw~o%^y`nYS!XnXL~Wr2?u`pq~W%0w#h0?FDe0)>Mr*fZ2<4aFyny`5a5jm_%6
zK9eZzz3<$7M3-tH?04#BH|kHj&?_8$;x8%i#&mxTNk`^cS{cZbsjJC`i9VtGao-}+
zs~ED|e^g~)-iEvV5yy&xvk)1-j<|>ByW^>hCt0^>{C$PHGLY?)l#eI*UpJVN?i`YZ
z!p32IWI>=+&jRq(fajI_!xY+ZX4R_$m=>H!-`9Als_Z)X70X3dX3xw({TmNIiG3O)
zqk=hNT&yW|g9+r-f2i+^q|tqEa|#00@1_lfTA_1(HC0j77X`_gXRY-R5`3LEpqswc&2MghmAIM
zBZ=;g*LI|zn?DM37N$%1y=}K}E=1JFA|ip^0GUYOv@*V%c`&orAy06*R4%ps-q1Nn
zBQq=I)R}@CojdW$NG*K(SSR)xAOrm2<7^%?r9Sai9tcqE-3NEQj*gA)ruR?}5sRsR
z9R&>Rg^#(J<|N+csip23e{wx!MA=%QFx5o1sgCO(9TD|7aHLYQ=)$0Crh$7^;eAsT
zlMRi^u@mhO#J)P#`X1>kFrt*c%emHSQzWt16i|thF
z+>`hU$Q;Xw5~QynM)E9-y^_ogRmE|{dI@0<-(j%#3v58z-&3R}vl384?`sF!jd;O9
z4HgW>lR$mC*Lk*Uh04EkCj
z(c4T#F)WVq(AiPe`oP5{eTCx%v^|kcy$4>CRJjVCi#Y7hOd;s@U}p(weVYpgIo6(1
zcCj}NoK~WM-`82nf2H4VW>s)RI-zNf{$%t@>Bfb%b?3IZ@UMACo{p9B)-oj;d
zslAGIE&Rn#Z)Z>cYs;9$z!&drJxa-iX->0T7V^;xUgYrZtRCZ2CeS*ctA+jitFFsL
zRDP=+t&govE;PK3UtQS0o;aKFv>>qANuJa3(o2tVHWQga6wi53G}zlWVHr$tM#G}7
zpY+3H!Wi1IGSNP5$!wnVF@b3nPKh!p%)S~XJ+E|mzne2Nr~S-(f3HMV>;-F|{Wr$m
zLrPc|e+eOd)%WTk2Fyl$fa9&4hNH`EQ*Aw!!ShT&CNXPS#zNZ@A<^2jHr$dqU}pAS
ztP5y|<;fD#F@s9C0u5*p69<$9!nGcD+It*vx+7Z@PUdJf@BRn|)!wiiSeGV93Hk_|
zwRav2_$jj%U}zezAvr^4C&88M3=0Ow6-)Hzd
z>X3&tgTO*Z#LKWBR#jOz?ru2N_;6mF-t!6JR?w<-4w7b;prwDTZTz35pXuAU2KL1dVX_f#S9t{e|vAPuwc7@YJN_1rw7$ZIe%<;~%hO<0?vZ21#Ge}yd|Of1^SZ$te|550c0iJZuFv)>cUOfYz|ptm
z+yfx3cJRl*_|5v!