diff --git a/README.md b/README.md
new file mode 100644
index 00000000..4ac7aecc
--- /dev/null
+++ b/README.md
@@ -0,0 +1 @@
+# BLIP
diff --git a/configs/bert_config.json b/configs/bert_config.json
new file mode 100644
index 00000000..9b0a67d2
--- /dev/null
+++ b/configs/bert_config.json
@@ -0,0 +1,21 @@
+{
+  "architectures": [
+    "BertForMaskedLM"
+  ],
+  "attention_probs_dropout_prob": 0.1,
+  "hidden_act": "gelu",
+  "hidden_dropout_prob": 0.1,
+  "hidden_size": 768,
+  "initializer_range": 0.02,
+  "intermediate_size": 3072,
+  "layer_norm_eps": 1e-12,
+  "max_position_embeddings": 512,
+  "model_type": "bert",
+  "num_attention_heads": 12,
+  "num_hidden_layers": 12,
+  "pad_token_id": 0,
+  "type_vocab_size": 2,
+  "vocab_size": 30522,
+  "encoder_width": 768,
+  "add_cross_attention": true
+}
\ No newline at end of file
diff --git a/configs/caption_coco.yaml b/configs/caption_coco.yaml
new file mode 100644
index 00000000..b398665c
--- /dev/null
+++ b/configs/caption_coco.yaml
@@ -0,0 +1,33 @@
+image_root: '/export/share/datasets/vision/coco/images/'
+ann_root: 'annotation'
+coco_gt_root: 'annotation/coco_gt'
+
+# set pretrained as a file path or an url
+pretrained: 'https://storage.googleapis.com/sfr-vision-language-research/BLIP/models/model*_base_caption.pth'
+
+# size of vit model; base or large
+vit: 'base'
+vit_grad_ckpt: False
+vit_ckpt_layer: 0
+batch_size: 32
+init_lr: 1e-5
+
+# vit: 'large'
+# vit_grad_ckpt: True
+# vit_ckpt_layer: 5
+# batch_size: 16
+# init_lr: 2e-6
+
+image_size: 384
+
+# generation configs
+max_length: 20
+min_length: 5
+num_beams: 3
+prompt: 'a picture of '
+
+# optimizer
+weight_decay: 0.05
+min_lr: 0
+max_epoch: 5
+
diff --git a/configs/med_config.json b/configs/med_config.json
new file mode 100644
index 00000000..0ffad0a6
--- /dev/null
+++ b/configs/med_config.json
@@ -0,0 +1,21 @@
+{
+  "architectures": [
+    "BertModel"
+  ],
+  "attention_probs_dropout_prob": 0.1,
+  "hidden_act": "gelu",
+  "hidden_dropout_prob": 0.1,
+  "hidden_size": 768,
+  "initializer_range": 0.02,
+  "intermediate_size": 3072,
+  "layer_norm_eps": 1e-12,
+  "max_position_embeddings": 512,
+  "model_type": "bert",
+  "num_attention_heads": 12,
+  "num_hidden_layers": 12,
+  "pad_token_id": 0,
+  "type_vocab_size": 2,
+  "vocab_size": 30524,
+  "encoder_width": 768,
+  "add_cross_attention": true
+}
diff --git a/configs/nlvr.yaml b/configs/nlvr.yaml
new file mode 100644
index 00000000..2d1122aa
--- /dev/null
+++ b/configs/nlvr.yaml
@@ -0,0 +1,21 @@
+image_root: '/export/share/datasets/vision/NLVR2/'
+ann_root: 'annotation'
+
+# set pretrained as a file path or an url
+pretrained: 'https://storage.googleapis.com/sfr-vision-language-research/BLIP/models/model_base_nlvr.pth'
+
+#size of vit model; base or large
+vit: 'base'
+batch_size_train: 16
+batch_size_test: 64
+vit_grad_ckpt: False
+vit_ckpt_layer: 0
+max_epoch: 15
+
+image_size: 384
+
+# optimizer
+weight_decay: 0.05
+init_lr: 3e-5
+min_lr: 0
+
diff --git a/configs/nocaps.yaml b/configs/nocaps.yaml
new file mode 100644
index 00000000..27bb1153
--- /dev/null
+++ b/configs/nocaps.yaml
@@ -0,0 +1,15 @@
+image_root: '/export/share/datasets/vision/nocaps/'
+ann_root: 'annotation'
+
+# set pretrained as a file path or an url
+pretrained: 'https://storage.googleapis.com/sfr-vision-language-research/BLIP/models/model*_base_caption.pth'
+
+vit: 'base'
+batch_size: 32
+
+image_size: 384
+
+max_length: 20
+min_length: 5
+num_beams: 3
+prompt: 'a picture of '
\ No newline at end of file
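An aside on the two JSON files above, not part of the diff itself: they are Hugging Face-style BERT configs, and the demo notebook later in this diff pins `transformers==4.15.0`, so they can be loaded directly. A minimal sketch, assuming the configs sit under `configs/` as committed; the path and the printed fields are illustrative:

```python
# Sketch only (not part of the diff): materialize one of the JSON configs
# above with Hugging Face transformers, which the demo notebook pins (4.15.0).
from transformers import BertConfig

config = BertConfig.from_json_file('configs/med_config.json')

# Standard BERT fields come through typed as usual...
assert config.hidden_size == 768 and config.num_hidden_layers == 12

# ...and non-standard keys are simply kept as attributes on the config.
print(config.add_cross_attention)  # True: enables cross-attention layers
print(config.encoder_width)        # 768: extra key present in both files
print(config.vocab_size)           # 30524 here vs. 30522 in bert_config.json
```

Apart from the architecture name, the two files differ only in vocabulary size (30524 vs. 30522); the two extra slots presumably reserve special tokens, though nothing in this diff says so.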
diff --git a/configs/pretrain.yaml b/configs/pretrain.yaml
new file mode 100644
index 00000000..02355ee0
--- /dev/null
+++ b/configs/pretrain.yaml
@@ -0,0 +1,27 @@
+train_file: ['/export/share/junnan-li/VL_pretrain/annotation/coco_karpathy_train.json',
+             '/export/share/junnan-li/VL_pretrain/annotation/vg_caption.json',
+            ]
+laion_path: ''
+
+# size of vit model; base or large
+vit: 'base'
+vit_grad_ckpt: False
+vit_ckpt_layer: 0
+
+image_size: 224
+batch_size: 75
+
+queue_size: 57600
+alpha: 0.4
+
+# optimizer
+weight_decay: 0.05
+init_lr: 3e-4
+min_lr: 1e-6
+warmup_lr: 1e-6
+lr_decay_rate: 0.9
+max_epoch: 20
+warmup_steps: 3000
+
+
+
diff --git a/configs/retrieval_coco.yaml b/configs/retrieval_coco.yaml
new file mode 100644
index 00000000..a8569e9b
--- /dev/null
+++ b/configs/retrieval_coco.yaml
@@ -0,0 +1,34 @@
+image_root: '/export/share/datasets/vision/coco/images/'
+ann_root: 'annotation'
+dataset: 'coco'
+
+# set pretrained as a file path or an url
+pretrained: 'https://storage.googleapis.com/sfr-vision-language-research/BLIP/models/model_base_retrieval_coco.pth'
+
+# size of vit model; base or large
+
+vit: 'base'
+batch_size_train: 32
+batch_size_test: 64
+vit_grad_ckpt: True
+vit_ckpt_layer: 4
+init_lr: 1e-5
+
+# vit: 'large'
+# batch_size_train: 16
+# batch_size_test: 32
+# vit_grad_ckpt: True
+# vit_ckpt_layer: 12
+# init_lr: 5e-6
+
+image_size: 384
+queue_size: 57600
+alpha: 0.4
+k_test: 256
+negative_all_rank: True
+
+# optimizer
+weight_decay: 0.05
+min_lr: 0
+max_epoch: 6
+
diff --git a/configs/retrieval_flickr.yaml b/configs/retrieval_flickr.yaml
new file mode 100644
index 00000000..d75ea4ee
--- /dev/null
+++ b/configs/retrieval_flickr.yaml
@@ -0,0 +1,34 @@
+image_root: '/export/share/datasets/vision/flickr30k/'
+ann_root: 'annotation'
+dataset: 'flickr'
+
+# set pretrained as a file path or an url
+pretrained: 'https://storage.googleapis.com/sfr-vision-language-research/BLIP/models/model_base_retrieval_flickr.pth'
+
+# size of vit model; base or large
+
+vit: 'base'
+batch_size_train: 32
+batch_size_test: 64
+vit_grad_ckpt: True
+vit_ckpt_layer: 4
+init_lr: 1e-5
+
+# vit: 'large'
+# batch_size_train: 16
+# batch_size_test: 32
+# vit_grad_ckpt: True
+# vit_ckpt_layer: 10
+# init_lr: 5e-6
+
+image_size: 384
+queue_size: 57600
+alpha: 0.4
+k_test: 128
+negative_all_rank: False
+
+# optimizer
+weight_decay: 0.05
+min_lr: 0
+max_epoch: 6
+
diff --git a/configs/vqa.yaml b/configs/vqa.yaml
new file mode 100644
index 00000000..118f3968
--- /dev/null
+++ b/configs/vqa.yaml
@@ -0,0 +1,25 @@
+vqa_root: '/export/share/datasets/vision/VQA/Images/mscoco/' #followed by train2014/
+vg_root: '/export/share/datasets/vision/visual-genome/' #followed by image/
+train_files: ['vqa_train','vqa_val','vg_qa']
+ann_root: 'annotation'
+
+# set pretrained as a file path or an url
+pretrained: 'https://storage.googleapis.com/sfr-vision-language-research/BLIP/models/model*_vqa.pth'
+
+# size of vit model; base or large
+vit: 'base'
+batch_size_train: 16
+batch_size_test: 32
+vit_grad_ckpt: False
+vit_ckpt_layer: 0
+init_lr: 2e-5
+
+image_size: 480
+
+k_test: 128
+inference: 'rank'
+
+# optimizer
+weight_decay: 0.05
+min_lr: 0
+max_epoch: 10
\ No newline at end of file
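All the YAML files above follow the same layout: data roots and a `pretrained` checkpoint path or URL first, ViT size and batch settings next, optimizer settings last. A minimal sketch of how such a file might be consumed, assuming a PyYAML-based entry point; the flag name and the prints are illustrative, not code from this commit:

```python
# Sketch only (not part of the diff): load one of the YAML configs above.
import argparse

import yaml  # PyYAML

parser = argparse.ArgumentParser()
parser.add_argument('--config', default='./configs/caption_coco.yaml')
args = parser.parse_args()

with open(args.config) as f:
    config = yaml.safe_load(f)

# PyYAML implements YAML 1.1, whose float pattern requires a decimal point:
# '0.05' loads as a float, but '1e-5' (as init_lr is written above) loads
# as the *string* '1e-5'. Cast before handing values to an optimizer.
init_lr = float(config['init_lr'])
weight_decay = float(config['weight_decay'])

print(config['vit'], config['image_size'], init_lr, weight_decay)
```

Note also the literal `*` left in several `pretrained` URLs (`model*_base_caption.pth`, `model*_vqa.pth`): `yaml.safe_load` keeps it verbatim inside the string, so any substitution would have to happen in training code not shown in this diff.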
diff --git a/demo.ipynb b/demo.ipynb
new file mode 100644
index 00000000..8dd5efdd
--- /dev/null
+++ b/demo.ipynb
@@ -0,0 +1,173 @@
+{
+ "cells": [
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "cbcb066b",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "import sys\n",
+    "if 'google.colab' in sys.modules:\n",
+    "    print('Running in Colab.')\n",
+    "    !pip3 install transformers==4.15.0 timm==0.4.12 fairscale==0.4.4\n",
+    "    !git clone https://github.com/\n",
+    "    sys.path.append('./BLIP')"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 2,
+   "id": "a811a65f",
+   "metadata": {},
+   "outputs": [
+    {
+     "data": {
+      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAZkAAAERCAIAAAAmJE0s... <remainder of the base64-encoded PNG cell output omitted>
3sBV8eQ0TtDmabrwzWyE5FkYAZFcwO88JVzvnYsami+qsKCQ4GZ16pEfBiGbrv1hEerjayOlEilKBGGZijFZFoRmRmFagIKVbUGCFWsHDv2THR5dXWxvcokZ4vVYrFAQiBesSOkPsv1OFWiG6/Xl9uk2NZ1AUpmICp9h7dLJmAGlYlVCFG01BROly0YkmO5PfAaluIYWc0AvPevv/7wxdNPp+r4cdq/XQUCqjB99Ti9f23xSsNLVUKAq79nUyCNiK3mhM37B26aG+/f6HfDq2wHKZ91w6fPL333MmX9yhce/thb+fhoTexBNaeR2ZkZMZlqSZmdM5O5vahmwKyAPE8rAcGA4HZ6O/eUPAVkdr4CmjmlCgiqxZAAUXLJOZUiROZ2lz/zzS+tv/RT+8wBzHtUywKwiG4aJKv9+f/F//Kttx7uD+P17nC8Wh0Mtjc3YJrGw/X1tfPh5OT0p/7H/9NVjL/6D//usm2bGFwV6TB5pznJ1fX2yeP363tf8qFCxFyMiU7WyxADIU5ZRpUXLy8ff/Tp9777by4vz9/6wjeHwe6/dnLz8tmzZx8hWJGyXh+98eaXi2LF8uknH7pYf/jhH3T9wTufUY7uPLhz996rZx8d+t1HH7+72+8WdTtOQ0pTd7jebi+GsfPUPB3Lj90cPrkqL89fnXxtuViucHaAlbkcYoSI6NXAea9p7F89Kal33hEySpbDjZJ3zlMMrl65WJkaoM3eczYFxzZHsVVn2xk5h2zeVUykt4yYUrSUsdexszyhiRJzuyYfzABAZexQbQa364xmUQUrIFm1ICGwR1eBqqmATiTJ0kHYAxo1i3DyICdBK+y95uzqNSKUNOVcLByjS7er+RmyhDNzQcEEQVkFGAyMzGBmN/va5q6e49sMMCBImTmGgAaSkL1qgdy7r/7kz3vnhr5z7BbLdaiqkrObBSQiaRyYGJFdDPViiYgnx2fuq9/a31yzD4vVJga/vXzlnavaRb1YE7mUhpLSZIZInt2hlG7opSRAakL0VdOGoJp3+90wdMGHQNQftuM4MbLmjGmkcWgWaxPor66V2DnWgqOxKTXGvl7cPQJxQQyLUpl6VDlqatJMlQcXssKQpuhCqOq2ik3w6IMAMrGYjlMiROTYlxKZO9BU5Gq/y0jgXBknAPRhiHWzWSxuQULEIYTgVghmyJOI5rLvume73cV+XLfu6sWrk36MMfbjUFVNNsylRB8kjc+3h27osqEbxxhi9Bydr2NoY2TiQOCZprHf9TkpdCKZHNnIRG27dN4RYDElMAPsS7nc7urgv/Gzv/jOxy8vdtZs8Orpi5Ojesr6u58cxlKC9yd3TtatP6odg0getEjJxYDIuaaJ+fLJzYC/MybvKn1AN+fXcLk/ev2LVzl8573nP/+teLJZAXMZeiKaKwczrweJUVFV2QEYuhAQqah48HMaUXJvwEjOEBwTMpOL5AMSATmVQsSWRhUpU0o5pZTLOLx51Lz9C3+sLO6/3I02Q0GQqV40r3158YPfmkL8C//5/+onfuE/zNNQez4MaZqujtatIZy/uhyHwTkvxs8vb7Zjqr/+R77+5LMXjz/YTSM6x0ya0BSGMZ1//MFXHn5lP+m847s57IvkWiukoEhg7uWzV9/5zV9++vTjdrGum+Xzjz949vgH3/v2vwjeNU2zWh3df+3No+NHd+6dffKD33XVcrt9udtvGclUT45ONQ2//E/+wTR23WFnoIToQ61qpmUaDjdXr1QklWKGn24lMF/suzuydfUZIUgRBCo557GbeZBg6hzr0E/TQOwhruZMBnJwVWOSy9gROwPVPIFKRmeSLScixlihC3OnhtDlKQEYmPCM1YMZps6uWkG9QlQUAWJyQcBEVHNvUwemIJmrJbioalAy9FeWBrxlLs6vMEQXKdSiCshURsm9pIMr2biyPJJMYFpcAF9hmTBNMBcfgQ0KipoWY0ZXAwITIoCUEVMCMLFCIMAR6hNkgDIgGYAAIviIYmAArgJ2BgAcwDnU5B688UVCGscBFKq6mlMOjEg4P8UVAGWauv31R+9+r++HsZSjzVEbw2LVENjFxYvzzz4JzrsqrjbrYZo++exxnlL0PhA/unt2yDZOw9LzZ7vegNaVf3C0QZOLy0tGmLheBm95Qna1j5umparxxzyJ3YzTNA4EOpia2SBFDK4lN3VdhYhS2KgK7mpMU5qW7NbtYlnFUFUK2A+9AC0Wy1jdLrVGtf6wn4ZhyhkQqqreDSOoYbX0zjnNr61bpTDlMo3jMHaYBrdoqqqO3hc1A8ySVbVY6qd8GMeU89Fyeff4xPtoaJ4IiIlYRD0gIxxVDpt1Xi7NxBEbspnC1BEYkTktqaTtNJWSV3VT0BHbpm4pNkWFEEfV0nWeeZ5iFPDAvlluHGodN2//3J94/tvvLtL23aefre48ODk7fbG/+Pobpw/urTfLJWhOXT+OhWJImCn6XASR6tohTFevtnS5LWFp7cKub8LyxNXrL9yHd955+hvffefP/sKPu3bpmpbMkJzkiXkwCVIyec+30ikzEzNl9iKCkmkexjMbM6qSC/DvYsyoqkhODYBc6vo8TUTYaHr4+tlrX//Wi53imNq68oRmBuzQB/q5P12G/ps/9vOnX/vJYTjEqlHVwHR107188eqtR3fiwxgZdtubKefQtIaOfNh89ZvDzauqtK/2O7UZYW59Lu+988Nv/MKfWbaLGGNV1cE7QptyFsk+xPe+//t//f/6X58//5TZ37332tgfxn734Ye/b8WKMBJ97cd//o2vfKvbXk+7C/WL1dHpJ5++Owd4mqZlxo8fvw+mSDQDIA3MeY9W+1ABkshty9qAP9qVOtKLJy++/PT96Y3XJfWSE6qaiaohE5FDNLOi5OLpa8RhNmYisa8qxzRtOymifU8pu7kHPUsBspiOOPbkAoIwe65q4OB8mDOdNputDGcWCztnmk0NVHA8aEkmBfIAJatkRiuSAQzjkmNtq7sqtwYA+jxFBeQECcMCJNm0w2qFAABMxBZagorII1rK2VEEh2AG7MBXCAgqoMWsgClqMSUlh65FXgEYlsmsABFJRjEEgnxAm8AAmcFF4AryYLkAobEAknHlrl6et03rQ8wyXby8Rsm1ZyPH3msppuWw314//URLrpkXhKDu/NNPCbVZrg/dbuq2R+v14EMeu8uriz5lBV5UTQW6T2V3GKu6Pjs9VYQDxMvtwZXsyiTkzo5PHpyc7ARTyf1uq0DH6yPPGBBzKaDCTFVwAbUfevZ+vbkTm1pEgMn5aFPWadAyrtfr6yHtlDFpmrbp5gY45JTbumLvnWNn2k/D1ZiHcRQF9JGR9mMualUVHbn9MIJk33NwucsqKmrmCbth3I3TTOIf1FA1hjBXENZNo1JASlGx6eCZ2qquqipHPvRDVgjtyjEqzO4oRyqjlMvd/ub6YspjYH+yWs0bSSXcjTk41pIIbdlUXEV2rApZlQCq4M1sbuWPPnd9txvG7fbmLt10lqqqPrpzLzA09zb3NrXL47gtRDgLkovO8BWIMQBA6kcfw+v3j6rgzl9d
7z/eHvpJq8PQ7f7tD93+6c3dH3szpYlCIB8EIMSavJ/2BYD8/GI3I2LH/jbHi+TYgYGYOl+DqomwC+Tj/MGRaQJCIGdIuWgZ+3HoEfGY8Zs/+2PV8cMffnx+KHN5mJrg2LngHOBwUPran/2LLFPevcpcPX/84T/57/5e3493H77eHbqf+aM/s7r78B//f/7hB+/+EE0fPrj35a99442vffPRl7754v0f4OUrj+Hm0GdTEVHVTz592t+8XL3+TTUbRfb9UEo2FUZ88ezV/+u/+b89efJxuzz6yZ/8o/ury6HfPn/+yTiO3nlQrJslhoWrV1+7d/JvfvlXYs3f/94feF+B6aJdBOf6vlu0G2TYby8BAJEJzPuwrJvlehPrlY81u0DsHdp2TD2gvEo/b6n03TgNCkqfi2JR84w2LNMEasTOiMBUi6CnkpMkVY5AUfOkZhYbIJt1nRiAwc8qH+crjjV7x84je2JCRClFBVALmgCQDTuUCSnI1KtmkmSpFxHk4AhFFKZ+hrpYPqALBGBA6Cub9SU+zLQbMDNQq4/moKuogAgRKTAR5GnCkkRGnGngwiATEBkFJCIjxw6oRWZmRFBNk4hhaBFtvsMCKPganDOw2SMDJoAMVkAJRGEY5tid+9f/v7/PDF96801kV4os23aLMys8PX/+0jkfm6YOIfi2WJ6mUdkYNIl98P47ovnOZll7Wh4f90Uurrc+1Ow8mPr15tQ5IicEB0Ao4jQ/OF6tqlhHTqKrdkEhrl1QKceLBQIs23Z2L1VIG+fvGpgpmopokTIqHobRFLAkV/I4TVpEy7TZHKfQwDiI6S6rlbJo3M1hS6lrHdKiLRQJyknroK5cjOSclVszbp9S33eNI/D15ZCdDkg8qdUuTJJ311f9NGUgIm+AMfhlSzXDwjue5Ssq0fliWAcOwRUzINe2i0DovBtzEbWis+nLkcmd1fLuqk2lgEHlOEsx9gY4pakgQsC9SHdz49mxj+QDAILJrtt772OInsik+FhdvXyBu5eboLFavf2Tf5QZbz78/QhlGPpC4J3z3s2liJQzwMxKuu0/SC7MdrxwARdTksNQPXt5c/jshxwXy+Byv3hyvnt0T0O7YA4qBYnYR5MsSRkJHftQiYgUmUvJMOd8VCSPZkDOz1RFdqyqn2PD3bDfD/udATjQ+8vw1Z/48cTLdz86nww2y5X3XgymoTs/P0ez46N1RAlZC3ntdz9857f+h7/3dwNKVdfP37sci/zzF49f/8KXXz158vTZc0T47Pn5t7/7u8GHt7/ytdfvHHXPngUEU90Pg2f2SMM4/Z2//tf/Z/+b/zJu7u2319vrK9OyXC481r/yT39lv99/7cf/+J/8j/+Tj37/D7rQXZw/7vqOCEspq2V7enbv+uKqv3z+nR+8eO3LX/mVf/Q3h6Hz3r/+6MulTPvdflXXm5P7aiVPaRwnRCSwtl2dbI6JPFeL28cZc9vUw25IRoehOHKr9SqmmM3mDPlsd56LxrS6HR/D50Ua5NsuOqcxj8Ps8Z05DszsyELdGEcznWtuapBN8zQ5ljmwZKIGBsNB06B5smFLhNAeIzCAOQ5YOZt6nXqTAX0FHM2KTj1wgJRMM7nAUDi2iL7kvpSCWsD0toFjOpvxzDQXQwwpJyuCJaFMqAKzrcIKIEO1Ih8IBKcBLKMVI9MsyA4wIDYUIoYGrAEsaAgYwXsEtrkKLdkE0AUkNh1ABhh7960vfTGlqWTZ3uyPlos6uIRB03Sz2ykgx2a9WoMPOI3DKG272qw3R6tlVjH9Uqii996xB3bTNJ6tjwSAEQODC7UPFRE6RhFlxHJ6bECHLAIo/e7l9bXqzKIW7zgi5b5ZVXWoqoxw2N3kImaWp7GtayO3P/QvL15RHttYZcD9OPi6GVO+mc65WpY0DiV75op9Uov1IqE92/c7fQm+AtM7y6VzBCKTFBCrQhBEUQvOLdtGDJtpVL0NYRvQkJLnEIIGJCQuop6MiMkxxhBi9FUVmImwGNTM7OZQLs34VNES2Clq5TAlrUIoEZBJSp5KSYL7vk/jNAzbtmlOj9aOnUrZDtPVYUi5Z5689wSW8uQQYwh1dIsq6OFm7LZpf13l66ubq2e7y8W9t149/hEOB79sRdTjrPs00LmkSqVkREwpmWioQiBPaCzFg1ZNWK+a1bL+6NMXl/3u63e//D9vHvyd84uqTXd5UqfgonOeQtBCM7waiATRmJEYkUUF1FBSnqeQhBxrQAMT8jX5WFIau25K16+eP9VS7p+d/ti3vr65//r7zy4fP/3+7uYqVtVyfeKqpomRykCau2H0IawWbT/um4Zf7ftv/+t/VZEuqqqO3nzss/ZD+vi9d771xa+ebja/9t3fVhBmzirv/MHvvQMUg28cVI7VUFTR4VTs+z/88Ff/wd/+y//F/64+Oz09OUGwMpZf/dXvHt1/9Jd//hdXdXj3O/82mRYZh7FnZlXxIWyONl03vH7i3/nOr3G1eO/d7xwOu8Vic3J0YmDPXzz92ld+8mi1On39S1cvX5SSxrFTVef85uy145NTRDp57QvoXb+9MpOT4+UnNy+YcMzQ9aVuWnCO5hhETjruoGRgz6FysTbzmoaSshZRAMgTMhMhqHKshJBUHRqAmmRABnbsfc55ypncDGj1M+lUzDQlVYE8levnMB6IAKQYEcK1X96BUElJhIHa2ppjm/Yz/WZWiwE7kAKSQhUpzK+JnFJCACsJAM1H5QgqlkbQDLPBAAcgIheUHOICEcAF4IBgxM6YAVBMgUaarnUaQTO4CJYIQceMWmGlJqoqiI68J8cmCWZArAsQm7lHaQlxMvDBFaKTVXNTcOFj3Taxqk/rUPk1vXa367p916uIQVrf2TTN/cVyhc6pmRhKSePQ39zcpJTBeURkF5IZmuSc+6vrBMTMIUYiAiAR7XOWXBxCxcjMqchUspgZJKeWxjL4zsUoHMDUezdNGQwvr/ZFxIdwtjpuyKrAh34cciaCo9Vyl2UoChwdMBK5tq68q1pkdknVz50pxF3Wsc+Qd0O/W4TA3hcpZuaIur05FwBpvVp2WVIaSTWYrGo/FVu1rQt+yAJaHFFb19H74FxRBYAspSJSsDRNkQmJRbWUlMZegSZRQozeZSnETtQEyJD7PF50fR6nKU2HnJqmOfQ3Hk3Is/fLqgIpklPwTHULzpN3ijQZUKiG8w8/e/f7F88vhm4s17v67G5omjIwETvngCBLYSMgmFJmvvVFIigwIWIM4XbcwTwOQ2D36P7xogk/+PDVU67+7zcXzw32z+kXGw0yxSURO1MD5xEM2Kkomakau2Dk5iAyIrhZN6UiJUuevK/AZzWahn5/6M6fv+x2u7ffePitr30hnL7xsoOq3dy7S+vlkXfoXABmQJoyW2g9VZfb3bNXF2sPp9b98j/5Z8Or58fLitiJCMIU0FngMZcP3nvn7v2Hf+HP/Mnf+L3f++zpS3bzGwXEdD9ZnzUwOgRNUnt3vKp/+9vfXa7+2l/+X/9vP/vs/Le/81vf/re/6erNycMvxqrF8ZrbTRk+e/rpB94
H57yIrFdLBHaEl88+6Ic8XDw7f/7pZnX05S9/48mTD5988LipG3bOOe9CvTo+m7qL3dW5qVZVc//Rm3fu3T/sdymNIimXpJLRHRs6BBGzKZUgue8POWdVKeNYxg6tBB9inSUnIE79DpBcqKJjMGMw59iIBZyUZCIGAERspgCTUlBj5xV5ZjUTM4KZqszspvlDevyAStE8zFhwAFDnkZwxiWRGFjSLS8BZ0T1P/4xcQPGjmQ0TzryzPPCsjCJPhA5EY22hBhXvyIAMREsCQwuzeoYBQctMlDQr2TSDJAODnAn9zKkgQNTRcpLhRg4OfYOhIUog2aRCQPOBDGBGSzKBKrCHaoEG7kfPL1lLE9yd9fLhujmk8tF5Z1rqqrre758+e5bFELGuIpqs27pp25QKg5naME1TKaFq1PlYVW3buKrRNJEMTCwcx2KHmQxpWAUfqpZraB07oiLFN9AAV1VUURKNaKg6pdR325JHlDKmslit6qqJde3IMSKpFhWumnXdAuI4DG1dMQVlzmaEKMw9E5EjAKHb1KyKpMPusLuZ0jiO06KuwVdgEqwsYrzMulqsg+d+dwCDfpg8Yxu9ItZ1yGAll9bTYbRXu9111zV1c7JYoKbLq8v3P/2kaVr2QQ03y2XTtmbEqh7KbpouupSLNFWom9b56JkDQpLkEU/bpg8RaQ3II3qoV4rgiSr+d315RfTemYFo8Wau7EsZ9ofu4uWFEpvB0arRYbs+u//qcA1z/YtARRBAJIOpZKnqepomAwsheM+l5BhjvVrmXIJjJFw31aIKqvYHnzx5aa/DuvpgLydPx598Pco0ECIxO+cVCV0sKZupobIL7CIys3e3CJAZUyUi05DTJOM0jdPV5dX58wu08h/8/M986af/eGmPtqOMU+q6PaoET2MuQxmnnExhnAYiij4q0WK5qJrFxfbyZ/+zvzLtb975x7/EZAUpSSHUxgVHsYf0/Omny9XqP/65n/rk2YvvfP/d/WEwBFBz3ovZWEDNGkdikgqqyMX55f/5//B//K3vfOfQ9/fuvf7ozZPU7Z++972r6/1bX/niu7/7b4lYTb3zr735Vtsury9eNpU/f/6saLl4dd62y5OT048fv3/+/DPvvZrtbi6oTKFdTSmfP3+y328JCYgunj1GGW8OfVs3/e5mnMZx6u8I1ot12r0ShW67X6tUzcIVRTRb6DyPBzBTnXUQFKrZhyQ55d2lTj0isvNcrzDUyGE+e7OjuQIopkQUaTaaoeEtiAPgFo6APoAPljPWrZWieYSUJM0GOQDkbECMTA59gNm+MEPTnScXHBMSFwOdDiwFJSMoUAEkUUEVY29qWQ0RHBFZnkVoJQ0EhqZmxQCBKwi1aSEwQEbfAhiBMoFzHgkhVoQGpiBgYICCRpAGQ8AyAqDNSmNWsIQcQMCA3Dj0kkvv3OX17mq7Xx4dx3aNoRkY16fL03sPRYwJxv6Qp1GKJCmxXbSWQeTuetmsj3yz9IGZmJkUSKXgLAHxseTJgYUQEEgMhpSmaWod9lPK08A+7lPe7/d5HCNaJCYRBCCCtm6C9y6E0LTgPALhXKVPud9v94d9n9Iw9Oq8Q2tXq9gufNMugnPOF+JBIGUTK3NJCczGEKpm4bxXgzJNqgWd76fpkNLJpjXmNoQmOAQ4WmMpEh3tp/GmTzkP3nHl+fnV1a7rDWDRLF8sutMmNqG+e/ZwFFMgILdTf7kbUy55msbh0B92zjkFcj4sF6mtIpqAFArxuG3OVstCVBQZkQjVlMyAKHpXFLKYImURZ+qIkBykw/bZ+1cX55fnL+p21V3vqqbqDp2ExYsnT3LBF90BjmERHYHNyUI1UNGcEiLGGInw1h0AYGrsaLFsTXXsutjUb75+H5F+78NPDnfe2n2j/e0/SGfr6u04FkBk9o3j2FDVWgRAdCFWTeN81FKwJBAxKZonUxZMWlyZBgIahuHq5cuo+Y/9qT+lZ2+9c9Hx1YimpWRAaqoqME4ibd1ueJlTMmmd91XThBjnz6jcf2SmwXvK6Ye//A/QjMyKFjLzHGrPBNYf9n/w++/cPTv7i3/qF7aH3UdPr956683f+M3fuNwexHDdNrnoIYsvednW16P96rf/jWM8Ojo5Or7Tj+PxRt793nePHrzxz//7v6V5Wi43/dAtlkdvf+kbh14e3H/4w9/9zazlxcvni7o5PjrebW9CCDFGZs6lbPfbw/ZmHIe+iImQC1IyIOapAJApbG9uZJhE1UqZxqldHo03LwDx6mbfvHg2KcSq5hDVDAAcRFMZh15KJmYDqmKlxcrYIVJsV4gIJWvqTUtsFiFWxUBU0czRTOu0nDOB2izQmy3aLtxCC0V06i1NyExSShoBDIgpLJ1jchWYgYrlySFQCCKGFAXARLNKAWMg1IwqzjtlRhcUWUpGnkVXCU0AwFIHpZBzLtTIHp3J1GHukQDMCBF5gT6wJskJTRgECRG8GDIGBMnqcBxwvADLSBWFCp0HX1tJoBk0QelADpC3gIj+BNzS3d2szs/PnePNvQexaWJwrx81RpSNyOy4DSnloT8smoibZTG83O76YejZWyooJYiw5GBcOSQtg8JhTEW1n6ZYhUPfpa4TgVg3CDgNXd/1knPVLhbLlU7lMJaU89j1DVMbKo+AYOi4ZiUyTGkQQ2AXY2hbBUxWEpEwh6Z1zaJerIgZUGY+RpfFAWUt+76fNXQUfBUo+tBWnoij51yKqZoUQ0qiKtmx65MagpoVlcOY9sOAAEPKgNTEgEjbsThfvXbaFpzBTvr80EtRMKhizKUgGJm0zrXOTYxHVdCjkxA8qAGjGeRpBFXvQ6zaq7Ec8u6N43UqMuWSchqnMeeiQFUVkd3MelNVUCklO4S7jbt6+vh6e5OyTKJEJFIIyYMEKOfnz6vFqu4LGxBqQ6SiM99TRNmR954IpZSSCwCUXHwMi+XCRPphNIPo+LUHJ8Mw/PDTT8y9nUb+weNXp4u7TUmuXtg4hMY5F6t2kbMAYhFkkHWg48WyqhyY6th3+8NhcD1I3110GRYev/mVN9/+1s9eh5NhTK4MaZwORc3HZrHOHF0IbWy8c4QwmR2mw7jbh/3h7vHRqm0Gka4fUkqMRnffOC/xBAciRLGSM6tEF9ExEaasz8+fPX/x4u7p0X/4Z/7y2z/xcz/1C3/m0x/81m//zm9+8PiZKhbFguCTPHn6dL3epGn0IeYsm5Y/fv8POLYfv/s7V5fPY1U3VXt69/Vv/vTP3lzt33jt7LP33vHt8sWTj5q6bduFqanKlOdZroiUYezYcJyGru/SsD90HSPmXA7d4eLF891uPw43aRz2+62W6ebq1dnJRgGagL2FKUufUirZDKBkH+IBEFVmADwxE3GfRis5DzsrBUNcnpxVzdKj5ZKnPAEAhzkHa3Iru0BhEJV50n8LpJ9x7DnhbNFWI0Jix7EBduhr8g5NRYulpFNvJSUASgmZXWw8k8kkZsxN5SEPaUrjlHtAQi8zmWPO2SIQhso5D7FRKYQWQnS+MjDnSadJFFCSaS4yl1XFTDV1ljtAQ99ybKkMcn
ODzs/XWwMC4Pkzy7oDK6aT2YDTFqWAFZNkOaHv3Z2To3unx+xD9ME3bcn5+tCzyZhyTgk27XYqL169UqC7Dx6EUBHR0WZztFwG5820LzKIpiROkMyGoc855ZK7oY+Oq3YF9ZGphlgB0nJ9mkvp+44QY9XEyh8TgFlOWZMwIIOBWhrHNAwvdztQKdMIJbd1vVivmkWTDKrVarImpTz2Hcm0PrnjYzXr56ZpPAxjXVW1Yxm7ECvmCKWMJU9F1LRyzocQQkDgw2E/5klT1jL5agGxEQABBIAmuCbWhuBjlYtMKUVvbdsUtbmYXkpyIqkMKZc0DexcDFUdwvGi9oTbqSpqRQTAQDSV/OrySvKomlW0Xayr5aabbDe8BLOh25ecsmrKyRNXMQKC88F7j0CiAmCOXSSplms35svzp1OWKWdQYMf9xfNHD1+btlvZdXsRSdRUDpkZjAnYIwJ6H5z3xOi8l1wkF3OGmaZxUlUzKznX7aKpm7ffuFfy0/fe/1CO3qqqw/vP4zcerXDsEUmlLnnk2JLzMvQ47E6r8sabZ/XbXzBqd9fdbiiH0PfDkOruy6+9Pu2vVw/eeLadng6GWWdeiBq2i1W9XPtY25xuMnOIdVW17eLs9CyXYqKM5mKIRcMmOCCQ/P/4e3//o5syBX3UEgJmhJyzqTK7QA4dIXIp+vTVtt7DO//oX2oZ7x0//Kv/xX/1yQc/unr2o2//zu+1i/VXvvrNT5483733g7g6ald3nOfu5tWQptLvLy7OnfM5pRFpc3y6ufPw3n364Ld/bXe4efrkw9OTu6plGIemWbat64aOkGYQmOTi2DsXYmUgWXZbNb28eNbUjcnoQ5WmAUxUi6iOaQRAUUy5qK/q03uuZEM0EZsmQ3AusnMqeT5Cl7Efx8HKBGIhNhiqaRimfkDv6rqpqmDk0JRNzQAJTCX3E5ohOyCaYe3oGAwIHQUPgKJtmb2utxAJIiaVMi9R0UVsPCFYHm3orIxp7MV7YAfkJYtCAkFlb1ojAIigjkw05wfNO0REKwDiTMAsp7HkAmAZiow9kJdpB9NhRssgAvkGKRqN6Cqr1qAJiLE+tjKgqwUMVDhUcBsdmIAboyUwwXELRWA8YOmV0Vzl+ilhKVWt037bP38aHZuU01WzCFWuY6cUm9XXvnHP2BEYO5cMmVxRFQB2DqAsomOHBEgGtaOS01hkuTwi70qRlp0jlFKcd1VVAzGcHLNz4BhyRsLDYb+/upiG0QE5JDQIsY51m3TvQCi4i64buu5mt11t1sWF4eKqz+Ww21LJSD48eXK03riqXjRV3SwcYkVQxRiXjYJldGA2TtmX7BDykMy0S/n84tUP3383p5TFvONFHckFZm6bpvHu9OgoQihGkkZS81qSApmaasrlpKljuwoEa3fUZ7kY0nYqLgTn/ah40Y83u72BEYCUIqZqtjvs++0FIMXYFOzm4cQ0DlO/Q7PV+oidPz46ioySUxKLMXr2BqYYYqzVoIL9TUrPzl/mXEpRZlYQKZDz9PKD94a+t5S8hgHItHimKjgmUjHvmYikFER27Hxd5TSJqIoMXe+CDyGUkqd+8CG0Tfv2mw+66fGnuyc/gjPYDlVsfuLtlSKY5NR3gaKP7aIOR+wfVFP12usFq2efXTy56adSyjQlySXlbVdAa/d4exA0MC17FQlV3SxXjh2amaS6qqL3nmc2HVeeDSDzbNoxNQwODDGl8vf/9t9+9w9+P6fp3etejqs3NtEhCWgpQqpI6skhOoSyT/hr//SXQrU4unP/7NHPnZfNb/7gU7l+8hf+s7/80fPh99/93mcf/dA5/8abX82Kq+CfPXtMTK/OnwNYkezRr5YbKeXlZ5+erOoupQ8+eLdp2pSmrt+fnZ61y6MHZ2efPP5RybnrdkzkY+XII6JzFTXq3LWUZFb6ft9U1XK5GUOVp845L5JL0RBqZtYyrY5aX9eQGAjRQKtKdQb8G3OcrYwh+NrWKlnSSFKmNMl4qJsFqqZhSP0AaJpHnAXvoKCmOSN7DMHHmpwDZvbRFECSQwsxNlWNvpYiKc8AVM4pmxS79XELaDEpKglAGI2cd4sjHyswMANjsFC55CGU4F0eehfCHB8JxIWciZSU0AqDIZGlXKQHBE09qKAZ5M7KiOxRheqFX6xAtUwekSwns1JyT+QBiVQNGX1tYJQGAkUkEwUCFLapEAIg2cxxlN4Nw2hq2370zGOattfX/X7PIMRM7IhxuVycHm/u379/587Z6XKVRXdTmZPIKeftzXWvetheUUnBu3XbbFYrBY9ElUP0vmqWiOAJCvms2g+jIhaANOVx7Pf7w36/H4dey9Qgr+o6sAdEZq4Wq3EcFnX11mtfKGbIFLwbhiHmdC/GMQ3jft8JilFYLQH0crd/3TOKdCYvc1HVJgZkh8ip5H5/c31zjT5WzWrM+dnTT0qaQtV459tm0QTfp9yXYkkVZby4yHi9aJo2+KO2rj3eTNonudkdLnf7Z6ptUyNT9EFLFiTzsRYpJROQGIhamgYzFTNiunNyevf0TslvIaALXlQd8YzkdUyMlosR2jAmQlif3a/r+va7GlEBplxySemzD55++mQakw9hTB0zqZmIEjJCWaRyJdLEGUqOWYQFCcE7JqJSSoyRiWf6pY+RRXIpPoQ5eRRiJSKkxQAXbf2lN+9N7z8977auWj97//psHV97eArsitl42EYk6q/WbVl++Vu4fHT55PrlITG7ihyGwOxUtR+nfd/tpomI6hjb5tQQ6hBDCNFxdG4SCd7NCXJCRAYyJWJAUzORWVCsQfUf/f3/7t/8i19mxjylouUHL3dFFm8d1Y5QFUpRpEzOHLEgTql0u+31xdOTB6+PSX7tn/3D73/nX6Sx++CT53W9uHr1xMCmachqq9V63G0fvf317/3WL5vq7CH0ITTt0kSvn374/e98ur253Gw2Fxcvh7EPIXgfQ92ePnqz64eqbZ9+9mGRUlXLOjTHZ69tdwfNzvuIAGBMyDmV5WpDvn757EdAjjiw8+uTe3VV3asFaVF5ryqzgIMdA9ItkdnUSi4ppXGAMoGqqeZpNABC108peHU+lFKccxwqM3ChMWR2fsYuMqPCLAnLkjozAaSxiO12SBSCZ2YTqaLHUJVUcF4yqsK4x6lnEPYVxEZKLiZQimhPCCjFkSJxKUigqslSl7rL0t8AEbkKLAGyiwsfKpE0966894YkWvRwgQgUa2PmUIe6ShBTEgAzVyMyaLLSE0YDIhfBBWZC9hgqyh5yD5oQnRmZTjSOEALECIVBEuTODeNUeecJFp7ONid050SQZgRNMWyqql4snI9oEpmGnPeTANA0JSm5G4bd1RWVZIjDoU/TFIkqz7nbOkBQa2O13iz3fe9CrE/vV3cejKVQqBSK845is2K/Ojo2wFxyIGxCCHh7NxaVTajEzEw9opn0w9CnnJF23SglKzrzbrVYFc2WJlc1L4uWLFCGYUpILqNXTTOoP1F98mDjvY+OD92e8U0mi/WirSsHYOQ8ASJE5xZtkxV3qcxqvZtcLnMh4GIQm8WG/DD0G
aAu2QiJMKWpDnHsuu3QB+9NJTAunA+hqaqqrpuqabx3s0wNAcaccpGUctd1w9jJ1InacrEC5uBjmRFOhEyemJIURN9S+uj805ILMU25eO/HcZimJFlFxcjU8zJrARB0qlaKQsB5qkuOGQnAfPQzSZOIYl2FoiVndixFgA0ARTSEAGanx5svvSH7956OOXyR1/zDm/3J0eY4MlVaJjfctOVQ1ytbPjjcpOtBOFSRnZnmkopoMiDv75yc5pSY3aqtGWeKOyJhxejQmF3ORUDZ11kBAZIUADGwqUgR9d5try5/6W/8jR+9833nME1JpKiKAfzw5b6YvLluwRTBVKFMyTlmZBFJeWLnp93L7/2rf/Dkkx/5EJyPIrnvrr/09W/92f/0z/21/8t/nabp6sWz5XL18ukHAFBVzTgNdV2dHN/JabLcvf/x46vri6PNcS4l58l7H52v6ubo5F5c36mXL/o0laJFStMsV6uT9ck9810eYgifzruV5fpOIDazUC+rZu3clYq4UC+O71F9dHF9dbKst6/OBYB9MDMpeY7+z8VJNLJSmJiblSGAgVcVyQTgHM8lpIAUHCFILiqippJKgpwADYZsacjDDkStJAoRASm26CuVPKYEoN47dkQKCqil4GzpZifkSsmWJlTEEFEtpQImiIimkHvIA0iaadeOvUq2OdCbJ3SemrXVGyEWY9FkYlgGBHPOVyePVMUvm357yKUotobOyMgEiUALqEFcmYpjRh+Q0catjTecowGCDCgZYYC4wGZNcQHeWU4mgxmhc+7l+XmeBjOrYmiaxdHJnbt3z1br9Wa5AICrq8uS85SzEO8+eQLs+mEIjoNzF5eXu8NepQTCzclpu1yuV0suidlV602IVUFf1dWqictpHKQMWWgaTTOzVaEJwTnHxYiIc1E0rqoIZpYFsrFDAFfKOMPHqhAYafJusWxn2LQDzSVPWVMupXDvuBhQrOLCFdGm0UBUB48AbfSqWlQPUx7HlK2sAk7Bn1/fpKstykQlA7sQ/HFTbbsDsYPQ+HZ51C4JVDgQsY/VKjgHNk4JzG66/bTfqmZmDibpcHOYspkStc75drk0pKw2ZM3SlTTeXbXMnIoSYsmpTHldVScna7XVNKVSigAOabq5vrq4eKkGsW42i/Z4vQ6xiYH3zz7t9ttYx0Es5SHnLKI6X19VwWAAqapQEyWzsVifZNGii87MCLFZtpJLSXkWfs57zFgFZoL5w6PqQ4UE7F2eEhHdv3t6dn752cXzH7Gvb6L8zuM//x+tgWwZq5i7anPsj852nX56fTMmISbQwoieCQEcOyRUVYfBMY5DN01JDNC5RbtgcM6TR0SmLJjTmKYxl5SKhbqtfSglA2Hq+l/7V7/64fvvi5QiUkqe9RGzIfj9l92U7Y1NBaZqqGYpK5Kcntx7te9Sml4+f0Lkf+KP/OKXv/ljHz99uVwuL598ePHi4+/+xq8ichUb7935Z+/v95chhJLzen28XKyZHKO8//7390P3xqM31+3COIxj33X7onJ5eeHCR9OYJB26w0FUc8pVEx88ev3NL7190DBcv7h59Xy/v4IiZ4/eXFbN6cNH2z4fS7/fXo3Mm9O7R/cfHd+7d/7qR8Ssw8EQTXMWA1Uo4zglQOJYhaoFJGSHzM4xwBwU86pmZkSKqmoiGQnBI0ZPiE7VyYhaJq4DNdVUVYyqhuSr+YBj5EwKex+rCp035DElwkSABgpaAD23xzijfszQMQCqZgIy0+AdlKxSrCS5HbkBpcnyBCAIAOSNuIz7nNLnjTc0UzRUUw2R6xaJ6wUZWBpGgsmQ2fH8vxWdA80YHEi2rIoejIBrK4IIqIhcu1Dz+lQUc9+h9AhKqjBdgIJ7++E9ZL9cLtqqqquQgTBEBbwcRVWxOQKVPPX9fn91cZ5TMbOqaTfr1erk7PjeQ1IpUjyqcWDN3cXBBSyISI6btvg4eG+hhpyWTFkhjUXGfL19uTvsneOSi6SplLys4luvv6ZGBIg2n49h6tOYBldXpa4WlUP23TCFGBjB2CVyXZE+la7rhZ2ask1VRU1dIVLJaTdMU1HY93kaD/stGIYQvHfGy9VRu9ocd8MYGBfRD2nqx3SnDW+jXuz7i24ig34YUQtwQqI8djuwbNyN451Fe1rHa21HsUWo/Ab7XNZIy7ZFFxQAAZMUnjKaTP3h/Obw4qVNuSRRNRAVAHIxVlUTvR+mIU0DAx4tWhGtY+V8iFXd1JHAagcB5PzyWYjVdj9MQ5rGUUpOqQCic2Tmpykt25AKjsJr1ePXvphimG4+bpHAJKckpbBzYMpzxxiBAFXEBZ/HqUjx3hPP33k0G1eqiG8+OivpyafpstOlf3z1Mx8df/OnvlWji0ZhtYb2aFLvHNUks7jTsWMKJlpK6odhSmnou5ub6yK6WG7qdrFsIgFsu/5VyaZK7GIIhAiKzKFiQqJp9h4a/sH339lvt6fHRx9+fImzP2u2882AIYTH14dDSl84ah2JGaiCFL2etmbKTHW7/Nk//ZcChd2rDx/dO/nB7/3e0N2EevVbv/btR6+9dbyM11eXh8PW+5Bzruv63mtfWiyPr55/+MknHyWRo9WmbVdVVV9ebwm5lKJm+8O+2b3SokfrlZYcfBiGw+XFizYuPefiFpb7nKeSEwKoZh9jbCpOCqi5JAWNwZnp3bv36ENg76jdoJRYNw5McmZr44ocz15ImqcMKkJmJomZjD15BDNvkxSbxMh7dr4UmaYhDwfNk+XJxg5KT7ExjugcKmh3IOfgc62XI0zM3nsOVUoZyCHemnaQ1FlBHzk4RAQEETFAMCXvCdF8YGKOnsl2l1cldaAC8yW9TAA8r0iBeK6fG7uZLEnemauykWUtqZALzXIFKgBYbFYHmeURJRuiWUYdbBhAherVvM0wawjNIMvFx6aG5I0rAJTccR6Bonvy7FmaxllJWcWoiCFWVb04Oj1pYojR3T09wdXJTdsuj45R1RE550IVxCx33Z1VyGokBdIAaUA2zv3ZnQdbgd04dLvt0B9GwVwE0XJO3f4wD3FExDnWPB2vN+hCr/jB84sYY+t8reKRPNOijuyW4Gno+lfn1251JLFGSdGEvDNyuUh0fHz/bkHOZklExMYpiUHK+fLiFYE1ISyjj6tV8H61WoQQi1maRjBbVD7WTWDHoYq+H0RHo+QqZTnsd+1ydeg6Yl4u156clWxgJ0fHQ5JX4tr1WcNewaac0WvtGbx3SAaW0iSlDFOfcgbTxWpT+yAqV10vc4sYnUPbLBeOHWPbRG9GIQRmVwWvpo7QO+cQJtGSOgTtxymXkqZRRdKUTY09ajEAY6a6ijXg/noPi+Ozb3zt+UWXh5tp2i2WNQPllLim+UHG7KZp8BUTsxZxzt1Kp5AALQ3DnI30sXr94d1y2G9G+eHFrn+9/aXf+f7X3n7QPnjw8iq/dreJ7erZ9c0wiotVW1ee3JjLrktSSpEyFZGivl6eNitAqEIkoqyS0oQGqZRSxHsAQkJSs5JLkeK8b+rm8Tvf/9F7Pzp//qImG8e+5OK9+1xHdXsyAzBC
fHVIfdLXN/XCo6qOBV5sX4gaEr16/vi93/zHZ2dn3/3Ob1XBL9Z3Tk7vvP21b427m+jw8Yfv7rvDMPab9ZFzoV1szu49HLub5+dPi2pgx0jbm2tclnHYA82OES0ldYetZKmcpGHH7FWl5KLIIpBLktQXLcM0kMHN1avWx5fP8PLy4vrlp0N/MNNpHK/Onx9v1oslOnLieNXWggRiaOaI5rWdRyKElJOo5lJMBCShCIUKERUIZts0USoGmNL+SqeDjAcysJJmUw5OBT2zzC8AZQIkQgNDSoaqKFMqhw6cD7FBZrLMCCWXLEWniQg51FKSGKCI824ZYzHKkhXQYYDgw1HIY29mt90SSURILmYFlQwi+DnOTgwUGLKy56wOmBBZxJgYAcvhQqY9IcLuKRLirDpzEdkRe+13xG4uIVjpFMQoAHvgCK6Z8yvmKyvJffXH/0gpaeh6NFmu14FvBe4qJaAtPbrUpZxrQu/x8vKmN6ibpjtId+hEyhXjqo7DlFqC/mbLCNvr68uuW917zZZH0Tuo6g1xyQm0XF++cpHvnBy/OIyb9SbWTZ8KzNJdgMO+x24MPj46Pa3QnKPFyXEQEFU62jSLtpATdgpQANmRdw7FxGAEGlPOaez6/rDboeYqxlVb39ssYtVWVeWdG3MmJGW+2G4lT7t+ErNlHRdqDtDKkIrU3t85PuZYXe4PDsE5bwDdlJNBVVWEJArOs4kaYFEJwTHisq4OQ0+mJDkbOkKvkrvrE1N/dIQATRXBbEy5ib6pmxg9I5no8bIxoizmEJHRgMQsi4wZB5EXu5vDzcWu688a7PfbcRxLmdEXikSOoJRCiGDgHM9ehXrZXPQH/9E77dmbN6v7w1VXZyNnIpKnpISA6EMEgDSOoWpUpeTMziFiTskHDwDMzjkfYzha1PnBaXu9dxX9hisvDP7uP/vuf/4Xf/Fgfrcfl2d0ud0JcJRcoYL3RNxUYSpMxTWNL1oAwDmeHSVEWBPOmhtYLhFx7pZJkVksZmAhVjdX19/+9W+ff/ZJ8H7y/tB1AEBE+jlJ4vZpZjab+w65vHuxe7BaomRFOjo+vri6nlJCxDTsDlvwntWw5LHfbp+89167PL66fhVjLCUVKX3f3X/45o/9kT/26Y/e+dG730tpJLAiOeWR2am1Ivk2XDxjdVIOvgzjRIQ+eCI3DIf99qpxpmEhucs5IaCoDEOfjSi07LqikFIiBGQei7z54M7R6xuNATj0UxaVlFPJGZHyNFi51Uiz8z4GJiQ0ZFdyxjwwuxArqhrnAzOLWlaoqnjbSwdD/fyZbzoDl5AZYXZeAwDN60FTQTBvOsenBdEQgJCYTMxyynksWcCQnQNy5v2QhRiYufLeAKYxmQHH2kz1NikNTLeaEAR3GzqbAQQla0oGqglmsYGBls/tRVgydBeaD6AFZQRiimtABiRjh8jzn5BNoYwWN+oCmhrXMy2DyIurjbN788EdFSmih34aStlvt9vr62kapmlkohi9gcUQ1qtlFWMIfuxGK6kKcdE0CBiqUHu/LAKKq9WdYb8P67sWfPY+CxiRiwHYT0rRw/pYH96/D76CY+wPh8OQEG2cRnIhxmpRLwAU2W9Fi3OqzEkCMcfoQU6P1lngcrd1aMvF6ubQ92kKde28l1LylFCVDc42ayYeh8Nuu18d3xGz691uHX1OkyEC+9pz0y43q1VRzKoI0k+T91GIBsarqZSxT0WKyGlLTCyiU0qsEmJdxVAH109ZzZhw6g+Vj1eH/fmrC4/UVkEJHblu7F+9eF6mwcfKxbZZrJGg73cyDgCA7KrgPLsqxhBr8m7ZLs+ONpsm7rruqutUrJ8Vub45PdvE/ZMX+900pqEfDYCZbB4uAEjOzKwixKiiTe33Y37y0UdfaBfV8jhNdw7D+aL2JakWCZ7JsTfvmMEgjWMpCRFExTs/L9CcY/beeWdSqKru3zvL03h0trn6wcv3XqRXdv7+D3/3jW/+8Rh9NitZ+jSOzu3H3LaNIdTMYFB7z1iMwBCZybPT28ijesasZoDMLACgFhwggCIi8asXL37zN7/Lc4N16p0FLUXNTE1EiW4xxLcdL4CZpQWGnfLN7nB0dLTg4Jhd07RV/eTps08/e7ZYLphIjQpY1hybBrZw/vxxLnn+JC1Xx5999N7jj95BU0fcpRERp5Q8T/1wsDmCimimYDoX9ZF92yz7lOuqSWm4unwq45Wgy2Xc72+mcQSw/fby/LMPoYyHvuv22ymNhDAc9ttXz7cnD19/48d2Q5e2l7VjI45Mra/RTILzPjBzykWk5Jx1mvI4iOQ0jUhMzoWiy8WqCoZgsw/X2DH7Wd44n75V1dSAEGcyrCkhIii72R+sYFSKEBCaKpACMDPYDFAzF0KE1ko2VQBjImQENVUUFUABANOihkiMpp4DMqsUQy4iSASA5CKCAaKBAXuQwiqINrcRyHRWUhqYeaD1a3p4Aakj18Ks3y0TajHJSEbNKdUrkLHwXeAKNQPMiglD9lASphEA3A9++F4RGadsZs67NI55HEpJ7Hy7XB+dnCKj5CQq+6Q2DTlnG8zhZMRV00bFXRkXbZP7HpkxNEJOmYaSVDOQ09QF71RKEhSBNEkNsqkqKZXl1EZuT49uMphzznkfghqAKYWA7PdFLPWQ0jISg/Vd3+fSjVM3fkJVnRCR/d3jNbMrScUKse9zRswuROeq0ei0rsNisXDW8KIAX02lCd7ArJhDrNk7hFTy3I+KxJFBgMg5xyxSGAl99CmLSkEa+mHX91PWrJZzykUAu5RGc4EcF+fMAIhc3d594wtsttteqyE7FzwvqzsBIUvZjamO0TObFnSuaRZ10/ZiNOZF8ONA131valXlmqqpPOxeXI/DpAB2+11ozrmSCyEas5rNC/VZALhexPOr7vlHH3zhp37+Zbqb8z6XgRFFtQgxyNj3IYZZ7DCfcECknwZ2QUpORE1dEQLVLTIvl4tVWxfEP/nTbz3959+/3Mrf+uUf/CVd/dRf/Z9szT2/2grhZnMcQhzE2qpWBJC0O3RDSjlPWgQBQuA2xvXmGJw3s8gISIxoRKkIAjFY7fmTp+e/9Ev/cNjesMl62Z4ftn0aEQwRxHQ+dHx+x7RbGQrM1Hvsh369XoHqi5fnqrperbzzrz96cP7i1dXVpXOOaLvfhaef/khFionOGkWk1Xrz8vnHl6/OEbEKnghmc72ImGnKxTOORZm4qCISkVtv7rTrs7tHm5dX12qyu3mZS1IJ4JyJ5DSJFgBUKfv9ZWBJCjkl1QKEY3/YXb746L3xy9/6SjfmV08/dUSxadvNsQuNgSK6YhCAwMcpSxZTQyFP7H1cAJALnpC6KfdDLzkRIxiEqkEfnI+AlHMhRL41YyB7VlMAYxDPvqiqSBU8k08AY7cdDvu2adrFMpmiimMGQJXkCH0VZt0PEc+5Vr39m4FSCiI6ZnZehUTNAUTnxDS4Wg1FMiCZFNNC6NATOIrseFZjGmgZJWc1I8cUKrAFt6cwHqx0hABaQMVAGTI3G+TahQrDWorKNAAENcCSZsY3wFwETm447Lt
xdC60wZmkRV2NYIKVGUiR8+fPZ5aLC26W8IpBiFVRrdtVyUl31+vlERjFxQLYg/fMrGCU0zh0QOh8BSpCLIjiKwEqgqVoXCwMEJ3HOh6zKyJ9343DGIOPziPSYRz6rkOzYHp1vRuuL3LfgXOT2pBTYa6P79BiMx2y98gEgX0dqyo475ypKjECZimBaZtMK8+EgcFrTob7rm+8K2X3ohuymRSZ0qgKTDCHKRCgjiE6but6FUJWTMjE5JnW7ACgqKQiRQrjhtk7MCAUs+CcZwJAEaFHDz1B14/XN1dd16UQwPmq8T5EVSPwVR0DQQ0yjtO21z1x9OHRSShqqqKWqN8fbi6LzTBXYGRjEBHCWTJ/+0NV5vtD5XnVhOurm4tPP3SLu9t9dlGaJuQiAIbozcSVYio4T/tV2DkFRLOSRgDsykS4ClWVc64rF2KlqRyfHP/sj739z3/9HVN97/c+HP/KlEbzMa7a1aKpmVlUCABAV3U8amoxUBVR9UwKpgKVZ/vcV5gkFxHvKBVTUwO83B/+7a/+6rS9oTyUkolw0dbzrpCJENDM/r0v93ZkdrsHAGAmAhinMeVMiOMwvPXaF8acDV6ZmaqCFQJfwICAFFWNHC2a9nDYpqGLjoeUD2NZVJUjVDU1nf96m3ZJkyDQods6JGZu2sZ5Xh9v9ofu6Pi0P1zXzQqZgObhDM77PQMMoSbi+ewMgMw+xoqcv7i82vkvt4++kpMO+5tx6KlqmrhQVUYsBlKyc8FVcY7sG5iUomZoxrOMzAWCSlNiZlM1Iod8Ky5xbGBaCqMhQi5FVAk5FektmRQw3W+TTIPmUaYBFPqr8+hYAXXqiQyJTQuZOR9CXZeS2VcutqFZcGyMHBE1nhK6qVhwbIxmRhhUlUCrGBAJIQx9Nw6D98GzgYqaQJ5ymkqeiB2H6BeLJGZazBTNTAuECn0wYkLFMmKZHGZCQA+gQlWkEOgmyzDOJmBgZxghBIKMuXefPXsuAk3b7BBzGp333oUQIxFzG9dNS8Q+eHJkObPzoWkQCBCKaB7IO66bBpGMSZ2byyVIhOx9uxbAImVKoxqw41BVyFyFColVhcCI4Pr6OudpGMeUVcxUoW7bWZzBxABqpTC69t5r66qKdUPOa5lMc103nfFkOKbkET0iaErTsNvnPKUhZxFBpOBDqGsfg0N2oF6mF7vu5cWF5slM0cXV0bGZ5iIu1M4HyLkv2A+dlky3MxqM0QPhZrXZrNeb1YoRPaCPwcyr2TClyNTEWNRExUqeRMeiY8r77c1ue+N8aNqFjyF4v3KuraIj1lKK5H6aPn72rO92JU3eeQQk7xixit75ivaf7bc3RVRV1ZRuLzs2U1xUjQhVZ84iGIIZbNqqH/NnH3wQxx/GIvnuuoQZ2XOrkRdVR94MiGnOehOh5GRgRIxIJSXLkxY/ZXPeWypDtp/61td/9/sfuEN58Gz89t/658f/yf/o/oMHoELEZrBaLKJHBFBDIqiYHMJUSlF0RAjgyRBRAAGdd0xFwcyzqJr3br991Vx+2owXB4hFxJIQcT9O/TgyMyLNX6Aq3J7FPpfezLw5kTJlTDkzkXOuSOnHjtjnPCc5jImY2Dnz3hFyyRkJU5qGYVg0zRwqHqZJRKsY+2FgpBCq1fLo7PT08uKibpppODCSqYxDx2GR1Jqmcs3rr148Wx/dXwbqi7FDvrpwLPOTd7k+PTo54WGyC6Obq7pdL1ZHrq4vzy8uhvygWR2vvJ6coAGFMNfCb3PCImWack7OOwCQXEw1VBEMQBRptsqCc8zMYASIKU0gk3NepRQRMBhyEjUtyUwgT5ozmM0iHiS2WWPOAR0BhCxiZVIxGTqaFRehSSWNQwZUwsHoBgHJM8UFxSV7H5vWhYrRhmkCmz1QoKqDCCJqzv1uW6SQGzVNSAgy6djBeABL5BuOTdycGHozm3OCJhOKaO7IudgsgMgvN8ZRczJNmItsr9B5dg5l0N0Lk4QIiGShVWSz4r709psx1otFE70bp5Tma6yPwI6IUhqlWN22TLg6jYwoRMOYCNEjxkVYNi2qpTy5WBXnCzISiiqZTTlv+0nJ7pzdAdMh5cP+ZkrThNwPkwJKKXns9vu9qKHzVbtsl0dNvajrxnuOIYTgCSBLCTN6iQkBi6kQMbU3peSSx1TGaZyHGmRiKnXdbNpFmzM6DwZTKV3XX19fSSnzrJHZoQ9TMeddXdepFGL2dbto1967EHxdRYfg0YhJDYmZPncXO56BKgZgNVFkpwi1cx5VDHLORiQKhhw9R+dWdXPv7CyGaGDzyVxEFbCAGdGiWW7Wq7TZIAIyZzE2USRC8uxkvPn0s9/rhmma8uHQzWkyFStFReZzFs4fZmY2lduCG9OmDYfDSGhI1N/0VHlsfEQGAETSedFFRGizctE5RwBFCqLNV8+p79oqIKOpGtBU9Liq//TPff03/tU7Ofj//p/9+s+dLF7/2o83wVXtclLohn4cLYSQBczMobWVr3wIjFltHnsTkYiNaZxSJjVkmoolKc3+5slv/Vqd9q/58elhf6nVkMrl1dWhPwR2olpmnSDA/AV+Ltz8d+e0IqJq8+2z5KImP3jvB+t2sWpqRPLBBxdWq816czRNk3e83908ef4kpQQAqkbeOcPoQyr5ZHNUSgEEM4nRE/sYnLKfQ3OERBwevP7W0b1HNYeLbnQ+1quT9bK1fqAOnK9KzvNqL03J+di6FUG5ePnMh/rs/ptJZJg++uzTz17/aTfmnIuYpFob75iYRdRK9swQvYthZlswOVMBg5zGEGsphRyzczV7xhkdYuoYAMkRiud5wghIOZkpOw8uQNQ5dUEuAgGxB0kGQOzmWQOYaCmaR0kjqIJmU0MkirFerJgduTDbmo38/MxlBTHzPoLpOE2eaUw5TROCSs4ghZwHX5GriBklZWADsKmTnBWmcvmKfEREco6IZzZRiEvnfTKylHM+IO6gJDDh2MoEyKyWEMTqjY0HnXZoAiJgigCu2RxZzmiaSwnOmSgTh+iT2bC7mrKMWXY31+ujo2nsApOa7ftxGgfLuTJxhui9omLdivPd9ppBRTWEiCG2qyNyYTtdLptqGqYW4dG9s6sEizUUtTwOppuzR0ENmF0xa+u6aRYiJTqqYgBEEQVTAmMEKGLMkiYxy4pSFE09WVw0VaxjjERERAq3EHJHBApIkHK+vLwc+p4Qh7FfLldn9+4ZICN4x1WI7AN7VzvnEQS5qHiee0Y0nwqYGAhADUwDIRCVklgLEU2lMOIkoGDGPCs14+ewRiZSQQXx7D0hIhKS3R6uNMmsKXWiyoQI5n00uF1zby8+6w43qjYOo6ipmJoJgKoSkarMfhlE8I5UMZdCRKrW1jF6HsZ8OBQRSNspVDGLIiL7MD+ZEQxM2TlVnVtEoDhfAlXKKGXs3DKQAWRByyWn8WS9Oi/93+nMUPp/+et/9QtfSfEoFkk5MzpAENE2OCKHxMVslxSRFKwUSaUggpVyc3O1vbkuOUkp5MMmcP74ve2nH8jY1UQPg6b9/vluKpKrECrPU5GpGxWsiMy+tH/vUG
a3zzUDNb29bpsCACHuusOU/Hp9tN6cOHLtYrVZtZcXr56/eHpx8SqLzIvREKr1et33XWV6tb02s+BDLklyRjQM8fU339r1ZTjsrq9fGcL2+uLV08dXl1c1ydNnL6dpuvPgwdnp2QmH7fkn+6vLPZFIOb378OjoQbNc6kTOn9XNMjaL5d2HRcX/6PvXF9cheGOOBmiRCT0zAjrmCUFKAQTnAswvTSRE9sR1DFWMZjamVEoZRQkJUYkZgHDWx3pmAFAxtyAA1SNDNFEzQ5qZ2lZyIvKKRIR0S7WecWnoeQ1mjn1gUBFRM1UkZCZEAjMwZQZRMPLMKKKiwGQKWBQcO4qIzGCWSiEEycm0SBrZM4XK2ImroYxA3tBMCoCJ5JInIuRmqexLmmTYokxoCXztqjWSk5LBgMDYB5kGFUWO1twxUwQCE5Xs3v3R42a5kJR9DGbYNDUhjsOL2js0OUx53ulOr16FKqZxGIehrioCw5xC9MaBmBQ4GYlAXG6Gwy6XBA67XTcW7Yb+4tUFEk3DsFgsXnvrrdff/NKijlMqulwgggE5do5ZtETn6hCvt3sG8EjFQM0cOzNz7PZTT5NFtoboop9EJZVsau1yKTqLmZGREEzNEA1MeTZtONc8fDjz4UytmJlJZI8zFofIAJkITE1FVZbezQwBk1wHDwCIWRUZASTv9l1SOL/ZgkrrCc2Wy42SQyJmDs4TMyKhmZbs0LN3kaiYGThRCQ4ZIAkWNMcOVJoqmFnOOXp0HpGcqGkedy8+6buh6/qu70tRABORMk9MEch5UEVUhzjfOlXnk5oxETnvGgKDbp/SMO1vhuqsAfTzcUbN0JCdY6JSyvzah5mfh4QKPrhpHKTUSakgOcPc9482i5//0t1ff3zFr1cvuosqHUa+n1RO24ZDzIZdypMg5NRP0zBOIkLIBUzMTLXkHJgX7SLWzTROCob9Ln/w/ZtPPyjDnrSAZcdwr9ZhkpcUh1xUhZDmXa3MIHxARFIT/PyCOa81/3CcNgvu5o9uKuXq5moch7au97vLZ0/k6ua6H4b5iP2Hm4Tlcp2mkTgQYs6yWiwO3YGIEJk51u3q+YvH6EM39LMBJMYYq83p0XI6XIPJy+ePcRpO3/pCbFsfg/OeGTcnd07uPbr32p2X1730FGMVq7hcLV1TBx9E1DMCORGdUp7jE6YGzGImagagNuvgbB6KJVUEwJwYrQ5emYsKIqoaoiGhAkgphCQAIiIimhOwN1AiBgCG2yC08xEAXQhIICmzY7vNqhpoMQMxmQp4ZkZl70zVEEVVAFQMckFE74kc1TEMu21/+Sp1O7Ai02DkwuoOuuDrhfNuyiAA5r2xs5yROSzXrIvovDEzCCIVZFAB50TA0pByhwaEDusNNxsDgzyAAhGoSsrgq4VnyH1n3TWkgxEiR9Dizh4+8qEKnh1T3SxSTiVPR3fOxmEglQgaqqo7DAZICHW7OHEuON+2DeYcvUdiY06qh5xTTs55rldE7ENYIZY0teyOH30xODbTxWLpHDvvEdF7zmNBJM8wHG5205izFNEYQlU3m5OTjJhSnmdzZorsqlihSl033vu7i0IAOvPnZuYJUynFCBEgem+mUuanlqkVRAzsAqGqVI6ZGADAcMw5iVQhACgTErtgxgSilpMES6+2l9uxeOeQaEjl6ub6ars/Pj1j5984WaHKxXb7yePHN9u95qmuau9d3bRVUx8tV0ftoq68IhqCJ1RDM2K0yAwEDXksBTzXFaecG3crmiwiwcebq6e768sp5+7Q5yJgoCozosfMippDnF+rYoqiJsZIZf4XUD2S8z6EDJHSfupfXG1N3cMNTLmweEL0jjwTURV9zkjBl5S0FJViRA4cGKRxEItAToqMQ3/v7PS/+t//lf/y//Tfvtf19ytshk8vd3de9Yc09MG706PNWIRdYLBUBECbunGAqoJIPrQK6BjqunbEIXoY++//i29fPv6RpRGkqGZVIYDW+y8e4aqXjzu7OhQwCZ6zlNuZP96CPv8wbjb/899Ln/17oQ0EEdl3h313+MOfYuf+/f9wyunuwzdUNJUcw5WasK+LaknT8/OnWWDcr3bXL7KJqc6Tqf6wB3DdXvO0Ny3DYT9Wq+7y8vr6Yhi6cexV5PriVXCLOtJ224+78+6wRcfPP/no6P7DJPnxZ6/2N9fN5mQUMeSxpFGUANjMAMk5AFARAAihAhMDKKWUnMfJQO12jEs4O8glZ8dujmoFB1UMRUoRf9C5LmHzK07KXPQEpFtyFYoREbs58oWICILztQaRjFAVixQTiSF670ilAFIIEdQQgHj8/5P1J7G2tdt5HjaKr5jFKnZ1yr+6NQuREklVtCRSgmNFkQRZliIrSSOBgiSAe3YjCAIjSTdA0jXScsduOIocBFYKO04URVIo01KsSOTlveSt+N+/OP+pdrnWmnN+1RgjjXn+S1JunI2zsYC9gYW1vzm+d7zv8845He/T6diAKOzAjT5EdkHJlSZVjJzv42CtVTGMg4FCa/2m90zFrCZRBSRjIkfATErRhSvER2BgpgCAoBa3CGYtwfrHXCqTxn4jfW/HN6AN/cgxuifPHpdSWxMVqSW1lACk1rrpB/YBiLwP+yu3Wp8lp5oX50OZl8jkfUDvpnkmB5tIzrl+6FJ2SDQXCV2MZwMAmItZjRA2Xe955RZxFuFNbE2qtkPKtSoC+37ALvq+I0RAGhzFEGUt6SJq/p0Bb865SfMhOOQ1mqOq0ppzbqVrogkADjEgWBML5Lz3BGBgDR0RizTP6MgNgRltvWKpSFWcW1lEjqncT8vN7U3LOfZjjBFBjZxxfP7ebrfZ5FrnIpvon15cfv2Dj6poLtlUvI9mOvQ9EnfOmQm9y2WBR1xHwVkszfNxSQ/H6fru5mq/A+eZWVpbUhKVzsczfbssy+EwqxkT1dII0RCJKOcEhmAQ/BpMQUMCtnd6GdHqMAIj5xxsSQHK/fLw+g5VLj+4CMHV2hCAwGIIPnqKQUUUUQCYHTAhoEoDkRA8GeZSGGxelstvfvXP/uI3fvB//ifhg3F8/MGH+/NDGlboH5rsiYE9Em2IHfEQQxGtUk3VsUN20lpae6nuH17++t9//f3val7WtotaZV1EqBkh7gM8LbowHKsRQHRuXcWuLtl3BY34brD6/Y4zNcPfN6z9/pfWy9TvvQRoaKXmt9fXS6lVW1Mt81xbFZEmYseHcdh4IrBmIgDYVMjsdHxAQscirYDp3fWr3eZyyAXAMXs1MMCUM/kIFHzPUk+ADMCOgg/x4smzz35wNx0eGkcjR8TSzDmEVYdFYMfaBBFErImAKjEQUfBeDQ1UaiUkYDYwrQ3JmfMGpqK1lWoWvfOex7FvrTV9pxwYrJ8KECnSGiMhYSSFhxMMG0AiEWI0twbLzBExw2rzV1U0dUQMZCZNGjsPVRSgv3h6+fwjYDfPi9RiLbVSipDVYkQiWRcgk2KI65uvemqz5YkRnHP92aWiU9UlLc73PkRC0FocgbkA0lrOJorMxh6UAEykqZiUewMz3oJzII0KuOPhOAS3pAXA5brcv
f6izlOupaYkqqHvY9d34whIecnWqoFGFy7PzrZDJ6VaiNX5U6qe4WwccnOGNqJddNx3FPvYTEXVyAUfKlJr0tQUwDN1jBkhNXzvvQ+JEA2bigIQgCkQ2hDC0qRJRaYEUGsFxNok+mCA07wMXQ+guZTWJITQr0txMAZTs2pQSg2es5oArQbXpTTFRoAOWnBNpQFQMUBVh2DEBrYJgV0c++Gjx1fBcUc4VSHCy6GbqubW9n1YVXwzbYbs2FTUOodUzBwYAWS1qkKmDi03bUCgpgj303I/nVTs7uEBRDl0GZjVjsspuKDMuZRox+n45nRaRHT9fBORmdbaRARhBYZaLVnUHL1TKJjZuXWvp+S9mUXyThQMVFXmdvcwmw9Pnl/0fXQEQCSqaEBMDokAiB0TNWmMykxKNGx2B62oFRE+//zl9b//d37r298JbH/m5z48u/yg+v6sH5pAbZWZnWNSXbeOqlBb9cHH0K8bhlLXJICHlj//r/7Rm+/9FpSEUlSKqoqtb6etFZapqUe4CGCKyTA4ePdWAKjaO8Mw/IFj6r/+7R84yH5PXVvfTFuJXa3Wm5vXIG1OqTU10xXspWa5lOPhHhE9WiuFHddcEazULK1O80wIgHy4e/O5gdZUAZY05ZwM7HB/83D3hmRp3Of52EQPh9vD8SEez3Iq3bifD4s7g5orszjvCUxVkRgBTJTQkL7cxBCqrqYsAzRQdc6FGHXt7CNenzprfSUaNbVymg3ReWb0Wpfl7tV8uOU4sg+RcbPdUNwYIhPVWs130KRpQWKpBijeB1HVmhGE2O+GQARLKmDmYxA1BAKDmhcEbK0+PNyBSF0mMy3zSfLJua7lhVChVWmFY+QwADpi0po0L2it64b++UdFKLeMhghccmJrqhUIVQhaQx+6Yawp1byg80CIoABk1hpFXJMK7MA1NHEs5fXbh5SrD50BdpszcLFOh+U0b7puvz8fhj50nfmoajF2XT92zIN3jhkAKITFtGOyJue7AdkdU12LvJpzjn3vXZOWGwBi57ioNAFFEG0b78bOH7Pk1tSI1geuiJllUwSqxqk2VIsxNpNUpItekeZcHLsq9jDNS04p1+iDHqfTaQrBsyMwYB824zB0/f3h6H0AaIgQogfRIq2J5lzUzDsXvQPiKgIATBqYTjUDUiBCM2IqqpvIpcHdnBjBRGqx3nkgWqpFQjJtpmJ0qpURBcyZriJsMmhiQH5pMi+J2XEIO9xG5vefPFklDzJzq2bfBSIoVdLdZz/44jv3D8fWVr87MlOt0mqrTZiQEU0VTRkIAU0FaZ1IEWBNNQEh1QZq4D332+Hk6/FU59upAH3to4swBgbzzgORiQIisQvM2hoTqSlSoG7rxr1LR1WcT/c/fvny//3/+0E1+8azy1/6o39Mz9+PSyUyimxC1UCMgJlRHRIg5qJgVlVE2uA9R18FoOYv/uk/eP3dfyFlRimg695JqmoRVVNVq2KlaVXzCBtWbJDMCM3eGc1MFPjL8+hfOrD+ZdXsD776k1lund0UQM1Ox/shRmltJVSbNVp9Wwi5pCXNvu9ryc45SAaIVRoi+jCeXVzd3R9Vb3OapsO1G7Y1zbUVU1kQDzdfQB6y2ny8n6YHJPjkR99xrA931/e31w83N0++9VNYqpmwCRObd6Jrgl6JAMxCCGBqost0AlDvPSPE2Bn79SQSkaZK3pmoM/FEzZjAiAne1YhUQ8K4iTvqhsEBEGij6NgTmnNETK0pgZJxYBIDMBBphEAxlCpSy8NJPBMhN2lSCiI54iWlVrLVxLg+I1BUSVs/brGP8/Fo2mqZ0VRrUalS2lqXR74L+20fAzlvYasi0Koxq6ktD4ebIxKxcyjF+UBhwNADALEHJEQFJWCEdcozQ2ux3yCaqrqz/X672c1iZqAizEzswMDHiKbDbidNHBMgppSDw23fsUqHREyKuCgwYURA8EWNQKJjEXTRBaLc2nHJfXBJQFLyzBS6LhIBHJZ22/IYw8AGQFUBDRShiKgoMTFYK7W2lkudlymEkIq21t6FStJ8FniZT/Ph1Lg7PDyUkkXBwFrO49ifnV9IiAskacoOANTUWmoAaIZNbRgG711t1lohMwJsIp6dAU6pOEbyDoyWOUVmi5hrW6r0wYHqSesR25KzW1ksRkygyMlIzZBJRJ1BCFyaIhNiMSDm1dEB59uNKhiCa0ZMVbSZIUKTVufUalpuXrz47Ivb22M/xPVW3qSVUphJTdFARM3MhXe5E4BVFQey9U/0S4fCuiUgdJ4HM6faDkt6lV6W/OG3noddz0wKxohgxs4BYVMLPqhp6CK4MfSbfhAQ+a1PXvzT3/xeFd1G/gt/9o+e/eFfuXmYVMQcE9smMLdGBLnp6XQE17F3ZDpGTyrs3FKbEGteXv2zf/j2t37DcmItZk21FdEiWlVLUzWook0sixTRqoaGXtLTD77aXz79L3/914mIEFVNQOndZfMPzF+rcKa/by77yYlGv6ejvRPX1hONaA2HAq0rHgJcpzyDJqJNDImZx3i+pOSQnPP99nx3/vTRxW5Jv5uzb1Kn6eGj9795d/Pm7v5m/RViKqred02aqjgOTaSkZbc/e/npj9Iy9x5XmL2sDILYA0LOBZFW+w1araVcf/Hi4fqtmQIiMw7j5vLJky4G4G7sO0FiZiBKy5LT0nI2E6lFDGI/un50zo+PHq3BiUAo0gwweBe8V9Ncm2eHJgGptQYARMCOg2MViY4JiFysrY1DD0S1ZGlSW2UEc06pL8vsCfvOx/5RUxU1R7odL1uaTWvJBVoGKc656Bm9c9wpuTKd6uk0nU7EZKbE7HxA14XzLSBhy5huLD+06S36gbut9ntQJCYkB1pBlQk5Bu+4LofVn+wiQiPm6Jvh+ulvVTh6T+hDl1MKDjt0a5rhzPktiaoyYxe4IZmatNqFDpGCSq41OL+05pRUbei7yNJqCQbQdac5BaqjCwiw7XypTczSmidRdd6RNmAm53NJX7x9qzV7QlXAVu6aqguK5Hx0wV+/flPy8uHzZ9v9OZArIgoYnOu7/jQvzntAyCJLWjaxFxXvAyISUa2NCB1zU7XaEMAxxRhMjQDVjBGpiwh2zHmaUwgBEU1VRItIa0VbLSmp6ul0VBFQQUSBtTA8bnbbq6ur87Nzh2hEnk1UmKg1UwA0RNXWpBrAyvIn8mgC+Obu4f7+bkrp2YZe//ZvffH5F2kpMbqi0pq0Wr5MYqOqNlHvnCgQERkQARCtSeImzVQQANkxIiORB4BWEcyhDbEd8+2bB2zywU+9Z7s+BiYERlwpgOzY1ACJXaTYk4tdF+f7u/H88Z/4pf7pxeYbX/vw5/74L//mx68FoO96IKpNTFtg50Poo9+MIyIuTRvQbcq9cx3hdtNZa9/+tX/05jv/XGsiySZVpBaRXDU1raJNrYoWETXMYkWgquVcPvzmT//Nf+d/9ub129/8zd+cDkfnHSKoChLT75vO1nfmD1w1f9+Mhv+1/cBP/h+7zX53luW21lJrBQIiRiAzY+e32/OzR0/HYWqIx+MdqiIhIsQYu3HD
TMzeiAm9mT7+ys+mNN+++ZydZ98RB+OOXAfIBhj7sTZjH5B5Oh03wZMZsZPgVaSUTESeUA2cp3mRw2muOU3zosQq2NJS0zwdp+Nx2l5eDruLualKk7QgACG2mjXNraRpSSiyuXh0OW6lpjnVruuICMibGjG1WsnMee5DMLMG5J1zCV7/+Hun+1tA0ppUxIWIyN3+0pzLm2HoBvbBEfSbQWozhNZUtjuRhmD9MNQmpYmJIkK/v1Q0yRm1AYMU0dqQgRyneRYf2XdWFyMmDtaKASIHUUWoBub8znc7h1WOb9rd79qhR+eVHfoOgYhJrEG3a0CWJ2iLmblD0Twdh90uBBd8EGmzwuCZVEYWCy5Pc4eR2J7uYh9jdCwKzoSJcqto0DNGh6np4Gnru+BZeg+IxQzQgsdFKTjyIZ4PoSo8nObA7GO4n4WabPuuqhWD+XDaR7yIEcwqR370yNjfPzwAUc7Fiwx913Xx9jgvKZ8/fnJ/PL1eWidLjIHZOUcuBiDcjkMIzpNr2lY6oJoRGCAQo0MHZgDqiVZDearl/vZ6maZaGxCx80vOJmLIYggAwXvnvXccnDvbnw2OHLRcm+GzsevQTKTdp9YAnePtZhu9h3cFONhUg/cESKgqCoBZpGVTEU9oYFOpIu205JRybg2JXXn49OMfi6hjktoMLOVislplxTObGREhArMDUGRSVVIwNCVVVVoDSmDI3ERKlZRqSnXONZVmbNFAHub2o1fPP7zabkNA67sAaOyirVcI58BFpdhE9wwXH7z37A//aQDooz/Oy6//8DVQKDm54FXEezfuL54+ebzbDL2nwBSZNypLlQa+iFZ0mJbv/4P/++vf/k0smSWZ1abamua2HmS2VM2tGUBVa2JZNJVaqvzsL//pP//f/Vvq+w++9o1/+9/9X/1f/uP/w4++/zunw4GYBRRWyBbYmrcnIgMQkZ+cVrgeZD/xbax6Kvy+qU1ld355cXaZwJeWW62I4H1QVVMJPmzOLvpxf9b3t6fJO9dqcc6Fro+7s7Mnzx5dXxvi3d217zYOLY6X3/rFX/neP/sHOc/jxfPt0C8N9hdyd/NGTUK33V08aXdvun5TGi5paaItFXbsmV0Mq3dszpW9Q+/7zWY1eSBxSQlMvA9EnHMm78h7EWVi6rdoStZidLDdAWC/LDklF+M0J6mlthrq6jICdM45MjFtlZiRuI+uzKfj/V10sOl70P3d/UOt0nLBYn7YWG2M7ub64W19bVp96MaLq67rg2MDHKKH2DnvakomuumC984IpGkrmYfgw0bBUsqoUEXYuYC++g5MTAZkByoIQAAqFc1MKjrXFASJqA/nX/P70o7Xmg+Q74G9ASkS9pc1C1qxdLI2o6nbQH3/+XmqZqjHZe4d+7qgpv1mI4B3c3vxyecjCXl3Ohy7fhOc88zahPthVptMY+wC8el4OO/9xdWVJ1TV/dBxNzhmdOy8V4DcxNQY7dHZ9n7OuUrv3BCcJ4hEcXB1DNO8VLHoSKVebDeietFfrSU2O9aBpBmk7f4ouBg/vboEwi54RyTSHDt654O3VIvmqbV6zJUQjinneU7TBFr6cZOazsdjiKEpOOfENKcktSmR906lgTkX/Aq62253RAQqNaUKeDzcM0PvuOWs0i4uzs7Orh6d7x8xdcyz6lxE1ZqqiBKSghJgUQns2JGoMq7oOTjm0kQAwBN77wnxbL9zjufvf7KkpCLe+ybNOWbClNe4D4ms5WGgaiKNmZjWx5QCkOlqGUVibM1Kq7nU45RKqat2Hh1XsoR6anJze7ot+vNfO//w6UV3fsWh01Y2BCK1mFHoYzd8cDm+/9WvVdz91hezqKnZzsenjx83ETHru67zDsCQ427bd86B6lyKVAnBBwZWcdHVefruP/hP3/7wu06KWAGr0lppUtSKWhbNTbNIUauyimWSSr16/8M/85f++s//yX8FV3kb5Ctf/9q//T//d9+8fvWd3/zNX/t7/9nDy8+956wwV0F4ZxtG/D3v2O+/e9qqkcEf/AeABt45x+j9l74cJOdcLRmRAAgdK7rQBy/kfUfI3odhs+/GvWCnRk2N2D358OvvffTBxBvHMt2+un39WejH/dMPeoO7F+JcNFSOI3dDt9sjO/Rdmw6fffZpMx7PHnHw+/1uGxwwxsEbOEUMMRZxNm7YsZo5JpHWqpqqQ6srchdBDKSqGIMQE4xD8Az73dZ3sdbWJDYFBaw5I77jYbR3hEUAkfubu3K4K/NpNr0n7vqN317Fyw4AWs3MDAqtikqz0qRlZC9NlmWpPiJAU0Os3jtpamYIGbUZO1B1RE1VDdP1a2MOwyayq6UQIjunSsjunXZJBGbkuX8ZuwAA8JRJREFUHBNqmdn34KuB0mrVdWzDJY+PDN8F2Fb8hi0PWhKaAJIBuX/0//2NLvrN5SX7cH9/dEido2HwkW7csKFhK6H//O4hzfdg2NWM5T4C+Bjrze3tPM01iWjOyfsOEboYO+e8DzF2SQGYN5txt9koArnoEJtKcKjAm83m2dmOAnZdLyqkMiCcnY1IJAYwATI5woiqrjPRoNm5ziOcBf+MvSI3kZulPuTaWlGFKc8IcFiWnOq0LCJNpJW8tJy7bkgpxeDaMkvJz957j7c9mExLZudM7cTo2I19F/vOO+e9N3JJLPjYBa8iMfiSZmv1MC2qCiKxG4T40EinfL/kJjo6vDrbBXYC6tHEERMbUFVDYlUzBDEDwFIleLfxXkUNTEWWnM92O+9cj/njPPkQQ1tnCDCDVldQBMjqLXrXCYaoSIQqSo6RSJqtTu7WpDYAgzmXw7TU0jrP5/suBu+dK1WOU8rK43h19fjxz/zyL73/9a9ligq4d/jVM19uPv2Nb3+79hdff7z/2i98Q7ePrj+b33+6c4wK4AAZLC1LQWai0EWPMKd0nPONLNZqFzuRiik7F435+ObVi//i750++xFbBSnaaqmtNSmquWmpWsRy09w0NyuiKZc4bv61v/I3fvUv/ZXNdpuXxTGK41ostcrOffjBez/7za//2V/+Y3/73/vfvvzkk4L8Si03xfURYbZuQdTMTNcj61/acq50jZ+wNx49eXY+7kuXbt9+arUiUddv1tMu9pvLJx/srt4bNN3Nn5ZatdVS0svPf1wrnO5ujrdv7+9u8ny/nG5PN/GEDymdHu7ezMvp7s0nz9//6KOPPtpfnh3v39y/eXH57Nmzr35teLj97Pu/tb26cv24f/zckIr51trt/aF0HqW1ZZqXJK1FtqHvK/nT6Wiqm/25C9E57rquGQFoxyqAjkjYGWBpsiz5+jBpSr7rRsPoOZXaRMEAEYhZVVsthFjmh0qOAJiQh613vuWspc6nY02v2QeOwzrqcoih6xC8djj0F2EYmVBEvOMuhi6GVrM4BsLeew7O1hYKh+wCSjNt24vzqoZq3jOHAacMoOCCga0oFDDVWtY1NqFHpBXwGB35fsMotfVYllqTLJPWAmBiBCorl5OaGao7CRjGdH9suTSRYRwF4nRIreTgH8i0KQKAlExEVl2bJzGxCadlrjE2Q3L
x0fnVZuhjP+w247brmxrHTkXKO4NM3TiaSmlIq8dG1JroJ29umKDVOi+LqVbyiGAqTFxKQeeQPIGSo8Dcxb6L8XI3Rk6m5pj2nQe1VvW4FB8CsF9KRuY5n/abgbWBypxc/+QxszPis+12G72oxa6LDPsuujUZC6REc0qEOJfmnBs8e+9PTVJpnQuClqsE773jyyesptExmXbBLzmbQh8YkOfalLj3vOQyxDjnBWohpOhiBfiyNomGMapakxpCBIDWWm2t7yIiMogcrmtJ/TiWZq01U/1SkaW1cwwZCZGQ1ZSZV9+s1qaqgLgObSJam6ZScqnR0dV+tx3iuOkMcF4KD/03f/kX3vvat7ZXT5g9ET+AqgiCTcA3zX3w1Z//Y9vLF59+crmJ1m/TElI5Jm2gxqvfogkgTDkDYl/EWiXHBOCYkUlNgw8A4Ls4Xb/85B/+35bPf0zWRItprU1Lk6aWqi5VmlkWSU1SsyW3VOsf+oVf+pt/63/05IMPT0uSZQrMpWbvuBu6rrmHORlANnny9Mn/+H/6v/hP/qP/8J//+q/tPdwZr2ggA7NVI1svmPb7XLU/mcW+DAysfhcRvbl/WBTa+vYhEbMBmKiKHB4OS0GXj/e3b2opTQqAHg93m2EL0tJylFbmefr8k9/xVsdHH0k1Zldzfvvix99Vig7Vh77vXi2nN5/98L333x/HPjiHCE3l9vY+BN8UU1qY/avDXZ6n7e6s1ayIx+uXvXMX++049knx9euaawUV5wOFmB4eEMwPYwjRhbDb7foYzncjgNJurGZoRmCXu81pSbnmMk3UjUBsKgKK5Mt8QqLYD+iDNUFn7LoyTaKzLDPkJfQjD9t+HEXVb3YEpq3lXMl7IjdN08PNmzJPokbDpovdGCiGLqul+eQ9d8NuWZZWM6FprTUlJgJURKemiAA1YUvkHBKZqus2sKbfJZP30Kq0rGmyOHpC3j7qCCDPLS3akpXUSq5lXX0joXfvvf9BFx2aAvApF2kteB+Ca6UikUOrpbFjx5cpzZ1jIOv7jhwX8osZdcN2tw9d55nJeRNTMBBVwsVqQahWwVB8VNI55Trnw+evzYyJFKyUQoRmwMQ+RAM0AEIQaS5ENImxZ+eQ3Qj+VOWzt7cAto1x3Ay1SfB+txlj8KlJU8ulMtrZ/qy0Jsx9759c9WMXilgqlYJ7ezqJUTsuyzyl6WA5TUuaco7syDtg148b9sGz956HvheR3rvt2O+6gIin0jpv0XtVWIvKt9ErMIKZwb73BhiYi/q5WTOH7EW1ldZUQDXXWmsBM3znP1lt3u9GhmleYoxn6UZFlnlZj7kvR+oVVgieqErzziEiA634dluBgYBrg6Gq5NbSUlXtbBPP99thiM6RiB7mcv78q9/8o798/uQ5IVdV0UogIMVqA7Bk/uVDbqIfPf3Kh8BeHszIck1Ns0JkYsZAXNm11py33rnBczFJUkPwnUciYgAmIh/efvK73/7P/5Py9qUnMG26hjAVqmqqWkSzWKotNZmLnOY07s7+wn/v3/yVP//nYwz3Dw+eUEwZwBNAyae01FJ3sZvSfCuKxATwV/+H/9ZP/8Iv/Wf/x7+TPv18AnoXxDVQ1He7y3eAoHcn2h/wyuKa6Hf3Nzd1ThnwdDpIa0QkanmZwUwN7q9f+W72bW4lr0ZNAsjLMp3uc0mSTtqaqJbS7o+T0utDSnmZck4IdjzcLKeD64e8zICQ5vnzH37/2de/1qR9/vHvHr96+fE/+8eK4IfNw+11jP0yL90w1PnI7LaXj7phd7i9PhwPMURk2l48IheRXKuCmqo0a82IATnPR8nzvUkfw3Z3ZmZd8HOVudSLzehNl2VSg5wX7xwTAfvQDew9M7PzqBKJpDppbeh2uhtUGoWuiQBxadLS3GpR1diPrVWcp1bmlpLUoqb9/rIDzPMpn1REXL9DNNUiMKuhAOeUTEQVmqnWDO0AbdGamVnTEeuskgnMdVuKAzOjCwSG5BKz73e935qnNJ+wVcZ1V+h4cxZVFGCeZikLmLgQu6XkdXAdXai1dn3fj72VhkyMxI4dc/CBmcboURoCEFMxOJWagUqTvMxVFVSjc330HrDrd110Rnw4nlqDWuvb29tWxTlGomU6aavsfecDEZghe2cAa0Xb7vIqMB2myYehtpZT7gcWsNqkmEkup9OUXnwRQ3AhxBCGvjvb7ZqqmlUA57HrOs/QhQAr5Lq1t29fH2+ul5zKPBtAiOF4f7e6t4hpJvKxA6RW2/78bDvGoevHzeZqGKKDU9OHadn2fR+5iaBIM00CDbB3HD0VsVOuqoJoBOC9P6XcWltykVpBZduFwdNyfLh+mKqaIJUlA5GZsmcVkVqJ8PHZfs/T/c3tNC/47nIEIkqEovpO4jYzMEdUW1O1EDwYVJE1cd2aKkAuwozn22637XbbEQjvD9OS67d+/o+891M/EzYbLQu4HrQxgtbaalERQ1Dxxu5lK3OtZ5vzKzfku/zidtKyOAqpyM3D3X7sh802Ote5UEXmnGLwj0Nw3uemrVUgitp+8Ou/9tu/9vcpnwigNjWVXGpqImalShFIrU1FU21TLrnqT//RX/7X/tt/88OvfKXW6msJ3teU7uYEzkdHI6EYZHDWIAY/MlelQy6vj6f3//Af+1tf+6n//G//B3//H/5aMv7JmfXlcpPsy2QIffkS/H4yLbkmdUmnYrZ2oaupa1VVwKzWkpaDiKiVeUm27rMRoZVSFgV0SGpqZqfTg3dhOtxktbKcUprMVK6/eP3FZ+P+/Obtq5zz8fDgQ//2izcXV09bKobcXTw+PdwvS+l3VzUlYrecjul08N5Px9thd8GxDzF2230rZZoLe0Wz4Di6cP70ypkGx+l09D0oJHOR4nCXmtQafaPQxX4UtWKEcUO1OceqYgZd6AwgDgQAKmJAvh/6YWS0oQ/SpLZ2mnNpotKQOXS9mQBQq01Vynws0wGJOcYYoqkc33xuBt3uzFQ9KppJKX3Xbc7OxCCllOZFVcBUsrOMis0zITs3bAGASSU9WJ6QFAwJGrk+bM7csPfdxqSJkvfOtLWUiRygmYrUStZicBLOgNCtavGainZizjvvPZg5z57fzSZjcIOngLDbBinYcgKHxdSpfHZ9++LNTZ5Py7x4wnEYPVLfdxiDurDdjCHGR9vhbLfTp5ev7w8Fea5Sa3NkZ9sNk8vL0kcvAGdDPwRen5jR+dyk1lJEjL1nTk2yMYJimj9/+Sq6AZ27ONtfbHddFy6GPqsmWcPk/rRk50lVpdqyTD/83nfTMp8OMzuoOV2en//Uhx88+cU/fDoe3tzcXD9M5tzZbvv82XvjMLgQe8+d90DkALpAzDyXhsyDJ1OHgLkJqM65ESJAy6U+nCZAVLDT4YiEOWdSobWpV/TOOTLb9OFst0dQh3b02I2jKCiRKXTBPb66hHK4//6PXr167QlyLi6EJeUV6s+EiJByI8YVxEjEzO88imYIAGoqImLmmTa9343RB59LfX17PJ3mn/25b5xdnWurbTnxsG3l9E5LkmIlg1ZUa6oKJv0gNc8lHz
f7bilvX7748cc/5uCBnCK/CaGLrjWNMQIAe2dNvEMy88HvdmdB8m/8F3//89/5TQ+iiNLEEJpIE8lNS9MqlprMpc1VT0vaXjz6a3/j3/y5P/ErjdzdcSIwtxmbVB/CVReZqDZRtbbM237UNUDvw6Ayxk40llplu/03/tb/ZHv5+O/+3f/rUhvRu1lsDeO+82qYrYW+7/abv5cNgJrTkk5NQaUaACpWqe8wka2m+QDggsNa8k9muhVCQKpGsPb1lpKrVI8GyMRMhCJYa769ftUFn/Kc0vLm9aevPv/hdn/x6ac/enr1RM02j98LZ4/yPNXawiBMePP6i7cvPtmM45KWaS6b/SVgq/e3Pg7o3OXV1TBuQghFNOeshMbUX27HPpxynYuaITnnYyetmSqbEa98J0aiJkI+gJpIXVeaAMYIBDjPU6sFVW8O7IIDVQRyIZo5ACDXSc0ibXtx2UTc1eO6nEpVadn7WNO0wrAQwXWDIYlUoHj7ME1L3m+Hoev3mwtClCZNBFpZuXskVWoyz9SaylWMHW+2oI1cBPJEqMaSlnKcoObT21spGcpCzhsSmGo5mSohx91lOLt0ZuKIvfOe2cC2Q+cJj1PyQ8/M96c5TdOLkjCXcejJzK/+666/T+nt4eHwcFiz9ezcIk2XPPhQbcFc5tpeXhN3w9Nnz9+j+NWLzbP9cNvgMOclJSSKIT7My36/CQhEqK064n0I0zzPDXPTJhqYY+d3fTcE/5BqErt8cv6LX/vgNpWbLAqmTYBoNuhj1NqmJbXcnHOguJwmRsxpDv12Li1u3cVuHDfbMG4mpocK+/Mnv/DNbwDBq4d0tt/MVUX1mMqSageW2TdpJtSQU8pzWrTkVUNVQAPLuX5eynw6llrQRxHbbUZViH3cbDZ1nqZ5XkqrKrvYH6epII8js/MK8PjR5mzsx3E00+C4tpbFoOqPb96WtMLtSFVXG6eosHOpFEAj4iamWkPwzvvWmpiowpr/ISbP7Bm7QM65pvry+nh9+/Ctjx6fne3QTEtp6JhneEfPUW255WytWS0tzUjY785olLkuOZ2GYffe86dpSR9/9nk/7sbdJtd8/fr1/HBjKsNmG4KvTbph2203fYu3v/Pt+x9+J51uwazoaiNHFa1NishSpIhV1bnIcclV6Rf/1K/+N//6f2c8O59zYu8cs4oeHw7Be99FMVuWkkoeu24zDKC1GSjStGRJs7Z2d3tbqhyOJyL45T/zKwjwd/5Pf3clavxEGVsT1msByu/tN81W+WwVChEJUH7SjAIG74Cuqjln9kujoAAuREgLIzFz12277dXgsQnmkqXVVWpzzpk5Imcm7FxtKiJPnn14vL+tJR0P95dXj1utX3z2u/l49LFbluRij9TUJC/zePbIxXE53XXd8OTZ04urJ8G7ptYHFmQcz+c037y9WQVARNJW2Ec6UD/0xPxl+R5yjIwIBsmQ7R3kjpmlVQUspTrn0rKM0SNirdl5JyqmhkS1KYiAZEEKsXOO0Jqp+BBKWhDdXBYiGvY7aAWJht2OwNYnqYqSc4BAxGiKiIcl23xEq4QIxC0lSSdNJ8mT5gNIBVMwQRPnAxG5EIbLD7aXlwy63N2WVqVUlkXLiXyEliSvihuAVjNTBHl7nF//0HV1+uDxZdhsjupuj9NxTmWeTtOspk0kH4+OGRD7fqOujV3nvVuOB/a+24wfnO1Hhpvbu2lO1SAv6XK3/ei9Z+z9lLP3tPXsx20z/Ozm4e3dPSEQUS3ZDLb7PcxZ1KaUc1qcc86Ht0s778OG8XJDrwzvk5qAa+22lrNorVXv/bHUE7msQIgqIEhVDIrU0oAwF021Eabf/eH3AODi8uLq7Oy950/P9/tpSb7rWlv9lDQ3tSq/+frhauzRAIilpVeH5bDUZZkIMTVhk+lwVDAFbLWWtBCzNCFGzwSq2+22EQN6MiLPFTB2Yeji8fBQRc832wsmFyK6UC/OgbiWXJog8Uno/n7qpyy19Q42fc8hHF9+8nB/v3L9ibi2CmqmAmi1VRVlJlMrtbLjJqKaVEHaenECInbrZgCMiBXg7pjeXt+9/2R/eXXO7KHVlqZAWK2uatFayaF5aTmbFANy7KWWkk7Uota8iNpmc35x/tnt0bybl9T3nW3ONmeXtZXWWuiGPvab3TYcbw/f/qfp9Y+pFUZaSpb2DtSjps1gKbo0zbWl0k6p7B+/96f/8l/91s//4SKNTgdkn4+n4Fw2KqVO6bDdbEDleH87Hw8lLa216f6OmVNaus3uo298c56W+7tbNtBW5iXdvL25ePLer/7qn/1//r/+XvBuPYx+ssD8l5Qy+/JQq9JiP8QkaG21p4EBM0tDeFeBFMbNxePHj/T1G9WWlnllMsYujtttgOZ9YKKGFruRyXy34dD541GXhcj1210c9ufkdy8/Pdzf5JKbyP7i0Rcff09EwzDEsTXVpqc6LdK0H8fd+Vnsvj449PsLQxJAZhJCaWKlMHPsIgAZMYCZRDMNMRI7RGDniGi1cDvvCUhNalNEFTVQBTQkx96ZoQFM8wzEiBAdUwiNvYGYQVFFHxmgpDlLY0biYIg+9rQiyU1BG4CSSiBWVTFjBEDQnMykqXR+bSkGQiqlSk4tHcvhWsoMOYE1Zg+I7AMAgJihkiRtjfzb+dWPWpkcIwD64cxMyXVGARnRTFsB50wFrRkgoDNQ9zs/+PEnn73ouyEb5VZbqaptJbjHGIOPPsa+H5x3nWNSCeyGi535sNntfAxXg4eP3hOkqYgBcKv3dw/kORe82I5IjERW24dX5+QQkXKVUnLsBlsLhMCamGxGQPLeK2BqbSnNFS1iQxeQKJWSa71RIyLJCRGAyBMxaPCeV6qYShWQUoPH25sbNXHOP3r6lBFl1ciJrx4/9iGIwdVu6B0P3rdWamuZPDHenuZdFx9t8dkZI16Y6SdfvDmVNjy6Oh4nYJ5SJh8RLTCWlHzoxs14Ng7eORfCYZpiCMh+XtJSq7nIILdT7oKrD6fOUz8M6AIZdiGIaG01eldrraKvbg7ojk83vrz6eJ4XACDCXFqtbX1IrvRhIiJClRUUQSLWTFe77+rL9Y4JQVSQmRyflnZ3Nz2+GB9d7vu+B9NWFofQErLzREyOVVRq0ZxEZBUWCIHYWxOtkzonarfX7tHVs298I6ytBjEE75yZLXm5vz/u97t2vL35zj+5/vi7WCZCRNSU8zG1XJuqREeGMFedsmSRZSkYh5//1X/1j/zKn3Oxe3tzz+zM1FTZBTSrIuzYe/7ke799//YNrL0KtXRd143bVuv26unTDz7anJ2HPvWbkVpxJlXFmrDzX//qh2l6+Ee//k/WXervj2SuxXQ/iQoQ0UpQ2p2dm4VGcPv2xYq72Z09Ot7fiBRGjDF0fbc7vzodjrLKq4hm1moxFUE1NPbeSgHyrus3l09zntM0A9wgou+Gp1//qeubm/1nPyy1IjpFDv2WXAjBWfTgLkC1dJ3uz8jx6e4mzadWSx3Gbk4xBjNgIfLvLla1ViACtRjj2rIMoK00ZZNcgdb5yy2H02TQjYOUSgxMR
MitJhd7rZmJVMW5oIxqIjm3DDFGz2gK7L03qTVXIyISxSbgnAM1mWdYn5QqJi2fDqHrGK211m33xzeft5xNtabJhY68C8MOkbvtznUbH3vr+64fW5pqmus8GWjX9SpNapY2A3dNIIZRMJgfXRhwxRFqQxekZmj5nbmsZZPGPoiCiSBkMHTbi/MYY0Tbgk5JjoBqMQ6bbhgeP7qMMWqtu83QOeq6jthbbYQQoh87X3NqDY61HKf55jD7cdNOp9vr21zb3d1NtxkuznfB+1mITDuGYRycweX52Ra9d476vjQJnjvnEHCuNXrHaHPTVKVD80CHlKNzhOydU2uI1sdAzIc5JdUm2oXQTEyaY5e1gcEwbjbj+OyZm5YFifq+Y6b9mTGxZz4t83FeFseLbxHNWo3kAlMfow8+NFEzBzAE98e/+ZEAgdWcxQhPRXOTrgvesdWWc01ItRQKvjS9iF2rNQbfhSAiLjgHqIitFCKejwcRcJ5idIjEiKSNzSrytu8uhni57evhzXfvb5sYIOVaa2uiRmAIuHLrkckUSq2OHSGJqCGIrhIBv+tUJWJCYqhNlqWMkR9fbPu+BzCpVZxXm1WFOboQSBkUtLWVuE3EoPJOSdJmRCCCUm/vrjeb7ePLy4c5g4EhVREw67sB43T7nX96/cPfyrfX3qGJGGAp9bjUubQiMgRvCFMuxyxz09bsG3/kj/0rf/GvPv7wI22iqgBnZmKmtVQwXOfNUlvLqZRCzpei7L0fxu351bjd7s525/vdzcvPv/tPvp3nGQyWaQpD/+i9D3abYRyHovQX/vK//rufffHFixdrc+gfdJb93okGsO7b0TGSI3p3M0Uijl2/BGepwvrcYFSzGHzzfd/1CoqAIcSzy8dd302nOaVlmk7duHny5P1HX/+p2zevrc61zGBa0mnYjj/z/rP7lz9My8l3W3Rx3F/2wxD7XkDzdHLO932vYK1V1w2di877YbvzTNYKEabapFZTiZsNh+C8l1IRjZnNEMmH0FEI6TSZCRMhYux6ZDIAMQVwtLaLUWfvytxYmyxpAbAVOokuNMJpWVQUsZqptUbO98OmH7paW2uy5lzXohwkkqbkfFVLJYNJef15Ph0FwBRc2FCMplLmxF2/nE6whhQJESNthuE8MEhkA2nNsJWsUolIazapdb5HBdEGuo6E3mqBmtl7A9OaAYlibAqILNpAkV1w3/jZP4SqxC6YXO37syG+OrWDQisN2U3zBMQ3p6XlonkhNSnN0MyHY17NsFxrM7DaCuWGIpePnyzTfEql+HD36UsBdb5TWAkh4JnRh24t4I0xxBEAzvbbPgYjeu98dznEsy4o6iLwUNVa8Y6NYC6LitRcRVofArRqqnOD1kSJA6N3biCvZsTOO4+IZ7ttEyFCBgzeidmcy/FwUtHzi7NS6v3dLYDdPtz1seuH0XfDkpaxH47TnEoaPGFeWmsiuhkG7jpr7VSKKpq0oQ+ltWHcKwD76IMnpmVZQgzsPJp1jNFT5rA0O99t3Ip/ZGrkailLysfTSYCIOTp3nE7neoMuVLXazBRrFXZoCk2aiCKiIyi1rZh7ERFTIvIrrx1UVNBAVZ0jJG7NEGy37fqhQ0IDQCIzRXQmolBAQJVMVFWJWUXABJGZnEkFYiJGE6uFO3d3d3sZ90PfWS1sDZGWh7svvvftF9/7ren+znumVqUZEEXHzlnfByDshIkpVZ0aHXLZbve/+lf+2s/8yV9Vs/l48s4xc85L8I6IjCmlrOqIaYg+7LbjZjNPE4IS0bDZra0LjvGLTz/5zj/7r46311ILAF2c7z68uHo86uH4ytphO/TOD3/tv/UX/oP//d+eU0b8vaslAOhakmT26Orq/v6+tsrsnjx9j0M6pqMLkdkz8bg7X+bjSj10vr948sHu8fPOhQehMGzycjK1U0qH+7t57k7H+2meCLHrvPfIWqaH2+PhZjodAOyLH3//B9/+zW/+9LdQGxI9fv786r2P7u9uzs4euRCoCgKp1KrmvfMIj54+Jtc5MGm5GqRa7q6vn3zwIQJqkziOpWTnQ9f1a1mNcy66d/p+gUrM75BQ7AEpzVNGYyKQGhyi62tOIOIgDR2z6ytwMwVRI2pNnQ8YUVtb3a6quqRcagFpxBxib9qAGQHKdEJE9CHGgLu9lXy6fY0+khhFh0yiCFUMVGHJtRB9+TEE6YKncUtdv9zdNxeIGZG579EkOAak1m9NirW0Zo21ZgTDKK0sUGfqzgD92oqu1sBt2Hdg5rYMGQC03R7nuehLOk5rEZVqKXWZppRSTfn9b/7sw9sbnR6Y2Ds37redY47xfOiY3ZxzbsIiu2EIXdcdo3oWRNpuAHATeNeFLvjrh1OqImYAcL7dbvb752eb4N3Y9475LtXa2pIBTdTFm9N0PnbDtm+IMTrneiSac21NHOFZ5wghlTqlXBCboIp1fcg5R+dKbT5Gz2QI0nReplRyCLHU1kTmaXp4uJmPh5RziH1TY5831eiUxr57uL5lwlbb/cNcSybEcRxvru9zTtbasNs+3D+gSpkPngmRybk4jj72TCzSxs2uibCP0mQ/9k+uzrfjyBSqaAUAIIcIIZTaxu1OVbuuq00IoJ5EVI+HSURaEyJm5tLqWm9ra/XB6kddyY7kAFaKsqkKIRKTmREhIzapnrAL3gwQgJ0DRFEFlRX9pepU2socRVk98sropFXV5kK3JlilZAGYTHfzuQOud68//t53S9UvPvmxLCeH4AlRrVRBgkjs2ak0BPDOMWpTq2rHUr7+tQ//8v/g3+ovn5VclpQQsdZi5p1zq/veB8/Oi6h3rta0TMfY9UN/Ka12XVTV0zQD4u3D/e319fmTZ/1ml+dpuxk/eHJ5/ebl93/0ceg3yzztOj7rxpefff7e+f5Hr64BvyzSfBfHfHfrzDm7ELAUM725uXk4wZyPpgCMyOz70fmu0gKIrWnO+eXL1zAfru/u53leTgdAbCJMPnab1kpOSWp9/eJTrIJdtywzO4dIBlZyPhwf5qXUJmmZmUTKUss8dL4butNhYUc3r18uhyMj5rQAmI8xOtfF8Pjpk0cXTzbbLSA6x+Cd97ztttEhmzIhEaecoMp0PBITMqE4NZinU4wxlfbmzduW5lYyd0OI0ZCcD0CwPNzqfOy7Dp33sXOxC3FAQjZFJu8DuEDOmahzVEsxVOcdYxMCVFXVbhwccxXN08khOh+63eWsN60UFwdCZB/NzGomk37Txd6RYj9umiEhAruqoE2tVd9vAJSdB4O6GIeOvcpycP3A7AEAWkNGEHUtg4gxAyCyN61WMyKg661V9yzWudohmbvYNZG3t6fD/f08zczMlj27/Wbnzva73u2eP92E55uxd8x9F81HdUwmYhgJi5op7js3dKGVcpiPlfxBeSn1G/v4ZL8ptR2Xer+k60aHVPd9BB8WkWHoQwzR82YwaG0GfHlKc7pL81weYNyOs4K7uWGCo/lZsYk83N/dvH07Dj2YHQ4HNH385HHsR2IexxHZiSgS1VZSKv0wotn1zS05l9OyLBOTY0Yw5NAhub4P
+7ML7/3t3Z1n6rtunicDY/Zx2w9Dv9tuVPThcKgl11piv/Wh63aXUhOCqbQslg5HQNxdXKgqu74PYfdo9ERVreZkMfbegfPHVKSJIRLT2bAlgOhYABzidKxvX72dp4mdd96JNVNjJufcUgsYAOK7npF1wU5ICK01M3COVxipERIjAjAT4uqeY+c8I6oIE9GXnJu1ZWe1+BCiSlVFFzp4B8yxVhISioosBWt6uHl5++kXbz/5+OHNKy3JEQXHtRREcIhDF1NrSy5oUmtbC+wICUAQcNfHv/iv/6Xu/LHW5D0ShSWV9eLMjoILq8ehSVvT8o5dyRm/xGGLiEqTkrt+OD8/i94T4nw6bcaBpLz+9JO0VOAOgAz5Ry+uT6ePr7Y7LXkb+VgF3vkz3h1k6+3y4XBY0xRg9sUnv7tomKaHaTlyds65u7evToe7vExrd+Lm9Qvkric93d+aShNBxFpTzlMTqXlprRhYzktOS5qXUmtrWmtZmwBf/PgHkfjs4sp7X1I+PNy/+Pj7/XIbrJIPogkouNjPx5lcPN7dtJvbvMwA1n2//+q3fuan/9DPso/RU80pzdMnL19AK2bQWiX2DVFqqyWhj87HPB1B2wqiYnYlZ0lH9s5VCaUyO3YpbvZxfzUrHlvDkuv9gX2HhC1Na08ZEfbbs/PHzzebvo9evB87N3QBzA7zIqW6NTUEouCL25SUpnl2Pgz7M98aKICZc4zMofcdA5iAFOe7vu8M/VzqsiQVYR/CMBIoCBAAM/G4LSk5NNeNlhdkr62hC0gE2Ezbu05UhLac3sUzDLFlIHb/4uUEyALoi+3H/moII23nIZa8WCmbLoTRs4+dpOFsA0Cnqmju7aGkfGrMRZSc8851BOfRP5zmpdT7lFOtTWrXjwJ6/bo8vTgfh/76OE9iHIdt388ikXXJtRh8/+X18XTM01RTQh8pBEIsKaVpfvT0cXAOAE6nU02pLsfd+Xmu9Xh/B1pPdzcqCuyNoBQVNTQlRy54FQCEYbt//PT5bhyGvsutjUO/326aggu+Sbs8O9sNg5oCIhgMnV/mueuCttKPY8p1sxmic2AyteqcV7VUhEM0QgTH2DOz98REV+f7q7Md+U6bxs6HEHIptFaiGZRSFmmuWa7FEfcxBscMRkiEsO2cSPv4s0/evr1mAhNBpi4GRFDTnJsjUjNGWgvWGIAYzFDBAMF7DoGJWYgdAYCtMGnHzOyYmZhMV8ZQY3bGRty9S434gAitZARExFaKC5GZzBoYIDlVaaV4hHk+Tjevrz/+gV8nQ20oZkjOu7OxC8xzSsdlvaw4VEHTlZgnCkwuNXf/8Y+wFu9RxM6fvDdsNlUEiUuuSOCcj0RqKk1MWmDOhwcFOz083Lx88flnH0MTH+Lh+DCfjqDy6L2P/uSf/JMvPvsEiX3Xd2ppnj758ceH03S526RWwrjpclakqVT80v2/NnSs0Mp3Fg2Dw/2NhU1ZTu9aucFqmqWWJhUAEfPp4RY5qMe8HEWaqqw/p+REteW8IJgaLPPp/uFWP9X7h0NOD6lm1QZgx/s3n/+uffVb36xVajMkz767fX1kZ3WZog9P3v/g7RcvT8d5OR4BQA3jsKnSKPSI8Pnnn/swpHxKtbVmYJbnYzrcO7cSUxx73407JK9qvutvX77oh5GIVCqYgPPsIxBjiN24RQRVi/0wnF/l6QSmbrCakqq4fitSl7SA6pyLIi9T3EbSnO+tpSIp5VxbWSYmbGbe+XF3zqahD+NmFyNLHA3AOVJgR2uBdJnnuSi1bAjH64cHZh63W+ciBmdI0sR5n8tcaiFbW6e1mrrYC7m2zM53AaymCcEAjENvxCAV2aspgAIwaAVT93opjsghDojXt7c3n3+SU2qt1VyQnSH58Nb74JD24/CN959e7nbUxwk6F8NJYDme2jxJq6ks9zmVKhJi9v7+cGytaqs+RgP60aubzWaz2e1D8PsBCU2lvfziOvbd8TodT8daqkhzzjnE4D2z22xG9+Sx976lxQy2m03rhwPB61evHdN8OkhJwC4MW0XMzQyhH8daSi0514V85x1vd/vtZozB7Z4/HYex1ppSGoderHU+MEJrLQuumuTFZkhjn4qWVEKIxOwIgmdpdr7Z7AbJtSqcSxPvue86RDwejvvNdhi61Y095QqeRPU4TaI6dISqTEhDV0s1U+ccqrVWTUUAvXPGfH9KWI+31288U6ut63yq7zLngEhoffQGaGrOkYqtqAwmNhNEQMY1Jc2OiFCattpMzQVa4dqqKtK8iwCm0oj8muP1IYJpq9VaA2IAJe9gLTdc0QWiWlurBcBKGMZN3wde0/iISACOuO9677ypjD727I8pNVUCZBMmcMqHZdoNg0d4OBzv3r5RbZvtltJpd35+tt8LOQ4ekE2XLvhZcCo25Xy8vX3z+ecANk3z/fWrVuvpcD9Np2EYAPH58yc//Yd++vb1i5xSNwz399evXr89naZUWpV2//BwnKau71Nrg/civLT2+4BlvxfPXL9My+SUcp6/DFfUkqaVqAaAVm1ZHpACBl/LYmAKsBaxgalqa7JyGnRaJmZmFDN6t3RWMtPWSm4VjC6fPLt+/SKl+XD39nA8zoe7NOurNw9PPvrw7NGj0MebVy/n+2NpBw7dkydP+hhOp/u761fXr18/+vBrT7/yDQ+GRLLblrPzKlJKrcvSalmWidlzCKEbLp5/dLq/A0RQQ/ab/QW4NWHoaslq5hyn40MruSxTTcmsMTtDRAqAHDd7bZWsvfnx96xmM2utgjZgcqEHAG0FTLzvQz9O8wRArt9096ddF4bttt/uPEVECJ76fijLgo4oi9vtW2tpmZAInSvpJK0hoBi24AMRMpdSiYjYOybHDrseJYCtOBlDIkO3PsYVEJnQEJuhY2lJ07Te6hXJzafjm88+SacHMCUkQnKo3Tj4riMDIj7m9sMXry8OpyXlLKrs/O78cDrm6WTL4kABSBAX0URohLU2NCyldl232e43u/35xYUZpNJyKsTc7c63mwH5UFpNSyq1UggCMGz33rk+uBA8gVnfqZrUTMQfPL5a8vL27n7YX1ZV7wMSbTbbGCMzxND1jlOttdXYdbHrEVBEiOjm7v7TTz4NXdfU9tsNM0u9i46L6vn+LJVS1EBPwTnP7tHFWdZGHKS1KZW1FbSPYbsZmAiRRCHVOi9L1/Wptds314y03fTesQG2ZmbahSBq0oQADGoXfHAMKqICzKUaIMwp5em02Z+TIbPz3gFQiP44H0oR59g5dkxr+wmAAjgzUGNVMwV512L9zmeAhk3X2cLIMQe/qjbr8q6V4ry3VQWrhRFqBiI0FRDV1SHKDRoiEYCqKjLVVtOcXBD2qfeui14MVkRPa0qOG0BT7b3fBK85d8GjmYksayoXbBPDOHblcNzunji41NaCo9sXn9+9fPlwdfX6ixdsGoP/9IuXX/3oQ3AunD3aP37CWs7PNiklSfboyZPY9Z1H1prSEjd7NH396e8e74/nT56l+ThPp8uLi29+7avLPC1LOs2nt29vSq0p11rKV549+uLu8HC
YqwmdWuiCARjkiIZGZI5KoEaD4QwR1URzeFCBsGhEzsYTZ01IoI5iaSkIiQ3M1U6wZH6QARAQn5gHchHBaJg+kY8HWxcNxtEakyABzHqNE68kEvIQBiYmBConTNkqfzu3dzKZIk52Tm+vWhwEcnzt2naQKAbmFmOjQiLIjMzC0RqVnv3U1zSutpxa9fAOx7D3cWdhth7nrsHIQp4L+j7cEARByqETRUCSEBlpwSp4QYSDo8AKwPZ8wlh7sOrUqmeqRSzdRUkUQQIbwwScmHjQ4OAKwHIZr6vT/6GOaesk/CJUsg9qEpS0oy1BGilLRM74eZm0Jg3WuMMS3LMn9kQmGCcAg+L8uUCAnd6el8IiS17m4RwAxt2PX1bbs/yjqvpwURW2tMtJxXQGARQBDCdZoAwS2OGF0f2nqH8Jzk2LFoOBMd6YqS8zANj4horekwIAxXM0OMknM+ppB0JHMPFgkfJDUdQ4cyAGEws0awEHio6fGZd8DwQCKLqNs21JA0LHrto6v2lqYpT2WYmVu0yCmf1vI0T0NHM/eIbkpKLKKmb7fbp09tLdMwX07zJacUMC/ZYfbISvZWG7/2lGR0N4p5Pn37u+zIoF0daxtbVR6RvtQ8TwPo1+vjU4fvnpYpJXUjZnQQIYdkrSXipt0BOYAJEydiGubCMsYgxBCutfveWJgpBIGQ5mkqhcdwM6u9E1IzI6LrY2eibdidGgJwYgqY51JKKjlabedpyUhfqnUPd1OlsOHuyOTh5IAAWYQiiqROWO+PznF5WtuIMCXwCOPAKYtapb7XDkxMgMjAKMzkdERYCLN45HrHoQCOHsSS1KjbCNWoXYcSkx3kSxEDvO8Dw4/+1pQXN9v3unIUipeqQKJBAcHgjMFEgexIE6Fqv3VHxK03czvaMnIwTxDZPcYYgIgRYQ7hh9ESAD287xURmBjCLA4G7XE7HYhfj15mDgAa0AkP6BgdIcjDjXok3I/RmxlEGAQEMLEOhQhAJREMcDV3EBZkgJQlTxCeynR693y+XETSkX9xdw1wJAdAJDUjRBH2iEJghEciGAIAjlkTRwQKo4MHmDtEJAQuKSLMzLqGmiEgMg5zd3RAJvPISeJPA0Quxc0BDvOxv71eDwmj5MkDEJFzJuLean1sQ41bBgcESMzIvKyzMAdA78dTjRPL0G6mzFxKARzHDTcQSQTcA/m+dXeflyK5GMB9r61WwjjNsw4NIBFGkfF4uPlX55v7MW8iRHcnYURCxGEqIof9t9cmnC7PZ0lJskyl7L2Hw/E66b2PVkMN3A7urFkkwtp7NwtVTgyA0zRtj4eZUc61tpTkcjn77hqKB8TVPUtSHwjRWj9eOapahxITM7uqe0pJInz0DscBBNmGAgASEzP6cfUECzsyXIxFgWptWhsSh0WYIgS4j70fnpFlysuUhPn18XjcH3hkGUTMrNW273tA7K1TmZZ13rp2C61N7gJEW91TzrW2um9lmggZH2Oak5tvW3MbBzzjgIxf+8jM4JaY9n37w2hZaPraarBpysSUmHPJOOjteqt9LOuifWitRFRK/lp9UTO3iMiSw+1+e6SSTa3kvBRBFEYfGlM671v92sYH2McgCHbutX369HmeyrzMAL4mupzzGDqyMOfeZW94pBXlMJFlJjpYjEPVIKc90O59tE4IQhjuqjEV4Ty33s2HTNxrI8ecsmRwiD4oMSKxg2dmG8MjXO1AfqoqRFjEsi5I0HbUoZKEiFut1jsgMBI5YEDJ6ZxxEpAZmPg+ANwumZ6WzISbGgH80w/8x8/4X/9QAQMAgVIA6FBBJIhwVwAEB3fHiIhwHWN0M0PAgNDeCNCIDgwQBJopoEM4BBxWAkcGCAwg5GMXEOEAfExqgY4wMGdmd4gwcQcDzEwH9BqAU5KUPYBEciksiUVYEiHN6zyfTiTSRz8iP4TA7qbmiAZGEcy4b01ySsKmGqY6DHJMeUan49YjxDgLAboDhEcEEydBHRbqOgawcAZzDzOKECnApBGINNSJkCOIkAnd1DQCOYjaMCD7Eze8jU2vb6/77b4+XZ7WlZlHH0F8HCGZqY2xbRujLOus4eYOga4OBU/rXJGCSdUCwHQgiUUjodYHErFIrX273sB0rP2xd87ldFokyfndEwK2NtQ0l5ISm3v3qNd7mVLJBZCPCFieBACbqgRMU04p9d4BIDEvp5xLfux127sOa7WnxKwGHjnLNBVmJrcgYuIIdPMjJ3gQLbd9P64wB0CYmY28t8bCRykCIWiechZTIyZAcLc+VEQQ6aAtH8d6BweP3vrw0DEkcYBOpYTwttXrY1+WaT6pGwjTXLIPTRhCpBYNsawnER5m42CHMiNi2/cyTzpGeLBkD53KnHMxDxtDRCgnSKnX7gG9d2Fal0Vy5py1Nh1HmmmwJCQyszAl4cfjsUVEOIcDiwMS4jyX0cde69Pz82lZiKKNexFZS9q7jj4AYtt3AiTGAyAhwvGwYQ7UwMwRSNKj9Ucdy5TWuZymMoZbfK21uEdrbdSKiMtpkZLHGI/a2hg+xuOxfbkuSADM00GxNTOzzBIQ1sYA6GZDBxOHh6oFUVdHHeuciuDjWkFSDb3t/ThWSwQChh+wMwJ3oaDEB6un5OSEgRxmYSPc4fjXAJgJmeSIjwqrGhEFc7gDoZoPVfTYFJmwCAvjGCPMwskABIHc/8Vv1//RP738X/+NiwyC6G7mNrqa6v8fhVAgsfiBI9sAAAAASUVORK5CYII=\n", + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "from PIL import Image\n", + "import requests\n", + "import torch\n", + "from torchvision import transforms\n", + "from torchvision.transforms.functional import InterpolationMode\n", + "\n", + "device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')\n", + "\n", + "img_url = 'https://storage.googleapis.com/sfr-vision-language-research/BLIP/demo.jpg' \n", + "raw_image = Image.open(requests.get(img_url, stream=True).raw).convert('RGB') \n", + "\n", + "w,h = raw_image.size\n", + "display(raw_image.resize((w//5,h//5)))" + ] + }, + { + "cell_type": "markdown", + "id": "f72f4406", + "metadata": {}, + "source": [ + "# Image Captioning" + ] + }, + { + "cell_type": "code", + 
"execution_count": 7, + "id": "6835daef", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "load checkpoint from https://storage.googleapis.com/sfr-vision-language-research/BLIP/models/model*_base_caption.pth\n", + "caption: a woman sitting on the beach with a dog\n" + ] + } + ], + "source": [ + "from models.blip import blip_decoder\n", + "\n", + "image_size = 384\n", + "transform = transforms.Compose([\n", + " transforms.Resize((image_size,image_size),interpolation=InterpolationMode.BICUBIC),\n", + " transforms.ToTensor(),\n", + " transforms.Normalize((0.48145466, 0.4578275, 0.40821073), (0.26862954, 0.26130258, 0.27577711))\n", + " ]) \n", + "image = transform(raw_image).unsqueeze(0).to(device) \n", + "\n", + "model_url = 'https://storage.googleapis.com/sfr-vision-language-research/BLIP/models/model*_base_caption.pth'\n", + " \n", + "model = blip_decoder(pretrained=model_url, image_size=384, vit='base')\n", + "model.eval()\n", + "model = model.to(device)\n", + "\n", + "with torch.no_grad():\n", + " caption = model.generate(image, sample=False, num_beams=3, max_length=20, min_length=5)\n", + " print('caption: '+caption[0])" + ] + }, + { + "cell_type": "markdown", + "id": "fac320a2", + "metadata": {}, + "source": [ + "# VQA" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "id": "5e6f3fb1", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "load checkpoint from https://storage.googleapis.com/sfr-vision-language-research/BLIP/models/model*_vqa.pth\n", + "answer: on beach\n" + ] + } + ], + "source": [ + "from models.blip_vqa import blip_vqa\n", + "\n", + "image_size = 480\n", + "transform = transforms.Compose([\n", + " transforms.Resize((image_size,image_size),interpolation=InterpolationMode.BICUBIC),\n", + " transforms.ToTensor(),\n", + " transforms.Normalize((0.48145466, 0.4578275, 0.40821073), (0.26862954, 0.26130258, 0.27577711))\n", + " ]) \n", + "image = transform(raw_image).unsqueeze(0).to(device) \n", + "\n", + "model_url = 'https://storage.googleapis.com/sfr-vision-language-research/BLIP/models/model*_vqa.pth'\n", + " \n", + "model = blip_vqa(pretrained=model_url, image_size=480, vit='base')\n", + "model.eval()\n", + "model = model.to(device)\n", + "\n", + "question = 'where is the woman sitting?'\n", + "\n", + "with torch.no_grad():\n", + " answer = model(image, question, train=False, inference='generate') \n", + " print('answer: '+answer[0])" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "be95d7b4", + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.8.10" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/eval_nocaps.py b/eval_nocaps.py new file mode 100644 index 00000000..3cbb09a8 --- /dev/null +++ b/eval_nocaps.py @@ -0,0 +1,118 @@ +''' + * Copyright (c) 2022, salesforce.com, inc. + * All rights reserved. 
+ * SPDX-License-Identifier: BSD-3-Clause + * For full license text, see LICENSE.txt file in the repo root or https://opensource.org/licenses/BSD-3-Clause + * By Junnan Li +''' +import argparse +import os +import ruamel_yaml as yaml +import numpy as np +import random +import time +import datetime +import json +from pathlib import Path + +import torch +import torch.nn as nn +import torch.nn.functional as F +import torch.backends.cudnn as cudnn +import torch.distributed as dist +from torch.utils.data import DataLoader + +from models.blip import blip_decoder +import utils +from data import create_dataset, create_sampler, create_loader +from data.utils import save_result + +@torch.no_grad() +def evaluate(model, data_loader, device, config): + # evaluate + model.eval() + + metric_logger = utils.MetricLogger(delimiter=" ") + header = 'Evaluation:' + print_freq = 10 + + result = [] + for image, image_id in metric_logger.log_every(data_loader, print_freq, header): + + image = image.to(device) + + captions = model.generate(image, sample=False, num_beams=config['num_beams'], max_length=config['max_length'], + min_length=config['min_length'], repetition_penalty=1.1) + + for caption, img_id in zip(captions, image_id): + result.append({"image_id": img_id.item(), "caption": caption}) + + return result + + +def main(args, config): + utils.init_distributed_mode(args) + + device = torch.device(args.device) + + # fix the seed for reproducibility + seed = args.seed + utils.get_rank() + torch.manual_seed(seed) + np.random.seed(seed) + random.seed(seed) + cudnn.benchmark = True + + #### Dataset #### + print("Creating captioning dataset") + val_dataset, test_dataset = create_dataset('nocaps', config) + + if args.distributed: + num_tasks = utils.get_world_size() + global_rank = utils.get_rank() + samplers = create_sampler([val_dataset,test_dataset], [False,False], num_tasks, global_rank) + else: + samplers = [None,None] + + val_loader, test_loader = create_loader([val_dataset, test_dataset],samplers, + batch_size=[config['batch_size']]*2,num_workers=[4,4], + is_trains=[False, False], collate_fns=[None,None]) + + #### Model #### + print("Creating model") + model = blip_decoder(pretrained=config['pretrained'], image_size=config['image_size'], vit=config['vit'], + prompt=config['prompt']) + + model = model.to(device) + + model_without_ddp = model + if args.distributed: + model = torch.nn.parallel.DistributedDataParallel(model, device_ids=[args.gpu]) + model_without_ddp = model.module + + val_result = evaluate(model_without_ddp, val_loader, device, config) + val_result_file = save_result(val_result, args.result_dir, 'val', remove_duplicate='image_id') + test_result = evaluate(model_without_ddp, test_loader, device, config) + test_result_file = save_result(test_result, args.result_dir, 'test', remove_duplicate='image_id') + + +if __name__ == '__main__': + parser = argparse.ArgumentParser() + parser.add_argument('--config', default='./configs/nocaps.yaml') + parser.add_argument('--output_dir', default='output/NoCaps') + parser.add_argument('--device', default='cuda') + parser.add_argument('--seed', default=42, type=int) + parser.add_argument('--world_size', default=1, type=int, help='number of distributed processes') + parser.add_argument('--dist_url', default='env://', help='url used to set up distributed training') + parser.add_argument('--distributed', default=True, type=bool) + args = parser.parse_args() + + config = yaml.load(open(args.config, 'r'), Loader=yaml.Loader) + + args.result_dir = 
os.path.join(args.output_dir, 'result') + + Path(args.output_dir).mkdir(parents=True, exist_ok=True) + Path(args.result_dir).mkdir(parents=True, exist_ok=True) + + yaml.dump(config, open(os.path.join(args.output_dir, 'config.yaml'), 'w')) + + main(args, config) \ No newline at end of file diff --git a/models/__init__.py b/models/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/models/__pycache__/__init__.cpython-36.pyc b/models/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 00000000..790416ff Binary files /dev/null and b/models/__pycache__/__init__.cpython-36.pyc differ diff --git a/models/__pycache__/__init__.cpython-38.pyc b/models/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 00000000..b32b2a46 Binary files /dev/null and b/models/__pycache__/__init__.cpython-38.pyc differ diff --git a/models/__pycache__/blip.cpython-38.pyc b/models/__pycache__/blip.cpython-38.pyc new file mode 100644 index 00000000..3dec0a17 Binary files /dev/null and b/models/__pycache__/blip.cpython-38.pyc differ diff --git a/models/__pycache__/blip_nlvr.cpython-38.pyc b/models/__pycache__/blip_nlvr.cpython-38.pyc new file mode 100644 index 00000000..b98c7ae6 Binary files /dev/null and b/models/__pycache__/blip_nlvr.cpython-38.pyc differ diff --git a/models/__pycache__/blip_retrieval.cpython-38.pyc b/models/__pycache__/blip_retrieval.cpython-38.pyc new file mode 100644 index 00000000..ba4960a1 Binary files /dev/null and b/models/__pycache__/blip_retrieval.cpython-38.pyc differ diff --git a/models/__pycache__/blip_vqa.cpython-38.pyc b/models/__pycache__/blip_vqa.cpython-38.pyc new file mode 100644 index 00000000..9816a432 Binary files /dev/null and b/models/__pycache__/blip_vqa.cpython-38.pyc differ diff --git a/models/__pycache__/booster.cpython-38.pyc b/models/__pycache__/booster.cpython-38.pyc new file mode 100644 index 00000000..a31e1f4a Binary files /dev/null and b/models/__pycache__/booster.cpython-38.pyc differ diff --git a/models/__pycache__/booster_nlvr.cpython-38.pyc b/models/__pycache__/booster_nlvr.cpython-38.pyc new file mode 100644 index 00000000..69fcd4fe Binary files /dev/null and b/models/__pycache__/booster_nlvr.cpython-38.pyc differ diff --git a/models/__pycache__/booster_retrieval.cpython-38.pyc b/models/__pycache__/booster_retrieval.cpython-38.pyc new file mode 100644 index 00000000..00f30603 Binary files /dev/null and b/models/__pycache__/booster_retrieval.cpython-38.pyc differ diff --git a/models/__pycache__/booster_retrieval_new.cpython-38.pyc b/models/__pycache__/booster_retrieval_new.cpython-38.pyc new file mode 100644 index 00000000..a5508c38 Binary files /dev/null and b/models/__pycache__/booster_retrieval_new.cpython-38.pyc differ diff --git a/models/__pycache__/booster_vqa.cpython-38.pyc b/models/__pycache__/booster_vqa.cpython-38.pyc new file mode 100644 index 00000000..b70e1783 Binary files /dev/null and b/models/__pycache__/booster_vqa.cpython-38.pyc differ diff --git a/models/__pycache__/med.cpython-36.pyc b/models/__pycache__/med.cpython-36.pyc new file mode 100644 index 00000000..485c2069 Binary files /dev/null and b/models/__pycache__/med.cpython-36.pyc differ diff --git a/models/__pycache__/med.cpython-38.pyc b/models/__pycache__/med.cpython-38.pyc new file mode 100644 index 00000000..dd9baed9 Binary files /dev/null and b/models/__pycache__/med.cpython-38.pyc differ diff --git a/models/__pycache__/nlvr_encoder.cpython-38.pyc b/models/__pycache__/nlvr_encoder.cpython-38.pyc new file mode 100644 index 00000000..206b21b2 Binary 
files /dev/null and b/models/__pycache__/nlvr_encoder.cpython-38.pyc differ diff --git a/models/__pycache__/univlm.cpython-36.pyc b/models/__pycache__/univlm.cpython-36.pyc new file mode 100644 index 00000000..06ae6378 Binary files /dev/null and b/models/__pycache__/univlm.cpython-36.pyc differ diff --git a/models/__pycache__/univlm.cpython-38.pyc b/models/__pycache__/univlm.cpython-38.pyc new file mode 100644 index 00000000..c0f73fc9 Binary files /dev/null and b/models/__pycache__/univlm.cpython-38.pyc differ diff --git a/models/__pycache__/univlm_pretrain.cpython-38.pyc b/models/__pycache__/univlm_pretrain.cpython-38.pyc new file mode 100644 index 00000000..6a1fb0dd Binary files /dev/null and b/models/__pycache__/univlm_pretrain.cpython-38.pyc differ diff --git a/models/__pycache__/univlm_retrieval.cpython-38.pyc b/models/__pycache__/univlm_retrieval.cpython-38.pyc new file mode 100644 index 00000000..fdf41c77 Binary files /dev/null and b/models/__pycache__/univlm_retrieval.cpython-38.pyc differ diff --git a/models/__pycache__/univlm_vqa.cpython-38.pyc b/models/__pycache__/univlm_vqa.cpython-38.pyc new file mode 100644 index 00000000..a0b68c6b Binary files /dev/null and b/models/__pycache__/univlm_vqa.cpython-38.pyc differ diff --git a/models/__pycache__/vit.cpython-36.pyc b/models/__pycache__/vit.cpython-36.pyc new file mode 100644 index 00000000..21846a1c Binary files /dev/null and b/models/__pycache__/vit.cpython-36.pyc differ diff --git a/models/__pycache__/vit.cpython-38.pyc b/models/__pycache__/vit.cpython-38.pyc new file mode 100644 index 00000000..9c8a211d Binary files /dev/null and b/models/__pycache__/vit.cpython-38.pyc differ diff --git a/models/__pycache__/vl_model.cpython-38.pyc b/models/__pycache__/vl_model.cpython-38.pyc new file mode 100644 index 00000000..a26f8a8b Binary files /dev/null and b/models/__pycache__/vl_model.cpython-38.pyc differ diff --git a/models/__pycache__/xbert.cpython-38.pyc b/models/__pycache__/xbert.cpython-38.pyc new file mode 100644 index 00000000..417a30e0 Binary files /dev/null and b/models/__pycache__/xbert.cpython-38.pyc differ diff --git a/models/blip.py b/models/blip.py new file mode 100644 index 00000000..5c2887e9 --- /dev/null +++ b/models/blip.py @@ -0,0 +1,236 @@ +''' + * Copyright (c) 2022, salesforce.com, inc. + * All rights reserved. 
+ * SPDX-License-Identifier: BSD-3-Clause + * For full license text, see LICENSE.txt file in the repo root or https://opensource.org/licenses/BSD-3-Clause + * By Junnan Li +''' + +from models.vit import VisionTransformer, interpolate_pos_embed +from models.med import BertConfig, BertModel, BertLMHeadModel +from transformers import BertTokenizer + +import torch +from torch import nn +import torch.nn.functional as F + +import os +from urllib.parse import urlparse +from timm.models.hub import download_cached_file + +class BLIP_Base(nn.Module): + def __init__(self, + med_config = './configs/med_config.json', + image_size = 384, + vit = 'base', + vit_grad_ckpt = False, + vit_ckpt_layer = 0, + ): + """ + Args: + med_config (str): path for the mixture of encoder-decoder model's configuration file + image_size (int): input image size + vit (str): model size of vision transformer + """ + super().__init__() + + self.visual_encoder, vision_width = create_vit(vit,image_size, vit_grad_ckpt, vit_ckpt_layer) + self.tokenizer = init_tokenizer() + med_config = BertConfig.from_json_file(med_config) + med_config.encoder_width = vision_width + self.text_encoder = BertModel(config=med_config, add_pooling_layer=False) + + + def forward(self, image, caption, mode): + + assert mode in ['image', 'text', 'multimodal'], "mode parameter must be image, text, or multimodal" + text = self.tokenizer(caption, return_tensors="pt").to(image.device) + + if mode=='image': + # return image features + image_embeds = self.visual_encoder(image) + return image_embeds + + elif mode=='text': + # return text features + text_output = self.text_encoder(text.input_ids, attention_mask = text.attention_mask, + return_dict = True, mode = 'text') + return text_output.last_hidden_state + + elif mode=='multimodal': + # return multimodal features + image_embeds = self.visual_encoder(image) + image_atts = torch.ones(image_embeds.size()[:-1],dtype=torch.long).to(image.device) + + text.input_ids[:,0] = self.tokenizer.enc_token_id # [ENC] switches the text encoder into multimodal (cross-attention) mode + output = self.text_encoder(text.input_ids, + attention_mask = text.attention_mask, + encoder_hidden_states = image_embeds, + encoder_attention_mask = image_atts, + return_dict = True, + ) + return output.last_hidden_state + + + +class BLIP_Decoder(nn.Module): + def __init__(self, + med_config = './configs/med_config.json', + image_size = 384, + vit = 'base', + vit_grad_ckpt = False, + vit_ckpt_layer = 0, + prompt = 'a picture of ', + ): + """ + Args: + med_config (str): path for the mixture of encoder-decoder model's configuration file + image_size (int): input image size + vit (str): model size of vision transformer + """ + super().__init__() + + self.visual_encoder, vision_width = create_vit(vit,image_size, vit_grad_ckpt, vit_ckpt_layer) + self.tokenizer = init_tokenizer() + med_config = BertConfig.from_json_file(med_config) + med_config.encoder_width = vision_width + self.text_decoder = BertLMHeadModel(config=med_config) + + self.prompt = prompt + self.prompt_length = len(self.tokenizer(self.prompt).input_ids)-1 # exclude the trailing [SEP] from the prompt token count + + + def forward(self, image, caption): + + image_embeds = self.visual_encoder(image) + image_atts = torch.ones(image_embeds.size()[:-1],dtype=torch.long).to(image.device) + + text = self.tokenizer(caption, padding='longest', truncation=True, max_length=40, return_tensors="pt").to(image.device) + + text.input_ids[:,0] = self.tokenizer.bos_token_id + + decoder_targets = text.input_ids.masked_fill(text.input_ids == self.tokenizer.pad_token_id, -100) + decoder_targets[:,:self.prompt_length] = -100 + +
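# pad tokens and the prompt prefix carry the ignore index (-100), so the LM loss below is computed only on the generated caption tokens +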
decoder_output = self.text_decoder(text.input_ids, + attention_mask = text.attention_mask, + encoder_hidden_states = image_embeds, + encoder_attention_mask = image_atts, + labels = decoder_targets, + return_dict = True, + ) + loss_lm = decoder_output.loss + + return loss_lm + + def generate(self, image, sample=False, num_beams=3, max_length=30, min_length=10, top_p=0.9, repetition_penalty=1.0): + image_embeds = self.visual_encoder(image) + + if not sample: + image_embeds = image_embeds.repeat_interleave(num_beams,dim=0) + + image_atts = torch.ones(image_embeds.size()[:-1],dtype=torch.long).to(image.device) + model_kwargs = {"encoder_hidden_states": image_embeds, "encoder_attention_mask":image_atts} + + prompt = [self.prompt] * image.size(0) + input_ids = self.tokenizer(prompt, return_tensors="pt").input_ids.to(image.device) + input_ids[:,0] = self.tokenizer.bos_token_id + input_ids = input_ids[:, :-1] # drop the trailing [SEP] so generation continues from the prompt tokens + + if sample: + # nucleus sampling + outputs = self.text_decoder.generate(input_ids=input_ids, + max_length=max_length, + min_length=min_length, + do_sample=True, + top_p=top_p, + num_return_sequences=1, + eos_token_id=self.tokenizer.sep_token_id, + pad_token_id=self.tokenizer.pad_token_id, + repetition_penalty=1.1, + **model_kwargs) + else: + # beam search + outputs = self.text_decoder.generate(input_ids=input_ids, + max_length=max_length, + min_length=min_length, + num_beams=num_beams, + eos_token_id=self.tokenizer.sep_token_id, + pad_token_id=self.tokenizer.pad_token_id, + repetition_penalty=repetition_penalty, + **model_kwargs) + + captions = [] + for output in outputs: + caption = self.tokenizer.decode(output, skip_special_tokens=True) + captions.append(caption[len(self.prompt):]) + return captions + + +def blip_decoder(pretrained='',**kwargs): + model = BLIP_Decoder(**kwargs) + if pretrained: + model,msg = load_checkpoint(model,pretrained) + assert(len(msg.missing_keys)==0) + return model + +def blip_feature_extractor(pretrained='',**kwargs): + model = BLIP_Base(**kwargs) + if pretrained: + model,msg = load_checkpoint(model,pretrained) + assert(len(msg.missing_keys)==0) + return model + +def init_tokenizer(): + tokenizer = BertTokenizer.from_pretrained('bert-base-uncased') + tokenizer.add_special_tokens({'bos_token':'[DEC]'}) + tokenizer.add_special_tokens({'additional_special_tokens':['[ENC]']}) + tokenizer.enc_token_id = tokenizer.additional_special_tokens_ids[0] + return tokenizer + + +def create_vit(vit, image_size, use_grad_checkpointing=False, ckpt_layer=0, drop_path_rate=0): + + assert vit in ['base', 'large'], "vit parameter must be base or large" + if vit=='base': + vision_width = 768 + visual_encoder = VisionTransformer(img_size=image_size, patch_size=16, embed_dim=vision_width, depth=12, + num_heads=12, use_grad_checkpointing=use_grad_checkpointing, ckpt_layer=ckpt_layer, + drop_path_rate=drop_path_rate + ) + elif vit=='large': + vision_width = 1024 + visual_encoder = VisionTransformer(img_size=image_size, patch_size=16, embed_dim=vision_width, depth=24, + num_heads=16, use_grad_checkpointing=use_grad_checkpointing, ckpt_layer=ckpt_layer, + drop_path_rate=drop_path_rate or 0.1 # caller's value if nonzero, else 0.1 stochastic depth for ViT-large + ) + return visual_encoder, vision_width + +def is_url(url_or_filename): + parsed = urlparse(url_or_filename) + return parsed.scheme in ("http", "https") + +def load_checkpoint(model,url_or_filename): + if is_url(url_or_filename): + cached_file = download_cached_file(url_or_filename, check_hash=False, progress=True) + checkpoint = torch.load(cached_file, map_location='cpu') + elif
os.path.isfile(url_or_filename): + checkpoint = torch.load(url_or_filename, map_location='cpu') + else: + raise RuntimeError('checkpoint url or path is invalid') + + state_dict = checkpoint['model'] + + state_dict['visual_encoder.pos_embed'] = interpolate_pos_embed(state_dict['visual_encoder.pos_embed'],model.visual_encoder) + if 'visual_encoder_m.pos_embed' in model.state_dict().keys(): + state_dict['visual_encoder_m.pos_embed'] = interpolate_pos_embed(state_dict['visual_encoder_m.pos_embed'], + model.visual_encoder_m) + for key in model.state_dict().keys(): + if key in state_dict.keys(): + if state_dict[key].shape!=model.state_dict()[key].shape: + del state_dict[key] + + msg = model.load_state_dict(state_dict,strict=False) + print('load checkpoint from %s'%url_or_filename) + return model,msg + \ No newline at end of file diff --git a/models/blip_nlvr.py b/models/blip_nlvr.py new file mode 100644 index 00000000..8824cba6 --- /dev/null +++ b/models/blip_nlvr.py @@ -0,0 +1,104 @@ +from models.med import BertConfig +from models.nlvr_encoder import BertModel +from models.vit import interpolate_pos_embed +from models.blip import create_vit, init_tokenizer, is_url + +from timm.models.hub import download_cached_file + +import os # needed by load_checkpoint for local checkpoint paths +import torch +from torch import nn +import torch.nn.functional as F +from transformers import BertTokenizer +import numpy as np + +class BLIP_NLVR(nn.Module): + def __init__(self, + med_config = './configs/med_config.json', + image_size = 480, + vit = 'base', + vit_grad_ckpt = False, + vit_ckpt_layer = 0, + ): + """ + Args: + med_config (str): path for the mixture of encoder-decoder model's configuration file + image_size (int): input image size + vit (str): model size of vision transformer + """ + super().__init__() + + self.visual_encoder, vision_width = create_vit(vit,image_size, vit_grad_ckpt, vit_ckpt_layer, drop_path_rate=0.1) + self.tokenizer = init_tokenizer() + med_config = BertConfig.from_json_file(med_config) + med_config.encoder_width = vision_width + self.text_encoder = BertModel(config=med_config, add_pooling_layer=False) + + self.cls_head = nn.Sequential( + nn.Linear(self.text_encoder.config.hidden_size, self.text_encoder.config.hidden_size), + nn.ReLU(), + nn.Linear(self.text_encoder.config.hidden_size, 2) + ) + + def forward(self, image, text, targets, train=True): + + image_embeds = self.visual_encoder(image) + image_atts = torch.ones(image_embeds.size()[:-1],dtype=torch.long).to(image.device) + image0_embeds, image1_embeds = torch.split(image_embeds,targets.size(0)) + + text = self.tokenizer(text, padding='longest', return_tensors="pt").to(image.device) + text.input_ids[:,0] = self.tokenizer.enc_token_id + + output = self.text_encoder(text.input_ids, + attention_mask = text.attention_mask, + encoder_hidden_states = [image0_embeds,image1_embeds], + encoder_attention_mask = [image_atts[:image0_embeds.size(0)], + image_atts[image0_embeds.size(0):]], + return_dict = True, + ) + hidden_state = output.last_hidden_state[:,0,:] + prediction = self.cls_head(hidden_state) + + if train: + loss = F.cross_entropy(prediction, targets) + return loss + else: + return prediction + +def blip_nlvr(pretrained='',**kwargs): + model = BLIP_NLVR(**kwargs) + if pretrained: + model,msg = load_checkpoint(model,pretrained) + print("missing keys:") + print(msg.missing_keys) + return model + + +def load_checkpoint(model,url_or_filename): + if is_url(url_or_filename): + cached_file = download_cached_file(url_or_filename, check_hash=False, progress=True) + checkpoint = torch.load(cached_file,
map_location='cpu') + elif os.path.isfile(url_or_filename): + checkpoint = torch.load(url_or_filename, map_location='cpu') + else: + raise RuntimeError('checkpoint url or path is invalid') + state_dict = checkpoint['model'] + + state_dict['visual_encoder.pos_embed'] = interpolate_pos_embed(state_dict['visual_encoder.pos_embed'],model.visual_encoder) + + for key in list(state_dict.keys()): + if 'crossattention.self.' in key: + new_key0 = key.replace('self','self0') + new_key1 = key.replace('self','self1') + state_dict[new_key0] = state_dict[key] + state_dict[new_key1] = state_dict[key] + elif 'crossattention.output.dense.' in key: + new_key0 = key.replace('dense','dense0') + new_key1 = key.replace('dense','dense1') + state_dict[new_key0] = state_dict[key] + state_dict[new_key1] = state_dict[key] + + msg = model.load_state_dict(state_dict,strict=False) + print('load checkpoint from %s'%url_or_filename) + return model,msg + \ No newline at end of file diff --git a/models/blip_pretrain.py b/models/blip_pretrain.py new file mode 100644 index 00000000..9d0db2e6 --- /dev/null +++ b/models/blip_pretrain.py @@ -0,0 +1,339 @@ +''' + * Copyright (c) 2022, salesforce.com, inc. + * All rights reserved. + * SPDX-License-Identifier: BSD-3-Clause + * For full license text, see LICENSE.txt file in the repo root or https://opensource.org/licenses/BSD-3-Clause + * By Junnan Li +''' +from models.med import BertConfig, BertModel, BertLMHeadModel +from transformers import BertTokenizer +import transformers +transformers.logging.set_verbosity_error() + +import torch +from torch import nn +import torch.nn.functional as F + +from models.blip import create_vit, init_tokenizer, load_checkpoint + +class BLIP_Pretrain(nn.Module): + def __init__(self, + med_config = './configs/bert_config.json', + image_size = 224, + vit = 'base', + vit_grad_ckpt = False, + vit_ckpt_layer = 0, + embed_dim = 256, + queue_size = 57600, + momentum = 0.995, + ): + """ + Args: + med_config (str): path for the mixture of encoder-decoder model's configuration file + image_size (int): input image size + vit (str): model size of vision transformer + """ + super().__init__() + + self.visual_encoder, vision_width = create_vit(vit,image_size, vit_grad_ckpt, vit_ckpt_layer, 0) + + if vit=='base': + checkpoint = torch.hub.load_state_dict_from_url( + url="https://dl.fbaipublicfiles.com/deit/deit_base_patch16_224-b5f2ef4d.pth", + map_location="cpu", check_hash=True) + state_dict = checkpoint["model"] + msg = self.visual_encoder.load_state_dict(state_dict,strict=False) + elif vit=='large': + from timm.models.helpers import load_custom_pretrained + from timm.models.vision_transformer import default_cfgs + load_custom_pretrained(self.visual_encoder,default_cfgs['vit_large_patch16_224_in21k']) + + self.tokenizer = init_tokenizer() + encoder_config = BertConfig.from_json_file(med_config) + encoder_config.encoder_width = vision_width + self.text_encoder = BertModel.from_pretrained('bert-base-uncased',config=encoder_config, add_pooling_layer=False) + self.text_encoder.resize_token_embeddings(len(self.tokenizer)) + + text_width = self.text_encoder.config.hidden_size + + self.vision_proj = nn.Linear(vision_width, embed_dim) + self.text_proj = nn.Linear(text_width, embed_dim) + + self.itm_head = nn.Linear(text_width, 2) + + # create momentum encoders + self.visual_encoder_m, vision_width = create_vit(vit,image_size) + self.vision_proj_m = nn.Linear(vision_width, embed_dim) + self.text_encoder_m = BertModel(config=encoder_config, add_pooling_layer=False) + 
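# the *_m modules are momentum (EMA) copies of the online encoders and projections; copy_params() syncs them once and _momentum_update() keeps them updated, never the optimizer +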
self.text_proj_m = nn.Linear(text_width, embed_dim) + + self.model_pairs = [[self.visual_encoder,self.visual_encoder_m], + [self.vision_proj,self.vision_proj_m], + [self.text_encoder,self.text_encoder_m], + [self.text_proj,self.text_proj_m], + ] + self.copy_params() + + # create the queue + self.register_buffer("image_queue", torch.randn(embed_dim, queue_size)) + self.register_buffer("text_queue", torch.randn(embed_dim, queue_size)) + self.register_buffer("queue_ptr", torch.zeros(1, dtype=torch.long)) + + self.image_queue = nn.functional.normalize(self.image_queue, dim=0) + self.text_queue = nn.functional.normalize(self.text_queue, dim=0) + + self.queue_size = queue_size + self.momentum = momentum + self.temp = nn.Parameter(0.07*torch.ones([])) + + # create the decoder + decoder_config = BertConfig.from_json_file(med_config) + decoder_config.encoder_width = vision_width + self.text_decoder = BertLMHeadModel.from_pretrained('bert-base-uncased',config=decoder_config) + self.text_decoder.resize_token_embeddings(len(self.tokenizer)) + tie_encoder_decoder_weights(self.text_decoder.bert,self.text_encoder,'','/attention') + + + def forward(self, image, caption, alpha): + with torch.no_grad(): + self.temp.clamp_(0.001,0.5) + + image_embeds = self.visual_encoder(image) + image_atts = torch.ones(image_embeds.size()[:-1],dtype=torch.long).to(image.device) + image_feat = F.normalize(self.vision_proj(image_embeds[:,0,:]),dim=-1) + + text = self.tokenizer(caption, padding='max_length', truncation=True, max_length=30, + return_tensors="pt").to(image.device) + text_output = self.text_encoder(text.input_ids, attention_mask = text.attention_mask, + return_dict = True, mode = 'text') + text_feat = F.normalize(self.text_proj(text_output.last_hidden_state[:,0,:]),dim=-1) + + # get momentum features + with torch.no_grad(): + self._momentum_update() + image_embeds_m = self.visual_encoder_m(image) + image_feat_m = F.normalize(self.vision_proj_m(image_embeds_m[:,0,:]),dim=-1) + image_feat_all = torch.cat([image_feat_m.t(),self.image_queue.clone().detach()],dim=1) + + text_output_m = self.text_encoder_m(text.input_ids, attention_mask = text.attention_mask, + return_dict = True, mode = 'text') + text_feat_m = F.normalize(self.text_proj_m(text_output_m.last_hidden_state[:,0,:]),dim=-1) + text_feat_all = torch.cat([text_feat_m.t(),self.text_queue.clone().detach()],dim=1) + + sim_i2t_m = image_feat_m @ text_feat_all / self.temp + sim_t2i_m = text_feat_m @ image_feat_all / self.temp + + sim_targets = torch.zeros(sim_i2t_m.size()).to(image.device) + sim_targets.fill_diagonal_(1) + + sim_i2t_targets = alpha * F.softmax(sim_i2t_m, dim=1) + (1 - alpha) * sim_targets + sim_t2i_targets = alpha * F.softmax(sim_t2i_m, dim=1) + (1 - alpha) * sim_targets + + sim_i2t = image_feat @ text_feat_all / self.temp + sim_t2i = text_feat @ image_feat_all / self.temp + + loss_i2t = -torch.sum(F.log_softmax(sim_i2t, dim=1)*sim_i2t_targets,dim=1).mean() + loss_t2i = -torch.sum(F.log_softmax(sim_t2i, dim=1)*sim_t2i_targets,dim=1).mean() + + loss_ita = (loss_i2t+loss_t2i)/2 + + self._dequeue_and_enqueue(image_feat_m, text_feat_m) + + ###============== Image-text Matching ===================### + encoder_input_ids = text.input_ids.clone() + encoder_input_ids[:,0] = self.tokenizer.enc_token_id + + # forward the positve image-text pair + bs = image.size(0) + output_pos = self.text_encoder(encoder_input_ids, + attention_mask = text.attention_mask, + encoder_hidden_states = image_embeds, + encoder_attention_mask = image_atts, + return_dict = True, + 
) + with torch.no_grad(): + weights_t2i = F.softmax(sim_t2i[:,:bs],dim=1)+1e-4 + weights_t2i.fill_diagonal_(0) + weights_i2t = F.softmax(sim_i2t[:,:bs],dim=1)+1e-4 + weights_i2t.fill_diagonal_(0) + + # select a negative image for each text + image_embeds_neg = [] + for b in range(bs): + neg_idx = torch.multinomial(weights_t2i[b], 1).item() + image_embeds_neg.append(image_embeds[neg_idx]) + image_embeds_neg = torch.stack(image_embeds_neg,dim=0) + + # select a negative text for each image + text_ids_neg = [] + text_atts_neg = [] + for b in range(bs): + neg_idx = torch.multinomial(weights_i2t[b], 1).item() + text_ids_neg.append(encoder_input_ids[neg_idx]) + text_atts_neg.append(text.attention_mask[neg_idx]) + + text_ids_neg = torch.stack(text_ids_neg,dim=0) + text_atts_neg = torch.stack(text_atts_neg,dim=0) + + text_ids_all = torch.cat([encoder_input_ids, text_ids_neg],dim=0) + text_atts_all = torch.cat([text.attention_mask, text_atts_neg],dim=0) + + image_embeds_all = torch.cat([image_embeds_neg,image_embeds],dim=0) + image_atts_all = torch.cat([image_atts,image_atts],dim=0) + + output_neg = self.text_encoder(text_ids_all, + attention_mask = text_atts_all, + encoder_hidden_states = image_embeds_all, + encoder_attention_mask = image_atts_all, + return_dict = True, + ) + + vl_embeddings = torch.cat([output_pos.last_hidden_state[:,0,:], output_neg.last_hidden_state[:,0,:]],dim=0) + vl_output = self.itm_head(vl_embeddings) + + itm_labels = torch.cat([torch.ones(bs,dtype=torch.long),torch.zeros(2*bs,dtype=torch.long)], + dim=0).to(image.device) + loss_itm = F.cross_entropy(vl_output, itm_labels) + + ##================= LM ========================## + decoder_input_ids = text.input_ids.clone() + decoder_input_ids[:,0] = self.tokenizer.bos_token_id + decoder_targets = decoder_input_ids.masked_fill(decoder_input_ids == self.tokenizer.pad_token_id, -100) + + decoder_output = self.text_decoder(decoder_input_ids, + attention_mask = text.attention_mask, + encoder_hidden_states = image_embeds, + encoder_attention_mask = image_atts, + labels = decoder_targets, + return_dict = True, + ) + + loss_lm = decoder_output.loss + return loss_ita, loss_itm, loss_lm + + + + @torch.no_grad() + def copy_params(self): + for model_pair in self.model_pairs: + for param, param_m in zip(model_pair[0].parameters(), model_pair[1].parameters()): + param_m.data.copy_(param.data) # initialize + param_m.requires_grad = False # not update by gradient + + + @torch.no_grad() + def _momentum_update(self): + for model_pair in self.model_pairs: + for param, param_m in zip(model_pair[0].parameters(), model_pair[1].parameters()): + param_m.data = param_m.data * self.momentum + param.data * (1. - self.momentum) + + + @torch.no_grad() + def _dequeue_and_enqueue(self, image_feat, text_feat): + # gather keys before updating queue + image_feats = concat_all_gather(image_feat) + text_feats = concat_all_gather(text_feat) + + batch_size = image_feats.shape[0] + + ptr = int(self.queue_ptr) + assert self.queue_size % batch_size == 0 # for simplicity + + # replace the keys at ptr (dequeue and enqueue) + self.image_queue[:, ptr:ptr + batch_size] = image_feats.T + self.text_queue[:, ptr:ptr + batch_size] = text_feats.T + ptr = (ptr + batch_size) % self.queue_size # move pointer + + self.queue_ptr[0] = ptr + + +def blip_pretrain(**kwargs): + model = BLIP_Pretrain(**kwargs) + return model + + +@torch.no_grad() +def concat_all_gather(tensor): + """ + Performs all_gather operation on the provided tensors. 
+ *** Warning ***: torch.distributed.all_gather has no gradient. + """ + tensors_gather = [torch.ones_like(tensor) + for _ in range(torch.distributed.get_world_size())] + torch.distributed.all_gather(tensors_gather, tensor, async_op=False) + + output = torch.cat(tensors_gather, dim=0) + return output + + +from typing import List +def tie_encoder_decoder_weights(encoder: nn.Module, decoder: nn.Module, base_model_prefix: str, skip_key:str): + uninitialized_encoder_weights: List[str] = [] + if decoder.__class__ != encoder.__class__: + logger.info( + f"{decoder.__class__} and {encoder.__class__} are not equal. In this case make sure that all encoder weights are correctly initialized." + ) + + def tie_encoder_to_decoder_recursively( + decoder_pointer: nn.Module, + encoder_pointer: nn.Module, + module_name: str, + uninitialized_encoder_weights: List[str], + skip_key: str, + depth=0, + ): + assert isinstance(decoder_pointer, nn.Module) and isinstance( + encoder_pointer, nn.Module + ), f"{decoder_pointer} and {encoder_pointer} have to be of type torch.nn.Module" + if hasattr(decoder_pointer, "weight") and skip_key not in module_name: + assert hasattr(encoder_pointer, "weight") + encoder_pointer.weight = decoder_pointer.weight + if hasattr(decoder_pointer, "bias"): + assert hasattr(encoder_pointer, "bias") + encoder_pointer.bias = decoder_pointer.bias + print(module_name+' is tied') + return + + encoder_modules = encoder_pointer._modules + decoder_modules = decoder_pointer._modules + if len(decoder_modules) > 0: + assert ( + len(encoder_modules) > 0 + ), f"Encoder module {encoder_pointer} does not match decoder module {decoder_pointer}" + + all_encoder_weights = set([module_name + "/" + sub_name for sub_name in encoder_modules.keys()]) + encoder_layer_pos = 0 + for name, module in decoder_modules.items(): + if name.isdigit(): + encoder_name = str(int(name) + encoder_layer_pos) + decoder_name = name + if not isinstance(decoder_modules[decoder_name], type(encoder_modules[encoder_name])) and len( + encoder_modules + ) != len(decoder_modules): + # this can happen if the name corresponds to the position in a list module list of layers + # in this case the decoder has added a cross-attention that the encoder does not have + # thus skip this step and subtract one layer pos from encoder + encoder_layer_pos -= 1 + continue + elif name not in encoder_modules: + continue + elif depth > 500: + raise ValueError( + "Max depth of recursive function `tie_encoder_to_decoder` reached. It seems that there is a circular dependency between two or more `nn.Modules` of your model." 
+ ) + else: + decoder_name = encoder_name = name + tie_encoder_to_decoder_recursively( + decoder_modules[decoder_name], + encoder_modules[encoder_name], + module_name + "/" + name, + uninitialized_encoder_weights, + skip_key, + depth=depth + 1, + ) + all_encoder_weights.remove(module_name + "/" + encoder_name) + + uninitialized_encoder_weights += list(all_encoder_weights) + + # tie weights recursively + tie_encoder_to_decoder_recursively(decoder, encoder, base_model_prefix, uninitialized_encoder_weights, skip_key) diff --git a/models/blip_retrieval.py b/models/blip_retrieval.py new file mode 100644 index 00000000..2294db6e --- /dev/null +++ b/models/blip_retrieval.py @@ -0,0 +1,322 @@ +from models.med import BertConfig, BertModel +from transformers import BertTokenizer + +import torch +from torch import nn +import torch.nn.functional as F + +from models.blip import create_vit, init_tokenizer, load_checkpoint + +class BLIP_Retrieval(nn.Module): + def __init__(self, + med_config = './configs/med_config.json', + image_size = 384, + vit = 'base', + vit_grad_ckpt = False, + vit_ckpt_layer = 0, + embed_dim = 256, + queue_size = 57600, + momentum = 0.995, + negative_all_rank = False, + ): + """ + Args: + med_config (str): path for the mixture of encoder-decoder model's configuration file + image_size (int): input image size + vit (str): model size of vision transformer + """ + super().__init__() + + self.visual_encoder, vision_width = create_vit(vit,image_size, vit_grad_ckpt, vit_ckpt_layer) + self.tokenizer = init_tokenizer() + med_config = BertConfig.from_json_file(med_config) + med_config.encoder_width = vision_width + self.text_encoder = BertModel(config=med_config, add_pooling_layer=False) + + text_width = self.text_encoder.config.hidden_size + + self.vision_proj = nn.Linear(vision_width, embed_dim) + self.text_proj = nn.Linear(text_width, embed_dim) + + self.itm_head = nn.Linear(text_width, 2) + + # create momentum encoders + self.visual_encoder_m, vision_width = create_vit(vit,image_size) + self.vision_proj_m = nn.Linear(vision_width, embed_dim) + self.text_encoder_m = BertModel(config=med_config, add_pooling_layer=False) + self.text_proj_m = nn.Linear(text_width, embed_dim) + + self.model_pairs = [[self.visual_encoder,self.visual_encoder_m], + [self.vision_proj,self.vision_proj_m], + [self.text_encoder,self.text_encoder_m], + [self.text_proj,self.text_proj_m], + ] + self.copy_params() + + # create the queue + self.register_buffer("image_queue", torch.randn(embed_dim, queue_size)) + self.register_buffer("text_queue", torch.randn(embed_dim, queue_size)) + self.register_buffer("idx_queue", torch.full((1,queue_size),-100)) + self.register_buffer("ptr_queue", torch.zeros(1, dtype=torch.long)) + + self.image_queue = nn.functional.normalize(self.image_queue, dim=0) + self.text_queue = nn.functional.normalize(self.text_queue, dim=0) + + self.queue_size = queue_size + self.momentum = momentum + self.temp = nn.Parameter(0.07*torch.ones([])) + + self.negative_all_rank = negative_all_rank + + + def forward(self, image, caption, alpha, idx): + with torch.no_grad(): + self.temp.clamp_(0.001,0.5) + + image_embeds = self.visual_encoder(image) + image_atts = torch.ones(image_embeds.size()[:-1],dtype=torch.long).to(image.device) + image_feat = F.normalize(self.vision_proj(image_embeds[:,0,:]),dim=-1) + + text = self.tokenizer(caption, padding='max_length', truncation=True, max_length=35, + return_tensors="pt").to(image.device) + + text_output = self.text_encoder(text.input_ids, attention_mask = 
text.attention_mask, + return_dict = True, mode = 'text') + text_feat = F.normalize(self.text_proj(text_output.last_hidden_state[:,0,:]),dim=-1) + + ###============== Image-text Contrastive Learning ===================### + idx = idx.view(-1,1) + idx_all = torch.cat([idx.t(), self.idx_queue.clone().detach()],dim=1) + pos_idx = torch.eq(idx, idx_all).float() + sim_targets = pos_idx / pos_idx.sum(1,keepdim=True) # soft targets: every caption of the same image counts as a positive + + # get momentum features + with torch.no_grad(): + self._momentum_update() + image_embeds_m = self.visual_encoder_m(image) + image_feat_m = F.normalize(self.vision_proj_m(image_embeds_m[:,0,:]),dim=-1) + image_feat_m_all = torch.cat([image_feat_m.t(),self.image_queue.clone().detach()],dim=1) + + text_output_m = self.text_encoder_m(text.input_ids, attention_mask = text.attention_mask, + return_dict = True, mode = 'text') + text_feat_m = F.normalize(self.text_proj_m(text_output_m.last_hidden_state[:,0,:]),dim=-1) + text_feat_m_all = torch.cat([text_feat_m.t(),self.text_queue.clone().detach()],dim=1) + + sim_i2t_m = image_feat_m @ text_feat_m_all / self.temp + sim_t2i_m = text_feat_m @ image_feat_m_all / self.temp + + # the idx-based sim_targets computed above is used directly; unlike pretraining, + # no one-hot diagonal is built, since a batch may contain several captions of the same image + + sim_i2t_targets = alpha * F.softmax(sim_i2t_m, dim=1) + (1 - alpha) * sim_targets + sim_t2i_targets = alpha * F.softmax(sim_t2i_m, dim=1) + (1 - alpha) * sim_targets + + sim_i2t = image_feat @ text_feat_m_all / self.temp + sim_t2i = text_feat @ image_feat_m_all / self.temp + + loss_i2t = -torch.sum(F.log_softmax(sim_i2t, dim=1)*sim_i2t_targets,dim=1).mean() + loss_t2i = -torch.sum(F.log_softmax(sim_t2i, dim=1)*sim_t2i_targets,dim=1).mean() + + loss_ita = (loss_i2t+loss_t2i)/2 + + idxs = concat_all_gather(idx) + self._dequeue_and_enqueue(image_feat_m, text_feat_m, idxs) + + ###============== Image-text Matching ===================### + encoder_input_ids = text.input_ids.clone() + encoder_input_ids[:,0] = self.tokenizer.enc_token_id + + # forward the positive image-text pair + bs = image.size(0) + output_pos = self.text_encoder(encoder_input_ids, + attention_mask = text.attention_mask, + encoder_hidden_states = image_embeds, + encoder_attention_mask = image_atts, + return_dict = True, + ) + + + if self.negative_all_rank: + # compute sample similarity + with torch.no_grad(): + mask = torch.eq(idx, idxs.t()) + + image_feat_world = concat_all_gather(image_feat) + text_feat_world = concat_all_gather(text_feat) + + sim_i2t = image_feat @ text_feat_world.t() / self.temp + sim_t2i = text_feat @ image_feat_world.t() / self.temp + + weights_i2t = F.softmax(sim_i2t,dim=1) + weights_i2t.masked_fill_(mask, 0) + + weights_t2i = F.softmax(sim_t2i,dim=1) + weights_t2i.masked_fill_(mask, 0) + + image_embeds_world = all_gather_with_grad(image_embeds) + + # select a negative image (from all ranks) for each text + image_embeds_neg = [] + for b in range(bs): + neg_idx = torch.multinomial(weights_t2i[b], 1).item() + image_embeds_neg.append(image_embeds_world[neg_idx]) + image_embeds_neg = torch.stack(image_embeds_neg,dim=0) + + # select a negative text (from all ranks) for each image + input_ids_world = concat_all_gather(encoder_input_ids) + att_mask_world = concat_all_gather(text.attention_mask) + + text_ids_neg = [] + text_atts_neg = [] + for b in range(bs): + neg_idx = torch.multinomial(weights_i2t[b], 1).item() + text_ids_neg.append(input_ids_world[neg_idx]) + text_atts_neg.append(att_mask_world[neg_idx]) + + else: + with torch.no_grad(): + mask = torch.eq(idx, idx.t()) + + sim_i2t = image_feat
@ text_feat.t() / self.temp + sim_t2i = text_feat @ image_feat.t() / self.temp + + weights_i2t = F.softmax(sim_i2t,dim=1) + weights_i2t.masked_fill_(mask, 0) + + weights_t2i = F.softmax(sim_t2i,dim=1) + weights_t2i.masked_fill_(mask, 0) + + # select a negative image (from same rank) for each text + image_embeds_neg = [] + for b in range(bs): + neg_idx = torch.multinomial(weights_t2i[b], 1).item() + image_embeds_neg.append(image_embeds[neg_idx]) + image_embeds_neg = torch.stack(image_embeds_neg,dim=0) + + # select a negative text (from same rank) for each image + text_ids_neg = [] + text_atts_neg = [] + for b in range(bs): + neg_idx = torch.multinomial(weights_i2t[b], 1).item() + text_ids_neg.append(encoder_input_ids[neg_idx]) + text_atts_neg.append(text.attention_mask[neg_idx]) + + text_ids_neg = torch.stack(text_ids_neg,dim=0) + text_atts_neg = torch.stack(text_atts_neg,dim=0) + + text_ids_all = torch.cat([encoder_input_ids, text_ids_neg],dim=0) + text_atts_all = torch.cat([text.attention_mask, text_atts_neg],dim=0) + + image_embeds_all = torch.cat([image_embeds_neg,image_embeds],dim=0) + image_atts_all = torch.cat([image_atts,image_atts],dim=0) + + output_neg = self.text_encoder(text_ids_all, + attention_mask = text_atts_all, + encoder_hidden_states = image_embeds_all, + encoder_attention_mask = image_atts_all, + return_dict = True, + ) + + + vl_embeddings = torch.cat([output_pos.last_hidden_state[:,0,:], output_neg.last_hidden_state[:,0,:]],dim=0) + vl_output = self.itm_head(vl_embeddings) + + itm_labels = torch.cat([torch.ones(bs,dtype=torch.long),torch.zeros(2*bs,dtype=torch.long)], + dim=0).to(image.device) + loss_itm = F.cross_entropy(vl_output, itm_labels) + + return loss_ita, loss_itm + + + @torch.no_grad() + def copy_params(self): + for model_pair in self.model_pairs: + for param, param_m in zip(model_pair[0].parameters(), model_pair[1].parameters()): + param_m.data.copy_(param.data) # initialize + param_m.requires_grad = False # not update by gradient + + + @torch.no_grad() + def _momentum_update(self): + for model_pair in self.model_pairs: + for param, param_m in zip(model_pair[0].parameters(), model_pair[1].parameters()): + param_m.data = param_m.data * self.momentum + param.data * (1. - self.momentum) + + + @torch.no_grad() + def _dequeue_and_enqueue(self, image_feat, text_feat, idxs): + # gather keys before updating queue + image_feats = concat_all_gather(image_feat) + text_feats = concat_all_gather(text_feat) + + + batch_size = image_feats.shape[0] + + ptr = int(self.ptr_queue) + assert self.queue_size % batch_size == 0 # for simplicity + + # replace the keys at ptr (dequeue and enqueue) + self.image_queue[:, ptr:ptr + batch_size] = image_feats.T + self.text_queue[:, ptr:ptr + batch_size] = text_feats.T + self.idx_queue[:, ptr:ptr + batch_size] = idxs.T + ptr = (ptr + batch_size) % self.queue_size # move pointer + + self.ptr_queue[0] = ptr + + +def blip_retrieval(pretrained='',**kwargs): + model = BLIP_Retrieval(**kwargs) + if pretrained: + model,msg = load_checkpoint(model,pretrained) + print("missing keys:") + print(msg.missing_keys) + return model + + +@torch.no_grad() +def concat_all_gather(tensor): + """ + Performs all_gather operation on the provided tensors. + *** Warning ***: torch.distributed.all_gather has no gradient. 
+ """ + tensors_gather = [torch.ones_like(tensor) + for _ in range(torch.distributed.get_world_size())] + torch.distributed.all_gather(tensors_gather, tensor, async_op=False) + + output = torch.cat(tensors_gather, dim=0) + return output + + +class GatherLayer(torch.autograd.Function): + """ + Gather tensors from all workers with support for backward propagation: + This implementation does not cut the gradients as torch.distributed.all_gather does. + """ + + @staticmethod + def forward(ctx, x): + output = [torch.zeros_like(x) for _ in range(torch.distributed.get_world_size())] + torch.distributed.all_gather(output, x) + return tuple(output) + + @staticmethod + def backward(ctx, *grads): + all_gradients = torch.stack(grads) + torch.distributed.all_reduce(all_gradients) + return all_gradients[torch.distributed.get_rank()] + + +def all_gather_with_grad(tensors): + """ + Performs all_gather operation on the provided tensors. + Graph remains connected for backward grad computation. + """ + # Queue the gathered tensors + world_size = torch.distributed.get_world_size() + # There is no need for reduction in the single-proc case + if world_size == 1: + return tensors + + tensor_all = GatherLayer.apply(tensors) + + return torch.cat(tensor_all, dim=0) diff --git a/models/blip_vqa.py b/models/blip_vqa.py new file mode 100644 index 00000000..9f284b49 --- /dev/null +++ b/models/blip_vqa.py @@ -0,0 +1,186 @@ +from models.med import BertConfig, BertModel, BertLMHeadModel +from models.blip import create_vit, init_tokenizer, load_checkpoint + +import torch +from torch import nn +import torch.nn.functional as F +from transformers import BertTokenizer +import numpy as np + +class BLIP_VQA(nn.Module): + def __init__(self, + med_config = './configs/med_config.json', + image_size = 480, + vit = 'base', + vit_grad_ckpt = False, + vit_ckpt_layer = 0, + ): + """ + Args: + med_config (str): path for the mixture of encoder-decoder model's configuration file + image_size (int): input image size + vit (str): model size of vision transformer + """ + super().__init__() + + self.visual_encoder, vision_width = create_vit(vit, image_size, vit_grad_ckpt, vit_ckpt_layer, drop_path_rate=0.1) + self.tokenizer = init_tokenizer() + + encoder_config = BertConfig.from_json_file(med_config) + encoder_config.encoder_width = vision_width + self.text_encoder = BertModel(config=encoder_config, add_pooling_layer=False) + + decoder_config = BertConfig.from_json_file(med_config) + self.text_decoder = BertLMHeadModel(config=decoder_config) + + + def forward(self, image, question, answer=None, n=None, weights=None, train=True, inference='rank', k_test=128): + + image_embeds = self.visual_encoder(image) + image_atts = torch.ones(image_embeds.size()[:-1],dtype=torch.long).to(image.device) + + question = self.tokenizer(question, padding='longest', truncation=True, max_length=35, + return_tensors="pt").to(image.device) + question.input_ids[:,0] = self.tokenizer.enc_token_id + + if train: + ''' + n: number of answers for each question + weights: weight for each answer + ''' + answer = self.tokenizer(answer, padding='longest', return_tensors="pt").to(image.device) + answer.input_ids[:,0] = self.tokenizer.bos_token_id + answer_targets = answer.input_ids.masked_fill(answer.input_ids == self.tokenizer.pad_token_id, -100) + + question_output = self.text_encoder(question.input_ids, + attention_mask = question.attention_mask, + encoder_hidden_states = image_embeds, + encoder_attention_mask = image_atts, + return_dict = True) + + question_states = [] + 
+            question_atts = []
+            for b, num_ans in enumerate(n):
+                question_states += [question_output.last_hidden_state[b]]*num_ans
+                question_atts += [question.attention_mask[b]]*num_ans
+            question_states = torch.stack(question_states,0)
+            question_atts = torch.stack(question_atts,0)
+
+            answer_output = self.text_decoder(answer.input_ids,
+                                              attention_mask = answer.attention_mask,
+                                              encoder_hidden_states = question_states,
+                                              encoder_attention_mask = question_atts,
+                                              labels = answer_targets,
+                                              return_dict = True,
+                                              reduction = 'none',
+                                             )
+
+            loss = weights * answer_output.loss
+            loss = loss.sum()/image.size(0)
+
+            return loss
+
+
+        else:
+            question_output = self.text_encoder(question.input_ids,
+                                                attention_mask = question.attention_mask,
+                                                encoder_hidden_states = image_embeds,
+                                                encoder_attention_mask = image_atts,
+                                                return_dict = True)
+
+            if inference=='generate':
+                num_beams = 3
+                question_states = question_output.last_hidden_state.repeat_interleave(num_beams,dim=0)
+                question_atts = torch.ones(question_states.size()[:-1],dtype=torch.long).to(question_states.device)
+                model_kwargs = {"encoder_hidden_states": question_states, "encoder_attention_mask":question_atts}
+
+                bos_ids = torch.full((image.size(0),1),fill_value=self.tokenizer.bos_token_id,device=image.device)
+
+                outputs = self.text_decoder.generate(input_ids=bos_ids,
+                                                     max_length=10,
+                                                     min_length=1,
+                                                     num_beams=num_beams,
+                                                     eos_token_id=self.tokenizer.sep_token_id,
+                                                     pad_token_id=self.tokenizer.pad_token_id,
+                                                     **model_kwargs)
+
+                answers = []
+                for output in outputs:
+                    answer = self.tokenizer.decode(output, skip_special_tokens=True)
+                    answers.append(answer)
+                return answers
+
+            elif inference=='rank':
+                max_ids = self.rank_answer(question_output.last_hidden_state, question.attention_mask,
+                                           answer.input_ids, answer.attention_mask, k_test)
+                return max_ids
+
+
+
+    def rank_answer(self, question_states, question_atts, answer_ids, answer_atts, k):
+
+        num_ques = question_states.size(0)
+        start_ids = answer_ids[0,0].repeat(num_ques,1) # bos token
+
+        start_output = self.text_decoder(start_ids,
+                                         encoder_hidden_states = question_states,
+                                         encoder_attention_mask = question_atts,
+                                         return_dict = True,
+                                         reduction = 'none')
+        logits = start_output.logits[:,0,:] # first token's logit
+
+        # topk_probs: top-k probability
+        # topk_ids: [num_question, k]
+        answer_first_token = answer_ids[:,1]
+        prob_first_token = F.softmax(logits,dim=1).index_select(dim=1, index=answer_first_token)
+        topk_probs, topk_ids = prob_first_token.topk(k,dim=1)
+
+        # answer input: [num_question*k, answer_len]
+        input_ids = []
+        input_atts = []
+        for b, topk_id in enumerate(topk_ids):
+            input_ids.append(answer_ids.index_select(dim=0, index=topk_id))
+            input_atts.append(answer_atts.index_select(dim=0, index=topk_id))
+        input_ids = torch.cat(input_ids,dim=0)
+        input_atts = torch.cat(input_atts,dim=0)
+
+        targets_ids = input_ids.masked_fill(input_ids == self.tokenizer.pad_token_id, -100)
+
+        # repeat encoder's output for top-k answers
+        question_states = tile(question_states, 0, k)
+        question_atts = tile(question_atts, 0, k)
+
+        output = self.text_decoder(input_ids,
+                                   attention_mask = input_atts,
+                                   encoder_hidden_states = question_states,
+                                   encoder_attention_mask = question_atts,
+                                   labels = targets_ids,
+                                   return_dict = True,
+                                   reduction = 'none')
+
+        log_probs_sum = -output.loss
+        log_probs_sum = log_probs_sum.view(num_ques,k)
+
+        max_topk_ids = log_probs_sum.argmax(dim=1)
+        # pick, for each question, the candidate answer with the highest summed log-prob
+        max_ids = topk_ids[torch.arange(num_ques, device=topk_ids.device), max_topk_ids]
+
+        return max_ids
+
+
+def blip_vqa(pretrained='',**kwargs):
+    model = 
BLIP_VQA(**kwargs) + if pretrained: + model,msg = load_checkpoint(model,pretrained) +# assert(len(msg.missing_keys)==0) + return model + + +def tile(x, dim, n_tile): + init_dim = x.size(dim) + repeat_idx = [1] * x.dim() + repeat_idx[dim] = n_tile + x = x.repeat(*(repeat_idx)) + order_index = torch.LongTensor(np.concatenate([init_dim * np.arange(n_tile) + i for i in range(init_dim)])) + return torch.index_select(x, dim, order_index.to(x.device)) + + \ No newline at end of file diff --git a/models/med.py b/models/med.py new file mode 100644 index 00000000..7b00a354 --- /dev/null +++ b/models/med.py @@ -0,0 +1,955 @@ +''' + * Copyright (c) 2022, salesforce.com, inc. + * All rights reserved. + * SPDX-License-Identifier: BSD-3-Clause + * For full license text, see LICENSE.txt file in the repo root or https://opensource.org/licenses/BSD-3-Clause + * By Junnan Li + * Based on huggingface code base + * https://github.com/huggingface/transformers/blob/v4.15.0/src/transformers/models/bert +''' + +import math +import os +import warnings +from dataclasses import dataclass +from typing import Optional, Tuple + +import torch +from torch import Tensor, device, dtype, nn +import torch.utils.checkpoint +from torch import nn +from torch.nn import CrossEntropyLoss +import torch.nn.functional as F + +from transformers.activations import ACT2FN +from transformers.file_utils import ( + ModelOutput, +) +from transformers.modeling_outputs import ( + BaseModelOutputWithPastAndCrossAttentions, + BaseModelOutputWithPoolingAndCrossAttentions, + CausalLMOutputWithCrossAttentions, + MaskedLMOutput, + MultipleChoiceModelOutput, + NextSentencePredictorOutput, + QuestionAnsweringModelOutput, + SequenceClassifierOutput, + TokenClassifierOutput, +) +from transformers.modeling_utils import ( + PreTrainedModel, + apply_chunking_to_forward, + find_pruneable_heads_and_indices, + prune_linear_layer, +) +from transformers.utils import logging +from transformers.models.bert.configuration_bert import BertConfig + + +logger = logging.get_logger(__name__) + + +class BertEmbeddings(nn.Module): + """Construct the embeddings from word and position embeddings.""" + + def __init__(self, config): + super().__init__() + self.word_embeddings = nn.Embedding(config.vocab_size, config.hidden_size, padding_idx=config.pad_token_id) + self.position_embeddings = nn.Embedding(config.max_position_embeddings, config.hidden_size) + + # self.LayerNorm is not snake-cased to stick with TensorFlow model variable name and be able to load + # any TensorFlow checkpoint file + self.LayerNorm = nn.LayerNorm(config.hidden_size, eps=config.layer_norm_eps) + self.dropout = nn.Dropout(config.hidden_dropout_prob) + + # position_ids (1, len position emb) is contiguous in memory and exported when serialized + self.register_buffer("position_ids", torch.arange(config.max_position_embeddings).expand((1, -1))) + self.position_embedding_type = getattr(config, "position_embedding_type", "absolute") + + self.config = config + + def forward( + self, input_ids=None, position_ids=None, inputs_embeds=None, past_key_values_length=0 + ): + if input_ids is not None: + input_shape = input_ids.size() + else: + input_shape = inputs_embeds.size()[:-1] + + seq_length = input_shape[1] + + if position_ids is None: + position_ids = self.position_ids[:, past_key_values_length : seq_length + past_key_values_length] + + if inputs_embeds is None: + inputs_embeds = self.word_embeddings(input_ids) + + embeddings = inputs_embeds + + if self.position_embedding_type == "absolute": + 
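+            # absolute position embeddings are simply added to the token embeddings
+            # before the LayerNorm and dropout below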
position_embeddings = self.position_embeddings(position_ids) + embeddings += position_embeddings + embeddings = self.LayerNorm(embeddings) + embeddings = self.dropout(embeddings) + return embeddings + + +class BertSelfAttention(nn.Module): + def __init__(self, config, is_cross_attention): + super().__init__() + self.config = config + if config.hidden_size % config.num_attention_heads != 0 and not hasattr(config, "embedding_size"): + raise ValueError( + "The hidden size (%d) is not a multiple of the number of attention " + "heads (%d)" % (config.hidden_size, config.num_attention_heads) + ) + + self.num_attention_heads = config.num_attention_heads + self.attention_head_size = int(config.hidden_size / config.num_attention_heads) + self.all_head_size = self.num_attention_heads * self.attention_head_size + + self.query = nn.Linear(config.hidden_size, self.all_head_size) + if is_cross_attention: + self.key = nn.Linear(config.encoder_width, self.all_head_size) + self.value = nn.Linear(config.encoder_width, self.all_head_size) + else: + self.key = nn.Linear(config.hidden_size, self.all_head_size) + self.value = nn.Linear(config.hidden_size, self.all_head_size) + + self.dropout = nn.Dropout(config.attention_probs_dropout_prob) + self.position_embedding_type = getattr(config, "position_embedding_type", "absolute") + if self.position_embedding_type == "relative_key" or self.position_embedding_type == "relative_key_query": + self.max_position_embeddings = config.max_position_embeddings + self.distance_embedding = nn.Embedding(2 * config.max_position_embeddings - 1, self.attention_head_size) + self.save_attention = False + + def save_attn_gradients(self, attn_gradients): + self.attn_gradients = attn_gradients + + def get_attn_gradients(self): + return self.attn_gradients + + def save_attention_map(self, attention_map): + self.attention_map = attention_map + + def get_attention_map(self): + return self.attention_map + + def transpose_for_scores(self, x): + new_x_shape = x.size()[:-1] + (self.num_attention_heads, self.attention_head_size) + x = x.view(*new_x_shape) + return x.permute(0, 2, 1, 3) + + def forward( + self, + hidden_states, + attention_mask=None, + head_mask=None, + encoder_hidden_states=None, + encoder_attention_mask=None, + past_key_value=None, + output_attentions=False, + ): + mixed_query_layer = self.query(hidden_states) + + # If this is instantiated as a cross-attention module, the keys + # and values come from an encoder; the attention mask needs to be + # such that the encoder's padding tokens are not attended to. + is_cross_attention = encoder_hidden_states is not None + + if is_cross_attention: + key_layer = self.transpose_for_scores(self.key(encoder_hidden_states)) + value_layer = self.transpose_for_scores(self.value(encoder_hidden_states)) + attention_mask = encoder_attention_mask + elif past_key_value is not None: + key_layer = self.transpose_for_scores(self.key(hidden_states)) + value_layer = self.transpose_for_scores(self.value(hidden_states)) + key_layer = torch.cat([past_key_value[0], key_layer], dim=2) + value_layer = torch.cat([past_key_value[1], value_layer], dim=2) + else: + key_layer = self.transpose_for_scores(self.key(hidden_states)) + value_layer = self.transpose_for_scores(self.value(hidden_states)) + + query_layer = self.transpose_for_scores(mixed_query_layer) + + past_key_value = (key_layer, value_layer) + + # Take the dot product between "query" and "key" to get the raw attention scores. 
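+        # shapes: query_layer is [batch, heads, query_len, head_dim] and key_layer is
+        # [batch, heads, key_len, head_dim], so the matmul below yields raw scores of
+        # shape [batch, heads, query_len, key_len]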
+        attention_scores = torch.matmul(query_layer, key_layer.transpose(-1, -2))
+
+        if self.position_embedding_type == "relative_key" or self.position_embedding_type == "relative_key_query":
+            seq_length = hidden_states.size()[1]
+            position_ids_l = torch.arange(seq_length, dtype=torch.long, device=hidden_states.device).view(-1, 1)
+            position_ids_r = torch.arange(seq_length, dtype=torch.long, device=hidden_states.device).view(1, -1)
+            distance = position_ids_l - position_ids_r
+            positional_embedding = self.distance_embedding(distance + self.max_position_embeddings - 1)
+            positional_embedding = positional_embedding.to(dtype=query_layer.dtype)  # fp16 compatibility
+
+            if self.position_embedding_type == "relative_key":
+                relative_position_scores = torch.einsum("bhld,lrd->bhlr", query_layer, positional_embedding)
+                attention_scores = attention_scores + relative_position_scores
+            elif self.position_embedding_type == "relative_key_query":
+                relative_position_scores_query = torch.einsum("bhld,lrd->bhlr", query_layer, positional_embedding)
+                relative_position_scores_key = torch.einsum("bhrd,lrd->bhlr", key_layer, positional_embedding)
+                attention_scores = attention_scores + relative_position_scores_query + relative_position_scores_key
+
+        attention_scores = attention_scores / math.sqrt(self.attention_head_size)
+        if attention_mask is not None:
+            # Apply the attention mask (precomputed for all layers in BertModel's forward() function)
+            attention_scores = attention_scores + attention_mask
+
+        # Normalize the attention scores to probabilities.
+        attention_probs = nn.Softmax(dim=-1)(attention_scores)
+
+        if is_cross_attention and self.save_attention:
+            self.save_attention_map(attention_probs)
+            attention_probs.register_hook(self.save_attn_gradients)
+
+        # This is actually dropping out entire tokens to attend to, which might
+        # seem a bit unusual, but is taken from the original Transformer paper.
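+        # i.e. for a given query, zeroing attention_probs[..., q, k] removes key
+        # position k from that query's weighted sum over value_layer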
+ attention_probs_dropped = self.dropout(attention_probs) + + # Mask heads if we want to + if head_mask is not None: + attention_probs_dropped = attention_probs_dropped * head_mask + + context_layer = torch.matmul(attention_probs_dropped, value_layer) + + context_layer = context_layer.permute(0, 2, 1, 3).contiguous() + new_context_layer_shape = context_layer.size()[:-2] + (self.all_head_size,) + context_layer = context_layer.view(*new_context_layer_shape) + + outputs = (context_layer, attention_probs) if output_attentions else (context_layer,) + + outputs = outputs + (past_key_value,) + return outputs + + +class BertSelfOutput(nn.Module): + def __init__(self, config): + super().__init__() + self.dense = nn.Linear(config.hidden_size, config.hidden_size) + self.LayerNorm = nn.LayerNorm(config.hidden_size, eps=config.layer_norm_eps) + self.dropout = nn.Dropout(config.hidden_dropout_prob) + + def forward(self, hidden_states, input_tensor): + hidden_states = self.dense(hidden_states) + hidden_states = self.dropout(hidden_states) + hidden_states = self.LayerNorm(hidden_states + input_tensor) + return hidden_states + + +class BertAttention(nn.Module): + def __init__(self, config, is_cross_attention=False): + super().__init__() + self.self = BertSelfAttention(config, is_cross_attention) + self.output = BertSelfOutput(config) + self.pruned_heads = set() + + def prune_heads(self, heads): + if len(heads) == 0: + return + heads, index = find_pruneable_heads_and_indices( + heads, self.self.num_attention_heads, self.self.attention_head_size, self.pruned_heads + ) + + # Prune linear layers + self.self.query = prune_linear_layer(self.self.query, index) + self.self.key = prune_linear_layer(self.self.key, index) + self.self.value = prune_linear_layer(self.self.value, index) + self.output.dense = prune_linear_layer(self.output.dense, index, dim=1) + + # Update hyper params and store pruned heads + self.self.num_attention_heads = self.self.num_attention_heads - len(heads) + self.self.all_head_size = self.self.attention_head_size * self.self.num_attention_heads + self.pruned_heads = self.pruned_heads.union(heads) + + def forward( + self, + hidden_states, + attention_mask=None, + head_mask=None, + encoder_hidden_states=None, + encoder_attention_mask=None, + past_key_value=None, + output_attentions=False, + ): + self_outputs = self.self( + hidden_states, + attention_mask, + head_mask, + encoder_hidden_states, + encoder_attention_mask, + past_key_value, + output_attentions, + ) + attention_output = self.output(self_outputs[0], hidden_states) + outputs = (attention_output,) + self_outputs[1:] # add attentions if we output them + return outputs + + +class BertIntermediate(nn.Module): + def __init__(self, config): + super().__init__() + self.dense = nn.Linear(config.hidden_size, config.intermediate_size) + if isinstance(config.hidden_act, str): + self.intermediate_act_fn = ACT2FN[config.hidden_act] + else: + self.intermediate_act_fn = config.hidden_act + + def forward(self, hidden_states): + hidden_states = self.dense(hidden_states) + hidden_states = self.intermediate_act_fn(hidden_states) + return hidden_states + + +class BertOutput(nn.Module): + def __init__(self, config): + super().__init__() + self.dense = nn.Linear(config.intermediate_size, config.hidden_size) + self.LayerNorm = nn.LayerNorm(config.hidden_size, eps=config.layer_norm_eps) + self.dropout = nn.Dropout(config.hidden_dropout_prob) + + def forward(self, hidden_states, input_tensor): + hidden_states = self.dense(hidden_states) + hidden_states = 
self.dropout(hidden_states) + hidden_states = self.LayerNorm(hidden_states + input_tensor) + return hidden_states + + +class BertLayer(nn.Module): + def __init__(self, config, layer_num): + super().__init__() + self.config = config + self.chunk_size_feed_forward = config.chunk_size_feed_forward + self.seq_len_dim = 1 + self.attention = BertAttention(config) + self.layer_num = layer_num + if self.config.add_cross_attention: + self.crossattention = BertAttention(config, is_cross_attention=self.config.add_cross_attention) + self.intermediate = BertIntermediate(config) + self.output = BertOutput(config) + + def forward( + self, + hidden_states, + attention_mask=None, + head_mask=None, + encoder_hidden_states=None, + encoder_attention_mask=None, + past_key_value=None, + output_attentions=False, + mode=None, + ): + # decoder uni-directional self-attention cached key/values tuple is at positions 1,2 + self_attn_past_key_value = past_key_value[:2] if past_key_value is not None else None + self_attention_outputs = self.attention( + hidden_states, + attention_mask, + head_mask, + output_attentions=output_attentions, + past_key_value=self_attn_past_key_value, + ) + attention_output = self_attention_outputs[0] + + outputs = self_attention_outputs[1:-1] + present_key_value = self_attention_outputs[-1] + + if mode=='multimodal': + assert encoder_hidden_states is not None, "encoder_hidden_states must be given for cross-attention layers" + + cross_attention_outputs = self.crossattention( + attention_output, + attention_mask, + head_mask, + encoder_hidden_states, + encoder_attention_mask, + output_attentions=output_attentions, + ) + attention_output = cross_attention_outputs[0] + outputs = outputs + cross_attention_outputs[1:-1] # add cross attentions if we output attention weights + layer_output = apply_chunking_to_forward( + self.feed_forward_chunk, self.chunk_size_feed_forward, self.seq_len_dim, attention_output + ) + outputs = (layer_output,) + outputs + + outputs = outputs + (present_key_value,) + + return outputs + + def feed_forward_chunk(self, attention_output): + intermediate_output = self.intermediate(attention_output) + layer_output = self.output(intermediate_output, attention_output) + return layer_output + + +class BertEncoder(nn.Module): + def __init__(self, config): + super().__init__() + self.config = config + self.layer = nn.ModuleList([BertLayer(config,i) for i in range(config.num_hidden_layers)]) + self.gradient_checkpointing = False + + def forward( + self, + hidden_states, + attention_mask=None, + head_mask=None, + encoder_hidden_states=None, + encoder_attention_mask=None, + past_key_values=None, + use_cache=None, + output_attentions=False, + output_hidden_states=False, + return_dict=True, + mode='multimodal', + ): + all_hidden_states = () if output_hidden_states else None + all_self_attentions = () if output_attentions else None + all_cross_attentions = () if output_attentions and self.config.add_cross_attention else None + + next_decoder_cache = () if use_cache else None + + for i in range(self.config.num_hidden_layers): + layer_module = self.layer[i] + if output_hidden_states: + all_hidden_states = all_hidden_states + (hidden_states,) + + layer_head_mask = head_mask[i] if head_mask is not None else None + past_key_value = past_key_values[i] if past_key_values is not None else None + + if self.gradient_checkpointing and self.training: + + if use_cache: + logger.warn( + "`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`..." 
+ ) + use_cache = False + + def create_custom_forward(module): + def custom_forward(*inputs): + return module(*inputs, past_key_value, output_attentions) + + return custom_forward + + layer_outputs = torch.utils.checkpoint.checkpoint( + create_custom_forward(layer_module), + hidden_states, + attention_mask, + layer_head_mask, + encoder_hidden_states, + encoder_attention_mask, + mode=mode, + ) + else: + layer_outputs = layer_module( + hidden_states, + attention_mask, + layer_head_mask, + encoder_hidden_states, + encoder_attention_mask, + past_key_value, + output_attentions, + mode=mode, + ) + + hidden_states = layer_outputs[0] + if use_cache: + next_decoder_cache += (layer_outputs[-1],) + if output_attentions: + all_self_attentions = all_self_attentions + (layer_outputs[1],) + + if output_hidden_states: + all_hidden_states = all_hidden_states + (hidden_states,) + + if not return_dict: + return tuple( + v + for v in [ + hidden_states, + next_decoder_cache, + all_hidden_states, + all_self_attentions, + all_cross_attentions, + ] + if v is not None + ) + return BaseModelOutputWithPastAndCrossAttentions( + last_hidden_state=hidden_states, + past_key_values=next_decoder_cache, + hidden_states=all_hidden_states, + attentions=all_self_attentions, + cross_attentions=all_cross_attentions, + ) + + +class BertPooler(nn.Module): + def __init__(self, config): + super().__init__() + self.dense = nn.Linear(config.hidden_size, config.hidden_size) + self.activation = nn.Tanh() + + def forward(self, hidden_states): + # We "pool" the model by simply taking the hidden state corresponding + # to the first token. + first_token_tensor = hidden_states[:, 0] + pooled_output = self.dense(first_token_tensor) + pooled_output = self.activation(pooled_output) + return pooled_output + + +class BertPredictionHeadTransform(nn.Module): + def __init__(self, config): + super().__init__() + self.dense = nn.Linear(config.hidden_size, config.hidden_size) + if isinstance(config.hidden_act, str): + self.transform_act_fn = ACT2FN[config.hidden_act] + else: + self.transform_act_fn = config.hidden_act + self.LayerNorm = nn.LayerNorm(config.hidden_size, eps=config.layer_norm_eps) + + def forward(self, hidden_states): + hidden_states = self.dense(hidden_states) + hidden_states = self.transform_act_fn(hidden_states) + hidden_states = self.LayerNorm(hidden_states) + return hidden_states + + +class BertLMPredictionHead(nn.Module): + def __init__(self, config): + super().__init__() + self.transform = BertPredictionHeadTransform(config) + + # The output weights are the same as the input embeddings, but there is + # an output-only bias for each token. + self.decoder = nn.Linear(config.hidden_size, config.vocab_size, bias=False) + + self.bias = nn.Parameter(torch.zeros(config.vocab_size)) + + # Need a link between the two variables so that the bias is correctly resized with `resize_token_embeddings` + self.decoder.bias = self.bias + + def forward(self, hidden_states): + hidden_states = self.transform(hidden_states) + hidden_states = self.decoder(hidden_states) + return hidden_states + + +class BertOnlyMLMHead(nn.Module): + def __init__(self, config): + super().__init__() + self.predictions = BertLMPredictionHead(config) + + def forward(self, sequence_output): + prediction_scores = self.predictions(sequence_output) + return prediction_scores + + +class BertPreTrainedModel(PreTrainedModel): + """ + An abstract class to handle weights initialization and a simple interface for downloading and loading pretrained + models. 
+ """ + + config_class = BertConfig + base_model_prefix = "bert" + _keys_to_ignore_on_load_missing = [r"position_ids"] + + def _init_weights(self, module): + """ Initialize the weights """ + if isinstance(module, (nn.Linear, nn.Embedding)): + # Slightly different from the TF version which uses truncated_normal for initialization + # cf https://github.com/pytorch/pytorch/pull/5617 + module.weight.data.normal_(mean=0.0, std=self.config.initializer_range) + elif isinstance(module, nn.LayerNorm): + module.bias.data.zero_() + module.weight.data.fill_(1.0) + if isinstance(module, nn.Linear) and module.bias is not None: + module.bias.data.zero_() + + +class BertModel(BertPreTrainedModel): + """ + The model can behave as an encoder (with only self-attention) as well as a decoder, in which case a layer of + cross-attention is added between the self-attention layers, following the architecture described in `Attention is + all you need `__ by Ashish Vaswani, Noam Shazeer, Niki Parmar, Jakob Uszkoreit, + Llion Jones, Aidan N. Gomez, Lukasz Kaiser and Illia Polosukhin. + argument and :obj:`add_cross_attention` set to :obj:`True`; an :obj:`encoder_hidden_states` is then expected as an + input to the forward pass. + """ + + def __init__(self, config, add_pooling_layer=True): + super().__init__(config) + self.config = config + + self.embeddings = BertEmbeddings(config) + + self.encoder = BertEncoder(config) + + self.pooler = BertPooler(config) if add_pooling_layer else None + + self.init_weights() + + + def get_input_embeddings(self): + return self.embeddings.word_embeddings + + def set_input_embeddings(self, value): + self.embeddings.word_embeddings = value + + def _prune_heads(self, heads_to_prune): + """ + Prunes heads of the model. heads_to_prune: dict of {layer_num: list of heads to prune in this layer} See base + class PreTrainedModel + """ + for layer, heads in heads_to_prune.items(): + self.encoder.layer[layer].attention.prune_heads(heads) + + + def get_extended_attention_mask(self, attention_mask: Tensor, input_shape: Tuple[int], device: device, is_decoder: bool) -> Tensor: + """ + Makes broadcastable attention and causal masks so that future and masked tokens are ignored. + + Arguments: + attention_mask (:obj:`torch.Tensor`): + Mask with ones indicating tokens to attend to, zeros for tokens to ignore. + input_shape (:obj:`Tuple[int]`): + The shape of the input to the model. + device: (:obj:`torch.device`): + The device of the input to the model. + + Returns: + :obj:`torch.Tensor` The extended attention mask, with a the same dtype as :obj:`attention_mask.dtype`. + """ + # We can provide a self-attention mask of dimensions [batch_size, from_seq_length, to_seq_length] + # ourselves in which case we just need to make it broadcastable to all heads. 
+ if attention_mask.dim() == 3: + extended_attention_mask = attention_mask[:, None, :, :] + elif attention_mask.dim() == 2: + # Provided a padding mask of dimensions [batch_size, seq_length] + # - if the model is a decoder, apply a causal mask in addition to the padding mask + # - if the model is an encoder, make the mask broadcastable to [batch_size, num_heads, seq_length, seq_length] + if is_decoder: + batch_size, seq_length = input_shape + + seq_ids = torch.arange(seq_length, device=device) + causal_mask = seq_ids[None, None, :].repeat(batch_size, seq_length, 1) <= seq_ids[None, :, None] + # in case past_key_values are used we need to add a prefix ones mask to the causal mask + # causal and attention masks must have same type with pytorch version < 1.3 + causal_mask = causal_mask.to(attention_mask.dtype) + + if causal_mask.shape[1] < attention_mask.shape[1]: + prefix_seq_len = attention_mask.shape[1] - causal_mask.shape[1] + causal_mask = torch.cat( + [ + torch.ones((batch_size, seq_length, prefix_seq_len), device=device, dtype=causal_mask.dtype), + causal_mask, + ], + axis=-1, + ) + + extended_attention_mask = causal_mask[:, None, :, :] * attention_mask[:, None, None, :] + else: + extended_attention_mask = attention_mask[:, None, None, :] + else: + raise ValueError( + "Wrong shape for input_ids (shape {}) or attention_mask (shape {})".format( + input_shape, attention_mask.shape + ) + ) + + # Since attention_mask is 1.0 for positions we want to attend and 0.0 for + # masked positions, this operation will create a tensor which is 0.0 for + # positions we want to attend and -10000.0 for masked positions. + # Since we are adding it to the raw scores before the softmax, this is + # effectively the same as removing these entirely. + extended_attention_mask = extended_attention_mask.to(dtype=self.dtype) # fp16 compatibility + extended_attention_mask = (1.0 - extended_attention_mask) * -10000.0 + return extended_attention_mask + + def forward( + self, + input_ids=None, + attention_mask=None, + position_ids=None, + head_mask=None, + inputs_embeds=None, + encoder_embeds=None, + encoder_hidden_states=None, + encoder_attention_mask=None, + past_key_values=None, + use_cache=None, + output_attentions=None, + output_hidden_states=None, + return_dict=None, + is_decoder=False, + mode='multimodal', + ): + r""" + encoder_hidden_states (:obj:`torch.FloatTensor` of shape :obj:`(batch_size, sequence_length, hidden_size)`, `optional`): + Sequence of hidden-states at the output of the last layer of the encoder. Used in the cross-attention if + the model is configured as a decoder. + encoder_attention_mask (:obj:`torch.FloatTensor` of shape :obj:`(batch_size, sequence_length)`, `optional`): + Mask to avoid performing attention on the padding token indices of the encoder input. This mask is used in + the cross-attention if the model is configured as a decoder. Mask values selected in ``[0, 1]``: + - 1 for tokens that are **not masked**, + - 0 for tokens that are **masked**. + past_key_values (:obj:`tuple(tuple(torch.FloatTensor))` of length :obj:`config.n_layers` with each tuple having 4 tensors of shape :obj:`(batch_size, num_heads, sequence_length - 1, embed_size_per_head)`): + Contains precomputed key and value hidden states of the attention blocks. Can be used to speed up decoding. 
+ If :obj:`past_key_values` are used, the user can optionally input only the last :obj:`decoder_input_ids` + (those that don't have their past key value states given to this model) of shape :obj:`(batch_size, 1)` + instead of all :obj:`decoder_input_ids` of shape :obj:`(batch_size, sequence_length)`. + use_cache (:obj:`bool`, `optional`): + If set to :obj:`True`, :obj:`past_key_values` key value states are returned and can be used to speed up + decoding (see :obj:`past_key_values`). + """ + output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions + output_hidden_states = ( + output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states + ) + return_dict = return_dict if return_dict is not None else self.config.use_return_dict + + if is_decoder: + use_cache = use_cache if use_cache is not None else self.config.use_cache + else: + use_cache = False + + if input_ids is not None and inputs_embeds is not None: + raise ValueError("You cannot specify both input_ids and inputs_embeds at the same time") + elif input_ids is not None: + input_shape = input_ids.size() + batch_size, seq_length = input_shape + device = input_ids.device + elif inputs_embeds is not None: + input_shape = inputs_embeds.size()[:-1] + batch_size, seq_length = input_shape + device = inputs_embeds.device + elif encoder_embeds is not None: + input_shape = encoder_embeds.size()[:-1] + batch_size, seq_length = input_shape + device = encoder_embeds.device + else: + raise ValueError("You have to specify either input_ids or inputs_embeds or encoder_embeds") + + # past_key_values_length + past_key_values_length = past_key_values[0][0].shape[2] if past_key_values is not None else 0 + + if attention_mask is None: + attention_mask = torch.ones(((batch_size, seq_length + past_key_values_length)), device=device) + + # We can provide a self-attention mask of dimensions [batch_size, from_seq_length, to_seq_length] + # ourselves in which case we just need to make it broadcastable to all heads. 
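+        # note: the returned mask is additive, with 0.0 at visible positions and -10000.0
+        # at masked ones, shaped [batch_size, 1, 1, seq_length] for an encoder and
+        # [batch_size, 1, seq_length, seq_length] (causal) when is_decoder is True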
+ extended_attention_mask: torch.Tensor = self.get_extended_attention_mask(attention_mask, input_shape, + device, is_decoder) + + # If a 2D or 3D attention mask is provided for the cross-attention + # we need to make broadcastable to [batch_size, num_heads, seq_length, seq_length] + if encoder_hidden_states is not None: + if type(encoder_hidden_states) == list: + encoder_batch_size, encoder_sequence_length, _ = encoder_hidden_states[0].size() + else: + encoder_batch_size, encoder_sequence_length, _ = encoder_hidden_states.size() + encoder_hidden_shape = (encoder_batch_size, encoder_sequence_length) + + if type(encoder_attention_mask) == list: + encoder_extended_attention_mask = [self.invert_attention_mask(mask) for mask in encoder_attention_mask] + elif encoder_attention_mask is None: + encoder_attention_mask = torch.ones(encoder_hidden_shape, device=device) + encoder_extended_attention_mask = self.invert_attention_mask(encoder_attention_mask) + else: + encoder_extended_attention_mask = self.invert_attention_mask(encoder_attention_mask) + else: + encoder_extended_attention_mask = None + + # Prepare head mask if needed + # 1.0 in head_mask indicate we keep the head + # attention_probs has shape bsz x n_heads x N x N + # input head_mask has shape [num_heads] or [num_hidden_layers x num_heads] + # and head_mask is converted to shape [num_hidden_layers x batch x num_heads x seq_length x seq_length] + head_mask = self.get_head_mask(head_mask, self.config.num_hidden_layers) + + if encoder_embeds is None: + embedding_output = self.embeddings( + input_ids=input_ids, + position_ids=position_ids, + inputs_embeds=inputs_embeds, + past_key_values_length=past_key_values_length, + ) + else: + embedding_output = encoder_embeds + + encoder_outputs = self.encoder( + embedding_output, + attention_mask=extended_attention_mask, + head_mask=head_mask, + encoder_hidden_states=encoder_hidden_states, + encoder_attention_mask=encoder_extended_attention_mask, + past_key_values=past_key_values, + use_cache=use_cache, + output_attentions=output_attentions, + output_hidden_states=output_hidden_states, + return_dict=return_dict, + mode=mode, + ) + sequence_output = encoder_outputs[0] + pooled_output = self.pooler(sequence_output) if self.pooler is not None else None + + if not return_dict: + return (sequence_output, pooled_output) + encoder_outputs[1:] + + return BaseModelOutputWithPoolingAndCrossAttentions( + last_hidden_state=sequence_output, + pooler_output=pooled_output, + past_key_values=encoder_outputs.past_key_values, + hidden_states=encoder_outputs.hidden_states, + attentions=encoder_outputs.attentions, + cross_attentions=encoder_outputs.cross_attentions, + ) + + + +class BertLMHeadModel(BertPreTrainedModel): + + _keys_to_ignore_on_load_unexpected = [r"pooler"] + _keys_to_ignore_on_load_missing = [r"position_ids", r"predictions.decoder.bias"] + + def __init__(self, config): + super().__init__(config) + + self.bert = BertModel(config, add_pooling_layer=False) + self.cls = BertOnlyMLMHead(config) + + self.init_weights() + + def get_output_embeddings(self): + return self.cls.predictions.decoder + + def set_output_embeddings(self, new_embeddings): + self.cls.predictions.decoder = new_embeddings + + def forward( + self, + input_ids=None, + attention_mask=None, + position_ids=None, + head_mask=None, + inputs_embeds=None, + encoder_hidden_states=None, + encoder_attention_mask=None, + labels=None, + past_key_values=None, + use_cache=None, + output_attentions=None, + output_hidden_states=None, + return_dict=None, + 
return_logits=False,
+        is_decoder=True,
+        reduction='mean',
+        mode='multimodal',
+    ):
+        r"""
+        encoder_hidden_states (:obj:`torch.FloatTensor` of shape :obj:`(batch_size, sequence_length, hidden_size)`, `optional`):
+            Sequence of hidden-states at the output of the last layer of the encoder. Used in the cross-attention if
+            the model is configured as a decoder.
+        encoder_attention_mask (:obj:`torch.FloatTensor` of shape :obj:`(batch_size, sequence_length)`, `optional`):
+            Mask to avoid performing attention on the padding token indices of the encoder input. This mask is used in
+            the cross-attention if the model is configured as a decoder. Mask values selected in ``[0, 1]``:
+            - 1 for tokens that are **not masked**,
+            - 0 for tokens that are **masked**.
+        labels (:obj:`torch.LongTensor` of shape :obj:`(batch_size, sequence_length)`, `optional`):
+            Labels for computing the left-to-right language modeling loss (next word prediction). Indices should be in
+            ``[-100, 0, ..., config.vocab_size]`` (see ``input_ids`` docstring). Tokens with indices set to ``-100`` are
+            ignored (masked); the loss is only computed for the tokens with labels in ``[0, ..., config.vocab_size]``.
+        past_key_values (:obj:`tuple(tuple(torch.FloatTensor))` of length :obj:`config.n_layers` with each tuple having 4 tensors of shape :obj:`(batch_size, num_heads, sequence_length - 1, embed_size_per_head)`):
+            Contains precomputed key and value hidden states of the attention blocks. Can be used to speed up decoding.
+            If :obj:`past_key_values` are used, the user can optionally input only the last :obj:`decoder_input_ids`
+            (those that don't have their past key value states given to this model) of shape :obj:`(batch_size, 1)`
+            instead of all :obj:`decoder_input_ids` of shape :obj:`(batch_size, sequence_length)`.
+        use_cache (:obj:`bool`, `optional`):
+            If set to :obj:`True`, :obj:`past_key_values` key value states are returned and can be used to speed up
+            decoding (see :obj:`past_key_values`).
+ Returns: + Example:: + >>> from transformers import BertTokenizer, BertLMHeadModel, BertConfig + >>> import torch + >>> tokenizer = BertTokenizer.from_pretrained('bert-base-cased') + >>> config = BertConfig.from_pretrained("bert-base-cased") + >>> model = BertLMHeadModel.from_pretrained('bert-base-cased', config=config) + >>> inputs = tokenizer("Hello, my dog is cute", return_tensors="pt") + >>> outputs = model(**inputs) + >>> prediction_logits = outputs.logits + """ + return_dict = return_dict if return_dict is not None else self.config.use_return_dict + if labels is not None: + use_cache = False + + outputs = self.bert( + input_ids, + attention_mask=attention_mask, + position_ids=position_ids, + head_mask=head_mask, + inputs_embeds=inputs_embeds, + encoder_hidden_states=encoder_hidden_states, + encoder_attention_mask=encoder_attention_mask, + past_key_values=past_key_values, + use_cache=use_cache, + output_attentions=output_attentions, + output_hidden_states=output_hidden_states, + return_dict=return_dict, + is_decoder=is_decoder, + mode=mode, + ) + + sequence_output = outputs[0] + prediction_scores = self.cls(sequence_output) + + if return_logits: + return prediction_scores[:, :-1, :].contiguous() + + lm_loss = None + if labels is not None: + # we are doing next-token prediction; shift prediction scores and input ids by one + shifted_prediction_scores = prediction_scores[:, :-1, :].contiguous() + labels = labels[:, 1:].contiguous() + loss_fct = CrossEntropyLoss(reduction=reduction, label_smoothing=0.1) + lm_loss = loss_fct(shifted_prediction_scores.view(-1, self.config.vocab_size), labels.view(-1)) + if reduction=='none': + lm_loss = lm_loss.view(prediction_scores.size(0),-1).sum(1) + + if not return_dict: + output = (prediction_scores,) + outputs[2:] + return ((lm_loss,) + output) if lm_loss is not None else output + + return CausalLMOutputWithCrossAttentions( + loss=lm_loss, + logits=prediction_scores, + past_key_values=outputs.past_key_values, + hidden_states=outputs.hidden_states, + attentions=outputs.attentions, + cross_attentions=outputs.cross_attentions, + ) + + def prepare_inputs_for_generation(self, input_ids, past=None, attention_mask=None, **model_kwargs): + input_shape = input_ids.shape + # if model is used as a decoder in encoder-decoder model, the decoder attention mask is created on the fly + if attention_mask is None: + attention_mask = input_ids.new_ones(input_shape) + + # cut decoder_input_ids if past is used + if past is not None: + input_ids = input_ids[:, -1:] + + return { + "input_ids": input_ids, + "attention_mask": attention_mask, + "past_key_values": past, + "encoder_hidden_states": model_kwargs.get("encoder_hidden_states", None), + "encoder_attention_mask": model_kwargs.get("encoder_attention_mask", None), + "is_decoder": True, + } + + def _reorder_cache(self, past, beam_idx): + reordered_past = () + for layer_past in past: + reordered_past += (tuple(past_state.index_select(0, beam_idx) for past_state in layer_past),) + return reordered_past diff --git a/models/nlvr_encoder.py b/models/nlvr_encoder.py new file mode 100644 index 00000000..1946bb4a --- /dev/null +++ b/models/nlvr_encoder.py @@ -0,0 +1,843 @@ +import math +import os +import warnings +from dataclasses import dataclass +from typing import Optional, Tuple + +import torch +from torch import Tensor, device, dtype, nn +import torch.utils.checkpoint +from torch import nn +from torch.nn import CrossEntropyLoss +import torch.nn.functional as F + +from transformers.activations import ACT2FN +from 
transformers.file_utils import ( + ModelOutput, +) +from transformers.modeling_outputs import ( + BaseModelOutputWithPastAndCrossAttentions, + BaseModelOutputWithPoolingAndCrossAttentions, + CausalLMOutputWithCrossAttentions, + MaskedLMOutput, + MultipleChoiceModelOutput, + NextSentencePredictorOutput, + QuestionAnsweringModelOutput, + SequenceClassifierOutput, + TokenClassifierOutput, +) +from transformers.modeling_utils import ( + PreTrainedModel, + apply_chunking_to_forward, + find_pruneable_heads_and_indices, + prune_linear_layer, +) +from transformers.utils import logging +from transformers.models.bert.configuration_bert import BertConfig + + +logger = logging.get_logger(__name__) + + +class BertEmbeddings(nn.Module): + """Construct the embeddings from word and position embeddings.""" + + def __init__(self, config): + super().__init__() + self.word_embeddings = nn.Embedding(config.vocab_size, config.hidden_size, padding_idx=config.pad_token_id) + self.position_embeddings = nn.Embedding(config.max_position_embeddings, config.hidden_size) + + # self.LayerNorm is not snake-cased to stick with TensorFlow model variable name and be able to load + # any TensorFlow checkpoint file + self.LayerNorm = nn.LayerNorm(config.hidden_size, eps=config.layer_norm_eps) + self.dropout = nn.Dropout(config.hidden_dropout_prob) + + # position_ids (1, len position emb) is contiguous in memory and exported when serialized + self.register_buffer("position_ids", torch.arange(config.max_position_embeddings).expand((1, -1))) + self.position_embedding_type = getattr(config, "position_embedding_type", "absolute") + + self.config = config + + def forward( + self, input_ids=None, position_ids=None, inputs_embeds=None, past_key_values_length=0 + ): + if input_ids is not None: + input_shape = input_ids.size() + else: + input_shape = inputs_embeds.size()[:-1] + + seq_length = input_shape[1] + + if position_ids is None: + position_ids = self.position_ids[:, past_key_values_length : seq_length + past_key_values_length] + + if inputs_embeds is None: + inputs_embeds = self.word_embeddings(input_ids) + + embeddings = inputs_embeds + + if self.position_embedding_type == "absolute": + position_embeddings = self.position_embeddings(position_ids) + embeddings += position_embeddings + embeddings = self.LayerNorm(embeddings) + embeddings = self.dropout(embeddings) + return embeddings + + +class BertSelfAttention(nn.Module): + def __init__(self, config, is_cross_attention): + super().__init__() + self.config = config + if config.hidden_size % config.num_attention_heads != 0 and not hasattr(config, "embedding_size"): + raise ValueError( + "The hidden size (%d) is not a multiple of the number of attention " + "heads (%d)" % (config.hidden_size, config.num_attention_heads) + ) + + self.num_attention_heads = config.num_attention_heads + self.attention_head_size = int(config.hidden_size / config.num_attention_heads) + self.all_head_size = self.num_attention_heads * self.attention_head_size + + self.query = nn.Linear(config.hidden_size, self.all_head_size) + if is_cross_attention: + self.key = nn.Linear(config.encoder_width, self.all_head_size) + self.value = nn.Linear(config.encoder_width, self.all_head_size) + else: + self.key = nn.Linear(config.hidden_size, self.all_head_size) + self.value = nn.Linear(config.hidden_size, self.all_head_size) + + self.dropout = nn.Dropout(config.attention_probs_dropout_prob) + self.position_embedding_type = getattr(config, "position_embedding_type", "absolute") + if self.position_embedding_type == 
"relative_key" or self.position_embedding_type == "relative_key_query": + self.max_position_embeddings = config.max_position_embeddings + self.distance_embedding = nn.Embedding(2 * config.max_position_embeddings - 1, self.attention_head_size) + self.save_attention = False + + def save_attn_gradients(self, attn_gradients): + self.attn_gradients = attn_gradients + + def get_attn_gradients(self): + return self.attn_gradients + + def save_attention_map(self, attention_map): + self.attention_map = attention_map + + def get_attention_map(self): + return self.attention_map + + def transpose_for_scores(self, x): + new_x_shape = x.size()[:-1] + (self.num_attention_heads, self.attention_head_size) + x = x.view(*new_x_shape) + return x.permute(0, 2, 1, 3) + + def forward( + self, + hidden_states, + attention_mask=None, + head_mask=None, + encoder_hidden_states=None, + encoder_attention_mask=None, + past_key_value=None, + output_attentions=False, + ): + mixed_query_layer = self.query(hidden_states) + + # If this is instantiated as a cross-attention module, the keys + # and values come from an encoder; the attention mask needs to be + # such that the encoder's padding tokens are not attended to. + is_cross_attention = encoder_hidden_states is not None + + if is_cross_attention: + key_layer = self.transpose_for_scores(self.key(encoder_hidden_states)) + value_layer = self.transpose_for_scores(self.value(encoder_hidden_states)) + attention_mask = encoder_attention_mask + elif past_key_value is not None: + key_layer = self.transpose_for_scores(self.key(hidden_states)) + value_layer = self.transpose_for_scores(self.value(hidden_states)) + key_layer = torch.cat([past_key_value[0], key_layer], dim=2) + value_layer = torch.cat([past_key_value[1], value_layer], dim=2) + else: + key_layer = self.transpose_for_scores(self.key(hidden_states)) + value_layer = self.transpose_for_scores(self.value(hidden_states)) + + query_layer = self.transpose_for_scores(mixed_query_layer) + + past_key_value = (key_layer, value_layer) + + # Take the dot product between "query" and "key" to get the raw attention scores. 
+        attention_scores = torch.matmul(query_layer, key_layer.transpose(-1, -2))
+
+        if self.position_embedding_type == "relative_key" or self.position_embedding_type == "relative_key_query":
+            seq_length = hidden_states.size()[1]
+            position_ids_l = torch.arange(seq_length, dtype=torch.long, device=hidden_states.device).view(-1, 1)
+            position_ids_r = torch.arange(seq_length, dtype=torch.long, device=hidden_states.device).view(1, -1)
+            distance = position_ids_l - position_ids_r
+            positional_embedding = self.distance_embedding(distance + self.max_position_embeddings - 1)
+            positional_embedding = positional_embedding.to(dtype=query_layer.dtype)  # fp16 compatibility
+
+            if self.position_embedding_type == "relative_key":
+                relative_position_scores = torch.einsum("bhld,lrd->bhlr", query_layer, positional_embedding)
+                attention_scores = attention_scores + relative_position_scores
+            elif self.position_embedding_type == "relative_key_query":
+                relative_position_scores_query = torch.einsum("bhld,lrd->bhlr", query_layer, positional_embedding)
+                relative_position_scores_key = torch.einsum("bhrd,lrd->bhlr", key_layer, positional_embedding)
+                attention_scores = attention_scores + relative_position_scores_query + relative_position_scores_key
+
+        attention_scores = attention_scores / math.sqrt(self.attention_head_size)
+        if attention_mask is not None:
+            # Apply the attention mask (precomputed for all layers in BertModel's forward() function)
+            attention_scores = attention_scores + attention_mask
+
+        # Normalize the attention scores to probabilities.
+        attention_probs = nn.Softmax(dim=-1)(attention_scores)
+
+        if is_cross_attention and self.save_attention:
+            self.save_attention_map(attention_probs)
+            attention_probs.register_hook(self.save_attn_gradients)
+
+        # This is actually dropping out entire tokens to attend to, which might
+        # seem a bit unusual, but is taken from the original Transformer paper.
+ attention_probs_dropped = self.dropout(attention_probs) + + # Mask heads if we want to + if head_mask is not None: + attention_probs_dropped = attention_probs_dropped * head_mask + + context_layer = torch.matmul(attention_probs_dropped, value_layer) + + context_layer = context_layer.permute(0, 2, 1, 3).contiguous() + new_context_layer_shape = context_layer.size()[:-2] + (self.all_head_size,) + context_layer = context_layer.view(*new_context_layer_shape) + + outputs = (context_layer, attention_probs) if output_attentions else (context_layer,) + + outputs = outputs + (past_key_value,) + return outputs + + +class BertSelfOutput(nn.Module): + def __init__(self, config, twin=False, merge=False): + super().__init__() + self.LayerNorm = nn.LayerNorm(config.hidden_size, eps=config.layer_norm_eps) + self.dropout = nn.Dropout(config.hidden_dropout_prob) + if twin: + self.dense0 = nn.Linear(config.hidden_size, config.hidden_size) + self.dense1 = nn.Linear(config.hidden_size, config.hidden_size) + else: + self.dense = nn.Linear(config.hidden_size, config.hidden_size) + if merge: + self.act = ACT2FN[config.hidden_act] + self.merge_layer = nn.Linear(config.hidden_size * 2, config.hidden_size) + self.merge = True + else: + self.merge = False + + def forward(self, hidden_states, input_tensor): + if type(hidden_states) == list: + hidden_states0 = self.dense0(hidden_states[0]) + hidden_states1 = self.dense1(hidden_states[1]) + if self.merge: + #hidden_states = self.merge_layer(self.act(torch.cat([hidden_states0,hidden_states1],dim=-1))) + hidden_states = self.merge_layer(torch.cat([hidden_states0,hidden_states1],dim=-1)) + else: + hidden_states = (hidden_states0+hidden_states1)/2 + else: + hidden_states = self.dense(hidden_states) + hidden_states = self.dropout(hidden_states) + hidden_states = self.LayerNorm(hidden_states + input_tensor) + return hidden_states + + +class BertAttention(nn.Module): + def __init__(self, config, is_cross_attention=False, layer_num=-1): + super().__init__() + if is_cross_attention: + self.self0 = BertSelfAttention(config, is_cross_attention) + self.self1 = BertSelfAttention(config, is_cross_attention) + else: + self.self = BertSelfAttention(config, is_cross_attention) + self.output = BertSelfOutput(config, twin=is_cross_attention, merge=(is_cross_attention and layer_num>=6)) + self.pruned_heads = set() + + def prune_heads(self, heads): + if len(heads) == 0: + return + heads, index = find_pruneable_heads_and_indices( + heads, self.self.num_attention_heads, self.self.attention_head_size, self.pruned_heads + ) + + # Prune linear layers + self.self.query = prune_linear_layer(self.self.query, index) + self.self.key = prune_linear_layer(self.self.key, index) + self.self.value = prune_linear_layer(self.self.value, index) + self.output.dense = prune_linear_layer(self.output.dense, index, dim=1) + + # Update hyper params and store pruned heads + self.self.num_attention_heads = self.self.num_attention_heads - len(heads) + self.self.all_head_size = self.self.attention_head_size * self.self.num_attention_heads + self.pruned_heads = self.pruned_heads.union(heads) + + def forward( + self, + hidden_states, + attention_mask=None, + head_mask=None, + encoder_hidden_states=None, + encoder_attention_mask=None, + past_key_value=None, + output_attentions=False, + ): + if type(encoder_hidden_states)==list: + self_outputs0 = self.self0( + hidden_states, + attention_mask, + head_mask, + encoder_hidden_states[0], + encoder_attention_mask[0], + past_key_value, + output_attentions, + ) + self_outputs1 = 
self.self1( + hidden_states, + attention_mask, + head_mask, + encoder_hidden_states[1], + encoder_attention_mask[1], + past_key_value, + output_attentions, + ) + attention_output = self.output([self_outputs0[0],self_outputs1[0]], hidden_states) + + outputs = (attention_output,) + self_outputs0[1:] # add attentions if we output them + else: + self_outputs = self.self( + hidden_states, + attention_mask, + head_mask, + encoder_hidden_states, + encoder_attention_mask, + past_key_value, + output_attentions, + ) + attention_output = self.output(self_outputs[0], hidden_states) + outputs = (attention_output,) + self_outputs[1:] # add attentions if we output them + return outputs + + +class BertIntermediate(nn.Module): + def __init__(self, config): + super().__init__() + self.dense = nn.Linear(config.hidden_size, config.intermediate_size) + if isinstance(config.hidden_act, str): + self.intermediate_act_fn = ACT2FN[config.hidden_act] + else: + self.intermediate_act_fn = config.hidden_act + + def forward(self, hidden_states): + hidden_states = self.dense(hidden_states) + hidden_states = self.intermediate_act_fn(hidden_states) + return hidden_states + + +class BertOutput(nn.Module): + def __init__(self, config): + super().__init__() + self.dense = nn.Linear(config.intermediate_size, config.hidden_size) + self.LayerNorm = nn.LayerNorm(config.hidden_size, eps=config.layer_norm_eps) + self.dropout = nn.Dropout(config.hidden_dropout_prob) + + def forward(self, hidden_states, input_tensor): + hidden_states = self.dense(hidden_states) + hidden_states = self.dropout(hidden_states) + hidden_states = self.LayerNorm(hidden_states + input_tensor) + return hidden_states + + +class BertLayer(nn.Module): + def __init__(self, config, layer_num): + super().__init__() + self.config = config + self.chunk_size_feed_forward = config.chunk_size_feed_forward + self.seq_len_dim = 1 + self.attention = BertAttention(config) + self.layer_num = layer_num + if self.config.add_cross_attention: + self.crossattention = BertAttention(config, is_cross_attention=self.config.add_cross_attention, layer_num=layer_num) + self.intermediate = BertIntermediate(config) + self.output = BertOutput(config) + + def forward( + self, + hidden_states, + attention_mask=None, + head_mask=None, + encoder_hidden_states=None, + encoder_attention_mask=None, + past_key_value=None, + output_attentions=False, + mode=None, + ): + # decoder uni-directional self-attention cached key/values tuple is at positions 1,2 + self_attn_past_key_value = past_key_value[:2] if past_key_value is not None else None + self_attention_outputs = self.attention( + hidden_states, + attention_mask, + head_mask, + output_attentions=output_attentions, + past_key_value=self_attn_past_key_value, + ) + attention_output = self_attention_outputs[0] + + outputs = self_attention_outputs[1:-1] + present_key_value = self_attention_outputs[-1] + + if mode=='multimodal': + assert encoder_hidden_states is not None, "encoder_hidden_states must be given for cross-attention layers" + cross_attention_outputs = self.crossattention( + attention_output, + attention_mask, + head_mask, + encoder_hidden_states, + encoder_attention_mask, + output_attentions=output_attentions, + ) + attention_output = cross_attention_outputs[0] + outputs = outputs + cross_attention_outputs[1:-1] # add cross attentions if we output attention weights + layer_output = apply_chunking_to_forward( + self.feed_forward_chunk, self.chunk_size_feed_forward, self.seq_len_dim, attention_output + ) + outputs = (layer_output,) + outputs 
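+        # the cached (key, value) tuple is appended last so that callers such as
+        # autoregressive generation can pass it back in as past_key_value on the next step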
+ + outputs = outputs + (present_key_value,) + + return outputs + + def feed_forward_chunk(self, attention_output): + intermediate_output = self.intermediate(attention_output) + layer_output = self.output(intermediate_output, attention_output) + return layer_output + + +class BertEncoder(nn.Module): + def __init__(self, config): + super().__init__() + self.config = config + self.layer = nn.ModuleList([BertLayer(config,i) for i in range(config.num_hidden_layers)]) + self.gradient_checkpointing = False + + def forward( + self, + hidden_states, + attention_mask=None, + head_mask=None, + encoder_hidden_states=None, + encoder_attention_mask=None, + past_key_values=None, + use_cache=None, + output_attentions=False, + output_hidden_states=False, + return_dict=True, + mode='multimodal', + ): + all_hidden_states = () if output_hidden_states else None + all_self_attentions = () if output_attentions else None + all_cross_attentions = () if output_attentions and self.config.add_cross_attention else None + + next_decoder_cache = () if use_cache else None + + for i in range(self.config.num_hidden_layers): + layer_module = self.layer[i] + if output_hidden_states: + all_hidden_states = all_hidden_states + (hidden_states,) + + layer_head_mask = head_mask[i] if head_mask is not None else None + past_key_value = past_key_values[i] if past_key_values is not None else None + + if self.gradient_checkpointing and self.training: + + if use_cache: + logger.warn( + "`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`..." + ) + use_cache = False + + def create_custom_forward(module): + def custom_forward(*inputs): + return module(*inputs, past_key_value, output_attentions) + + return custom_forward + + layer_outputs = torch.utils.checkpoint.checkpoint( + create_custom_forward(layer_module), + hidden_states, + attention_mask, + layer_head_mask, + encoder_hidden_states, + encoder_attention_mask, + mode=mode, + ) + else: + layer_outputs = layer_module( + hidden_states, + attention_mask, + layer_head_mask, + encoder_hidden_states, + encoder_attention_mask, + past_key_value, + output_attentions, + mode=mode, + ) + + hidden_states = layer_outputs[0] + if use_cache: + next_decoder_cache += (layer_outputs[-1],) + if output_attentions: + all_self_attentions = all_self_attentions + (layer_outputs[1],) + + if output_hidden_states: + all_hidden_states = all_hidden_states + (hidden_states,) + + if not return_dict: + return tuple( + v + for v in [ + hidden_states, + next_decoder_cache, + all_hidden_states, + all_self_attentions, + all_cross_attentions, + ] + if v is not None + ) + return BaseModelOutputWithPastAndCrossAttentions( + last_hidden_state=hidden_states, + past_key_values=next_decoder_cache, + hidden_states=all_hidden_states, + attentions=all_self_attentions, + cross_attentions=all_cross_attentions, + ) + + +class BertPooler(nn.Module): + def __init__(self, config): + super().__init__() + self.dense = nn.Linear(config.hidden_size, config.hidden_size) + self.activation = nn.Tanh() + + def forward(self, hidden_states): + # We "pool" the model by simply taking the hidden state corresponding + # to the first token. 
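+        # note: with BERT-style tokenization this first token is the [CLS] token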
+ first_token_tensor = hidden_states[:, 0] + pooled_output = self.dense(first_token_tensor) + pooled_output = self.activation(pooled_output) + return pooled_output + + + class BertPredictionHeadTransform(nn.Module): + def __init__(self, config): + super().__init__() + self.dense = nn.Linear(config.hidden_size, config.hidden_size) + if isinstance(config.hidden_act, str): + self.transform_act_fn = ACT2FN[config.hidden_act] + else: + self.transform_act_fn = config.hidden_act + self.LayerNorm = nn.LayerNorm(config.hidden_size, eps=config.layer_norm_eps) + + def forward(self, hidden_states): + hidden_states = self.dense(hidden_states) + hidden_states = self.transform_act_fn(hidden_states) + hidden_states = self.LayerNorm(hidden_states) + return hidden_states + + + class BertLMPredictionHead(nn.Module): + def __init__(self, config): + super().__init__() + self.transform = BertPredictionHeadTransform(config) + + # The output weights are the same as the input embeddings, but there is + # an output-only bias for each token. + self.decoder = nn.Linear(config.hidden_size, config.vocab_size, bias=False) + + self.bias = nn.Parameter(torch.zeros(config.vocab_size)) + + # Need a link between the two variables so that the bias is correctly resized with `resize_token_embeddings` + self.decoder.bias = self.bias + + def forward(self, hidden_states): + hidden_states = self.transform(hidden_states) + hidden_states = self.decoder(hidden_states) + return hidden_states + + + class BertOnlyMLMHead(nn.Module): + def __init__(self, config): + super().__init__() + self.predictions = BertLMPredictionHead(config) + + def forward(self, sequence_output): + prediction_scores = self.predictions(sequence_output) + return prediction_scores + + + class BertPreTrainedModel(PreTrainedModel): + """ + An abstract class to handle weights initialization and a simple interface for downloading and loading pretrained + models. + """ + + config_class = BertConfig + base_model_prefix = "bert" + _keys_to_ignore_on_load_missing = [r"position_ids"] + + def _init_weights(self, module): + """ Initialize the weights """ + if isinstance(module, (nn.Linear, nn.Embedding)): + # Slightly different from the TF version which uses truncated_normal for initialization + # cf https://github.com/pytorch/pytorch/pull/5617 + module.weight.data.normal_(mean=0.0, std=self.config.initializer_range) + elif isinstance(module, nn.LayerNorm): + module.bias.data.zero_() + module.weight.data.fill_(1.0) + if isinstance(module, nn.Linear) and module.bias is not None: + module.bias.data.zero_() + + + class BertModel(BertPreTrainedModel): + """ + The model can behave as an encoder (with only self-attention) as well as a decoder, in which case a layer of + cross-attention is added between the self-attention layers, following the architecture described in `Attention is + all you need <https://arxiv.org/abs/1706.03762>`__ by Ashish Vaswani, Noam Shazeer, Niki Parmar, Jakob Uszkoreit, + Llion Jones, Aidan N. Gomez, Lukasz Kaiser and Illia Polosukhin. To behave as a decoder the model needs to be + initialized with the :obj:`is_decoder` argument of the configuration set to :obj:`True`. To be used in a Seq2Seq + model, the model needs to be initialized with both the :obj:`is_decoder` argument and :obj:`add_cross_attention` + set to :obj:`True`; an :obj:`encoder_hidden_states` is then expected as an + input to the forward pass.
+ """ + + def __init__(self, config, add_pooling_layer=True): + super().__init__(config) + self.config = config + + self.embeddings = BertEmbeddings(config) + + self.encoder = BertEncoder(config) + + self.pooler = BertPooler(config) if add_pooling_layer else None + + self.init_weights() + + + def get_input_embeddings(self): + return self.embeddings.word_embeddings + + def set_input_embeddings(self, value): + self.embeddings.word_embeddings = value + + def _prune_heads(self, heads_to_prune): + """ + Prunes heads of the model. heads_to_prune: dict of {layer_num: list of heads to prune in this layer} See base + class PreTrainedModel + """ + for layer, heads in heads_to_prune.items(): + self.encoder.layer[layer].attention.prune_heads(heads) + + + def get_extended_attention_mask(self, attention_mask: Tensor, input_shape: Tuple[int], device: device, is_decoder: bool) -> Tensor: + """ + Makes broadcastable attention and causal masks so that future and masked tokens are ignored. + + Arguments: + attention_mask (:obj:`torch.Tensor`): + Mask with ones indicating tokens to attend to, zeros for tokens to ignore. + input_shape (:obj:`Tuple[int]`): + The shape of the input to the model. + device: (:obj:`torch.device`): + The device of the input to the model. + + Returns: + :obj:`torch.Tensor` The extended attention mask, with a the same dtype as :obj:`attention_mask.dtype`. + """ + # We can provide a self-attention mask of dimensions [batch_size, from_seq_length, to_seq_length] + # ourselves in which case we just need to make it broadcastable to all heads. + if attention_mask.dim() == 3: + extended_attention_mask = attention_mask[:, None, :, :] + elif attention_mask.dim() == 2: + # Provided a padding mask of dimensions [batch_size, seq_length] + # - if the model is a decoder, apply a causal mask in addition to the padding mask + # - if the model is an encoder, make the mask broadcastable to [batch_size, num_heads, seq_length, seq_length] + if is_decoder: + batch_size, seq_length = input_shape + + seq_ids = torch.arange(seq_length, device=device) + causal_mask = seq_ids[None, None, :].repeat(batch_size, seq_length, 1) <= seq_ids[None, :, None] + # in case past_key_values are used we need to add a prefix ones mask to the causal mask + # causal and attention masks must have same type with pytorch version < 1.3 + causal_mask = causal_mask.to(attention_mask.dtype) + + if causal_mask.shape[1] < attention_mask.shape[1]: + prefix_seq_len = attention_mask.shape[1] - causal_mask.shape[1] + causal_mask = torch.cat( + [ + torch.ones((batch_size, seq_length, prefix_seq_len), device=device, dtype=causal_mask.dtype), + causal_mask, + ], + axis=-1, + ) + + extended_attention_mask = causal_mask[:, None, :, :] * attention_mask[:, None, None, :] + else: + extended_attention_mask = attention_mask[:, None, None, :] + else: + raise ValueError( + "Wrong shape for input_ids (shape {}) or attention_mask (shape {})".format( + input_shape, attention_mask.shape + ) + ) + + # Since attention_mask is 1.0 for positions we want to attend and 0.0 for + # masked positions, this operation will create a tensor which is 0.0 for + # positions we want to attend and -10000.0 for masked positions. + # Since we are adding it to the raw scores before the softmax, this is + # effectively the same as removing these entirely. 
+ extended_attention_mask = extended_attention_mask.to(dtype=self.dtype) # fp16 compatibility + extended_attention_mask = (1.0 - extended_attention_mask) * -10000.0 + return extended_attention_mask + + def forward( + self, + input_ids=None, + attention_mask=None, + position_ids=None, + head_mask=None, + inputs_embeds=None, + encoder_embeds=None, + encoder_hidden_states=None, + encoder_attention_mask=None, + past_key_values=None, + use_cache=None, + output_attentions=None, + output_hidden_states=None, + return_dict=None, + is_decoder=False, + mode='multimodal', + ): + r""" + encoder_hidden_states (:obj:`torch.FloatTensor` of shape :obj:`(batch_size, sequence_length, hidden_size)`, `optional`): + Sequence of hidden-states at the output of the last layer of the encoder. Used in the cross-attention if + the model is configured as a decoder. + encoder_attention_mask (:obj:`torch.FloatTensor` of shape :obj:`(batch_size, sequence_length)`, `optional`): + Mask to avoid performing attention on the padding token indices of the encoder input. This mask is used in + the cross-attention if the model is configured as a decoder. Mask values selected in ``[0, 1]``: + - 1 for tokens that are **not masked**, + - 0 for tokens that are **masked**. + past_key_values (:obj:`tuple(tuple(torch.FloatTensor))` of length :obj:`config.n_layers` with each tuple having 4 tensors of shape :obj:`(batch_size, num_heads, sequence_length - 1, embed_size_per_head)`): + Contains precomputed key and value hidden states of the attention blocks. Can be used to speed up decoding. + If :obj:`past_key_values` are used, the user can optionally input only the last :obj:`decoder_input_ids` + (those that don't have their past key value states given to this model) of shape :obj:`(batch_size, 1)` + instead of all :obj:`decoder_input_ids` of shape :obj:`(batch_size, sequence_length)`. + use_cache (:obj:`bool`, `optional`): + If set to :obj:`True`, :obj:`past_key_values` key value states are returned and can be used to speed up + decoding (see :obj:`past_key_values`). 
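+ encoder_embeds (:obj:`torch.FloatTensor` of shape :obj:`(batch_size, sequence_length, hidden_size)`, `optional`):
+ Precomputed embedding output; when given, the internal embedding layer is bypassed.
+ is_decoder (:obj:`bool`, `optional`, defaults to :obj:`False`):
+ If :obj:`True`, a causal mask is combined with the padding mask and :obj:`use_cache` is honored.
+ mode (:obj:`str`, `optional`, defaults to :obj:`'multimodal'`):
+ :obj:`'text'` runs self-attention only; :obj:`'multimodal'` additionally cross-attends to
+ :obj:`encoder_hidden_states` (e.g. image features) in every layer.
+ Example (a minimal sketch; the tokenized inputs are assumed, not produced by this file)::
+ >>> outputs = model(input_ids, attention_mask=attention_mask, mode='text')
+ >>> text_feat = outputs.last_hidden_state[:, 0, :] # [CLS] representation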
+ """ + output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions + output_hidden_states = ( + output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states + ) + return_dict = return_dict if return_dict is not None else self.config.use_return_dict + + if is_decoder: + use_cache = use_cache if use_cache is not None else self.config.use_cache + else: + use_cache = False + + if input_ids is not None and inputs_embeds is not None: + raise ValueError("You cannot specify both input_ids and inputs_embeds at the same time") + elif input_ids is not None: + input_shape = input_ids.size() + batch_size, seq_length = input_shape + device = input_ids.device + elif inputs_embeds is not None: + input_shape = inputs_embeds.size()[:-1] + batch_size, seq_length = input_shape + device = inputs_embeds.device + elif encoder_embeds is not None: + input_shape = encoder_embeds.size()[:-1] + batch_size, seq_length = input_shape + device = encoder_embeds.device + else: + raise ValueError("You have to specify either input_ids or inputs_embeds or encoder_embeds") + + # past_key_values_length + past_key_values_length = past_key_values[0][0].shape[2] if past_key_values is not None else 0 + + if attention_mask is None: + attention_mask = torch.ones(((batch_size, seq_length + past_key_values_length)), device=device) + + # We can provide a self-attention mask of dimensions [batch_size, from_seq_length, to_seq_length] + # ourselves in which case we just need to make it broadcastable to all heads. + extended_attention_mask: torch.Tensor = self.get_extended_attention_mask(attention_mask, input_shape, + device, is_decoder) + + # If a 2D or 3D attention mask is provided for the cross-attention + # we need to make broadcastable to [batch_size, num_heads, seq_length, seq_length] + if encoder_hidden_states is not None: + if type(encoder_hidden_states) == list: + encoder_batch_size, encoder_sequence_length, _ = encoder_hidden_states[0].size() + else: + encoder_batch_size, encoder_sequence_length, _ = encoder_hidden_states.size() + encoder_hidden_shape = (encoder_batch_size, encoder_sequence_length) + + if type(encoder_attention_mask) == list: + encoder_extended_attention_mask = [self.invert_attention_mask(mask) for mask in encoder_attention_mask] + elif encoder_attention_mask is None: + encoder_attention_mask = torch.ones(encoder_hidden_shape, device=device) + encoder_extended_attention_mask = self.invert_attention_mask(encoder_attention_mask) + else: + encoder_extended_attention_mask = self.invert_attention_mask(encoder_attention_mask) + else: + encoder_extended_attention_mask = None + + # Prepare head mask if needed + # 1.0 in head_mask indicate we keep the head + # attention_probs has shape bsz x n_heads x N x N + # input head_mask has shape [num_heads] or [num_hidden_layers x num_heads] + # and head_mask is converted to shape [num_hidden_layers x batch x num_heads x seq_length x seq_length] + head_mask = self.get_head_mask(head_mask, self.config.num_hidden_layers) + + if encoder_embeds is None: + embedding_output = self.embeddings( + input_ids=input_ids, + position_ids=position_ids, + inputs_embeds=inputs_embeds, + past_key_values_length=past_key_values_length, + ) + else: + embedding_output = encoder_embeds + + encoder_outputs = self.encoder( + embedding_output, + attention_mask=extended_attention_mask, + head_mask=head_mask, + encoder_hidden_states=encoder_hidden_states, + encoder_attention_mask=encoder_extended_attention_mask, + 
past_key_values=past_key_values, + use_cache=use_cache, + output_attentions=output_attentions, + output_hidden_states=output_hidden_states, + return_dict=return_dict, + mode=mode, + ) + sequence_output = encoder_outputs[0] + pooled_output = self.pooler(sequence_output) if self.pooler is not None else None + + if not return_dict: + return (sequence_output, pooled_output) + encoder_outputs[1:] + + return BaseModelOutputWithPoolingAndCrossAttentions( + last_hidden_state=sequence_output, + pooler_output=pooled_output, + past_key_values=encoder_outputs.past_key_values, + hidden_states=encoder_outputs.hidden_states, + attentions=encoder_outputs.attentions, + cross_attentions=encoder_outputs.cross_attentions, + ) + diff --git a/models/vit.py b/models/vit.py new file mode 100644 index 00000000..cec3d8e0 --- /dev/null +++ b/models/vit.py @@ -0,0 +1,305 @@ +''' + * Copyright (c) 2022, salesforce.com, inc. + * All rights reserved. + * SPDX-License-Identifier: BSD-3-Clause + * For full license text, see LICENSE.txt file in the repo root or https://opensource.org/licenses/BSD-3-Clause + * By Junnan Li + * Based on timm code base + * https://github.com/rwightman/pytorch-image-models/tree/master/timm +''' + +import torch +import torch.nn as nn +import torch.nn.functional as F +from functools import partial + +from timm.models.vision_transformer import _cfg, PatchEmbed +from timm.models.registry import register_model +from timm.models.layers import trunc_normal_, DropPath +from timm.models.helpers import named_apply, adapt_input_conv + +from fairscale.nn.checkpoint.checkpoint_activations import checkpoint_wrapper + +class Mlp(nn.Module): + """ MLP as used in Vision Transformer, MLP-Mixer and related networks + """ + def __init__(self, in_features, hidden_features=None, out_features=None, act_layer=nn.GELU, drop=0.): + super().__init__() + out_features = out_features or in_features + hidden_features = hidden_features or in_features + self.fc1 = nn.Linear(in_features, hidden_features) + self.act = act_layer() + self.fc2 = nn.Linear(hidden_features, out_features) + self.drop = nn.Dropout(drop) + + def forward(self, x): + x = self.fc1(x) + x = self.act(x) + x = self.drop(x) + x = self.fc2(x) + x = self.drop(x) + return x + + +class Attention(nn.Module): + def __init__(self, dim, num_heads=8, qkv_bias=False, qk_scale=None, attn_drop=0., proj_drop=0.): + super().__init__() + self.num_heads = num_heads + head_dim = dim // num_heads + # NOTE scale factor was wrong in my original version, can set manually to be compat with prev weights + self.scale = qk_scale or head_dim ** -0.5 + self.qkv = nn.Linear(dim, dim * 3, bias=qkv_bias) + self.attn_drop = nn.Dropout(attn_drop) + self.proj = nn.Linear(dim, dim) + self.proj_drop = nn.Dropout(proj_drop) + self.attn_gradients = None + self.attention_map = None + + def save_attn_gradients(self, attn_gradients): + self.attn_gradients = attn_gradients + + def get_attn_gradients(self): + return self.attn_gradients + + def save_attention_map(self, attention_map): + self.attention_map = attention_map + + def get_attention_map(self): + return self.attention_map + + def forward(self, x, register_hook=False): + B, N, C = x.shape + qkv = self.qkv(x).reshape(B, N, 3, self.num_heads, C // self.num_heads).permute(2, 0, 3, 1, 4) + q, k, v = qkv[0], qkv[1], qkv[2] # make torchscript happy (cannot use tensor as tuple) + + attn = (q @ k.transpose(-2, -1)) * self.scale + attn = attn.softmax(dim=-1) + attn = self.attn_drop(attn) + + if register_hook: + self.save_attention_map(attn) + 
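+ # the hook below records d(loss)/d(attn) during backward; together with the
+ # saved attention map this supports Grad-CAM-style visualizations, and is only
+ # valid while autograd is recording (i.e. not under torch.no_grad())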
attn.register_hook(self.save_attn_gradients) + + x = (attn @ v).transpose(1, 2).reshape(B, N, C) + x = self.proj(x) + x = self.proj_drop(x) + return x + + +class Block(nn.Module): + + def __init__(self, dim, num_heads, mlp_ratio=4., qkv_bias=False, qk_scale=None, drop=0., attn_drop=0., + drop_path=0., act_layer=nn.GELU, norm_layer=nn.LayerNorm, use_grad_checkpointing=False): + super().__init__() + self.norm1 = norm_layer(dim) + self.attn = Attention( + dim, num_heads=num_heads, qkv_bias=qkv_bias, qk_scale=qk_scale, attn_drop=attn_drop, proj_drop=drop) + # NOTE: drop path for stochastic depth, we shall see if this is better than dropout here + self.drop_path = DropPath(drop_path) if drop_path > 0. else nn.Identity() + self.norm2 = norm_layer(dim) + mlp_hidden_dim = int(dim * mlp_ratio) + self.mlp = Mlp(in_features=dim, hidden_features=mlp_hidden_dim, act_layer=act_layer, drop=drop) + + if use_grad_checkpointing: + self.attn = checkpoint_wrapper(self.attn) + self.mlp = checkpoint_wrapper(self.mlp) + + def forward(self, x, register_hook=False): + x = x + self.drop_path(self.attn(self.norm1(x), register_hook=register_hook)) + x = x + self.drop_path(self.mlp(self.norm2(x))) + return x + + +class VisionTransformer(nn.Module): + """ Vision Transformer + A PyTorch impl of : `An Image is Worth 16x16 Words: Transformers for Image Recognition at Scale` - + https://arxiv.org/abs/2010.11929 + """ + def __init__(self, img_size=224, patch_size=16, in_chans=3, num_classes=1000, embed_dim=768, depth=12, + num_heads=12, mlp_ratio=4., qkv_bias=True, qk_scale=None, representation_size=None, + drop_rate=0., attn_drop_rate=0., drop_path_rate=0., norm_layer=None, + use_grad_checkpointing=False, ckpt_layer=0): + """ + Args: + img_size (int, tuple): input image size + patch_size (int, tuple): patch size + in_chans (int): number of input channels + num_classes (int): number of classes for classification head + embed_dim (int): embedding dimension + depth (int): depth of transformer + num_heads (int): number of attention heads + mlp_ratio (int): ratio of mlp hidden dim to embedding dim + qkv_bias (bool): enable bias for qkv if True + qk_scale (float): override default qk scale of head_dim ** -0.5 if set + representation_size (Optional[int]): enable and set representation layer (pre-logits) to this value if set + drop_rate (float): dropout rate + attn_drop_rate (float): attention dropout rate + drop_path_rate (float): stochastic depth rate + norm_layer: (nn.Module): normalization layer + """ + super().__init__() + self.num_features = self.embed_dim = embed_dim # num_features for consistency with other models + norm_layer = norm_layer or partial(nn.LayerNorm, eps=1e-6) + + self.patch_embed = PatchEmbed( + img_size=img_size, patch_size=patch_size, in_chans=in_chans, embed_dim=embed_dim) + + num_patches = self.patch_embed.num_patches + + self.cls_token = nn.Parameter(torch.zeros(1, 1, embed_dim)) + self.pos_embed = nn.Parameter(torch.zeros(1, num_patches + 1, embed_dim)) + self.pos_drop = nn.Dropout(p=drop_rate) + + dpr = [x.item() for x in torch.linspace(0, drop_path_rate, depth)] # stochastic depth decay rule + self.blocks = nn.ModuleList([ + Block( + dim=embed_dim, num_heads=num_heads, mlp_ratio=mlp_ratio, qkv_bias=qkv_bias, qk_scale=qk_scale, + drop=drop_rate, attn_drop=attn_drop_rate, drop_path=dpr[i], norm_layer=norm_layer, + use_grad_checkpointing=(use_grad_checkpointing and i>=depth-ckpt_layer) + ) + for i in range(depth)]) + self.norm = norm_layer(embed_dim) + + trunc_normal_(self.pos_embed, std=.02) + 
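+ # the class token gets the same truncated-normal init; Linear and LayerNorm
+ # parameters are then initialized by `_init_weights` via `self.apply()` below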
trunc_normal_(self.cls_token, std=.02) + self.apply(self._init_weights) + + def _init_weights(self, m): + if isinstance(m, nn.Linear): + trunc_normal_(m.weight, std=.02) + if isinstance(m, nn.Linear) and m.bias is not None: + nn.init.constant_(m.bias, 0) + elif isinstance(m, nn.LayerNorm): + nn.init.constant_(m.bias, 0) + nn.init.constant_(m.weight, 1.0) + + @torch.jit.ignore + def no_weight_decay(self): + return {'pos_embed', 'cls_token'} + + def forward(self, x, register_blk=-1): + B = x.shape[0] + x = self.patch_embed(x) + + cls_tokens = self.cls_token.expand(B, -1, -1) # stole cls_tokens impl from Phil Wang, thanks + x = torch.cat((cls_tokens, x), dim=1) + + x = x + self.pos_embed[:,:x.size(1),:] + x = self.pos_drop(x) + + for i,blk in enumerate(self.blocks): + x = blk(x, register_blk==i) + x = self.norm(x) + + return x + + @torch.jit.ignore() + def load_pretrained(self, checkpoint_path, prefix=''): + _load_weights(self, checkpoint_path, prefix) + + +@torch.no_grad() +def _load_weights(model: VisionTransformer, checkpoint_path: str, prefix: str = ''): + """ Load weights from .npz checkpoints for official Google Brain Flax implementation + """ + import numpy as np + + def _n2p(w, t=True): + if w.ndim == 4 and w.shape[0] == w.shape[1] == w.shape[2] == 1: + w = w.flatten() + if t: + if w.ndim == 4: + w = w.transpose([3, 2, 0, 1]) + elif w.ndim == 3: + w = w.transpose([2, 0, 1]) + elif w.ndim == 2: + w = w.transpose([1, 0]) + return torch.from_numpy(w) + + w = np.load(checkpoint_path) + if not prefix and 'opt/target/embedding/kernel' in w: + prefix = 'opt/target/' + + if hasattr(model.patch_embed, 'backbone'): + # hybrid + backbone = model.patch_embed.backbone + stem_only = not hasattr(backbone, 'stem') + stem = backbone if stem_only else backbone.stem + stem.conv.weight.copy_(adapt_input_conv(stem.conv.weight.shape[1], _n2p(w[f'{prefix}conv_root/kernel']))) + stem.norm.weight.copy_(_n2p(w[f'{prefix}gn_root/scale'])) + stem.norm.bias.copy_(_n2p(w[f'{prefix}gn_root/bias'])) + if not stem_only: + for i, stage in enumerate(backbone.stages): + for j, block in enumerate(stage.blocks): + bp = f'{prefix}block{i + 1}/unit{j + 1}/' + for r in range(3): + getattr(block, f'conv{r + 1}').weight.copy_(_n2p(w[f'{bp}conv{r + 1}/kernel'])) + getattr(block, f'norm{r + 1}').weight.copy_(_n2p(w[f'{bp}gn{r + 1}/scale'])) + getattr(block, f'norm{r + 1}').bias.copy_(_n2p(w[f'{bp}gn{r + 1}/bias'])) + if block.downsample is not None: + block.downsample.conv.weight.copy_(_n2p(w[f'{bp}conv_proj/kernel'])) + block.downsample.norm.weight.copy_(_n2p(w[f'{bp}gn_proj/scale'])) + block.downsample.norm.bias.copy_(_n2p(w[f'{bp}gn_proj/bias'])) + embed_conv_w = _n2p(w[f'{prefix}embedding/kernel']) + else: + embed_conv_w = adapt_input_conv( + model.patch_embed.proj.weight.shape[1], _n2p(w[f'{prefix}embedding/kernel'])) + model.patch_embed.proj.weight.copy_(embed_conv_w) + model.patch_embed.proj.bias.copy_(_n2p(w[f'{prefix}embedding/bias'])) + model.cls_token.copy_(_n2p(w[f'{prefix}cls'], t=False)) + pos_embed_w = _n2p(w[f'{prefix}Transformer/posembed_input/pos_embedding'], t=False) + if pos_embed_w.shape != model.pos_embed.shape: + pos_embed_w = resize_pos_embed( # resize pos embedding when different size from pretrained weights + pos_embed_w, model.pos_embed, getattr(model, 'num_tokens', 1), model.patch_embed.grid_size) + model.pos_embed.copy_(pos_embed_w) + model.norm.weight.copy_(_n2p(w[f'{prefix}Transformer/encoder_norm/scale'])) + model.norm.bias.copy_(_n2p(w[f'{prefix}Transformer/encoder_norm/bias'])) +# if 
isinstance(model.head, nn.Linear) and model.head.bias.shape[0] == w[f'{prefix}head/bias'].shape[-1]: +# model.head.weight.copy_(_n2p(w[f'{prefix}head/kernel'])) +# model.head.bias.copy_(_n2p(w[f'{prefix}head/bias'])) +# if isinstance(getattr(model.pre_logits, 'fc', None), nn.Linear) and f'{prefix}pre_logits/bias' in w: +# model.pre_logits.fc.weight.copy_(_n2p(w[f'{prefix}pre_logits/kernel'])) +# model.pre_logits.fc.bias.copy_(_n2p(w[f'{prefix}pre_logits/bias'])) + for i, block in enumerate(model.blocks.children()): + block_prefix = f'{prefix}Transformer/encoderblock_{i}/' + mha_prefix = block_prefix + 'MultiHeadDotProductAttention_1/' + block.norm1.weight.copy_(_n2p(w[f'{block_prefix}LayerNorm_0/scale'])) + block.norm1.bias.copy_(_n2p(w[f'{block_prefix}LayerNorm_0/bias'])) + block.attn.qkv.weight.copy_(torch.cat([ + _n2p(w[f'{mha_prefix}{n}/kernel'], t=False).flatten(1).T for n in ('query', 'key', 'value')])) + block.attn.qkv.bias.copy_(torch.cat([ + _n2p(w[f'{mha_prefix}{n}/bias'], t=False).reshape(-1) for n in ('query', 'key', 'value')])) + block.attn.proj.weight.copy_(_n2p(w[f'{mha_prefix}out/kernel']).flatten(1)) + block.attn.proj.bias.copy_(_n2p(w[f'{mha_prefix}out/bias'])) + for r in range(2): + getattr(block.mlp, f'fc{r + 1}').weight.copy_(_n2p(w[f'{block_prefix}MlpBlock_3/Dense_{r}/kernel'])) + getattr(block.mlp, f'fc{r + 1}').bias.copy_(_n2p(w[f'{block_prefix}MlpBlock_3/Dense_{r}/bias'])) + block.norm2.weight.copy_(_n2p(w[f'{block_prefix}LayerNorm_2/scale'])) + block.norm2.bias.copy_(_n2p(w[f'{block_prefix}LayerNorm_2/bias'])) + + +def interpolate_pos_embed(pos_embed_checkpoint, visual_encoder): + # interpolate position embedding + embedding_size = pos_embed_checkpoint.shape[-1] + num_patches = visual_encoder.patch_embed.num_patches + num_extra_tokens = visual_encoder.pos_embed.shape[-2] - num_patches + # height (== width) for the checkpoint position embedding + orig_size = int((pos_embed_checkpoint.shape[-2] - num_extra_tokens) ** 0.5) + # height (== width) for the new position embedding + new_size = int(num_patches ** 0.5) + + if orig_size!=new_size: + # class_token and dist_token are kept unchanged + extra_tokens = pos_embed_checkpoint[:, :num_extra_tokens] + # only the position tokens are interpolated + pos_tokens = pos_embed_checkpoint[:, num_extra_tokens:] + pos_tokens = pos_tokens.reshape(-1, orig_size, orig_size, embedding_size).permute(0, 3, 1, 2) + pos_tokens = torch.nn.functional.interpolate( + pos_tokens, size=(new_size, new_size), mode='bicubic', align_corners=False) + pos_tokens = pos_tokens.permute(0, 2, 3, 1).flatten(1, 2) + new_pos_embed = torch.cat((extra_tokens, pos_tokens), dim=1) + print('reshape position embedding from %d to %d'%(orig_size ** 2,new_size ** 2)) + + return new_pos_embed + else: + return pos_embed_checkpoint \ No newline at end of file diff --git a/pretrain.py b/pretrain.py new file mode 100644 index 00000000..c9490ec8 --- /dev/null +++ b/pretrain.py @@ -0,0 +1,173 @@ +''' + * Copyright (c) 2022, salesforce.com, inc. + * All rights reserved. 
+ * SPDX-License-Identifier: BSD-3-Clause + * For full license text, see LICENSE.txt file in the repo root or https://opensource.org/licenses/BSD-3-Clause + * By Junnan Li +''' +import argparse +import os +import ruamel_yaml as yaml +import numpy as np +import random +import time +import datetime +import json +from pathlib import Path + +import torch +import torch.nn as nn +import torch.nn.functional as F +import torch.backends.cudnn as cudnn +import torch.distributed as dist +from torch.utils.data import DataLoader + +from models.blip_pretrain import blip_pretrain +import utils +from utils import warmup_lr_schedule, step_lr_schedule +from data import create_dataset, create_sampler, create_loader + +def train(model, data_loader, optimizer, epoch, device, config): + # train + model.train() + + metric_logger = utils.MetricLogger(delimiter=" ") + metric_logger.add_meter('lr', utils.SmoothedValue(window_size=50, fmt='{value:.6f}')) + metric_logger.add_meter('loss_ita', utils.SmoothedValue(window_size=50, fmt='{value:.4f}')) + metric_logger.add_meter('loss_itm', utils.SmoothedValue(window_size=50, fmt='{value:.4f}')) + metric_logger.add_meter('loss_lm', utils.SmoothedValue(window_size=50, fmt='{value:.4f}')) + + header = 'Train Epoch: [{}]'.format(epoch) + print_freq = 50 + + if config['laion_path']: + data_loader.dataset.reload_laion(epoch) + + data_loader.sampler.set_epoch(epoch) + + for i, (image, caption) in enumerate(metric_logger.log_every(data_loader, print_freq, header)): + + if epoch==0: + warmup_lr_schedule(optimizer, i, config['warmup_steps'], config['warmup_lr'], config['init_lr']) + + optimizer.zero_grad() + + image = image.to(device,non_blocking=True) + + # ramp up alpha in the first 2 epochs + alpha = config['alpha']*min(1,(epoch*len(data_loader)+i)/(2*len(data_loader))) + + loss_ita, loss_itm, loss_lm = model(image, caption, alpha = alpha) + loss = loss_ita + loss_itm + loss_lm + + loss.backward() + optimizer.step() + + metric_logger.update(loss_ita=loss_ita.item()) + metric_logger.update(loss_itm=loss_itm.item()) + metric_logger.update(loss_lm=loss_lm.item()) + metric_logger.update(lr=optimizer.param_groups[0]["lr"]) + + + # gather the stats from all processes + metric_logger.synchronize_between_processes() + print("Averaged stats:", metric_logger.global_avg()) + return {k: "{:.3f}".format(meter.global_avg) for k, meter in metric_logger.meters.items()} + + +def main(args, config): + utils.init_distributed_mode(args) + + device = torch.device(args.device) + + # fix the seed for reproducibility + seed = args.seed + utils.get_rank() + torch.manual_seed(seed) + np.random.seed(seed) + random.seed(seed) + cudnn.benchmark = True + + #### Dataset #### + print("Creating dataset") + datasets = [create_dataset('pretrain', config, min_scale=0.2)] + print('number of training samples: %d'%len(datasets[0])) + + num_tasks = utils.get_world_size() + global_rank = utils.get_rank() + samplers = create_sampler(datasets, [True], num_tasks, global_rank) + + data_loader = create_loader(datasets,samplers,batch_size=[config['batch_size']], num_workers=[4], is_trains=[True], collate_fns=[None])[0] + + #### Model #### + print("Creating model") + model = blip_pretrain(image_size=config['image_size'], vit=config['vit'], vit_grad_ckpt=config['vit_grad_ckpt'], + vit_ckpt_layer=config['vit_ckpt_layer'], queue_size=config['queue_size']) + + model = model.to(device) + + optimizer = torch.optim.AdamW(params=model.parameters(), lr=config['init_lr'], weight_decay=config['weight_decay']) + + start_epoch = 0 + if 
args.checkpoint: + checkpoint = torch.load(args.checkpoint, map_location='cpu') + state_dict = checkpoint['model'] + model.load_state_dict(state_dict) + + optimizer.load_state_dict(checkpoint['optimizer']) + start_epoch = checkpoint['epoch']+1 + print('resume checkpoint from %s'%args.checkpoint) + + model_without_ddp = model + if args.distributed: + model = torch.nn.parallel.DistributedDataParallel(model, device_ids=[args.gpu]) + model_without_ddp = model.module + + print("Start training") + start_time = time.time() + for epoch in range(start_epoch, config['max_epoch']): + + step_lr_schedule(optimizer, epoch, config['init_lr'], config['min_lr'], config['lr_decay_rate']) + + train_stats = train(model, data_loader, optimizer, epoch, device, config) + if utils.is_main_process(): + log_stats = {**{f'train_{k}': v for k, v in train_stats.items()}, + 'epoch': epoch, + } + save_obj = { + 'model': model_without_ddp.state_dict(), + 'optimizer': optimizer.state_dict(), + 'config': config, + 'epoch': epoch, + } + torch.save(save_obj, os.path.join(args.output_dir, 'checkpoint_%02d.pth'%epoch)) + + with open(os.path.join(args.output_dir, "log.txt"),"a") as f: + f.write(json.dumps(log_stats) + "\n") + + dist.barrier() + + total_time = time.time() - start_time + total_time_str = str(datetime.timedelta(seconds=int(total_time))) + print('Training time {}'.format(total_time_str)) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser() + parser.add_argument('--config', default='./configs/pretrain.yaml') + parser.add_argument('--output_dir', default='output/Pretrain') + parser.add_argument('--checkpoint', default='') + parser.add_argument('--evaluate', action='store_true') + parser.add_argument('--device', default='cuda') + parser.add_argument('--seed', default=42, type=int) + parser.add_argument('--world_size', default=1, type=int, help='number of distributed processes') + parser.add_argument('--dist_url', default='env://', help='url used to set up distributed training') + parser.add_argument('--distributed', default=True, type=bool) + args = parser.parse_args() + + config = yaml.load(open(args.config, 'r'), Loader=yaml.Loader) + + Path(args.output_dir).mkdir(parents=True, exist_ok=True) + + yaml.dump(config, open(os.path.join(args.output_dir, 'config.yaml'), 'w')) + + main(args, config) \ No newline at end of file diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 00000000..1fb1e5fe --- /dev/null +++ b/requirements.txt @@ -0,0 +1,4 @@ +timm==0.4.12 +transformers==4.15.0 +fairscale==0.4.4 +pycocotools \ No newline at end of file diff --git a/train_caption.py b/train_caption.py new file mode 100644 index 00000000..7c639ac6 --- /dev/null +++ b/train_caption.py @@ -0,0 +1,206 @@ +''' + * Copyright (c) 2022, salesforce.com, inc. + * All rights reserved. 
+ * SPDX-License-Identifier: BSD-3-Clause + * For full license text, see LICENSE.txt file in the repo root or https://opensource.org/licenses/BSD-3-Clause + * By Junnan Li +''' +import argparse +import os +import ruamel_yaml as yaml +import numpy as np +import random +import time +import datetime +import json +from pathlib import Path + +import torch +import torch.nn as nn +import torch.nn.functional as F +import torch.backends.cudnn as cudnn +import torch.distributed as dist +from torch.utils.data import DataLoader + +from models.blip import blip_decoder +import utils +from utils import cosine_lr_schedule +from data import create_dataset, create_sampler, create_loader +from data.utils import save_result, coco_caption_eval + +def train(model, data_loader, optimizer, epoch, device): + # train + model.train() + + metric_logger = utils.MetricLogger(delimiter=" ") + metric_logger.add_meter('lr', utils.SmoothedValue(window_size=1, fmt='{value:.6f}')) + metric_logger.add_meter('loss', utils.SmoothedValue(window_size=1, fmt='{value:.4f}')) + header = 'Train Caption Epoch: [{}]'.format(epoch) + print_freq = 50 + + for i, (image, caption, _) in enumerate(metric_logger.log_every(data_loader, print_freq, header)): + image = image.to(device) + + loss = model(image, caption) + + optimizer.zero_grad() + loss.backward() + optimizer.step() + + metric_logger.update(loss=loss.item()) + metric_logger.update(lr=optimizer.param_groups[0]["lr"]) + + # gather the stats from all processes + metric_logger.synchronize_between_processes() + print("Averaged stats:", metric_logger.global_avg()) + return {k: "{:.3f}".format(meter.global_avg) for k, meter in metric_logger.meters.items()} + + +@torch.no_grad() +def evaluate(model, data_loader, device, config): + # evaluate + model.eval() + + metric_logger = utils.MetricLogger(delimiter=" ") + header = 'Caption generation:' + print_freq = 10 + + result = [] + for image, image_id in metric_logger.log_every(data_loader, print_freq, header): + + image = image.to(device) + + captions = model.generate(image, sample=False, num_beams=config['num_beams'], max_length=config['max_length'], + min_length=config['min_length']) + + for caption, img_id in zip(captions, image_id): + result.append({"image_id": img_id.item(), "caption": caption}) + + return result + + +def main(args, config): + utils.init_distributed_mode(args) + + device = torch.device(args.device) + + # fix the seed for reproducibility + seed = args.seed + utils.get_rank() + torch.manual_seed(seed) + np.random.seed(seed) + random.seed(seed) + cudnn.benchmark = True + + #### Dataset #### + print("Creating captioning dataset") + train_dataset, val_dataset, test_dataset = create_dataset('caption_coco', config) + + if args.distributed: + num_tasks = utils.get_world_size() + global_rank = utils.get_rank() + samplers = create_sampler([train_dataset,val_dataset,test_dataset], [True,False,False], num_tasks, global_rank) + else: + samplers = [None, None, None] + + train_loader, val_loader, test_loader = create_loader([train_dataset, val_dataset, test_dataset],samplers, + batch_size=[config['batch_size']]*3,num_workers=[4,4,4], + is_trains=[True, False, False], collate_fns=[None,None,None]) + + #### Model #### + print("Creating model") + model = blip_decoder(pretrained=config['pretrained'], image_size=config['image_size'], vit=config['vit'], + vit_grad_ckpt=config['vit_grad_ckpt'], vit_ckpt_layer=config['vit_ckpt_layer'], + prompt=config['prompt']) + + model = model.to(device) + + model_without_ddp = model + if args.distributed: + 
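+ # wrap the model for multi-GPU training; `model_without_ddp` keeps a handle on
+ # the bare module so evaluation can call generate() and saved checkpoints are
+ # free of the DDP 'module.' prefix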
model = torch.nn.parallel.DistributedDataParallel(model, device_ids=[args.gpu]) + model_without_ddp = model.module + + optimizer = torch.optim.AdamW(params=model.parameters(), lr=config['init_lr'], weight_decay=config['weight_decay']) + + best = 0 + best_epoch = 0 + + print("Start training") + start_time = time.time() + for epoch in range(0, config['max_epoch']): + if not args.evaluate: + if args.distributed: + train_loader.sampler.set_epoch(epoch) + + cosine_lr_schedule(optimizer, epoch, config['max_epoch'], config['init_lr'], config['min_lr']) + + train_stats = train(model, train_loader, optimizer, epoch, device) + + val_result = evaluate(model_without_ddp, val_loader, device, config) + val_result_file = save_result(val_result, args.result_dir, 'val_epoch%d'%epoch, remove_duplicate='image_id') + + test_result = evaluate(model_without_ddp, test_loader, device, config) + test_result_file = save_result(test_result, args.result_dir, 'test_epoch%d'%epoch, remove_duplicate='image_id') + + if utils.is_main_process(): + coco_val = coco_caption_eval(config['coco_gt_root'],val_result_file,'val') + coco_test = coco_caption_eval(config['coco_gt_root'],test_result_file,'test') + + if args.evaluate: + log_stats = {**{f'val_{k}': v for k, v in coco_val.eval.items()}, + **{f'test_{k}': v for k, v in coco_test.eval.items()}, + } + with open(os.path.join(args.output_dir, "evaluate.txt"),"a") as f: + f.write(json.dumps(log_stats) + "\n") + else: + save_obj = { + 'model': model_without_ddp.state_dict(), + 'optimizer': optimizer.state_dict(), + 'config': config, + 'epoch': epoch, + } + + if coco_val.eval['CIDEr'] + coco_val.eval['Bleu_4'] > best: + best = coco_val.eval['CIDEr'] + coco_val.eval['Bleu_4'] + best_epoch = epoch + torch.save(save_obj, os.path.join(args.output_dir, 'checkpoint_best.pth')) + + log_stats = {**{f'train_{k}': v for k, v in train_stats.items()}, + **{f'val_{k}': v for k, v in coco_val.eval.items()}, + **{f'test_{k}': v for k, v in coco_test.eval.items()}, + 'epoch': epoch, + 'best_epoch': best_epoch, + } + with open(os.path.join(args.output_dir, "log.txt"),"a") as f: + f.write(json.dumps(log_stats) + "\n") + + if args.evaluate: + break + dist.barrier() + + total_time = time.time() - start_time + total_time_str = str(datetime.timedelta(seconds=int(total_time))) + print('Training time {}'.format(total_time_str)) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser() + parser.add_argument('--config', default='./configs/caption_coco.yaml') + parser.add_argument('--output_dir', default='output/Caption_coco') + parser.add_argument('--evaluate', action='store_true') + parser.add_argument('--device', default='cuda') + parser.add_argument('--seed', default=42, type=int) + parser.add_argument('--world_size', default=1, type=int, help='number of distributed processes') + parser.add_argument('--dist_url', default='env://', help='url used to set up distributed training') + parser.add_argument('--distributed', default=True, type=bool) + args = parser.parse_args() + + config = yaml.load(open(args.config, 'r'), Loader=yaml.Loader) + + args.result_dir = os.path.join(args.output_dir, 'result') + + Path(args.output_dir).mkdir(parents=True, exist_ok=True) + Path(args.result_dir).mkdir(parents=True, exist_ok=True) + + yaml.dump(config, open(os.path.join(args.output_dir, 'config.yaml'), 'w')) + + main(args, config) \ No newline at end of file diff --git a/train_nlvr.py b/train_nlvr.py new file mode 100644 index 00000000..84b247bd --- /dev/null +++ b/train_nlvr.py @@ -0,0 +1,213 @@ +''' + * 
Copyright (c) 2022, salesforce.com, inc. + * All rights reserved. + * SPDX-License-Identifier: BSD-3-Clause + * For full license text, see LICENSE.txt file in the repo root or https://opensource.org/licenses/BSD-3-Clause + * By Junnan Li +''' +import argparse +import os +import ruamel_yaml as yaml +import numpy as np +import random +import time +import datetime +import json +from pathlib import Path +import json +import pickle + +import torch +import torch.nn as nn +import torch.nn.functional as F +from torch.utils.data import DataLoader +import torch.backends.cudnn as cudnn +import torch.distributed as dist + +from models.blip_nlvr import blip_nlvr + +import utils +from utils import cosine_lr_schedule, warmup_lr_schedule +from data import create_dataset, create_sampler, create_loader + +def train(model, data_loader, optimizer, epoch, device, config): + # train + model.train() + + metric_logger = utils.MetricLogger(delimiter=" ") + metric_logger.add_meter('lr', utils.SmoothedValue(window_size=50, fmt='{value:.6f}')) + metric_logger.add_meter('loss', utils.SmoothedValue(window_size=50, fmt='{value:.4f}')) + + header = 'Train Epoch: [{}]'.format(epoch) + print_freq = 50 + step_size = 10 + + for i,(image0, image1, text, targets) in enumerate(metric_logger.log_every(data_loader, print_freq, header)): + + images = torch.cat([image0, image1], dim=0) + images, targets = images.to(device), targets.to(device) + + loss = model(images, text, targets=targets, train=True) + + optimizer.zero_grad() + loss.backward() + optimizer.step() + + metric_logger.update(lr=optimizer.param_groups[0]["lr"]) + metric_logger.update(loss=loss.item()) + + # gather the stats from all processes + metric_logger.synchronize_between_processes() + print("Averaged stats:", metric_logger.global_avg()) + return {k: "{:.4f}".format(meter.global_avg) for k, meter in metric_logger.meters.items()} + + +@torch.no_grad() +def evaluate(model, data_loader, device, config): + # test + model.eval() + + metric_logger = utils.MetricLogger(delimiter=" ") + + header = 'Evaluation:' + print_freq = 50 + + for image0, image1, text, targets in metric_logger.log_every(data_loader, print_freq, header): + images = torch.cat([image0, image1], dim=0) + images, targets = images.to(device), targets.to(device) + + prediction = model(images, text, targets=targets, train=False) + + _, pred_class = prediction.max(1) + accuracy = (targets==pred_class).sum() / targets.size(0) + + metric_logger.meters['acc'].update(accuracy.item(), n=image0.size(0)) + + # gather the stats from all processes + metric_logger.synchronize_between_processes() + + print("Averaged stats:", metric_logger.global_avg()) + return {k: "{:.4f}".format(meter.global_avg) for k, meter in metric_logger.meters.items()} + + + +def main(args, config): + utils.init_distributed_mode(args) + + device = torch.device(args.device) + + # fix the seed for reproducibility + seed = args.seed + utils.get_rank() + torch.manual_seed(seed) + np.random.seed(seed) + random.seed(seed) + cudnn.benchmark = True + + #### Dataset #### + print("Creating dataset") + datasets = create_dataset('nlvr', config) + + if args.distributed: + num_tasks = utils.get_world_size() + global_rank = utils.get_rank() + samplers = create_sampler(datasets, [True,False,False], num_tasks, global_rank) + else: + samplers = [None, None, None] + + batch_size=[config['batch_size_train'],config['batch_size_test'],config['batch_size_test']] + train_loader, val_loader, test_loader = create_loader(datasets,samplers,batch_size=batch_size, + 
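+ # NLVR2 supplies two images per example; train()/evaluate() above concatenate
+ # them along the batch dimension before the forward pass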
num_workers=[4,4,4],is_trains=[True,False,False], + collate_fns=[None,None,None]) + + #### Model #### + print("Creating model") + model = blip_nlvr(pretrained=config['pretrained'], image_size=config['image_size'], + vit=config['vit'], vit_grad_ckpt=config['vit_grad_ckpt'], vit_ckpt_layer=config['vit_ckpt_layer']) + + model = model.to(device) + + model_without_ddp = model + if args.distributed: + model = torch.nn.parallel.DistributedDataParallel(model, device_ids=[args.gpu]) + model_without_ddp = model.module + + optimizer = torch.optim.AdamW(params=model.parameters(), lr=config['init_lr'], weight_decay=config['weight_decay']) + + print("Start training") + start_time = time.time() + best = 0 + best_epoch = 0 + + for epoch in range(0, config['max_epoch']): + if not args.evaluate: + if args.distributed: + train_loader.sampler.set_epoch(epoch) + + cosine_lr_schedule(optimizer, epoch, config['max_epoch'], config['init_lr'], config['min_lr']) + + train_stats = train(model, train_loader, optimizer, epoch, device, config) + + val_stats = evaluate(model, val_loader, device, config) + test_stats = evaluate(model, test_loader, device, config) + + if utils.is_main_process(): + if args.evaluate: + log_stats = {**{f'val_{k}': v for k, v in val_stats.items()}, + **{f'test_{k}': v for k, v in test_stats.items()}, + } + with open(os.path.join(args.output_dir, "log.txt"),"a") as f: + f.write(json.dumps(log_stats) + "\n") + + else: + log_stats = {**{f'train_{k}': v for k, v in train_stats.items()}, + **{f'val_{k}': v for k, v in val_stats.items()}, + **{f'test_{k}': v for k, v in test_stats.items()}, + 'epoch': epoch, + } + + if float(val_stats['acc'])>best: + save_obj = { + 'model': model_without_ddp.state_dict(), + 'optimizer': optimizer.state_dict(), + 'config': config, + 'epoch': epoch, + } + torch.save(save_obj, os.path.join(args.output_dir, 'checkpoint_best.pth')) + best = float(val_stats['acc']) + best_epoch = epoch + + with open(os.path.join(args.output_dir, "log.txt"),"a") as f: + f.write(json.dumps(log_stats) + "\n") + if args.evaluate: + break + + dist.barrier() + + if utils.is_main_process(): + with open(os.path.join(args.output_dir, "log.txt"),"a") as f: + f.write("best epoch: %d"%best_epoch) + + total_time = time.time() - start_time + total_time_str = str(datetime.timedelta(seconds=int(total_time))) + print('Training time {}'.format(total_time_str)) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser() + parser.add_argument('--config', default='./configs/nlvr.yaml') + parser.add_argument('--output_dir', default='output/NLVR') + parser.add_argument('--evaluate', action='store_true') + parser.add_argument('--device', default='cuda') + parser.add_argument('--seed', default=42, type=int) + parser.add_argument('--world_size', default=1, type=int, help='number of distributed processes') + parser.add_argument('--dist_url', default='env://', help='url used to set up distributed training') + parser.add_argument('--distributed', default=True, type=bool) + args = parser.parse_args() + + config = yaml.load(open(args.config, 'r'), Loader=yaml.Loader) + + Path(args.output_dir).mkdir(parents=True, exist_ok=True) + + yaml.dump(config, open(os.path.join(args.output_dir, 'config.yaml'), 'w')) + + main(args, config) \ No newline at end of file diff --git a/train_retrieval.py b/train_retrieval.py new file mode 100644 index 00000000..574f0338 --- /dev/null +++ b/train_retrieval.py @@ -0,0 +1,345 @@ +''' + * Copyright (c) 2022, salesforce.com, inc. + * All rights reserved. 
+ * SPDX-License-Identifier: BSD-3-Clause + * For full license text, see LICENSE.txt file in the repo root or https://opensource.org/licenses/BSD-3-Clause + * By Junnan Li +''' +import argparse +import os +import ruamel_yaml as yaml +import numpy as np +import random +import time +import datetime +import json +from pathlib import Path + +import torch +import torch.nn as nn +import torch.nn.functional as F +import torch.backends.cudnn as cudnn +import torch.distributed as dist +from torch.utils.data import DataLoader + +from models.blip_retrieval import blip_retrieval +import utils +from utils import cosine_lr_schedule +from data import create_dataset, create_sampler, create_loader + + +def train(model, data_loader, optimizer, epoch, device, config): + # train + model.train() + + metric_logger = utils.MetricLogger(delimiter=" ") + metric_logger.add_meter('lr', utils.SmoothedValue(window_size=1, fmt='{value:.6f}')) + metric_logger.add_meter('loss_itm', utils.SmoothedValue(window_size=1, fmt='{value:.4f}')) + metric_logger.add_meter('loss_ita', utils.SmoothedValue(window_size=1, fmt='{value:.4f}')) + header = 'Train Epoch: [{}]'.format(epoch) + print_freq = 50 + + for i,(image, caption, idx) in enumerate(metric_logger.log_every(data_loader, print_freq, header)): + image = image.to(device,non_blocking=True) + idx = idx.to(device,non_blocking=True) + + if epoch>0: + alpha = config['alpha'] + else: + alpha = config['alpha']*min(1,i/len(data_loader)) + + loss_ita, loss_itm = model(image, caption, alpha=alpha, idx=idx) + loss = loss_ita + loss_itm + + optimizer.zero_grad() + loss.backward() + optimizer.step() + + metric_logger.update(loss_itm=loss_itm.item()) + metric_logger.update(loss_ita=loss_ita.item()) + metric_logger.update(lr=optimizer.param_groups[0]["lr"]) + + # gather the stats from all processes + metric_logger.synchronize_between_processes() + print("Averaged stats:", metric_logger.global_avg()) + return {k: "{:.3f}".format(meter.global_avg) for k, meter in metric_logger.meters.items()} + + +@torch.no_grad() +def evaluation(model, data_loader, device, config): + # test + model.eval() + + metric_logger = utils.MetricLogger(delimiter=" ") + header = 'Evaluation:' + + print('Computing features for evaluation...') + start_time = time.time() + + texts = data_loader.dataset.text + num_text = len(texts) + text_bs = 256 + text_ids = [] + text_embeds = [] + text_atts = [] + for i in range(0, num_text, text_bs): + text = texts[i: min(num_text, i+text_bs)] + text_input = model.tokenizer(text, padding='max_length', truncation=True, max_length=35, return_tensors="pt").to(device) + text_output = model.text_encoder(text_input.input_ids, attention_mask = text_input.attention_mask, mode='text') + text_embed = F.normalize(model.text_proj(text_output.last_hidden_state[:,0,:])) + text_embeds.append(text_embed) + text_ids.append(text_input.input_ids) + text_atts.append(text_input.attention_mask) + + text_embeds = torch.cat(text_embeds,dim=0) + text_ids = torch.cat(text_ids,dim=0) + text_atts = torch.cat(text_atts,dim=0) + text_ids[:,0] = model.tokenizer.enc_token_id + + image_feats = [] + image_embeds = [] + for image, img_id in data_loader: + image = image.to(device) + image_feat = model.visual_encoder(image) + image_embed = model.vision_proj(image_feat[:,0,:]) + image_embed = F.normalize(image_embed,dim=-1) + + image_feats.append(image_feat.cpu()) + image_embeds.append(image_embed) + + image_feats = torch.cat(image_feats,dim=0) + image_embeds = torch.cat(image_embeds,dim=0) + + sims_matrix = image_embeds 
@ text_embeds.t() + score_matrix_i2t = torch.full((len(data_loader.dataset.image),len(texts)),-100.0).to(device) + + num_tasks = utils.get_world_size() + rank = utils.get_rank() + step = sims_matrix.size(0)//num_tasks + 1 + start = rank*step + end = min(sims_matrix.size(0),start+step) + + for i,sims in enumerate(metric_logger.log_every(sims_matrix[start:end], 50, header)): + topk_sim, topk_idx = sims.topk(k=config['k_test'], dim=0) + + encoder_output = image_feats[start+i].repeat(config['k_test'],1,1).to(device) + encoder_att = torch.ones(encoder_output.size()[:-1],dtype=torch.long).to(device) + output = model.text_encoder(text_ids[topk_idx], + attention_mask = text_atts[topk_idx], + encoder_hidden_states = encoder_output, + encoder_attention_mask = encoder_att, + return_dict = True, + ) + score = model.itm_head(output.last_hidden_state[:,0,:])[:,1] + score_matrix_i2t[start+i,topk_idx] = score + topk_sim + + sims_matrix = sims_matrix.t() + score_matrix_t2i = torch.full((len(texts),len(data_loader.dataset.image)),-100.0).to(device) + + step = sims_matrix.size(0)//num_tasks + 1 + start = rank*step + end = min(sims_matrix.size(0),start+step) + + for i,sims in enumerate(metric_logger.log_every(sims_matrix[start:end], 50, header)): + + topk_sim, topk_idx = sims.topk(k=config['k_test'], dim=0) + encoder_output = image_feats[topk_idx].to(device) + encoder_att = torch.ones(encoder_output.size()[:-1],dtype=torch.long).to(device) + output = model.text_encoder(text_ids[start+i].repeat(config['k_test'],1), + attention_mask = text_atts[start+i].repeat(config['k_test'],1), + encoder_hidden_states = encoder_output, + encoder_attention_mask = encoder_att, + return_dict = True, + ) + score = model.itm_head(output.last_hidden_state[:,0,:])[:,1] + score_matrix_t2i[start+i,topk_idx] = score + topk_sim + + if args.distributed: + dist.barrier() + torch.distributed.all_reduce(score_matrix_i2t, op=torch.distributed.ReduceOp.SUM) + torch.distributed.all_reduce(score_matrix_t2i, op=torch.distributed.ReduceOp.SUM) + + total_time = time.time() - start_time + total_time_str = str(datetime.timedelta(seconds=int(total_time))) + print('Evaluation time {}'.format(total_time_str)) + + return score_matrix_i2t.cpu().numpy(), score_matrix_t2i.cpu().numpy() + + + +@torch.no_grad() +def itm_eval(scores_i2t, scores_t2i, txt2img, img2txt): + + #Images->Text + ranks = np.zeros(scores_i2t.shape[0]) + for index,score in enumerate(scores_i2t): + inds = np.argsort(score)[::-1] + # Score + rank = 1e20 + for i in img2txt[index]: + tmp = np.where(inds == i)[0][0] + if tmp < rank: + rank = tmp + ranks[index] = rank + + # Compute metrics + tr1 = 100.0 * len(np.where(ranks < 1)[0]) / len(ranks) + tr5 = 100.0 * len(np.where(ranks < 5)[0]) / len(ranks) + tr10 = 100.0 * len(np.where(ranks < 10)[0]) / len(ranks) + + #Text->Images + ranks = np.zeros(scores_t2i.shape[0]) + + for index,score in enumerate(scores_t2i): + inds = np.argsort(score)[::-1] + ranks[index] = np.where(inds == txt2img[index])[0][0] + + # Compute metrics + ir1 = 100.0 * len(np.where(ranks < 1)[0]) / len(ranks) + ir5 = 100.0 * len(np.where(ranks < 5)[0]) / len(ranks) + ir10 = 100.0 * len(np.where(ranks < 10)[0]) / len(ranks) + + tr_mean = (tr1 + tr5 + tr10) / 3 + ir_mean = (ir1 + ir5 + ir10) / 3 + r_mean = (tr_mean + ir_mean) / 2 + + eval_result = {'txt_r1': tr1, + 'txt_r5': tr5, + 'txt_r10': tr10, + 'txt_r_mean': tr_mean, + 'img_r1': ir1, + 'img_r5': ir5, + 'img_r10': ir10, + 'img_r_mean': ir_mean, + 'r_mean': r_mean} + return eval_result + + +def main(args, config): + 
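+ """Retrieval fine-tuning/evaluation: ITC similarity between image and text
+ embeddings picks the top-`k_test` candidates, evaluation() re-scores them with
+ the ITM head, and itm_eval() reports Recall@{1,5,10} in both directions."""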
utils.init_distributed_mode(args) + + device = torch.device(args.device) + + # fix the seed for reproducibility + seed = args.seed + utils.get_rank() + torch.manual_seed(seed) + np.random.seed(seed) + random.seed(seed) + cudnn.benchmark = True + + #### Dataset #### + print("Creating retrieval dataset") + train_dataset, val_dataset, test_dataset = create_dataset('retrieval_%s'%config['dataset'], config) + + if args.distributed: + num_tasks = utils.get_world_size() + global_rank = utils.get_rank() + samplers = create_sampler([train_dataset], [True], num_tasks, global_rank) + [None, None] + else: + samplers = [None, None, None] + + train_loader, val_loader, test_loader = create_loader([train_dataset, val_dataset, test_dataset],samplers, + batch_size=[config['batch_size_train']]+[config['batch_size_test']]*2, + num_workers=[4,4,4], + is_trains=[True, False, False], + collate_fns=[None,None,None]) + + + #### Model #### + print("Creating model") + model = blip_retrieval(pretrained=config['pretrained'], image_size=config['image_size'], vit=config['vit'], + vit_grad_ckpt=config['vit_grad_ckpt'], vit_ckpt_layer=config['vit_ckpt_layer'], + queue_size=config['queue_size'], negative_all_rank=config['negative_all_rank']) + + model = model.to(device) + + model_without_ddp = model + if args.distributed: + model = torch.nn.parallel.DistributedDataParallel(model, device_ids=[args.gpu]) + model_without_ddp = model.module + + optimizer = torch.optim.AdamW(params=model.parameters(), lr=config['init_lr'], weight_decay=config['weight_decay']) + + best = 0 + best_epoch = 0 + + print("Start training") + start_time = time.time() + + for epoch in range(0, config['max_epoch']): + if not args.evaluate: + if args.distributed: + train_loader.sampler.set_epoch(epoch) + + cosine_lr_schedule(optimizer, epoch, config['max_epoch'], config['init_lr'], config['min_lr']) + + train_stats = train(model, train_loader, optimizer, epoch, device, config) + + score_val_i2t, score_val_t2i, = evaluation(model_without_ddp, val_loader, device, config) + score_test_i2t, score_test_t2i = evaluation(model_without_ddp, test_loader, device, config) + + if utils.is_main_process(): + + val_result = itm_eval(score_val_i2t, score_val_t2i, val_loader.dataset.txt2img, val_loader.dataset.img2txt) + print(val_result) + + if val_result['r_mean']>best: + save_obj = { + 'model': model_without_ddp.state_dict(), + 'optimizer': optimizer.state_dict(), + 'config': config, + 'epoch': epoch, + } + torch.save(save_obj, os.path.join(args.output_dir, 'checkpoint_best.pth')) + best = val_result['r_mean'] + best_epoch = epoch + + test_result = itm_eval(score_test_i2t, score_test_t2i, test_loader.dataset.txt2img, test_loader.dataset.img2txt) + print(test_result) + + if args.evaluate: + log_stats = {**{f'val_{k}': v for k, v in val_result.items()}, + **{f'test_{k}': v for k, v in test_result.items()}, + } + with open(os.path.join(args.output_dir, "evaluate.txt"),"a") as f: + f.write(json.dumps(log_stats) + "\n") + else: + log_stats = {**{f'train_{k}': v for k, v in train_stats.items()}, + **{f'val_{k}': v for k, v in val_result.items()}, + **{f'test_{k}': v for k, v in test_result.items()}, + 'epoch': epoch, + 'best_epoch': best_epoch, + } + with open(os.path.join(args.output_dir, "log.txt"),"a") as f: + f.write(json.dumps(log_stats) + "\n") + + if args.evaluate: + break + + dist.barrier() + torch.cuda.empty_cache() + + total_time = time.time() - start_time + total_time_str = str(datetime.timedelta(seconds=int(total_time))) + print('Training time 
+
+
+if __name__ == '__main__':
+    parser = argparse.ArgumentParser()
+    parser.add_argument('--config', default='./configs/retrieval_flickr.yaml')
+    parser.add_argument('--output_dir', default='output/Retrieval_flickr')
+    parser.add_argument('--evaluate', action='store_true')
+    parser.add_argument('--device', default='cuda')
+    parser.add_argument('--seed', default=42, type=int)
+    parser.add_argument('--world_size', default=1, type=int, help='number of distributed processes')
+    parser.add_argument('--dist_url', default='env://', help='url used to set up distributed training')
+    # argparse's type=bool treats any non-empty string (even 'False') as True, so parse the flag explicitly
+    parser.add_argument('--distributed', default=True, type=lambda s: str(s).lower() in ('true', '1'))
+    args = parser.parse_args()
+
+    config = yaml.load(open(args.config, 'r'), Loader=yaml.Loader)
+
+    Path(args.output_dir).mkdir(parents=True, exist_ok=True)
+
+    yaml.dump(config, open(os.path.join(args.output_dir, 'config.yaml'), 'w'))
+
+    main(args, config)
\ No newline at end of file
diff --git a/train_vqa.py b/train_vqa.py
new file mode 100644
index 00000000..89eb7490
--- /dev/null
+++ b/train_vqa.py
@@ -0,0 +1,202 @@
+'''
+ * Copyright (c) 2022, salesforce.com, inc.
+ * All rights reserved.
+ * SPDX-License-Identifier: BSD-3-Clause
+ * For full license text, see LICENSE.txt file in the repo root or https://opensource.org/licenses/BSD-3-Clause
+ * By Junnan Li
+'''
+import argparse
+import os
+import ruamel_yaml as yaml
+import numpy as np
+import random
+import time
+import datetime
+import json
+from pathlib import Path
+
+import torch
+import torch.nn as nn
+import torch.nn.functional as F
+from torch.utils.data import DataLoader
+import torch.backends.cudnn as cudnn
+import torch.distributed as dist
+
+from models.blip_vqa import blip_vqa
+import utils
+from utils import cosine_lr_schedule
+from data import create_dataset, create_sampler, create_loader
+from data.vqa_dataset import vqa_collate_fn
+from data.utils import save_result
+
+
+def train(model, data_loader, optimizer, epoch, device):
+    # train
+    model.train()
+
+    metric_logger = utils.MetricLogger(delimiter="  ")
+    metric_logger.add_meter('lr', utils.SmoothedValue(window_size=1, fmt='{value:.6f}'))
+    metric_logger.add_meter('loss', utils.SmoothedValue(window_size=1, fmt='{value:.4f}'))
+
+    header = 'Train Epoch: [{}]'.format(epoch)
+    print_freq = 50
+
+    for i,(image, question, answer, weights, n) in enumerate(metric_logger.log_every(data_loader, print_freq, header)):
+        image, weights = image.to(device,non_blocking=True), weights.to(device,non_blocking=True)
+
+        loss = model(image, question, answer, train=True, n=n, weights=weights)
+
+        optimizer.zero_grad()
+        loss.backward()
+        optimizer.step()
+
+        metric_logger.update(loss=loss.item())
+        metric_logger.update(lr=optimizer.param_groups[0]["lr"])
+
+    # gather the stats from all processes
+    metric_logger.synchronize_between_processes()
+    print("Averaged stats:", metric_logger.global_avg())
+    return {k: "{:.3f}".format(meter.global_avg) for k, meter in metric_logger.meters.items()}
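The (image, question, answer, weights, n) batch comes from vqa_collate_fn. Assuming the usual ALBEF-style layout (the collate function itself is not shown in this patch), each question carries several annotated answers, flattened over the batch:

    # illustrative batch, assumed layout
    question = ['what color is the car?', 'how many dogs?']
    answer   = ['red', 'maroon', 'two']        # all answers, flattened
    n        = [2, 1]                          # answers per question
    weights  = torch.tensor([0.7, 0.3, 1.0])   # soft label weight per answer

so the model can weight the loss contribution of each candidate answer accordingly.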
+
+
+@torch.no_grad()
+def evaluation(model, data_loader, device, config):
+    # test
+    model.eval()
+
+    metric_logger = utils.MetricLogger(delimiter="  ")
+    header = 'Generate VQA test result:'
+    print_freq = 50
+
+    result = []
+
+    if config['inference']=='rank':
+        answer_list = data_loader.dataset.answer_list
+        answer_candidates = model.tokenizer(answer_list, padding='longest', return_tensors='pt').to(device)
+        # replace the first token with BOS so the candidates can be scored by the answer decoder
+        answer_candidates.input_ids[:,0] = model.tokenizer.bos_token_id
+
+    for n, (image, question, question_id) in enumerate(metric_logger.log_every(data_loader, print_freq, header)):
+        image = image.to(device,non_blocking=True)
+
+        if config['inference']=='generate':
+            answers = model(image, question, train=False, inference='generate')
+
+            for answer, ques_id in zip(answers, question_id):
+                ques_id = int(ques_id.item())
+                result.append({"question_id":ques_id, "answer":answer})
+
+        elif config['inference']=='rank':
+            answer_ids = model(image, question, answer_candidates, train=False, inference='rank', k_test=config['k_test'])
+
+            for ques_id, answer_id in zip(question_id, answer_ids):
+                result.append({"question_id":int(ques_id.item()), "answer":answer_list[answer_id]})
+
+    return result
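Both inference modes produce the same record format: 'generate' decodes a free-form answer per question, while 'rank' selects the best entry of the dataset's fixed answer_list (shortlisting k_test candidates inside the model's rank inference). The result is a list such as

    [{"question_id": 262148000, "answer": "yes"}, ...]   # illustrative values

which save_result() writes to args.result_dir, merging per-rank files on the main process, assuming the usual pattern in data/utils.py.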
+
+
+def main(args, config):
+    utils.init_distributed_mode(args)
+
+    device = torch.device(args.device)
+
+    # fix the seed for reproducibility
+    seed = args.seed + utils.get_rank()
+    torch.manual_seed(seed)
+    np.random.seed(seed)
+    random.seed(seed)
+    cudnn.benchmark = True
+
+    #### Dataset ####
+    print("Creating vqa datasets")
+    datasets = create_dataset('vqa', config)
+
+    if args.distributed:
+        num_tasks = utils.get_world_size()
+        global_rank = utils.get_rank()
+        samplers = create_sampler(datasets, [True, False], num_tasks, global_rank)
+    else:
+        samplers = [None, None]
+
+    train_loader, test_loader = create_loader(datasets,samplers,
+                                              batch_size=[config['batch_size_train'],config['batch_size_test']],
+                                              num_workers=[4,4],is_trains=[True, False],
+                                              collate_fns=[vqa_collate_fn,None])
+
+    #### Model ####
+    print("Creating model")
+    model = blip_vqa(pretrained=config['pretrained'], image_size=config['image_size'],
+                     vit=config['vit'], vit_grad_ckpt=config['vit_grad_ckpt'], vit_ckpt_layer=config['vit_ckpt_layer'])
+
+    model = model.to(device)
+
+    model_without_ddp = model
+    if args.distributed:
+        model = torch.nn.parallel.DistributedDataParallel(model, device_ids=[args.gpu])
+        model_without_ddp = model.module
+
+    optimizer = torch.optim.AdamW(params=model.parameters(), lr=config['init_lr'], weight_decay=config['weight_decay'])
+
+    best = 0
+    best_epoch = 0
+
+    print("Start training")
+    start_time = time.time()
+    for epoch in range(0, config['max_epoch']):
+        if not args.evaluate:
+            if args.distributed:
+                train_loader.sampler.set_epoch(epoch)
+
+            cosine_lr_schedule(optimizer, epoch, config['max_epoch'], config['init_lr'], config['min_lr'])
+
+            train_stats = train(model, train_loader, optimizer, epoch, device)
+
+        else:
+            break
+
+        if utils.is_main_process():
+            log_stats = {**{f'train_{k}': v for k, v in train_stats.items()},
+                         'epoch': epoch,
+                        }
+            with open(os.path.join(args.output_dir, "log.txt"),"a") as f:
+                f.write(json.dumps(log_stats) + "\n")
+
+            save_obj = {
+                'model': model_without_ddp.state_dict(),
+                'optimizer': optimizer.state_dict(),
+                'config': config,
+                'epoch': epoch,
+            }
+            torch.save(save_obj, os.path.join(args.output_dir, 'checkpoint_%02d.pth'%epoch))
+
+        if args.distributed:
+            # barrier needs an initialized process group; skip it for single-GPU runs
+            dist.barrier()
+
+    vqa_result = evaluation(model_without_ddp, test_loader, device, config)
+    result_file = save_result(vqa_result, args.result_dir, 'vqa_result')
+
+    total_time = time.time() - start_time
+    total_time_str = str(datetime.timedelta(seconds=int(total_time)))
+    print('Training time {}'.format(total_time_str))
+
+
+
+if __name__ == '__main__':
+    parser = argparse.ArgumentParser()
+    parser.add_argument('--config', default='./configs/vqa.yaml')
+    parser.add_argument('--output_dir', default='output/VQA')
+    parser.add_argument('--evaluate', action='store_true')
+    parser.add_argument('--device', default='cuda')
+    parser.add_argument('--seed', default=42, type=int)
+    parser.add_argument('--world_size', default=1, type=int, help='number of distributed processes')
+    parser.add_argument('--dist_url', default='env://', help='url used to set up distributed training')
+    # same argparse pitfall as in train_retrieval.py: type=bool would map 'False' to True
+    parser.add_argument('--distributed', default=True, type=lambda s: str(s).lower() in ('true', '1'))
+    args = parser.parse_args()
+
+    config = yaml.load(open(args.config, 'r'), Loader=yaml.Loader)
+
+    args.result_dir = os.path.join(args.output_dir, 'result')
+
+    Path(args.output_dir).mkdir(parents=True, exist_ok=True)
+    Path(args.result_dir).mkdir(parents=True, exist_ok=True)
+
+    yaml.dump(config, open(os.path.join(args.output_dir, 'config.yaml'), 'w'))
+
+    main(args, config)
\ No newline at end of file
diff --git a/transform/randaugment.py b/transform/randaugment.py
new file mode 100644
index 00000000..094d9f4c
--- /dev/null
+++ b/transform/randaugment.py
@@ -0,0 +1,340 @@
+import cv2
+import numpy as np
+
+
+## aug functions
+def identity_func(img):
+    return img
+
+
+def autocontrast_func(img, cutoff=0):
+    '''
+    same output as PIL.ImageOps.autocontrast
+    '''
+    n_bins = 256
+
+    def tune_channel(ch):
+        n = ch.size
+        cut = cutoff * n // 100
+        if cut == 0:
+            high, low = ch.max(), ch.min()
+        else:
+            hist = cv2.calcHist([ch], [0], None, [n_bins], [0, n_bins])
+            low = np.argwhere(np.cumsum(hist) > cut)
+            low = 0 if low.shape[0] == 0 else low[0]
+            high = np.argwhere(np.cumsum(hist[::-1]) > cut)
+            high = n_bins - 1 if high.shape[0] == 0 else n_bins - 1 - high[0]
+        if high <= low:
+            table = np.arange(n_bins)
+        else:
+            scale = (n_bins - 1) / (high - low)
+            offset = -low * scale
+            table = np.arange(n_bins) * scale + offset
+            table[table < 0] = 0
+            table[table > n_bins - 1] = n_bins - 1
+        table = table.clip(0, 255).astype(np.uint8)
+        return table[ch]
+
+    channels = [tune_channel(ch) for ch in cv2.split(img)]
+    out = cv2.merge(channels)
+    return out
+
+
+def equalize_func(img):
+    '''
+    same output as PIL.ImageOps.equalize
+    PIL's implementation is different from cv2.equalizeHist
+    '''
+    n_bins = 256
+
+    def tune_channel(ch):
+        hist = cv2.calcHist([ch], [0], None, [n_bins], [0, n_bins])
+        non_zero_hist = hist[hist != 0].reshape(-1)
+        step = np.sum(non_zero_hist[:-1]) // (n_bins - 1)
+        if step == 0: return ch
+        n = np.empty_like(hist)
+        n[0] = step // 2
+        n[1:] = hist[:-1]
+        table = (np.cumsum(n) // step).clip(0, 255).astype(np.uint8)
+        return table[ch]
+
+    channels = [tune_channel(ch) for ch in cv2.split(img)]
+    out = cv2.merge(channels)
+    return out
+
+
+def rotate_func(img, degree, fill=(0, 0, 0)):
+    '''
+    like PIL, rotate by degrees, not radians
+    '''
+    H, W = img.shape[0], img.shape[1]
+    center = W / 2, H / 2
+    M = cv2.getRotationMatrix2D(center, degree, 1)
+    out = cv2.warpAffine(img, M, (W, H), borderValue=fill)
+    return out
+
+
+def solarize_func(img, thresh=128):
+    '''
+    same output as PIL.ImageOps.solarize
+    '''
+    table = np.array([el if el < thresh else 255 - el for el in range(256)])
+    table = table.clip(0, 255).astype(np.uint8)
+    out = table[img]
+    return out
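These primitives all take and return HWC uint8 arrays, mirroring their PIL counterparts with OpenCV/NumPy. A quick illustration using the functions defined in this file (the input image is synthetic):

    img = np.random.randint(0, 256, (64, 64, 3), dtype=np.uint8)
    out = solarize_func(rotate_func(img, 15), thresh=128)  # rotate 15 degrees, then invert pixels >= 128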
+
+
+def color_func(img, factor):
+    '''
+    same output as PIL.ImageEnhance.Color
+    '''
+    ## implementation according to PIL definition, quite slow
+    # degenerate = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)[:, :, np.newaxis]
+    # out = blend(degenerate, img, factor)
+    # M = (
+    #     np.eye(3) * factor
+    #     + np.float32([0.114, 0.587, 0.299]).reshape(3, 1) * (1. - factor)
+    # )[np.newaxis, np.newaxis, :]
+    M = (
+        np.float32([
+            [0.886, -0.114, -0.114],
+            [-0.587, 0.413, -0.587],
+            [-0.299, -0.299, 0.701]]) * factor
+        + np.float32([[0.114], [0.587], [0.299]])
+    )
+    out = np.matmul(img, M).clip(0, 255).astype(np.uint8)
+    return out
+
+
+def contrast_func(img, factor):
+    """
+    same output as PIL.ImageEnhance.Contrast
+    """
+    mean = np.sum(np.mean(img, axis=(0, 1)) * np.array([0.114, 0.587, 0.299]))
+    table = np.array([(
+        el - mean) * factor + mean
+        for el in range(256)
+    ]).clip(0, 255).astype(np.uint8)
+    out = table[img]
+    return out
+
+
+def brightness_func(img, factor):
+    '''
+    same output as PIL.ImageEnhance.Brightness
+    '''
+    table = (np.arange(256, dtype=np.float32) * factor).clip(0, 255).astype(np.uint8)
+    out = table[img]
+    return out
+
+
+def sharpness_func(img, factor):
+    '''
+    differs from PIL only on the 4 image boundaries; the center area matches
+    '''
+    kernel = np.ones((3, 3), dtype=np.float32)
+    kernel[1][1] = 5
+    kernel /= 13
+    degenerate = cv2.filter2D(img, -1, kernel)
+    if factor == 0.0:
+        out = degenerate
+    elif factor == 1.0:
+        out = img
+    else:
+        out = img.astype(np.float32)
+        degenerate = degenerate.astype(np.float32)[1:-1, 1:-1, :]
+        out[1:-1, 1:-1, :] = degenerate + factor * (out[1:-1, 1:-1, :] - degenerate)
+        out = out.astype(np.uint8)
+    return out
+
+
+def shear_x_func(img, factor, fill=(0, 0, 0)):
+    H, W = img.shape[0], img.shape[1]
+    M = np.float32([[1, factor, 0], [0, 1, 0]])
+    out = cv2.warpAffine(img, M, (W, H), borderValue=fill, flags=cv2.INTER_LINEAR).astype(np.uint8)
+    return out
+
+
+def translate_x_func(img, offset, fill=(0, 0, 0)):
+    '''
+    same output as PIL.Image.transform
+    '''
+    H, W = img.shape[0], img.shape[1]
+    M = np.float32([[1, 0, -offset], [0, 1, 0]])
+    out = cv2.warpAffine(img, M, (W, H), borderValue=fill, flags=cv2.INTER_LINEAR).astype(np.uint8)
+    return out
+
+
+def translate_y_func(img, offset, fill=(0, 0, 0)):
+    '''
+    same output as PIL.Image.transform
+    '''
+    H, W = img.shape[0], img.shape[1]
+    M = np.float32([[1, 0, 0], [0, 1, -offset]])
+    out = cv2.warpAffine(img, M, (W, H), borderValue=fill, flags=cv2.INTER_LINEAR).astype(np.uint8)
+    return out
+
+
+def posterize_func(img, bits):
+    '''
+    same output as PIL.ImageOps.posterize
+    '''
+    out = np.bitwise_and(img, np.uint8(255 << (8 - bits)))
+    return out
+
+
+def shear_y_func(img, factor, fill=(0, 0, 0)):
+    H, W = img.shape[0], img.shape[1]
+    M = np.float32([[1, 0, 0], [factor, 1, 0]])
+    out = cv2.warpAffine(img, M, (W, H), borderValue=fill, flags=cv2.INTER_LINEAR).astype(np.uint8)
+    return out
+
+
+def cutout_func(img, pad_size, replace=(0, 0, 0)):
+    replace = np.array(replace, dtype=np.uint8)
+    H, W = img.shape[0], img.shape[1]
+    rh, rw = np.random.random(2)
+    pad_size = pad_size // 2
+    ch, cw = int(rh * H), int(rw * W)
+    x1, x2 = max(ch - pad_size, 0), min(ch + pad_size, H)
+    y1, y2 = max(cw - pad_size, 0), min(cw + pad_size, W)
+    out = img.copy()
+    out[x1:x2, y1:y2, :] = replace
+    return out
+
+
+### level to args
+def enhance_level_to_args(MAX_LEVEL):
+    def level_to_args(level):
+        return ((level / MAX_LEVEL) * 1.8 + 0.1,)
+    return level_to_args
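Each *_level_to_args factory maps an integer magnitude level in [0, MAX_LEVEL] onto the argument tuple its op expects. For example, for the enhancement ops just above:

    to_args = enhance_level_to_args(10)
    to_args(5)    # -> (1.0,)  since 5/10 * 1.8 + 0.1 == 1.0 (the PIL enhancement factor)
    to_args(10)   # -> (1.9,)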
+
+
+def shear_level_to_args(MAX_LEVEL, replace_value):
+    def level_to_args(level):
+        level = (level / MAX_LEVEL) * 0.3
+        if np.random.random() > 0.5: level = -level
+        return (level, replace_value)
+
+    return level_to_args
+
+
+def translate_level_to_args(translate_const, MAX_LEVEL, replace_value):
+    def level_to_args(level):
+        level = (level / MAX_LEVEL) * float(translate_const)
+        if np.random.random() > 0.5: level = -level
+        return (level, replace_value)
+
+    return level_to_args
+
+
+def cutout_level_to_args(cutout_const, MAX_LEVEL, replace_value):
+    def level_to_args(level):
+        level = int((level / MAX_LEVEL) * cutout_const)
+        return (level, replace_value)
+
+    return level_to_args
+
+
+def solarize_level_to_args(MAX_LEVEL):
+    def level_to_args(level):
+        level = int((level / MAX_LEVEL) * 256)
+        return (level, )
+    return level_to_args
+
+
+def none_level_to_args(level):
+    return ()
+
+
+def posterize_level_to_args(MAX_LEVEL):
+    def level_to_args(level):
+        level = int((level / MAX_LEVEL) * 4)
+        return (level, )
+    return level_to_args
+
+
+def rotate_level_to_args(MAX_LEVEL, replace_value):
+    def level_to_args(level):
+        level = (level / MAX_LEVEL) * 30
+        if np.random.random() < 0.5:
+            level = -level
+        return (level, replace_value)
+
+    return level_to_args
+
+
+func_dict = {
+    'Identity': identity_func,
+    'AutoContrast': autocontrast_func,
+    'Equalize': equalize_func,
+    'Rotate': rotate_func,
+    'Solarize': solarize_func,
+    'Color': color_func,
+    'Contrast': contrast_func,
+    'Brightness': brightness_func,
+    'Sharpness': sharpness_func,
+    'ShearX': shear_x_func,
+    'TranslateX': translate_x_func,
+    'TranslateY': translate_y_func,
+    'Posterize': posterize_func,
+    'ShearY': shear_y_func,
+}
+
+translate_const = 10
+MAX_LEVEL = 10
+replace_value = (128, 128, 128)
+arg_dict = {
+    'Identity': none_level_to_args,
+    'AutoContrast': none_level_to_args,
+    'Equalize': none_level_to_args,
+    'Rotate': rotate_level_to_args(MAX_LEVEL, replace_value),
+    'Solarize': solarize_level_to_args(MAX_LEVEL),
+    'Color': enhance_level_to_args(MAX_LEVEL),
+    'Contrast': enhance_level_to_args(MAX_LEVEL),
+    'Brightness': enhance_level_to_args(MAX_LEVEL),
+    'Sharpness': enhance_level_to_args(MAX_LEVEL),
+    'ShearX': shear_level_to_args(MAX_LEVEL, replace_value),
+    'TranslateX': translate_level_to_args(
+        translate_const, MAX_LEVEL, replace_value
+    ),
+    'TranslateY': translate_level_to_args(
+        translate_const, MAX_LEVEL, replace_value
+    ),
+    'Posterize': posterize_level_to_args(MAX_LEVEL),
+    'ShearY': shear_level_to_args(MAX_LEVEL, replace_value),
+}
+
+
+class RandomAugment(object):
+
+    def __init__(self, N=2, M=10, isPIL=False, augs=None):
+        # avoid a mutable default argument; None (or an empty list) means 'use every op'
+        self.N = N
+        self.M = M
+        self.isPIL = isPIL
+        if augs:
+            self.augs = augs
+        else:
+            self.augs = list(arg_dict.keys())
+
+    def get_random_ops(self):
+        sampled_ops = np.random.choice(self.augs, self.N)
+        return [(op, 0.5, self.M) for op in sampled_ops]
+
+    def __call__(self, img):
+        if self.isPIL:
+            img = np.array(img)
+        ops = self.get_random_ops()
+        for name, prob, level in ops:
+            if np.random.random() > prob:
+                continue
+            args = arg_dict[name](level)
+            img = func_dict[name](img, *args)
+        return img
+
+
+if __name__ == '__main__':
+    a = RandomAugment()
+    # the ops expect an HWC uint8 image, so use random uint8 data for the smoke test
+    img = np.random.randint(0, 256, (32, 32, 3), dtype=np.uint8)
+    a(img)
\ No newline at end of file
diff --git a/utils.py b/utils.py
new file mode 100644
index 00000000..ebe0e1dc
--- /dev/null
+++ b/utils.py
@@ -0,0 +1,278 @@
+import math
+def cosine_lr_schedule(optimizer, epoch, max_epoch, init_lr, min_lr):
+    """Decay the learning rate"""
+    lr = (init_lr - min_lr) * 0.5 * (1. + math.cos(math.pi * epoch / max_epoch)) + min_lr
+    for param_group in optimizer.param_groups:
+        param_group['lr'] = lr
+
+def warmup_lr_schedule(optimizer, step, max_step, init_lr, max_lr):
+    """Warmup the learning rate"""
+    lr = min(max_lr, init_lr + (max_lr - init_lr) * step / max_step)
+    for param_group in optimizer.param_groups:
+        param_group['lr'] = lr
+
+def step_lr_schedule(optimizer, epoch, init_lr, min_lr, decay_rate):
+    """Decay the learning rate"""
+    lr = max(min_lr, init_lr * (decay_rate**epoch))
+    for param_group in optimizer.param_groups:
+        param_group['lr'] = lr
+
+import numpy as np
+import io
+import os
+import time
+from collections import defaultdict, deque
+import datetime
+
+import torch
+import torch.distributed as dist
+
+class SmoothedValue(object):
+    """Track a series of values and provide access to smoothed values over a
+    window or the global series average.
+    """
+
+    def __init__(self, window_size=20, fmt=None):
+        if fmt is None:
+            fmt = "{median:.4f} ({global_avg:.4f})"
+        self.deque = deque(maxlen=window_size)
+        self.total = 0.0
+        self.count = 0
+        self.fmt = fmt
+
+    def update(self, value, n=1):
+        self.deque.append(value)
+        self.count += n
+        self.total += value * n
+
+    def synchronize_between_processes(self):
+        """
+        Warning: does not synchronize the deque!
+        """
+        if not is_dist_avail_and_initialized():
+            return
+        t = torch.tensor([self.count, self.total], dtype=torch.float64, device='cuda')
+        dist.barrier()
+        dist.all_reduce(t)
+        t = t.tolist()
+        self.count = int(t[0])
+        self.total = t[1]
+
+    @property
+    def median(self):
+        d = torch.tensor(list(self.deque))
+        return d.median().item()
+
+    @property
+    def avg(self):
+        d = torch.tensor(list(self.deque), dtype=torch.float32)
+        return d.mean().item()
+
+    @property
+    def global_avg(self):
+        return self.total / self.count
+
+    @property
+    def max(self):
+        return max(self.deque)
+
+    @property
+    def value(self):
+        return self.deque[-1]
+
+    def __str__(self):
+        return self.fmt.format(
+            median=self.median,
+            avg=self.avg,
+            global_avg=self.global_avg,
+            max=self.max,
+            value=self.value)
+
+
+class MetricLogger(object):
+    def __init__(self, delimiter="\t"):
+        self.meters = defaultdict(SmoothedValue)
+        self.delimiter = delimiter
+
+    def update(self, **kwargs):
+        for k, v in kwargs.items():
+            if isinstance(v, torch.Tensor):
+                v = v.item()
+            assert isinstance(v, (float, int))
+            self.meters[k].update(v)
+
+    def __getattr__(self, attr):
+        if attr in self.meters:
+            return self.meters[attr]
+        if attr in self.__dict__:
+            return self.__dict__[attr]
+        raise AttributeError("'{}' object has no attribute '{}'".format(
+            type(self).__name__, attr))
+
+    def __str__(self):
+        loss_str = []
+        for name, meter in self.meters.items():
+            loss_str.append(
+                "{}: {}".format(name, str(meter))
+            )
+        return self.delimiter.join(loss_str)
+
+    def global_avg(self):
+        loss_str = []
+        for name, meter in self.meters.items():
+            loss_str.append(
+                "{}: {:.4f}".format(name, meter.global_avg)
+            )
+        return self.delimiter.join(loss_str)
+
+    def synchronize_between_processes(self):
+        for meter in self.meters.values():
+            meter.synchronize_between_processes()
+
+    def add_meter(self, name, meter):
+        self.meters[name] = meter
+
+    def log_every(self, iterable, print_freq, header=None):
+        i = 0
+        if not header:
+            header = ''
+        start_time = time.time()
+        end = time.time()
+        iter_time = SmoothedValue(fmt='{avg:.4f}')
+        data_time = SmoothedValue(fmt='{avg:.4f}')
+        space_fmt = ':' + str(len(str(len(iterable)))) + 'd'
+        log_msg = [
+            header,
+            '[{0' + space_fmt + '}/{1}]',
+            'eta: {eta}',
+            '{meters}',
+            'time: {time}',
+            'data: {data}'
+        ]
+        if torch.cuda.is_available():
+            log_msg.append('max mem: {memory:.0f}')
+        log_msg = self.delimiter.join(log_msg)
+        MB = 1024.0 * 1024.0
+        for obj in iterable:
+            data_time.update(time.time() - end)
+            yield obj
+            iter_time.update(time.time() - end)
+            if i % print_freq == 0 or i == len(iterable) - 1:
+                eta_seconds = iter_time.global_avg * (len(iterable) - i)
+                eta_string = str(datetime.timedelta(seconds=int(eta_seconds)))
+                if torch.cuda.is_available():
+                    print(log_msg.format(
+                        i, len(iterable), eta=eta_string,
+                        meters=str(self),
+                        time=str(iter_time), data=str(data_time),
+                        memory=torch.cuda.max_memory_allocated() / MB))
+                else:
+                    print(log_msg.format(
+                        i, len(iterable), eta=eta_string,
+                        meters=str(self),
+                        time=str(iter_time), data=str(data_time)))
+            i += 1
+            end = time.time()
+        total_time = time.time() - start_time
+        total_time_str = str(datetime.timedelta(seconds=int(total_time)))
+        print('{} Total time: {} ({:.4f} s / it)'.format(
+            header, total_time_str, total_time / len(iterable)))
+
+
+class AttrDict(dict):
+    def __init__(self, *args, **kwargs):
+        super(AttrDict, self).__init__(*args, **kwargs)
+        self.__dict__ = self
+
+
+def compute_acc(logits, label, reduction='mean'):
+    ret = (torch.argmax(logits, dim=1) == label).float()
+    if reduction == 'none':
+        return ret.detach()
+    elif reduction == 'mean':
+        return ret.mean().item()
+
+def compute_n_params(model, return_str=True):
+    tot = 0
+    for p in model.parameters():
+        w = 1
+        for x in p.shape:
+            w *= x
+        tot += w
+    if return_str:
+        if tot >= 1e6:
+            return '{:.1f}M'.format(tot / 1e6)
+        else:
+            return '{:.1f}K'.format(tot / 1e3)
+    else:
+        return tot

+def setup_for_distributed(is_master):
+    """
+    Disable printing in non-master processes (pass force=True to print anyway)
+    """
+    import builtins as __builtin__
+    builtin_print = __builtin__.print
+
+    def print(*args, **kwargs):
+        force = kwargs.pop('force', False)
+        if is_master or force:
+            builtin_print(*args, **kwargs)
+
+    __builtin__.print = print
+
+
+def is_dist_avail_and_initialized():
+    if not dist.is_available():
+        return False
+    if not dist.is_initialized():
+        return False
+    return True
+
+
+def get_world_size():
+    if not is_dist_avail_and_initialized():
+        return 1
+    return dist.get_world_size()
+
+
+def get_rank():
+    if not is_dist_avail_and_initialized():
+        return 0
+    return dist.get_rank()
+
+
+def is_main_process():
+    return get_rank() == 0
+
+
+def save_on_master(*args, **kwargs):
+    if is_main_process():
+        torch.save(*args, **kwargs)
+
+
+def init_distributed_mode(args):
+    if 'RANK' in os.environ and 'WORLD_SIZE' in os.environ:
+        args.rank = int(os.environ["RANK"])
+        args.world_size = int(os.environ['WORLD_SIZE'])
+        args.gpu = int(os.environ['LOCAL_RANK'])
+    elif 'SLURM_PROCID' in os.environ:
+        args.rank = int(os.environ['SLURM_PROCID'])
+        args.gpu = args.rank % torch.cuda.device_count()
+    else:
+        print('Not using distributed mode')
+        args.distributed = False
+        return
+
+    args.distributed = True
+
+    torch.cuda.set_device(args.gpu)
+    args.dist_backend = 'nccl'
+    print('| distributed init (rank {}, world {}): {}'.format(
+        args.rank, args.world_size, args.dist_url), flush=True)
+    torch.distributed.init_process_group(backend=args.dist_backend, init_method=args.dist_url,
+                                         world_size=args.world_size, rank=args.rank)
+    torch.distributed.barrier()
+    setup_for_distributed(args.rank == 0)
+
+
\ No newline at end of file
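For reference, cosine_lr_schedule above anneals the learning rate along a half-cosine from init_lr down to min_lr over max_epoch epochs. A minimal check, using values from the retrieval config (init_lr=1e-5, min_lr=0, max_epoch=6):

    import torch
    from utils import cosine_lr_schedule

    opt = torch.optim.SGD([torch.nn.Parameter(torch.zeros(1))], lr=1e-5)
    for epoch in range(6):
        cosine_lr_schedule(opt, epoch, max_epoch=6, init_lr=1e-5, min_lr=0)
        print(epoch, opt.param_groups[0]['lr'])  # 1.0e-05 at epoch 0, decaying toward 0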